diff --git a/.gitignore b/.gitignore index fb88fe536d93652413217e4acaf175f334913e32..51980485d06dda6a59fac60350d6e792249e8db7 100644 --- a/.gitignore +++ b/.gitignore @@ -2,81 +2,105 @@ .idea .DS_Store -assembly/target -core/cloudModule/target/ -core/cloudMybatis/target/ -core/cloudProtocol/target/ -core/cloudRPC/target/ -core/common/target/ -core/httpclient/target/ -core/scheduler/target/ -eurekaServer/target/ -gateway/core/target/ -gateway/gateway-httpclient-support/target/ -gateway/gateway-ujes-support/target/ -gateway/springcloudgateway/target/ - - -metadata/target/ -publicService/application/target/ -publicService/configuration/target/ -publicService/jobhistory/target/ -publicService/target/ -publicService/udf/target/ -publicService/variable/target/ -publicService/workspace/target/ -resourceManager/resourcemanagerclient/target/ -resourceManager/resourcemanagercommon/target/ -resourceManager/resourcemanagerserver/target/ -storage/storage/target/ - -contextservice/cs-cache/target -contextservice/cs-client/target -contextservice/cs-common/target -contextservice/cs-highavailable/target -contextservice/cs-listener/target -contextservice/cs-persistence/target -contextservice/cs-search/target -contextservice/cs-server/target -contextservice/cs-ujes-client/target - -datasource/datasourcemanager/common/target -datasource/datasourcemanager/server/target -datasource/metadatamanager/common/target -datasource/metadatamanager/server/target -datasource/metadatamanager/service/elasticsearch/target -datasource/metadatamanager/service/hive/target -datasource/metadatamanager/service/mysql/target - - - -ujes/client/target/ -ujes/definedEngines/hive/engine/target/ -ujes/definedEngines/hive/enginemanager/target/ -ujes/definedEngines/hive/entrance/target/ -ujes/definedEngines/jdbc/entrance/target/ -ujes/definedEngines/pipeline/engine/target/ -ujes/definedEngines/pipeline/enginemanager/target/ -ujes/definedEngines/pipeline/entrance/target/ -ujes/definedEngines/python/engine/target/ 
-ujes/definedEngines/python/enginemanager/target/ -ujes/definedEngines/python/entrance/target/ -ujes/definedEngines/spark/engine/target/ -ujes/definedEngines/spark/enginemanager/target/ -ujes/definedEngines/spark/entrance/target/ -ujes/engine/target/ -ujes/enginemanager/target/ -ujes/entrance/target/ - -ujes/definedEngines/mlsql/entrance/target/ +out/ +linkis.ipr +linkis.iws + +linkis-commons/linkis-common/target/ +linkis-commons/linkis-hadoop-common/target/ +linkis-commons/linkis-httpclient/target/ +linkis-commons/linkis-message-scheduler/target/ +linkis-commons/linkis-module/target/ +linkis-commons/linkis-mybatis/target/ +linkis-commons/linkis-protocol/target/ +linkis-commons/linkis-rpc/target/ +linkis-commons/linkis-scheduler/target/ +linkis-commons/linkis-storage/target/ + +linkis-computation-governance/linkis-computation-client/target/ +linkis-computation-governance/linkis-computation-governance-common/target/ +linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-linux-launch/target/ +linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-core/target/ +linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/target/ + +linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/target/ +linkis-computation-governance/linkis-engineconn/linkis-engineconn-common/target/ +linkis-computation-governance/linkis-engineconn/linkis-engineconn-launch/target/ +linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/target/ +linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/callback-service/target/ +linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/executor-core/target/ +linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/resource-executor/target/ +linkis-computation-governance/linkis-entrance-client/target/ +linkis-computation-governance/linkis-entrance/target/ 
+linkis-computation-governance/linkis-manager/label-common/target/ +linkis-computation-governance/linkis-manager/label-manager/target/ +linkis-computation-governance/linkis-manager/linkis-application-manager/target/ +linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/target/ +linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-service-common/target/ +linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-resource-manager-common/target/ +linkis-computation-governance/linkis-manager/linkis-manager-monitor/target/ +linkis-computation-governance/linkis-manager/linkis-manager-persistence/target/ +linkis-computation-governance/linkis-manager/linkis-resource-manager/target/ + + + +linkis-public-enhancements/linkis-bml/linkis-bml-client/target/ +linkis-public-enhancements/linkis-bml/linkis-bml-common/target/ +linkis-public-enhancements/linkis-bml/linkis-bml-server/target/ +linkis-public-enhancements/linkis-bml/linkis-bml-engine-hook/target/ +linkis-public-enhancements/linkis-context-service/linkis-cs-cache/target/ +linkis-public-enhancements/linkis-context-service/linkis-cs-client/target/ +linkis-public-enhancements/linkis-context-service/linkis-cs-common/target/ +linkis-public-enhancements/linkis-context-service/linkis-cs-engine-support/target/ +linkis-public-enhancements/linkis-context-service/linkis-cs-highavailable/target/ +linkis-public-enhancements/linkis-context-service/linkis-cs-listener/target/ +linkis-public-enhancements/linkis-context-service/linkis-cs-persistence/target/ +linkis-public-enhancements/linkis-context-service/linkis-cs-search/target/ +linkis-public-enhancements/linkis-context-service/linkis-cs-server/target/ +linkis-public-enhancements/linkis-datasource/datasourcemanager/common/target/ +linkis-public-enhancements/linkis-datasource/datasourcemanager/server/target/ +linkis-public-enhancements/linkis-datasource/linkis-metadata/target/ 
+linkis-public-enhancements/linkis-datasource/metadatamanager/common/target/ +linkis-public-enhancements/linkis-datasource/metadatamanager/server/target/ +linkis-public-enhancements/linkis-datasource/metadatamanager/service/elasticsearch/target/ +linkis-public-enhancements/linkis-datasource/metadatamanager/service/hive/target/ +linkis-public-enhancements/linkis-datasource/metadatamanager/service/mysql/target/ +linkis-public-enhancements/linkis-publicService/linkis-configuration/target/ +linkis-public-enhancements/linkis-publicService/linkis-instance-label/client/target/ +linkis-public-enhancements/linkis-publicService/linkis-instance-label/target/ +linkis-public-enhancements/linkis-publicService/linkis-jobhistory/target/ +linkis-public-enhancements/linkis-publicService/linkis-script-dev/linkis-storage-script-dev-client/target/ +linkis-public-enhancements/linkis-publicService/linkis-script-dev/linkis-storage-script-dev-server/target/ +linkis-public-enhancements/linkis-publicService/linkis-udf/target/ +linkis-public-enhancements/linkis-publicService/linkis-variable/target/ +linkis-public-enhancements/linkis-publicService/target/ + + +linkis-spring-cloud-services/linkis-service-discovery/linkis-eureka/target/ +linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-core/target/ +linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-httpclient-support/target/ +linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-server-support/target/ +linkis-spring-cloud-services/linkis-service-gateway/linkis-spring-cloud-gateway/target/ + + + + +linkis-engineconn-plugins/linkis-engineconn-plugin-framework/linkis-engineconn-plugin-cache/target/ +linkis-engineconn-plugins/linkis-engineconn-plugin-framework/linkis-engineconn-plugin-core/target/ +linkis-engineconn-plugins/linkis-engineconn-plugin-framework/linkis-engineconn-plugin-loader/target/ +linkis-engineconn-plugins/linkis-engineconn-plugin-framework/linkis-engineconn-plugin-server/target/ 
+linkis-engineconn-plugins/engineconn-plugins/jdbc/target/ +linkis-engineconn-plugins/engineconn-plugins/hive/target/ +linkis-engineconn-plugins/engineconn-plugins/io_file/target/ +linkis-engineconn-plugins/engineconn-plugins/python/target/ +linkis-engineconn-plugins/engineconn-plugins/spark/target/ +linkis-engineconn-plugins/engineconn-plugins/pipeline/target/ +linkis-engineconn-plugins/engineconn-plugins/shell/target/ + +linkis-orchestrator/plugin/linkis-orchestrator-ecm-plugin/target/ + +assembly/target assembly/public-module/target/ -bml/bml-engine-hook/target/ -bml/bmlclient/target/ -bml/bmlcommon/target/ -bml/bmlserver/target/ -core/hadoop-common/target/ -publicService/workspace/client/workspace-httpclient/target/ -userControl/target/ diff --git a/Contributing_CN.md b/Contributing_CN.md index 4b8eafde3c4562bcf0672b492f2667c560120796..8495bd08284cf505858e7948e5ac913222e3fa4b 100644 --- a/Contributing_CN.md +++ b/Contributing_CN.md @@ -2,10 +2,9 @@ | **版本管理信息表** | | | ----------- | --------------------------------- | -| 文档所属目录 | Contributing/Contributing.md | -| 现行版本 | 1.0 版,2020年3月 | -| 现行版本发布日期 | 2020 年 3 月 6 日 | -| 修订信息 | 1. 第一次发布 Linkis 项目Contributing规范。 | +| 现行版本 | 1.1 版,2021年5月 | +| 现行版本发布日期 | 2021 年 5 月 6 日 | +| 修订信息 | 1. 
增加Issue 提交指引,修改一些描述 | 非常感谢贡献Linkis项目!在参与贡献之前,请仔细阅读以下指引。 @@ -17,15 +16,18 @@ ### 1.2 功能交流、实现、重构 -在交流过程中,详细描述新功能(或重构)的细节、机制和使用场景,能够促使它更好更快地被实现。**如果计划实现一个重大的功能(或重构),请务必通过 Issue 或其他方式与核心开发团队进行沟通**,这样大家能以最效率的方式来推进它。包含 `#feature` 标签的打开的 Issue 都是需要被实现的新功能,包含 `#enhancement` 标签打开的 Issue 都是需要改进重构的功能。 +在交流过程中,详细描述新功能(或重构)的细节、机制和使用场景,能够促使它更好更快地被实现(包括测试用例和代码,及CI/CD相关工作)。**如果计划实现一个重大的功能(或重构),请务必通过 Issue 或其他方式与核心开发团队进行沟通**,这样大家能以最效率的方式来推进它。包含 `#feature` 标签的打开的 Issue 都是需要被实现的新功能,包含 `#enhancement` 标签打开的 Issue 都是需要改进重构的功能。 ### 1.3 Issue 答疑 -帮助回答 Issue 中的使用问题是为 Linkis 社区做贡献的一个非常有价值的方式;社区中总会有新用户不断进来,在帮助新用户的同时,也可以展现你的专业知识。 +帮助回答 Issue 中的使用问题是为 Linkis 社区做贡献的一个非常有价值的方式;社区中总会有新用户不断进来,在帮助新用户的同时,也可以展现您的专业知识。 ### 1.4 文档改进 -Linkis 用户手册文档在 docs/ 目录下,我们使用了 [jekyll](https://jekyllrb.com/) 作为 Linkis 的文档服务,可以编辑目录里的 Markdown 文件来对文档做改进。 +Linkis 文档位于[Linkis-Doc](https://github.com/WeBankFinTech/Linkis-Doc) ,文档的补充完善对于Linkis 的发展也至关重要。 + +### 1.5 其他 +包括参与和帮助组织社区交流、社区运营活动等,其他能够帮助Linkis 项目和社区的活动。 --- @@ -33,11 +35,11 @@ Linkis 用户手册文档在 docs/ 目录下,我们使用了 [jekyll](https:// ### 2.1 分支结构 -Linkis 源码可能会产生一些临时分支,但真正有明确意义的只有以下三个分支: - +Linkis 源码可能会产生一些临时分支,但真正有明确意义的只有以下三个分支: - master: 最近一次稳定 release 的源码,偶尔会多几次 hotfix 提交; -- branch-*: 最新稳定版; +- release-*: 稳定的release 版本; - dev-*: 主要开发分支; +- feature-*: 针对某些较大、需要社区联合开发的新特性的开发分支 请注意:大特性的dev分支,在命名时除了版本号,还会加上相应的命名说明,如:dev-0.10.0-flink,指0.10.0的flink特性开发分支。 @@ -102,9 +104,14 @@ git clone https://github.com/yourname/Linkis.git --branch dev-* 1. 打完整 release 包需要修改根目录下 /assembly/src/main/assembly/assembly.xml 中相关版本信息,然后在根目录下执行: `mvn clean package` 即可; 2. 打 每个模块 的包可直接在 模块目录下执行 `mvn clean package`。 -### 2.3 Pull Request 指引 +### 2.3 Issue 提交指引 +- 如果您还不知道怎样向开源项目发起 PR,请参考[About issues](https://docs.github.com/en/github/managing-your-work-on-github/about-issues) +- Issue 名称,应一句话简单描述您的问题或建议;为了项目的国际化推广,请用英文,或中英文双语书写 issue. 
+- 每个Issue,请至少带上component 和type 两个label,如component=Computation Governance/EngineConn,type=Improvement.参考:[issue #590](https://github.com/WeBankFinTech/Linkis/issues/590) + +### 2.3 Pull Request(PR) 提交指引 -- 如果你还不知道怎样向开源项目发起 PR,请参考[这篇说明](https://help.github.com/en/articles/about-pull-requests) +- 如果您还不知道怎样向开源项目发起 PR,请参考[About pull requests](https://docs.github.com/en/github/collaborating-with-issues-and-pull-requests/about-pull-requests) - 无论是 Bug 修复,还是新功能开发,请将 PR 提交到 dev-* 分支。 - PR 和提交名称遵循 `(): ` 原则,详情可以参考阮一峰的 [Commit message 和 Change log 编写指南](http://www.ruanyifeng.com/blog/2016/01/commit_message_change_log.html) 这篇文章。 - 如果 PR 中包含新功能,理应将文档更新包含在本次 PR 中。 @@ -113,7 +120,7 @@ git clone https://github.com/yourname/Linkis.git --branch dev-* ### 2.4 Review 标准 -在贡献代码之前,可以了解一下什么样的提交在 Review 中是受欢迎的。简单来说,如果一项提交能带来尽可能多增益和尽可能少的副作用或风险,那它被合并的几率就越高,Review 的速度也会越快。风险大、价值低的提交是几乎不可能被合并的,并且有可能会被拒绝连 Review 的机会都没有。 +在贡献代码之前,可以了解一下什么样的提交在 Review 中是受欢迎的。简单来说,如果一项提交能带来尽可能多增益和尽可能少的副作用或风险,那它被合并的几率就越高,Review 的速度也会越快。风险大、价值低的提交是几乎不可能被合并的,并且有可能会被拒绝 Review。 #### 2.4.1 增益 @@ -149,7 +156,7 @@ git clone https://github.com/yourname/Linkis.git --branch dev-* #### 3.1.1 如何成为 Committer -如果你对 Linkis 提过颇具价值的 PR 并且被合并,或是连续贡献超过半年,且至少主导过一次版本的发布,你可以通过官方微信群找到Linkis项目的一个 PMC ,如果他愿意提名你为 committer,并愿意为你陈述你的贡献给所有 PMC和Committer,那么接下来会发起一次投票;PMC和其他 Committers 将会一起投票决定是否允许你的加入,如果得到足够票数,你将成为 Linkis 项目的 Committer。 +如果您对 Linkis 提过颇具价值的 PR 并且被合并,或是连续贡献超过半年,且至少主导过一次版本的发布,您可以通过官方微信群找到Linkis项目的一个 PMC ,如果他愿意提名您为 committer,并愿意为您陈述您的贡献给所有 PMC和Committer,那么接下来会发起一次投票;PMC和其他 Committers 将会一起投票决定是否允许您的加入,如果得到足够票数,您将成为 Linkis 项目的 Committer。 #### 3.1.2 Committer 的权利 @@ -163,7 +170,7 @@ git clone https://github.com/yourname/Linkis.git --branch dev-* #### 3.2.1 如何成为 Committee 成员 -如果你是 Linkis 项目的 Committer,并且你贡献的所有内容得到了其他 Committee 成员的认可,你可以申请成为 Linkis Committee 成员,其他 Committee 成员将会一起投票决定是否允许你的加入,如果全票通过,你将成为 Linkis Committee 成员。 +如果您是 Linkis 项目的 Committer,并且您贡献的所有内容得到了其他 Committee 成员的认可,您可以申请成为 Linkis Committee 成员,其他 Committee 
成员将会一起投票决定是否允许您的加入,如果全票通过,您将成为 Linkis Committee 成员。 #### 3.2.2 Committee 成员的权利 diff --git a/README.md b/README.md index 60430007bae628a02c0ee9bd773bd7e34bdbd63a..e9040597a1c7ee1eac16c9db3d6f2a40633190b5 100644 --- a/README.md +++ b/README.md @@ -1,89 +1,100 @@ Linkis -============ +========== [![License](https://img.shields.io/badge/license-Apache%202-4EB1BA.svg)](https://www.apache.org/licenses/LICENSE-2.0.html) -English | [中文](docs/zh_CN/README.md) +[English](README.md) | [中文](README_CN.md) -## Introduction +# Introduction -Linkis helps easily connect to various back-end computation/storage engines(Spark, Python, TiDB...), exposes various interfaces(REST, JDBC, Java ...), with multi-tenancy, high performance, and resource control. + Linkis builds a layer of computation middleware between upper applications and underlying engines. By using standard interfaces such as REST/WS/JDBC provided by Linkis, the upper applications can easily access the underlying engines such as MySQL/Spark/Hive/Presto/Flink, etc., and achieve the intercommunication of user resources like unified variables, scripts, UDFs, functions and resource files at the same time. -Linkis connects with computation/storage engines(Spark, Hive, Python and HBase), exposes REST/WS interface, and executes multi-language jobs(SQL, Pyspark, HiveQL and Scala), as a computation middleware. +As a computation middleware, Linkis provides powerful connectivity, reuse, orchestration, expansion, and governance capabilities. By decoupling the application layer and the engine layer, it simplifies the complex network call relationship, and thus reduces the overall complexity and saves the development and maintenance costs as well. -Based on the microservices architecture, Linkis provides enterprise-level features of multi-tenant isolation, resource management and access control. It also offers convenient support to manage unified variables, UDFs, functions and resource files. 
it is also guaranteed with sophisticated task/job lifecycle management capabilities under high-concurrency, high-performance and high-availability scenarios. +Since the first release of Linkis in 2019, it has accumulated more than **700** trial companies and **1000+** sandbox trial users, which involving diverse industries, from finance, banking, tele-communication, to manufactory, internet companies and so on. Lots of companies have already used Linkis as a unified entrance for the underlying computation and storage engines of the big data platform. ![linkis-intro-01](https://user-images.githubusercontent.com/11496700/84615498-c3030200-aefb-11ea-9b16-7e4058bf6026.png) ![linkis-intro-03](https://user-images.githubusercontent.com/11496700/84615483-bb435d80-aefb-11ea-81b5-67f62b156628.png) -
-
-Based on the concept of the computation middleware architecture of Linkis, we have built a large amount of applications and systems on top of it.Currently available open-source project: - - [**DataSphere Studio - Data Application Development& Management Portal**](https://github.com/WeBankFinTech/DataSphereStudio) - - [**Qualitis - Data Quality Tool**](https://github.com/WeBankFinTech/Qualitis) - - [**Scriptis - Data Development IDE Tool**](https://github.com/WeBankFinTech/Scriptis) - - [**Visualis - Data Visualization Tool**](https://github.com/WeBankFinTech/Visualis) +# Features - There will be more tools released as open-source projects, please stay tuned! - -## Features - -- Unified Job Execution Services: A distributed REST/WebSocket service for processing scripts execution requests from user. - - Available computation engines so far: Spark, Python, TiSpark, Hive and Shell. - - Available languages so far: SparkSQL, Spark Scala, PySpark, R, Python, HQL and Shell. - -- Resource Management Services: Available for real-time control/limit of resource usage from both perspectives of amount and load for both systems and users. With dynamic charts of resource statistics, it is convenient to monitor and manage resource usage for systems and users. +- **Support for diverse underlying computation storage engines**. + Currently supported computation/storage engines: Spark, Hive, Python, Presto, ElasticSearch, MLSQL, TiSpark, JDBC, Shell, etc; + Computation/storage engines to be supported: Flink, Impala, etc; + Supported scripting languages: SparkSQL, HiveQL, Python, Shell, Pyspark, R, Scala and JDBC, etc. - Available resource types so far: Yarn queue resources, server(CPU and memory), number of concurrent instances per user. +- **Powerful task/request governance capabilities**. 
With services such as Orchestrator, Label Manager and customized Spring Cloud Gateway, Linkis is able to provide multi-level labels based, cross-cluster/cross-IDC fine-grained routing, load balance, multi-tenancy, traffic control, resource control, and orchestration strategies like dual-active, active-standby, etc. +- **Support full stack computation/storage engine**. As a computation middleware, it will receive, execute and manage tasks and requests for various computation storage engines, including batch tasks, interactive query tasks, real-time streaming tasks and storage tasks; -- Application Management Services: Manages global user applications, including offline batch applications, interactive query applications and real-time streaming applications. Also provides powerful reusability especially for offline and interactive applications, with complete lifecycle management which automatically releases idle applications for users. +- **Resource management capabilities**. ResourceManager is not only capable of managing resources for Yarn and Linkis EngineManger as in Linkis 0.X, but also able to provide label-based multi-level resource allocation and recycling, allowing itself to have powerful resource management capabilities across mutiple Yarn clusters and mutiple computation resource types; -- Unified Storage Services: The generic IO architecture can quickly integrate with various storage systems and provide a unified invokable entrance. It is also highly integrated with most common data formats and easy to use. +- **Unified Context Service**. Generate Context ID for each task/request, associate and manage user and system resource files (JAR, ZIP, Properties, etc.), result set, parameter variable, function, etc., across user, system, and computing engine. Set in one place, automatic reference everywhere; -- Unified Context Services: Unite resources files of users and systems (JAR, ZIP, Properties). 
With unified management of arguments/variables for users, systems and engines, it is achieved that modification in random place will reflect in all the other places automatically. +- **Unified materials**. System and user-level unified material management, which can be shared and transferred across users and systems. +# Supported engine types +| **Engine** | **Supported Version** | **Linkis 0.X version requirement**| **Linkis 1.X version requirement** | **Description** | +|:---- |:---- |:---- |:---- |:---- | +|Flink |1.11.0|\>=dev-0.12.0, PR #703 not merged yet.|ongoing| Flink EngineConn. Supports FlinkSQL code, and also supports Flink Jar to Linkis Manager to start a new Yarn application.| +|Impala|\>=3.2.0, CDH >=6.3.0"|\>=dev-0.12.0, PR #703 not merged yet.|ongoing|Impala EngineConn. Supports Impala SQL.| +|Presto|\>= 0.180|\>=0.11.0|ongoing|Presto EngineConn. Supports Presto SQL.| +|ElasticSearch|\>=6.0|\>=0.11.0|ongoing|ElasticSearch EngineConn. Supports SQL and DSL code.| +|Shell|Bash >=2.0|\>=0.9.3|\>=1.0.0_rc1|Shell EngineConn. Supports shell code.| +|MLSQL|\>=1.1.0|\>=0.9.1|ongoing|MLSQL EngineConn. Supports MLSQL code.| +|JDBC|MySQL >=5.0, Hive >=1.2.1|\>=0.9.0|\>=1.0.0_rc1|JDBC EngineConn. Supports MySQL and HiveQL code.| +|Spark|Apache 2.0.0~2.4.7, CDH >=5.4.0|\>=0.5.0|\>=1.0.0_rc1|Spark EngineConn. Supports SQL, Scala, Pyspark and R code.| +|Hive|Apache >=1.0.0, CDH >=5.4.0|\>=0.5.0|\>=1.0.0_rc1|Hive EngineConn. Supports HiveQL code.| +|Hadoop|Apache >=2.6.0, CDH >=5.4.0|\>=0.5.0|ongoing|Hadoop EngineConn. Supports Hadoop MR/YARN application.| +|Python|\>=2.6|\>=0.5.0|\>=1.0.0_rc1|Python EngineConn. Supports python code.| +|TiSpark|1.1|\>=0.5.0|ongoing|TiSpark EngineConn. Support querying TiDB data by SparkSQL.| -- Material Library: System and user-level material management, capable of sharing, transferring materials and automatic lifecycle management. +# Download -- Metadata Services: Real-time display of dataset table structure and partitions. 
+Please go to the [Linkis releases page](https://github.com/WeBankFinTech/Linkis/wiki/Linkis-Releases) to download a compiled distribution or a source code package of Linkis. - **Compared with similar systems** +# Compile and deploy +Please follow [Compile Guide](https://github.com/WeBankFinTech/Linkis-Doc/blob/master/zh_CN/Development_Documents/Linkis%E7%BC%96%E8%AF%91%E6%96%87%E6%A1%A3.md) to compile Linkis from source code. +Please refer to [Deployment_Documents](https://github.com/WeBankFinTech/Linkis-Doc/tree/master/zh_CN/Deployment_Documents) to do the deployment. -![introduction01](https://user-images.githubusercontent.com/11496700/84625630-403b7080-af16-11ea-8816-392dda3584d1.png) +# Examples and Guidance +You can find examples and guidance for how to use and manage Linkis in [User_Manual](https://github.com/WeBankFinTech/Linkis-Doc/tree/master/zh_CN/User_Manual), [Engine_Usage_Documents](https://github.com/WeBankFinTech/Linkis-Doc/tree/master/zh_CN/Engine_Usage_Documentations) and [API_Documents](https://github.com/WeBankFinTech/Linkis-Doc/tree/master/zh_CN/API_Documentations). -# Documentations: +# Documentation -[Linkis, make big data easier](docs/en_US/ch3/Linkis_Introduction.md) +The documentation of linkis is in [Linkis-Doc](https://github.com/WeBankFinTech/Linkis-Doc) or in the [wiki](https://github.com/WeBankFinTech/Linkis/wiki). -[Linkis Quick Deploy](docs/en_US/ch1/deploy.md) +# Architecture +Linkis services could be divided into three categories: computation governance services, public enhancement services and microservice governance services. +- The computation governance services, support the 3 major stages of processing a task/request: submission -> preparation -> execution; +- The public enhancement services, including the material library service, context service, and data source service; +- The microservice governance services, including Spring Cloud Gateway, Eureka and Open Feign. 
-[Linkis Quick Start & Java SDK documentation](docs/en_US/ch3/Linkis_Java_SDK_doc.md) +Below is the Linkis architecture diagram. You can find more detailed architecture docs in [Linkis-Doc/Architecture](https://github.com/WeBankFinTech/Linkis-Doc/tree/master/zh_CN/Architecture_Documents). +![architecture](images/Linkis_1.0_architecture.png) -[HTTP APIs for frontend applications](docs/en_US/ch3/Linkis_HTTP_API_Doc.md) +Based on Linkis the computation middleware, we've built a lot of applications and tools on top of it in the big data platform suite [WeDataSphere](https://github.com/WeBankFinTech/WeDataSphere). Below are the currently available open-source projects. More projects upcoming, please stay tuned. -[WebSocket APIs for frontend applications](docs/en_US/ch3/Linkis_WebSocket_API_Doc.md) +![wedatasphere_stack_Linkis](images/wedatasphere_stack_Linkis.png) -[How to adapt Linkis with a new computation or storage engine](docs/en_US/ch3/How_to_adapt_Linkis_with_a_new_computation_or_storage_engine.md) +# Contributing ----- +Contributions are always welcomed, we need more contributors to build Linkis together. either code, or doc, or other supports that could help the community. +For code and documentation contributions, please follow the [contribution guide](https://github.com/WeBankFinTech/Linkis/blob/master/Contributing_CN.md). -# Architecture: +# Contact Us -![introduction02](https://user-images.githubusercontent.com/11496700/84615549-e9c13880-aefb-11ea-870e-6d2e9b8fa08d.png) +Any questions or suggestions please kindly submit an issue. +You can scan the QR code below to join our WeChat and QQ group to get more immediate response. ----- +![introduction05](images/wedatasphere_contact_01.png) -## Communication -If you desire immediate response, please kindly raise issues to us or scan the below QR code by WeChat and QQ to join our group: -
-![introduction05](https://user-images.githubusercontent.com/11496700/84615565-f2197380-aefb-11ea-8288-c2d7b0410933.png) +Meetup videos on [Bilibili](https://space.bilibili.com/598542776?from=search&seid=14344213924133040656). -## License +# Who is Using Linkis -Linkis is under the Apache 2.0 license. See the [LICENSE ](http://www.apache.org/licenses/LICENSE-2.0)file for details. \ No newline at end of file +We opened [an issue](https://github.com/WeBankFinTech/Linkis/issues/23) for users to feedback and record who is using Linkis. +Since the first release of Linkis in 2019, it has accumulated more than **700** trial companies and **1000+** sandbox trial users, which involving diverse industries, from finance, banking, tele-communication, to manufactory, internet companies and so on. diff --git a/README_CN.md b/README_CN.md new file mode 100644 index 0000000000000000000000000000000000000000..7333f856a65b018e70a863f81b0fb8b349f921a7 --- /dev/null +++ b/README_CN.md @@ -0,0 +1,105 @@ +Linkis +============ + +[![License](https://img.shields.io/badge/license-Apache%202-4EB1BA.svg)](https://www.apache.org/licenses/LICENSE-2.0.html) + +[English](README.md) | [中文](README_CN.md) + +# 介绍 + +Linkis 在上层应用程序和底层引擎之间构建了一层计算中间件。通过使用Linkis 提供的REST/WebSocket/JDBC 等标准接口,上层应用可以方便地连接访问MySQL/Spark/Hive/Presto/Flink 等底层引擎,同时实现变量、脚本、函数和资源文件等用户资源的跨上层应用互通。 +作为计算中间件,Linkis 提供了强大的连通、复用、编排、扩展和治理管控能力。通过计算中间件将应用层和引擎层解耦,简化了复杂的网络调用关系,降低了整体复杂度,同时节约了整体开发和维护成本。 +Linkis 自2019年开源发布以来,已累计积累了700多家试验企业和1000+沙盒试验用户,涉及金融、电信、制造、互联网等多个行业。许多公司已经将Linkis 作为大数据平台底层计算存储引擎的统一入口,和计算请求/任务的治理管控利器。 + +![没有Linkis 之前](images/before_linkis_cn.png) + +![有了Linkis 之后](images/after_linkis_cn.png) + +# 核心特点 + +- **丰富的底层计算存储引擎支持**。 + **目前支持的计算存储引擎**:Spark、Hive、Python、Presto、ElasticSearch、MLSQL、TiSpark、JDBC和Shell等。 + **正在支持中的计算存储引擎**:Flink、Impala等。 + **支持的脚本语言**:SparkSQL, HiveQL, Python, Shell, Pyspark, R, Scala 和JDBC 等。 +- **强大的计算治理能力**。基于Orchestrator、Label Manager和定制的Spring Cloud Gateway等服务,Linkis能够提供基于多级标签的跨集群/跨IDC 
细粒度路由、负载均衡、多租户、流量控制、资源控制和编排策略(如双活、主备等)支持能力。 +- **全栈计算存储引擎架构支持**。能够接收、执行和管理针对各种计算存储引擎的任务和请求,包括离线批量任务、交互式查询任务、实时流式任务和存储型任务; +- **资源管理能力**。 ResourceManager 不仅具备 Linkis0.X 对 Yarn 和 Linkis EngineManager 的资源管理能力,还将提供基于标签的多级资源分配和回收能力,让 ResourceManager 具备跨集群、跨计算资源类型的强大资源管理能力。 +- **统一上下文服务**。为每个计算任务生成context id,跨用户、系统、计算引擎的关联管理用户和系统资源文件(JAR、ZIP、Properties等),结果集,参数变量,函数等,一处设置,处处自动引用; +- **统一物料**。系统和用户级物料管理,可分享和流转,跨用户、系统共享物料。 + +# 支持的引擎类型 + +| **引擎** | **引擎版本** | **Linkis 0.X 版本要求**| **Linkis 1.X 版本要求** | **说明** | +|:---- |:---- |:---- |:---- |:---- | +|Flink |1.11.0|\>=dev-0.12.0, PR #703 尚未合并|ongoing| Flink EngineConn。支持FlinkSQL 代码,也支持以Flink Jar 形式启动一个新的Yarn 应用程序。| +|Impala|\>=3.2.0, CDH >=6.3.0"|\>=dev-0.12.0, PR #703 尚未合并|ongoing|Impala EngineConn. 支持Impala SQL 代码.| +|Presto|\>= 0.180|\>=0.11.0|ongoing|Presto EngineConn. 支持Presto SQL 代码.| +|ElasticSearch|\>=6.0|\>=0.11.0|ongoing|ElasticSearch EngineConn. 支持SQL 和DSL 代码.| +|Shell|Bash >=2.0|\>=0.9.3|\>=1.0.0_rc1|Shell EngineConn. 支持Bash shell 代码.| +|MLSQL|\>=1.1.0|\>=0.9.1|ongoing|MLSQL EngineConn. 支持MLSQL 代码.| +|JDBC|MySQL >=5.0, Hive >=1.2.1|\>=0.9.0|\>=1.0.0_rc1|JDBC EngineConn. 已支持MySQL 和HiveQL,可快速扩展支持其他有JDBC Driver 包的引擎, 如Oracle. +|Spark|Apache 2.0.0~2.4.7, CDH >=5.4.0|\>=0.5.0|\>=1.0.0_rc1|Spark EngineConn. 支持SQL, Scala, Pyspark 和R 代码.| +|Hive|Apache >=1.0.0, CDH >=5.4.0|\>=0.5.0|\>=1.0.0_rc1|Hive EngineConn. 支持HiveQL 代码.| +|Hadoop|Apache >=2.6.0, CDH >=5.4.0|\>=0.5.0|ongoing|Hadoop EngineConn. 支持Hadoop MR/YARN application.| +|Python|\>=2.6|\>=0.5.0|\>=1.0.0_rc1|Python EngineConn. 支持python 代码.| +|TiSpark|1.1|\>=0.5.0|ongoing|TiSpark EngineConn. 
支持用SparkSQL 查询TiDB.| + +# 下载 + +请前往[Linkis releases 页面](https://github.com/WeBankFinTech/Linkis/wiki/Linkis-Releases) 下载Linkis 的已编译版本或源码包。 + +# 编译和安装部署 +请参照[编译指引](https://github.com/WeBankFinTech/Linkis-Doc/blob/master/zh_CN/Development_Documents/Linkis%E7%BC%96%E8%AF%91%E6%96%87%E6%A1%A3.md) 来编译Linkis 源码。 +请参考[安装部署文档](https://github.com/WeBankFinTech/Linkis-Doc/tree/master/zh_CN/Deployment_Documents) 来部署Linkis。 + +# 示例和使用指引 +请到 [用户手册](https://github.com/WeBankFinTech/Linkis-Doc/tree/master/zh_CN/User_Manual), [各引擎使用指引](https://github.com/WeBankFinTech/Linkis-Doc/tree/master/zh_CN/Engine_Usage_Documentations) 和[API 文档](https://github.com/WeBankFinTech/Linkis-Doc/tree/master/zh_CN/API_Documentations) 中,查看如何使用和管理Linkis 的示例和指引。 + +# 文档 + +完整的Linkis 文档参见[Linkis-Doc](https://github.com/WeBankFinTech/Linkis-Doc) 或[wiki](https://github.com/WeBankFinTech/Linkis/wiki). + +# 架构概要 +Linkis 基于微服务架构开发,其服务可以分为3类:计算治理服务、公共增强服务和微服务治理服务。 +- 计算治理服务,支持计算任务/请求处理流程的3个主要阶段:提交->准备->执行; +- 公共增强服务,包括上下文服务、物料管理服务及数据源服务等; +- 微服务治理服务,包括定制化的Spring Cloud Gateway、Eureka、Open Feign。 + +下面是Linkis 的架构概要图. 更多详细架构文档请见 [Linkis-Doc/Architecture](https://github.com/WeBankFinTech/Linkis-Doc/tree/master/zh_CN/Architecture_Documents). 
+![architecture](images/Linkis_1.0_architecture.png) + +基于Linkis 计算中间件,我们在大数据平台套件[WeDataSphere](https://github.com/WeBankFinTech/WeDataSphere) 中构建了许多应用和工具系统。下面是目前可用的开源项目。 + +![wedatasphere_stack_Linkis](images/wedatasphere_stack_Linkis.png) + +- [**DataSphere Studio** - 数据应用集成开发框架](https://github.com/WeBankFinTech/DataSphereStudio) + +- [**Scriptis** - 数据研发IDE工具](https://github.com/WeBankFinTech/Scriptis) + +- [**Visualis** - 数据可视化工具](https://github.com/WeBankFinTech/Visualis) + +- [**Schedulis** - 工作流调度工具](https://github.com/WeBankFinTech/Schedulis) + +- [**Qualitis** - 数据质量工具](https://github.com/WeBankFinTech/Qualitis) + +- [**MLLabis** - 容器化机器学习notebook 开发环境](https://github.com/WeBankFinTech/prophecis) + +更多项目开源准备中,敬请期待。 + +# 贡献 + +我们非常欢迎和期待更多的贡献者参与共建Linkis, 不论是代码、文档,或是其他能够帮助到社区的贡献形式。 +代码和文档相关的贡献请参照[贡献指引](https://github.com/WeBankFinTech/Linkis/blob/master/Contributing_CN.md). + +# 联系我们 + +对Linkis 的任何问题和建议,敬请提交issue,以便跟踪处理和经验沉淀共享。 +您也可以扫描下面的二维码,加入我们的微信/QQ群,以获得更快速的响应。 +![introduction05](images/wedatasphere_contact_01.png) + +Meetup 视频 [Bilibili](https://space.bilibili.com/598542776?from=search&seid=14344213924133040656). + +# 谁在使用Linkis + +我们创建了[一个 issue](https://github.com/WeBankFinTech/Linkis/issues/23) 以便用户反馈和记录谁在使用Linkis. 
+Linkis 自2019年开源发布以来,累计已有700多家试验企业和1000+沙盒试验用户,涉及金融、电信、制造、互联网等多个行业。 \ No newline at end of file diff --git a/assembly/pom.xml b/assembly/pom.xml index a78592c4f5205fb27a6f301c24077b6de22218bd..2a7706a27825c8bf4e351c3bc7d0755acde46560 100644 --- a/assembly/pom.xml +++ b/assembly/pom.xml @@ -1,13 +1,10 @@ + + io.protostuff + protostuff-runtime + 1.6.2 + + + + io.protostuff + protostuff-api + 1.6.2 + + @@ -114,7 +173,7 @@ false - module + out false false diff --git a/assembly/public-module/src/main/assembly/distribution.xml b/assembly/public-module/src/main/assembly/distribution.xml index 865b4a9a349bdb3fe078d3b3253d09787c2374c1..5568d90d9fd294f93e1bd0891b47506647c997da 100644 --- a/assembly/public-module/src/main/assembly/distribution.xml +++ b/assembly/public-module/src/main/assembly/distribution.xml @@ -1,12 +1,9 @@ @@ -36,11 +33,11 @@ true true - io.netty:netty-buffer* - io.netty:netty-codec* - io.netty:netty-common* - io.netty:netty-handler* - io.netty:netty-transport* + + + + + diff --git a/assembly/src/main/assembly/assembly.xml b/assembly/src/main/assembly/assembly.xml index ec4b928890dd51b31366f7ae89789a6183be28f5..9c811ef5c06ced2b6b654a7f008cd92dedbaf602 100644 --- a/assembly/src/main/assembly/assembly.xml +++ b/assembly/src/main/assembly/assembly.xml @@ -1,12 +1,9 @@ + - ${project.parent.basedir}/ujes/definedEngines/spark/entrance/target/ + ${project.parent.basedir}/linkis-engineconn-plugins/engineconn-plugins/spark/target/out/ - share/linkis/ujes/spark + lib/linkis-engineconn-plugins/ - *.zip + **/* + - ${project.parent.basedir}/ujes/definedEngines/spark/enginemanager/target/ + ${project.parent.basedir}/linkis-engineconn-plugins/engineconn-plugins/hive/target/out/ - share/linkis/ujes/spark + lib/linkis-engineconn-plugins/ - *.zip + **/* - + - ${project.parent.basedir}/ujes/definedEngines/hive/entrance/target/ + ${project.parent.basedir}/linkis-engineconn-plugins/engineconn-plugins/python/target/out/ - share/linkis/ujes/hive + lib/linkis-engineconn-plugins/ 
- *.zip + **/* + - ${project.parent.basedir}/ujes/definedEngines/hive/enginemanager/target/ + ${project.parent.basedir}/linkis-engineconn-plugins/engineconn-plugins/shell/target/out/ - share/linkis/ujes/hive + lib/linkis-engineconn-plugins/ - *.zip + **/* - - - - ${project.parent.basedir}/ujes/definedEngines/python/entrance/target/ - - share/linkis/ujes/python - - *.zip - - - - - ${project.parent.basedir}/ujes/definedEngines/python/enginemanager/target/ - - share/linkis/ujes/python - - *.zip - - + + + + + + ${project.parent.basedir}/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-server-support/target/out/lib + + + lib/linkis-spring-cloud-services/linkis-mg-gateway + + + *.jar + + - - - - ${project.parent.basedir}/ujes/definedEngines/shell/entrance/target/ - - share/linkis/ujes/shell - - *.zip - - + + + ${project.parent.basedir}/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-server-support/target/out/conf + + + conf/linkis-spring-cloud-services/linkis-mg-gateway + + + * + + - - - ${project.parent.basedir}/ujes/definedEngines/shell/enginemanager/target/ - - share/linkis/ujes/shell - - *.zip - - + + + + ${project.parent.basedir}/linkis-spring-cloud-services/linkis-service-discovery/linkis-eureka/target/out/lib + + + lib/linkis-spring-cloud-services/linkis-mg-eureka + + + *.jar + + + + + ${project.parent.basedir}/linkis-spring-cloud-services/linkis-service-discovery/linkis-eureka/target/out/conf + + + conf/linkis-spring-cloud-services/linkis-mg-eureka + + + * + + - - - - - - ${project.parent.basedir}/ujes/definedEngines/jdbc/entrance/target/ - - share/linkis/ujes/jdbc - - *.zip - - + + + + + + ${project.parent.basedir}/assembly/public-module/target/out/lib + + lib/linkis-commons/public-module/ + + *.jar + + - + + + + + + ${project.parent.basedir}/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/target/out/lib + + + lib/linkis-computation-governance/linkis-cg-engineconnmanager + + + *.jar + 
+ - - - - ${project.parent.basedir}/resourceManager/resourcemanagerserver/target/ - - share/linkis/rm/ - - *.zip - - + + + ${project.parent.basedir}/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/target/out/conf + + + conf/linkis-computation-governance/linkis-cg-engineconnmanager + + + * + + - - - - ${project.parent.basedir}/bml/bmlserver/target/ - - share/linkis/linkis-bml/ - - *.zip - - + + + + ${project.parent.basedir}/linkis-engineconn-plugins/linkis-engineconn-plugin-framework/linkis-engineconn-plugin-server/target/out/lib/ + + + lib/linkis-computation-governance/linkis-cg-engineplugin + + + *.jar + + - - - - ${project.parent.basedir}/gateway/gateway-ujes-support/target/ - - share/springcloud/gateway/ - - *.zip - - - - ${project.parent.basedir}/eurekaServer/target/ + ${project.parent.basedir}/linkis-engineconn-plugins/linkis-engineconn-plugin-framework/linkis-engineconn-plugin-server/target/out/conf/ - share/springcloud/eureka/ + + conf/linkis-computation-governance/linkis-cg-engineplugin + - *.zip + * - + + - ${project.parent.basedir}/assembly/public-module/target/ + ${project.parent.basedir}/linkis-computation-governance/linkis-manager/linkis-application-manager/target/out/lib - share/linkis/module/ + + lib/linkis-computation-governance/linkis-cg-linkismanager + - *.zip + *.jar - - - - ${project.parent.basedir}/publicService/target/ - - share/linkis/linkis-publicservice/ - - *.zip - - - - - - ${project.parent.basedir}/metadata/target/ - - share/linkis/linkis-metadata/ - - *.zip - - - - - - ${project.parent.basedir}/contextservice/cs-server/target/ - - share/linkis/linkis-cs-server/ - - *.zip - - - - - - ${project.parent.basedir}/datasource/datasourcemanager/server/target - - share/linkis/datasource/linkis-dsm-server/ - - *.zip - - + + + ${project.parent.basedir}/linkis-computation-governance/linkis-manager/linkis-application-manager/target/out/conf + + + conf/linkis-computation-governance/linkis-cg-linkismanager + + 
+ * + + - - - - ${project.parent.basedir}/datasource/metadatamanager/server/target - - share/linkis/datasource/linkis-mdm-server/ - - *.zip - - - + + + + ${project.parent.basedir}/linkis-computation-governance/linkis-entrance/target/out/lib + + + lib/linkis-computation-governance/linkis-cg-entrance + + + *.jar + + + + + + ${project.parent.basedir}/linkis-computation-governance/linkis-entrance/target/out/conf + + + conf/linkis-computation-governance/linkis-cg-entrance + + + * + + + + + + + + + ${project.parent.basedir}/linkis-public-enhancements/linkis-bml/linkis-bml-server/target/out/lib + + + lib/linkis-public-enhancements/linkis-ps-bml + + + *.jar + + + + + + ${project.parent.basedir}/linkis-public-enhancements/linkis-bml/linkis-bml-server/target/out/conf + + + conf/linkis-public-enhancements/linkis-ps-bml + + + * + + + + + + + ${project.parent.basedir}/linkis-public-enhancements/linkis-publicservice/target/out/lib + + + lib/linkis-public-enhancements/linkis-ps-publicservice + + + *.jar + + + + + ${project.parent.basedir}/linkis-public-enhancements/linkis-publicservice/target/out/conf + + + conf/linkis-public-enhancements/linkis-ps-publicservice + + + * + + + + + + ${project.parent.basedir}/linkis-public-enhancements/linkis-datasource/linkis-metadata/target/out/lib + + + lib/linkis-public-enhancements/linkis-ps-datasource + + + *.jar + + + + + + ${project.parent.basedir}/linkis-public-enhancements/linkis-datasource/linkis-metadata/target/out/conf + + + conf/linkis-public-enhancements/linkis-ps-datasource + + + * + + + + + + + ${project.parent.basedir}/linkis-public-enhancements/linkis-context-service/linkis-cs-server/target/out/lib + + + lib/linkis-public-enhancements/linkis-ps-cs + + + *.jar + + + + + + ${project.parent.basedir}/linkis-public-enhancements/linkis-context-service/linkis-cs-server/target/out/conf + + + conf/linkis-public-enhancements/linkis-ps-cs + + + * + + + + @@ -320,4 +410,4 @@ - + \ No newline at end of file diff --git a/bin/checkEnv.sh 
b/bin/checkEnv.sh old mode 100755 new mode 100644 diff --git a/bin/checkServices.sh b/bin/checkServices.sh index 7e53bb5b2b897572a63ec5e558782c320d6275b0..561277b129a990acf0dba9dc40a0b1322e803f4a 100644 --- a/bin/checkServices.sh +++ b/bin/checkServices.sh @@ -29,11 +29,9 @@ MICRO_SERVICE_NAME=$1 MICRO_SERVICE_IP=$2 MICRO_SERVICE_PORT=$3 - -source ${workDir}/bin/common.sh local_host="`hostname --fqdn`" -ipaddr=$(ip addr | awk '/^[0-9]+: / {}; /inet.*global/ {print gensub(/(.*)\/(.*)/, "\\1", "g", $2)}'|awk 'NR==1') +ipaddr=$(ip addr | awk '/^[0-9]+: / {}; /inet.*global/ {print gensub(/(.*)\/(.*)/, "\\1", "g", $2)}') function isLocal(){ if [ "$1" == "127.0.0.1" ];then @@ -60,7 +58,6 @@ function executeCMD(){ } - echo "Start to Check if your microservice:$MICRO_SERVICE_NAME is normal via telnet" echo "--------------------------------------------------------------------------------------------------------------------------" echo $MICRO_SERVICE_NAME diff --git a/bin/common.sh b/bin/common.sh old mode 100755 new mode 100644 index c0ba75521085e072222042f0b1d075d1c9914d2f..49d5cf7a904feb78030b27dadf10c4fd01e75e03 --- a/bin/common.sh +++ b/bin/common.sh @@ -20,7 +20,7 @@ source ~/.bash_profile local_host="`hostname --fqdn`" -ipaddr=$(ip addr | awk '/^[0-9]+: / {}; /inet.*global/ {print gensub(/(.*)\/(.*)/, "\\1", "g", $2)}' | awk 'NR==1') +ipaddr=$(ip addr | awk '/^[0-9]+: / {}; /inet.*global/ {print gensub(/(.*)\/(.*)/, "\\1", "g", $2)}') function isLocal(){ if [ "$1" == "127.0.0.1" ];then diff --git a/bin/install.sh b/bin/install.sh old mode 100755 new mode 100644 index b646689abf7d20ad2b0209acd32c666c2b5886c2..be8cfeecdcbd514a6c864e49553003a380e35fc3 --- a/bin/install.sh +++ b/bin/install.sh @@ -17,7 +17,6 @@ #Actively load user env source ~/.bash_profile - shellDir=`dirname $0` workDir=`cd ${shellDir}/..;pwd` @@ -45,7 +44,14 @@ else exit 1 fi -source ${workDir}/bin/common.sh +function isSuccess(){ +if [ $? 
-ne 0 ]; then + echo "Failed to " + $1 + exit 1 +else + echo "Succeed to" + $1 +fi +} function checkPythonAndJava(){ python --version @@ -83,8 +89,8 @@ fi } function checkSpark(){ - spark-submit --version - isSuccess "execute spark-submit --version" + spark-submit --version + isSuccess "execute spark-submit --version" } say() { @@ -113,52 +119,66 @@ isSuccess "check env" ##load config echo "step1:load config " -export LINKIS_CONFIG_PATH=${LINKIS_CONFIG_PATH:-"${workDir}/conf/config.sh"} +export LINKIS_CONFIG_PATH=${LINKIS_CONFIG_PATH:-"${workDir}/conf/linkis-env.sh"} export LINKIS_DB_CONFIG_PATH=${LINKIS_DB_CONFIG_PATH:-"${workDir}/conf/db.sh"} -export DISTRIBUTION=${DISTRIBUTION:-"${workDir}/conf/config.sh"} source ${LINKIS_CONFIG_PATH} source ${LINKIS_DB_CONFIG_PATH} -source ${DISTRIBUTION} isSuccess "load config" +local_host="`hostname --fqdn`" + +ipaddr=$(ip addr | awk '/^[0-9]+: / {}; /inet.*global/ {print gensub(/(.*)\/(.*)/, "\\1", "g", $2)}') + +function isLocal(){ + if [ "$1" == "127.0.0.1" ];then + return 0 + elif [ $1 == "localhost" ]; then + return 0 + elif [ $1 == $local_host ]; then + return 0 + elif [ $1 == $ipaddr ]; then + return 0 + fi + return 1 +} +function executeCMD(){ + isLocal $1 + flag=$? + if [ $flag == "0" ];then + echo "Is local execution:$2" + eval $2 + else + echo "Is remote execution:$2" + ssh -p $SSH_PORT $1 $2 + fi +} +function copyFile(){ + isLocal $1 + flag=$? 
+ src=$2 + dest=$3 + if [ $flag == "0" ];then + echo "Is local cp " + eval "cp -r $src $dest" + else + echo "Is remote cp " + scp -r -P $SSH_PORT $src $1:$dest + fi -##install mode choice -if [ "$INSTALL_MODE" == "" ];then - echo "Please enter the mode selection such as: 1" - echo " 1: Lite" - echo " 2: Simple" - echo " 3: Standard" - echo "" - read -p "Please input the choice:" idx - INSTALL_MODE=$idx -fi +} -if [[ '1' = "$INSTALL_MODE" ]];then - echo "You chose Lite installation mode" - checkPythonAndJava -elif [[ '2' = "$INSTALL_MODE" ]];then - echo "You chose Simple installation mode" - checkPythonAndJava - checkHadoopAndHive -elif [[ '3' = "$INSTALL_MODE" ]];then - echo "You chose Standard installation mode" - checkPythonAndJava - checkHadoopAndHive - checkSpark -else - echo "no choice,exit!" - exit 1 -fi ##env check echo "Do you want to clear Linkis table information in the database?" echo " 1: Do not execute table-building statements" echo " 2: Dangerous! Clear all data and rebuild the tables" +echo " other: exit" echo "" MYSQL_INSTALL_MODE=1 +#使用read参数[-p]后,允许在[-p]后面跟一字符串,在字符串后面跟n个shell变量。n个shell变量用来接收从shell界面输入的字符串 read -p "Please input the choice:" idx if [[ '2' = "$idx" ]];then MYSQL_INSTALL_MODE=2 @@ -171,7 +191,6 @@ else exit 1 fi - echo "create hdfs directory and local directory" if [ "$WORKSPACE_USER_ROOT_PATH" != "" ] then @@ -183,6 +202,7 @@ then elif [[ $WORKSPACE_USER_ROOT_PATH == hdfs://* ]];then localRootDir=${WORKSPACE_USER_ROOT_PATH#hdfs://} hdfs dfs -mkdir -p $localRootDir/$deployUser + hdfs dfs -chmod -R 775 $localRootDir/$deployUser else echo "does not support $WORKSPACE_USER_ROOT_PATH filesystem types" fi @@ -190,491 +210,147 @@ fi isSuccess "create $WORKSPACE_USER_ROOT_PATH directory" -if [ "$HDFS_USER_ROOT_PATH" != "" ] -then - localRootDir=$HDFS_USER_ROOT_PATH - if [[ $HDFS_USER_ROOT_PATH == file://* ]];then - localRootDir=${HDFS_USER_ROOT_PATH#file://} - mkdir -p $localRootDir/$deployUser - sudo chmod -R 775 $localRootDir/$deployUser 
- elif [[ $HDFS_USER_ROOT_PATH == hdfs://* ]];then - localRootDir=${HDFS_USER_ROOT_PATH#hdfs://} - hdfs dfs -mkdir -p $localRootDir/$deployUser - else - echo "does not support $HDFS_USER_ROOT_PATH filesystem types" - fi -fi -isSuccess "create $HDFS_USER_ROOT_PATH directory" - +######################## init hdfs and db ################################ + if [ "$HDFS_USER_ROOT_PATH" != "" ] + then + localRootDir=$HDFS_USER_ROOT_PATH + if [[ $HDFS_USER_ROOT_PATH == file://* ]];then + localRootDir=${HDFS_USER_ROOT_PATH#file://} + mkdir -p $localRootDir/$deployUser + sudo chmod -R 775 $localRootDir/$deployUser + elif [[ $HDFS_USER_ROOT_PATH == hdfs://* ]];then + localRootDir=${HDFS_USER_ROOT_PATH#hdfs://} + hdfs dfs -mkdir -p $localRootDir/$deployUser + hdfs dfs -chmod -R 775 $localRootDir/$deployUser + else + echo "does not support $HDFS_USER_ROOT_PATH filesystem types" + fi + fi + isSuccess "create $HDFS_USER_ROOT_PATH directory" + + + if [ "$RESULT_SET_ROOT_PATH" != "" ] + then + localRootDir=$RESULT_SET_ROOT_PATH + if [[ $RESULT_SET_ROOT_PATH == file://* ]];then + localRootDir=${RESULT_SET_ROOT_PATH#file://} + mkdir -p $localRootDir/$deployUser + sudo chmod -R 775 $localRootDir/$deployUser + elif [[ $RESULT_SET_ROOT_PATH == hdfs://* ]];then + localRootDir=${RESULT_SET_ROOT_PATH#hdfs://} + hdfs dfs -mkdir -p $localRootDir/$deployUser + hdfs dfs -chmod -R 775 $localRootDir/$deployUser + else + echo "does not support $RESULT_SET_ROOT_PATH filesystem types" + fi + fi + isSuccess "create $RESULT_SET_ROOT_PATH directory" -if [ "$RESULT_SET_ROOT_PATH" != "" ] +## sql init +if [ "$YARN_RESTFUL_URL" != "" ] then - localRootDir=$RESULT_SET_ROOT_PATH - if [[ $RESULT_SET_ROOT_PATH == file://* ]];then - localRootDir=${RESULT_SET_ROOT_PATH#file://} - mkdir -p $localRootDir/$deployUser - sudo chmod -R 775 $localRootDir/$deployUser - elif [[ $RESULT_SET_ROOT_PATH == hdfs://* ]];then - localRootDir=${RESULT_SET_ROOT_PATH#hdfs://} - hdfs dfs -mkdir -p $localRootDir/$deployUser - else 
- echo "does not support $RESULT_SET_ROOT_PATH filesystem types" - fi + sed -i ${txt} "s#@YARN_RESTFUL_URL#$YARN_RESTFUL_URL#g" $workDir/db/linkis_dml.sql fi -isSuccess "create $RESULT_SET_ROOT_PATH directory" -##init db -if [[ '2' = "$MYSQL_INSTALL_MODE" ]];then - mysql -h$MYSQL_HOST -P$MYSQL_PORT -u$MYSQL_USER -p$MYSQL_PASSWORD --default-character-set=utf8 -e "CREATE DATABASE IF NOT EXISTS $MYSQL_DB DEFAULT CHARSET utf8 COLLATE utf8_general_ci;" - mysql -h$MYSQL_HOST -P$MYSQL_PORT -u$MYSQL_USER -p$MYSQL_PASSWORD -D$MYSQL_DB --default-character-set=utf8 -e "source ${workDir}/db/linkis_ddl.sql" - isSuccess "source linkis_ddl.sql" - mysql -h$MYSQL_HOST -P$MYSQL_PORT -u$MYSQL_USER -p$MYSQL_PASSWORD -D$MYSQL_DB --default-character-set=utf8 -e "source ${workDir}/db/linkis_dml.sql" - isSuccess "source linkis_dml.sql" - echo "Rebuild the table" -fi +common_conf=$workDir/conf/linkis.properties +SERVER_IP=$local_host -##Deal special symbol '#' -HIVE_META_PASSWORD=$(echo ${HIVE_META_PASSWORD//'#'/'\#'}) -MYSQL_PASSWORD=$(echo ${MYSQL_PASSWORD//'#'/'\#'}) - -##Eurkea install -SERVER_NAME=eureka -SERVER_IP=$EUREKA_INSTALL_IP -SERVER_PORT=$EUREKA_PORT -SERVER_HOME=$LINKIS_INSTALL_HOME -echo "$SERVER_NAME-step1: create dir" -if test -z "$SERVER_IP" +##Label set start +if [ "$SPARK_VERSION" != "" ] then - SERVER_IP=$local_host -fi -EUREKA_URL=http://$SERVER_IP:$EUREKA_PORT/eureka/ - -if ! 
executeCMD $SERVER_IP "test -e $SERVER_HOME"; then - executeCMD $SERVER_IP "sudo mkdir -p $SERVER_HOME;sudo chown -R $deployUser:$deployUser $SERVER_HOME" - isSuccess "create the dir of $SERVER_HOME" + sed -i ${txt} "s#spark-2.4.3#spark-$SPARK_VERSION#g" $workDir/db/linkis_dml.sql + executeCMD $SERVER_IP "sed -i ${txt} \"s#\#wds.linkis.spark.engine.version.*#wds.linkis.spark.engine.version=$SPARK_VERSION#g\" $common_conf" fi -echo "$SERVER_NAME-step2:copy install package" -copyFile $SERVER_IP ${workDir}/share/springcloud/$SERVER_NAME/$SERVER_NAME.zip $SERVER_HOME -isSuccess "copy $SERVER_NAME" -executeCMD $SERVER_IP "cd $SERVER_HOME/;rm -rf eureka;unzip $SERVER_NAME.zip > /dev/null;cd -" -echo "$SERVER_NAME-step3:subsitution conf" -eureka_conf_path=$SERVER_HOME/$SERVER_NAME/conf/application-$SERVER_NAME.yml -executeCMD $SERVER_IP "sed -i ${txt} \"s#port:.*#port: $SERVER_PORT#g\" $eureka_conf_path" -executeCMD $SERVER_IP "sed -i ${txt} \"s#defaultZone:.*#defaultZone: $EUREKA_URL#g\" $eureka_conf_path" -executeCMD $SERVER_IP "sed -i ${txt} \"s#hostname:.*#hostname: $SERVER_IP#g\" $eureka_conf_path" -isSuccess "subsitution conf of $SERVER_NAME" -echo "<----------------$SERVER_NAME:end------------------->" -##Eurkea install end - - - -##function -function installPackage(){ -echo "$SERVER_NAME-step1: create dir" -if test -z "$SERVER_IP" +if [ "$HIVE_VERSION" != "" ] then - SERVER_IP=$local_host + sed -i ${txt} "s#hive-1.2.1#hive-$HIVE_VERSION#g" $workDir/db/linkis_dml.sql + executeCMD $SERVER_IP "sed -i ${txt} \"s#\#wds.linkis.hive.engine.version.*#wds.linkis.hive.engine.version=$HIVE_VERSION#g\" $common_conf" fi -if ! 
executeCMD $SERVER_IP "test -e $SERVER_HOME"; then - executeCMD $SERVER_IP "sudo mkdir -p $SERVER_HOME;sudo chown -R $deployUser:$deployUser $SERVER_HOME" - isSuccess "create the dir of $SERVER_NAME" +if [ "$PYTHON_VERSION" != "" ] +then + sed -i ${txt} "s#python-python2#python-$PYTHON_VERSION#g" $workDir/db/linkis_dml.sql + executeCMD $SERVER_IP "sed -i ${txt} \"s#\#wds.linkis.python.engine.version.*#wds.linkis.python.engine.version=$PYTHON_VERSION#g\" $common_conf" fi -if ! executeCMD $SERVER_IP "test -e $SERVER_HOME/module"; then - copyFile $SERVER_IP ${workDir}/share/linkis/module/module.zip $SERVER_HOME - isSuccess "cp module.zip" - executeCMD $SERVER_IP "cd $SERVER_HOME/;unzip -o module.zip > /dev/null;cd -" - isSuccess "unzip module.zip" -fi +##Label set end -echo "$SERVER_NAME-step2:copy install package" -copyFile $SERVER_IP ${workDir}/share/$PACKAGE_DIR/$SERVER_NAME.zip $SERVER_HOME -isSuccess "copy ${SERVER_NAME}.zip" -executeCMD $SERVER_IP "cd $SERVER_HOME/;rm -rf $SERVER_NAME-bak; mv -f $SERVER_NAME $SERVER_NAME-bak;cd -" -executeCMD $SERVER_IP "cd $SERVER_HOME/;unzip -o $SERVER_NAME.zip > /dev/null; cd -" -isSuccess "unzip ${SERVER_NAME}.zip" -echo "$SERVER_NAME-step3:subsitution conf" -SERVER_CONF_PATH=$SERVER_HOME/$SERVER_NAME/conf/application.yml -executeCMD $SERVER_IP "sed -i ${txt} \"s#port:.*#port: $SERVER_PORT#g\" $SERVER_CONF_PATH" -executeCMD $SERVER_IP "sed -i ${txt} \"s#defaultZone:.*#defaultZone: $EUREKA_URL#g\" $SERVER_CONF_PATH" -executeCMD $SERVER_IP "sed -i ${txt} \"s#hostname:.*#hostname: $SERVER_IP#g\" $SERVER_CONF_PATH" -isSuccess "subsitution conf of $SERVER_NAME" -} -##function end -##cp module to em lib -function emExtraInstallModule(){ - executeCMD $SERVER_IP "cd $SERVER_HOME/;cp -f module/lib/* $SERVER_HOME/$SERVER_NAME/lib/;cd -" - isSuccess "copy module" -} -##replace conf 1. 
replace if it exists 2.not exists add -function replaceConf(){ - option=$1 - value=$2 - file=$3 - executeCMD $SERVER_IP "grep -q '^$option' $file && sed -i ${txt} 's/^$option.*/$option=$value/' $file || echo '$option=$value' >> $file" - isSuccess "copy module" -} +#init db +if [[ '2' = "$MYSQL_INSTALL_MODE" ]];then + mysql -h$MYSQL_HOST -P$MYSQL_PORT -u$MYSQL_USER -p$MYSQL_PASSWORD --default-character-set=utf8 -e "CREATE DATABASE IF NOT EXISTS $MYSQL_DB DEFAULT CHARSET utf8 COLLATE utf8_general_ci;" + mysql -h$MYSQL_HOST -P$MYSQL_PORT -u$MYSQL_USER -p$MYSQL_PASSWORD -D$MYSQL_DB --default-character-set=utf8 -e "source ${workDir}/db/linkis_ddl.sql" + isSuccess "source linkis_ddl.sql" + mysql -h$MYSQL_HOST -P$MYSQL_PORT -u$MYSQL_USER -p$MYSQL_PASSWORD -D$MYSQL_DB --default-character-set=utf8 -e "source ${workDir}/db/linkis_dml.sql" + isSuccess "source linkis_dml.sql" + echo "Rebuild the table" +fi +########################################################################### +#Deal special symbol '#' +HIVE_META_PASSWORD=$(echo ${HIVE_META_PASSWORD//'#'/'\#'}) +MYSQL_PASSWORD=$(echo ${MYSQL_PASSWORD//'#'/'\#'}) +#Deal common config +echo "Update common config..." 
-##GateWay Install -PACKAGE_DIR=springcloud/gateway -SERVER_NAME=linkis-gateway -SERVER_IP=$GATEWAY_INSTALL_IP -SERVER_PORT=$GATEWAY_PORT -SERVER_HOME=$LINKIS_INSTALL_HOME -if test -z "$SERVER_IP" +if test -z "$GATEWAY_INSTALL_IP" then - GATEWAY_INSTALL_IP=$local_host + export GATEWAY_INSTALL_IP="`hostname --fqdn`" fi -###install dir -installPackage -###update linkis.properties -echo "$SERVER_NAME-step4:update linkis conf" -SERVER_CONF_PATH=$SERVER_HOME/$SERVER_NAME/conf/linkis.properties -executeCMD $SERVER_IP "sed -i ${txt} \"s#wds.linkis.ldap.proxy.url.*#wds.linkis.ldap.proxy.url=$LDAP_URL#g\" $SERVER_CONF_PATH" -executeCMD $SERVER_IP "sed -i ${txt} \"s#wds.linkis.ldap.proxy.baseDN.*#wds.linkis.ldap.proxy.baseDN=$LDAP_BASEDN#g\" $SERVER_CONF_PATH" -executeCMD $SERVER_IP "sed -i ${txt} \"s#wds.linkis.gateway.admin.user.*#wds.linkis.gateway.admin.user=$deployUser#g\" $SERVER_CONF_PATH" -isSuccess "subsitution linkis.properties of $SERVER_NAME" -echo "<----------------$SERVER_NAME:end------------------->" -##GateWay Install end - -##publicservice install -PACKAGE_DIR=linkis/linkis-publicservice -SERVER_NAME=linkis-publicservice -SERVER_IP=$PUBLICSERVICE_INSTALL_IP -SERVER_PORT=$PUBLICSERVICE_PORT -SERVER_HOME=$LINKIS_INSTALL_HOME -###install dir -installPackage -###update linkis.properties -echo "$SERVER_NAME-step4:update linkis conf" -SERVER_CONF_PATH=$SERVER_HOME/$SERVER_NAME/conf/linkis.properties -executeCMD $SERVER_IP "sed -i ${txt} \"s#wds.linkis.server.mybatis.datasource.url.*#wds.linkis.server.mybatis.datasource.url=jdbc:mysql://${MYSQL_HOST}:${MYSQL_PORT}/${MYSQL_DB}?characterEncoding=UTF-8#g\" $SERVER_CONF_PATH" -executeCMD $SERVER_IP "sed -i ${txt} \"s#wds.linkis.server.mybatis.datasource.username.*#wds.linkis.server.mybatis.datasource.username=$MYSQL_USER#g\" $SERVER_CONF_PATH" -executeCMD $SERVER_IP "sed -i ${txt} \"s#wds.linkis.server.mybatis.datasource.password.*#wds.linkis.server.mybatis.datasource.password=$MYSQL_PASSWORD#g\" $SERVER_CONF_PATH" 
-executeCMD $SERVER_IP "sed -i ${txt} \"s#wds.linkis.workspace.filesystem.localuserrootpath.*#wds.linkis.workspace.filesystem.localuserrootpath=$WORKSPACE_USER_ROOT_PATH#g\" $SERVER_CONF_PATH" -executeCMD $SERVER_IP "sed -i ${txt} \"s#wds.linkis.workspace.filesystem.hdfsuserrootpath.prefix.*#wds.linkis.workspace.filesystem.hdfsuserrootpath.prefix=$HDFS_USER_ROOT_PATH#g\" $SERVER_CONF_PATH" -executeCMD $SERVER_IP "sed -i ${txt} \"s#\#hadoop.config.dir.*#hadoop.config.dir=$HADOOP_CONF_DIR#g\" $SERVER_CONF_PATH" -replaceConf "wds.linkis.gateway.url" "http://$GATEWAY_INSTALL_IP:$GATEWAY_PORT" "$SERVER_CONF_PATH" -isSuccess "subsitution linkis.properties of $SERVER_NAME" -echo "<----------------$SERVER_NAME:end------------------->" -##publicservice end - -##ResourceManager install -PACKAGE_DIR=linkis/rm -SERVER_NAME=linkis-resourcemanager -SERVER_IP=$RESOURCEMANAGER_INSTALL_IP -SERVER_PORT=$RESOURCEMANAGER_PORT -SERVER_HOME=$LINKIS_INSTALL_HOME -###install dir -installPackage -###update linkis.properties -echo "$SERVER_NAME-step4:update linkis conf" -SERVER_CONF_PATH=$SERVER_HOME/$SERVER_NAME/conf/linkis.properties -executeCMD $SERVER_IP "sed -i ${txt} \"s#wds.linkis.server.mybatis.datasource.url.*#wds.linkis.server.mybatis.datasource.url=jdbc:mysql://${MYSQL_HOST}:${MYSQL_PORT}/${MYSQL_DB}?characterEncoding=UTF-8#g\" $SERVER_CONF_PATH" -executeCMD $SERVER_IP "sed -i ${txt} \"s#wds.linkis.server.mybatis.datasource.username.*#wds.linkis.server.mybatis.datasource.username=$MYSQL_USER#g\" $SERVER_CONF_PATH" -executeCMD $SERVER_IP "sed -i ${txt} \"s#wds.linkis.server.mybatis.datasource.password.*#wds.linkis.server.mybatis.datasource.password=$MYSQL_PASSWORD#g\" $SERVER_CONF_PATH" -executeCMD $SERVER_IP "sed -i ${txt} \"s#\#hadoop.config.dir.*#hadoop.config.dir=$HADOOP_CONF_DIR#g\" $SERVER_CONF_PATH" -executeCMD $SERVER_IP "rm $SERVER_HOME/$SERVER_NAME/lib/json4s-*3.5.3.jar" -echo "subsitution linkis.properties of $SERVER_NAME" -echo 
"<----------------$SERVER_NAME:end------------------->" -##ResourceManager install end - - -##PythonEM install -PACKAGE_DIR=linkis/ujes/python -SERVER_NAME=linkis-ujes-python-enginemanager -SERVER_IP=$PYTHON_INSTALL_IP -SERVER_PORT=$PYTHON_EM_PORT -SERVER_HOME=$LINKIS_INSTALL_HOME -###install dir -installPackage -emExtraInstallModule -###update linkis.properties -echo "$SERVER_NAME-step4:update linkis conf" -SERVER_CONF_PATH=$SERVER_HOME/$SERVER_NAME/conf/linkis.properties -executeCMD $SERVER_IP "sed -i ${txt} \"s#wds.linkis.enginemanager.sudo.script.*#wds.linkis.enginemanager.sudo.script=$SERVER_HOME/$SERVER_NAME/bin/rootScript.sh#g\" $SERVER_CONF_PATH" -SERVER_ENGINE_CONF_PATH=$SERVER_HOME/$SERVER_NAME/conf/linkis-engine.properties -executeCMD $SERVER_IP "sed -i ${txt} \"s#\#hadoop.config.dir.*#hadoop.config.dir=$HADOOP_CONF_DIR#g\" $SERVER_ENGINE_CONF_PATH" -replaceConf "wds.linkis.gateway.url" "http://$GATEWAY_INSTALL_IP:$GATEWAY_PORT" "$SERVER_ENGINE_CONF_PATH" -isSuccess "subsitution linkis.properties of $SERVER_NAME" -echo "<----------------$SERVER_NAME:end------------------->" - - -##PythonEntrance install -PACKAGE_DIR=linkis/ujes/python -SERVER_NAME=linkis-ujes-python-entrance -SERVER_PORT=$PYTHON_ENTRANCE_PORT -###install dir -installPackage -###update linkis.properties -echo "$SERVER_NAME-step4:update linkis conf" -SERVER_CONF_PATH=$SERVER_HOME/$SERVER_NAME/conf/linkis.properties -executeCMD $SERVER_IP "sed -i ${txt} \"s#wds.linkis.entrance.config.logPath.*#wds.linkis.entrance.config.logPath=$WORKSPACE_USER_ROOT_PATH#g\" $SERVER_CONF_PATH" -executeCMD $SERVER_IP "sed -i ${txt} \"s#wds.linkis.resultSet.store.path.*#wds.linkis.resultSet.store.path=$RESULT_SET_ROOT_PATH#g\" $SERVER_CONF_PATH" -executeCMD $SERVER_IP "sed -i ${txt} \"s#\#hadoop.config.dir.*#hadoop.config.dir=$HADOOP_CONF_DIR#g\" $SERVER_CONF_PATH" -replaceConf "wds.linkis.gateway.url" "http://$GATEWAY_INSTALL_IP:$GATEWAY_PORT" "$SERVER_CONF_PATH" -isSuccess "subsitution linkis.properties of 
$SERVER_NAME" -echo "<----------------$SERVER_NAME:end------------------->" -##PythonEntrance install end - -if [[ '1' = "$INSTALL_MODE" ]];then - echo "Lite install end" - exit 0 -fi +executeCMD $SERVER_IP "sed -i ${txt} \"s#wds.linkis.server.version.*#wds.linkis.server.version=$LINKIS_SERVER_VERSION#g\" $common_conf" +executeCMD $SERVER_IP "sed -i ${txt} \"s#wds.linkis.gateway.url.*#wds.linkis.gateway.url=http://$GATEWAY_INSTALL_IP:$GATEWAY_PORT#g\" $common_conf" +executeCMD $SERVER_IP "sed -i ${txt} \"s#wds.linkis.eureka.defaultZone.*#wds.linkis.eureka.defaultZone=$EUREKA_URL#g\" $common_conf" +executeCMD $SERVER_IP "sed -i ${txt} \"s#wds.linkis.server.mybatis.datasource.url.*#wds.linkis.server.mybatis.datasource.url=jdbc:mysql://${MYSQL_HOST}:${MYSQL_PORT}/${MYSQL_DB}?characterEncoding=UTF-8#g\" $common_conf" +executeCMD $SERVER_IP "sed -i ${txt} \"s#wds.linkis.server.mybatis.datasource.username.*#wds.linkis.server.mybatis.datasource.username=$MYSQL_USER#g\" $common_conf" +executeCMD $SERVER_IP "sed -i ${txt} \"s#wds.linkis.server.mybatis.datasource.password.*#wds.linkis.server.mybatis.datasource.password=$MYSQL_PASSWORD#g\" $common_conf" + +executeCMD $SERVER_IP "sed -i ${txt} \"s#wds.linkis.ldap.proxy.url.*#wds.linkis.ldap.proxy.url=$LDAP_URL#g\" $common_conf" +executeCMD $SERVER_IP "sed -i ${txt} \"s#wds.linkis.ldap.proxy.baseDN.*#wds.linkis.ldap.proxy.baseDN=$LDAP_BASEDN#g\" $common_conf" +executeCMD $SERVER_IP "sed -i ${txt} \"s#wds.linkis.ldap.proxy.userNameFormat.*#wds.linkis.ldap.proxy.userNameFormat=$LDAP_USER_NAME_FORMAT#g\" $common_conf" +executeCMD $SERVER_IP "sed -i ${txt} \"s#wds.linkis.admin.user.*#wds.linkis.gateway.admin.user=$deployUser#g\" $common_conf" +# hadoop config +executeCMD $SERVER_IP "sed -i ${txt} \"s#\#hadoop.config.dir.*#hadoop.config.dir=$HADOOP_CONF_DIR#g\" $common_conf" +#hive config +executeCMD $SERVER_IP "sed -i ${txt} \"s#\#hive.config.dir.*#hive.config.dir=$HIVE_CONF_DIR#g\" $common_conf" + +#spark config +executeCMD 
$SERVER_IP "sed -i ${txt} \"s#spark.config.dir.*#spark.config.dir=$SPARK_CONF_DIR#g\" $common_conf" -##BML install -PACKAGE_DIR=linkis/linkis-bml -SERVER_NAME=linkis-bml -SERVER_IP=$BML_INSTALL_IP -SERVER_PORT=$BML_PORT -SERVER_HOME=$LINKIS_INSTALL_HOME -###install dir -installPackage -###update linkis.properties -echo "$SERVER_NAME-step4:update linkis conf" -SERVER_CONF_PATH=$SERVER_HOME/$SERVER_NAME/conf/linkis.properties -executeCMD $SERVER_IP "sed -i ${txt} \"s#wds.linkis.server.mybatis.datasource.url.*#wds.linkis.server.mybatis.datasource.url=jdbc:mysql://${MYSQL_HOST}:${MYSQL_PORT}/${MYSQL_DB}?characterEncoding=UTF-8#g\" $SERVER_CONF_PATH" -executeCMD $SERVER_IP "sed -i ${txt} \"s#wds.linkis.server.mybatis.datasource.username.*#wds.linkis.server.mybatis.datasource.username=$MYSQL_USER#g\" $SERVER_CONF_PATH" -executeCMD $SERVER_IP "sed -i ${txt} \"s#wds.linkis.server.mybatis.datasource.password.*#wds.linkis.server.mybatis.datasource.password=$MYSQL_PASSWORD#g\" $SERVER_CONF_PATH" -executeCMD $SERVER_IP "sed -i ${txt} \"s#\#hadoop.config.dir.*#hadoop.config.dir=$HADOOP_CONF_DIR#g\" $SERVER_CONF_PATH" -isSuccess "subsitution linkis.properties of $SERVERNAME" -echo "<----------------$SERVER_NAME:end------------------->" -##BML end - -##linkis-metadata install -PACKAGE_DIR=linkis/linkis-metadata -SERVER_NAME=linkis-metadata -SERVER_IP=$METADATA_INSTALL_IP -SERVER_PORT=$METADATA_PORT -SERVER_HOME=$LINKIS_INSTALL_HOME -###install dir -installPackage -###update linkis.properties -echo "$SERVER_NAME-step4:update linkis conf" -SERVER_CONF_PATH=$SERVER_HOME/$SERVER_NAME/conf/linkis.properties -executeCMD $SERVER_IP "sed -i ${txt} \"s#wds.linkis.server.mybatis.datasource.url.*#wds.linkis.server.mybatis.datasource.url=jdbc:mysql://${MYSQL_HOST}:${MYSQL_PORT}/${MYSQL_DB}?characterEncoding=UTF-8#g\" $SERVER_CONF_PATH" -executeCMD $SERVER_IP "sed -i ${txt} \"s#wds.linkis.server.mybatis.datasource.username.*#wds.linkis.server.mybatis.datasource.username=$MYSQL_USER#g\" 
$SERVER_CONF_PATH" -executeCMD $SERVER_IP "sed -i ${txt} \"s#wds.linkis.server.mybatis.datasource.password.*#wds.linkis.server.mybatis.datasource.password=$MYSQL_PASSWORD#g\" $SERVER_CONF_PATH" -executeCMD $SERVER_IP "sed -i ${txt} \"s#\#hive.config.dir.*#hive.config.dir=$HIVE_CONF_DIR#g\" $SERVER_CONF_PATH" if [ "$HIVE_META_URL" != "" ] then - executeCMD $SERVER_IP "sed -i ${txt} \"s#hive.meta.url.*#hive.meta.url=$HIVE_META_URL#g\" $SERVER_CONF_PATH" + executeCMD $SERVER_IP "sed -i ${txt} \"s#hive.meta.url.*#hive.meta.url=$HIVE_META_URL#g\" $common_conf" fi if [ "$HIVE_META_USER" != "" ] then - executeCMD $SERVER_IP "sed -i ${txt} \"s#hive.meta.user.*#hive.meta.user=$HIVE_META_USER#g\" $SERVER_CONF_PATH" + executeCMD $SERVER_IP "sed -i ${txt} \"s#hive.meta.user.*#hive.meta.user=$HIVE_META_USER#g\" $common_conf" fi if [ "$HIVE_META_PASSWORD" != "" ] then HIVE_META_PASSWORD=$(echo ${HIVE_META_PASSWORD//'#'/'\#'}) - executeCMD $SERVER_IP "sed -i ${txt} \"s#hive.meta.password.*#hive.meta.password=$HIVE_META_PASSWORD#g\" $SERVER_CONF_PATH" -fi -isSuccess "subsitution linkis.properties of $SERVER_NAME" -echo "<----------------$SERVER_NAME:end------------------->" -##metadata end - -##linkis-cs-server install -PACKAGE_DIR=linkis/linkis-cs-server -SERVER_NAME=linkis-cs-server -SERVER_IP=$CS_INSTALL_IP -SERVER_PORT=$CS_PORT -SERVER_HOME=$LINKIS_INSTALL_HOME -###install dir -installPackage -###update linkis.properties -echo "$SERVER_NAME-step4:update linkis conf" -SERVER_CONF_PATH=$SERVER_HOME/$SERVER_NAME/conf/linkis.properties -executeCMD $SERVER_IP "sed -i ${txt} \"s#wds.linkis.server.mybatis.datasource.url.*#wds.linkis.server.mybatis.datasource.url=jdbc:mysql://${MYSQL_HOST}:${MYSQL_PORT}/${MYSQL_DB}?characterEncoding=UTF-8#g\" $SERVER_CONF_PATH" -executeCMD $SERVER_IP "sed -i ${txt} \"s#wds.linkis.server.mybatis.datasource.username.*#wds.linkis.server.mybatis.datasource.username=$MYSQL_USER#g\" $SERVER_CONF_PATH" -executeCMD $SERVER_IP "sed -i ${txt} 
\"s#wds.linkis.server.mybatis.datasource.password.*#wds.linkis.server.mybatis.datasource.password=$MYSQL_PASSWORD#g\" $SERVER_CONF_PATH" -isSuccess "subsitution linkis.properties of $SERVER_NAME" -echo "<----------------$SERVER_NAME:end------------------->" -##cs end - -##HiveEM install -PACKAGE_DIR=linkis/ujes/hive -SERVER_NAME=linkis-ujes-hive-enginemanager -SERVER_IP=$HIVE_INSTALL_IP -SERVER_PORT=$HIVE_EM_PORT -SERVER_HOME=$LINKIS_INSTALL_HOME -###install dir -installPackage -emExtraInstallModule -###update linkis.properties -echo "$SERVER_NAME-step4:update linkis conf" -SERVER_CONF_PATH=$SERVER_HOME/$SERVER_NAME/conf/linkis.properties -executeCMD $SERVER_IP "sed -i ${txt} \"s#wds.linkis.enginemanager.sudo.script.*#wds.linkis.enginemanager.sudo.script=$SERVER_HOME/$SERVER_NAME/bin/rootScript.sh#g\" $SERVER_CONF_PATH" -executeCMD $SERVER_IP "sed -i ${txt} \"s#\#hadoop.config.dir.*#hadoop.config.dir=$HADOOP_CONF_DIR#g\" $SERVER_CONF_PATH" -executeCMD $SERVER_IP "sed -i ${txt} \"s#\#hive.config.dir.*#hive.config.dir=$HIVE_CONF_DIR#g\" $SERVER_CONF_PATH" -SERVER_ENGINE_CONF_PATH=$SERVER_HOME/$SERVER_NAME/conf/linkis-engine.properties -executeCMD $SERVER_IP "sed -i ${txt} \"s#\#hadoop.config.dir.*#hadoop.config.dir=$HADOOP_CONF_DIR#g\" $SERVER_ENGINE_CONF_PATH" -executeCMD $SERVER_IP "sed -i ${txt} \"s#\#hive.config.dir.*#hive.config.dir=$HIVE_CONF_DIR#g\" $SERVER_ENGINE_CONF_PATH" -replaceConf "wds.linkis.gateway.url" "http://$GATEWAY_INSTALL_IP:$GATEWAY_PORT" "$SERVER_ENGINE_CONF_PATH" -isSuccess "subsitution linkis.properties of $SERVER_NAME" -executeCMD $SERVER_IP "rm $SERVER_HOME/$SERVER_NAME/lib/guava-25.1-jre.jar" -executeCMD $SERVER_IP "rm $SERVER_HOME/$SERVER_NAME/lib/servlet-api-2.5.jar" -echo "<----------------$SERVER_NAME:end------------------->" -##HiveEM install end - -##HiveEntrance install -PACKAGE_DIR=linkis/ujes/hive -SERVER_NAME=linkis-ujes-hive-entrance -SERVER_PORT=$HIVE_ENTRANCE_PORT -###install dir -installPackage -###update linkis.properties 
-echo "$SERVER_NAME-step4:update linkis conf" -SERVER_CONF_PATH=$SERVER_HOME/$SERVER_NAME/conf/linkis.properties -executeCMD $SERVER_IP "sed -i ${txt} \"s#wds.linkis.entrance.config.logPath.*#wds.linkis.entrance.config.logPath=$WORKSPACE_USER_ROOT_PATH#g\" $SERVER_CONF_PATH" -executeCMD $SERVER_IP "sed -i ${txt} \"s#wds.linkis.resultSet.store.path.*#wds.linkis.resultSet.store.path=$RESULT_SET_ROOT_PATH#g\" $SERVER_CONF_PATH" -executeCMD $SERVER_IP "sed -i ${txt} \"s#\#hadoop.config.dir.*#hadoop.config.dir=$HADOOP_CONF_DIR#g\" $SERVER_CONF_PATH" -replaceConf "wds.linkis.gateway.url" "http://$GATEWAY_INSTALL_IP:$GATEWAY_PORT" "$SERVER_CONF_PATH" -isSuccess "subsitution linkis.properties of $SERVER_NAME" -echo "<----------------$SERVER_NAME:end------------------->" -##HiveEntrance install end - - -if [[ '2' = "$INSTALL_MODE" ]];then - echo "Simple install end" - exit 0 + executeCMD $SERVER_IP "sed -i ${txt} \"s#hive.meta.password.*#hive.meta.password=$HIVE_META_PASSWORD#g\" $common_conf" fi -if [[ '3' != "$INSTALL_MODE" ]];then - exit 0 + +executeCMD $SERVER_IP "sed -i ${txt} \"s#wds.linkis.filesystem.root.path.*#wds.linkis.filesystem.root.path=$WORKSPACE_USER_ROOT_PATH#g\" $common_conf" +executeCMD $SERVER_IP "sed -i ${txt} \"s#wds.linkis.filesystem.hdfs.root.path.*#wds.linkis.filesystem.hdfs.root.path=$HDFS_USER_ROOT_PATH#g\" $common_conf" + +# engineconn +if test -z $ENGINECONN_ROOT_PATH +then + ENGINECONN_ROOT_PATH=$workDir/engineroot fi +executeCMD $SERVER_IP "sed -i ${txt} \"s#wds.linkis.engineconn.root.dir.*#wds.linkis.engineconn.root.dir=$ENGINECONN_ROOT_PATH#g\" $common_conf" +executeCMD $SERVER_IP "sed -i ${txt} \"s#wds.linkis.engineconn.home.*#wds.linkis.engineconn.home=${workDir}/lib/linkis-engineconn-plugins#g\" $common_conf" +executeCMD $SERVER_IP "sed -i ${txt} \"s#wds.linkis.engineconn.plugin.loader.store.path.*#wds.linkis.engineconn.plugin.loader.store.path=${workDir}/lib/linkis-engineconn-plugins#g\" $common_conf" + +# common lib +executeCMD 
$SERVER_IP "sed -i ${txt} \"s#wds.linkis.public_module.path.*#wds.linkis.public_module.path=${workDir}/lib/linkis-commons/public-module#g\" $common_conf" + +echo "Congratulations! You have installed Linkis $LINKIS_VERSION successfully, please use sbin/start-all.sh to start it!" + + + -##SparkEM install -PACKAGE_DIR=linkis/ujes/spark -SERVER_NAME=linkis-ujes-spark-enginemanager -SERVER_IP=$SPARK_INSTALL_IP -SERVER_PORT=$SPARK_EM_PORT -SERVER_HOME=$LINKIS_INSTALL_HOME -###install dir -installPackage -emExtraInstallModule -###update linkis.properties -echo "$SERVER_NAME-step4:update linkis conf" -SERVER_CONF_PATH=$SERVER_HOME/$SERVER_NAME/conf/linkis.properties -ENGINE_CONF_PATH=$SERVER_HOME/$SERVER_NAME/conf/linkis-engine.properties -executeCMD $SERVER_IP "sed -i ${txt} \"s#wds.linkis.enginemanager.sudo.script.*#wds.linkis.enginemanager.sudo.script=$SERVER_HOME/$SERVER_NAME/bin/rootScript.sh#g\" $SERVER_CONF_PATH" -executeCMD $SERVER_IP "sed -i ${txt} \"s#wds.linkis.enginemanager.core.jar.*#wds.linkis.enginemanager.core.jar=$SERVER_HOME/$SERVER_NAME/lib/linkis-ujes-spark-engine-$LINKIS_VERSION.jar#g\" $SERVER_CONF_PATH" -executeCMD $SERVER_IP "sed -i ${txt} \"s#wds.linkis.spark.driver.conf.mainjar.*#wds.linkis.spark.driver.conf.mainjar=$SERVER_HOME/$SERVER_NAME/conf:$SERVER_HOME/$SERVER_NAME/lib/*#g\" $SERVER_CONF_PATH" -SERVER_ENGINE_CONF_PATH=$SERVER_HOME/$SERVER_NAME/conf/linkis-engine.properties -executeCMD $SERVER_IP "sed -i ${txt} \"s#\#hadoop.config.dir.*#hadoop.config.dir=$HADOOP_CONF_DIR#g\" $SERVER_ENGINE_CONF_PATH" -executeCMD $SERVER_IP "sed -i ${txt} \"s#\#spark.config.dir.*#spark.config.dir=$SPARK_CONF_DIR#g\" $SERVER_ENGINE_CONF_PATH" -replaceConf "wds.linkis.gateway.url" "http://$GATEWAY_INSTALL_IP:$GATEWAY_PORT" "$SERVER_ENGINE_CONF_PATH" -isSuccess "subsitution linkis.properties of $SERVER_NAME" -echo "<----------------$SERVER_NAME:end------------------->" -##SparkEM install end - -##SparkEntrance install -PACKAGE_DIR=linkis/ujes/spark 
-SERVER_NAME=linkis-ujes-spark-entrance -SERVER_PORT=$SPARK_ENTRANCE_PORT -###install dir -installPackage -###update linkis.properties -echo "$SERVER_NAME-step4:update linkis conf" -SERVER_CONF_PATH=$SERVER_HOME/$SERVER_NAME/conf/linkis.properties -executeCMD $SERVER_IP "sed -i ${txt} \"s#wds.linkis.entrance.config.logPath.*#wds.linkis.entrance.config.logPath=$WORKSPACE_USER_ROOT_PATH#g\" $SERVER_CONF_PATH" -executeCMD $SERVER_IP "sed -i ${txt} \"s#wds.linkis.resultSet.store.path.*#wds.linkis.resultSet.store.path=$HDFS_USER_ROOT_PATH#g\" $SERVER_CONF_PATH" -executeCMD $SERVER_IP "sed -i ${txt} \"s#\#hadoop.config.dir.*#hadoop.config.dir=$HADOOP_CONF_DIR#g\" $SERVER_CONF_PATH" -replaceConf "wds.linkis.gateway.url" "http://$GATEWAY_INSTALL_IP:$GATEWAY_PORT" "$SERVER_CONF_PATH" -isSuccess "subsitution linkis.properties of $SERVER_NAME" -echo "<----------------$SERVER_NAME:end------------------->" -##SparkEntrance install end - - -##JDBCEntrance install -PACKAGE_DIR=linkis/ujes/jdbc -SERVER_NAME=linkis-ujes-jdbc-entrance -SERVER_IP=$JDBC_INSTALL_IP -SERVER_PORT=$JDBC_ENTRANCE_PORT -###install dir -installPackage -###update linkis.properties -echo "$SERVER_NAME-step4:update linkis conf" -SERVER_CONF_PATH=$SERVER_HOME/$SERVER_NAME/conf/linkis.properties -executeCMD $SERVER_IP "sed -i ${txt} \"s#wds.linkis.entrance.config.logPath.*#wds.linkis.entrance.config.logPath=$WORKSPACE_USER_ROOT_PATH#g\" $SERVER_CONF_PATH" -executeCMD $SERVER_IP "sed -i ${txt} \"s#wds.linkis.resultSet.store.path.*#wds.linkis.resultSet.store.path=$HDFS_USER_ROOT_PATH#g\" $SERVER_CONF_PATH" -executeCMD $SERVER_IP "sed -i ${txt} \"s#\#hadoop.config.dir.*#hadoop.config.dir=$HADOOP_CONF_DIR#g\" $SERVER_CONF_PATH" -replaceConf "wds.linkis.gateway.url" "http://$GATEWAY_INSTALL_IP:$GATEWAY_PORT" "$SERVER_CONF_PATH" -isSuccess "subsitution linkis.properties of $SERVER_NAME" -echo "<----------------$SERVER_NAME:end------------------->" -##JDBCEntrance install end - - -##ShellEM install 
-PACKAGE_DIR=linkis/ujes/shell -SERVER_NAME=linkis-ujes-shell-enginemanager -SERVER_IP=$SHELL_INSTALL_IP -SERVER_PORT=$SHELL_EM_PORT -SERVER_HOME=$LINKIS_INSTALL_HOME -###install dir -installPackage -emExtraInstallModule -###update linkis.properties -echo "$SERVER_NAME-step4:update linkis conf" -SERVER_CONF_PATH=$SERVER_HOME/$SERVER_NAME/conf/linkis.properties -executeCMD $SERVER_IP "sed -i ${txt} \"s#wds.linkis.enginemanager.sudo.script.*#wds.linkis.enginemanager.sudo.script=$SERVER_HOME/$SERVER_NAME/bin/rootScript.sh#g\" $SERVER_CONF_PATH" -SERVER_ENGINE_CONF_PATH=$SERVER_HOME/$SERVER_NAME/conf/linkis-engine.properties -executeCMD $SERVER_IP "sed -i ${txt} \"s#\#hadoop.config.dir.*#hadoop.config.dir=$HADOOP_CONF_DIR#g\" $SERVER_ENGINE_CONF_PATH" -replaceConf "wds.linkis.gateway.url" "http://$GATEWAY_INSTALL_IP:$GATEWAY_PORT" "$SERVER_ENGINE_CONF_PATH" -isSuccess "subsitution linkis.properties of $SERVER_NAME" -echo "<----------------$SERVER_NAME:end------------------->" - -##SHELLEntrance install -PACKAGE_DIR=linkis/ujes/shell -SERVER_NAME=linkis-ujes-shell-entrance -SERVER_PORT=$SHELL_ENTRANCE_PORT -###install dir -installPackage -###update linkis.properties -echo "$SERVER_NAME-step4:update linkis conf" -SERVER_CONF_PATH=$SERVER_HOME/$SERVER_NAME/conf/linkis.properties -executeCMD $SERVER_IP "sed -i ${txt} \"s#\#hadoop.config.dir.*#hadoop.config.dir=$HADOOP_CONF_DIR#g\" $SERVER_CONF_PATH" -replaceConf "wds.linkis.gateway.url" "http://$GATEWAY_INSTALL_IP:$GATEWAY_PORT" "$SERVER_CONF_PATH" -isSuccess "subsitution linkis.properties of $SERVER_NAME" -echo "<----------------$SERVER_NAME:end------------------->" -##SHELLEntrance install end - - -##Datasource Manager Server install -PACKAGE_DIR=linkis/datasource/linkis-dsm-server -SERVER_NAME=linkis-dsm-server -SERVER_PORT=$DSM_PORT -###install dir -installPackage -###update linkis.properties -echo "$SERVER_NAME-step4:update linkis conf" -SERVER_CONF_PATH=$SERVER_HOME/$SERVER_NAME/conf/linkis.properties -executeCMD 
$SERVER_IP "sed -i ${txt} \"s#wds.linkis.server.mybatis.datasource.url.*#wds.linkis.server.mybatis.datasource.url=jdbc:mysql://${MYSQL_HOST}:${MYSQL_PORT}/${MYSQL_DB}?characterEncoding=UTF-8#g\" $SERVER_CONF_PATH" -executeCMD $SERVER_IP "sed -i ${txt} \"s#wds.linkis.server.mybatis.datasource.username.*#wds.linkis.server.mybatis.datasource.username=$MYSQL_USER#g\" $SERVER_CONF_PATH" -executeCMD $SERVER_IP "sed -i ${txt} \"s#wds.linkis.server.mybatis.datasource.password.*#wds.linkis.server.mybatis.datasource.password=$MYSQL_PASSWORD#g\" $SERVER_CONF_PATH" -executeCMD $SERVER_IP "sed -i ${txt} \"s#wds.linkis.server.dsm.admin.users.*#wds.linkis.server.dsm.admin.users=$deployUser#g\" $SERVER_CONF_PATH" -replaceConf "wds.linkis.gateway.url" "http://$GATEWAY_INSTALL_IP:$GATEWAY_PORT" "$SERVER_CONF_PATH" -isSuccess "subsitution linkis.properties of $SERVER_NAME" -echo "<----------------$SERVER_NAME:end------------------->" -##Datasource Manager Server install end - - - -##Metadata Manager Server install -PACKAGE_DIR=linkis/datasource/linkis-mdm-server -SERVER_NAME=linkis-mdm-server -SERVER_PORT=$MDM_PORT -###install dir -installPackage -###update linkis.properties -echo "$SERVER_NAME-step4:update linkis conf" -SERVER_CONF_PATH=$SERVER_HOME/$SERVER_NAME/conf/linkis.properties -replaceConf "wds.linkis.gateway.url" "http://$GATEWAY_INSTALL_IP:$GATEWAY_PORT" "$SERVER_CONF_PATH" -isSuccess "subsitution linkis.properties of $SERVER_NAME" -echo "<----------------$SERVER_NAME:end------------------->" -##Metadata Manager Server install end diff --git a/bin/start-all.sh b/bin/start-all.sh deleted file mode 100755 index 05f5046984df27ee0e7fa9e1b6d3ce7f3b62627a..0000000000000000000000000000000000000000 --- a/bin/start-all.sh +++ /dev/null @@ -1,314 +0,0 @@ -#!/usr/bin/env bash -# -# Copyright 2019 WeBank -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - - - -# Start all linkis applications -info="We will start all linkis applications, it will take some time, please wait" -echo ${info} - -#Actively load user env -source /etc/profile -source ~/.bash_profile - -shellDir=`dirname $0` -workDir=`cd ${shellDir}/..;pwd` - -CONF_DIR="${workDir}"/conf -export LINKIS_DSS_CONF_FILE=${LINKIS_DSS_CONF_FILE:-"${CONF_DIR}/config.sh"} -export DISTRIBUTION=${DISTRIBUTION:-"${CONF_DIR}/config.sh"} -#source $LINKIS_DSS_CONF_FILE -source ${DISTRIBUTION} - -source ${workDir}/bin/common.sh - - -local_host="`hostname --fqdn`" - -ipaddr=$(ip addr | awk '/^[0-9]+: / {}; /inet.*global/ {print gensub(/(.*)\/(.*)/, "\\1", "g", $2)}'|awk 'NR==1') - -function isLocal(){ - if [ "$1" == "127.0.0.1" ];then - return 0 - elif [ $1 == "localhost" ]; then - return 0 - elif [ $1 == $local_host ]; then - return 0 - elif [ $1 == $ipaddr ]; then - return 0 - fi - return 1 -} - -function executeCMD(){ - isLocal $1 - flag=$? - echo "Is local "$flag - if [ $flag == "0" ];then - eval $2 - else - ssh -p $SSH_PORT $1 $2 - fi - -} - - -#if there is no LINKIS_INSTALL_HOME,we need to source config again -if [ -z ${LINKIS_INSTALL_HOME} ];then - echo "Info: LINKIS_INSTALL_HOME does not exist, we will source config" - if [ ! 
-f "${LINKIS_DSS_CONF_FILE}" ];then - echo "Error: can not find config file, start applications failed" - exit 1 - else - source ${LINKIS_DSS_CONF_FILE} - fi -fi -APP_PREFIX="linkis-" - -function startApp(){ -echo "<-------------------------------->" -echo "Begin to start $SERVER_NAME" -SERVER_PATH=${APP_PREFIX}${SERVER_NAME} - -SERVER_BIN=${LINKIS_INSTALL_HOME}/${SERVER_PATH}/bin -SERVER_LOCAL_START_CMD="dos2unix ${SERVER_BIN}/* > /dev/null 2>&1; dos2unix ${SERVER_BIN}/../conf/* > /dev/null 2>&1; sh ${SERVER_BIN}/start-${SERVER_NAME}.sh" -SERVER_REMOTE_START_CMD="source /etc/profile;source ~/.bash_profile;cd ${SERVER_BIN}; dos2unix ./* > /dev/null 2>&1; dos2unix ../conf/* > /dev/null 2>&1; sh start-${SERVER_NAME}.sh > /dev/null 2>&1" -if test -z "$SERVER_IP" -then - SERVER_IP=$local_host -fi - -if ! executeCMD $SERVER_IP "test -e $SERVER_BIN"; then - echo "$SERVER_NAME is not installed,the startup steps will be skipped" - return -fi - -isLocal $SERVER_IP -flag=$? -echo "Is local "$flag -if [ $flag == "0" ];then - eval $SERVER_LOCAL_START_CMD -else - ssh -p $SSH_PORT $SERVER_IP $SERVER_REMOTE_START_CMD -fi -isSuccess "End to start $SERVER_NAME" -echo "<-------------------------------->" -sleep 3 -} - - -#eureka -SERVER_NAME="eureka" -APP_PREFIX="" -SERVER_IP=$EUREKA_INSTALL_IP -startApp - - -APP_PREFIX="linkis-" -#gateway -SERVER_NAME="gateway" -SERVER_IP=$GATEWAY_INSTALL_IP -startApp - -#publicservice -SERVER_NAME="publicservice" -SERVER_IP=$PUBLICSERVICE_INSTALL_IP -startApp - - -#metadata -SERVER_NAME="metadata" -SERVER_IP=$METADATA_INSTALL_IP -startApp - -#bml -SERVER_NAME="bml" -SERVER_IP=$BML_INSTALL_IP -startApp - -#cs-server -SERVER_NAME="cs-server" -SERVER_IP=$CS_INSTALL_IP -startApp - -#datasource management -SERVER_NAME="dsm-server" -SERVER_IP=$DSM_INSTALL_IP -startApp - -#metadata management -SERVER_NAME="mdm-server" -SERVER_IP=$MDM_INSTALL_IP -startApp - -#resourcemanager -SERVER_NAME="resourcemanager" -SERVER_IP=$RESOURCEMANAGER_INSTALL_IP -startApp 
-echo "sleep 15 seconds to wait RM to be ready" -sleep 15 - -APP_PREFIX="linkis-ujes-" - -#python-entrance -SERVER_NAME="python-entrance" -SERVER_IP=$PYTHON_INSTALL_IP -startApp - -#python-enginemanager -SERVER_NAME="python-enginemanager" -SERVER_IP=$PYTHON_INSTALL_IP -startApp - -#shell-entrance -SERVER_NAME="shell-entrance" -SERVER_IP=$SHELL_INSTALL_IP -startApp - -#shell-enginemanager -SERVER_NAME="shell-enginemanager" -SERVER_IP=$SHELL_INSTALL_IP -startApp - -#spark-entrance -SERVER_NAME="spark-entrance" -SERVER_IP=$SPARK_INSTALL_IP -startApp - -#spark-enginemanager -SERVER_NAME="spark-enginemanager" -SERVER_IP=$SPARK_INSTALL_IP -startApp - -#hive-entrance -SERVER_NAME="hive-entrance" -SERVER_IP=$HIVE_INSTALL_IP -startApp - - -#hive-enginemanager -SERVER_NAME="hive-enginemanager" -SERVER_IP=$HIVE_INSTALL_IP -startApp - - -#JDBCEntrance -SERVER_NAME="jdbc-entrance" -SERVER_IP=$JDBC_INSTALL_IP -startApp - - - -echo "start-all shell script executed completely" - -echo "Start to check all dss microservice" - -function checkServer(){ -echo "<-------------------------------->" -echo "Begin to check $SERVER_NAME" -if test -z "$SERVER_IP" -then - SERVER_IP=$local_host -fi - -SERVER_BIN=${LINKIS_INSTALL_HOME}/$SERVER_NAME/bin - -if ! 
executeCMD $SERVER_IP "test -e $SERVER_BIN"; then - echo "$SERVER_NAME is not installed,the checkServer steps will be skipped" - return -fi - -sh $workDir/bin/checkServices.sh $SERVER_NAME $SERVER_IP $SERVER_PORT -isSuccess "start $SERVER_NAME " -echo "<-------------------------------->" -sleep 5 -} -SERVER_NAME="eureka" -SERVER_IP=$EUREKA_INSTALL_IP -SERVER_PORT=$EUREKA_PORT -checkServer - -APP_PREFIX="linkis-" -SERVER_NAME=$APP_PREFIX"gateway" -SERVER_IP=$GATEWAY_INSTALL_IP -SERVER_PORT=$GATEWAY_PORT -checkServer - -SERVER_NAME=$APP_PREFIX"publicservice" -SERVER_IP=$PUBLICSERVICE_INSTALL_IP -SERVER_PORT=$PUBLICSERVICE_PORT -checkServer - -SERVER_NAME=$APP_PREFIX"metadata" -SERVER_IP=$METADATA_INSTALL_IP -SERVER_PORT=$METADATA_PORT -checkServer - -SERVER_NAME=$APP_PREFIX"resourcemanager" -SERVER_IP=$RESOURCEMANAGER_INSTALL_IP -SERVER_PORT=$RESOURCEMANAGER_PORT -checkServer - - -SERVER_NAME=$APP_PREFIX"bml" -SERVER_IP=$BML_INSTALL_IP -SERVER_PORT=$BML_PORT -checkServer - -#cs-server -SERVER_NAME="cs-server" -SERVER_IP=$CS_INSTALL_IP -checkServer - -APP_PREFIX="linkis-ujes-" -SERVER_NAME=$APP_PREFIX"python-entrance" -SERVER_IP=$PYTHON_INSTALL_IP -SERVER_PORT=$PYTHON_ENTRANCE_PORT -checkServer - -SERVER_NAME=$APP_PREFIX"python-enginemanager" -SERVER_IP=$PYTHON_INSTALL_IP -SERVER_PORT=$PYTHON_EM_PORT -checkServer - -SERVER_NAME=$APP_PREFIX"spark-entrance" -SERVER_IP=$SPARK_INSTALL_IP -SERVER_PORT=$SPARK_ENTRANCE_PORT -checkServer - -SERVER_NAME=$APP_PREFIX"spark-enginemanager" -SERVER_IP=$SPARK_INSTALL_IP -SERVER_PORT=$SPARK_EM_PORT -checkServer - -SERVER_NAME=$APP_PREFIX"hive-enginemanager" -SERVER_IP=$HIVE_INSTALL_IP -SERVER_PORT=$HIVE_EM_PORT -checkServer - -SERVER_NAME=$APP_PREFIX"hive-entrance" -SERVER_IP=$HIVE_INSTALL_IP -SERVER_PORT=$HIVE_ENTRANCE_PORT -checkServer - -SERVER_NAME=$APP_PREFIX"jdbc-entrance" -SERVER_IP=$JDBC_INSTALL_IP -SERVER_PORT=$JDBC_ENTRANCE_PORT -checkServer - - -echo "Linkis started successfully" diff --git a/bin/stop-all.sh 
b/bin/stop-all.sh deleted file mode 100755 index 098a5772b7bf756df339060e3bae59d21ece1f5d..0000000000000000000000000000000000000000 --- a/bin/stop-all.sh +++ /dev/null @@ -1,227 +0,0 @@ -#!/usr/bin/env bash -# -# Copyright 2019 WeBank -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - - - -# Stop all linkis applications -info="We will stop all linkis applications, it will take some time, please wait" -echo ${info} - -#Actively load user env -source /etc/profile -source ~/.bash_profile - -workDir=`dirname "${BASH_SOURCE-$0}"` -workDir=`cd "$workDir"; pwd` - - -CONF_DIR="${workDir}"/../conf -export LINKIS_DSS_CONF_FILE=${LINKIS_DSS_CONF_FILE:-"${CONF_DIR}/config.sh"} -export DISTRIBUTION=${DISTRIBUTION:-"${CONF_DIR}/config.sh"} -source ${DISTRIBUTION} -function isSuccess(){ -if [ $? -ne 0 ]; then - echo "ERROR: " + $1 - exit 1 -else - echo "INFO:" + $1 -fi -} - - -local_host="`hostname --fqdn`" - -ipaddr=$(ip addr | awk '/^[0-9]+: / {}; /inet.*global/ {print gensub(/(.*)\/(.*)/, "\\1", "g", $2)}'|awk 'NR==1') - -function isLocal(){ - if [ "$1" == "127.0.0.1" ];then - return 0 - elif [ $1 == "localhost" ]; then - return 0 - elif [ $1 == $local_host ]; then - return 0 - elif [ $1 == $ipaddr ]; then - return 0 - fi - return 1 -} - - -source ${workDir}/bin/common.sh - - -#if there is no LINKIS_INSTALL_HOME,we need to source config again -if [ -z ${LINKIS_INSTALL_HOME} ];then - echo "Warning: LINKIS_INSTALL_HOME does not exist, we will source config" - if [ ! 
-f "${LINKIS_DSS_CONF_FILE}" ];then - echo "Error: can not find config file, stop applications failed" - exit 1 - else - source ${LINKIS_DSS_CONF_FILE} - fi -fi -APP_PREFIX="linkis-" - -function stopApp(){ -echo "<-------------------------------->" -echo "Begin to stop $SERVER_NAME" -SERVER_PATH=${APP_PREFIX}${SERVER_NAME} -SERVER_BIN=${LINKIS_INSTALL_HOME}/${SERVER_PATH}/bin -SERVER_LOCAL_STOP_CMD="sh ${SERVER_BIN}/stop-${SERVER_NAME}.sh" -SERVER_REMOTE_STOP_CMD="source /etc/profile;source ~/.bash_profile;cd ${SERVER_BIN}; sh stop-${SERVER_NAME}.sh " -if test -z "$SERVER_IP" -then - SERVER_IP=$local_host -fi - -if ! executeCMD $SERVER_IP "test -e $SERVER_BIN"; then - echo "$SERVER_NAME is not installed,the stopApp steps will be skipped" - return -fi - -isLocal $SERVER_IP -flag=$? -echo "Is local "$flag -if [ $flag == "0" ];then - eval $SERVER_LOCAL_STOP_CMD -else - ssh -p $SSH_PORT $SERVER_IP $SERVER_REMOTE_STOP_CMD -fi -echo "<-------------------------------->" -sleep 3 -} - - -#eureka -SERVER_NAME="eureka" -APP_PREFIX="" -SERVER_IP=$EUREKA_INSTALL_IP -stopApp - - -APP_PREFIX="linkis-" -#gateway -SERVER_NAME="gateway" -SERVER_IP=$GATEWAY_INSTALL_IP -stopApp - -#publicservice -SERVER_NAME="publicservice" -SERVER_IP=$PUBLICSERVICE_INSTALL_IP -stopApp - -#bml -SERVER_NAME="bml" -SERVER_IP=$BML_INSTALL_IP -stopApp - - -#metadata -SERVER_NAME="metadata" -SERVER_IP=$METADATA_INSTALL_IP -stopApp - -#cs-server -SERVER_NAME="cs-server" -SERVER_IP=$CS_INSTALL_IP -stopApp - -#datasource management -SERVER_NAME="dsm-server" -SERVER_IP=$DSM_INSTALL_IP -stopApp - -#metadata management -SERVER_NAME="mdm-server" -SERVER_IP=$MDM_INSTALL_IP -stopApp - -APP_PREFIX="linkis-ujes-" - -#python-entrance -SERVER_NAME="python-entrance" -SERVER_IP=$PYTHON_INSTALL_IP -stopApp - -#python-enginemanager -SERVER_NAME="python-enginemanager" -SERVER_IP=$PYTHON_INSTALL_IP -stopApp - -#shell-entrance -SERVER_NAME="shell-entrance" -SERVER_IP=$SHELL_INSTALL_IP -stopApp - -#shell-enginemanager 
-SERVER_NAME="shell-enginemanager" -SERVER_IP=$SHELL_INSTALL_IP -stopApp - -#spark-entrance -SERVER_NAME="spark-entrance" -SERVER_IP=$SPARK_INSTALL_IP -stopApp - -#spark-enginemanager -SERVER_NAME="spark-enginemanager" -SERVER_IP=$SPARK_INSTALL_IP -stopApp - -#hive-entrance -SERVER_NAME="hive-entrance" -SERVER_IP=$HIVE_INSTALL_IP -stopApp - - -#hive-enginemanager -SERVER_NAME="hive-enginemanager" -SERVER_IP=$HIVE_INSTALL_IP -stopApp - -#cs-server -SERVER_NAME="cs-server" -SERVER_IP=$CS_INSTALL_IP -stopApp - - -#JDBCEntrance -SERVER_NAME="jdbc-entrance" -SERVER_IP=$JDBC_INSTALL_IP -stopApp - -SERVER_NAME="pipeline-entrance" -SERVER_IP=$PIPELINE_INSTALL_IP -stopApp - -SERVER_NAME="pipeline-enginemanager" -SERVER_IP=$PIPELINE_INSTALL_IP -stopApp - -SERVER_NAME="io-enginemanager" -SERVER_IP=$IO_INSTALL_IP -stopApp - - - -APP_PREFIX="linkis-" -#resourcemanager -SERVER_NAME="resourcemanager" -SERVER_IP=$RESOURCEMANAGER_INSTALL_IP -stopApp - -echo "stop-all shell script executed completely" diff --git a/bml/bml-engine-hook/pom.xml b/bml/bml-engine-hook/pom.xml deleted file mode 100644 index 0ecbb40b68c48720811b217018dd25488ee67ebc..0000000000000000000000000000000000000000 --- a/bml/bml-engine-hook/pom.xml +++ /dev/null @@ -1,102 +0,0 @@ - - - - linkis - com.webank.wedatasphere.linkis - 0.10.0 - ../../pom.xml - - 4.0.0 - - linkis-bml-hook - - - com.webank.wedatasphere.linkis - linkis-common - provided - - - com.webank.wedatasphere.linkis - linkis-bmlclient - - - org.apache.httpcomponents - httpclient - - - org.apache.httpcomponents - httpcore - - - org.apache.httpcomponents - httpcore - - - org.eclipse.jetty - jetty-servlet - - - org.eclipse.jetty - jetty-security - - - org.eclipse.jetty - jetty-server - - - org.eclipse.jetty - jetty-http - - - org.eclipse.jetty - jetty-util - - - org.eclipse.jetty - jetty-io - - - javax.servlet - javax.servlet-api - - - - - com.webank.wedatasphere.linkis - linkis-storage - provided - - - com.webank.wedatasphere.linkis - linkis-ujes-engine 
- provided - - - - - - - org.apache.maven.plugins - maven-deploy-plugin - - - - net.alchim31.maven - scala-maven-plugin - - - org.apache.maven.plugins - maven-jar-plugin - - - - - ${basedir}/src/main/resources - - - - - - \ No newline at end of file diff --git a/bml/bml-engine-hook/src/main/scala/com/webank/wedatasphere/linkis/bml/hook/BmlEnginePreExecuteHook.scala b/bml/bml-engine-hook/src/main/scala/com/webank/wedatasphere/linkis/bml/hook/BmlEnginePreExecuteHook.scala deleted file mode 100644 index 22ed4b4cc7b7d4b80ad613407096f72f438af7c3..0000000000000000000000000000000000000000 --- a/bml/bml-engine-hook/src/main/scala/com/webank/wedatasphere/linkis/bml/hook/BmlEnginePreExecuteHook.scala +++ /dev/null @@ -1,77 +0,0 @@ -package com.webank.wedatasphere.linkis.bml.hook - -import java.io.File -import java.util - -import com.webank.wedatasphere.linkis.bml.client.{BmlClient, BmlClientFactory} -import com.webank.wedatasphere.linkis.bml.exception.BmlHookDownloadException -import com.webank.wedatasphere.linkis.bml.utils.BmlHookUtils -import com.webank.wedatasphere.linkis.common.exception.ErrorException -import com.webank.wedatasphere.linkis.common.utils.{Logging, Utils} -import com.webank.wedatasphere.linkis.engine.ResourceExecuteRequest -import com.webank.wedatasphere.linkis.engine.execute.EngineExecutorContext -import com.webank.wedatasphere.linkis.engine.extension.EnginePreExecuteHook -import com.webank.wedatasphere.linkis.scheduler.executer.ExecuteRequest -import org.apache.commons.lang.StringUtils - -import scala.collection.JavaConversions._ -/** - * created by cooperyang on 2019/9/23 - * Description: - */ -class BmlEnginePreExecuteHook extends EnginePreExecuteHook with Logging{ - override val hookName: String = "BmlEnginePreExecuteHook" - - val RESOURCES_STR = "resources" - - val RESOURCE_ID_STR = "resourceId" - - val VERSION_STR = "version" - - val FILE_NAME_STR = "fileName" - - val processUser:String = System.getProperty("user.name") - - val defaultUser:String = 
"hadoop" - - val bmlClient:BmlClient = if (StringUtils.isNotEmpty(processUser)) - BmlClientFactory.createBmlClient(processUser) else BmlClientFactory.createBmlClient(defaultUser) - - val seperator:String = File.separator - - val pathType:String = "file://" - - override def callPreExecuteHook(engineExecutorContext: EngineExecutorContext, executeRequest: ExecuteRequest, code: String): String = { - val workDir = BmlHookUtils.getCurrentWorkDir - val jobId = engineExecutorContext.getJobId - executeRequest match { - case resourceExecuteRequest:ResourceExecuteRequest => val resources = resourceExecuteRequest.resources - if (null == resources) return code - resources foreach { - case resource:util.Map[String, Object] => val fileName = resource.get(FILE_NAME_STR).toString - val resourceId = resource.get(RESOURCE_ID_STR).toString - val version = resource.get(VERSION_STR).toString - val fullPath = if (workDir.endsWith(seperator)) pathType + workDir + fileName else - pathType + workDir + seperator + fileName - val response = Utils.tryCatch{ - bmlClient.downloadResource(processUser, resourceId, version, fullPath, true) - }{ - case error:ErrorException => logger.error("download resource for {} failed", error) - throw error - case t:Throwable => logger.error(s"download resource for $jobId failed", t) - val e1 = BmlHookDownloadException(t.getMessage) - e1.initCause(t) - throw t - } - if (response.isSuccess){ - logger.info(s"for job $jobId resourceId $resourceId version $version download to path $fullPath ok") - }else{ - logger.warn(s"for job $jobId resourceId $resourceId version $version download to path $fullPath Failed") - } - case _ => logger.warn("job resource cannot download") - } - case _ => - } - if (StringUtils.isNotBlank(code)) code else executeRequest.code - } -} diff --git a/bml/bml-engine-hook/src/main/scala/com/webank/wedatasphere/linkis/bml/utils/BmlHookUtils.scala b/bml/bml-engine-hook/src/main/scala/com/webank/wedatasphere/linkis/bml/utils/BmlHookUtils.scala 
deleted file mode 100644 index d82b46bd004259911d613f2fcef5696baca54745..0000000000000000000000000000000000000000 --- a/bml/bml-engine-hook/src/main/scala/com/webank/wedatasphere/linkis/bml/utils/BmlHookUtils.scala +++ /dev/null @@ -1,20 +0,0 @@ -package com.webank.wedatasphere.linkis.bml.utils - -import com.webank.wedatasphere.linkis.common.utils.Utils - -/** - * created by cooperyang on 2019/9/24 - * Description: - */ -object BmlHookUtils { - val WORK_DIR_STR = "user.dir" - def getCurrentWorkDir:String = System.getProperty(WORK_DIR_STR) - - - def deleteAllFiles(workDir:String):Unit = { - - } - - - -} diff --git a/bml/bmlclient/pom.xml b/bml/bmlclient/pom.xml deleted file mode 100644 index 645b8e33ef08ef0f7aa6da37488bf6b25cd8b0ee..0000000000000000000000000000000000000000 --- a/bml/bmlclient/pom.xml +++ /dev/null @@ -1,71 +0,0 @@ - - - - linkis - com.webank.wedatasphere.linkis - 0.10.0 - ../../pom.xml - - 4.0.0 - - linkis-bmlclient - - - - 4.4 - - - - - com.webank.wedatasphere.linkis - linkis-bmlcommon - - - - com.webank.wedatasphere.linkis - linkis-storage - provided - - - com.webank.wedatasphere.linkis - linkis-common - provided - - - com.webank.wedatasphere.linkis - linkis-gateway-httpclient-support - - - - - - - - - org.apache.maven.plugins - maven-deploy-plugin - - - - net.alchim31.maven - scala-maven-plugin - - - org.apache.maven.plugins - maven-jar-plugin - - - - - ${basedir}/src/main/resources - - - - - - - - - \ No newline at end of file diff --git a/bml/bmlclient/src/main/scala/com/webank/wedatasphere/linkis/bml/client/impl/HttpBmlClient.scala b/bml/bmlclient/src/main/scala/com/webank/wedatasphere/linkis/bml/client/impl/HttpBmlClient.scala deleted file mode 100644 index e7d63672a4abcc0d08f9f0dad0e58c2d8ba40c3c..0000000000000000000000000000000000000000 --- a/bml/bmlclient/src/main/scala/com/webank/wedatasphere/linkis/bml/client/impl/HttpBmlClient.scala +++ /dev/null @@ -1,323 +0,0 @@ -/* - * Copyright 2019 WeBank - * - * Licensed under the Apache License, 
Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package com.webank.wedatasphere.linkis.bml.client.impl - -import java.io.{File, IOException, InputStream} -import java.util - -import com.webank.wedatasphere.linkis.bml.client.AbstractBmlClient -import com.webank.wedatasphere.linkis.bml.common._ -import com.webank.wedatasphere.linkis.bml.conf.BmlConfiguration -import com.webank.wedatasphere.linkis.bml.http.HttpConf -import com.webank.wedatasphere.linkis.bml.protocol._ -import com.webank.wedatasphere.linkis.bml.request._ -import com.webank.wedatasphere.linkis.bml.response._ -import com.webank.wedatasphere.linkis.common.io.FsPath -import com.webank.wedatasphere.linkis.httpclient.authentication.AuthenticationStrategy -import com.webank.wedatasphere.linkis.httpclient.config.{ClientConfig, ClientConfigBuilder} -import com.webank.wedatasphere.linkis.httpclient.dws.DWSHttpClient -import com.webank.wedatasphere.linkis.httpclient.dws.authentication.{StaticAuthenticationStrategy, TokenAuthenticationStrategy} -import com.webank.wedatasphere.linkis.httpclient.dws.config.DWSClientConfig -import com.webank.wedatasphere.linkis.storage.FSFactory -import org.apache.commons.io.IOUtils -import org.apache.commons.lang.StringUtils -import org.slf4j.{Logger, LoggerFactory} - -/** - * created by cooperyang on 2019/5/23 - * Description: - */ -class HttpBmlClient extends AbstractBmlClient{ - - private val logger:Logger = LoggerFactory.getLogger(classOf[HttpBmlClient]) - - val serverUrl:String = HttpConf.gatewayInstance - 
val maxConnection:Int = 10 - val readTimeout:Int = 10000 - val authenticationStrategy:AuthenticationStrategy = new TokenAuthenticationStrategy() - val clientConfig:ClientConfig = ClientConfigBuilder.newBuilder().addUJESServerUrl(serverUrl) - .connectionTimeout(30000).discoveryEnabled(false) - .loadbalancerEnabled(false).maxConnectionSize(maxConnection) - .retryEnabled(false).readTimeout(readTimeout) - .setAuthenticationStrategy(authenticationStrategy).setAuthTokenKey(BmlConfiguration.AUTH_TOKEN_KEY.getValue) - .setAuthTokenValue(BmlConfiguration.AUTH_TOKEN_VALUE.getValue).build() - val dwsClientConfig:DWSClientConfig = new DWSClientConfig(clientConfig) - dwsClientConfig.setDWSVersion(BmlConfiguration.DWS_VERSION.getValue) - val dwsClientName:String = "BML-Client" - val dwsClient:DWSHttpClient = new DWSHttpClient(dwsClientConfig, dwsClientName) - - val FIRST_VERSION:String = "v000001" - - - - override def downloadResource(user:String, resourceID: String): BmlDownloadResponse = { - downloadResource(user, resourceID, "") - } - - override def downloadResource(user: String, resourceId: String, version: String): BmlDownloadResponse = { - val bmlDownloadAction = BmlDownloadAction() - import scala.collection.JavaConversions._ - bmlDownloadAction.getParameters +="resourceId"->resourceId - // TODO: 不能放非空的参数 - if(version != null)bmlDownloadAction.getParameters +="version"->version - bmlDownloadAction.setUser(user) - val result = dwsClient.execute(bmlDownloadAction) - new BmlDownloadResponse(true,bmlDownloadAction.getInputStream,resourceId,version,null) - /* result match { - case downloadResult:BmlResourceDownloadResult => val isSuccess = if (downloadResult.getStatusCode == 0) true else false - if (isSuccess){ - downloadResult.setInputStream(bmlDownloadAction.getInputStream) - BmlDownloadResponse(isSuccess, downloadResult.inputStream, downloadResult.getResourceId, downloadResult.getVersion, "") - }else{ - logger.error(s"user ${user} download resource $resourceId version 
$version failed, status code is ${ downloadResult.getStatusCode}") - BmlDownloadResponse(isSuccess, null, null, null, null) - } - case r:BmlResult => logger.error(s"result type ${r.getResultType} not match BmlResourceDownloadResult") - throw POSTResultNotMatchException() - case _ => throw POSTResultNotMatchException() - }*/ - } - - /** - * 下载资源到指定的path中 - * @param user 用户名 - * @param resourceId 资源ID - * @param version 版本信息 - * @param path 指定的目录,前面要加schema share:// local:// 等 - * @param overwrite 是否是追加 - * @return 返回的inputStream已经被全部读完,所以返回一个null,另外的fullFileName是整个文件的名字 - */ - override def downloadResource(user: String, resourceId: String, version: String, path: String, overwrite:Boolean = false): BmlDownloadResponse = { - //1检查目录是否存在,包括path的schema - //2检查文件是否存在,如果文件存在,并且overwrite是false,则报错 - //3获取downloaded_file_name 拼成一个完整的filePath - //4获取inputStream,然后写入到filePath中 - val fsPath = new FsPath(path) - val fileSystem = FSFactory.getFsByProxyUser(fsPath, user) - fileSystem.init(new util.HashMap[String, String]()) -// if (fileSystem.exists(fsPath)){ -// logger.error(s"path $path not exists") -// throw IllegalPathException() -// } -// val getBasicAction = BmlGetBasicAction(resourceId) -// val getBasicResult = dwsClient.execute(getBasicAction) match{ -// case result:BmlGetBasicResult => result -// case _ => throw GetResultNotMatchException() -// } - -// val fileName:StringBuilder = new StringBuilder -// fileName.append(path).append(if (path.endsWith("/")) "" else "/") - -// if (getBasicResult != null && getBasicResult.getStatusCode == 0){ -// val downloadedFileName = getBasicResult.downloadedFileName -// if (StringUtils.isNotEmpty(downloadedFileName)){ -// fileName.append(downloadedFileName) -// }else{ -// throw BmlResponseErrorException("返回的downloadedFileName参数为空") -// } -// }else{ -// logger.error(s"获取 $resourceId 资源失败, BmlServer的返回码是 ${getBasicResult.getStatusCode}") -// throw BmlResponseErrorException("通过http方式获取") -// } - - val fullFileName = path - val 
downloadAction = BmlDownloadAction() // TODO: 这里暂时还没改 - import scala.collection.JavaConversions._ - downloadAction.getParameters += "resourceId" -> resourceId - // TODO: 不能放非空的参数 - if(version != null) downloadAction.getParameters += "version" -> version - downloadAction.setUser(user) - val downloadResult = dwsClient.execute(downloadAction) - val fullFilePath = new FsPath(fullFileName) - if (downloadResult != null){ - val inputStream = downloadAction.getInputStream - val outputStream = fileSystem.write(fullFilePath, overwrite) - try{ - IOUtils.copy(inputStream, outputStream) - }catch{ - case e:IOException => logger.error("inputStream和outputStream流copy失败", e) - val exception = BmlClientFailException("inputStream和outputStream流copy失败") - exception.initCause(e) - throw e - case t:Throwable => logger.error("流复制失败",t) - throw t - }finally{ - IOUtils.closeQuietly(inputStream) - IOUtils.closeQuietly(outputStream) - } - BmlDownloadResponse(true, null, resourceId, version, fullFileName) - }else{ - BmlDownloadResponse(false, null, null, null, null) - } - } - - /** - * 更新资源信息 - * - * @param resourceID 资源id - * @param filePath 目标文件路径 - * @return resourceId 新的版本信息 - */ - override def updateResource(user:String, resourceID: String, filePath: String): BmlUpdateResponse = { - val inputStream:InputStream = getInputStream(filePath) - updateResource(user, resourceID, filePath, inputStream) - } - - override def updateResource(user:String, resourceID: String, filePath: String, inputStream: InputStream): BmlUpdateResponse = { - val _inputStreams = new util.HashMap[String, InputStream]() - _inputStreams.put("file", inputStream) - val bmlUpdateAction = BmlUpdateAction(null, _inputStreams) - bmlUpdateAction.setUser(user) - bmlUpdateAction.inputStreamNames.put("file", pathToName(filePath)) - bmlUpdateAction.getParameters.put("resourceId",resourceID) - val result = dwsClient.execute(bmlUpdateAction) - result match{ - case updateResult:BmlUpdateResult => val isSuccess= if 
(updateResult.getStatus == 0) true else false - if (isSuccess){ - val resourceId = updateResult.getResourceId - val version = updateResult.getVersion - BmlUpdateResponse(isSuccess, resourceId, version) - }else{ - logger.error(s"user $user update resource failed, status code is ${updateResult.getStatusCode}") - BmlUpdateResponse(isSuccess, null, null) - } - case r:BmlResult => logger.error(s"result type ${r.getResultType} not match BmlResourceDownloadResult") - throw POSTResultNotMatchException() - case _ => throw POSTResultNotMatchException() - } - } - - /** - * relateResource方法将targetFilePath路径的文件关联到resourceID下面 - * targetFilePath需要包括schema,如果不包含schema,默认是hdfs - * - * @param resourceID resourceID - * @param targetFilePath 指定文件目录 - * @return BmlRelateResult 包含resourceId和新的version - */ - override def relateResource(resourceID: String, targetFilePath: String): BmlRelateResponse = { - null - } - - - - - /** - * 获取resourceid 对应资源的所有版本 - * @param user 用户名 - * @param resourceId 资源Id - * @return resourceId对应下的所有版本信息 - */ - override def getVersions(user: String, resourceId: String): BmlResourceVersionsResponse = { - val getVersionsAction = BmlGetVersionsAction(user, resourceId) - val result = dwsClient.execute(getVersionsAction) - result match{ - case _result:BmlResourceVersionResult => val isSuccess= if (_result.getStatus == 0) true else false - if (isSuccess){ - val resourceId = _result.getResourceId - val resourceVersions = _result.getResourceVersions - BmlResourceVersionsResponse(isSuccess,resourceId, resourceVersions) - }else{ - logger.error(s"user $user get versions failed, status code is ${_result.getStatusCode}") - BmlResourceVersionsResponse(isSuccess, null, null) - } - case r:BmlResult => logger.error(s"result type ${r.getResultType} not match BmlResourceDownloadResult") - throw POSTResultNotMatchException() - case _ => throw POSTResultNotMatchException() - } - } - - - - - /** - * 上传文件,用户指定文件路径,客户端自动获取输入流 - * @param user 用户名 - * @param filePath 文件路径 - * @return 
包含resourceId和version - */ - override def uploadResource(user: String, filePath: String): BmlUploadResponse = { - val inputStream:InputStream = getInputStream(filePath) - uploadResource(user, filePath, inputStream) - } - - - private def pathToName(filePath:String):String = new File(filePath).getName - - - /** - * 上传资源 - * - * @param user 用户名 - * @param filePath 上传的资源的路径 - * @param inputStream 上传资源的输入流 - * @return - */ - override def uploadResource(user: String, filePath: String, inputStream: InputStream): BmlUploadResponse = { - val _inputStreams = new util.HashMap[String, InputStream]() - _inputStreams.put("file", inputStream) - val uploadAction = BmlUploadAction(null, _inputStreams) - uploadAction.inputStreamNames.put("file", pathToName(filePath)) - uploadAction.setUser(user) - val result = dwsClient.execute(uploadAction) - result match { - case bmlUploadResult:BmlUploadResult => val isSuccess = if(bmlUploadResult.getStatus == 0) true else false - if (isSuccess){ - val resourceId = bmlUploadResult.getResourceId - val version = bmlUploadResult.getVersion - BmlUploadResponse(isSuccess, resourceId,version) - }else{ - logger.error(s"user $user upload resource failed, status code is ${bmlUploadResult.getStatusCode}") - BmlUploadResponse(isSuccess, null, null) - } - case r:BmlResult => logger.error(s"result type ${r.getResultType} not match BmlResourceDownloadResult") - throw POSTResultNotMatchException() - case _ => throw POSTResultNotMatchException() - } - } - - - /** - * - */ - override def deleteResource(user: String, resourceId: String, version: String): BmlDeleteResponse = { - null - } - - override def deleteResource(user: String, resourceId: String): BmlDeleteResponse = { - val deleteAction = BmlDeleteAction(resourceId) - deleteAction.getParameters.put("resourceId", resourceId) - val result = dwsClient.execute(deleteAction) - result match { - case bmlDeleteResult: BmlDeleteResult => val isSuccess= if (bmlDeleteResult.getStatus == 0) true else false - if 
(isSuccess){ - BmlDeleteResponse(isSuccess) - }else{ - logger.error(s"user $user update resource failed, status code is ${bmlDeleteResult.getStatusCode}") - BmlDeleteResponse(isSuccess) - } - case r:BmlResult => logger.error(s"result type ${r.getResultType} not match BmlResourceDownloadResult") - throw POSTResultNotMatchException() - case _ => throw POSTResultNotMatchException() - } - } - - //todo 现在是为了通过编译 - private def getInputStream(str: String):InputStream = { - null - } - -} diff --git a/bml/bmlclient/src/main/scala/com/webank/wedatasphere/linkis/bml/request/BmlPOSTAction.scala b/bml/bmlclient/src/main/scala/com/webank/wedatasphere/linkis/bml/request/BmlPOSTAction.scala deleted file mode 100644 index be915bea2c3dbcba8b4acb151b40f3bdac65d34f..0000000000000000000000000000000000000000 --- a/bml/bmlclient/src/main/scala/com/webank/wedatasphere/linkis/bml/request/BmlPOSTAction.scala +++ /dev/null @@ -1,179 +0,0 @@ -/* - * Copyright 2019 WeBank - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package com.webank.wedatasphere.linkis.bml.request - -import java.io.{File, InputStream} -import java.util - -import com.webank.wedatasphere.linkis.bml.http.HttpConf -import com.webank.wedatasphere.linkis.httpclient.request._ - -/** - * created by cooperyang on 2019/5/23 - * Description: - */ - -abstract class BmlPOSTAction extends POSTAction - -abstract class BmlGETAction extends GetAction - - -/** - * BmlUpload - * @param filePaths - * @param _inputStreams - */ -case class BmlUploadAction(filePaths:Array[String], - _inputStreams:util.Map[String,InputStream]) extends BmlPOSTAction with UploadAction{ - - private val streamNames = new util.HashMap[String,String] - - override val files: util.Map[String, String] = { - if (null == filePaths || filePaths.length == 0) new util.HashMap[String,String]() else{ - val map = new java.util.HashMap[String, String] - filePaths foreach { - filePath => val arr = filePath.split(File.separator) - val fileName = arr(arr.length - 1) - map.put("file", filePath) - } - map - } - } - - override def inputStreams: util.Map[String, InputStream] = _inputStreams - - override def inputStreamNames: util.Map[String, String] = streamNames - - // override def inputStreams: util.Map[String, InputStream] = { - // if (files.size() == 0) new util.HashMap[String, InputStream]() else{ - // val map = new util.HashMap[String, InputStream]() - // files foreach { - // case (fileName, filePath) => val fs = FSFactory.getFs(new FsPath(filePath)) - // fs.init(null) - // val inputStream = fs.read(new FsPath(filePath)) - // - // } - // } - // } - - private var _user:String = _ - - override def setUser(user: String): Unit = this._user = user - - override def getUser: String = this._user - - override def getRequestPayload: String = "" - - override def getURL: String = HttpConf.uploadURL -} - -case class BmlUpdateAction(filePaths:Array[String], - _inputStreams:util.Map[String,InputStream]) extends BmlPOSTAction with UploadAction{ - override def getURL: String = 
HttpConf.updateVersionURL - - override def getRequestPayload: String = "" - - private var _user:String = _ - - private val streamNames = new util.HashMap[String,String] - - override val files: util.Map[String, String] = { - if (null == filePaths || filePaths.length == 0) new util.HashMap[String,String]() else{ - val map = new java.util.HashMap[String, String] - filePaths foreach { - filePath => val arr = filePath.split(File.separator) - val fileName = arr(arr.length - 1) - map.put("file", filePath) - } - map - } - } - - override def setUser(user: String): Unit = this._user = user - - override def getUser: String = this._user - override def inputStreams: util.Map[String, InputStream] = _inputStreams - - override def inputStreamNames: util.Map[String, String] = streamNames -} - - -case class BmlDownloadAction() extends BmlGETAction with DownloadAction with UserAction{ - - private var inputStream:InputStream = _ - private var user:String = _ - - def getInputStream:InputStream = this.inputStream - - def setInputStream(inputStream: InputStream):Unit = this.inputStream = inputStream - - override def getURL: String = HttpConf.downloadURL - - override def write(inputStream: InputStream): Unit = this.inputStream = inputStream - - override def setUser(user: String): Unit = this.user = user - - override def getUser: String = this.user -} - - - - -case class BmlRelateAction(user:String, - resourceId:String, - inputStream: InputStream) extends BmlPOSTAction{ - override def getRequestPayload: String = "" - - override def getURL: String = HttpConf.updateVersionURL -} - - -case class BmlGetVersionsAction(user:String, - resourceId:String) extends BmlPOSTAction{ - override def getRequestPayload: String = "" - - override def getURL: String = HttpConf.getVersionsUrl -} - - -case class BmlUpdateBasicAction(properties:java.util.Map[String, String]) extends BmlPOSTAction{ - override def getRequestPayload: String = "" - - override def getURL: String = HttpConf.updateBasicUrl -} - - -case 
class BmlGetBasicAction(resourceId:String) extends BmlGETAction with UserAction { - - private var user:String = _ - - override def getURL: String = HttpConf.getBasicUrl - - override def setUser(user: String): Unit = this.user = user - - override def getUser: String = this.user -} - - - -case class BmlDeleteAction(resourceId:String) extends BmlPOSTAction { - override def getRequestPayload: String = "" - - override def getURL: String = HttpConf.deleteURL -} - - - diff --git a/bml/bmlcommon/pom.xml b/bml/bmlcommon/pom.xml deleted file mode 100644 index 090b92c8a1dcc6ed5c09bd31e88628dd5158ec48..0000000000000000000000000000000000000000 --- a/bml/bmlcommon/pom.xml +++ /dev/null @@ -1,43 +0,0 @@ - - - - linkis - com.webank.wedatasphere.linkis - 0.10.0 - ../../pom.xml - - 4.0.0 - - linkis-bmlcommon - - - - - - org.apache.maven.plugins - maven-deploy-plugin - - - - net.alchim31.maven - scala-maven-plugin - - - org.apache.maven.plugins - maven-jar-plugin - - - - - ${basedir}/src/main/resources - - - - - - - - - \ No newline at end of file diff --git a/bml/bmlcommon/src/main/java/com/webank/wedatasphere/linkis/bml/model/AbstractAuditable.java b/bml/bmlcommon/src/main/java/com/webank/wedatasphere/linkis/bml/model/AbstractAuditable.java deleted file mode 100644 index f362ccbc696ddac73491c671f63d31a674e5cded..0000000000000000000000000000000000000000 --- a/bml/bmlcommon/src/main/java/com/webank/wedatasphere/linkis/bml/model/AbstractAuditable.java +++ /dev/null @@ -1,76 +0,0 @@ -/* - * Copyright 2019 WeBank - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ -package com.webank.wedatasphere.linkis.bml.model; - -import java.util.Date; - -/** - * created by cooperyang on 2019/5/14 - * Description: - */ -public abstract class AbstractAuditable { - private Date created; - private Date updated; - private String createdBy; - private String updatedBy; - - public Date getCreated() { - return created; - } - - public void setCreated(Date created) { - this.created = created; - } - - public Date getUpdated() { - return updated; - } - - public void setUpdated(Date updated) { - this.updated = updated; - } - - public String getCreatedBy() { - return createdBy; - } - - public void setCreatedBy(String createdBy) { - this.createdBy = createdBy; - } - - public String getUpdatedBy() { - return updatedBy; - } - - public void setUpdatedBy(String updatedBy) { - this.updatedBy = updatedBy; - } - - public void setInfoOnCreate(String user) { - Date current = new Date(); - this.setCreated(current); - this.setUpdated(current); - this.setCreatedBy(user); - this.setUpdatedBy(user); - } - - public void setInfoOnUpdate(String user) { - Date current = new Date(); - this.setUpdated(current); - this.setUpdatedBy(user); - } - -} diff --git a/bml/bmlserver/Dockerfile b/bml/bmlserver/Dockerfile deleted file mode 100644 index 19973e5c15944aaf3963258a716f96e6a45886aa..0000000000000000000000000000000000000000 --- a/bml/bmlserver/Dockerfile +++ /dev/null @@ -1,27 +0,0 @@ -# -# Copyright 2019 WeBank -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. - -FROM wedatasphere/linkis:emr-base-spark2.4.4 - -MAINTAINER wedatasphere@webank.com - -RUN yum install -y unzip -WORKDIR /opt/linkis - -COPY target/linkis-bml.zip /opt/linkis -RUN unzip linkis-bml.zip - -WORKDIR /opt/linkis/linkis-bml/bin -ENTRYPOINT ["/opt/linkis/linkis-bml/bin/startup.sh"] diff --git a/bml/bmlserver/bin/start-bml.sh b/bml/bmlserver/bin/start-bml.sh deleted file mode 100755 index 80cc775a4a653ca925220793265a794821f1bfe1..0000000000000000000000000000000000000000 --- a/bml/bmlserver/bin/start-bml.sh +++ /dev/null @@ -1,38 +0,0 @@ -#!/bin/bash - -cd `dirname $0` -cd .. -HOME=`pwd` - -export SERVER_PID=$HOME/bin/linkis.pid -export SERVER_LOG_PATH=$HOME/logs -export SERVER_CLASS=com.webank.wedatasphere.linkis.DataWorkCloudApplication - -if test -z "$SERVER_HEAP_SIZE" -then - export SERVER_HEAP_SIZE="512M" -fi - -if test -z "$SERVER_JAVA_OPTS" -then - export SERVER_JAVA_OPTS=" -Xmx$SERVER_HEAP_SIZE -XX:+UseG1GC -Xloggc:$HOME/logs/linkis-gc.log" -fi - -if [[ -f "${SERVER_PID}" ]]; then - pid=$(cat ${SERVER_PID}) - if kill -0 ${pid} >/dev/null 2>&1; then - echo "Server is already running." - exit 1 - fi -fi - -nohup java $SERVER_JAVA_OPTS -cp ../module/lib/*:$HOME/conf:$HOME/lib/* $SERVER_CLASS 2>&1 > $SERVER_LOG_PATH/linkis.out & -pid=$! -if [[ -z "${pid}" ]]; then - echo "server $SERVER_NAME start failed!" - exit 1 -else - echo "server $SERVER_NAME start succeeded!" - echo $pid > $SERVER_PID - sleep 1 -fi \ No newline at end of file diff --git a/bml/bmlserver/bin/startup.sh b/bml/bmlserver/bin/startup.sh deleted file mode 100755 index 34b2e0dc1a9b73a67482ac9b49205e90dedd9a8f..0000000000000000000000000000000000000000 --- a/bml/bmlserver/bin/startup.sh +++ /dev/null @@ -1,49 +0,0 @@ -#!/bin/bash -cd `dirname $0` -cd .. 
-HOME=`pwd` - -export SERVER_LOG_PATH=$HOME/logs -export SERVER_CLASS=com.webank.wedatasphere.linkis.DataWorkCloudApplication - - -if test -z "$SERVER_HEAP_SIZE" -then - export SERVER_HEAP_SIZE="512M" -fi - -if test -z "$SERVER_JAVA_OPTS" -then - export SERVER_JAVA_OPTS=" -Xmx$SERVER_HEAP_SIZE -XX:+UseG1GC -Xloggc:$HOME/logs/linkis-bml-gc.log" -fi - -if test -z "$START_PORT" -then - export START_PORT=14006 -fi - -export SERVER_PID=$HOME/bin/linkis.pid - -if [[ -f "${SERVER_PID}" ]]; then - pid=$(cat ${SERVER_PID}) - if kill -0 ${pid} >/dev/null 2>&1; then - echo "Server is already running." - exit 1 - fi -fi - -cp -f /opt/linkis/conf/linkis.properties /opt/linkis/linkis-bml/conf - -nohup java $SERVER_JAVA_OPTS -Deurekaurl=$EUREKA_URL -Duser.timezone=Asia/Shanghai -cp $HOME/conf:$HOME/lib/* $SERVER_CLASS --server.port=$START_PORT 2>&1 > $SERVER_LOG_PATH/linkis-bml.log & - -pid=$! -if [[ -z "${pid}" ]]; then - echo "server $SERVER_NAME start failed!" - exit 1 -else - echo "server $SERVER_NAME start succeeded!" - echo $pid > $SERVER_PID - sleep 1 -fi - -tail -f /dev/null diff --git a/bml/bmlserver/bin/stop-bml.sh b/bml/bmlserver/bin/stop-bml.sh deleted file mode 100755 index f032887111aae2a915bd8302af30617599a506e2..0000000000000000000000000000000000000000 --- a/bml/bmlserver/bin/stop-bml.sh +++ /dev/null @@ -1,47 +0,0 @@ -#!/bin/bash - -cd `dirname $0` -cd .. -HOME=`pwd` - -export SERVER_PID=$HOME/bin/linkis.pid - -function wait_for_server_to_die() { - local pid - local count - pid=$1 - timeout=$2 - count=0 - timeoutTime=$(date "+%s") - let "timeoutTime+=$timeout" - currentTime=$(date "+%s") - forceKill=1 - - while [[ $currentTime -lt $timeoutTime ]]; do - $(kill ${pid} > /dev/null 2> /dev/null) - if kill -0 ${pid} > /dev/null 2>&1; then - sleep 3 - else - forceKill=0 - break - fi - currentTime=$(date "+%s") - done - - if [[ forceKill -ne 0 ]]; then - $(kill -9 ${pid} > /dev/null 2> /dev/null) - fi -} - -if [[ ! 
-f "${SERVER_PID}" ]]; then - echo "server $SERVER_NAME is not running" -else - pid=$(cat ${SERVER_PID}) - if [[ -z "${pid}" ]]; then - echo "server $SERVER_NAME is not running" - else - wait_for_server_to_die $pid 40 - $(rm -f ${SERVER_PID}) - echo "server $SERVER_NAME is stopped." - fi -fi \ No newline at end of file diff --git a/bml/bmlserver/conf/application.yml b/bml/bmlserver/conf/application.yml deleted file mode 100644 index a1a3ec892597e1c25e8b9f5c1fa7c3a563ca93ba..0000000000000000000000000000000000000000 --- a/bml/bmlserver/conf/application.yml +++ /dev/null @@ -1,36 +0,0 @@ -server: - port: 8056 -spring: - application: - name: bml-server - -eureka: - client: - serviceUrl: - defaultZone: ${eurekaurl} - instance: - lease-renewal-interval-in-second: 5 - lease-expiration-duration-in-second: 10 - prefer-ip-address: true - instance-id: ${spring.cloud.client.ip-address}:${server.port} - metadata-map: - test: test-user - -management: - endpoints: - web: - exposure: - include: refresh,info -logging: - config: classpath:log4j2.xml - - -pagehelper: - helper-dialect: mysql - reasonable: true - support-methods-arguments: true - params: countSql - - -# register-with-eureka: false -# fetch-registry: false diff --git a/bml/bmlserver/conf/linkis.properties b/bml/bmlserver/conf/linkis.properties deleted file mode 100644 index 4135383f09b1622b8b7fbe6b34606509680cfd46..0000000000000000000000000000000000000000 --- a/bml/bmlserver/conf/linkis.properties +++ /dev/null @@ -1,13 +0,0 @@ -wds.linkis.server.mybatis.mapperLocations=classpath:com/webank/wedatasphere/linkis/bml/dao/impl/*.xml -wds.linkis.server.mybatis.typeAliasesPackage=com.webank.wedatasphere.linkis.bml.dao -wds.linkis.server.mybatis.BasePackage=com.webank.wedatasphere.linkis.bml.dao -wds.test.mode=true -wds.linkis.server.restful.scan.packages=com.webank.wedatasphere.linkis.bml.restful - -#sit -wds.linkis.server.mybatis.datasource.url= -wds.linkis.server.mybatis.datasource.username= 
-wds.linkis.server.mybatis.datasource.password= -wds.linkis.server.version=v1 - -#hadoop.config.dir \ No newline at end of file diff --git a/bml/bmlserver/conf/log4j2.xml b/bml/bmlserver/conf/log4j2.xml deleted file mode 100644 index 3da04d9e2f31b429ac80a44d6bf7b12b8128d8f5..0000000000000000000000000000000000000000 --- a/bml/bmlserver/conf/log4j2.xml +++ /dev/null @@ -1,22 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - diff --git a/bml/bmlserver/pom.xml b/bml/bmlserver/pom.xml deleted file mode 100644 index bcfb9450ef9cba57fb363115560aa5a6dc616724..0000000000000000000000000000000000000000 --- a/bml/bmlserver/pom.xml +++ /dev/null @@ -1,109 +0,0 @@ - - - - linkis - com.webank.wedatasphere.linkis - 0.10.0 - - 4.0.0 - - linkis-bmlserver - - - - com.webank.wedatasphere.linkis - linkis-mybatis - ${project.version} - - - com.webank.wedatasphere.linkis - linkis-bmlcommon - ${project.version} - - - com.webank.wedatasphere.linkis - linkis-cloudRPC - ${project.version} - - - asm - org.ow2.asm - - - provided - - - com.webank.wedatasphere.linkis - linkis-storage - ${project.version} - provided - - - - - - - - org.apache.maven.plugins - maven-deploy-plugin - - - - net.alchim31.maven - scala-maven-plugin - - - org.apache.maven.plugins - maven-jar-plugin - - - org.apache.maven.plugins - maven-assembly-plugin - 2.3 - false - - - make-assembly - package - - single - - - - src/main/assembly/distribution.xml - - - - - - false - linkis-bml - false - false - - src/main/assembly/distribution.xml - - - - - - - src/main/java - - **/*.xml - - - - src/main/resources - - **/*.properties - **/application.yml - **/bootstrap.yml - **/log4j2.xml - - - - - \ No newline at end of file diff --git a/bml/bmlserver/pom_k8s.xml b/bml/bmlserver/pom_k8s.xml deleted file mode 100644 index 777106ab9b9b9bc9ed63baa3c8ac0992cd1dd289..0000000000000000000000000000000000000000 --- a/bml/bmlserver/pom_k8s.xml +++ /dev/null @@ -1,152 +0,0 @@ - - - - linkis - com.webank.wedatasphere.linkis - 0.10.0 - 
../../pom.xml - - 4.0.0 - - linkis-bmlserver - - - - com.webank.wedatasphere.linkis - linkis-mybatis - - - com.webank.wedatasphere.linkis - linkis-bmlcommon - - - - com.webank.wedatasphere.linkis - linkis-cloudRPC - - - asm - org.ow2.asm - - - tomcat - jasper-compiler - - - tomcat - jasper-runtime - - - - - com.webank.wedatasphere.linkis - linkis-storage - - - org.apache.httpcomponents - httpclient - - - tomcat - jasper-compiler - - - tomcat - jasper-runtime - - - - - org.apache.httpcomponents - httpclient - ${httpclient.version} - - - - - - - - org.apache.maven.plugins - maven-deploy-plugin - - - - net.alchim31.maven - scala-maven-plugin - - - org.apache.maven.plugins - maven-jar-plugin - - - org.apache.maven.plugins - maven-assembly-plugin - 2.3 - false - - - make-assembly - package - - single - - - - src/main/assembly/distribution.xml - - - - - - false - linkis-bml - false - false - - src/main/assembly/distribution.xml - - - - - - - - - - - - - - - - - - - - - - - - - - - src/main/java - - **/*.xml - - - - src/main/resources - - **/*.properties - **/application.yml - **/bootstrap.yml - **/log4j2.xml - - - - - \ No newline at end of file diff --git a/bml/bmlserver/src/main/assembly/distribution.xml b/bml/bmlserver/src/main/assembly/distribution.xml deleted file mode 100644 index ff80357dc75d7e4e850389d4e2257ac101edab58..0000000000000000000000000000000000000000 --- a/bml/bmlserver/src/main/assembly/distribution.xml +++ /dev/null @@ -1,70 +0,0 @@ - - - - linkis-bml - - zip - - true - linkis-bml - - - - - - lib - true - true - false - true - true - - - - - - ${basedir}/conf - - * - - 0777 - conf - unix - - - ${basedir}/bin - - * - - 0777 - bin - unix - - - . 
- - */** - - logs - - - - - diff --git a/bml/bmlserver/src/main/java/com/webank/wedatasphere/linkis/bml/Entity/Resource.java b/bml/bmlserver/src/main/java/com/webank/wedatasphere/linkis/bml/Entity/Resource.java deleted file mode 100644 index 709c365dcabac506ddef70efd37a0774c6a55bce..0000000000000000000000000000000000000000 --- a/bml/bmlserver/src/main/java/com/webank/wedatasphere/linkis/bml/Entity/Resource.java +++ /dev/null @@ -1,262 +0,0 @@ -/* - * Copyright 2019 WeBank - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package com.webank.wedatasphere.linkis.bml.Entity; - -import java.util.Date; -import java.util.Map; - -/** - * Created by cooperyang on 2019/5/16. 
- */ -public class Resource { - - private static final String MAX_VERSION = "maxVersion"; - private static final String IS_PRIVATE = "isPrivate"; - private static final String RESOURCE_HEADER = "resourceHeader"; - private static final String DOWNLOAD_FILE_NAME = "downloadedFileName"; - private static final String SYSTEM = "system"; - private static final String IS_EXPIRE = "isExpire"; - private static final String EXPIRE_TYPE = "expireType"; - private static final String EXPIRE_TIME = "expireTime"; - private static final String UPDATER = "updator"; - - - - - private int id; - - private boolean isPrivate; - - private String resourceHeader; - - private String downloadedFileName; - - private String sys; - - private Date createTime; - - private boolean isExpire; - - private String expireType; - - /** - * expireTime的形式是 yyyy-MM-dd - * 或 yyyy-MM-dd HH:mm:ss - */ - private String expireTime; - - private Date updateTime; - - private String updator; - - private int maxVersion; - - private String resourceId; - - private String user; - - private String system; - - private boolean enableFlag; - - public String getResourceId() { - return resourceId; - } - - public void setResourceId(String resourceId) { - this.resourceId = resourceId; - } - - public String getUser() { - return user; - } - - public void setUser(String user) { - this.user = user; - } - - public String getSystem() { - return system; - } - - public void setSystem(String system) { - this.system = system; - } - - public Resource() { - } - - public Resource(String resourceId, String user, String downloadedFileName){ - this.user = user; - this.resourceId = resourceId; - this.createTime = new Date(System.currentTimeMillis()); - this.setUpdateTime(new Date(System.currentTimeMillis())); - this.enableFlag = true; - this.downloadedFileName = downloadedFileName; - } - - public boolean isEnableFlag() { - return enableFlag; - } - - public void setEnableFlag(boolean enableFlag) { - this.enableFlag = enableFlag; - } - - public 
int getId() { - return id; - } - - public void setId(int id) { - this.id = id; - } - - public boolean isPrivate() { - return isPrivate; - } - - public void setPrivate(boolean aPrivate) { - isPrivate = aPrivate; - } - - public String getResourceHeader() { - return resourceHeader; - } - - public void setResourceHeader(String resourceHeader) { - this.resourceHeader = resourceHeader; - } - - public String getDownloadedFileName() { - return downloadedFileName; - } - - public void setDownloadedFileName(String downloadedFileName) { - this.downloadedFileName = downloadedFileName; - } - - public String getSys() { - return sys; - } - - public void setSys(String sys) { - this.sys = sys; - } - - public Date getCreateTime() { - return createTime; - } - - public void setCreateTime(Date createTime) { - this.createTime = createTime; - } - - public boolean isExpire() { - return isExpire; - } - - public void setExpire(boolean expire) { - isExpire = expire; - } - - public String getExpireType() { - return expireType; - } - - public void setExpireType(String expireType) { - this.expireType = expireType; - } - - public String getExpireTime() { - return expireTime; - } - - public void setExpireTime(String expireTime) { - this.expireTime = expireTime; - } - - public Date getUpdateTime() { - return updateTime; - } - - public void setUpdateTime(Date updateTime) { - this.updateTime = updateTime; - } - - public String getUpdator() { - return updator; - } - - public void setUpdator(String updator) { - this.updator = updator; - } - - public int getMaxVersion() { - return maxVersion; - } - - public void setMaxVersion(int maxVersion) { - this.maxVersion = maxVersion; - } - - public static Resource createNewResource(String resourceId, String user, String downloadedFileName, Map properties){ - Resource resource = new Resource(resourceId, user, downloadedFileName); - if (properties.get(MAX_VERSION) == null){ - resource.setMaxVersion(10); - }else{ - 
resource.setMaxVersion(Integer.parseInt(properties.get(MAX_VERSION).toString())); - } - if (properties.get(IS_EXPIRE) == null){ - //默认是不过期的 - resource.setExpire(false); - }else{ - resource.setExpire(properties.get(IS_EXPIRE).toString().equalsIgnoreCase("true")); - } - if (properties.get(SYSTEM) == null){ - resource.setSystem("WTSS"); - }else{ - resource.setSystem(properties.get(SYSTEM).toString()); - } - if (properties.get(IS_PRIVATE) == null){ - resource.setPrivate(true); - }else{ - resource.setPrivate(properties.get(IS_PRIVATE).toString().equalsIgnoreCase("true")); - } - if (properties.get(RESOURCE_HEADER) == null){ - resource.setResourceHeader(null); - }else{ - resource.setResourceHeader((String)(properties.get(RESOURCE_HEADER))); - } - //如果资源是过期的,需要设置资源过期的类型和时间 - if (resource.isExpire()){ - if (properties.get(EXPIRE_TYPE) == null){ - resource.setExpireType("time"); - }else{ - resource.setExpireType((String)(properties.get(EXPIRE_TYPE))); - } - if (properties.get(EXPIRE_TIME) == null){ - //默认设置50天过期 - resource.setExpireTime("50d"); - }else{ - resource.setExpireTime((String)(properties.get(EXPIRE_TIME))); - } - } - return resource; - } - - - -} diff --git a/bml/bmlserver/src/main/java/com/webank/wedatasphere/linkis/bml/service/ResourceService.java b/bml/bmlserver/src/main/java/com/webank/wedatasphere/linkis/bml/service/ResourceService.java deleted file mode 100644 index bfed9c080d3764c79af41947d5ae70151f84f3f4..0000000000000000000000000000000000000000 --- a/bml/bmlserver/src/main/java/com/webank/wedatasphere/linkis/bml/service/ResourceService.java +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Copyright 2019 WeBank - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package com.webank.wedatasphere.linkis.bml.service; - -import com.webank.wedatasphere.linkis.bml.Entity.Resource; -import com.webank.wedatasphere.linkis.bml.service.impl.ResourceServiceImpl; - -import org.glassfish.jersey.media.multipart.FormDataMultiPart; - -import java.util.List; -import java.util.Map; - -/** - * Created by cooperyang on 2019/5/17. - */ -public interface ResourceService { - - List getResources(Map paramMap); - - void deleteResource(String resourceId); - - void batchDeleteResources(List resourceIds); - - /** - * 用于上传文件的函数,上传文件的步骤 - * 1.根据用户名和resourceHeader信息为用户创建一个文件 - * 2.利用storage模块将二进制流存入到物料库 - * 3.二进制流的存储方式有两种,根据资源文件的大小选择合并或者是单独存储 - * 4.生成resourceID - * 4.更新resource 和 resource_version表 - * @param formDataMultiPart notnull - * @param user um_user - * @param properties Map - */ - List upload(FormDataMultiPart formDataMultiPart, String user, Map properties)throws Exception; - - boolean checkResourceId(String resourceId); - - - Resource getResource(String resourceId); - - - boolean checkAuthority(String user, String resourceId); - - boolean checkExpire(String resourceId, String version); - - void cleanExpiredResources(); -} diff --git a/conf/config.sh b/conf/config.sh deleted file mode 100755 index 8f431b88f93d6fed69dce562ecbc3c7382a01e33..0000000000000000000000000000000000000000 --- a/conf/config.sh +++ /dev/null @@ -1,135 +0,0 @@ -#!/bin/sh - -shellDir=`dirname $0` -workDir=`cd ${shellDir}/..;pwd` - -### -SSH_PORT=22 - -### deploy user -deployUser=hadoop - - -### The install home path of Linkis 
-LINKIS_INSTALL_HOME=$workDir #Must provided - - - -### Specifies the user workspace, which is used to store the user's script files and log files. -### Generally local directory -WORKSPACE_USER_ROOT_PATH=file:///tmp/linkis/ ##file:// required -### User's root hdfs path -HDFS_USER_ROOT_PATH=hdfs:///tmp/linkis ##hdfs:// required - -### Path to store job ResultSet:file or hdfs path -RESULT_SET_ROOT_PATH=hdfs:///tmp/linkis - -### Provide the DB information of Hive metadata database. -HIVE_META_URL= -HIVE_META_USER= -HIVE_META_PASSWORD= - -###HADOOP CONF DIR -HADOOP_CONF_DIR=/appcom/config/hadoop-config - -###HIVE CONF DIR -HIVE_CONF_DIR=/appcom/config/hive-config - -###SPARK CONF DIR -SPARK_CONF_DIR=/appcom/config/spark-config - -################### The install Configuration of all Micro-Services ##################### -# -# NOTICE: -# 1. If you just wanna try, the following micro-service configuration can be set without any settings. -# These services will be installed by default on this machine. -# 2. In order to get the most complete enterprise-level features, we strongly recommend that you install -# Linkis in a distributed manner and set the following microservice parameters -# - -### EUREKA install information -### You can access it in your browser at the address below:http://${EUREKA_INSTALL_IP}:${EUREKA_PORT} -#EUREKA_INSTALL_IP=127.0.0.1 # Microservices Service Registration Discovery Center -EUREKA_PORT=20303 - -### Gateway install information -#GATEWAY_INSTALL_IP=127.0.0.1 -GATEWAY_PORT=9001 - -### publicservice -#PUBLICSERVICE_INSTALL_IP=127.0.0.1 -PUBLICSERVICE_PORT=9102 - - -### Hive Metadata Query service, provide the metadata information of Hive databases. -#METADATA_INSTALL_IP=127.0.0.1 -METADATA_PORT=9103 - - -### ResourceManager -#RESOURCEMANAGER_INSTALL_IP=127.0.0.1 -RESOURCEMANAGER_PORT=9104 - - -### Spark -### This service is used to provide spark capability. 
-#SPARK_INSTALL_IP=127.0.0.1 -SPARK_EM_PORT=9105 -SPARK_ENTRANCE_PORT=9106 - - -### Hive -### This service is used to provide hive capability. -#HIVE_INSTALL_IP=127.0.0.1 -HIVE_EM_PORT=9107 -HIVE_ENTRANCE_PORT=9108 - - -### PYTHON -### This service is used to provide python capability. -#PYTHON_INSTALL_IP=127.0.0.1 -PYTHON_EM_PORT=9109 -PYTHON_ENTRANCE_PORT=9110 - - -### JDBC -### This service is used to provide jdbc capability. -#JDBC_INSTALL_IP=127.0.0.1 -JDBC_ENTRANCE_PORT=9111 - -### SHELL -### This service is used to provide shell capability. -#SHELL_INSTALL_IP=127.0.0.1 -SHELL_EM_PORT=9114 -SHELL_ENTRANCE_PORT=9115 - - - -### BML -### This service is used to provide BML capability. -#BML_INSTALL_IP=127.0.0.1 -BML_PORT=9113 - -### cs -#CS_INSTALL_IP=127.0.0.1 -CS_PORT=9116 - - -### datasource management server -#DSM_INSTALL_IP=127.0.0.1 -DSM_PORT=9117 - -### metadata management server -#MDM_INSTALL_IP=127.0.0.1 -MDM_PORT=9118 - -######################################################################################## - -## LDAP is for enterprise authorization, if you just want to have a try, ignore it. -#LDAP_URL=ldap://localhost:1389/ -#LDAP_BASEDN=dc=webank,dc=com - -## java application default jvm memory -export SERVER_HEAP_SIZE="512M" - -LINKIS_VERSION=0.10.0 diff --git a/conf/linkis-env.sh b/conf/linkis-env.sh new file mode 100644 index 0000000000000000000000000000000000000000..59319ac0d0ac02119d961123f6162e72d5bfe17a --- /dev/null +++ b/conf/linkis-env.sh @@ -0,0 +1,135 @@ +#!/bin/bash +# +# description: Starts and stops Server +# +# @name: linkis-demo + +# @created: 01.16.2021 +# +# Modified for Linkis 1.0.0 + +# SSH_PORT=22 + +### deploy user +deployUser=hadoop + +##Linkis_SERVER_VERSION +LINKIS_SERVER_VERSION=v1 + +### Specifies the user workspace, which is used to store the user's script files and log files. 
+### Generally local directory +WORKSPACE_USER_ROOT_PATH=file:///tmp/linkis/ ##file:// required +### User's root hdfs path +HDFS_USER_ROOT_PATH=hdfs:///tmp/linkis ##hdfs:// required + +### Path to store job ResultSet:file or hdfs path +RESULT_SET_ROOT_PATH=hdfs:///tmp/linkis ##hdfs:// required + +### Path to store started engines and engine logs, must be local +ENGINECONN_ROOT_PATH=/appcom/tmp ## file:// required + +ENTRANCE_CONFIG_LOG_PATH=hdfs:///tmp/linkis/ ##file:// required + +### Provide the DB information of Hive metadata database. +HIVE_META_URL= +HIVE_META_USER= +HIVE_META_PASSWORD= + +##YARN REST URL spark engine required +YARN_RESTFUL_URL=http://127.0.0.1:8088 + +###HADOOP CONF DIR +HADOOP_CONF_DIR=/appcom/config/hadoop-config + +###HIVE CONF DIR +HIVE_CONF_DIR=/appcom/config/hive-config + +###SPARK CONF DIR +SPARK_CONF_DIR=/appcom/config/spark-config + +## Engine version conf +#SPARK_VERSION +#SPARK_VERSION=2.4.3 +##HIVE_VERSION +#HIVE_VERSION=1.2.1 +#PYTHON_VERSION=python2 + +################### The install Configuration of all Micro-Services ##################### +# +# NOTICE: +# 1. If you just wanna try, the following micro-service configuration can be set without any settings. +# These services will be installed by default on this machine. +# 2. 
In order to get the most complete enterprise-level features, we strongly recommend that you install +# Linkis in a distributed manner and set the following microservice parameters +# + +### EUREKA install information +### You can access it in your browser at the address below:http://${EUREKA_INSTALL_IP}:${EUREKA_PORT} +#EUREKA_INSTALL_IP=127.0.0.1 # Microservices Service Registration Discovery Center +EUREKA_PORT=20303 +EUREKA_PREFER_IP=false + +### Gateway install information +#GATEWAY_INSTALL_IP=127.0.0.1 +GATEWAY_PORT=9001 + +### ApplicationManager +#MANAGER_INSTALL_IP=127.0.0.1 +MANAGER_PORT=9101 + +### EngineManager +#ENGINECONNMANAGER_INSTALL_IP=127.0.0.1 +ENGINECONNMANAGER_PORT=9102 + + + +### EnginePluginServer +#ENGINECONN_PLUGIN_SERVER_INSTALL_IP=127.0.0.1 +ENGINECONN_PLUGIN_SERVER_PORT=9103 + +### LinkisEntrance +#ENTRANCE_INSTALL_IP=127.0.0.1 +ENTRANCE_PORT=9104 + +### publicservice +#PUBLICSERVICE_INSTALL_IP=127.0.0.1 +PUBLICSERVICE_PORT=9105 + + +### Hive Metadata Query service, provide the metadata information of Hive databases. +#DATASOURCE_INSTALL_IP=127.0.0.1 +DATASOURCE_PORT=9106 + +### BML +### This service is used to provide BML capability. +#BML_INSTALL_IP=127.0.0.1 +BML_PORT=9107 + +### cs +#CS_INSTALL_IP=127.0.0.1 +CS_PORT=9108 + +######################################################################################## + +## LDAP is for enterprise authorization, if you just want to have a try, ignore it. 
+#LDAP_URL=ldap://localhost:1389/ +#LDAP_BASEDN=dc=webank,dc=com +#LDAP_USER_NAME_FORMAT=cn=%s@xxx.com,OU=xxx,DC=xxx,DC=com + +## java application default jvm memory +export SERVER_HEAP_SIZE="512M" + +if test -z "$EUREKA_INSTALL_IP" +then + export EUREKA_INSTALL_IP="`hostname --fqdn`" +fi +if [ "true" != "$EUREKA_PREFER_IP" ] +then + export EUREKA_HOSTNAME=$EUREKA_INSTALL_IP +fi +export EUREKA_URL=http://$EUREKA_INSTALL_IP:$EUREKA_PORT/eureka/ + +LINKIS_VERSION=1.0.0 + +# for install +LINKIS_PUBLIC_MODULE=lib/linkis-commons/public-module \ No newline at end of file diff --git a/conf/linkis.properties b/conf/linkis.properties new file mode 100644 index 0000000000000000000000000000000000000000..5654bafb0a0086e2ef6362d7f1544d9b9e18e97f --- /dev/null +++ b/conf/linkis.properties @@ -0,0 +1,51 @@ +# +# Copyright 2019 WeBank +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +## +#wds.linkis.test.mode=true +wds.linkis.server.version=v1 +##spring conf +wds.linkis.gateway.url=http://127.0.0.1:9001 +wds.linkis.eureka.defaultZone=http://127.0.0.1:20303/eureka/ +##mybatis +wds.linkis.server.mybatis.datasource.url= +wds.linkis.server.mybatis.datasource.username= +wds.linkis.server.mybatis.datasource.password= +##hive meta +hive.meta.url= +hive.meta.user= +hive.meta.password= +##LDAP +wds.linkis.ldap.proxy.url= +wds.linkis.ldap.proxy.baseDN= +wds.linkis.ldap.proxy.userNameFormat= + +wds.linkis.admin.user=hadoop +#hadoopconfig +#hadoop.config.dir=/appcom/config/hadoop-config +#hive.config.dir= +#spark.config.dir +##fileSystem +wds.linkis.filesystem.root.path=file:///tmp/linkis/ +wds.linkis.filesystem.hdfs.root.path=hdfs:///tmp/linkis/ +#engine plugin +wds.linkis.engineconn.root.dir=/appcom/tmp +wds.linkis.engineconn.home=/appcom/Install/LinkisInstall/lib/linkis-engineconn-plugins +wds.linkis.engineconn.plugin.loader.store.path=/appcom/Install/LinkisInstall/lib/linkis-engineconn-plugins +wds.linkis.public_module.path=/appcom/Install/LinkisInstall/lib/linkis-commons/public-module +##engine Version +#wds.linkis.spark.engine.version= +#wds.linkis.hive.engine.version= +#wds.linkis.python.engine.version= \ No newline at end of file diff --git a/contextservice/cs-cache/pom.xml b/contextservice/cs-cache/pom.xml deleted file mode 100644 index 06d48a5078f00cc0f51ae648e3977ee99320af20..0000000000000000000000000000000000000000 --- a/contextservice/cs-cache/pom.xml +++ /dev/null @@ -1,85 +0,0 @@ - - - - - - linkis - com.webank.wedatasphere.linkis - 0.10.0 - ../../pom.xml - - 4.0.0 - - linkis-cs-cache - - - - - com.webank.wedatasphere.linkis - linkis-cs-common - - - - com.webank.wedatasphere.linkis - linkis-cs-persistence - - - - org.reflections - reflections - 0.9.10 - - - - com.webank.wedatasphere.linkis - linkis-module - provided - - - - junit - junit - RELEASE - test - - - com.webank.wedatasphere.linkis - linkis-cs-listener - compile - - - - - - - - 
org.apache.maven.plugins - maven-deploy-plugin - - - - org.apache.maven.plugins - maven-jar-plugin - - - - - ${basedir}/src/main/resources - - - ${project.artifactId}-${project.version} - - \ No newline at end of file diff --git a/contextservice/cs-cache/src/main/java/com/webank/wedatasphere/linkis/cs/contextcache/cache/guava/ContextIDCacheLoader.java b/contextservice/cs-cache/src/main/java/com/webank/wedatasphere/linkis/cs/contextcache/cache/guava/ContextIDCacheLoader.java deleted file mode 100644 index d081e577f613cdd6539ae7a8324b74087cad14ec..0000000000000000000000000000000000000000 --- a/contextservice/cs-cache/src/main/java/com/webank/wedatasphere/linkis/cs/contextcache/cache/guava/ContextIDCacheLoader.java +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Copyright 2019 WeBank - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -/* -package com.webank.wedatasphere.linkis.cs.contextcache.cache.guava; - -import com.google.common.cache.CacheLoader; -import com.webank.wedatasphere.linkis.cs.contextcache.cache.csid.ContextIDValue; -import com.webank.wedatasphere.linkis.cs.contextcache.cache.csid.ContextIDValueGenerator; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.stereotype.Component; - -@Component -public class ContextIDCacheLoader extends CacheLoader { - - private static final Logger logger = LoggerFactory.getLogger(ContextIDCacheLoader.class); - - @Autowired - private ContextIDValueGenerator contextIDValueGenerator; - - @Override - public ContextIDValue load(String contextID) throws Exception { - logger.info("Start to load contextID:{}", contextID); - ContextIDValue contextIDValue = contextIDValueGenerator.createContextIDValue(contextID); - - logger.info("Finished to load contextID:{}", contextID); - return contextIDValue; - } -} -*/ diff --git a/contextservice/cs-cache/src/test/java/com/webank/wedatasphere/linkis/cs/contextcache/test/csid/TestContextID.java b/contextservice/cs-cache/src/test/java/com/webank/wedatasphere/linkis/cs/contextcache/test/csid/TestContextID.java deleted file mode 100644 index a2344cd2b267370a6a99029c3185d8a059fd3444..0000000000000000000000000000000000000000 --- a/contextservice/cs-cache/src/test/java/com/webank/wedatasphere/linkis/cs/contextcache/test/csid/TestContextID.java +++ /dev/null @@ -1,37 +0,0 @@ -/* - * Copyright 2019 WeBank - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package com.webank.wedatasphere.linkis.cs.contextcache.test.csid; - -import com.webank.wedatasphere.linkis.cs.common.entity.source.ContextID; - -/** - * @author peacewong - * @date 2020/2/13 20:41 - */ -public class TestContextID implements ContextID { - - String contextID; - - @Override - public String getContextId() { - return contextID; - } - - @Override - public void setContextId(String contextId) { - this.contextID = contextId; - } -} diff --git a/contextservice/cs-cache/src/test/java/com/webank/wedatasphere/linkis/cs/contextcache/test/keyword/TestContextKey.java b/contextservice/cs-cache/src/test/java/com/webank/wedatasphere/linkis/cs/contextcache/test/keyword/TestContextKey.java deleted file mode 100644 index a52adcae0072e610244f63215a8cdf9d3a82337a..0000000000000000000000000000000000000000 --- a/contextservice/cs-cache/src/test/java/com/webank/wedatasphere/linkis/cs/contextcache/test/keyword/TestContextKey.java +++ /dev/null @@ -1,84 +0,0 @@ -/* - * Copyright 2019 WeBank - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package com.webank.wedatasphere.linkis.cs.contextcache.test.keyword; - -import com.webank.wedatasphere.linkis.cs.common.annotation.KeywordMethod; -import com.webank.wedatasphere.linkis.cs.common.entity.enumeration.ContextScope; -import com.webank.wedatasphere.linkis.cs.common.entity.enumeration.ContextType; -import com.webank.wedatasphere.linkis.cs.common.entity.source.ContextKey; - -/** - * @author peacewong - * @date 2020/2/13 16:32 - */ -public class TestContextKey implements ContextKey { - - private String key; - - private String keywords; - - @KeywordMethod - @Override - public String getKey() { - return this.key; - } - - @Override - public void setKey(String key) { - this.key = key; - } - - @Override - public ContextType getContextType() { - return ContextType.METADATA; - } - - @Override - public void setContextType(ContextType contextType) { - - } - - @Override - public ContextScope getContextScope() { - return ContextScope.PUBLIC; - } - - @Override - public void setContextScope(ContextScope contextScope) { - - } - - @KeywordMethod(splitter = ",") - @Override - public String getKeywords() { - return this.keywords; - } - - @Override - public void setKeywords(String keywords) { - this.keywords = keywords; - } - - @Override - public int getType() { - return 0; - } - - @Override - public void setType(int type) { - - } -} diff --git a/contextservice/cs-cache/src/test/java/com/webank/wedatasphere/linkis/cs/contextcache/test/keyword/TestContextKeyValue.java b/contextservice/cs-cache/src/test/java/com/webank/wedatasphere/linkis/cs/contextcache/test/keyword/TestContextKeyValue.java deleted file mode 100644 index f279ca74dbec9ef334c01001a5ee86b5871de35d..0000000000000000000000000000000000000000 --- a/contextservice/cs-cache/src/test/java/com/webank/wedatasphere/linkis/cs/contextcache/test/keyword/TestContextKeyValue.java +++ /dev/null @@ -1,51 +0,0 @@ -/* - * Copyright 2019 WeBank - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you 
may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package com.webank.wedatasphere.linkis.cs.contextcache.test.keyword; - -import com.webank.wedatasphere.linkis.cs.common.entity.source.ContextKey; -import com.webank.wedatasphere.linkis.cs.common.entity.source.ContextKeyValue; -import com.webank.wedatasphere.linkis.cs.common.entity.source.ContextValue; - -/** - * @author peacewong - * @date 2020/2/13 16:46 - */ -public class TestContextKeyValue implements ContextKeyValue { - - private ContextKey contextKey; - - private ContextValue contextValue; - - @Override - public ContextKey getContextKey() { - return this.contextKey; - } - - @Override - public void setContextKey(ContextKey contextKey) { - this.contextKey = contextKey; - } - - @Override - public ContextValue getContextValue() { - return this.contextValue; - } - - @Override - public void setContextValue(ContextValue contextValue) { - this.contextValue = contextValue; - } -} diff --git a/contextservice/cs-cache/src/test/java/com/webank/wedatasphere/linkis/cs/contextcache/test/keyword/TestContextValue.java b/contextservice/cs-cache/src/test/java/com/webank/wedatasphere/linkis/cs/contextcache/test/keyword/TestContextValue.java deleted file mode 100644 index 65e53dfefd3a74ce7be934f1e7f719ae48593e84..0000000000000000000000000000000000000000 --- a/contextservice/cs-cache/src/test/java/com/webank/wedatasphere/linkis/cs/contextcache/test/keyword/TestContextValue.java +++ /dev/null @@ -1,53 +0,0 @@ -/* - * Copyright 2019 WeBank - * - * Licensed under the Apache License, Version 2.0 (the 
"License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package com.webank.wedatasphere.linkis.cs.contextcache.test.keyword; - -import com.webank.wedatasphere.linkis.cs.common.annotation.KeywordMethod; -import com.webank.wedatasphere.linkis.cs.common.entity.source.ContextValue; -import com.webank.wedatasphere.linkis.cs.common.entity.source.ValueBean; - -/** - * @author peacewong - * @date 2020/2/13 16:44 - */ -public class TestContextValue implements ContextValue { - - private Object value; - - private String keywords; - - @KeywordMethod(splitter = "-") - @Override - public String getKeywords() { - return this.keywords; - } - - @Override - public void setKeywords(String keywords) { - this.keywords = keywords; - } - - @KeywordMethod(regex = "hello") - @Override - public Object getValue() { - return this.value; - } - - @Override - public void setValue(Object value) { - this.value = value; - } -} diff --git a/contextservice/cs-cache/src/test/resources/linkis.properties b/contextservice/cs-cache/src/test/resources/linkis.properties deleted file mode 100644 index ce3f1ee22a84bf2123b14d3d3908e01f0a0dcd41..0000000000000000000000000000000000000000 --- a/contextservice/cs-cache/src/test/resources/linkis.properties +++ /dev/null @@ -1,35 +0,0 @@ -# -# Copyright 2019 WeBank -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -wds.linkis.test.mode=true - -wds.linkis.server.mybatis.datasource.url=jdbc:mysql://127.0.0.1:3306/ide_gz_bdap_sit_01?characterEncoding=UTF-8 -wds.linkis.server.mybatis.datasource.username= -wds.linkis.server.mybatis.datasource.password= - - -wds.linkis.log.clear=true -wds.linkis.server.version=v1 - -##restful -wds.linkis.server.restful.scan.packages=com.webank.wedatasphere.linkis.cs.server.restful - -##mybatis -wds.linkis.server.mybatis.mapperLocations=classpath*:com\\webank\\wedatasphere\\linkis\\cs\\persistence\\dao\\impl\\*.xml - -wds.linkis.server.mybatis.typeAliasesPackage=com.webank.wedatasphere.linkis.cs.persistence.entity - -wds.linkis.server.mybatis.BasePackage=com.webank.wedatasphere.linkis.cs.persistence.dao diff --git a/contextservice/cs-cache/src/test/resources/log4j2.xml b/contextservice/cs-cache/src/test/resources/log4j2.xml deleted file mode 100644 index 2a9e19f81028e18d88a96c7c7c3cedbcc4a86aee..0000000000000000000000000000000000000000 --- a/contextservice/cs-cache/src/test/resources/log4j2.xml +++ /dev/null @@ -1,39 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - diff --git a/contextservice/cs-client/pom.xml b/contextservice/cs-client/pom.xml deleted file mode 100644 index 75c6e1e7bfe0635c6ca4a75791ccbe3aaf6ccf69..0000000000000000000000000000000000000000 --- a/contextservice/cs-client/pom.xml +++ /dev/null @@ -1,98 +0,0 @@ - - - - - - linkis - com.webank.wedatasphere.linkis - 0.10.0 - ../../pom.xml - - 4.0.0 - - linkis-cs-client - - - - com.webank.wedatasphere.linkis - linkis-common - - - - com.webank.wedatasphere.linkis - 
linkis-gateway-httpclient-support - - - com.webank.wedatasphere.linkis - linkis-storage - - - - - - com.webank.wedatasphere.linkis - linkis-httpclient - - - - com.webank.wedatasphere.linkis - linkis-cs-common - - - - - - - - - com.webank.wedatasphere.linkis - linkis-cs-listener - - - - junit - junit - 4.12 - test - - - - - - - - - - org.apache.maven.plugins - maven-deploy-plugin - - - net.alchim31.maven - scala-maven-plugin - - - org.apache.maven.plugins - maven-jar-plugin - - - - - ${basedir}/src/main/resources - - - - - \ No newline at end of file diff --git a/contextservice/cs-client/src/main/java/com/webank/wedatasphere/linkis/cs/client/listener/ContextIDListener.java b/contextservice/cs-client/src/main/java/com/webank/wedatasphere/linkis/cs/client/listener/ContextIDListener.java deleted file mode 100644 index 84b99243fc825dd4e7cac665fdf0a042c2f1638c..0000000000000000000000000000000000000000 --- a/contextservice/cs-client/src/main/java/com/webank/wedatasphere/linkis/cs/client/listener/ContextIDListener.java +++ /dev/null @@ -1,90 +0,0 @@ -/* - * Copyright 2019 WeBank - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package com.webank.wedatasphere.linkis.cs.client.listener; - -import com.webank.wedatasphere.linkis.common.listener.Event; -import com.webank.wedatasphere.linkis.cs.client.Context; -import com.webank.wedatasphere.linkis.cs.common.entity.source.ContextID; -import com.webank.wedatasphere.linkis.cs.listener.event.enumeration.OperateType; -import com.webank.wedatasphere.linkis.cs.listener.event.impl.DefaultContextIDEvent; - - -/** - * created by cooperyang on 2020/2/17 - * Description: 这个listener是用来监听contextID的,用户可以进行实现 - */ -public abstract class ContextIDListener implements ContextClientListener{ - - - private ContextID contextID; - - private Context context; - - - public ContextIDListener(){ - - } - - public ContextIDListener(ContextID contextID){ - this.contextID = contextID; - } - - public ContextID getContextID() { - return contextID; - } - - public void setContextID(ContextID contextID) { - this.contextID = contextID; - } - - - public Context getContext() { - return context; - } - - public void setContext(Context context) { - this.context = context; - } - - @Override - public void onContextCreated(Event event) { - - } - - @Override - public void onContextUpdated(Event event) { - - } - - public abstract void onContextRemoved(Event event); - - - @Override - public void onEvent(Event event) { - if (event instanceof DefaultContextIDEvent){ - DefaultContextIDEvent defaultContextKeyEvent = (DefaultContextIDEvent)event; - if (defaultContextKeyEvent.getContextID().equals(contextID)){ - switch(defaultContextKeyEvent.getOperateType()){ - case UPDATE : onContextUpdated(defaultContextKeyEvent); - break; - case CREATE: onContextCreated(defaultContextKeyEvent);break; - case REMOVE: onContextRemoved(defaultContextKeyEvent);break; - default: break; - } - } - } - } -} diff --git a/contextservice/cs-client/src/main/java/com/webank/wedatasphere/linkis/cs/client/listener/ContextKeyListener.java 
b/contextservice/cs-client/src/main/java/com/webank/wedatasphere/linkis/cs/client/listener/ContextKeyListener.java deleted file mode 100644 index d6e57d978475f27908125e7c908f0332302e42f6..0000000000000000000000000000000000000000 --- a/contextservice/cs-client/src/main/java/com/webank/wedatasphere/linkis/cs/client/listener/ContextKeyListener.java +++ /dev/null @@ -1,83 +0,0 @@ -/* - * Copyright 2019 WeBank - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package com.webank.wedatasphere.linkis.cs.client.listener; - -import com.webank.wedatasphere.linkis.common.exception.ErrorException; -import com.webank.wedatasphere.linkis.common.listener.Event; -import com.webank.wedatasphere.linkis.cs.client.Context; -import com.webank.wedatasphere.linkis.cs.common.entity.source.ContextKey; -import com.webank.wedatasphere.linkis.cs.listener.event.impl.DefaultContextKeyEvent; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - - -/** - * created by cooperyang on 2020/2/18 - * Description:一个微服务对contextKey的监听器 - */ -public abstract class ContextKeyListener implements ContextClientListener{ - - private static final Logger LOGGER = LoggerFactory.getLogger(ContextKeyListener.class); - - private ContextKey contextKey; - - private Context context; - - public ContextKeyListener(){ - - } - - public ContextKeyListener(ContextKey contextKey){ - this.contextKey = contextKey; - } - - public ContextKey getContextKey() { - return contextKey; - } - - public void 
setContextKey(ContextKey contextKey) { - this.contextKey = contextKey; - } - - public Context getContext() { - return context; - } - - public void setContext(Context context) { - this.context = context; - } - - @Override - public void onContextUpdated(Event event) { - if (event instanceof DefaultContextKeyEvent){ - context.setLocal(((DefaultContextKeyEvent) event).getContextKeyValue()); - } - } - - @Override - public void onEvent(Event event) { - if (event instanceof DefaultContextKeyEvent){ - DefaultContextKeyEvent defaultContextKeyEvent = (DefaultContextKeyEvent)event; - if (defaultContextKeyEvent.getContextKeyValue().getContextKey().equals(contextKey)){ - switch(defaultContextKeyEvent.getOperateType()){ - case UPDATE:onContextUpdated(defaultContextKeyEvent);break; - case CREATE:onContextCreated(defaultContextKeyEvent);break; - default:break; - } - } - } - } -} diff --git a/contextservice/cs-client/src/main/java/com/webank/wedatasphere/linkis/cs/client/service/ResourceService.java b/contextservice/cs-client/src/main/java/com/webank/wedatasphere/linkis/cs/client/service/ResourceService.java deleted file mode 100644 index 01b4c9828829e94c4b90242c91d98b1ec54af9c0..0000000000000000000000000000000000000000 --- a/contextservice/cs-client/src/main/java/com/webank/wedatasphere/linkis/cs/client/service/ResourceService.java +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Copyright 2019 WeBank - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package com.webank.wedatasphere.linkis.cs.client.service; - -import com.webank.wedatasphere.linkis.cs.common.entity.resource.BMLResource; -import com.webank.wedatasphere.linkis.cs.common.entity.source.ContextID; -import com.webank.wedatasphere.linkis.cs.common.entity.source.ContextKey; -import com.webank.wedatasphere.linkis.cs.common.exception.CSErrorException; - -import java.util.List; -import java.util.Map; - -/** - * @Author alexyang - * @Date 2020/3/9 - */ -public interface ResourceService { - - /** - * 通过ContextID和NodeName,获取上游的所有Resource数据 - * @param contextIDStr - * @param nodeName - * @return - */ - Map getAllUpstreamBMLResource(String contextIDStr, String nodeName) throws CSErrorException; - - List getUpstreamBMLResource(String contextIDStr, String nodeName) throws CSErrorException; -} diff --git a/contextservice/cs-client/src/main/java/com/webank/wedatasphere/linkis/cs/client/service/VariableService.java b/contextservice/cs-client/src/main/java/com/webank/wedatasphere/linkis/cs/client/service/VariableService.java deleted file mode 100644 index 168b4f337705958abf2bff22d654a599b7eae215..0000000000000000000000000000000000000000 --- a/contextservice/cs-client/src/main/java/com/webank/wedatasphere/linkis/cs/client/service/VariableService.java +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Copyright 2019 WeBank - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package com.webank.wedatasphere.linkis.cs.client.service; - -import com.webank.wedatasphere.linkis.cs.common.entity.object.LinkisVariable; -import com.webank.wedatasphere.linkis.cs.common.exception.CSErrorException; - -import java.util.List; - -/** - * @author peacewong - * @date 2020/3/12 20:28 - */ -public interface VariableService { - - List getUpstreamVariables(String contextIDStr, String nodeName) throws CSErrorException; - - void putVariable(String contextIDStr, String contextKey, LinkisVariable linkisVariable) throws CSErrorException; -} diff --git a/contextservice/cs-client/src/main/java/com/webank/wedatasphere/linkis/cs/client/utils/ExceptionHelper.java b/contextservice/cs-client/src/main/java/com/webank/wedatasphere/linkis/cs/client/utils/ExceptionHelper.java deleted file mode 100644 index 211469f8bbdb94ef93428a6557c0d3ff446673df..0000000000000000000000000000000000000000 --- a/contextservice/cs-client/src/main/java/com/webank/wedatasphere/linkis/cs/client/utils/ExceptionHelper.java +++ /dev/null @@ -1,30 +0,0 @@ -/* - * Copyright 2019 WeBank - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package com.webank.wedatasphere.linkis.cs.client.utils; - -import com.webank.wedatasphere.linkis.common.exception.ErrorException; - -/** - * created by cooperyang on 2020/2/19 - * Description: - */ -public class ExceptionHelper { - public static void throwErrorException(int errCode, String errMsg, Throwable t)throws ErrorException { - ErrorException errorException = new ErrorException(errCode, errMsg); - errorException.initCause(t); - throw errorException; - } -} diff --git a/contextservice/cs-client/src/main/scala/com/webank/wedatasphere/linkis/cs/client/utils/ContextClientUtils.scala b/contextservice/cs-client/src/main/scala/com/webank/wedatasphere/linkis/cs/client/utils/ContextClientUtils.scala deleted file mode 100644 index f0b895325da8843f7b0cc655efd889b235510ab6..0000000000000000000000000000000000000000 --- a/contextservice/cs-client/src/main/scala/com/webank/wedatasphere/linkis/cs/client/utils/ContextClientUtils.scala +++ /dev/null @@ -1,33 +0,0 @@ -/** - * Copyright 2019 WeBank - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package com.webank.wedatasphere.linkis.cs.client.utils - -import java.lang -import java.lang.reflect.Type - -import com.google.gson.{GsonBuilder, JsonElement, JsonPrimitive, JsonSerializationContext, JsonSerializer} - -/** - * created by cooperyang on 2020/2/23 - * Description: - */ -object ContextClientUtils { - implicit val gson = new GsonBuilder().setPrettyPrinting().setDateFormat("yyyy-MM-dd'T'HH:mm:ssZ").serializeNulls - .registerTypeAdapter(classOf[java.lang.Double], new JsonSerializer[java.lang.Double] { - override def serialize(t: lang.Double, `type`: Type, jsonSerializationContext: JsonSerializationContext): JsonElement = - if(t == t.longValue()) new JsonPrimitive(t.longValue()) else new JsonPrimitive(t) - }).create -} diff --git a/contextservice/cs-common/pom.xml b/contextservice/cs-common/pom.xml deleted file mode 100644 index 44983431c2dee782f26c7652a655f35f3af57f8e..0000000000000000000000000000000000000000 --- a/contextservice/cs-common/pom.xml +++ /dev/null @@ -1,75 +0,0 @@ - - - - - - linkis - com.webank.wedatasphere.linkis - 0.10.0 - ../../pom.xml - - 4.0.0 - - linkis-cs-common - - - - com.webank.wedatasphere.linkis - linkis-common - provided - - - org.apache.commons - commons-text - 1.6 - - - org.reflections - reflections - 0.9.10 - - - - com.google.code.gson - gson - - - - - - - - org.apache.maven.plugins - maven-deploy-plugin - - - - net.alchim31.maven - scala-maven-plugin - - - org.apache.maven.plugins - maven-jar-plugin - - - - - ${basedir}/src/main/resources - - - ${project.artifactId}-${project.version} - - \ No newline at end of file diff --git a/contextservice/cs-common/src/main/java/com/webank/wedatasphere/linkis/cs/common/entity/metadata/Column.java b/contextservice/cs-common/src/main/java/com/webank/wedatasphere/linkis/cs/common/entity/metadata/Column.java deleted file mode 100644 index 902ef0c2190f2693ce0ba84348175bacf82d7b12..0000000000000000000000000000000000000000 --- 
a/contextservice/cs-common/src/main/java/com/webank/wedatasphere/linkis/cs/common/entity/metadata/Column.java +++ /dev/null @@ -1,54 +0,0 @@ -/* - * Copyright 2019 WeBank - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package com.webank.wedatasphere.linkis.cs.common.entity.metadata; - -/** - * Created by patinousward on 2020/2/11. - */ -public interface Column { - - Integer getLength(); - - void setLength(Integer length); - - String getName(); - - void setName(String name); - - String getAlias(); - - void setAlias(String alias); - - String getType(); - - void setType(String type); - - String getComment(); - - void setComment(String comment); - - String getExpress(); - - void setExpress(String express); - - String getRule(); - - void setRule(String rule); - - Boolean getPrimary(); - - void setPrimary(Boolean primary); -} diff --git a/contextservice/cs-common/src/main/java/com/webank/wedatasphere/linkis/cs/common/entity/metadata/DB.java b/contextservice/cs-common/src/main/java/com/webank/wedatasphere/linkis/cs/common/entity/metadata/DB.java deleted file mode 100644 index 3e468f214796f2d655a81320d40c215d6f0a8e6d..0000000000000000000000000000000000000000 --- a/contextservice/cs-common/src/main/java/com/webank/wedatasphere/linkis/cs/common/entity/metadata/DB.java +++ /dev/null @@ -1,45 +0,0 @@ -/* - * Copyright 2019 WeBank - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the 
License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package com.webank.wedatasphere.linkis.cs.common.entity.metadata; - -import com.webank.wedatasphere.linkis.cs.common.entity.enumeration.DBType; - -/** - * Created by patinousward on 2020/2/11. - */ -public interface DB { - - String getName(); - - void setName(String name); - - DBType getDbType(); - - void setDbType(DBType dbType); - - String getOwners(); - - void setOwners(String owners); - - String getComment(); - - void setComment(String comment); - - String[] getLables(); - - void setLables(String[] lables); - -} diff --git a/contextservice/cs-common/src/main/java/com/webank/wedatasphere/linkis/cs/common/entity/metadata/Partition.java b/contextservice/cs-common/src/main/java/com/webank/wedatasphere/linkis/cs/common/entity/metadata/Partition.java deleted file mode 100644 index 4790595f300d5bd3107be5c42bf1c694430a26d7..0000000000000000000000000000000000000000 --- a/contextservice/cs-common/src/main/java/com/webank/wedatasphere/linkis/cs/common/entity/metadata/Partition.java +++ /dev/null @@ -1,54 +0,0 @@ -/* - * Copyright 2019 WeBank - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ -package com.webank.wedatasphere.linkis.cs.common.entity.metadata; - -/** - * Created by patinousward on 2020/2/11. - */ -public interface Partition { - - Integer getLength(); - - void setLength(Integer length); - - String getName(); - - void setName(String name); - - String getAlias(); - - void setAlias(String alias); - - String getType(); - - void setType(String type); - - String getComment(); - - void setComment(String comment); - - String getExpress(); - - void setExpress(String express); - - String getRule(); - - void setRule(String rule); - - Boolean getPrimary(); - - void setPrimary(Boolean primary); -} diff --git a/contextservice/cs-common/src/main/java/com/webank/wedatasphere/linkis/cs/common/entity/resource/Resource.java b/contextservice/cs-common/src/main/java/com/webank/wedatasphere/linkis/cs/common/entity/resource/Resource.java deleted file mode 100644 index 4259d623a2b46e723804b866a75ae73bc7229b60..0000000000000000000000000000000000000000 --- a/contextservice/cs-common/src/main/java/com/webank/wedatasphere/linkis/cs/common/entity/resource/Resource.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Copyright 2019 WeBank - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package com.webank.wedatasphere.linkis.cs.common.entity.resource; - -import com.webank.wedatasphere.linkis.cs.common.entity.source.ValueBean; - -/** - * Created by patinousward on 2020/2/11. - */ -public interface Resource { -} diff --git a/contextservice/cs-common/src/main/java/com/webank/wedatasphere/linkis/cs/common/exception/ErrorCode.java b/contextservice/cs-common/src/main/java/com/webank/wedatasphere/linkis/cs/common/exception/ErrorCode.java deleted file mode 100644 index 3bd8513be13627c7e029fe7eb58368823af5cdca..0000000000000000000000000000000000000000 --- a/contextservice/cs-common/src/main/java/com/webank/wedatasphere/linkis/cs/common/exception/ErrorCode.java +++ /dev/null @@ -1,53 +0,0 @@ -/* - * Copyright 2019 WeBank - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package com.webank.wedatasphere.linkis.cs.common.exception; - -/** - * @Author alexyang - * @Date 2020/2/21 - */ -public class ErrorCode { - - public static final int INVALID_NULL_OBJECT = 70100; - - public static final int SERIALIZER_TO_JSON_ERROR = 70101; - - public static final int INVALID_NULL_STRING = 70102; - - public static final int INVALID_DESERIALIZE_STRING = 70103; - - public static final int INVALID_DESERIALIZE_OBJECT = 70104; - - public static final int DESERIALIZER_FROM_JSON_ERROR = 70105; - - public static final int METHOD_NOT_OVERRIDE = 70106; - - public static final int INVALID_HAID_ENCODE_PARAMS = 70107; - - public static final int INVALID_HAID_STRING = 70108; - - public static final int INVALID_CONTEXT_TYPE = 70109; - - public static final int GET_CONTEXT_VALUE_ERROR = 70110; - - public static final int SEARCH_CONTEXT_VALUE_ERROR = 70111; - - public static final int INVALID_CONTEXT_VALUE_TYPE = 70109; - - - public static final int DESERIALIZE_ERROR = 70112; - -} diff --git a/contextservice/cs-common/src/main/java/com/webank/wedatasphere/linkis/cs/common/listener/ContextIDListener.java b/contextservice/cs-common/src/main/java/com/webank/wedatasphere/linkis/cs/common/listener/ContextIDListener.java deleted file mode 100644 index 045ee1892055c987b1327229e6334a5157488f75..0000000000000000000000000000000000000000 --- a/contextservice/cs-common/src/main/java/com/webank/wedatasphere/linkis/cs/common/listener/ContextIDListener.java +++ /dev/null @@ -1,30 +0,0 @@ -/* - * Copyright 2019 WeBank - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ -package com.webank.wedatasphere.linkis.cs.common.listener; - -import com.webank.wedatasphere.linkis.cs.common.entity.source.ContextID; - -/** - * Created by patinousward on 2020/2/11. - */ -public interface ContextIDListener { - - void onRemoved(ContextID contextID); - - void onReset(ContextID contextID); - - void onUPdated(ContextID contextID); -} diff --git a/contextservice/cs-common/src/main/java/com/webank/wedatasphere/linkis/cs/common/listener/ContextKeyListener.java b/contextservice/cs-common/src/main/java/com/webank/wedatasphere/linkis/cs/common/listener/ContextKeyListener.java deleted file mode 100644 index c553cc84228191b1550038e0d4b309517fb7d5a7..0000000000000000000000000000000000000000 --- a/contextservice/cs-common/src/main/java/com/webank/wedatasphere/linkis/cs/common/listener/ContextKeyListener.java +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Copyright 2019 WeBank - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package com.webank.wedatasphere.linkis.cs.common.listener; - -import com.webank.wedatasphere.linkis.cs.common.entity.source.ContextKeyValue; -import com.webank.wedatasphere.linkis.cs.common.entity.source.ContextID; - -/** - * Created by patinousward on 2020/2/11. 
- */ -public interface ContextKeyListener { - - void onUpdated(ContextID contextID, ContextKeyValue contextKeyValue); - - void onRemoved(ContextID contextID, ContextKeyValue contextKeyValue); - - void onReset(ContextID contextID, ContextKeyValue contextKeyValue); - -} diff --git a/contextservice/cs-highavailable/pom.xml b/contextservice/cs-highavailable/pom.xml deleted file mode 100644 index e87c8a42585b996a80f7bfd36720ddfc80552786..0000000000000000000000000000000000000000 --- a/contextservice/cs-highavailable/pom.xml +++ /dev/null @@ -1,74 +0,0 @@ - - - - - - linkis - com.webank.wedatasphere.linkis - 0.10.0 - ../../pom.xml - - 4.0.0 - - linkis-cs-highavailable - - - - com.webank.wedatasphere.linkis - linkis-cs-common - - - - com.webank.wedatasphere.linkis - linkis-cloudRPC - provided - - - com.webank.wedatasphere.linkis - linkis-cs-persistence - provided - - - org.apache.commons - commons-math3 - 3.1.1 - - - - - - - org.apache.maven.plugins - maven-deploy-plugin - - - - net.alchim31.maven - scala-maven-plugin - - - org.apache.maven.plugins - maven-jar-plugin - - - - - ${basedir}/src/main/resources - - - ${project.artifactId}-${project.version} - - \ No newline at end of file diff --git a/contextservice/cs-highavailable/src/main/java/com/webank/wedatasphere/linkis/cs/highavailable/exception/ErrorCode.java b/contextservice/cs-highavailable/src/main/java/com/webank/wedatasphere/linkis/cs/highavailable/exception/ErrorCode.java deleted file mode 100644 index 53d43bf3bb34cb9fa7352860c27d34993c774193..0000000000000000000000000000000000000000 --- a/contextservice/cs-highavailable/src/main/java/com/webank/wedatasphere/linkis/cs/highavailable/exception/ErrorCode.java +++ /dev/null @@ -1,37 +0,0 @@ -/* - * Copyright 2019 WeBank - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package com.webank.wedatasphere.linkis.cs.highavailable.exception; - -/** - * @Author alexyang - * @Date 2020/2/18 - */ -public class ErrorCode { - - public static int INVALID_INSTANCE_ALIAS = 70010; - - public static int INVALID_HAID = 70011; - - public static int GENERATE_HAID_ERROR = 70012; - - public static int INVALID_CONTEXTID = 70013; - - public static int GENERATE_BACKUP_INSTANCE_ERROR = 70014; - - public static int INVALID_INSTANCE = 70015; - - public static int INVAID_HA_CONTEXTID = 70016; -} diff --git a/contextservice/cs-highavailable/src/test/resources/application.yml b/contextservice/cs-highavailable/src/test/resources/application.yml deleted file mode 100644 index d6b3e8fb9d7c75bc3d89cf2bcf5faef9e9aa26b9..0000000000000000000000000000000000000000 --- a/contextservice/cs-highavailable/src/test/resources/application.yml +++ /dev/null @@ -1,36 +0,0 @@ -# -# Copyright 2019 WeBank -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# - -server: - port: 9010 -spring: - application: - name: CLOUD-CONTEXTSERVICE - -eureka: - client: - serviceUrl: - defaultZone: http://127.0.0.1:20303/eureka/ - registry-fetch-interval-seconds: 5 - instance: - metadata-map: - test: wedatasphere - -management: - endpoints: - web: - exposure: - include: refresh,info diff --git a/contextservice/cs-highavailable/src/test/resources/log4j.properties b/contextservice/cs-highavailable/src/test/resources/log4j.properties deleted file mode 100644 index a7e6854c4d16a1ac0c69c8ba0152eac1f452a908..0000000000000000000000000000000000000000 --- a/contextservice/cs-highavailable/src/test/resources/log4j.properties +++ /dev/null @@ -1,36 +0,0 @@ -# -# Copyright 2019 WeBank -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# - -### set log levels ### - -log4j.rootCategory=INFO,console - -log4j.appender.console=org.apache.log4j.ConsoleAppender -log4j.appender.console.Threshold=INFO -log4j.appender.console.layout=org.apache.log4j.PatternLayout -#log4j.appender.console.layout.ConversionPattern= %d{ISO8601} %-5p (%t) [%F:%M(%L)] - %m%n -log4j.appender.console.layout.ConversionPattern= %d{ISO8601} %-5p (%t) %p %c{1} - %m%n - - -log4j.appender.com.webank.bdp.ide.core=org.apache.log4j.DailyRollingFileAppender -log4j.appender.com.webank.bdp.ide.core.Threshold=INFO -log4j.additivity.com.webank.bdp.ide.core=false -log4j.appender.com.webank.bdp.ide.core.layout=org.apache.log4j.PatternLayout -log4j.appender.com.webank.bdp.ide.core.Append=true -log4j.appender.com.webank.bdp.ide.core.File=logs/linkis.log -log4j.appender.com.webank.bdp.ide.core.layout.ConversionPattern= %d{ISO8601} %-5p (%t) [%F:%M(%L)] - %m%n - -log4j.logger.org.springframework=INFO diff --git a/contextservice/cs-listener/pom.xml b/contextservice/cs-listener/pom.xml deleted file mode 100644 index e1cb123e246d3aebfef70eba2a140a4edeb8c3bc..0000000000000000000000000000000000000000 --- a/contextservice/cs-listener/pom.xml +++ /dev/null @@ -1,69 +0,0 @@ - - - - - - linkis - com.webank.wedatasphere.linkis - 0.10.0 - ../../pom.xml - - 4.0.0 - - linkis-cs-listener - - - - com.webank.wedatasphere.linkis - linkis-common - provided - - - com.webank.wedatasphere.linkis - linkis-cs-common - - - junit - junit - 4.12 - test - - - - - - - org.apache.maven.plugins - maven-deploy-plugin - - - - net.alchim31.maven - scala-maven-plugin - - - org.apache.maven.plugins - maven-jar-plugin - - - - - ${basedir}/src/main/resources - - - ${project.artifactId}-${project.version} - - \ No newline at end of file diff --git a/contextservice/cs-listener/src/test/java/com/webank/wedatasphere/linkis/cs/listener/test/TestContextID.java b/contextservice/cs-listener/src/test/java/com/webank/wedatasphere/linkis/cs/listener/test/TestContextID.java deleted file mode 
100644 index 3d81b457f320e1abaccf0f1da6a2938427741472..0000000000000000000000000000000000000000 --- a/contextservice/cs-listener/src/test/java/com/webank/wedatasphere/linkis/cs/listener/test/TestContextID.java +++ /dev/null @@ -1,37 +0,0 @@ -/* - * Copyright 2019 WeBank - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package com.webank.wedatasphere.linkis.cs.listener.test; - -import com.webank.wedatasphere.linkis.cs.common.entity.source.ContextID; - -/** - * @author peacewong - * @date 2020/2/13 20:41 - */ -public class TestContextID implements ContextID { - - String contextID; - - @Override - public String getContextId() { - return contextID; - } - - @Override - public void setContextId(String contextId) { - this.contextID = contextId; - } -} diff --git a/contextservice/cs-listener/src/test/java/com/webank/wedatasphere/linkis/cs/listener/test/TestContextKey.java b/contextservice/cs-listener/src/test/java/com/webank/wedatasphere/linkis/cs/listener/test/TestContextKey.java deleted file mode 100644 index 6bd442ee6a197a7495364425bd5758d3743f3db7..0000000000000000000000000000000000000000 --- a/contextservice/cs-listener/src/test/java/com/webank/wedatasphere/linkis/cs/listener/test/TestContextKey.java +++ /dev/null @@ -1,78 +0,0 @@ -/* - * Copyright 2019 WeBank - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package com.webank.wedatasphere.linkis.cs.listener.test; - -import com.webank.wedatasphere.linkis.cs.common.entity.enumeration.ContextScope; -import com.webank.wedatasphere.linkis.cs.common.entity.enumeration.ContextType; -import com.webank.wedatasphere.linkis.cs.common.entity.source.ContextKey; - -/** - * @Author: chaogefeng - * @Date: 2020/2/22 - */ -public class TestContextKey implements ContextKey { - private String key; - private ContextType contextType; - @Override - public String getKey() { - return this.key; - } - - @Override - public void setKey(String key) { - this.key=key; - } - - @Override - public ContextType getContextType() { - return this.contextType; - } - - @Override - public void setContextType(ContextType contextType) { - this.contextType=contextType; - } - - @Override - public ContextScope getContextScope() { - return null; - } - - @Override - public void setContextScope(ContextScope contextScope) { - - } - - @Override - public String getKeywords() { - return null; - } - - @Override - public void setKeywords(String keywords) { - - } - - @Override - public int getType() { - return 0; - } - - @Override - public void setType(int type) { - - } -} diff --git a/contextservice/cs-listener/src/test/java/com/webank/wedatasphere/linkis/cs/listener/test/TestContextKeyValue.java b/contextservice/cs-listener/src/test/java/com/webank/wedatasphere/linkis/cs/listener/test/TestContextKeyValue.java deleted file mode 100644 index df44a38e35f57309e52b025223d7a493f8e60a89..0000000000000000000000000000000000000000 --- 
a/contextservice/cs-listener/src/test/java/com/webank/wedatasphere/linkis/cs/listener/test/TestContextKeyValue.java +++ /dev/null @@ -1,51 +0,0 @@ -/* - * Copyright 2019 WeBank - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package com.webank.wedatasphere.linkis.cs.listener.test; - -import com.webank.wedatasphere.linkis.cs.common.entity.source.ContextKey; -import com.webank.wedatasphere.linkis.cs.common.entity.source.ContextKeyValue; -import com.webank.wedatasphere.linkis.cs.common.entity.source.ContextValue; - -/** - * @author chaogefeng - * @date 2020/2/22 16:46 - */ -public class TestContextKeyValue implements ContextKeyValue { - - private ContextKey contextKey; - - private ContextValue contextValue; - - @Override - public ContextKey getContextKey() { - return this.contextKey; - } - - @Override - public void setContextKey(ContextKey contextKey) { - this.contextKey = contextKey; - } - - @Override - public ContextValue getContextValue() { - return this.contextValue; - } - - @Override - public void setContextValue(ContextValue contextValue) { - this.contextValue = contextValue; - } -} diff --git a/contextservice/cs-listener/src/test/java/com/webank/wedatasphere/linkis/cs/listener/test/TestContextValue.java b/contextservice/cs-listener/src/test/java/com/webank/wedatasphere/linkis/cs/listener/test/TestContextValue.java deleted file mode 100644 index db01d5009fbb2cdd635ddd971989988f15436350..0000000000000000000000000000000000000000 --- 
a/contextservice/cs-listener/src/test/java/com/webank/wedatasphere/linkis/cs/listener/test/TestContextValue.java +++ /dev/null @@ -1,52 +0,0 @@ -/* - * Copyright 2019 WeBank - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package com.webank.wedatasphere.linkis.cs.listener.test; - -import com.webank.wedatasphere.linkis.cs.common.entity.source.ContextValue; -import com.webank.wedatasphere.linkis.cs.common.entity.source.ValueBean; - -/** - * @Author: chaogefeng - * @Date: 2020/2/22 - */ -public class TestContextValue implements ContextValue { - private Object value; - - private String keywords; - - - @Override - public String getKeywords() { - return null; - } - - @Override - public void setKeywords(String keywords) { - - } - - @Override - public Object getValue() { - return this.value; - } - - @Override - public void setValue(Object value) { - this.value=value; - } - - -} diff --git a/contextservice/cs-listener/src/test/java/com/webank/wedatasphere/linkis/cs/listener/test/TestListenerManager.java b/contextservice/cs-listener/src/test/java/com/webank/wedatasphere/linkis/cs/listener/test/TestListenerManager.java deleted file mode 100644 index 37c174064dce78a3c579592130017a1e5d153a09..0000000000000000000000000000000000000000 --- a/contextservice/cs-listener/src/test/java/com/webank/wedatasphere/linkis/cs/listener/test/TestListenerManager.java +++ /dev/null @@ -1,107 +0,0 @@ -/* - * Copyright 2019 WeBank - * - * Licensed under the Apache License, Version 
2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package com.webank.wedatasphere.linkis.cs.listener.test; - -import com.webank.wedatasphere.linkis.common.listener.Event; -import com.webank.wedatasphere.linkis.cs.common.entity.listener.ListenerDomain; -import com.webank.wedatasphere.linkis.cs.common.entity.source.ContextID; -import com.webank.wedatasphere.linkis.cs.common.entity.source.ContextKey; -import com.webank.wedatasphere.linkis.cs.common.entity.source.ContextKeyValue; -import com.webank.wedatasphere.linkis.cs.common.entity.source.ContextValue; -import com.webank.wedatasphere.linkis.cs.listener.ListenerBus.ContextAsyncListenerBus; -import com.webank.wedatasphere.linkis.cs.listener.callback.imp.ContextKeyValueBean; -import com.webank.wedatasphere.linkis.cs.listener.callback.imp.DefaultContextIDCallbackEngine; -import com.webank.wedatasphere.linkis.cs.listener.callback.imp.DefaultContextKeyCallbackEngine; -import com.webank.wedatasphere.linkis.cs.listener.event.enumeration.OperateType; -import com.webank.wedatasphere.linkis.cs.listener.event.impl.DefaultContextIDEvent; -import com.webank.wedatasphere.linkis.cs.listener.event.impl.DefaultContextKeyEvent; -import com.webank.wedatasphere.linkis.cs.listener.manager.ListenerManager; -import com.webank.wedatasphere.linkis.cs.listener.manager.imp.DefaultContextListenerManager; -import org.junit.Test; - -import java.util.ArrayList; -import java.util.List; - -/** - * @Author: chaogefeng - * @Date: 2020/2/22 - */ -public class TestListenerManager { - @Test - 
public void testGetContextAsyncListenerBus() { - DefaultContextListenerManager defaultContextListenerManager = DefaultContextListenerManager.getInstance(); - - ContextAsyncListenerBus contextAsyncListenerBus = defaultContextListenerManager.getContextAsyncListenerBus(); - - DefaultContextIDCallbackEngine contextIDCallbackEngine = defaultContextListenerManager.getContextIDCallbackEngine(); - - DefaultContextKeyCallbackEngine contextKeyCallbackEngine = defaultContextListenerManager.getContextKeyCallbackEngine(); - //client1的contextID - TestContextID testContextID1 = new TestContextID(); - testContextID1.setContextId("18392881376"); - - //client2的contextID - TestContextID testContextID2 = new TestContextID(); - testContextID2.setContextId("13431335441"); - - List csKeys1 = new ArrayList<>(); - TestContextKey testContextKey1 = new TestContextKey(); - testContextKey1.setKey("key1"); - TestContextKey testContextKey2 = new TestContextKey(); - testContextKey2.setKey("key2"); - csKeys1.add(testContextKey1); - csKeys1.add(testContextKey2); - - List csKeys2 = new ArrayList<>(); - TestContextKey testContextKey3 = new TestContextKey(); - testContextKey3.setKey("key3"); - TestContextKey testContextKey4 = new TestContextKey(); - testContextKey4.setKey("key4"); - csKeys2.add(testContextKey3); - csKeys2.add(testContextKey4); - - - ListenerDomain ListenerDomain1; - - ListenerDomain ListenerDomain2; - - ListenerDomain ListenerDomain3; - - - - - DefaultContextKeyEvent defaultContextKeyEvent = new DefaultContextKeyEvent(); - defaultContextKeyEvent.setContextID(testContextID1); - defaultContextKeyEvent.setOperateType(OperateType.UPDATE); - TestContextKeyValue testContextKeyValue = new TestContextKeyValue(); - testContextKeyValue.setContextKey(testContextKey1); - TestContextValue testContextValue = new TestContextValue(); - testContextValue.setValue("chaogefeng"); - testContextKeyValue.setContextValue(testContextValue); - defaultContextKeyEvent.setContextKeyValue(testContextKeyValue); - 
contextAsyncListenerBus.doPostEvent(contextKeyCallbackEngine, defaultContextKeyEvent); - ArrayList clientSource2ListenerCallback = contextKeyCallbackEngine.getListenerCallback("127.0.0.1:8888"); - System.out.println("----------------------------------------------------------------------"); - for (ContextKeyValueBean contextKeyValueBean : clientSource2ListenerCallback) { - System.out.println("返回的bean里面对应的contexID: " + contextKeyValueBean.getCsID().getContextId()); - System.out.println("返回的bean里面对应的cskeys: " + contextKeyValueBean.getCsKey().getKey()); - if (contextKeyValueBean.getCsValue() != null) { - System.out.println("返回的bean里面对应的value: " + contextKeyValueBean.getCsValue().getValue()); - } - } - } - -} diff --git a/contextservice/cs-persistence/pom.xml b/contextservice/cs-persistence/pom.xml deleted file mode 100644 index 1141ee2dc4ac92618bd7a34ca62af6cac19af7c9..0000000000000000000000000000000000000000 --- a/contextservice/cs-persistence/pom.xml +++ /dev/null @@ -1,85 +0,0 @@ - - - - - - linkis - com.webank.wedatasphere.linkis - 0.10.0 - ../../pom.xml - - 4.0.0 - - linkis-cs-persistence - - - - com.webank.wedatasphere.linkis - linkis-cs-common - - - - com.webank.wedatasphere.linkis - linkis-mybatis - - - - com.webank.wedatasphere.linkis - linkis-module - provided - - - - org.apache.commons - commons-math3 - 3.1.1 - - - - junit - junit - 4.12 - test - - - - - - - org.apache.maven.plugins - maven-deploy-plugin - - - - net.alchim31.maven - scala-maven-plugin - - - org.apache.maven.plugins - maven-jar-plugin - - - - - src/main/java - - **/*.xml - - - - ${project.artifactId}-${project.version} - - \ No newline at end of file diff --git a/contextservice/cs-persistence/src/main/java/com/webank/wedatasphere/linkis/cs/persistence/dao/ContextMapMapper.java b/contextservice/cs-persistence/src/main/java/com/webank/wedatasphere/linkis/cs/persistence/dao/ContextMapMapper.java deleted file mode 100644 index 
422b2ce4455a19f2341dd8e695c829bdd4d6c1f3..0000000000000000000000000000000000000000 --- a/contextservice/cs-persistence/src/main/java/com/webank/wedatasphere/linkis/cs/persistence/dao/ContextMapMapper.java +++ /dev/null @@ -1,56 +0,0 @@ -/* - * Copyright 2019 WeBank - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package com.webank.wedatasphere.linkis.cs.persistence.dao; - -import com.webank.wedatasphere.linkis.cs.common.entity.enumeration.ContextScope; -import com.webank.wedatasphere.linkis.cs.common.entity.enumeration.ContextType; -import com.webank.wedatasphere.linkis.cs.common.entity.source.ContextID; -import com.webank.wedatasphere.linkis.cs.common.entity.source.ContextKey; -import com.webank.wedatasphere.linkis.cs.persistence.entity.PersistenceContextKeyValue; -import org.apache.ibatis.annotations.Param; - -import java.util.List; - -/** - * Created by patinousward on 2020/2/13. 
- */ -public interface ContextMapMapper { - void createMap(PersistenceContextKeyValue pKV); - - void updateMap(PersistenceContextKeyValue pKV); - - PersistenceContextKeyValue getContextMap(@Param("contextID") ContextID contextID, @Param("contextKey") ContextKey contextKey); - - List getAllContextMapByKey(@Param("contextID") ContextID contextID, @Param("key") String key); - - List getAllContextMapByContextID(@Param("contextID") ContextID contextID); - - List getAllContextMapByScope(@Param("contextID") ContextID contextID, @Param("contextScope") ContextScope contextScope); - - List getAllContextMapByType(@Param("contextID") ContextID contextID, @Param("contextType") ContextType contextType); - - void removeContextMap(@Param("contextID") ContextID contextID, @Param("contextKey") ContextKey contextKey); - - void removeAllContextMapByContextID(@Param("contextID") ContextID contextID); - - void removeAllContextMapByType(@Param("contextID") ContextID contextID, @Param("contextType") ContextType contextType); - - void removeAllContextMapByScope(@Param("contextID") ContextID contextID, @Param("contextScope") ContextScope contextScope); - - void removeByKeyPrefixAndContextType(@Param("contextID") ContextID contextID, @Param("contextType") ContextType contextType, @Param("keyPrefix") String keyPrefix); - - void removeByKeyPrefix(@Param("contextID") ContextID contextID, @Param("keyPrefix") String keyPrefix); -} diff --git a/contextservice/cs-persistence/src/main/java/com/webank/wedatasphere/linkis/cs/persistence/persistence/TransactionManager.java b/contextservice/cs-persistence/src/main/java/com/webank/wedatasphere/linkis/cs/persistence/persistence/TransactionManager.java deleted file mode 100644 index 19fcafd9067d5550deb35d8ea98cae19edb11448..0000000000000000000000000000000000000000 --- a/contextservice/cs-persistence/src/main/java/com/webank/wedatasphere/linkis/cs/persistence/persistence/TransactionManager.java +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Copyright 2019 WeBank - 
* - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package com.webank.wedatasphere.linkis.cs.persistence.persistence; - -/** - * Created by patinousward on 2020/2/11. - */ -public interface TransactionManager { - - Object begin(); - - void rollback(Object object); - - void commit(Object object); - - void onTransaction(); - -} diff --git a/contextservice/cs-persistence/src/main/java/com/webank/wedatasphere/linkis/cs/persistence/util/PersistenceUtils.java b/contextservice/cs-persistence/src/main/java/com/webank/wedatasphere/linkis/cs/persistence/util/PersistenceUtils.java deleted file mode 100644 index 8e29dc21ecd959e61c3b16f9ef052b4671cc16a9..0000000000000000000000000000000000000000 --- a/contextservice/cs-persistence/src/main/java/com/webank/wedatasphere/linkis/cs/persistence/util/PersistenceUtils.java +++ /dev/null @@ -1,159 +0,0 @@ -/* - * Copyright 2019 WeBank - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package com.webank.wedatasphere.linkis.cs.persistence.util; - -import com.fasterxml.jackson.databind.ObjectMapper; -import com.webank.wedatasphere.linkis.cs.common.exception.CSErrorException; -import com.webank.wedatasphere.linkis.cs.common.exception.CSWarnException; -import com.webank.wedatasphere.linkis.cs.common.serialize.helper.ContextSerializationHelper; -import com.webank.wedatasphere.linkis.cs.common.serialize.helper.SerializationHelper; -import com.webank.wedatasphere.linkis.cs.persistence.annotation.Ignore; -import com.webank.wedatasphere.linkis.cs.persistence.entity.ExtraFieldClass; -import com.webank.wedatasphere.linkis.cs.persistence.exception.ThrowingFunction; -import com.webank.wedatasphere.linkis.server.BDPJettyServerHelper; -import org.apache.commons.math3.util.Pair; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.BeanUtils; - -import java.lang.reflect.Field; -import java.lang.reflect.Method; -import java.util.Arrays; -import java.util.List; -import java.util.function.Function; -import java.util.stream.Collectors; - -/** - * Created by patinousward on 2020/2/13. 
- */ -public class PersistenceUtils { - - private static ObjectMapper json = BDPJettyServerHelper.jacksonJson(); - - private static final Logger logger = LoggerFactory.getLogger(PersistenceUtils.class); - - private static String generateGetMethod(Field field) { - String fieldName = field.getName(); - return String.format("get%s%s", fieldName.substring(0, 1).toUpperCase(), fieldName.substring(1)); - } - - public static String generateSetMethod(String fieldName) { - return String.format("set%s%s", fieldName.substring(0, 1).toUpperCase(), fieldName.substring(1)); - } - - private static boolean canIgnore(Field field) { - return field.getAnnotation(Ignore.class) != null; - } - - private static List getIgnoreFieldName(Class clazz) { - if (clazz.getAnnotation(Ignore.class) != null) { - return Arrays.stream(clazz.getDeclaredFields()).map(Field::getName).collect(Collectors.toList()); - } else { - return Arrays.stream(clazz.getDeclaredFields()) - .filter(PersistenceUtils::canIgnore).map(Field::getName).collect(Collectors.toList()); - } - } - - public static Pair transfer(T t, Class sClass) throws CSErrorException { - try { - ExtraFieldClass extraFieldClass = new ExtraFieldClass(); - S s = sClass.newInstance(); - BeanUtils.copyProperties(t, s); - Class tClass = t.getClass(); - extraFieldClass.setClassName(tClass.getName()); - List canIgnore = getIgnoreFieldName(sClass); - for (Field field : tClass.getDeclaredFields()) { - if (!canIgnore.contains(field.getName())) { - Method method = tClass.getMethod(generateGetMethod(field)); - if (null != method.invoke(t)) { - //field.getType().getName() 无法拿到子类的类型 - Object invoke = method.invoke(t); - extraFieldClass.addFieldName(field.getName()); - if (invoke == null) { - extraFieldClass.addFieldType(field.getType().getName()); - } else { - extraFieldClass.addFieldType(invoke.getClass().getName()); - } - extraFieldClass.addFieldValue(invoke); - } - } - } - return new Pair<>(s, extraFieldClass); - } catch (Exception e) { - throw new 
CSErrorException(97000, "transfer bean failed:", e); - } - } - - public static T transfer(ExtraFieldClass extraFieldClass, S s) throws CSErrorException { - if (s == null) return null; - try { - Class tClass = Class.forName(extraFieldClass.getClassName()); - T t = (T) tClass.newInstance(); - BeanUtils.copyProperties(s, t); - for (int i = 0; i < extraFieldClass.getFieldNames().size(); i++) { - Field field = tClass.getDeclaredField(extraFieldClass.getOneFieldName(i)); - field.setAccessible(true); - if (LONG_TYP.equals(extraFieldClass.getOneFieldType(i))) { - Long value = new Long(extraFieldClass.getOneFieldValue(i).toString()); - field.set(t, value); - } else if (Enum.class.isAssignableFrom(Class.forName(extraFieldClass.getOneFieldType(i)))) { - //反序列化支持枚举类 - Class enumClass = Class.forName(extraFieldClass.getOneFieldType(i)); - Method valueOf = enumClass.getMethod("valueOf", String.class); - Object invoke = valueOf.invoke(null, extraFieldClass.getOneFieldValue(i)); - field.set(t, invoke); - } else if (!BeanUtils.isSimpleProperty(Class.forName(extraFieldClass.getOneFieldType(i)))) { - //非基本类型的话,使用jackson进行反序列化 // TODO: 2020/3/5 这里属性的序列化and反序列化最好修改为utils的序列化器 - Object o = json.convertValue(extraFieldClass.getOneFieldValue(i), Class.forName(extraFieldClass.getOneFieldType(i))); - field.set(t, o); - } else { - field.set(t, extraFieldClass.getOneFieldValue(i)); - } - } - return t; - } catch (Exception e) { - throw new CSErrorException(97000, "transfer bean failed:", e); - } - } - - public static Function map( - ThrowingFunction throwingFunction) { - return i -> { - try { - return throwingFunction.accept(i); - } catch (Exception e) { - throw new CSWarnException(97000, "execute failed,reason:", e); - } - }; - } - - private static final String LONG_TYP = "java.lang.Long"; - - // TODO: 2020/5/15 去掉重复的 - public static final SerializationHelper SERIALIZE = ContextSerializationHelper.getInstance(); - - public static String serialize(Object o) throws CSErrorException { - if (o 
instanceof String) { - return (String) o; - } - return SERIALIZE.serialize(o); - } - - public static T deserialize(String Str) throws CSErrorException { - return (T) SERIALIZE.deserialize(Str); - } - -} diff --git a/contextservice/cs-search/pom.xml b/contextservice/cs-search/pom.xml deleted file mode 100644 index 7b3c4a8489aebc7dce8e449ca1e0c31c3edb8a49..0000000000000000000000000000000000000000 --- a/contextservice/cs-search/pom.xml +++ /dev/null @@ -1,81 +0,0 @@ - - - - - - linkis - com.webank.wedatasphere.linkis - 0.10.0 - ../../pom.xml - - 4.0.0 - - linkis-cs-search - - - - com.webank.wedatasphere.linkis - linkis-cs-common - - - com.webank.wedatasphere.linkis - linkis-cs-cache - - - - com.webank.wedatasphere.linkis - linkis-cloudRPC - provided - - - - junit - junit - RELEASE - test - - - org.mockito - mockito-all - 2.0.2-beta - test - - - - - - - org.apache.maven.plugins - maven-deploy-plugin - - - - net.alchim31.maven - scala-maven-plugin - - - org.apache.maven.plugins - maven-jar-plugin - - - - - ${basedir}/src/main/resources - - - ${project.artifactId}-${project.version} - - \ No newline at end of file diff --git a/contextservice/cs-search/src/main/java/com/webank/wedatasphere/linkis/cs/execution/matcher/NearestLogicContextSearchMatcher.java b/contextservice/cs-search/src/main/java/com/webank/wedatasphere/linkis/cs/execution/matcher/NearestLogicContextSearchMatcher.java deleted file mode 100644 index 095ed1242d4cf0b7d2dfba85d1c4ada34597814f..0000000000000000000000000000000000000000 --- a/contextservice/cs-search/src/main/java/com/webank/wedatasphere/linkis/cs/execution/matcher/NearestLogicContextSearchMatcher.java +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Copyright 2019 WeBank - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package com.webank.wedatasphere.linkis.cs.execution.matcher; - -import com.webank.wedatasphere.linkis.cs.common.entity.source.ContextKeyValue; -import com.webank.wedatasphere.linkis.cs.condition.impl.NearestCondition; -import com.webank.wedatasphere.linkis.cs.condition.impl.NotCondition; - -public class NearestLogicContextSearchMatcher extends UnaryLogicContextSearchMatcher{ - - public NearestLogicContextSearchMatcher(NearestCondition condition) { - super(condition); - } - - @Override - public Boolean match(ContextKeyValue contextKeyValue) { - return originalMatcher.match(contextKeyValue); - } -} diff --git a/contextservice/cs-search/src/main/java/com/webank/wedatasphere/linkis/cs/optimize/dfs/Node.java b/contextservice/cs-search/src/main/java/com/webank/wedatasphere/linkis/cs/optimize/dfs/Node.java deleted file mode 100644 index 85fb8ffffb81fe1ee422fe8b54bcda419573dca7..0000000000000000000000000000000000000000 --- a/contextservice/cs-search/src/main/java/com/webank/wedatasphere/linkis/cs/optimize/dfs/Node.java +++ /dev/null @@ -1,26 +0,0 @@ -/* - * Copyright 2019 WeBank - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ -package com.webank.wedatasphere.linkis.cs.optimize.dfs; - -public interface Node { - Double getCost(); - Double getPriority(); - Node getLeft(); - Node getRight(); - void shift(); - boolean visited(); - void visit(); -} diff --git a/contextservice/cs-search/src/test/java/com/webank/wedatasphere/linkis/cs/csid/TestContextID.java b/contextservice/cs-search/src/test/java/com/webank/wedatasphere/linkis/cs/csid/TestContextID.java deleted file mode 100644 index 0fbdb6b11f37566303886690aaacd87cf27b0867..0000000000000000000000000000000000000000 --- a/contextservice/cs-search/src/test/java/com/webank/wedatasphere/linkis/cs/csid/TestContextID.java +++ /dev/null @@ -1,37 +0,0 @@ -/* - * Copyright 2019 WeBank - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package com.webank.wedatasphere.linkis.cs.csid; - -import com.webank.wedatasphere.linkis.cs.common.entity.source.ContextID; - -/** - * @author peacewong - * @date 2020/2/13 20:41 - */ -public class TestContextID implements ContextID { - - String contextID; - - @Override - public String getContextId() { - return contextID; - } - - @Override - public void setContextId(String contextId) { - this.contextID = contextId; - } -} diff --git a/contextservice/cs-search/src/test/java/com/webank/wedatasphere/linkis/cs/keyword/TestContextKey.java b/contextservice/cs-search/src/test/java/com/webank/wedatasphere/linkis/cs/keyword/TestContextKey.java deleted file mode 100644 index 74bd9c6150e65697aa2b70ccaba54ceddfc74ae5..0000000000000000000000000000000000000000 --- a/contextservice/cs-search/src/test/java/com/webank/wedatasphere/linkis/cs/keyword/TestContextKey.java +++ /dev/null @@ -1,89 +0,0 @@ -/* - * Copyright 2019 WeBank - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package com.webank.wedatasphere.linkis.cs.keyword; - -import com.webank.wedatasphere.linkis.cs.common.annotation.KeywordMethod; -import com.webank.wedatasphere.linkis.cs.common.entity.enumeration.ContextScope; -import com.webank.wedatasphere.linkis.cs.common.entity.enumeration.ContextType; -import com.webank.wedatasphere.linkis.cs.common.entity.source.ContextKey; - -/** - * @author peacewong - * @date 2020/2/13 16:32 - */ -public class TestContextKey implements ContextKey { - - private String key; - - private String keywords; - - private ContextScope contextScope; - - private ContextType contextType; - - @KeywordMethod - @Override - public String getKey() { - return this.key; - } - - @Override - public void setKey(String key) { - this.key = key; - } - - @Override - public ContextType getContextType() { - return contextType; - } - - @Override - public void setContextType(ContextType contextType) { - this.contextType = contextType; - } - - @Override - public ContextScope getContextScope() { - return contextScope; - } - - @Override - public void setContextScope(ContextScope contextScope) { - this.contextScope = contextScope; - } - - @KeywordMethod(splitter = ",") - @Override - public String getKeywords() { - return this.keywords; - } - - @Override - public void setKeywords(String keywords) { - this.keywords = keywords; - } - - - @Override - public int getType() { - return 0; - } - - @Override - public void setType(int type) { - - } -} diff --git a/contextservice/cs-search/src/test/java/com/webank/wedatasphere/linkis/cs/keyword/TestContextKeyValue.java b/contextservice/cs-search/src/test/java/com/webank/wedatasphere/linkis/cs/keyword/TestContextKeyValue.java deleted file mode 100644 index 79d8a67091ecd881a9805dd2598e3bde9215b59d..0000000000000000000000000000000000000000 --- a/contextservice/cs-search/src/test/java/com/webank/wedatasphere/linkis/cs/keyword/TestContextKeyValue.java +++ /dev/null @@ -1,51 +0,0 @@ -/* - * Copyright 2019 WeBank - * - * Licensed under 
the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package com.webank.wedatasphere.linkis.cs.keyword; - -import com.webank.wedatasphere.linkis.cs.common.entity.source.ContextKey; -import com.webank.wedatasphere.linkis.cs.common.entity.source.ContextKeyValue; -import com.webank.wedatasphere.linkis.cs.common.entity.source.ContextValue; - -/** - * @author peacewong - * @date 2020/2/13 16:46 - */ -public class TestContextKeyValue implements ContextKeyValue { - - private ContextKey contextKey; - - private ContextValue contextValue; - - @Override - public ContextKey getContextKey() { - return this.contextKey; - } - - @Override - public void setContextKey(ContextKey contextKey) { - this.contextKey = contextKey; - } - - @Override - public ContextValue getContextValue() { - return this.contextValue; - } - - @Override - public void setContextValue(ContextValue contextValue) { - this.contextValue = contextValue; - } -} diff --git a/contextservice/cs-search/src/test/java/com/webank/wedatasphere/linkis/cs/keyword/TestContextValue.java b/contextservice/cs-search/src/test/java/com/webank/wedatasphere/linkis/cs/keyword/TestContextValue.java deleted file mode 100644 index 0a34ed4277b1456c2a12b61e15def37a961d4ee9..0000000000000000000000000000000000000000 --- a/contextservice/cs-search/src/test/java/com/webank/wedatasphere/linkis/cs/keyword/TestContextValue.java +++ /dev/null @@ -1,53 +0,0 @@ -/* - * Copyright 2019 WeBank - * - * Licensed under the Apache License, Version 2.0 (the "License"); - 
* you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package com.webank.wedatasphere.linkis.cs.keyword; - -import com.webank.wedatasphere.linkis.cs.common.annotation.KeywordMethod; -import com.webank.wedatasphere.linkis.cs.common.entity.source.ContextValue; -import com.webank.wedatasphere.linkis.cs.common.entity.source.ValueBean; - -/** - * @author peacewong - * @date 2020/2/13 16:44 - */ -public class TestContextValue implements ContextValue { - - private Object value; - - private String keywords; - - @KeywordMethod(splitter = "-") - @Override - public String getKeywords() { - return this.keywords; - } - - @Override - public void setKeywords(String keywords) { - this.keywords = keywords; - } - - @KeywordMethod(regex = "hello") - @Override - public Object getValue() { - return this.value; - } - - @Override - public void setValue(Object value) { - this.value = value; - } -} diff --git a/contextservice/cs-server/Dockerfile b/contextservice/cs-server/Dockerfile deleted file mode 100644 index 9c136f496f1d68ef3279f5aa6abd65399907824d..0000000000000000000000000000000000000000 --- a/contextservice/cs-server/Dockerfile +++ /dev/null @@ -1,27 +0,0 @@ -# -# Copyright 2019 WeBank -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -FROM wedatasphere/linkis:emr-base-spark2.4.4 - -MAINTAINER wedatasphere@webank.com - -RUN yum install -y unzip -WORKDIR /opt/linkis - -COPY target/linkis-cs-server.zip /opt/linkis -RUN unzip linkis-cs-server.zip - -WORKDIR /opt/linkis/linkis-cs-server/bin -ENTRYPOINT ["/opt/linkis/linkis-cs-server/bin/startup.sh"] diff --git a/contextservice/cs-server/bin/start-cs-server.sh b/contextservice/cs-server/bin/start-cs-server.sh deleted file mode 100755 index 6278ff3a6f68c9ce4345bdf0230f229b61f0d201..0000000000000000000000000000000000000000 --- a/contextservice/cs-server/bin/start-cs-server.sh +++ /dev/null @@ -1,54 +0,0 @@ -#!/bin/bash -# -# Copyright 2019 WeBank -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - - -cd `dirname $0` -cd .. 
-HOME=`pwd` - -export SERVER_PID=$HOME/bin/linkis.pid -export SERVER_LOG_PATH=$HOME/logs -export SERVER_CLASS=com.webank.wedatasphere.linkis.DataWorkCloudApplication - -if test -z "$SERVER_HEAP_SIZE" -then - export SERVER_HEAP_SIZE="512M" -fi - -if test -z "$SERVER_JAVA_OPTS" -then - export SERVER_JAVA_OPTS=" -Xmx$SERVER_HEAP_SIZE -XX:+UseG1GC -Xloggc:$HOME/logs/linkis-gc.log" -fi - -if [[ -f "${SERVER_PID}" ]]; then - pid=$(cat ${SERVER_PID}) - if kill -0 ${pid} >/dev/null 2>&1; then - echo "Server is already running." - exit 1 - fi -fi - -nohup java $SERVER_JAVA_OPTS -cp ../module/lib/*:$HOME/conf:$HOME/lib/* $SERVER_CLASS 2>&1 > $SERVER_LOG_PATH/linkis.out & -pid=$! -if [[ -z "${pid}" ]]; then - echo "server $SERVER_NAME start failed!" - exit 1 -else - echo "server $SERVER_NAME start succeeded!" - echo $pid > $SERVER_PID - sleep 1 -fi \ No newline at end of file diff --git a/contextservice/cs-server/bin/startup.sh b/contextservice/cs-server/bin/startup.sh deleted file mode 100755 index 7ed6a42bde58e3fd829946193e3a6ad5b5ac097e..0000000000000000000000000000000000000000 --- a/contextservice/cs-server/bin/startup.sh +++ /dev/null @@ -1,49 +0,0 @@ -#!/bin/bash -cd `dirname $0` -cd .. -HOME=`pwd` - -export SERVER_LOG_PATH=$HOME/logs -export SERVER_CLASS=com.webank.wedatasphere.linkis.DataWorkCloudApplication - - -if test -z "$SERVER_HEAP_SIZE" -then - export SERVER_HEAP_SIZE="512M" -fi - -if test -z "$SERVER_JAVA_OPTS" -then - export SERVER_JAVA_OPTS=" -Xmx$SERVER_HEAP_SIZE -XX:+UseG1GC -Xloggc:$HOME/logs/linkis-cs-server-gc.log" -fi - -if test -z "$START_PORT" -then - export START_PORT=22008 -fi - -export SERVER_PID=$HOME/bin/linkis.pid - -if [[ -f "${SERVER_PID}" ]]; then - pid=$(cat ${SERVER_PID}) - if kill -0 ${pid} >/dev/null 2>&1; then - echo "Server is already running." 
- exit 1 - fi -fi - -cp -f /opt/linkis/conf/linkis.properties /opt/linkis/linkis-cs-server/conf - -nohup java $SERVER_JAVA_OPTS -Deurekaurl=$EUREKA_URL -Duser.timezone=Asia/Shanghai -cp $HOME/conf:$HOME/lib/* $SERVER_CLASS --server.port=$START_PORT 2>&1 > $SERVER_LOG_PATH/linkis-cs-server.log & - -pid=$! -if [[ -z "${pid}" ]]; then - echo "server $SERVER_NAME start failed!" - exit 1 -else - echo "server $SERVER_NAME start succeeded!" - echo $pid > $SERVER_PID - sleep 1 -fi - -tail -f /dev/null diff --git a/contextservice/cs-server/bin/stop-cs-server.sh b/contextservice/cs-server/bin/stop-cs-server.sh deleted file mode 100755 index 9e84be4f124db579b1262d8b746e979c06a1a827..0000000000000000000000000000000000000000 --- a/contextservice/cs-server/bin/stop-cs-server.sh +++ /dev/null @@ -1,63 +0,0 @@ -#!/bin/bash -# -# Copyright 2019 WeBank -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - - -cd `dirname $0` -cd .. -HOME=`pwd` - -export SERVER_PID=$HOME/bin/linkis.pid - -function wait_for_server_to_die() { - local pid - local count - pid=$1 - timeout=$2 - count=0 - timeoutTime=$(date "+%s") - let "timeoutTime+=$timeout" - currentTime=$(date "+%s") - forceKill=1 - - while [[ $currentTime -lt $timeoutTime ]]; do - $(kill ${pid} > /dev/null 2> /dev/null) - if kill -0 ${pid} > /dev/null 2>&1; then - sleep 3 - else - forceKill=0 - break - fi - currentTime=$(date "+%s") - done - - if [[ forceKill -ne 0 ]]; then - $(kill -9 ${pid} > /dev/null 2> /dev/null) - fi -} - -if [[ ! 
-f "${SERVER_PID}" ]]; then - echo "server $SERVER_NAME is not running" -else - pid=$(cat ${SERVER_PID}) - if [[ -z "${pid}" ]]; then - echo "server $SERVER_NAME is not running" - else - wait_for_server_to_die $pid 40 - $(rm -f ${SERVER_PID}) - echo "server $SERVER_NAME is stopped." - fi -fi \ No newline at end of file diff --git a/contextservice/cs-server/bin/stop.sh b/contextservice/cs-server/bin/stop.sh deleted file mode 100755 index 9e84be4f124db579b1262d8b746e979c06a1a827..0000000000000000000000000000000000000000 --- a/contextservice/cs-server/bin/stop.sh +++ /dev/null @@ -1,63 +0,0 @@ -#!/bin/bash -# -# Copyright 2019 WeBank -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - - -cd `dirname $0` -cd .. -HOME=`pwd` - -export SERVER_PID=$HOME/bin/linkis.pid - -function wait_for_server_to_die() { - local pid - local count - pid=$1 - timeout=$2 - count=0 - timeoutTime=$(date "+%s") - let "timeoutTime+=$timeout" - currentTime=$(date "+%s") - forceKill=1 - - while [[ $currentTime -lt $timeoutTime ]]; do - $(kill ${pid} > /dev/null 2> /dev/null) - if kill -0 ${pid} > /dev/null 2>&1; then - sleep 3 - else - forceKill=0 - break - fi - currentTime=$(date "+%s") - done - - if [[ forceKill -ne 0 ]]; then - $(kill -9 ${pid} > /dev/null 2> /dev/null) - fi -} - -if [[ ! 
-f "${SERVER_PID}" ]]; then - echo "server $SERVER_NAME is not running" -else - pid=$(cat ${SERVER_PID}) - if [[ -z "${pid}" ]]; then - echo "server $SERVER_NAME is not running" - else - wait_for_server_to_die $pid 40 - $(rm -f ${SERVER_PID}) - echo "server $SERVER_NAME is stopped." - fi -fi \ No newline at end of file diff --git a/contextservice/cs-server/conf/application.yml b/contextservice/cs-server/conf/application.yml deleted file mode 100644 index 3cfdeaeb62799876a2f8fba79a2ef6d9c8327deb..0000000000000000000000000000000000000000 --- a/contextservice/cs-server/conf/application.yml +++ /dev/null @@ -1,48 +0,0 @@ -# -# Copyright 2019 WeBank -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# - -server: - port: 9042 -spring: - application: - name: cloud-contextservice - - -eureka: - client: - serviceUrl: - defaultZone: ${eurekaurl} - instance: - lease-renewal-interval-in-second: 5 - lease-expiration-duration-in-second: 10 - prefer-ip-address: true - instance-id: ${spring.cloud.client.ip-address}:${server.port} - metadata-map: - test: test-user - -management: - endpoints: - web: - exposure: - include: refresh,info -logging: - config: classpath:log4j2.xml - -pagehelper: - helper-dialect: mysql - reasonable: true - support-methods-arguments: true - params: countSql \ No newline at end of file diff --git a/contextservice/cs-server/conf/linkis.properties b/contextservice/cs-server/conf/linkis.properties deleted file mode 100644 index 78e88064ab24c89d8b5c3d4ae37259f842af3cf9..0000000000000000000000000000000000000000 --- a/contextservice/cs-server/conf/linkis.properties +++ /dev/null @@ -1,22 +0,0 @@ -# -# Copyright 2019 WeBank -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# http://www.apache.org/licenses/LICENSE-2.0 -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -wds.linkis.server.mybatis.datasource.url=jdbc:mysql://127.0.0.1:3306/dss_dev_center?characterEncoding=UTF-8 -wds.linkis.server.mybatis.datasource.username= -wds.linkis.server.mybatis.datasource.password= -wds.linkis.server.version=v1 -##restful -wds.linkis.server.restful.scan.packages=com.webank.wedatasphere.linkis.cs.server.restful -##mybatis -wds.linkis.server.mybatis.mapperLocations=classpath*:com/webank/wedatasphere/linkis/cs/persistence/dao/impl/*.xml -wds.linkis.server.mybatis.typeAliasesPackage=com.webank.wedatasphere.linkis.cs.persistence.entity -wds.linkis.server.mybatis.BasePackage=com.webank.wedatasphere.linkis.cs.persistence.dao diff --git a/contextservice/cs-server/conf/log4j.properties b/contextservice/cs-server/conf/log4j.properties deleted file mode 100644 index de6691ad21d293f6505888c5f48c4a47b62da225..0000000000000000000000000000000000000000 --- a/contextservice/cs-server/conf/log4j.properties +++ /dev/null @@ -1,26 +0,0 @@ -# -# Copyright 2019 WeBank -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# http://www.apache.org/licenses/LICENSE-2.0 -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -log4j.rootCategory=INFO,console -log4j.appender.console=org.apache.log4j.ConsoleAppender -log4j.appender.console.Threshold=INFO -log4j.appender.console.layout=org.apache.log4j.PatternLayout -#log4j.appender.console.layout.ConversionPattern= %d{ISO8601} %-5p (%t) [%F:%M(%L)] - %m%n -log4j.appender.console.layout.ConversionPattern=%d{ISO8601} %-5p (%t) %p %c{1} - %m%n -log4j.appender.com.webank.bdp.ide.core=org.apache.log4j.DailyRollingFileAppender -log4j.appender.com.webank.bdp.ide.core.Threshold=INFO -log4j.additivity.com.webank.bdp.ide.core=false -log4j.appender.com.webank.bdp.ide.core.layout=org.apache.log4j.PatternLayout -log4j.appender.com.webank.bdp.ide.core.Append=true -log4j.appender.com.webank.bdp.ide.core.File=logs/linkis.log -log4j.appender.com.webank.bdp.ide.core.layout.ConversionPattern=%d{ISO8601} %-5p (%t) [%F:%M(%L)] - %m%n -log4j.logger.org.springframework=INFO \ No newline at end of file diff --git a/contextservice/cs-server/pom.xml b/contextservice/cs-server/pom.xml deleted file mode 100644 index d2c2b5815f63ff8e9c69b198247750af75ec82bb..0000000000000000000000000000000000000000 --- a/contextservice/cs-server/pom.xml +++ /dev/null @@ -1,183 +0,0 @@ - - - - - - linkis - com.webank.wedatasphere.linkis - 0.10.0 - ../../pom.xml - - 4.0.0 - - linkis-cs-server - - - - com.webank.wedatasphere.linkis - linkis-cs-common - ${project.version} - - - - com.webank.wedatasphere.linkis - linkis-cs-cache - ${project.version} - - - com.webank.wedatasphere.linkis - linkis-module - - - - - - com.webank.wedatasphere.linkis - linkis-cs-listener - ${project.version} - - - - com.webank.wedatasphere.linkis - linkis-cs-persistence - ${project.version} - - - com.webank.wedatasphere.linkis - linkis-module - - - - - - com.webank.wedatasphere.linkis - linkis-cs-highavailable - ${project.version} - - - com.webank.wedatasphere.linkis - linkis-module - - - - - - com.webank.wedatasphere.linkis - linkis-cs-search - ${project.version} - - - com.webank.wedatasphere.linkis - 
linkis-module - - - - - - com.webank.wedatasphere.linkis - linkis-cloudRPC - ${project.version} - provided - - - - com.webank.wedatasphere.linkis - linkis-scheduler - ${project.version} - - - - junit - junit - 4.12 - test - - - - - - - org.apache.maven.plugins - maven-deploy-plugin - - - - net.alchim31.maven - scala-maven-plugin - - - scala-compile-first - process-resources - - add-source - compile - - - - - - org.apache.maven.plugins - maven-jar-plugin - - - org.apache.maven.plugins - maven-assembly-plugin - 2.3 - false - - - make-assembly - package - - single - - - - src/main/assembly/distribution.xml - - - - - - false - linkis-cs-server - false - false - - src/main/assembly/distribution.xml - - - - - - - src/main/java - - **/*.xml - - - - src/main/resources - - **/*.properties - **/application.yml - **/bootstrap.yml - **/log4j2.xml - - - - - \ No newline at end of file diff --git a/contextservice/cs-server/pom_k8s.xml b/contextservice/cs-server/pom_k8s.xml deleted file mode 100644 index 743fdf6abc25e9e4f46266d8d607ecfad6f059f6..0000000000000000000000000000000000000000 --- a/contextservice/cs-server/pom_k8s.xml +++ /dev/null @@ -1,204 +0,0 @@ - - - - - - linkis - com.webank.wedatasphere.linkis - 0.10.0 - ../../pom.xml - - 4.0.0 - - linkis-cs-server - - - - com.webank.wedatasphere.linkis - linkis-cs-common - - - - com.webank.wedatasphere.linkis - linkis-cs-cache - - - com.webank.wedatasphere.linkis - linkis-module - - - - - - com.webank.wedatasphere.linkis - linkis-cs-listener - - - - com.webank.wedatasphere.linkis - linkis-cs-persistence - - - com.webank.wedatasphere.linkis - linkis-module - - - - - - com.webank.wedatasphere.linkis - linkis-cs-highavailable - - - com.webank.wedatasphere.linkis - linkis-module - - - - - - com.webank.wedatasphere.linkis - linkis-cs-search - - - com.webank.wedatasphere.linkis - linkis-module - - - - - - com.webank.wedatasphere.linkis - linkis-cloudRPC - - - tomcat - jasper-compiler - - - tomcat - jasper-runtime - - - - - - 
com.webank.wedatasphere.linkis - linkis-scheduler - - - - junit - junit - 4.12 - test - - - - - - - org.apache.maven.plugins - maven-deploy-plugin - - - - net.alchim31.maven - scala-maven-plugin - - - scala-compile-first - process-resources - - add-source - compile - - - - - - org.apache.maven.plugins - maven-jar-plugin - - - org.apache.maven.plugins - maven-assembly-plugin - 2.3 - false - - - make-assembly - package - - single - - - - src/main/assembly/distribution.xml - - - - - - false - linkis-cs-server - false - false - - src/main/assembly/distribution.xml - - - - - - - - - - - - - - - - - - - - - - - - - - - src/main/java - - **/*.xml - - - - src/main/resources - - **/*.properties - **/application.yml - **/bootstrap.yml - **/log4j2.xml - - - - - \ No newline at end of file diff --git a/contextservice/cs-server/src/main/assembly/distribution.xml b/contextservice/cs-server/src/main/assembly/distribution.xml deleted file mode 100644 index ac1d394d8698e1aa151cffaa47a1f30c69472797..0000000000000000000000000000000000000000 --- a/contextservice/cs-server/src/main/assembly/distribution.xml +++ /dev/null @@ -1,71 +0,0 @@ - - - - linkis-cs-server - - zip - - true - linkis-cs-server - - - - - - lib - true - true - false - false - true - - - - - - - ${basedir}/conf - - * - - 0777 - conf - unix - - - ${basedir}/bin - - * - - 0777 - bin - unix - - - . 
- - */** - - logs - - - - - diff --git a/contextservice/cs-server/src/main/java/com/webank/wedatasphere/linkis/cs/server/enumeration/ServiceMethod.java b/contextservice/cs-server/src/main/java/com/webank/wedatasphere/linkis/cs/server/enumeration/ServiceMethod.java deleted file mode 100644 index fbc241a9a760b1e145be22714bdc530032e7c716..0000000000000000000000000000000000000000 --- a/contextservice/cs-server/src/main/java/com/webank/wedatasphere/linkis/cs/server/enumeration/ServiceMethod.java +++ /dev/null @@ -1,27 +0,0 @@ -/* - * Copyright 2019 WeBank - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package com.webank.wedatasphere.linkis.cs.server.enumeration; - -/** - * Created by patinousward on 2020/2/22. 
- */ -public enum ServiceMethod { - /** - * - */ - CREATE, GET, SEARCH, REMOVE, REMOVEALL, UPDATE, RESET, SET, BIND, HEARTBEAT -} diff --git a/contextservice/cs-server/src/main/java/com/webank/wedatasphere/linkis/cs/server/restful/ContextIDRestfulApi.java b/contextservice/cs-server/src/main/java/com/webank/wedatasphere/linkis/cs/server/restful/ContextIDRestfulApi.java deleted file mode 100644 index 2b33bcd660a9d624a9393f781123c62cb0d72280..0000000000000000000000000000000000000000 --- a/contextservice/cs-server/src/main/java/com/webank/wedatasphere/linkis/cs/server/restful/ContextIDRestfulApi.java +++ /dev/null @@ -1,115 +0,0 @@ -/* - * Copyright 2019 WeBank - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package com.webank.wedatasphere.linkis.cs.server.restful; - -import com.webank.wedatasphere.linkis.cs.common.entity.source.ContextID; -import com.webank.wedatasphere.linkis.cs.common.exception.CSErrorException; -import com.webank.wedatasphere.linkis.cs.common.protocol.ContextHTTPConstant; -import com.webank.wedatasphere.linkis.cs.common.utils.CSCommonUtils; -import com.webank.wedatasphere.linkis.cs.server.enumeration.ServiceMethod; -import com.webank.wedatasphere.linkis.cs.server.enumeration.ServiceType; -import com.webank.wedatasphere.linkis.cs.server.scheduler.CsScheduler; -import com.webank.wedatasphere.linkis.cs.server.scheduler.HttpAnswerJob; -import com.webank.wedatasphere.linkis.server.Message; -import org.codehaus.jackson.JsonNode; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.stereotype.Component; -import org.springframework.util.StringUtils; - -import javax.servlet.http.HttpServletRequest; -import javax.ws.rs.*; -import javax.ws.rs.core.Context; -import javax.ws.rs.core.MediaType; -import javax.ws.rs.core.Response; -import java.io.IOException; - -/** - * Created by patinousward on 2020/2/18. 
- */ -@Component -@Path("/contextservice") -@Produces(MediaType.APPLICATION_JSON) -@Consumes(MediaType.APPLICATION_JSON) -public class ContextIDRestfulApi implements CsRestfulParent { - - @Autowired - private CsScheduler csScheduler; - - @POST - @Path("createContextID") - public Response createContextID(@Context HttpServletRequest req, JsonNode jsonNode) throws InterruptedException, ClassNotFoundException, IOException, CSErrorException { - ContextID contextID = getContextIDFromJsonNode(jsonNode); - HttpAnswerJob answerJob = submitRestJob(req, ServiceMethod.CREATE, contextID); - return Message.messageToResponse(generateResponse(answerJob, "contextId")); - } - - @GET - @Path("getContextID") - public Response getContextID(@Context HttpServletRequest req, @QueryParam("contextId") String id) throws InterruptedException, CSErrorException { - if (StringUtils.isEmpty(id)) { - throw new CSErrorException(97000, "contxtId cannot be empty"); - } - HttpAnswerJob answerJob = submitRestJob(req, ServiceMethod.GET, id); - Message message = generateResponse(answerJob, "contextID"); - return Message.messageToResponse(message); - } - - @POST - @Path("updateContextID") - public Response updateContextID(@Context HttpServletRequest req, JsonNode jsonNode) throws InterruptedException, CSErrorException, IOException, ClassNotFoundException { - ContextID contextID = getContextIDFromJsonNode(jsonNode); - if (StringUtils.isEmpty(contextID.getContextId())) { - throw new CSErrorException(97000, "contxtId cannot be empty"); - } - HttpAnswerJob answerJob = submitRestJob(req, ServiceMethod.UPDATE, contextID); - return Message.messageToResponse(generateResponse(answerJob, "")); - } - - @POST - @Path("resetContextID") - public Response resetContextID(@Context HttpServletRequest req, JsonNode jsonNode) throws InterruptedException, CSErrorException { - if (!jsonNode.has(ContextHTTPConstant.CONTEXT_ID_STR)) { - throw new CSErrorException(97000, ContextHTTPConstant.CONTEXT_ID_STR + " cannot be empty"); - 
} - String id = jsonNode.get(ContextHTTPConstant.CONTEXT_ID_STR).getTextValue(); - HttpAnswerJob answerJob = submitRestJob(req, ServiceMethod.RESET, id); - return Message.messageToResponse(generateResponse(answerJob, "")); - } - - - @POST - @Path("removeContextID") - public Response removeContextID(@Context HttpServletRequest req, JsonNode jsonNode) throws InterruptedException, CSErrorException { - String id = jsonNode.get("contextId").getTextValue(); - if (StringUtils.isEmpty(id)) { - throw new CSErrorException(97000, "contxtId cannot be empty"); - } - HttpAnswerJob answerJob = submitRestJob(req, ServiceMethod.REMOVE, id); - return Message.messageToResponse(generateResponse(answerJob, "")); - } - - @Override - public ServiceType getServiceType() { - return ServiceType.CONTEXT_ID; - } - - @Override - public CsScheduler getScheduler() { - return this.csScheduler; - } - -} diff --git a/contextservice/cs-server/src/main/java/com/webank/wedatasphere/linkis/cs/server/restful/ContextListenerRestfulApi.java b/contextservice/cs-server/src/main/java/com/webank/wedatasphere/linkis/cs/server/restful/ContextListenerRestfulApi.java deleted file mode 100644 index 5b9fa70edc145d301a4c2635637687cebc497178..0000000000000000000000000000000000000000 --- a/contextservice/cs-server/src/main/java/com/webank/wedatasphere/linkis/cs/server/restful/ContextListenerRestfulApi.java +++ /dev/null @@ -1,99 +0,0 @@ -/* - * Copyright 2019 WeBank - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package com.webank.wedatasphere.linkis.cs.server.restful; - -import com.webank.wedatasphere.linkis.cs.common.entity.listener.CommonContextIDListenerDomain; -import com.webank.wedatasphere.linkis.cs.common.entity.listener.CommonContextKeyListenerDomain; -import com.webank.wedatasphere.linkis.cs.common.entity.listener.ContextIDListenerDomain; -import com.webank.wedatasphere.linkis.cs.common.entity.source.ContextID; -import com.webank.wedatasphere.linkis.cs.common.entity.source.ContextKey; -import com.webank.wedatasphere.linkis.cs.common.exception.CSErrorException; -import com.webank.wedatasphere.linkis.cs.server.enumeration.ServiceMethod; -import com.webank.wedatasphere.linkis.cs.server.enumeration.ServiceType; -import com.webank.wedatasphere.linkis.cs.server.scheduler.CsScheduler; -import com.webank.wedatasphere.linkis.cs.server.scheduler.HttpAnswerJob; -import com.webank.wedatasphere.linkis.server.Message; -import org.codehaus.jackson.JsonNode; -import org.codehaus.jackson.map.ObjectMapper; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.stereotype.Component; - -import javax.servlet.http.HttpServletRequest; -import javax.ws.rs.Consumes; -import javax.ws.rs.POST; -import javax.ws.rs.Path; -import javax.ws.rs.Produces; -import javax.ws.rs.core.Context; -import javax.ws.rs.core.MediaType; -import javax.ws.rs.core.Response; -import java.io.IOException; - -/** - * Created by patinousward on 2020/2/18. 
- */ -@Component -@Path("/contextservice") -@Produces(MediaType.APPLICATION_JSON) -@Consumes(MediaType.APPLICATION_JSON) -public class ContextListenerRestfulApi implements CsRestfulParent { - - @Autowired - private CsScheduler csScheduler; - - private ObjectMapper objectMapper = new ObjectMapper(); - - @POST - @Path("onBindIDListener") - public Response onBindIDListener(@Context HttpServletRequest req, JsonNode jsonNode) throws InterruptedException, CSErrorException, IOException, ClassNotFoundException { - String source = jsonNode.get("source").getTextValue(); - ContextID contextID = getContextIDFromJsonNode(jsonNode); - ContextIDListenerDomain listener = new CommonContextIDListenerDomain(); - listener.setSource(source); - HttpAnswerJob answerJob = submitRestJob(req, ServiceMethod.BIND, contextID, listener); - return Message.messageToResponse(generateResponse(answerJob, "")); - } - - @POST - @Path("onBindKeyListener") - public Response onBindKeyListener(@Context HttpServletRequest req, JsonNode jsonNode) throws InterruptedException, CSErrorException, IOException, ClassNotFoundException { - String source = jsonNode.get("source").getTextValue(); - ContextID contextID = getContextIDFromJsonNode(jsonNode); - ContextKey contextKey = getContextKeyFromJsonNode(jsonNode); - CommonContextKeyListenerDomain listener = new CommonContextKeyListenerDomain(); - listener.setSource(source); - HttpAnswerJob answerJob = submitRestJob(req, ServiceMethod.BIND, contextID, contextKey, listener); - return Message.messageToResponse(generateResponse(answerJob, "")); - } - - @POST - @Path("heartbeat") - public Response heartbeat(@Context HttpServletRequest req, JsonNode jsonNode) throws InterruptedException, IOException, CSErrorException { - String source = jsonNode.get("source").getTextValue(); - HttpAnswerJob answerJob = submitRestJob(req, ServiceMethod.HEARTBEAT, source); - return Message.messageToResponse(generateResponse(answerJob, "ContextKeyValueBean")); - } - - @Override - public 
ServiceType getServiceType() { - return ServiceType.CONTEXT_LISTENER; - } - - @Override - public CsScheduler getScheduler() { - return this.csScheduler; - } -} diff --git a/contextservice/cs-server/src/main/java/com/webank/wedatasphere/linkis/cs/server/restful/ContextRestfulApi.java b/contextservice/cs-server/src/main/java/com/webank/wedatasphere/linkis/cs/server/restful/ContextRestfulApi.java deleted file mode 100644 index ea2e1cf8f657c891d58e7d8c7ba4ba9af757f636..0000000000000000000000000000000000000000 --- a/contextservice/cs-server/src/main/java/com/webank/wedatasphere/linkis/cs/server/restful/ContextRestfulApi.java +++ /dev/null @@ -1,172 +0,0 @@ -/* - * Copyright 2019 WeBank - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package com.webank.wedatasphere.linkis.cs.server.restful; - -import com.webank.wedatasphere.linkis.cs.common.entity.enumeration.ContextType; -import com.webank.wedatasphere.linkis.cs.common.entity.source.ContextID; -import com.webank.wedatasphere.linkis.cs.common.entity.source.ContextKey; -import com.webank.wedatasphere.linkis.cs.common.entity.source.ContextKeyValue; -import com.webank.wedatasphere.linkis.cs.common.entity.source.ContextValue; -import com.webank.wedatasphere.linkis.cs.common.exception.CSErrorException; -import com.webank.wedatasphere.linkis.cs.common.protocol.ContextHTTPConstant; -import com.webank.wedatasphere.linkis.cs.server.enumeration.ServiceMethod; -import com.webank.wedatasphere.linkis.cs.server.enumeration.ServiceType; -import com.webank.wedatasphere.linkis.cs.server.scheduler.CsScheduler; -import com.webank.wedatasphere.linkis.cs.server.scheduler.HttpAnswerJob; -import com.webank.wedatasphere.linkis.server.Message; -import org.codehaus.jackson.JsonNode; -import org.codehaus.jackson.map.ObjectMapper; -import org.codehaus.jackson.type.TypeReference; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.stereotype.Component; - -import javax.servlet.http.HttpServletRequest; -import javax.ws.rs.Consumes; -import javax.ws.rs.POST; -import javax.ws.rs.Path; -import javax.ws.rs.Produces; -import javax.ws.rs.core.Context; -import javax.ws.rs.core.MediaType; -import javax.ws.rs.core.Response; -import java.io.IOException; -import java.util.Map; - - -/** - * Created by patinousward on 2020/2/18. 
- */ -@Component -@Path("contextservice") -@Produces(MediaType.APPLICATION_JSON) -@Consumes(MediaType.APPLICATION_JSON) -public class ContextRestfulApi implements CsRestfulParent { - - private static final Logger LOGGER = LoggerFactory.getLogger(ContextRestfulApi.class); - - @Autowired - private CsScheduler csScheduler; - - private ObjectMapper objectMapper = new ObjectMapper(); - - @POST - @Path("getContextValue") - public Response getContextValue(@Context HttpServletRequest req, JsonNode jsonNode) throws InterruptedException, CSErrorException, IOException, ClassNotFoundException { - ContextID contextID = getContextIDFromJsonNode(jsonNode); - ContextKey contextKey = getContextKeyFromJsonNode(jsonNode); - HttpAnswerJob answerJob = submitRestJob(req, ServiceMethod.GET, contextID, contextKey); - Message message = generateResponse(answerJob, "contextValue"); - return Message.messageToResponse(message); - } - - - @POST - @Path("searchContextValue") - public Response searchContextValue(@Context HttpServletRequest req, JsonNode jsonNode) throws InterruptedException, CSErrorException, IOException, ClassNotFoundException { - ContextID contextID = getContextIDFromJsonNode(jsonNode); - JsonNode condition = jsonNode.get("condition"); - Map conditionMap = objectMapper.convertValue(condition, new TypeReference>() { - }); - HttpAnswerJob answerJob = submitRestJob(req, ServiceMethod.SEARCH, contextID, conditionMap); - Message message = generateResponse(answerJob, "contextKeyValue"); - return Message.messageToResponse(message); - } - -/* @GET - @Path("searchContextValueByCondition") - public Response searchContextValueByCondition(@Context HttpServletRequest req, JsonNode jsonNode) throws InterruptedException { - Condition condition = null; - HttpAnswerJob answerJob = submitRestJob(req, ServiceMethod.SEARCH, condition); - return generateResponse(answerJob,""); - }*/ - - - @POST - @Path("setValueByKey") - public Response setValueByKey(@Context HttpServletRequest req, JsonNode 
jsonNode) throws CSErrorException, IOException, ClassNotFoundException, InterruptedException { - ContextID contextID = getContextIDFromJsonNode(jsonNode); - ContextKey contextKey = getContextKeyFromJsonNode(jsonNode); - ContextValue contextValue = getContextValueFromJsonNode(jsonNode); - HttpAnswerJob answerJob = submitRestJob(req, ServiceMethod.SET, contextID, contextKey, contextValue); - return Message.messageToResponse(generateResponse(answerJob, "")); - } - - @POST - @Path("setValue") - public Response setValue(@Context HttpServletRequest req, JsonNode jsonNode) throws InterruptedException, CSErrorException, IOException, ClassNotFoundException { - ContextID contextID = getContextIDFromJsonNode(jsonNode); - ContextKeyValue contextKeyValue = getContextKeyValueFromJsonNode(jsonNode); - HttpAnswerJob answerJob = submitRestJob(req, ServiceMethod.SET, contextID, contextKeyValue); - return Message.messageToResponse(generateResponse(answerJob, "")); - } - - @POST - @Path("resetValue") - public Response resetValue(@Context HttpServletRequest req, JsonNode jsonNode) throws InterruptedException, CSErrorException, IOException, ClassNotFoundException { - ContextID contextID = getContextIDFromJsonNode(jsonNode); - ContextKey contextKey = getContextKeyFromJsonNode(jsonNode); - HttpAnswerJob answerJob = submitRestJob(req, ServiceMethod.RESET, contextID, contextKey); - return Message.messageToResponse(generateResponse(answerJob, "")); - } - - @POST - @Path("removeValue") - public Response removeValue(@Context HttpServletRequest req, JsonNode jsonNode) throws InterruptedException, CSErrorException, IOException, ClassNotFoundException { - ContextID contextID = getContextIDFromJsonNode(jsonNode); - ContextKey contextKey = getContextKeyFromJsonNode(jsonNode); - HttpAnswerJob answerJob = submitRestJob(req, ServiceMethod.REMOVE, contextID, contextKey); - return Message.messageToResponse(generateResponse(answerJob, "")); - } - - @POST - @Path("removeAllValue") - public Response 
removeAllValue(@Context HttpServletRequest req, JsonNode jsonNode) throws InterruptedException, CSErrorException, IOException, ClassNotFoundException { - ContextID contextID = getContextIDFromJsonNode(jsonNode); - HttpAnswerJob answerJob = submitRestJob(req, ServiceMethod.REMOVEALL, contextID); - return Message.messageToResponse(generateResponse(answerJob, "")); - } - - @POST - @Path("removeAllValueByKeyPrefixAndContextType") - public Response removeAllValueByKeyPrefixAndContextType(@Context HttpServletRequest req, JsonNode jsonNode) throws InterruptedException, CSErrorException, IOException, ClassNotFoundException { - ContextID contextID = getContextIDFromJsonNode(jsonNode); - String contextType = jsonNode.get(ContextHTTPConstant.CONTEXT_KEY_TYPE_STR).getTextValue(); - String keyPrefix = jsonNode.get(ContextHTTPConstant.CONTEXT_KEY_PREFIX_STR).getTextValue(); - HttpAnswerJob answerJob = submitRestJob(req, ServiceMethod.REMOVEALL, contextID, ContextType.valueOf(contextType),keyPrefix); - return Message.messageToResponse(generateResponse(answerJob, "")); - } - - @POST - @Path("removeAllValueByKeyPrefix") - public Response removeAllValueByKeyPrefix(@Context HttpServletRequest req, JsonNode jsonNode) throws InterruptedException, CSErrorException, IOException, ClassNotFoundException { - ContextID contextID = getContextIDFromJsonNode(jsonNode); - String keyPrefix = jsonNode.get(ContextHTTPConstant.CONTEXT_KEY_PREFIX_STR).getTextValue(); - HttpAnswerJob answerJob = submitRestJob(req, ServiceMethod.REMOVEALL, contextID,keyPrefix); - return Message.messageToResponse(generateResponse(answerJob, "")); - } - - @Override - public ServiceType getServiceType() { - return ServiceType.CONTEXT; - } - - @Override - public CsScheduler getScheduler() { - return this.csScheduler; - } -} diff --git a/contextservice/cs-server/src/main/java/com/webank/wedatasphere/linkis/cs/server/restful/CsRestfulParent.java 
b/contextservice/cs-server/src/main/java/com/webank/wedatasphere/linkis/cs/server/restful/CsRestfulParent.java deleted file mode 100644 index f34c4825846005feac2ff9a49e1b48ce9dcce964..0000000000000000000000000000000000000000 --- a/contextservice/cs-server/src/main/java/com/webank/wedatasphere/linkis/cs/server/restful/CsRestfulParent.java +++ /dev/null @@ -1,121 +0,0 @@ -/* - * Copyright 2019 WeBank - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package com.webank.wedatasphere.linkis.cs.server.restful; - -import com.webank.wedatasphere.linkis.cs.common.entity.source.ContextID; -import com.webank.wedatasphere.linkis.cs.common.entity.source.ContextKey; -import com.webank.wedatasphere.linkis.cs.common.entity.source.ContextKeyValue; -import com.webank.wedatasphere.linkis.cs.common.entity.source.ContextValue; -import com.webank.wedatasphere.linkis.cs.common.exception.CSErrorException; -import com.webank.wedatasphere.linkis.cs.server.enumeration.ServiceMethod; -import com.webank.wedatasphere.linkis.cs.server.enumeration.ServiceType; -import com.webank.wedatasphere.linkis.cs.server.protocol.HttpRequestProtocol; -import com.webank.wedatasphere.linkis.cs.server.protocol.HttpResponseProtocol; -import com.webank.wedatasphere.linkis.cs.server.protocol.RestResponseProtocol; -import com.webank.wedatasphere.linkis.cs.server.scheduler.CsScheduler; -import com.webank.wedatasphere.linkis.cs.server.scheduler.HttpAnswerJob; -import 
com.webank.wedatasphere.linkis.cs.server.scheduler.RestJobBuilder; -import com.webank.wedatasphere.linkis.cs.server.util.CsUtils; -import com.webank.wedatasphere.linkis.server.Message; -import com.webank.wedatasphere.linkis.server.security.SecurityFilter; -import org.codehaus.jackson.JsonNode; - -import javax.servlet.http.HttpServletRequest; -import java.io.IOException; -import java.util.ArrayList; -import java.util.List; - -/** - * Created by patinousward on 2020/2/22. - */ -public interface CsRestfulParent { - - default HttpAnswerJob submitRestJob(HttpServletRequest req, - ServiceMethod method, - Object... objects) throws InterruptedException { - // TODO: 2020/3/3 单例 - HttpAnswerJob job = (HttpAnswerJob) new RestJobBuilder().build(getServiceType()); - HttpRequestProtocol protocol = job.getRequestProtocol(); - protocol.setUsername(SecurityFilter.getLoginUsername(req)); - protocol.setServiceMethod(method); - protocol.setRequestObjects(objects); - getScheduler().sumbit(job); - return job; - } - - default Message generateResponse(HttpAnswerJob job, String responseKey) throws CSErrorException { - HttpResponseProtocol responseProtocol = job.getResponseProtocol(); - if (responseProtocol instanceof RestResponseProtocol) { - Message message = ((RestResponseProtocol) responseProtocol).get(); - if (message == null) { - return Message.error("job execute timeout"); - } - int status = ((RestResponseProtocol) responseProtocol).get().getStatus(); - if (status == 1) { - //failed - return ((RestResponseProtocol) responseProtocol).get(); - } else if (status == 0) { - Object data = job.getResponseProtocol().getResponseData(); - if (data == null) { - return Message.ok().data(responseKey, null); - } else if (data instanceof List && ((List) data).isEmpty()) { - return Message.ok().data(responseKey, new String[]{}); - } else if (data instanceof List) { - ArrayList strings = new ArrayList<>(); - for (Object d : (List) data) { - strings.add(CsUtils.serialize(d)); - } - return 
Message.ok().data(responseKey, strings); - } else { - String dataStr = CsUtils.serialize(data); - return Message.ok().data(responseKey, dataStr); - } - } else { - - } - } - return Message.ok(); - } - - ServiceType getServiceType(); - - CsScheduler getScheduler(); - - default ContextID getContextIDFromJsonNode(JsonNode jsonNode) throws CSErrorException, IOException, ClassNotFoundException { - return deserialize(jsonNode, "contextID"); - } - - default T deserialize(JsonNode jsonNode, String key) throws CSErrorException { - String str = jsonNode.get(key).getTextValue(); - return (T) CsUtils.SERIALIZE.deserialize(str); - } - - default ContextKey getContextKeyFromJsonNode(JsonNode jsonNode) throws CSErrorException, IOException, ClassNotFoundException { - return deserialize(jsonNode, "contextKey"); - } - - default ContextValue getContextValueFromJsonNode(JsonNode jsonNode) throws CSErrorException, IOException, ClassNotFoundException { - return deserialize(jsonNode, "contextValue"); - } - - default ContextKeyValue getContextKeyValueFromJsonNode(JsonNode jsonNode) throws CSErrorException, IOException, ClassNotFoundException { - return deserialize(jsonNode, "contextKeyValue"); - } - - - -} diff --git a/contextservice/cs-server/src/main/java/com/webank/wedatasphere/linkis/cs/server/scheduler/linkisImpl/CsExecutorManager.java b/contextservice/cs-server/src/main/java/com/webank/wedatasphere/linkis/cs/server/scheduler/linkisImpl/CsExecutorManager.java deleted file mode 100644 index 08e55c9c5c4e092b91cceecf43c8a86984c3c0cf..0000000000000000000000000000000000000000 --- a/contextservice/cs-server/src/main/java/com/webank/wedatasphere/linkis/cs/server/scheduler/linkisImpl/CsExecutorManager.java +++ /dev/null @@ -1,71 +0,0 @@ -/* - * Copyright 2019 WeBank - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package com.webank.wedatasphere.linkis.cs.server.scheduler.linkisImpl; - -import com.webank.wedatasphere.linkis.scheduler.executer.Executor; -import com.webank.wedatasphere.linkis.scheduler.executer.ExecutorManager; -import com.webank.wedatasphere.linkis.scheduler.listener.ExecutorListener; -import com.webank.wedatasphere.linkis.scheduler.queue.SchedulerEvent; -import scala.Option; -import scala.Some; -import scala.concurrent.duration.Duration; - -/** - * Created by patinousward on 2020/2/18. - */ -public class CsExecutorManager extends ExecutorManager { - - @Override - public void setExecutorListener(ExecutorListener executorListener) { - - } - - @Override - public Executor createExecutor(SchedulerEvent event) { - return new CsExecutor(); - } - - @Override - public Option askExecutor(SchedulerEvent event) { - return new Some<>(createExecutor(event)); - } - - @Override - public Option askExecutor(SchedulerEvent event, Duration wait) { - return askExecutor(event); - } - - @Override - public Option getById(long id) { - return new Some<>(null); - } - - @Override - public Executor[] getByGroup(String groupName) { - return new Executor[0]; - } - - @Override - public void delete(Executor executor) { - - } - - @Override - public void shutdown() { - - } -} diff --git a/contextservice/cs-server/src/main/resources/application.yml b/contextservice/cs-server/src/main/resources/application.yml deleted file mode 100644 index 246846d777658ce4734177403c7b40e398b6cc96..0000000000000000000000000000000000000000 --- 
a/contextservice/cs-server/src/main/resources/application.yml +++ /dev/null @@ -1,38 +0,0 @@ -# -# Copyright 2019 WeBank -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -server: - port: 9004 -spring: - application: - name: cloud-contextservice - - -eureka: - client: - serviceUrl: - defaultZone: http://127.0.0.1:20303/eureka/ - instance: - metadata-map: - test: wedatasphere - -management: - endpoints: - web: - exposure: - include: refresh,info -logging: - config: classpath:log4j2.xml diff --git a/contextservice/cs-server/src/main/resources/linkis.properties b/contextservice/cs-server/src/main/resources/linkis.properties deleted file mode 100644 index 9abef042eb98a343ae127f7d9cbbe77852c408a9..0000000000000000000000000000000000000000 --- a/contextservice/cs-server/src/main/resources/linkis.properties +++ /dev/null @@ -1,32 +0,0 @@ -# -# Copyright 2019 WeBank -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# http://www.apache.org/licenses/LICENSE-2.0 -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# - -wds.linkis.test.mode=true - -wds.linkis.server.mybatis.datasource.url=jdbc:mysql://127.0.0.1:3306/ide_gz_bdap_sit_01?characterEncoding=UTF-8 -wds.linkis.server.mybatis.datasource.username= -wds.linkis.server.mybatis.datasource.password= - - -wds.linkis.log.clear=true -wds.linkis.server.version=v1 - -##restful -wds.linkis.server.restful.scan.packages=com.webank.wedatasphere.linkis.cs.server.restful - -##mybatis -wds.linkis.server.mybatis.mapperLocations=classpath*:com\\webank\\wedatasphere\\linkis\\cs\\persistence\\dao\\impl\\*.xml - -wds.linkis.server.mybatis.typeAliasesPackage=com.webank.wedatasphere.linkis.cs.persistence.entity - -wds.linkis.server.mybatis.BasePackage=com.webank.wedatasphere.linkis.cs.persistence.dao diff --git a/contextservice/cs-server/src/main/resources/log4j.properties b/contextservice/cs-server/src/main/resources/log4j.properties deleted file mode 100644 index 0807e6087704a1a31f2c6d41042fec441d301a85..0000000000000000000000000000000000000000 --- a/contextservice/cs-server/src/main/resources/log4j.properties +++ /dev/null @@ -1,37 +0,0 @@ -# -# Copyright 2019 WeBank -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -# - -### set log levels ### - -log4j.rootCategory=INFO,console - -log4j.appender.console=org.apache.log4j.ConsoleAppender -log4j.appender.console.Threshold=INFO -log4j.appender.console.layout=org.apache.log4j.PatternLayout -#log4j.appender.console.layout.ConversionPattern= %d{ISO8601} %-5p (%t) [%F:%M(%L)] - %m%n -log4j.appender.console.layout.ConversionPattern= %d{ISO8601} %-5p (%t) %p %c{1} - %m%n - - -log4j.appender.com.webank.bdp.ide.core=org.apache.log4j.DailyRollingFileAppender -log4j.appender.com.webank.bdp.ide.core.Threshold=INFO -log4j.additivity.com.webank.bdp.ide.core=false -log4j.appender.com.webank.bdp.ide.core.layout=org.apache.log4j.PatternLayout -log4j.appender.com.webank.bdp.ide.core.Append=true -log4j.appender.com.webank.bdp.ide.core.File=logs/linkis.log -log4j.appender.com.webank.bdp.ide.core.layout.ConversionPattern= %d{ISO8601} %-5p (%t) [%F:%M(%L)] - %m%n - -log4j.logger.org.springframework=INFO diff --git a/contextservice/cs-server/src/main/resources/log4j2.xml b/contextservice/cs-server/src/main/resources/log4j2.xml deleted file mode 100644 index 3923cd9f39ff28b9b7c08f01e783fb271d36ee8f..0000000000000000000000000000000000000000 --- a/contextservice/cs-server/src/main/resources/log4j2.xml +++ /dev/null @@ -1,39 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - diff --git a/contextservice/cs-ujes-client/pom.xml b/contextservice/cs-ujes-client/pom.xml deleted file mode 100644 index d422492cae50ecb3d048f6f39c777dec259382a9..0000000000000000000000000000000000000000 --- a/contextservice/cs-ujes-client/pom.xml +++ /dev/null @@ -1,62 +0,0 @@ - - - - - - linkis - com.webank.wedatasphere.linkis - 0.10.0 - ../../pom.xml - - 4.0.0 - - linkis-cs-ujes-client - - - - com.webank.wedatasphere.linkis - linkis-cs-client - - - com.webank.wedatasphere.linkis - linkis-storage - provided - - - - - - - org.apache.maven.plugins - maven-deploy-plugin - - - net.alchim31.maven - scala-maven-plugin - - - org.apache.maven.plugins - maven-jar-plugin - - - - - 
${basedir}/src/main/resources - - - - - \ No newline at end of file diff --git a/core/cloudModule/pom.xml b/core/cloudModule/pom.xml deleted file mode 100644 index 698908fb0ea8f7add54906565f65dded4cb1251a..0000000000000000000000000000000000000000 --- a/core/cloudModule/pom.xml +++ /dev/null @@ -1,549 +0,0 @@ - - - - - - linkis - com.webank.wedatasphere.linkis - 0.10.0 - ../../pom.xml - - 4.0.0 - - linkis-module - - - - com.webank.wedatasphere.linkis - linkis-common - - - json4s-jackson_2.11 - org.json4s - - - jackson-core - com.fasterxml.jackson.core - - - - - - org.springframework.boot - spring-boot - ${spring.boot.version} - - - org.apache.logging.log4j - log4j-api - - - org.apache.logging.log4j - log4j-core - - - - - org.springframework.cloud - spring-cloud-starter-eureka - ${spring.eureka.version} - - - jsr311-api - javax.ws.rs - - - jackson-annotations - com.fasterxml.jackson.core - - - jackson-core - com.fasterxml.jackson.core - - - jackson-databind - com.fasterxml.jackson.core - - - httpclient - org.apache.httpcomponents - - - org.springframework.cloud - spring-cloud-commons - - - org.springframework.cloud - spring-cloud-starter - - - org.springframework.cloud - spring-cloud-starter-openfeign - - - org.springframework.cloud - spring-cloud-starter-netflix-archaius - - - org.springframework.cloud - spring-cloud-starter-netflix-ribbon - - - guava - com.google.guava - - - org.springframework.boot - spring-boot-autoconfigure - - - org.springframework.boot - spring-boot - - - org.springframework.boot - spring-boot-starter-web - - - org.springframework.cloud - spring-cloud-starter-netflix-eureka-client - - - - - - org.springframework.cloud - spring-cloud-starter-netflix-eureka-client - 2.0.0.RELEASE - - - jackson-databind - com.fasterxml.jackson.core - - - jackson-annotations - com.fasterxml.jackson.core - - - jackson-core - com.fasterxml.jackson.core - - - - - - org.springframework.boot - spring-boot-starter-jetty - ${spring.boot.version} - - - asm - org.ow2.asm - 
- - javax.annotation-api - javax.annotation - - - - - org.springframework.boot - spring-boot-starter-web - - - org.springframework.boot - spring-boot-starter-tomcat - - - hibernate-validator - org.hibernate.validator - - - jackson-databind - com.fasterxml.jackson.core - - - jackson-datatype-jdk8 - com.fasterxml.jackson.datatype - - - jackson-datatype-jsr310 - com.fasterxml.jackson.datatype - - - jackson-module-parameter-names - com.fasterxml.jackson.module - - - ${spring.boot.version} - - - - org.springframework.boot - spring-boot-starter - ${spring.boot.version} - - - org.springframework.boot - spring-boot-starter-logging - - - - - org.springframework.boot - spring-boot-starter-log4j2 - ${spring.boot.version} - - - org.springframework.boot - spring-boot-starter-actuator - ${spring.boot.version} - - - org.springframework.boot - spring-boot-starter-logging - - - jackson-databind - com.fasterxml.jackson.core - - - jackson-datatype-jsr310 - com.fasterxml.jackson.datatype - - - - - - org.springframework.cloud - spring-cloud-starter-config - 2.0.0.RELEASE - - - org.springframework.boot - spring-boot-starter-logging - - - jackson-databind - com.fasterxml.jackson.core - - - jackson-annotations - com.fasterxml.jackson.core - - - spring-cloud-commons - org.springframework.cloud - - - org.springframework.cloud - spring-cloud-context - - - - - - org.springframework.boot - spring-boot-starter-aop - - - org.springframework.boot - spring-boot-starter-logging - - - ${spring.boot.version} - - - - mysql - mysql-connector-java - 5.1.49 - - - - org.glassfish.jersey.bundles - jaxrs-ri - 2.21 - - - jersey-container-servlet-core - org.glassfish.jersey.containers - - - jersey-container-servlet - org.glassfish.jersey.containers - - - javax.annotation-api - javax.annotation - - - - - cglib - cglib - 2.2.2 - - - commons-dbcp - commons-dbcp - 1.4 - - - org.eclipse.jetty - jetty-server - ${jetty.version} - - - org.eclipse.jetty - jetty-webapp - ${jetty.version} - - - 
org.eclipse.jetty.websocket - websocket-server - ${jetty.version} - - - - org.glassfish.jersey.ext - jersey-spring3 - ${jersey.servlet.version} - - - org.springframework - spring - - - org.springframework - spring-core - - - org.springframework - spring-web - - - org.springframework - spring-beans - - - org.springframework - spring-context - - - jersey-server - org.glassfish.jersey.core - - - - - - - com.sun.jersey - jersey-server - 1.19.1 - - - jsr311-api - javax.ws.rs - - - - - com.sun.jersey - jersey-servlet - 1.19.1 - - - org.glassfish.jersey.containers - jersey-container-servlet - ${jersey.servlet.version} - - - jersey-common - org.glassfish.jersey.core - - - jersey-server - org.glassfish.jersey.core - - - - - org.glassfish.jersey.containers - jersey-container-servlet-core - ${jersey.servlet.version} - - - javax.ws.rs-api - javax.ws.rs - - - jersey-common - org.glassfish.jersey.core - - - jersey-server - org.glassfish.jersey.core - - - - - - org.glassfish.jersey.media - jersey-media-json-jackson - ${jersey.version} - - - jackson-annotations - com.fasterxml.jackson.core - - - jackson-core - com.fasterxml.jackson.core - - - jackson-databind - com.fasterxml.jackson.core - - - jersey-common - org.glassfish.jersey.core - - - - - org.glassfish.jersey.media - jersey-media-multipart - ${jersey.version} - - - jersey-common - org.glassfish.jersey.core - - - - - org.glassfish.jersey.ext - jersey-entity-filtering - ${jersey.version} - - - com.sun.jersey - jersey-json - - - jsr311-api - javax.ws.rs - - - 1.19 - - - - com.fasterxml.jackson.core - jackson-databind - ${fasterxml.jackson.version} - - - com.fasterxml.jackson.core - jackson-annotations - ${fasterxml.jackson.version} - - - - org.reflections - reflections - 0.9.10 - - - - com.google.code.gson - gson - - - io.netty - netty-all - ${netty.version} - - - io.netty - netty-transport-native-epoll - ${netty.version} - - - org.json4s - json4s-jackson_${scala.binary.version} - ${json4s.version} - - - org.scala-lang - 
scala-library - - - jackson-databind - com.fasterxml.jackson.core - - - json4s-core_2.11 - org.json4s - - - - - org.springframework.cloud - spring-cloud-starter-openfeign - 2.0.0.RELEASE - - - guava - com.google.guava - - - - - - - - - - - org.apache.maven.plugins - maven-deploy-plugin - - - - net.alchim31.maven - scala-maven-plugin - - - org.apache.maven.plugins - maven-jar-plugin - - - - - - ${basedir}/src/main/resources - - - ${project.artifactId}-${project.version} - - - diff --git a/core/cloudModule/src/main/assembly/distribution.xml b/core/cloudModule/src/main/assembly/distribution.xml deleted file mode 100644 index 865b4a9a349bdb3fe078d3b3253d09787c2374c1..0000000000000000000000000000000000000000 --- a/core/cloudModule/src/main/assembly/distribution.xml +++ /dev/null @@ -1,49 +0,0 @@ - - - - module - - zip - - true - module - - - - - - lib - true - true - false - true - true - - io.netty:netty-buffer* - io.netty:netty-codec* - io.netty:netty-common* - io.netty:netty-handler* - io.netty:netty-transport* - - - - - - diff --git a/core/cloudModule/src/main/scala/com/webank/wedatasphere/linkis/server/package.scala b/core/cloudModule/src/main/scala/com/webank/wedatasphere/linkis/server/package.scala deleted file mode 100644 index 5f00528cd565f03adf267d802f64de0a4450eee9..0000000000000000000000000000000000000000 --- a/core/cloudModule/src/main/scala/com/webank/wedatasphere/linkis/server/package.scala +++ /dev/null @@ -1,112 +0,0 @@ -/* - * Copyright 2019 WeBank - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package com.webank.wedatasphere.linkis - -import java.util - -import javax.servlet.http.HttpServletRequest -import com.webank.wedatasphere.linkis.common.exception.{ErrorException, ExceptionManager, FatalException, WarnException} -import com.webank.wedatasphere.linkis.common.utils.Utils -import com.webank.wedatasphere.linkis.server.exception.{BDPServerErrorException, NonLoginException} -import com.webank.wedatasphere.linkis.server.security.SecurityFilter -import org.apache.commons.lang.StringUtils -import org.apache.commons.lang.exception.ExceptionUtils -import org.slf4j.Logger - -import scala.collection.{JavaConversions, mutable} - -/** - * Created by enjoyyin on 2018/5/2. - */ -package object server { - - val EXCEPTION_MSG = "errorMsg" - type JMap[K, V] = java.util.HashMap[K, V] - - implicit def getUser(req: HttpServletRequest): String = SecurityFilter.getLoginUsername(req) - - def validateFailed(message: String): Message = Message(status = 2).setMessage(message) - def validate[T](json: util.Map[String, T], keys: String*): Unit = { - keys.foreach(k => if(!json.contains(k) || json.get(k) == null || StringUtils.isEmpty(json.get(k).toString)) - throw new BDPServerErrorException(11001, s"Verification failed, $k cannot be empty!(验证失败,$k 不能为空!)")) - } - def error(message: String): Message = Message.error(message) - implicit def ok(msg: String): Message = Message.ok(msg) - implicit def error(t: Throwable): Message = Message.error(t) - implicit def error(e: (String, Throwable)): Message = Message.error(e) - implicit def error(msg: String, t: Throwable): Message = Message.error(msg -> t) - // def tryCatch[T](tryOp: => T)(catchOp: Throwable => T): T = Utils.tryCatch(tryOp)(catchOp) -// def tryCatch(tryOp: => Message)(catchOp: Throwable => Message): Message = Utils.tryCatch(tryOp){ -// case nonLogin: NonLoginException => Message.noLogin(msg = 
nonLogin.getMessage) -// case t => catchOp(t) -// } - def catchMsg(tryOp: => Message)(msg: String)(implicit log: Logger): Message = Utils.tryCatch(tryOp){ - case fatal: FatalException => - log.error("Fatal Error, system exit...", fatal) - System.exit(fatal.getErrCode) - Message.error("Fatal Error, system exit...") - case nonLogin: NonLoginException => - val message = Message.noLogin(nonLogin.getMessage) - message.data(EXCEPTION_MSG, nonLogin.toMap) - message - case error: ErrorException => - val cause = error.getCause - val errorMsg = cause match { - case t: ErrorException => s"error code(错误码): ${t.getErrCode}, error message(错误信息): ${t.getDesc}." - case _ => s"error code(错误码): ${error.getErrCode}, error message(错误信息): ${error.getDesc}." - } - log.error(errorMsg, error) - val message = Message.error(errorMsg) - message.data(EXCEPTION_MSG, error.toMap) - message - case warn: WarnException => - val warnMsg = s"Warning code(警告码): ${warn.getErrCode}, Warning message(警告信息): ${warn.getDesc}." - log.warn(warnMsg, warn) - val message = Message.warn(warnMsg) - message.data(EXCEPTION_MSG, warn.toMap) - message - case t => - log.error(msg, t) - val errorMsg = ExceptionUtils.getRootCauseMessage(t) - val message = if(StringUtils.isNotEmpty(errorMsg) && "operation failed(操作失败)" != msg) error(msg + "!the reason(原因):" + errorMsg) - else if(StringUtils.isNotEmpty(errorMsg)) error(errorMsg) else error(msg) - message.data(EXCEPTION_MSG, ExceptionManager.unknownException(message.getMessage)) - } - def catchIt(tryOp: => Message)(implicit log: Logger): Message = catchMsg(tryOp)("operation failed(操作失败)s") - implicit def toScalaBuffer[T](list: util.List[T]): mutable.Buffer[T] = JavaConversions.asScalaBuffer(list) - implicit def toScalaMap[K, V](map: util.Map[K, V]): mutable.Map[K, V] = JavaConversions.mapAsScalaMap(map) - implicit def toJavaList[T](list: mutable.Buffer[T]): util.List[T] = { - val arrayList = new util.ArrayList[T] - list.foreach(arrayList.add) - arrayList - } - implicit def 
toJavaMap[K, V](map: mutable.Map[K, V]): JMap[K, V] = { - val hashMap = new util.HashMap[K, V]() - map.foreach(m => hashMap.put(m._1, m._2)) - hashMap - } - implicit def toJavaMap[K, V](map: Map[K, V]): JMap[K, V] = { - val hashMap = new util.HashMap[K, V]() - map.foreach(m => hashMap.put(m._1, m._2)) - hashMap - } - implicit def asString(mapWithKey: (util.Map[String, Object], String)): String = mapWithKey._1.get(mapWithKey._2).asInstanceOf[String] - implicit def getString(mapWithKey: (util.Map[String, String], String)): String = mapWithKey._1.get(mapWithKey._2) - implicit def asInt(map: util.Map[String, Object], key: String): Int = map.get(key).asInstanceOf[Int] - implicit def asBoolean(mapWithKey: (util.Map[String, Object], String)): Boolean = mapWithKey._1.get(mapWithKey._2).asInstanceOf[Boolean] - -} diff --git a/core/cloudModule/src/main/scala/com/webank/wedatasphere/linkis/server/security/SecurityFilter.scala b/core/cloudModule/src/main/scala/com/webank/wedatasphere/linkis/server/security/SecurityFilter.scala deleted file mode 100644 index 1d01cd60d9dcdb777565f24dba918bb329ff2186..0000000000000000000000000000000000000000 --- a/core/cloudModule/src/main/scala/com/webank/wedatasphere/linkis/server/security/SecurityFilter.scala +++ /dev/null @@ -1,150 +0,0 @@ -/* - * Copyright 2019 WeBank - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package com.webank.wedatasphere.linkis.server.security - -import java.text.DateFormat -import java.util.{Date, Locale} - -import com.webank.wedatasphere.linkis.common.conf.{CommonVars, Configuration} -import com.webank.wedatasphere.linkis.common.utils.{Logging, RSAUtils, Utils} -import com.webank.wedatasphere.linkis.server.conf.ServerConfiguration -import com.webank.wedatasphere.linkis.server.exception.{IllegalUserTicketException, LoginExpireException, NonLoginException} -import com.webank.wedatasphere.linkis.server.security.SSOUtils.sslEnable -import com.webank.wedatasphere.linkis.server.{Message, _} -import javax.servlet._ -import javax.servlet.http.{Cookie, HttpServletRequest, HttpServletResponse} -import org.apache.commons.lang.StringUtils - -/** - * Created by enjoyyin on 2018/1/9. - */ -class SecurityFilter extends Filter with Logging { - private val refererValidate = ServerConfiguration.BDP_SERVER_SECURITY_REFERER_VALIDATE.getValue - private val localAddress = ServerConfiguration.BDP_SERVER_ADDRESS.getValue - protected val testUser = ServerConfiguration.BDP_TEST_USER.getValue - - override def init(filterConfig: FilterConfig): Unit = {} - - private def filterResponse(message: Message)(implicit response: HttpServletResponse): Unit = { - response.setHeader("Content-Type", "application/json;charset=UTF-8") - response.setStatus(Message.messageToHttpStatus(message)) - response.getOutputStream.print(message) - response.getOutputStream.flush() - } - - def doFilter(request: HttpServletRequest)(implicit response: HttpServletResponse): Boolean = { - addAccessHeaders(response) - if(refererValidate) { - //Security certification support, referer limited(安全认证支持,referer限定) - val referer = request.getHeader("Referer") - if(StringUtils.isNotEmpty(referer) && !referer.trim.contains(localAddress)) { - filterResponse(validateFailed("不允许的跨站请求!")) - return false - } - //Security certification support, solving verb tampering(安全认证支持,解决动词篡改) - request.getMethod.toUpperCase 
match { - case "GET" | "POST" | "PUT" | "DELETE" | "HEAD" | "TRACE" | "CONNECT" | "OPTIONS" => - case _ => - filterResponse(validateFailed("Do not use HTTP verbs to tamper with!(不可使用HTTP动词篡改!)")) - return false - } - } - - if(request.getRequestURI == ServerConfiguration.BDP_SERVER_SECURITY_SSL_URI.getValue) { - val message = Message.ok("Get success!(获取成功!)").data("enable", SSOUtils.sslEnable) - if(SSOUtils.sslEnable) message.data("publicKey", RSAUtils.getDefaultPublicKey()) - filterResponse(message) - false - } else if(request.getRequestURI == ServerConfiguration.BDP_SERVER_RESTFUL_LOGIN_URI.getValue) { - true - } else { - val userName = Utils.tryCatch(SecurityFilter.getLoginUser(request)){ - case n: NonLoginException => - if(Configuration.IS_TEST_MODE.getValue) None else { - filterResponse(Message.noLogin(n.getMessage) << request.getRequestURI) - return false - } - case t: Throwable => - SecurityFilter.warn("", t) - throw t - } - if(userName.isDefined) { - true - } else if(Configuration.IS_TEST_MODE.getValue) { - SecurityFilter.info("test mode! 
login for uri: " + request.getRequestURI) - SecurityFilter.setLoginUser(response, testUser) - true - } else { - filterResponse(Message.noLogin("You are not logged in, please login first!(您尚未登录,请先登录!)") << request.getRequestURI) - false - } - } - } - - override def doFilter(servletRequest: ServletRequest, servletResponse: ServletResponse, filterChain: FilterChain): Unit = { - val request = servletRequest.asInstanceOf[HttpServletRequest] - implicit val response = servletResponse.asInstanceOf[HttpServletResponse] - if(doFilter(request)) filterChain.doFilter(servletRequest, servletResponse) - if(SecurityFilter.isRequestIgnoreTimeout(request)) SecurityFilter.removeIgnoreTimeoutSignal(response) - } - - protected def addAccessHeaders(response: HttpServletResponse) { - response.setHeader("Access-Control-Allow-Origin", "*") - response.setHeader("Access-Control-Allow-Credentials", "true") - response.setHeader("Access-Control-Allow-Headers", "authorization,Content-Type") - response.setHeader("Access-Control-Allow-Methods", "POST, GET, OPTIONS, PUT, HEAD, DELETE") - val fullDateFormatEN = DateFormat.getDateTimeInstance(DateFormat.FULL, DateFormat.FULL, new Locale("EN", "en")) - response.setHeader("Date", fullDateFormatEN.format(new Date)) - } - - override def destroy(): Unit = {} -} - -object SecurityFilter extends Logging { - private[linkis] val OTHER_SYSTEM_IGNORE_UM_USER = "dataworkcloud_rpc_user" - private[linkis] val ALLOW_ACCESS_WITHOUT_TIMEOUT = "dataworkcloud_inner_request" - def getLoginUserThrowsExceptionWhenTimeout(req: HttpServletRequest): Option[String] = Option(req.getCookies).flatMap(cs => SSOUtils.getLoginUser(cs)) - .orElse(SSOUtils.getLoginUserIgnoreTimeout(key => Option(req.getHeader(key))).filter(_ == OTHER_SYSTEM_IGNORE_UM_USER)) - def getLoginUser(req: HttpServletRequest): Option[String] = Utils.tryCatch(getLoginUserThrowsExceptionWhenTimeout(req)) { - case _: LoginExpireException => - SSOUtils.getLoginUserIgnoreTimeout(key => 
Option(req.getCookies).flatMap(_.find(_.getName == key).map(_.getValue))).filter(user => user != OTHER_SYSTEM_IGNORE_UM_USER && - isRequestIgnoreTimeout(req)) - case t => throw t - } - def isRequestIgnoreTimeout(req: HttpServletRequest): Boolean = Option(req.getCookies).exists(_.exists(c => c.getName == ALLOW_ACCESS_WITHOUT_TIMEOUT && c.getValue == "true")) - def addIgnoreTimeoutSignal(response: HttpServletResponse): Unit = response.addCookie(ignoreTimeoutSignal()) - def ignoreTimeoutSignal(): Cookie = { - val cookie = new Cookie(ALLOW_ACCESS_WITHOUT_TIMEOUT, "true") - cookie.setMaxAge(-1) - cookie.setPath("/") - if(sslEnable) cookie.setSecure(true) - cookie - } - def removeIgnoreTimeoutSignal(response: HttpServletResponse): Unit = { - val cookie = new Cookie(ALLOW_ACCESS_WITHOUT_TIMEOUT, "false") - cookie.setMaxAge(0) - cookie.setPath("/") - if(sslEnable) cookie.setSecure(true) - response.addCookie(cookie) - } - def getLoginUsername(req: HttpServletRequest): String = getLoginUser(req).getOrElse(throw new IllegalUserTicketException( s"Illegal user token information(非法的用户token信息).")) - def setLoginUser(resp: HttpServletResponse, username: String): Unit = SSOUtils.setLoginUser(c => resp.addCookie(c), username) - def removeLoginUser(req: HttpServletRequest, resp: HttpServletResponse): Unit = { - SSOUtils.removeLoginUser(req.getCookies) - SSOUtils.removeLoginUserByAddCookie(s => resp.addCookie(s)) - } -} \ No newline at end of file diff --git a/core/cloudMybatis/pom.xml b/core/cloudMybatis/pom.xml deleted file mode 100644 index b4eb65b27cd3c532e1019f9314981419bc40bb71..0000000000000000000000000000000000000000 --- a/core/cloudMybatis/pom.xml +++ /dev/null @@ -1,86 +0,0 @@ - - - - - - linkis - com.webank.wedatasphere.linkis - 0.10.0 - ../../pom.xml - - 4.0.0 - jar - - linkis-mybatis - - - - com.webank.wedatasphere.linkis - linkis-module - provided - - - org.mybatis.spring.boot - mybatis-spring-boot-starter - 1.3.2 - - - spring-boot-starter - org.springframework.boot - - 
- spring-boot-autoconfigure - org.springframework.boot - - - spring-beans - org.springframework - - - - - com.github.pagehelper - pagehelper - 5.1.4 - - - - - - org.apache.maven.plugins - maven-deploy-plugin - - - - net.alchim31.maven - scala-maven-plugin - - - org.apache.maven.plugins - maven-jar-plugin - - - - - src/main/resources - - - ${project.artifactId}-${project.version} - - \ No newline at end of file diff --git a/core/cloudProtocol/pom.xml b/core/cloudProtocol/pom.xml deleted file mode 100644 index aa3f83a60d9990212f9c5b5f8ba8ee39f9028d35..0000000000000000000000000000000000000000 --- a/core/cloudProtocol/pom.xml +++ /dev/null @@ -1,63 +0,0 @@ - - - - - - linkis - com.webank.wedatasphere.linkis - 0.10.0 - ../../pom.xml - - 4.0.0 - - linkis-protocol - - - - com.webank.wedatasphere.linkis - linkis-common - provided - - - - - - - org.apache.maven.plugins - maven-deploy-plugin - - - - net.alchim31.maven - scala-maven-plugin - - - org.apache.maven.plugins - maven-jar-plugin - - - - - ${basedir}/src/main/resources - - - ${project.artifactId}-${project.version} - - - \ No newline at end of file diff --git a/core/cloudProtocol/src/main/java/com/webank/wedatasphere/linkis/protocol/query/cache/CacheNotFound.java b/core/cloudProtocol/src/main/java/com/webank/wedatasphere/linkis/protocol/query/cache/CacheNotFound.java deleted file mode 100644 index f0c719e689cb5a6f3c39b5722dfe9737568cc56b..0000000000000000000000000000000000000000 --- a/core/cloudProtocol/src/main/java/com/webank/wedatasphere/linkis/protocol/query/cache/CacheNotFound.java +++ /dev/null @@ -1,4 +0,0 @@ -package com.webank.wedatasphere.linkis.protocol.query.cache; - -public class CacheNotFound implements ResponseReadCache { -} diff --git a/core/cloudProtocol/src/main/java/com/webank/wedatasphere/linkis/protocol/query/cache/CacheTaskResult.java b/core/cloudProtocol/src/main/java/com/webank/wedatasphere/linkis/protocol/query/cache/CacheTaskResult.java deleted file mode 100644 index 
7f36f7d3b548c1f911f608b89c0d3cd11cfe6a57..0000000000000000000000000000000000000000 --- a/core/cloudProtocol/src/main/java/com/webank/wedatasphere/linkis/protocol/query/cache/CacheTaskResult.java +++ /dev/null @@ -1,14 +0,0 @@ -package com.webank.wedatasphere.linkis.protocol.query.cache; - -public class CacheTaskResult implements ResponseReadCache { - - private String resultLocation; - - public CacheTaskResult(String resultLocation) { - this.resultLocation = resultLocation; - } - - public String getResultLocation() { - return resultLocation; - } -} diff --git a/core/cloudProtocol/src/main/java/com/webank/wedatasphere/linkis/protocol/query/cache/FailedToDeleteCache.java b/core/cloudProtocol/src/main/java/com/webank/wedatasphere/linkis/protocol/query/cache/FailedToDeleteCache.java deleted file mode 100644 index 41cdca6ec20fd5d25467ed81904853c1ddee5fc7..0000000000000000000000000000000000000000 --- a/core/cloudProtocol/src/main/java/com/webank/wedatasphere/linkis/protocol/query/cache/FailedToDeleteCache.java +++ /dev/null @@ -1,13 +0,0 @@ -package com.webank.wedatasphere.linkis.protocol.query.cache; - -public class FailedToDeleteCache { - private String errorMessage; - - public FailedToDeleteCache(String errorMessage) { - this.errorMessage = errorMessage; - } - - public String getErrorMessage() { - return errorMessage; - } -} diff --git a/core/cloudProtocol/src/main/java/com/webank/wedatasphere/linkis/protocol/query/cache/RequestDeleteCache.java b/core/cloudProtocol/src/main/java/com/webank/wedatasphere/linkis/protocol/query/cache/RequestDeleteCache.java deleted file mode 100644 index 317eed73ceabdaeaf713f575b9d1bb65eff8e4b7..0000000000000000000000000000000000000000 --- a/core/cloudProtocol/src/main/java/com/webank/wedatasphere/linkis/protocol/query/cache/RequestDeleteCache.java +++ /dev/null @@ -1,28 +0,0 @@ -package com.webank.wedatasphere.linkis.protocol.query.cache; - -import com.webank.wedatasphere.linkis.protocol.query.QueryProtocol; - -public class 
RequestDeleteCache implements QueryProtocol { - - private String executionCode; - private String engineType; - private String user; - - public RequestDeleteCache(String executionCode, String engineType, String user) { - this.executionCode = executionCode; - this.engineType = engineType; - this.user = user; - } - - public String getExecutionCode() { - return executionCode; - } - - public String getEngineType() { - return engineType; - } - - public String getUser() { - return user; - } -} diff --git a/core/cloudProtocol/src/main/java/com/webank/wedatasphere/linkis/protocol/query/cache/RequestReadCache.java b/core/cloudProtocol/src/main/java/com/webank/wedatasphere/linkis/protocol/query/cache/RequestReadCache.java deleted file mode 100644 index 49ed99e2e22d95f111c17c585dcebf3db1a750dd..0000000000000000000000000000000000000000 --- a/core/cloudProtocol/src/main/java/com/webank/wedatasphere/linkis/protocol/query/cache/RequestReadCache.java +++ /dev/null @@ -1,33 +0,0 @@ -package com.webank.wedatasphere.linkis.protocol.query.cache; - -import com.webank.wedatasphere.linkis.protocol.query.QueryProtocol; - -public class RequestReadCache implements QueryProtocol { - private String executionCode; - private String engineType; - private String user; - private Long readCacheBefore; - - public RequestReadCache(String executionCode, String engineType, String user, Long readCacheBefore) { - this.executionCode = executionCode; - this.engineType = engineType; - this.user = user; - this.readCacheBefore = readCacheBefore; - } - - public String getExecutionCode() { - return executionCode; - } - - public String getEngineType() { - return engineType; - } - - public String getUser() { - return user; - } - - public Long getReadCacheBefore() { - return readCacheBefore; - } -} diff --git a/core/cloudProtocol/src/main/java/com/webank/wedatasphere/linkis/protocol/query/cache/ResponseDeleteCache.java 
b/core/cloudProtocol/src/main/java/com/webank/wedatasphere/linkis/protocol/query/cache/ResponseDeleteCache.java deleted file mode 100644 index e9c0e3a9e34151a39d56a143b87312251d77459c..0000000000000000000000000000000000000000 --- a/core/cloudProtocol/src/main/java/com/webank/wedatasphere/linkis/protocol/query/cache/ResponseDeleteCache.java +++ /dev/null @@ -1,6 +0,0 @@ -package com.webank.wedatasphere.linkis.protocol.query.cache; - -import com.webank.wedatasphere.linkis.protocol.query.QueryProtocol; - -public interface ResponseDeleteCache extends QueryProtocol { -} diff --git a/core/cloudProtocol/src/main/java/com/webank/wedatasphere/linkis/protocol/query/cache/ResponseReadCache.java b/core/cloudProtocol/src/main/java/com/webank/wedatasphere/linkis/protocol/query/cache/ResponseReadCache.java deleted file mode 100644 index dbcc84a50efcb2f0cdb3759b4f9ebc47097b7667..0000000000000000000000000000000000000000 --- a/core/cloudProtocol/src/main/java/com/webank/wedatasphere/linkis/protocol/query/cache/ResponseReadCache.java +++ /dev/null @@ -1,6 +0,0 @@ -package com.webank.wedatasphere.linkis.protocol.query.cache; - -import com.webank.wedatasphere.linkis.protocol.query.QueryProtocol; - -public interface ResponseReadCache extends QueryProtocol { -} diff --git a/core/cloudProtocol/src/main/java/com/webank/wedatasphere/linkis/protocol/query/cache/SuccessDeletedCache.java b/core/cloudProtocol/src/main/java/com/webank/wedatasphere/linkis/protocol/query/cache/SuccessDeletedCache.java deleted file mode 100644 index b16098faadc3aabfff9cd02a59ba8d8415698c2e..0000000000000000000000000000000000000000 --- a/core/cloudProtocol/src/main/java/com/webank/wedatasphere/linkis/protocol/query/cache/SuccessDeletedCache.java +++ /dev/null @@ -1,4 +0,0 @@ -package com.webank.wedatasphere.linkis.protocol.query.cache; - -public class SuccessDeletedCache { -} diff --git a/core/cloudProtocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/config/RequestQueryGlobalConfig.scala 
b/core/cloudProtocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/config/RequestQueryGlobalConfig.scala deleted file mode 100644 index 18592fdfdf840329ee22d3c9a761ce8644af8b6c..0000000000000000000000000000000000000000 --- a/core/cloudProtocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/config/RequestQueryGlobalConfig.scala +++ /dev/null @@ -1,30 +0,0 @@ -/* - * Copyright 2019 WeBank - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package com.webank.wedatasphere.linkis.protocol.config - -import com.webank.wedatasphere.linkis.protocol.{CacheableProtocol, RetryableProtocol} - -/** - * Created by enjoyyin on 2018/10/16. 
- */ -trait ConfigProtocol - -case class RequestQueryGlobalConfig(userName:String) extends CacheableProtocol with RetryableProtocol with ConfigProtocol - -case class RequestQueryAppConfig(userName:String,creator:String,appName:String) extends CacheableProtocol with RetryableProtocol with ConfigProtocol - -case class RequestQueryAppConfigWithGlobal(userName:String,creator:String,appName:String,isMerge:Boolean) extends CacheableProtocol with RetryableProtocol with ConfigProtocol diff --git a/core/cloudProtocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/engine/RequestEngineStatus.scala b/core/cloudProtocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/engine/RequestEngineStatus.scala deleted file mode 100644 index e3aeb3e3539f3accef0df8699814d6d08b8e3ef1..0000000000000000000000000000000000000000 --- a/core/cloudProtocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/engine/RequestEngineStatus.scala +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Copyright 2019 WeBank - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package com.webank.wedatasphere.linkis.protocol.engine - -import com.webank.wedatasphere.linkis.protocol.RetryableProtocol - -/** - * - * Request status information from an engine(向某一个engine请求状态信息) - * Created by enjoyyin on 2018/9/27. 
- */ -case class RequestEngineStatus(messageType: Int) extends RetryableProtocol -object RequestEngineStatus { - val Status_Only = 1 - val Status_Overload = 2 - val Status_Concurrent = 3 - val Status_Overload_Concurrent = 4 - val Status_BasicInfo = 5 - val ALL = 6 -} \ No newline at end of file diff --git a/core/cloudProtocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/engine/RequestNewEngine.scala b/core/cloudProtocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/engine/RequestNewEngine.scala deleted file mode 100644 index d8f354c2ed1fb567179533950e117f86675735bc..0000000000000000000000000000000000000000 --- a/core/cloudProtocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/engine/RequestNewEngine.scala +++ /dev/null @@ -1,45 +0,0 @@ -/* - * Copyright 2019 WeBank - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package com.webank.wedatasphere.linkis.protocol.engine - -import java.util - -import com.webank.wedatasphere.linkis.protocol.RetryableProtocol - -/** - * Created by enjoyyin on 2018/9/18. 
- */ -trait RequestEngine extends RetryableProtocol { - val creator: String - val user: String - val properties: util.Map[String, String] //Other parameter information(其他参数信息) -} -object RequestEngine { - private val header = "_req_" - val REQUEST_ENTRANCE_INSTANCE = header + "entrance_instance" -// val ENGINE_MAX_FREE_TIME = header + "engine_max_free_time" -// val ENGINE_MAX_EXECUTE_NUM = header + "engine_max_execute_num" - // val ENGINE_USER = header + "engine_user" - val ENGINE_INIT_SPECIAL_CODE = header + "engine_init_code" - def isRequestEngineProperties(key: String) = key.startsWith(header) -} -trait TimeoutRequestEngine { - val timeout: Long -} -case class RequestNewEngine(creator: String, user: String, properties: util.Map[String, String]) extends RequestEngine -case class TimeoutRequestNewEngine(timeout: Long, user: String, creator: String, properties: util.Map[String, String]) extends RequestEngine with TimeoutRequestEngine -case class RequestKillEngine(instance: String, killApplicationName: String, killInstance: String) \ No newline at end of file diff --git a/core/cloudProtocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/engine/ResponseNewEngine.scala b/core/cloudProtocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/engine/ResponseNewEngine.scala deleted file mode 100644 index 6e72475babacdf6b15ffc54575fed9b97e528600..0000000000000000000000000000000000000000 --- a/core/cloudProtocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/engine/ResponseNewEngine.scala +++ /dev/null @@ -1,60 +0,0 @@ -/* - * Copyright 2019 WeBank - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package com.webank.wedatasphere.linkis.protocol.engine - -import com.webank.wedatasphere.linkis.protocol.{BroadcastProtocol, RetryableProtocol} - -/** - * Created by enjoyyin on 2018/9/18. - */ -trait ResponseEngine extends RetryableProtocol { - val port: Int - val status: Int - val initErrorMsg: String -} - -/** - * engine send to engineManager - * @param pid - */ -case class ResponseEnginePid(port: Int, pid: String) extends RetryableProtocol -/** - * engine send to engineManager - * @param port - * @param status - * @param initErrorMsg - */ -case class ResponseEngineStatusCallback(override val port: Int, - override val status: Int, - override val initErrorMsg: String) extends ResponseEngine - -/** - * engineManager send to entrance - * @param instance - * @param responseEngineStatus - */ - case class ResponseNewEngineStatus(instance: String, responseEngineStatus: ResponseEngineStatusCallback) extends RetryableProtocol - -/** - * engineManager send to entrance - * @param applicationName - * @param instance - */ -case class ResponseNewEngine(applicationName: String, instance: String) extends RetryableProtocol - -case class BroadcastNewEngine(responseNewEngine: ResponseNewEngine, responseEngineStatus: ResponseEngineStatus) - extends BroadcastProtocol \ No newline at end of file diff --git a/core/cloudProtocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/engine/ResponseTaskExecute.scala b/core/cloudProtocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/engine/ResponseTaskExecute.scala deleted file mode 100644 index 
66d7d97b0727d4b2a1a0f2f506f711d6e6436c23..0000000000000000000000000000000000000000 --- a/core/cloudProtocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/engine/ResponseTaskExecute.scala +++ /dev/null @@ -1,52 +0,0 @@ -/* - * Copyright 2019 WeBank - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package com.webank.wedatasphere.linkis.protocol.engine - -import java.util - -import com.webank.wedatasphere.linkis.protocol.{BroadcastProtocol, IRServiceGroupProtocol, RetryableProtocol, UserWithCreator} - -/** - * Created by enjoyyin on 2018/9/14. 
- */ -case class ResponseTaskExecute(execId: String) -case class JobProgressInfo(id: String, totalTasks: Int, runningTasks: Int, failedTasks: Int, succeedTasks: Int) -case class ResponseTaskProgress(execId: String, progress: Float, progressInfo: Array[JobProgressInfo])( - implicit override val userWithCreator: UserWithCreator) extends RetryableProtocol with IRServiceGroupProtocol -case class ResponseEngineLock(lock: String) -case class EngineConcurrentInfo(runningTasks: Int, pendingTasks: Int, succeedTasks: Int, failedTasks: Int) -case class EngineOverloadInfo(maxMemory: Long, usedMemory: Long, systemCPUUsed: Float) -case class ResponseEngineStatusChanged(instance: String, fromState: Int, toState: Int, - overload: EngineOverloadInfo, concurrent: EngineConcurrentInfo) - extends BroadcastProtocol -case class ResponseEngineInfo(createEntranceInstance: String, creator: String, user: String, properties: util.Map[String, String]) -case class ResponseEngineStatus(instance: String, state: Int, overload: EngineOverloadInfo, concurrent: EngineConcurrentInfo, - engineInfo: ResponseEngineInfo) -case class ResponseTaskLog(execId: String, log: String)( - implicit override val userWithCreator: UserWithCreator) extends RetryableProtocol with IRServiceGroupProtocol - -case class ResponseTaskError(execId: String, errorMsg: String)( - implicit override val userWithCreator: UserWithCreator) extends RetryableProtocol with IRServiceGroupProtocol - -case class ResponseTaskStatus(execId: String, state: Int)( - implicit override val userWithCreator: UserWithCreator) extends RetryableProtocol with IRServiceGroupProtocol - -case class ResponseTaskResultSet(execId: String, output: String, alias: String)( - implicit override val userWithCreator: UserWithCreator) extends RetryableProtocol with IRServiceGroupProtocol - -case class ResponseTaskResultSize(execId: String, resultSize: Int)(implicit override val userWithCreator: UserWithCreator) extends RetryableProtocol - with IRServiceGroupProtocol 
\ No newline at end of file diff --git a/core/cloudProtocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/utils/ZuulEntranceUtils.scala b/core/cloudProtocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/utils/ZuulEntranceUtils.scala deleted file mode 100644 index 7e333c1c63d6ddb8f8f249d48048169fdc18ef11..0000000000000000000000000000000000000000 --- a/core/cloudProtocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/utils/ZuulEntranceUtils.scala +++ /dev/null @@ -1,96 +0,0 @@ -/* - * Copyright 2019 WeBank - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package com.webank.wedatasphere.linkis.protocol.utils - -import javafx.beans.binding.LongExpression - -/** - * created by enjoyyin on 2018/11/8 - * Description: - */ -object ZuulEntranceUtils { - - - - def parseExecID(longExecID:String):Array[String] = { - //Add creator to execID while old code is compatible(添加creator到execID,同时老代码兼容) - if(isNumberic(longExecID.substring(0, 6))){ - val creatorLength = Integer.parseInt(longExecID.substring(0,2)) - val executeLength = Integer.parseInt(longExecID.substring(2,4)) - val instanceLength = Integer.parseInt(longExecID.substring(4,6)) - val creator = longExecID.substring(6, 6 + creatorLength) - val executeApplicationName = longExecID.substring(6 + creatorLength, 6 + creatorLength + executeLength) - val instance = longExecID.substring(6 + creatorLength + executeLength, 6 + creatorLength + executeLength + instanceLength) - val shortExecID = longExecID.substring(6 + creatorLength + executeLength + instanceLength, longExecID.length) - return Array(creator, executeApplicationName, instance, shortExecID) - } - val executeLength = Integer.parseInt(longExecID.substring(0,2)) - val instanceLength = Integer.parseInt(longExecID.substring(2,4)) - val executeApplicationName:String = longExecID.substring(4, 4 + executeLength) - val instance:String = longExecID.substring(4 + executeLength, 4 + executeLength + instanceLength) - val shortExecID:String = longExecID.substring(4 + executeLength + instanceLength, longExecID.length) - Array[String](executeApplicationName, instance, shortExecID) - } - - - private def isNumberic(s:String):Boolean = { - s.toCharArray foreach { - c => if (c < 48 || c >57) return false - } - true - } - - /** - * - * @param shortExecID ExecID generated by the scheduler, such as IDE_neiljianliu_0(scheduler生成的ExecID, 如 IDE_neiljianliu_0) - * @param executeApplicationName {dd}{dd}${executeApplicationName}${instance}${shortExecID} - * @return - */ - @Deprecated - def generateExecID(shortExecID:String, 
executeApplicationName:String, instance:String):String = { - val executeLength = getLengthStr(executeApplicationName) - val instanceLength = getLengthStr(instance) - if (shortExecID.split("_").length == 3){ - //Backward compatible(向下兼容) - val creator = shortExecID.split("_")(0) - val creatorLength = getLengthStr(creator) - return creatorLength + executeLength + instanceLength + creator + executeApplicationName + instance + shortExecID - } - executeLength + instanceLength + executeApplicationName + instance + shortExecID - } - - - def generateExecID(shortExecID:String, executeApplicationName:String, instance:String, creator:String):String = { - val creatorLength = getLengthStr(creator) - val executeLength = getLengthStr(executeApplicationName) - val instanceLength = getLengthStr(instance) - creatorLength + executeLength + instanceLength + creator + executeApplicationName + instance + shortExecID - } - - - - - private def getLengthStr(string:String):String = { - val length = string.length - if (length >= 10) String.valueOf(length) else "0" + String.valueOf(length) - } - - def main(args: Array[String]): Unit = { - - } - -} diff --git a/core/cloudRPC/pom.xml b/core/cloudRPC/pom.xml deleted file mode 100644 index 61bc5e2ce0182499058a9d8c9ae25965a748cd6a..0000000000000000000000000000000000000000 --- a/core/cloudRPC/pom.xml +++ /dev/null @@ -1,156 +0,0 @@ - - - - - - linkis - com.webank.wedatasphere.linkis - 0.10.0 - ../../pom.xml - - 4.0.0 - - linkis-cloudRPC - - - - com.webank.wedatasphere.linkis - linkis-protocol - - - com.webank.wedatasphere.linkis - linkis-module - - - org.springframework.cloud - spring-cloud-starter-feign - ${spring.eureka.version} - - - netty-codec - io.netty - - - netty-transport - io.netty - - - jersey-client - com.sun.jersey - - - jersey-apache-client4 - com.sun.jersey.contribs - - - jackson-databind - com.fasterxml.jackson.core - - - jackson-core - com.fasterxml.jackson.core - - - jackson-annotations - com.fasterxml.jackson.core - - - 
httpclient - org.apache.httpcomponents - - - spring-cloud-commons - org.springframework.cloud - - - HdrHistogram - org.hdrhistogram - - - jsr305 - com.google.code.findbugs - - - org.springframework.cloud - spring-cloud-starter - - - org.springframework.cloud - spring-cloud-starter-openfeign - - - - - org.json4s - json4s-jackson_${scala.binary.version} - 3.5.3 - - - jackson-databind - com.fasterxml.jackson.core - - - - - org.springframework.cloud - spring-cloud-starter-openfeign - 2.0.0.RELEASE - - - jackson-databind - com.fasterxml.jackson.core - - - jackson-core - com.fasterxml.jackson.core - - - jackson-annotations - com.fasterxml.jackson.core - - - - - - - - - org.apache.maven.plugins - maven-deploy-plugin - - - - net.alchim31.maven - scala-maven-plugin - - - org.apache.maven.plugins - maven-jar-plugin - - - - - - ${basedir}/src/main/resources - - - ${project.artifactId}-${project.version} - - - \ No newline at end of file diff --git a/core/common/pom.xml b/core/common/pom.xml deleted file mode 100644 index 57a1c38a802fef1fe21a3476cb2da34754486b3f..0000000000000000000000000000000000000000 --- a/core/common/pom.xml +++ /dev/null @@ -1,204 +0,0 @@ - - - - - - linkis - com.webank.wedatasphere.linkis - 0.10.0 - ../../pom.xml - - 4.0.0 - - linkis-common - jar - - - - - - org.scala-lang - scala-library - - - org.scala-lang - scala-compiler - - - org.scala-lang - scala-reflect - - - org.scala-lang - scalap - - - commons-lang - commons-lang - - - commons-io - commons-io - 2.4 - - - commons-collections - commons-collections - - - com.google.guava - guava - - - - com.fasterxml.jackson.core - jackson-databind - ${fasterxml.jackson.version} - - - com.fasterxml.jackson.core - jackson-annotations - ${fasterxml.jackson.version} - - - com.fasterxml.jackson.module - jackson-module-scala_${scala.binary.version} - ${fasterxml.jackson.version} - - - com.google.guava - guava - - - org.scala-lang - scala-library - - - org.scala-lang - scala-reflect - - - paranamer - 
com.thoughtworks.paranamer - - - - - - com.fasterxml.jackson.module - jackson-module-parameter-names - ${fasterxml.jackson.version} - - - - com.fasterxml.jackson.datatype - jackson-datatype-jsr310 - ${fasterxml.jackson.version} - - - - com.fasterxml.jackson.datatype - jackson-datatype-jdk8 - ${fasterxml.jackson.version} - - - - - org.slf4j - slf4j-api - ${slf4j.version} - - - - commons-net - commons-net - 3.1 - - - - commons-codec - commons-codec - 1.10 - - - com.google.code.gson - gson - - - org.json4s - json4s-jackson_2.11 - - - - - org.json4s - json4s-core_${scala.binary.version} - 3.2.11 - - - org.json4s - json4s-jackson_${scala.binary.version} - 3.2.11 - - - jackson-databind - com.fasterxml.jackson.core - - - - - org.json4s - json4s-ast_2.11 - 3.2.11 - - - org.apache.httpcomponents - httpclient - 4.5.4 - - - - - - - - - org.apache.maven.plugins - maven-deploy-plugin - - - - net.alchim31.maven - scala-maven-plugin - - - org.apache.maven.plugins - maven-jar-plugin - - - - - src/main/resources - - - ${project.artifactId}-${project.version} - - - - \ No newline at end of file diff --git a/core/common/src/main/java/com/webank/wedatasphere/linkis/common/io/TestMain.java b/core/common/src/main/java/com/webank/wedatasphere/linkis/common/io/TestMain.java deleted file mode 100644 index abd1f6501732352cb354e17eb4bc2a05d6b054cd..0000000000000000000000000000000000000000 --- a/core/common/src/main/java/com/webank/wedatasphere/linkis/common/io/TestMain.java +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Copyright 2019 WeBank - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package com.webank.wedatasphere.linkis.common.io; - -import java.io.FileInputStream; -import java.io.FileNotFoundException; -import java.net.*; - -/** - * Created by enjoyyin on 2018/10/24. - */ -public class TestMain { - public static void main(String[] args) throws FileNotFoundException, URISyntaxException, MalformedURLException, UnknownHostException { - FsPath fsPath = new FsPath("hdfs:///test"); - System.out.print(fsPath.getPath()); - } -} diff --git a/core/hadoop-common/pom.xml b/core/hadoop-common/pom.xml deleted file mode 100644 index ff289768929a27072829a1d1d4b9fff8465bf4b8..0000000000000000000000000000000000000000 --- a/core/hadoop-common/pom.xml +++ /dev/null @@ -1,243 +0,0 @@ - - - - - - linkis - com.webank.wedatasphere.linkis - 0.10.0 - ../../pom.xml - - 4.0.0 - - linkis-hadoop-common - jar - - - - - com.webank.wedatasphere.linkis - linkis-common - provided - - - - org.apache.hadoop - hadoop-common - ${hadoop.version} - - - log4j - log4j - - - org.mortbay.jetty - jetty - - - org.mortbay.jetty - jetty-util - - - com.sun.jersey - jersey-core - - - com.sun.jersey - jersey-server - - - com.sun.jersey - jersey-json - - - jsr311-api - javax.ws.rs - - - net.java.dev.jets3t - jets3t - - - com.jcraft - jsch - - - com.google.code.findbugs - jsr305 - - - xmlenc - xmlenc - - - net.java.dev.jets3t - jets3t - - - org.apache.avro - avro - - - org.apache.hadoop - hadoop-auth - - - com.jcraft - jsch - - - com.google.code.findbugs - jsr305 - - - servlet-api - javax.servlet - - - org.slf4j - slf4j-log4j12 - - - com.sun.jersey - * - - - org.codehaus.jackson - * - - - commons-codec - commons-codec - - - commons-logging - commons-logging - - - netty - io.netty - - - curator-client - org.apache.curator - - - - - org.apache.hadoop - hadoop-hdfs - ${hadoop.version} - - - io.netty - netty - - - servlet-api - javax.servlet - - - com.google.guava - guava - - - 
com.sun.jersey - jersey-core - - - com.sun.jersey - jersey-server - - - org.slf4j - slf4j-log4j12 - - - com.sun.jersey - * - - - org.codehaus.jackson - * - - - commons-logging - commons-logging - - - netty - io.netty - - - commons-codec - commons-codec - - - - - org.apache.hadoop - hadoop-auth - ${hadoop.version} - - - org.slf4j - slf4j-log4j12 - - - org.apache.httpcomponents - httpclient - - - org.apache.httpcomponents - * - - - commons-codec - commons-codec - - - zookeeper - org.apache.zookeeper - - - - - - - - - org.apache.maven.plugins - maven-deploy-plugin - - - - net.alchim31.maven - scala-maven-plugin - - - org.apache.maven.plugins - maven-jar-plugin - - - - - src/main/resources - - - ${project.artifactId}-${project.version} - - \ No newline at end of file diff --git a/core/httpclient/pom.xml b/core/httpclient/pom.xml deleted file mode 100644 index eea76d854325f006e74ae7d16352ffbe0bd72361..0000000000000000000000000000000000000000 --- a/core/httpclient/pom.xml +++ /dev/null @@ -1,108 +0,0 @@ - - - - - - linkis - com.webank.wedatasphere.linkis - 0.10.0 - ../../pom.xml - - 4.0.0 - - linkis-httpclient - - - - com.webank.wedatasphere.linkis - linkis-common - - - org.apache.httpcomponents - httpclient - ${httpclient.version} - - - org.apache.httpcomponents - httpmime - ${httpmime.version} - - - - org.json4s - json4s-jackson_${scala.binary.version} - ${json4s.version} - - - org.scala-lang - scala-library - - - com.fasterxml.jackson.core - jackson-databind - - - com.fasterxml.jackson.core - jackson-annotations - - - com.fasterxml.jackson.core - jackson-core - - - - - - - - - - org.apache.maven.plugins - maven-deploy-plugin - - - - net.alchim31.maven - scala-maven-plugin - - - org.apache.maven.plugins - maven-jar-plugin - - - - org.apache.maven.plugins - maven-source-plugin - - - attach-sources - - jar - - - - - - - - - ${basedir}/src/main/resources - - - ${project.artifactId}-${project.version} - - - \ No newline at end of file diff --git 
a/core/httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/AbstractHttpClient.scala b/core/httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/AbstractHttpClient.scala deleted file mode 100644 index e610b6379f1b15d5da5ff730fd09f4d2883e1749..0000000000000000000000000000000000000000 --- a/core/httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/AbstractHttpClient.scala +++ /dev/null @@ -1,287 +0,0 @@ -/* - * Copyright 2019 WeBank - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package com.webank.wedatasphere.linkis.httpclient - -import java.util -import java.util.concurrent.TimeUnit - -import com.webank.wedatasphere.linkis.common.conf.Configuration -import com.webank.wedatasphere.linkis.common.io.{Fs, FsPath} -import com.webank.wedatasphere.linkis.common.utils.Utils -import com.webank.wedatasphere.linkis.httpclient.authentication.{AbstractAuthenticationStrategy, AuthenticationAction, HttpAuthentication} -import com.webank.wedatasphere.linkis.httpclient.config.ClientConfig -import com.webank.wedatasphere.linkis.httpclient.discovery.{AbstractDiscovery, Discovery, HeartbeatAction} -import com.webank.wedatasphere.linkis.httpclient.exception.{HttpClientResultException, HttpMessageParseException} -import com.webank.wedatasphere.linkis.httpclient.loadbalancer.{AbstractLoadBalancer, DefaultLoadbalancerStrategy, LoadBalancer} -import com.webank.wedatasphere.linkis.httpclient.request._ -import com.webank.wedatasphere.linkis.httpclient.response._ -import org.apache.commons.io.IOUtils -import org.apache.commons.lang.StringUtils -import org.apache.http.client.entity.UrlEncodedFormEntity -import org.apache.http.client.methods.{CloseableHttpResponse, HttpGet, HttpPost} -import org.apache.http.client.utils.URIBuilder -import org.apache.http.entity.mime.MultipartEntityBuilder -import org.apache.http.entity.{ContentType, StringEntity} -import org.apache.http.impl.client.HttpClients -import org.apache.http.message.BasicNameValuePair -import org.apache.http.util.EntityUtils -import org.apache.http.{HttpException, HttpResponse, _} -import org.json4s.jackson.Serialization.read -import org.json4s.{DefaultFormats, Formats} - -import scala.collection.Iterable -import scala.collection.JavaConversions._ -import scala.concurrent.duration.Duration -import scala.concurrent.{ExecutionContext, ExecutionContextExecutorService} - - -/** - * Created by enjoyyin on 2019/5/20. 
- */ -abstract class AbstractHttpClient(clientConfig: ClientConfig, clientName: String) extends Client { - - protected implicit val formats: Formats = DefaultFormats - protected implicit val executors: ExecutionContext = Utils.newCachedExecutionContext(clientConfig.getMaxConnection, clientName, false) - protected val httpTimeout: Duration = if (clientConfig.getReadTimeout > 0) Duration(clientConfig.getReadTimeout, TimeUnit.MILLISECONDS) - else Duration.Inf - - protected val httpClient = HttpClients.createDefault() - - if (clientConfig.getAuthenticationStrategy != null) clientConfig.getAuthenticationStrategy match { - case auth: AbstractAuthenticationStrategy => auth.setClient(this) - case _ => - } - protected val (discovery, loadBalancer): (Option[Discovery], Option[LoadBalancer]) = - if (this.clientConfig.isDiscoveryEnabled) { - val discovery = Some(createDiscovery()) - discovery.foreach { - case d: AbstractDiscovery => - d.setServerUrl(clientConfig.getServerUrl) - d.setClient(this) - d.setSchedule(clientConfig.getDiscoveryPeriod, clientConfig.getDiscoveryTimeUnit) - case d => d.setServerUrl(clientConfig.getServerUrl) - } - //如果discovery没有启用,那么启用loadBalancer是没有意义的 - val loadBalancer = if (clientConfig.isLoadbalancerEnabled && this.clientConfig.getLoadbalancerStrategy != null) - Some(this.clientConfig.getLoadbalancerStrategy.createLoadBalancer()) - else if (clientConfig.isLoadbalancerEnabled) Some(DefaultLoadbalancerStrategy.createLoadBalancer()) - else None - loadBalancer match { - case Some(lb: AbstractLoadBalancer) => - discovery.foreach(_.addDiscoveryListener(lb)) - case _ => - } - (discovery, loadBalancer) - } else (None, None) - - discovery.foreach(_.start()) - - protected def createDiscovery(): Discovery - - override def execute(requestAction: Action): Result = execute(requestAction, -1) - - override def execute(requestAction: Action, waitTime: Long): Result = { - if(!requestAction.isInstanceOf[HttpAction]) - throw new UnsupportedOperationException("only 
HttpAction supported, but the fact is " + requestAction.getClass) - val action = prepareAction(requestAction.asInstanceOf[HttpAction]) - val response: CloseableHttpResponse = executeHttpAction(action) - responseToResult(response, action) - } - - override def execute(requestAction: Action, resultListener: ResultListener): Unit = { - if (!requestAction.isInstanceOf[HttpAction]) { - throw new UnsupportedOperationException("only HttpAction supported, but the fact is " + requestAction.getClass) - } - val action = prepareAction(requestAction.asInstanceOf[HttpAction]) - val response: CloseableHttpResponse = executeHttpAction(action) - //response.onSuccess{case r => resultListener.onSuccess(responseToResult(r, action))} - //response.onFailure{case t => resultListener.onFailure(t)} - } - - protected def getRequestUrl(suffixUrl: String, requestBody: String): String = { - val urlPrefix = loadBalancer.map(_.chooseServerUrl(requestBody)).getOrElse(clientConfig.getServerUrl) - connectUrl(urlPrefix, suffixUrl) - } - - protected def connectUrl(prefix: String, suffix: String): String = { - val prefixEnd = prefix.endsWith("/") - val suffixStart = suffix.startsWith("/") - if(prefixEnd && suffixStart) prefix.substring(0, prefix.length - 1) + suffix - else if(!prefixEnd && !suffixStart) prefix + "/" + suffix - else prefix + suffix - } - - protected def prepareAction(requestAction: HttpAction): HttpAction = requestAction - - protected def executeHttpAction(requestAction: HttpAction): CloseableHttpResponse = { - var realURL = "" - requestAction match { - case serverUrlAction: ServerUrlAction => - realURL = connectUrl(serverUrlAction.serverUrl, requestAction.getURL) - case _ => - realURL = getRequestUrl(requestAction.getURL, requestAction.getRequestBody) - } - - if (clientConfig.getAuthenticationStrategy != null) clientConfig.getAuthenticationStrategy.login(requestAction, realURL.replaceAll(requestAction.getURL, "")) match { - case authAction: HttpAuthentication => - val cookies = 
authAction.authToCookies - if (cookies != null && cookies.nonEmpty) cookies.foreach(requestAction.addCookie) - val headers = authAction.authToHeaders - if (headers != null && !headers.isEmpty()) { - headers.foreach { case (k, v) => requestAction.addHeader(k.toString(), v.toString()) } - } - case _ => - } - - var response: CloseableHttpResponse = null - requestAction match { - case upload: UploadAction => - val httpPost = new HttpPost(realURL) - val builder = MultipartEntityBuilder.create() - if(upload.inputStreams != null) - upload.inputStreams.foreach { case (k, v) => - builder.addBinaryBody(k, v, ContentType.create("multipart/form-data"), k) - } - upload match { - case get: GetAction => get.getParameters. - retain((k, v) => v != null && k != null). - foreach { case (k, v) => builder.addTextBody(k.toString, v.toString) } - case _ => - } - upload match { - case get: GetAction => get.getHeaders. - retain((k, v) => v != null && k != null). - foreach { case (k, v) => httpPost.addHeader(k.toString, v.toString) } - case _ => - } - val httpEntity = builder.build() - httpPost.setEntity(httpEntity) - response = httpClient.execute(httpPost) - case post: POSTAction => - val httpPost = new HttpPost(realURL) - if (post.getParameters.nonEmpty || post.getFormParams.nonEmpty) { - val nvps = new util.ArrayList[NameValuePair] - if (post.getParameters.nonEmpty) { - post.getParameters.foreach { case (k, v) => nvps.add(new BasicNameValuePair(k, v.toString())) } - } - if (post.getFormParams.nonEmpty) { - post.getFormParams.foreach { case (k, v) => nvps.add(new BasicNameValuePair(k, v.toString())) } - } - httpPost.setEntity(new UrlEncodedFormEntity(nvps)) - } - - if (StringUtils.isNotBlank(post.getRequestPayload)) { - val stringEntity = new StringEntity(post.getRequestPayload, "UTF-8") - stringEntity.setContentEncoding(Configuration.BDP_ENCODING.getValue) - stringEntity.setContentType("application/json") - httpPost.setEntity(stringEntity) - } - - if (requestAction.getHeaders.nonEmpty) { 
- requestAction.getHeaders.foreach { case (k, v) => httpPost.addHeader(k.toString(), v.toString()) } - } - response = httpClient.execute(httpPost) - case get: GetAction => - val builder = new URIBuilder(realURL) - if (!get.getParameters.isEmpty) { - get.getParameters.foreach { case (k, v) => builder.addParameter(k.toString(), v.toString()) } - } - val httpGet = new HttpGet(builder.build()) - if (requestAction.getHeaders.nonEmpty) { - requestAction.getHeaders.foreach { case (k, v) => httpGet.addHeader(k.toString(), v.toString()) } - } - response = httpClient.execute(httpGet); - case _ => - val httpost = new HttpPost(realURL) - val stringEntity = new StringEntity(requestAction.getRequestBody, "UTF-8") - stringEntity.setContentEncoding(Configuration.BDP_ENCODING.getValue) - stringEntity.setContentType("application/json") - httpost.setEntity(stringEntity) - if (requestAction.getHeaders.nonEmpty) { - requestAction.getHeaders.foreach { case (k, v) => httpost.addHeader(k.toString(), v.toString()) } - } - response = httpClient.execute(httpost) - } - response - } - - protected def getFsByUser(user: String, path: FsPath): Fs - - protected def responseToResult(response: HttpResponse, requestAction: Action): Result = { - var entity = response.getEntity - val result = requestAction match { - case download: DownloadAction => - val statusCode = response.getStatusLine.getStatusCode - if (statusCode != 200) { - var responseBody: String = null - if (entity != null) { - responseBody = EntityUtils.toString(entity, "UTF-8") - } - throw new HttpClientResultException(s"request failed! 
ResponseBody is $responseBody.") - } - download.write(entity.getContent) - Result() - case heartbeat: HeartbeatAction => - discovery.map { - case d: AbstractDiscovery => d.getHeartbeatResult(response, heartbeat) - }.getOrElse(throw new HttpMessageParseException("Discovery is not enable, HeartbeatAction is not needed!")) - case auth: AuthenticationAction => - clientConfig.getAuthenticationStrategy match { - case a: AbstractAuthenticationStrategy => a.getAuthenticationResult(response, auth) - case _ => throw new HttpMessageParseException("AuthenticationStrategy is not enable, login is not needed!") - } - case httpAction: HttpAction => - var responseBody: String = null - if (entity != null) { - responseBody = EntityUtils.toString(entity, "UTF-8") - } - httpResponseToResult(response, httpAction, responseBody) - .getOrElse(throw new HttpMessageParseException("cannot parse message: " + responseBody)) - } - result match { - case userAction: UserAction => requestAction match { - case _userAction: UserAction => userAction.setUser(_userAction.getUser) - case _ => - } - case _ => - } - result - } - - protected def httpResponseToResult(response: HttpResponse, requestAction: HttpAction, responseBody: String): Option[Result] - - protected def deserializeResponseBody(response: HttpResponse): Iterable[_] = { - var entity = response.getEntity - var responseBody: String = null - if (entity != null) { - responseBody = EntityUtils.toString(entity, "UTF-8") - } - if (responseBody.startsWith("{") && responseBody.endsWith("}")) - read[Map[String, Object]](responseBody) - else if (responseBody.startsWith("[") && responseBody.endsWith("}")) - read[List[Map[String, Object]]](responseBody) - else if (StringUtils.isEmpty(responseBody)) Map.empty[String, Object] - else if (responseBody.length > 200) throw new HttpException(responseBody.substring(0, 200)) - else throw new HttpException(responseBody) - } - - override def close(): Unit = { - discovery.foreach { - case d: AbstractDiscovery => 
IOUtils.closeQuietly(d) - case _ => - } - httpClient.close() - executors.asInstanceOf[ExecutionContextExecutorService].shutdown() - } -} \ No newline at end of file diff --git a/core/scheduler/pom.xml b/core/scheduler/pom.xml deleted file mode 100644 index fe3b7eaf04c129d678cd7e856350615a86d3d1bc..0000000000000000000000000000000000000000 --- a/core/scheduler/pom.xml +++ /dev/null @@ -1,74 +0,0 @@ - - - - - 4.0.0 - - linkis - com.webank.wedatasphere.linkis - 0.10.0 - ../../pom.xml - - linkis-scheduler - - - - com.webank.wedatasphere.linkis - linkis-common - provided - - - com.webank.wedatasphere.linkis - linkis-protocol - - - - - - - org.apache.maven.plugins - maven-deploy-plugin - - - - net.alchim31.maven - scala-maven-plugin - - - org.apache.maven.plugins - maven-jar-plugin - - - - - ${basedir}/src/main/resources - - - ${project.artifactId}-${project.version} - - - - - product - - 0.0.5 - - - - \ No newline at end of file diff --git a/core/scheduler/src/main/scala/com/webank/wedatasphere/linkis/scheduler/executer/ExecuteResponse.scala b/core/scheduler/src/main/scala/com/webank/wedatasphere/linkis/scheduler/executer/ExecuteResponse.scala deleted file mode 100644 index 34c7c71a248c52c8f84973bc16f8fd80add8def1..0000000000000000000000000000000000000000 --- a/core/scheduler/src/main/scala/com/webank/wedatasphere/linkis/scheduler/executer/ExecuteResponse.scala +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Copyright 2019 WeBank - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package com.webank.wedatasphere.linkis.scheduler.executer - -/** - * Created by enjoyyin on 2018/8/31. - */ -trait ExecuteResponse -trait CompletedExecuteResponse extends ExecuteResponse -case class SuccessExecuteResponse() extends CompletedExecuteResponse -trait OutputExecuteResponse extends ExecuteResponse { - def getOutput: String -} -case class AliasOutputExecuteResponse(alias: String, output: String) extends OutputExecuteResponse { - override def getOutput: String = output -} -case class ErrorExecuteResponse(message: String, t: Throwable) extends CompletedExecuteResponse -case class IncompleteExecuteResponse(message: String) extends ExecuteResponse -trait AsynReturnExecuteResponse extends ExecuteResponse { - def notify(rs: ExecuteResponse => Unit): Unit -} \ No newline at end of file diff --git a/core/scheduler/src/main/scala/com/webank/wedatasphere/linkis/scheduler/executer/Executor.scala b/core/scheduler/src/main/scala/com/webank/wedatasphere/linkis/scheduler/executer/Executor.scala deleted file mode 100644 index e33f45b4de3422078a50d75a5902c5c5368a9328..0000000000000000000000000000000000000000 --- a/core/scheduler/src/main/scala/com/webank/wedatasphere/linkis/scheduler/executer/Executor.scala +++ /dev/null @@ -1,47 +0,0 @@ -/* - * Copyright 2019 WeBank - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package com.webank.wedatasphere.linkis.scheduler.executer - -import java.io.Closeable - -import com.webank.wedatasphere.linkis.protocol.engine.EngineState -import com.webank.wedatasphere.linkis.protocol.engine.EngineState.EngineState - -/** - * Created by enjoyyin on 2018/8/31. - */ -trait Executor extends Closeable { - def getId: Long - def execute(executeRequest: ExecuteRequest): ExecuteResponse - def state: ExecutorState.ExecutorState - - def getExecutorInfo: ExecutorInfo -} -object ExecutorState { - type ExecutorState = EngineState - val Starting = EngineState.Starting - val Idle = EngineState.Idle - val Busy = EngineState.Busy - val ShuttingDown = EngineState.ShuttingDown - val Error = EngineState.Error - val Dead = EngineState.Dead - val Success = EngineState.Success - - def apply(x: Int): ExecutorState = EngineState(x) - def isCompleted(state: ExecutorState) = EngineState.isCompleted(state.asInstanceOf[EngineState]) - def isAvailable(state: ExecutorState) = EngineState.isAvailable(state.asInstanceOf[EngineState]) -} \ No newline at end of file diff --git a/core/scheduler/src/main/scala/com/webank/wedatasphere/linkis/scheduler/executer/ExecutorManager.scala b/core/scheduler/src/main/scala/com/webank/wedatasphere/linkis/scheduler/executer/ExecutorManager.scala deleted file mode 100644 index 3c4722431c94ed425c18a0fb901ca605c3577df6..0000000000000000000000000000000000000000 --- a/core/scheduler/src/main/scala/com/webank/wedatasphere/linkis/scheduler/executer/ExecutorManager.scala +++ /dev/null @@ -1,45 +0,0 @@ -/* - * Copyright 2019 WeBank - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package com.webank.wedatasphere.linkis.scheduler.executer - -import com.webank.wedatasphere.linkis.scheduler.listener.ExecutorListener -import com.webank.wedatasphere.linkis.scheduler.queue.{Job, SchedulerEvent} - -import scala.concurrent.duration.Duration - -/** - * Created by enjoyyin on 2018/9/1. - */ -abstract class ExecutorManager { - - def setExecutorListener(executorListener: ExecutorListener): Unit - - protected def createExecutor(event: SchedulerEvent): Executor - - def askExecutor(event: SchedulerEvent): Option[Executor] - - def askExecutor(event: SchedulerEvent, wait: Duration): Option[Executor] - - def getById(id: Long): Option[Executor] - - def getByGroup(groupName: String): Array[Executor] - - protected def delete(executor: Executor): Unit - - def shutdown(): Unit - -} \ No newline at end of file diff --git a/core/scheduler/src/main/scala/com/webank/wedatasphere/linkis/scheduler/queue/parallelqueue/ParallelConsumerManager.scala b/core/scheduler/src/main/scala/com/webank/wedatasphere/linkis/scheduler/queue/parallelqueue/ParallelConsumerManager.scala deleted file mode 100644 index 2c61f6e9e4dffa915cc9f692e8aedf4f5f04e656..0000000000000000000000000000000000000000 --- a/core/scheduler/src/main/scala/com/webank/wedatasphere/linkis/scheduler/queue/parallelqueue/ParallelConsumerManager.scala +++ /dev/null @@ -1,83 +0,0 @@ -/* - * Copyright 2019 WeBank - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package com.webank.wedatasphere.linkis.scheduler.queue.parallelqueue - -import java.util.concurrent.ExecutorService - -import com.webank.wedatasphere.linkis.common.utils.Utils -import com.webank.wedatasphere.linkis.scheduler.listener.ConsumerListener -import com.webank.wedatasphere.linkis.scheduler.queue._ -import com.webank.wedatasphere.linkis.scheduler.queue.fifoqueue.FIFOUserConsumer - -import scala.collection.mutable - -/** - * Created by enjoyyin on 2018/9/11. - */ -class ParallelConsumerManager(maxParallelismUsers: Int)extends ConsumerManager{ - - private val UJES_CONTEXT_CONSTRUCTOR_LOCK = new Object() - private var consumerListener: Option[ConsumerListener] = None - - private var executorService: ExecutorService = _ - - private val consumerGroupMap = new mutable.HashMap[String, FIFOUserConsumer]() - - override def setConsumerListener(consumerListener: ConsumerListener) = { - this.consumerListener = Some(consumerListener) - } - - override def getOrCreateExecutorService = if(executorService != null) executorService - else UJES_CONTEXT_CONSTRUCTOR_LOCK.synchronized { - if (executorService == null) { - executorService = Utils.newCachedThreadPool(5 * maxParallelismUsers + 1, "Engine-Scheduler-ThreadPool-", true) - } - executorService - } - - override def getOrCreateConsumer(groupName: String) = if(consumerGroupMap.contains(groupName)) consumerGroupMap(groupName) - else UJES_CONTEXT_CONSTRUCTOR_LOCK.synchronized { - consumerGroupMap.getOrElse(groupName, { - val newConsumer = createConsumer(groupName) - val group = 
getSchedulerContext.getOrCreateGroupFactory.getOrCreateGroup(groupName) - newConsumer.setGroup(group) - newConsumer.setConsumeQueue(new LoopArrayQueue(group)) - consumerGroupMap.put(groupName, newConsumer) - consumerListener.foreach(_.onConsumerCreated(newConsumer)) - newConsumer.start() - newConsumer - }) - } - - override protected def createConsumer(groupName: String) = { - val group = getSchedulerContext.getOrCreateGroupFactory.getOrCreateGroup(groupName) - new FIFOUserConsumer(getSchedulerContext, getOrCreateExecutorService, group) - } - - override def destroyConsumer(groupName: String) = - consumerGroupMap.get(groupName).foreach { tmpConsumer => - tmpConsumer.shutdown() - consumerGroupMap.remove(groupName) - consumerListener.foreach(_.onConsumerDestroyed(tmpConsumer)) - } - - override def shutdown() = { - consumerGroupMap.iterator.foreach(x => x._2.shutdown()) - } - - override def listConsumers() = consumerGroupMap.values.toArray -} diff --git a/datasource/datasourcemanager/common/pom.xml b/datasource/datasourcemanager/common/pom.xml deleted file mode 100644 index 8334f5e77883b7daad170e37cf71178d6e1f7770..0000000000000000000000000000000000000000 --- a/datasource/datasourcemanager/common/pom.xml +++ /dev/null @@ -1,70 +0,0 @@ - - - - - - - linkis - com.webank.wedatasphere.linkis - 0.10.0 - ../../../pom.xml - - 4.0.0 - - linkis-datasourcemanager-common - - - - UTF-8 - 1.0.12 - - - - - com.webank.wedatasphere.linkis - linkis-module - - - asm - org.ow2.asm - - - - - - - - - org.apache.maven.plugins - maven-deploy-plugin - - - - net.alchim31.maven - scala-maven-plugin - - - org.apache.maven.plugins - maven-jar-plugin - - - - - src/main/resources - - - ${project.artifactId}-${project.version} - - diff --git a/datasource/datasourcemanager/common/src/main/java/com/webank/wedatasphere/linkis/datasourcemanager/common/util/json/Json.java b/datasource/datasourcemanager/common/src/main/java/com/webank/wedatasphere/linkis/datasourcemanager/common/util/json/Json.java deleted 
file mode 100644 index 9becb83223710a076c9da7d36dcc91cb3e78464c..0000000000000000000000000000000000000000 --- a/datasource/datasourcemanager/common/src/main/java/com/webank/wedatasphere/linkis/datasourcemanager/common/util/json/Json.java +++ /dev/null @@ -1,112 +0,0 @@ -/* - * Copyright 2019 WeBank - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package com.webank.wedatasphere.linkis.datasourcemanager.common.util.json; - -import com.fasterxml.jackson.core.JsonGenerator; -import com.fasterxml.jackson.core.JsonParser; -import com.fasterxml.jackson.core.JsonProcessingException; -import com.fasterxml.jackson.databind.*; -import org.apache.commons.lang.StringUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.BufferedReader; -import java.io.InputStream; -import java.io.InputStreamReader; -import java.util.ArrayList; - -/** - * @author kirkzhou - * Json utils - * 2018/9/3 - */ -public class Json { - private static final String PREFIX = "["; - private static final String SUFFIX = "]"; - private static final Logger logger = LoggerFactory.getLogger(Json.class); - - private static ObjectMapper mapper; - - static{ - mapper = new ObjectMapper(); - mapper.configure(JsonParser.Feature.ALLOW_COMMENTS, true); - mapper.configure(JsonParser.Feature.ALLOW_UNQUOTED_FIELD_NAMES, true); - mapper.configure(JsonParser.Feature.ALLOW_SINGLE_QUOTES, true); - mapper.configure(DeserializationFeature.READ_ENUMS_USING_TO_STRING, true); - 
mapper.configure(SerializationFeature.WRITE_ENUMS_USING_TO_STRING, true); - mapper.configure(JsonParser.Feature.ALLOW_UNQUOTED_CONTROL_CHARS, true); - //empty beans allowed - mapper.configure(SerializationFeature.FAIL_ON_EMPTY_BEANS, false); - //ignore unknown properties - mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); - //cancel to scape non ascii - mapper.configure(JsonGenerator.Feature.ESCAPE_NON_ASCII, false); - } - private Json(){} - - @SuppressWarnings("unchecked") - public static T fromJson(String json, Class clazz, Class... parameters){ - if(StringUtils.isNotBlank(json)){ - try{ - if(parameters.length > 0){ - return (T)mapper.readValue(json, mapper.getTypeFactory().constructParametricType(clazz, parameters)); - } - if(json.startsWith(PREFIX) - && json.endsWith(SUFFIX)){ - JavaType javaType = mapper.getTypeFactory() - .constructParametricType(ArrayList.class, clazz); - return mapper.readValue(json, javaType); - } - return (T)mapper.readValue(json, clazz); - } catch (Exception e) { - logger.info(e.getLocalizedMessage()); - throw new RuntimeException(e); - } - } - return null; - } - - public static T fromJson(InputStream stream, Class clazz, Class... 
parameters){ - StringBuilder builder = new StringBuilder(); - String jsonStr = null; - try{ - BufferedReader reader = new BufferedReader(new InputStreamReader(stream, "UTF-8")); - while((jsonStr = reader.readLine()) != null){ - builder.append(jsonStr); - } - reader.close(); - }catch(Exception e){ - logger.info(e.getLocalizedMessage()); - throw new RuntimeException(e); - } - return fromJson(builder.toString(), clazz, parameters); - } - - public static String toJson(Object obj, Class model){ - ObjectWriter writer = mapper.writer(); - if(null != obj){ - try{ - if(null != model){ - writer = writer.withView(model); - } - return writer.writeValueAsString(obj); - } catch (JsonProcessingException e) { - logger.info(e.getLocalizedMessage()); - throw new RuntimeException(e); - } - } - return null; - } - -} diff --git a/datasource/datasourcemanager/server/Dockerfile b/datasource/datasourcemanager/server/Dockerfile deleted file mode 100644 index edda49ab940c6366837f34b358728d5450b47232..0000000000000000000000000000000000000000 --- a/datasource/datasourcemanager/server/Dockerfile +++ /dev/null @@ -1,27 +0,0 @@ -# -# Copyright 2019 WeBank -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -FROM wedatasphere/linkis:emr-base-spark2.4.4 - -MAINTAINER wedatasphere@webank.com - -RUN yum install -y unzip -WORKDIR /opt/linkis - -COPY target/linkis-dsm-server.zip /opt/linkis -RUN unzip linkis-dsm-server.zip - -WORKDIR /opt/linkis/linkis-dsm-server/bin -ENTRYPOINT ["/opt/linkis/linkis-dsm-server/bin/startup.sh"] diff --git a/datasource/datasourcemanager/server/conf/application.yml b/datasource/datasourcemanager/server/conf/application.yml deleted file mode 100644 index 8e9cb3ae6f42f3210b700a3ff18e2a2cecd695c6..0000000000000000000000000000000000000000 --- a/datasource/datasourcemanager/server/conf/application.yml +++ /dev/null @@ -1,30 +0,0 @@ -server: - port: 8196 -spring: - application: - name: dsm-server - -eureka: - client: - serviceUrl: - defaultZone: ${eurekaurl} - registry-fetch-interval-seconds: 5 - instance: - lease-renewal-interval-in-second: 5 - lease-expiration-duration-in-second: 10 - prefer-ip-address: true - instance-id: ${spring.cloud.client.ip-address}:${server.port} - metadata-map: - test: wedatasphere - - -management: - endpoints: - web: - exposure: - include: refresh,info -logging: - config: classpath:log4j2.xml - - - diff --git a/datasource/datasourcemanager/server/pom.xml b/datasource/datasourcemanager/server/pom.xml deleted file mode 100644 index b7d8da87b0d0e9f46b0b97aa7b61c688d201b70d..0000000000000000000000000000000000000000 --- a/datasource/datasourcemanager/server/pom.xml +++ /dev/null @@ -1,191 +0,0 @@ - - - - - - - linkis - com.webank.wedatasphere.linkis - 0.10.0 - ../../../pom.xml - - 4.0.0 - - linkis-datasourcemanager-server - - - - UTF-8 - - - - - com.webank.wedatasphere.linkis - linkis-module - - - asm - org.ow2.asm - - - hk2-api - org.glassfish.hk2 - - - jersey-common - org.glassfish.jersey.core - - - provided - - - - com.webank.wedatasphere.linkis - linkis-datasourcemanager-common - - - asm - org.ow2.asm - - - - - - org.glassfish.jersey.ext - jersey-bean-validation - ${jersey.version} - - - javax.ws.rs-api - javax.ws.rs - 
- - hk2-locator - org.glassfish.hk2 - - - hk2-api - org.glassfish.hk2 - - - - - - com.webank.wedatasphere.linkis - linkis-bmlclient - - - - com.webank.wedatasphere.linkis - linkis-mybatis - - - - com.webank.wedatasphere.linkis - linkis-cloudRPC - provided - - - - com.webank.wedatasphere.linkis - linkis-metadatamanager-common - - - hk2-api - org.glassfish.hk2 - 2.4.0-b34 - - - org.json4s - json4s-jackson_2.11 - ${json4s.version} - - - org.json4s - json4s-core_2.11 - ${json4s.version} - - - org.json4s - json4s-ast_2.11 - ${json4s.version} - - - - - - - org.apache.maven.plugins - maven-deploy-plugin - - - - net.alchim31.maven - scala-maven-plugin - - - org.apache.maven.plugins - maven-jar-plugin - - - org.apache.maven.plugins - maven-assembly-plugin - 2.3 - false - - - make-assembly - package - - single - - - - src/main/assembly/distribution.xml - - - - - - false - linkis-dsm-server - false - false - - src/main/assembly/distribution.xml - - - - - - - src/main/java - - **/*.xml - - - - src/main/resources - - **/*.properties - **/application.yml - **/bootstrap.yml - **/log4j2.xml - - - - ${project.artifactId}-${project.version} - - diff --git a/datasource/datasourcemanager/server/pom_k8s.xml b/datasource/datasourcemanager/server/pom_k8s.xml deleted file mode 100644 index 9970a795db4b7d5c581fc58cbccd038db213e45a..0000000000000000000000000000000000000000 --- a/datasource/datasourcemanager/server/pom_k8s.xml +++ /dev/null @@ -1,209 +0,0 @@ - - - - - - - linkis - com.webank.wedatasphere.linkis - 0.10.0 - ../../../pom.xml - - 4.0.0 - - linkis-datasourcemanager-server - - - - UTF-8 - - - - - com.webank.wedatasphere.linkis - linkis-module - - - asm - org.ow2.asm - - - hk2-api - org.glassfish.hk2 - - - jersey-common - org.glassfish.jersey.core - - - - - - com.webank.wedatasphere.linkis - linkis-datasourcemanager-common - - - asm - org.ow2.asm - - - - - - org.glassfish.jersey.ext - jersey-bean-validation - ${jersey.version} - - - javax.ws.rs-api - javax.ws.rs - - - hk2-locator - 
org.glassfish.hk2 - - - hk2-api - org.glassfish.hk2 - - - - - - com.webank.wedatasphere.linkis - linkis-bmlclient - - - - com.webank.wedatasphere.linkis - linkis-mybatis - - - - com.webank.wedatasphere.linkis - linkis-cloudRPC - - - - com.webank.wedatasphere.linkis - linkis-metadatamanager-common - - - hk2-api - org.glassfish.hk2 - 2.4.0-b34 - - - org.json4s - json4s-jackson_2.11 - ${json4s.version} - - - org.json4s - json4s-core_2.11 - ${json4s.version} - - - org.json4s - json4s-ast_2.11 - ${json4s.version} - - - - - - - org.apache.maven.plugins - maven-deploy-plugin - - - - net.alchim31.maven - scala-maven-plugin - - - org.apache.maven.plugins - maven-jar-plugin - - - org.apache.maven.plugins - maven-assembly-plugin - 2.3 - false - - - make-assembly - package - - single - - - - src/main/assembly/distribution.xml - - - - - - false - linkis-dsm-server - false - false - - src/main/assembly/distribution.xml - - - - - - - - - - - - - - - - - - - - - - - - - - - src/main/java - - **/*.xml - - - - src/main/resources - - - - - - - - - ${project.artifactId}-${project.version} - - diff --git a/datasource/metadatamanager/common/pom.xml b/datasource/metadatamanager/common/pom.xml deleted file mode 100644 index 25a3d701f5af9430f59a540e144a72f295cbfd3b..0000000000000000000000000000000000000000 --- a/datasource/metadatamanager/common/pom.xml +++ /dev/null @@ -1,73 +0,0 @@ - - - - - - - linkis - com.webank.wedatasphere.linkis - 0.10.0 - ../../../pom.xml - - 4.0.0 - - linkis-metadatamanager-common - - - UTF-8 - - - - - com.webank.wedatasphere.linkis - linkis-common - - - asm - org.ow2.asm - - - - - - com.webank.wedatasphere.linkis - linkis-cloudRPC - - - - - - - org.apache.maven.plugins - maven-deploy-plugin - - - - net.alchim31.maven - scala-maven-plugin - - - org.apache.maven.plugins - maven-jar-plugin - - - - - src/main/resources - - - ${project.artifactId}-${project.version} - - diff --git 
a/datasource/metadatamanager/common/src/main/java/com/webank/wedatasphere/linkis/metadatamanager/common/Json.java b/datasource/metadatamanager/common/src/main/java/com/webank/wedatasphere/linkis/metadatamanager/common/Json.java deleted file mode 100644 index 203c4a14099482ae481f9535d35d5ea82d78926a..0000000000000000000000000000000000000000 --- a/datasource/metadatamanager/common/src/main/java/com/webank/wedatasphere/linkis/metadatamanager/common/Json.java +++ /dev/null @@ -1,111 +0,0 @@ -/* - * Copyright 2019 WeBank - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package com.webank.wedatasphere.linkis.metadatamanager.common; - -import com.fasterxml.jackson.core.JsonGenerator; -import com.fasterxml.jackson.core.JsonParser; -import com.fasterxml.jackson.core.JsonProcessingException; -import com.fasterxml.jackson.databind.*; -import org.apache.commons.lang.StringUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.BufferedReader; -import java.io.InputStream; -import java.io.InputStreamReader; -import java.util.ArrayList; - -/** - * Json utils - * Created by jackyxxie on 2018/9/3. 
- */ -public class Json { - private static final String PREFIX = "["; - private static final String SUFFIX = "]"; - private static final Logger logger = LoggerFactory.getLogger(Json.class); - - private static ObjectMapper mapper; - - static{ - mapper = new ObjectMapper(); - mapper.configure(JsonParser.Feature.ALLOW_COMMENTS, true); - mapper.configure(JsonParser.Feature.ALLOW_UNQUOTED_FIELD_NAMES, true); - mapper.configure(JsonParser.Feature.ALLOW_SINGLE_QUOTES, true); - mapper.configure(DeserializationFeature.READ_ENUMS_USING_TO_STRING, true); - mapper.configure(SerializationFeature.WRITE_ENUMS_USING_TO_STRING, true); - mapper.configure(JsonParser.Feature.ALLOW_UNQUOTED_CONTROL_CHARS, true); - //empty beans allowed - mapper.configure(SerializationFeature.FAIL_ON_EMPTY_BEANS, false); - //ignore unknown properties - mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); - //cancel to scape non ascii - mapper.configure(JsonGenerator.Feature.ESCAPE_NON_ASCII, false); - } - private Json(){} - - @SuppressWarnings("unchecked") - public static T fromJson(String json, Class clazz, Class... parameters){ - if(StringUtils.isNotBlank(json)){ - try{ - if(parameters.length > 0){ - return (T)mapper.readValue(json, mapper.getTypeFactory().constructParametricType(clazz, parameters)); - } - if(json.startsWith(PREFIX) - && json.endsWith(SUFFIX)){ - JavaType javaType = mapper.getTypeFactory() - .constructParametricType(ArrayList.class, clazz); - return mapper.readValue(json, javaType); - } - return (T)mapper.readValue(json, clazz); - } catch (Exception e) { - logger.info(e.getLocalizedMessage()); - throw new RuntimeException(e); - } - } - return null; - } - - public static T fromJson(InputStream stream, Class clazz, Class... 
parameters){ - StringBuilder builder = new StringBuilder(); - String jsonStr = null; - try{ - BufferedReader reader = new BufferedReader(new InputStreamReader(stream, "UTF-8")); - while((jsonStr = reader.readLine()) != null){ - builder.append(jsonStr); - } - reader.close(); - }catch(Exception e){ - logger.info(e.getLocalizedMessage()); - throw new RuntimeException(e); - } - return fromJson(builder.toString(), clazz, parameters); - } - - public static String toJson(Object obj, Class model){ - ObjectWriter writer = mapper.writer(); - if(null != obj){ - try{ - if(null != model){ - writer = writer.withView(model); - } - return writer.writeValueAsString(obj); - } catch (JsonProcessingException e) { - logger.info(e.getLocalizedMessage()); - throw new RuntimeException(e); - } - } - return null; - } - -} diff --git a/datasource/metadatamanager/server/Dockerfile b/datasource/metadatamanager/server/Dockerfile deleted file mode 100644 index 6e7ebcbb456bc7f2a9c826680a55b628add5216d..0000000000000000000000000000000000000000 --- a/datasource/metadatamanager/server/Dockerfile +++ /dev/null @@ -1,27 +0,0 @@ -# -# Copyright 2019 WeBank -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -FROM wedatasphere/linkis:emr-base-spark2.4.4 - -MAINTAINER wedatasphere@webank.com - -RUN yum install -y unzip -WORKDIR /opt/linkis - -COPY target/linkis-mdm-server.zip /opt/linkis -RUN unzip linkis-mdm-server.zip - -WORKDIR /opt/linkis/linkis-mdm-server/bin -ENTRYPOINT ["/opt/linkis/linkis-mdm-server/bin/startup.sh"] diff --git a/datasource/metadatamanager/server/conf/application.yml b/datasource/metadatamanager/server/conf/application.yml deleted file mode 100644 index c74a09c008f59eccc9dab9cf4b7065c13ed3aba2..0000000000000000000000000000000000000000 --- a/datasource/metadatamanager/server/conf/application.yml +++ /dev/null @@ -1,35 +0,0 @@ -server: - port: 8296 -spring: - application: - name: mdm-server - - -eureka: - client: - serviceUrl: - defaultZone: ${eurekaurl} - registry-fetch-interval-seconds: 5 - instance: - lease-renewal-interval-in-second: 5 - lease-expiration-duration-in-second: 10 - prefer-ip-address: true - instance-id: ${spring.cloud.client.ip-address}:${server.port} - metadata-map: - test: wedatasphere - -management: - endpoints: - web: - exposure: - include: refresh,info -logging: - config: classpath:log4j2.xml - - -pagehelper: - helper-dialect: mysql - reasonable: true - support-methods-arguments: true - params: countSql - diff --git a/datasource/metadatamanager/server/pom.xml b/datasource/metadatamanager/server/pom.xml deleted file mode 100644 index 1c35e9691f64c3304e78b39bf6b666b24f0f4abc..0000000000000000000000000000000000000000 --- a/datasource/metadatamanager/server/pom.xml +++ /dev/null @@ -1,131 +0,0 @@ - - - - - - - linkis - com.webank.wedatasphere.linkis - 0.10.0 - ../../../pom.xml - - 4.0.0 - linkis-metadatamanager-server - - - UTF-8 - - - - - com.webank.wedatasphere.linkis - linkis-module - - - asm - org.ow2.asm - - - provided - - - - com.webank.wedatasphere.linkis - linkis-metadatamanager-common - - - - com.webank.wedatasphere.linkis - linkis-datasourcemanager-common - - - asm - org.ow2.asm - - - - - - 
com.webank.wedatasphere.linkis - linkis-cloudRPC - provided - - - - - - - org.apache.maven.plugins - maven-deploy-plugin - - - - net.alchim31.maven - scala-maven-plugin - - - org.apache.maven.plugins - maven-jar-plugin - - - org.apache.maven.plugins - maven-assembly-plugin - 2.3 - false - - - make-assembly - package - - single - - - - src/main/assembly/distribution.xml - - - - - - false - linkis-mdm-server - false - false - - src/main/assembly/distribution.xml - - - - - - - src/main/java - - **/*.xml - - - - src/main/resources - - **/*.properties - **/application.yml - **/bootstrap.yml - **/log4j2.xml - - - - ${project.artifactId}-${project.version} - - diff --git a/datasource/metadatamanager/server/pom_k8s.xml b/datasource/metadatamanager/server/pom_k8s.xml deleted file mode 100644 index d22cb129f3a5f36a8d501fc6684ccb03d898da85..0000000000000000000000000000000000000000 --- a/datasource/metadatamanager/server/pom_k8s.xml +++ /dev/null @@ -1,149 +0,0 @@ - - - - - - - linkis - com.webank.wedatasphere.linkis - 0.10.0 - ../../../pom.xml - - 4.0.0 - linkis-metadatamanager-server - - - UTF-8 - - - - - com.webank.wedatasphere.linkis - linkis-module - - - asm - org.ow2.asm - - - - - - com.webank.wedatasphere.linkis - linkis-metadatamanager-common - - - - com.webank.wedatasphere.linkis - linkis-datasourcemanager-common - - - asm - org.ow2.asm - - - - - - com.webank.wedatasphere.linkis - linkis-cloudRPC - - - - - - - org.apache.maven.plugins - maven-deploy-plugin - - - - net.alchim31.maven - scala-maven-plugin - - - org.apache.maven.plugins - maven-jar-plugin - - - org.apache.maven.plugins - maven-assembly-plugin - 2.3 - false - - - make-assembly - package - - single - - - - src/main/assembly/distribution.xml - - - - - - false - linkis-mdm-server - false - false - - src/main/assembly/distribution.xml - - - - - - - - - - - - - - - - - - - - - - - - - - - src/main/java - - **/*.xml - - - - src/main/resources - - **/*.properties - **/application.yml - **/bootstrap.yml - 
**/log4j2.xml - - - - ${project.artifactId}-${project.version} - - diff --git a/datasource/metadatamanager/service/elasticsearch/Dockerfile b/datasource/metadatamanager/service/elasticsearch/Dockerfile deleted file mode 100644 index 4736fd79a5fa6d67609e8c695980d1b42ba1862c..0000000000000000000000000000000000000000 --- a/datasource/metadatamanager/service/elasticsearch/Dockerfile +++ /dev/null @@ -1,27 +0,0 @@ -# -# Copyright 2019 WeBank -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -FROM wedatasphere/linkis:emr-base-spark2.4.4 - -MAINTAINER wedatasphere@webank.com - -RUN yum install -y unzip -WORKDIR /opt/linkis - -COPY target/linkis-mdm-service-es.zip /opt/linkis -RUN unzip linkis-mdm-service-es.zip - -WORKDIR /opt/linkis/linkis-mdm-service-es/bin -ENTRYPOINT ["/opt/linkis/linkis-mdm-service-es/bin/startup.sh"] diff --git a/datasource/metadatamanager/service/elasticsearch/conf/application.yml b/datasource/metadatamanager/service/elasticsearch/conf/application.yml deleted file mode 100644 index 474819da678b9ab5a056e9dadf6eff2847342988..0000000000000000000000000000000000000000 --- a/datasource/metadatamanager/service/elasticsearch/conf/application.yml +++ /dev/null @@ -1,28 +0,0 @@ -server: - port: 8295 -spring: - application: - name: mdm-service-elasticsearch - -eureka: - client: - serviceUrl: - defaultZone: ${eurekaurl} - registry-fetch-interval-seconds: 5 - instance: - lease-renewal-interval-in-second: 5 - lease-expiration-duration-in-second: 10 - 
prefer-ip-address: true - instance-id: ${spring.cloud.client.ip-address}:${server.port} - metadata-map: - test: wedatasphere - -management: - endpoints: - web: - exposure: - include: refresh,info -logging: - config: classpath:log4j2.xml - - diff --git a/datasource/metadatamanager/service/elasticsearch/pom.xml b/datasource/metadatamanager/service/elasticsearch/pom.xml deleted file mode 100644 index 7c7609aca917bdb2831032455935434a1649ff20..0000000000000000000000000000000000000000 --- a/datasource/metadatamanager/service/elasticsearch/pom.xml +++ /dev/null @@ -1,118 +0,0 @@ - - - - - - - linkis - com.webank.wedatasphere.linkis - 0.10.0 - ../../../../pom.xml - - 4.0.0 - - linkis-metadatamanager-service-es - - UTF-8 - 6.7.1 - - - - - com.webank.wedatasphere.linkis - linkis-metadatamanager-common - - - com.webank.wedatasphere.linkis - linkis-module - - - asm - org.ow2.asm - - - - - org.elasticsearch.client - elasticsearch-rest-client - ${es.version} - - - - - - - org.apache.maven.plugins - maven-deploy-plugin - - - - net.alchim31.maven - scala-maven-plugin - - - org.apache.maven.plugins - maven-jar-plugin - - - org.apache.maven.plugins - maven-assembly-plugin - 2.3 - false - - - make-assembly - package - - single - - - - src/main/assembly/distribution.xml - - - - - - false - linkis-mdm-service-es - false - false - - src/main/assembly/distribution.xml - - - - - - - src/main/java - - **/*.xml - - - - src/main/resources - - **/*.properties - **/application.yml - **/bootstrap.yml - **/log4j2.xml - - - - ${project.artifactId}-${project.version} - - diff --git a/datasource/metadatamanager/service/elasticsearch/pom_k8s.xml b/datasource/metadatamanager/service/elasticsearch/pom_k8s.xml deleted file mode 100644 index 322d668334ae74539252350212467e4f3a21700d..0000000000000000000000000000000000000000 --- a/datasource/metadatamanager/service/elasticsearch/pom_k8s.xml +++ /dev/null @@ -1,138 +0,0 @@ - - - - - - - linkis - com.webank.wedatasphere.linkis - 0.10.0 - ../../../../pom.xml 
- - 4.0.0 - - linkis-metadatamanager-service-es - - UTF-8 - 6.7.1 - - - - - com.webank.wedatasphere.linkis - linkis-metadatamanager-common - - - com.webank.wedatasphere.linkis - linkis-module - - - asm - org.ow2.asm - - - - - org.elasticsearch.client - elasticsearch-rest-client - ${es.version} - - - - - - - org.apache.maven.plugins - maven-deploy-plugin - - - - net.alchim31.maven - scala-maven-plugin - - - org.apache.maven.plugins - maven-jar-plugin - - - org.apache.maven.plugins - maven-assembly-plugin - 2.3 - false - - - make-assembly - package - - single - - - - src/main/assembly/distribution.xml - - - - - - false - linkis-mdm-service-es - false - false - - src/main/assembly/distribution.xml - - - - - - - - - - - - - - - - - - - - - - - - - - - src/main/java - - **/*.xml - - - - src/main/resources - - **/*.properties - **/application.yml - **/bootstrap.yml - **/log4j2.xml - - - - ${project.artifactId}-${project.version} - - diff --git a/datasource/metadatamanager/service/hive/Dockerfile b/datasource/metadatamanager/service/hive/Dockerfile deleted file mode 100644 index 1f6244a87ea9e5419e2865ecef7d887005715390..0000000000000000000000000000000000000000 --- a/datasource/metadatamanager/service/hive/Dockerfile +++ /dev/null @@ -1,27 +0,0 @@ -# -# Copyright 2019 WeBank -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -FROM wedatasphere/linkis:emr-base-spark2.4.4 - -MAINTAINER wedatasphere@webank.com - -RUN yum install -y unzip -WORKDIR /opt/linkis - -COPY target/linkis-mdm-service-hive.zip /opt/linkis -RUN unzip linkis-mdm-service-hive.zip - -WORKDIR /opt/linkis/linkis-mdm-service-hive/bin -ENTRYPOINT ["/opt/linkis/linkis-mdm-service-hive/bin/startup.sh"] diff --git a/datasource/metadatamanager/service/hive/conf/application.yml b/datasource/metadatamanager/service/hive/conf/application.yml deleted file mode 100644 index 08f79c0e02ce7010cf3cfff8da2bf0b0008f62e1..0000000000000000000000000000000000000000 --- a/datasource/metadatamanager/service/hive/conf/application.yml +++ /dev/null @@ -1,28 +0,0 @@ -server: - port: 8293 -spring: - application: - name: mdm-service-hive - -eureka: - client: - serviceUrl: - defaultZone: ${eurekaurl} - registry-fetch-interval-seconds: 5 - instance: - lease-renewal-interval-in-second: 5 - lease-expiration-duration-in-second: 10 - prefer-ip-address: true - instance-id: ${spring.cloud.client.ip-address}:${server.port} - metadata-map: - test: wedatasphere - -management: - endpoints: - web: - exposure: - include: refresh,info -logging: - config: classpath:log4j2.xml - - diff --git a/datasource/metadatamanager/service/hive/pom.xml b/datasource/metadatamanager/service/hive/pom.xml deleted file mode 100644 index 9fd641bc8b4db4a5c5cc576d813ebf1e1f5e09c5..0000000000000000000000000000000000000000 --- a/datasource/metadatamanager/service/hive/pom.xml +++ /dev/null @@ -1,154 +0,0 @@ - - - - - - - linkis - com.webank.wedatasphere.linkis - 0.10.0 - ../../../../pom.xml - - 4.0.0 - linkis-metadatamanager-service-hive - - - UTF-8 - - - - - com.webank.wedatasphere.linkis - linkis-metadatamanager-common - - - com.webank.wedatasphere.linkis - linkis-module - - - - org.apache.hadoop - hadoop-client - ${hadoop.version} - - - org.slf4j - slf4j-log4j12 - - - servlet-api - javax.servlet - - - guava - com.google.guava - - - - - org.apache.hive - hive-exec - ${hive.version} 
- - - slf4j-log4j12 - org.slf4j - - - org.apache.ivy - ivy - - - guava - com.google.guava - - - commons-lang3 - org.apache.commons - - - - - - com.webank.wedatasphere.linkis - linkis-bmlclient - - - - - - - org.apache.maven.plugins - maven-deploy-plugin - - - - net.alchim31.maven - scala-maven-plugin - - - org.apache.maven.plugins - maven-jar-plugin - - - org.apache.maven.plugins - maven-assembly-plugin - 2.3 - false - - - make-assembly - package - - single - - - - src/main/assembly/distribution.xml - - - - - - false - linkis-mdm-service-hive - false - false - - src/main/assembly/distribution.xml - - - - - - - src/main/java - - **/*.xml - - - - src/main/resources - - **/*.properties - **/application.yml - **/bootstrap.yml - **/log4j2.xml - - - - ${project.artifactId}-${project.version} - - diff --git a/datasource/metadatamanager/service/hive/pom_k8s.xml b/datasource/metadatamanager/service/hive/pom_k8s.xml deleted file mode 100644 index 90a094b54d46f41e493467b76e81a77ccae1ee68..0000000000000000000000000000000000000000 --- a/datasource/metadatamanager/service/hive/pom_k8s.xml +++ /dev/null @@ -1,174 +0,0 @@ - - - - - - - linkis - com.webank.wedatasphere.linkis - 0.10.0 - ../../../../pom.xml - - 4.0.0 - linkis-metadatamanager-service-hive - - - UTF-8 - - - - - com.webank.wedatasphere.linkis - linkis-metadatamanager-common - - - com.webank.wedatasphere.linkis - linkis-module - - - - org.apache.hadoop - hadoop-client - ${hadoop.version} - - - org.slf4j - slf4j-log4j12 - - - servlet-api - javax.servlet - - - guava - com.google.guava - - - - - org.apache.hive - hive-exec - ${hive.version} - - - slf4j-log4j12 - org.slf4j - - - org.apache.ivy - ivy - - - guava - com.google.guava - - - commons-lang3 - org.apache.commons - - - - - - com.webank.wedatasphere.linkis - linkis-bmlclient - - - - - - - org.apache.maven.plugins - maven-deploy-plugin - - - - net.alchim31.maven - scala-maven-plugin - - - org.apache.maven.plugins - maven-jar-plugin - - - org.apache.maven.plugins - 
maven-assembly-plugin - 2.3 - false - - - make-assembly - package - - single - - - - src/main/assembly/distribution.xml - - - - - - false - linkis-mdm-service-hive - false - false - - src/main/assembly/distribution.xml - - - - - - - - - - - - - - - - - - - - - - - - - - - src/main/java - - **/*.xml - - - - src/main/resources - - **/*.properties - **/application.yml - **/bootstrap.yml - **/log4j2.xml - - - - ${project.artifactId}-${project.version} - - diff --git a/datasource/metadatamanager/service/mysql/Dockerfile b/datasource/metadatamanager/service/mysql/Dockerfile deleted file mode 100644 index 269399c5bcebfbb05f1d1c3329a655ce65aa2ab9..0000000000000000000000000000000000000000 --- a/datasource/metadatamanager/service/mysql/Dockerfile +++ /dev/null @@ -1,27 +0,0 @@ -# -# Copyright 2019 WeBank -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -FROM wedatasphere/linkis:emr-base-spark2.4.4 - -MAINTAINER wedatasphere@webank.com - -RUN yum install -y unzip -WORKDIR /opt/linkis - -COPY target/linkis-mdm-service-mysql.zip /opt/linkis -RUN unzip linkis-mdm-service-mysql.zip - -WORKDIR /opt/linkis/linkis-mdm-service-mysql/bin -ENTRYPOINT ["/opt/linkis/linkis-mdm-service-mysql/bin/startup.sh"] diff --git a/datasource/metadatamanager/service/mysql/conf/application.yml b/datasource/metadatamanager/service/mysql/conf/application.yml deleted file mode 100644 index 072b1b5af143355c6a3a8e5446313e471937f284..0000000000000000000000000000000000000000 --- a/datasource/metadatamanager/service/mysql/conf/application.yml +++ /dev/null @@ -1,28 +0,0 @@ -server: - port: 8294 -spring: - application: - name: mdm-service-mysql - -eureka: - client: - serviceUrl: - defaultZone: ${eurekaurl} - registry-fetch-interval-seconds: 5 - instance: - lease-renewal-interval-in-second: 5 - lease-expiration-duration-in-second: 10 - prefer-ip-address: true - instance-id: ${spring.cloud.client.ip-address}:${server.port} - metadata-map: - test: wedatasphere - -management: - endpoints: - web: - exposure: - include: refresh,info -logging: - config: classpath:log4j2.xml - - diff --git a/datasource/metadatamanager/service/mysql/conf/log4j2.xml b/datasource/metadatamanager/service/mysql/conf/log4j2.xml deleted file mode 100644 index 1c68190669e6a47fdcf68fbf03bd6092b9271d4c..0000000000000000000000000000000000000000 --- a/datasource/metadatamanager/service/mysql/conf/log4j2.xml +++ /dev/null @@ -1,35 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - diff --git a/datasource/metadatamanager/service/mysql/pom.xml b/datasource/metadatamanager/service/mysql/pom.xml deleted file mode 100644 index 86007121d867aa884306645c06cda72a5ebca6eb..0000000000000000000000000000000000000000 --- a/datasource/metadatamanager/service/mysql/pom.xml +++ /dev/null @@ -1,123 +0,0 @@ - - - - - - - linkis - com.webank.wedatasphere.linkis - 0.10.0 - ../../../../pom.xml - - 
4.0.0 - - linkis-metadatamanager-service-mysql - - - UTF-8 - 5.1.34 - - - - - com.webank.wedatasphere.linkis - linkis-metadatamanager-common - - - com.webank.wedatasphere.linkis - linkis-module - - - asm - org.ow2.asm - - - mysql - mysql-connector-java - - - - - mysql - mysql-connector-java - ${mysql.version} - - - - - - - org.apache.maven.plugins - maven-deploy-plugin - - - - net.alchim31.maven - scala-maven-plugin - - - org.apache.maven.plugins - maven-jar-plugin - - - org.apache.maven.plugins - maven-assembly-plugin - 2.3 - false - - - make-assembly - package - - single - - - - src/main/assembly/distribution.xml - - - - - - false - linkis-mdm-service-mysql - false - false - - src/main/assembly/distribution.xml - - - - - - - src/main/java - - **/*.xml - - - - src/main/resources - - **/*.properties - **/application.yml - **/bootstrap.yml - **/log4j2.xml - - - - ${project.artifactId}-${project.version} - - diff --git a/datasource/metadatamanager/service/mysql/pom_k8s.xml b/datasource/metadatamanager/service/mysql/pom_k8s.xml deleted file mode 100644 index d5839e311040378e570c616f469fa3d63d49f600..0000000000000000000000000000000000000000 --- a/datasource/metadatamanager/service/mysql/pom_k8s.xml +++ /dev/null @@ -1,143 +0,0 @@ - - - - - - - linkis - com.webank.wedatasphere.linkis - 0.10.0 - ../../../../pom.xml - - 4.0.0 - - linkis-metadatamanager-service-mysql - - - UTF-8 - 5.1.34 - - - - - com.webank.wedatasphere.linkis - linkis-metadatamanager-common - - - com.webank.wedatasphere.linkis - linkis-module - - - asm - org.ow2.asm - - - mysql - mysql-connector-java - - - - - mysql - mysql-connector-java - ${mysql.version} - - - - - - - org.apache.maven.plugins - maven-deploy-plugin - - - - net.alchim31.maven - scala-maven-plugin - - - org.apache.maven.plugins - maven-jar-plugin - - - org.apache.maven.plugins - maven-assembly-plugin - 2.3 - false - - - make-assembly - package - - single - - - - src/main/assembly/distribution.xml - - - - - - false - 
linkis-mdm-service-mysql - false - false - - src/main/assembly/distribution.xml - - - - - - - - - - - - - - - - - - - - - - - - - - - src/main/java - - **/*.xml - - - - src/main/resources - - **/*.properties - **/application.yml - **/bootstrap.yml - **/log4j2.xml - - - - ${project.artifactId}-${project.version} - - diff --git a/db/linkis_ddl.sql b/db/linkis_ddl.sql index cf588c942416981ae3c072d9d1d58fa3fb667a74..c271459973cda51c4ef856142630ecbafd0ab8a9 100644 --- a/db/linkis_ddl.sql +++ b/db/linkis_ddl.sql @@ -1,171 +1,45 @@ SET FOREIGN_KEY_CHECKS=0; --- ---------------------------- --- Table structure for linkis_develop_application --- ---------------------------- -DROP TABLE IF EXISTS `linkis_develop_application`; -CREATE TABLE `linkis_develop_application` ( - `id` bigint(20) NOT NULL AUTO_INCREMENT, - `name` varchar(200) DEFAULT NULL, - `source` varchar(50) DEFAULT NULL COMMENT 'Source of the development application', - `version` varchar(50) DEFAULT NULL, - `description` text, - `user_id` bigint(20) DEFAULT NULL, - `is_published` bit(1) DEFAULT NULL, - `create_time` datetime DEFAULT NULL, - `update_time` datetime DEFAULT NULL, - `org_id` bigint(20) DEFAULT NULL COMMENT 'Organization ID', - `visibility` bit(1) DEFAULT NULL, - `is_transfer` bit(1) DEFAULT NULL COMMENT 'Reserved word', - `initial_org_id` bigint(20) DEFAULT NULL, - `json_path` varchar(255) DEFAULT NULL COMMENT 'Path of the jason file which is used for data development in the front-end. 
', - `isAsh` bit(1) DEFAULT NULL COMMENT 'If it is active', - `pic` varchar(255) DEFAULT NULL, - `star_num` int(11) DEFAULT '0', - PRIMARY KEY (`id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8; - - --- ---------------------------- --- Table structure for linkis_project_list --- ---------------------------- -DROP TABLE IF EXISTS `linkis_project_list`; -CREATE TABLE `linkis_project_list` ( - `id` bigint(20) NOT NULL AUTO_INCREMENT, - `name` varchar(200) DEFAULT NULL COMMENT 'Project service name which needs to be initialized', - `is_project_need_init` bit(1) DEFAULT NULL, - `url` varchar(255) DEFAULT NULL COMMENT 'URL used to initialize a project', - `is_user_need_init` bit(1) DEFAULT NULL, - `is_project_inited` bit(1) DEFAULT NULL, - `json` text COMMENT 'Data provided by project to the front-end would be jsonized after initialization.', - `level` tinyint(255) DEFAULT NULL COMMENT 'Marks the importance of the project. When encounter initialization failure, if a user tried to log in, the project would report an error if its level is greater than 4, otherwise, grey the corresponding function button', - `user_init_url` varchar(255) DEFAULT NULL COMMENT 'URL used to initialize a user', - PRIMARY KEY (`id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8; - - --- ---------------------------- --- Table structure for linkis_project_user --- ---------------------------- -DROP TABLE IF EXISTS `linkis_project_user`; -CREATE TABLE `linkis_project_user` ( - `id` bigint(20) NOT NULL AUTO_INCREMENT, - `project_id` bigint(20) DEFAULT NULL, - `json` varchar(255) DEFAULT NULL COMMENT 'Data returned by initializing a user would be jsonized', - `user_id` bigint(20) DEFAULT NULL, - `is_init_success` bit(1) DEFAULT NULL, - `is_new_feature` bit(1) DEFAULT NULL COMMENT 'If this project is a new function to the user', - PRIMARY KEY (`id`), - UNIQUE KEY `project_id` (`project_id`,`user_id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8; - - --- ---------------------------- --- Table structure for linkis_user --- 
---------------------------- -DROP TABLE IF EXISTS `linkis_user`; -CREATE TABLE `linkis_user` ( - `id` bigint(20) NOT NULL AUTO_INCREMENT, - `email` varchar(255) DEFAULT NULL, - `username` varchar(255) NOT NULL, - `password` varchar(255) DEFAULT NULL, - `admin` tinyint(1) DEFAULT NULL COMMENT 'If it is an administrator', - `active` tinyint(1) DEFAULT NULL COMMENT 'If it is active', - `name` varchar(255) DEFAULT NULL COMMENT 'User name', - `description` varchar(255) DEFAULT NULL, - `department` varchar(255) DEFAULT NULL, - `avatar` varchar(255) DEFAULT NULL COMMENT 'Path of the avator', - `create_time` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, - `create_by` bigint(20) DEFAULT '0', - `update_time` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, - `update_by` bigint(20) DEFAULT '0', - `is_first_login` bit(1) DEFAULT NULL COMMENT 'If it is the first time to log in', - PRIMARY KEY (`id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8; - -SET FOREIGN_KEY_CHECKS=0; - - --- ---------------------------- --- Table structure for linkis_application --- ---------------------------- -DROP TABLE IF EXISTS `linkis_application`; -CREATE TABLE `linkis_application` ( - `id` bigint(20) NOT NULL AUTO_INCREMENT, - `name` varchar(50) DEFAULT NULL COMMENT 'Can be one of the following: execute_application_name(in table linkis_task), request_application_name(i.e. 
creator), general configuration', - `chinese_name` varchar(50) DEFAULT NULL, - `description` varchar(200) DEFAULT NULL, - PRIMARY KEY (`id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4; - - --- ---------------------------- --- Table structure for linkis_config_key_tree --- ---------------------------- -DROP TABLE IF EXISTS `linkis_config_key_tree`; -CREATE TABLE `linkis_config_key_tree` ( - `id` bigint(20) NOT NULL AUTO_INCREMENT, - `key_id` bigint(20) DEFAULT NULL, - `tree_id` bigint(20) DEFAULT NULL, - PRIMARY KEY (`id`), - KEY `key_id` (`key_id`), - KEY `tree_id` (`tree_id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4; - - --- ---------------------------- --- Table structure for linkis_config_key_user --- ---------------------------- -DROP TABLE IF EXISTS `linkis_config_key_user`; -CREATE TABLE `linkis_config_key_user` ( - `id` bigint(20) NOT NULL AUTO_INCREMENT, - `application_id` bigint(20) DEFAULT NULL COMMENT 'Same as id in tale linkis_application, except that it cannot be the id of creator', - `key_id` bigint(20) DEFAULT NULL, - `user_name` varchar(50) DEFAULT NULL, - `value` varchar(200) DEFAULT NULL COMMENT 'Value of the key', - PRIMARY KEY (`id`), - UNIQUE KEY `application_id_2` (`application_id`,`key_id`,`user_name`), - KEY `key_id` (`key_id`), - KEY `application_id` (`application_id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4; - - --- ---------------------------- --- Table structure for linkis_config_key --- ---------------------------- -DROP TABLE IF EXISTS `linkis_config_key`; -CREATE TABLE `linkis_config_key` ( +DROP TABLE IF EXISTS `linkis_configuration_config_key`; +CREATE TABLE `linkis_configuration_config_key`( `id` bigint(20) NOT NULL AUTO_INCREMENT, `key` varchar(50) DEFAULT NULL COMMENT 'Set key, e.g. 
spark.executor.instances', `description` varchar(200) DEFAULT NULL, `name` varchar(50) DEFAULT NULL, - `application_id` bigint(20) DEFAULT NULL COMMENT 'Correlate with id in table linkis_application', + `engine_conn_type` varchar(50) DEFAULT NULL COMMENT 'engine type,such as spark,hive etc', `default_value` varchar(200) DEFAULT NULL COMMENT 'Adopted when user does not set key', `validate_type` varchar(50) DEFAULT NULL COMMENT 'Validate type, one of the following: None, NumInterval, FloatInterval, Include, Regex, OPF, Custom Rules', - `validate_range` varchar(100) DEFAULT NULL COMMENT 'Validate range', + `validate_range` varchar(50) DEFAULT NULL COMMENT 'Validate range', `is_hidden` tinyint(1) DEFAULT NULL COMMENT 'Whether it is hidden from user. If set to 1(true), then user cannot modify, however, it could still be used in back-end', `is_advanced` tinyint(1) DEFAULT NULL COMMENT 'Whether it is an advanced parameter. If set to 1(true), parameters would be displayed only when user choose to do so', `level` tinyint(1) DEFAULT NULL COMMENT 'Basis for displaying sorting in the front-end. 
Higher the level is, higher the rank the parameter gets', - `unit` varchar(64) DEFAULT NULL, - PRIMARY KEY (`id`), - KEY `application_id` (`application_id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4; + `treeName` varchar(20) DEFAULT NULL COMMENT 'Reserved field, representing the subdirectory of engineType', + PRIMARY KEY (`id`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; --- ---------------------------- --- Table structure for linkis_config_tree --- ---------------------------- -DROP TABLE IF EXISTS `linkis_config_tree`; -CREATE TABLE `linkis_config_tree` ( +DROP TABLE IF EXISTS `linkis_configuration_config_value`; +CREATE TABLE linkis_configuration_config_value( `id` bigint(20) NOT NULL AUTO_INCREMENT, - `parent_id` bigint(20) DEFAULT NULL COMMENT 'Parent ID', - `name` varchar(50) DEFAULT NULL COMMENT 'Application name or category name under general configuration', - `description` varchar(200) DEFAULT NULL, - `application_id` bigint(20) DEFAULT NULL COMMENT 'Same as id(in table linkis_application), except that it cannot be the id of creator', + `configkey_id` bigint(20), + `config_value` varchar(50), + `config_label_id`int(20), PRIMARY KEY (`id`), - KEY `application_id` (`application_id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4; -SET FOREIGN_KEY_CHECKS=0; + UNIQUE INDEX(`configkey_id`, `config_label_id`) +)ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; + +DROP TABLE IF EXISTS `linkis_configuration_category`; +CREATE TABLE `linkis_configuration_category` ( + `id` int(20) NOT NULL AUTO_INCREMENT, + `label_id` int(20) NOT NULL, + `level` int(20) NOT NULL, + `description` varchar(200), + `tag` varchar(200), + `update_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP, + `create_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP, + PRIMARY KEY (`id`), + UNIQUE INDEX(`label_id`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; -- ---------------------------- -- Table structure for linkis_task @@ -176,6 +50,7 @@ CREATE TABLE `linkis_task` ( 
`instance` varchar(50) DEFAULT NULL COMMENT 'An instance of Entrance, consists of IP address of the entrance server and port', `exec_id` varchar(50) DEFAULT NULL COMMENT 'execution ID, consists of jobID(generated by scheduler), executeApplicationName , creator and instance', `um_user` varchar(50) DEFAULT NULL COMMENT 'User name', + `submit_user` varchar(50) DEFAULT NULL COMMENT 'submitUser name', `execution_code` text COMMENT 'Run script. When exceeding 6000 lines, script would be stored in HDFS and its file path would be stored in database', `progress` float DEFAULT NULL COMMENT 'Script execution progress, between zero and one', `log_path` varchar(200) DEFAULT NULL COMMENT 'File path of the log files', @@ -191,63 +66,15 @@ CREATE TABLE `linkis_task` ( `script_path` varchar(200) DEFAULT NULL COMMENT 'Path of the script in workspace', `params` text COMMENT 'Configuration item of the parameters', `engine_instance` varchar(50) DEFAULT NULL COMMENT 'An instance of engine, consists of IP address of the engine server and port', + `task_resource` varchar(1024) DEFAULT NULL, `engine_start_time` time DEFAULT NULL, + `label_json` varchar(200) DEFAULT NULL COMMENT 'label json', PRIMARY KEY (`id`), KEY `created_time` (`created_time`), KEY `um_user` (`um_user`) ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4; -DROP TABLE IF EXISTS `linkis_em_resource_meta_data`; -CREATE TABLE `linkis_em_resource_meta_data` ( - `id` int(11) NOT NULL AUTO_INCREMENT, - `em_application_name` varchar(500) COLLATE utf8_bin DEFAULT NULL, - `em_instance` varchar(500) COLLATE utf8_bin DEFAULT NULL, - `total_resource` varchar(2000) COLLATE utf8_bin DEFAULT NULL, - `protected_resource` varchar(2000) COLLATE utf8_bin DEFAULT NULL, - `resource_policy` varchar(500) COLLATE utf8_bin DEFAULT NULL, - `used_resource` varchar(2000) COLLATE utf8_bin DEFAULT NULL, - `left_resource` varchar(2000) COLLATE utf8_bin DEFAULT NULL, - `locked_resource` varchar(2000) COLLATE utf8_bin DEFAULT NULL, - `register_time` bigint(20) 
DEFAULT NULL, - PRIMARY KEY (`id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; - -DROP TABLE IF EXISTS `linkis_resource_lock`; -CREATE TABLE `linkis_resource_lock` ( - `id` int(11) NOT NULL AUTO_INCREMENT, - `user` varchar(50) COLLATE utf8_bin DEFAULT NULL, - `em_application_name` varchar(50) COLLATE utf8_bin DEFAULT NULL, - `em_instance` varchar(50) COLLATE utf8_bin DEFAULT NULL, - PRIMARY KEY (`id`), - UNIQUE KEY `lock_unique` (`user`,`em_application_name`,`em_instance`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; - -DROP TABLE IF EXISTS `linkis_user_resource_meta_data`; -CREATE TABLE `linkis_user_resource_meta_data` ( - `id` int(11) NOT NULL AUTO_INCREMENT, - `user` varchar(500) COLLATE utf8_bin DEFAULT NULL, - `ticket_id` varchar(500) COLLATE utf8_bin DEFAULT NULL, - `creator` varchar(500) COLLATE utf8_bin DEFAULT NULL, - `em_application_name` varchar(500) COLLATE utf8_bin DEFAULT NULL, - `em_instance` varchar(500) COLLATE utf8_bin DEFAULT NULL, - `engine_application_name` varchar(500) COLLATE utf8_bin DEFAULT NULL, - `engine_instance` varchar(500) COLLATE utf8_bin DEFAULT NULL, - `user_locked_resource` varchar(5000) COLLATE utf8_bin DEFAULT NULL, - `user_used_resource` varchar(5000) COLLATE utf8_bin DEFAULT NULL, - `resource_type` varchar(500) COLLATE utf8_bin DEFAULT NULL, - `locked_time` bigint(20) DEFAULT NULL, - `used_time` bigint(20) DEFAULT NULL, - PRIMARY KEY (`id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; - -DROP TABLE IF EXISTS `linkis_em_meta_data`; -CREATE TABLE `linkis_em_meta_data` ( - `id` int(11) NOT NULL AUTO_INCREMENT, - `em_name` varchar(500) COLLATE utf8_bin DEFAULT NULL, - `resource_request_policy` varchar(500) COLLATE utf8_bin DEFAULT NULL, - PRIMARY KEY (`id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; SET FOREIGN_KEY_CHECKS=0; @@ -473,6 +300,102 @@ CREATE TABLE `linkis_mdq_table_info` ( PRIMARY KEY (`id`) ) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; +-- 
---------------------------- +-- Table structure for linkis_role +-- ---------------------------- +DROP TABLE IF EXISTS `linkis_role`; +CREATE TABLE `linkis_role` ( + `id` bigint(20) NOT NULL AUTO_INCREMENT, + `name` varchar(255) COLLATE utf8_bin DEFAULT NULL, + `chinese_name` varchar(255) COLLATE utf8_bin DEFAULT NULL, + `description` varchar(255) COLLATE utf8_bin DEFAULT NULL, + PRIMARY KEY (`id`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; + +-- ---------------------------- +-- Table structure for linkis_user_role +-- ---------------------------- +DROP TABLE IF EXISTS `linkis_user_role`; + CREATE TABLE `linkis_user_role` ( + `user_id` bigint(20) NOT NULL, + `role_id` bigint(20) NOT NULL +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; + +-- ---------------------------- +-- Table structure for linkis_cs_context_map +-- ---------------------------- +DROP TABLE IF EXISTS `linkis_cs_context_map`; +CREATE TABLE `linkis_cs_context_map` ( + `id` int(11) NOT NULL AUTO_INCREMENT, + `key` varchar(128) DEFAULT NULL, + `context_scope` varchar(32) DEFAULT NULL, + `context_type` varchar(32) DEFAULT NULL, + `props` text, + `value` text, + `context_id` int(11) DEFAULT NULL, + `keywords` varchar(255) DEFAULT NULL, + PRIMARY KEY (`id`), + UNIQUE KEY `key` (`key`,`context_id`,`context_type`), + KEY `keywords` (`keywords`(191)) +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4; + +-- ---------------------------- +-- Table structure for linkis_cs_context_map_listener +-- ---------------------------- +DROP TABLE IF EXISTS `linkis_cs_context_map_listener`; +CREATE TABLE `linkis_cs_context_map_listener` ( + `id` int(11) NOT NULL AUTO_INCREMENT, + `listener_source` varchar(255) DEFAULT NULL, + `key_id` int(11) DEFAULT NULL, + PRIMARY KEY (`id`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4; + +-- ---------------------------- +-- Table structure for linkis_cs_context_history +-- ---------------------------- +DROP TABLE IF EXISTS `linkis_cs_context_history`; +CREATE TABLE 
`linkis_cs_context_history` ( + `id` int(11) NOT NULL AUTO_INCREMENT, + `context_id` int(11) DEFAULT NULL, + `source` text, + `context_type` varchar(32) DEFAULT NULL, + `history_json` text, + `keyword` varchar(255) DEFAULT NULL, + PRIMARY KEY (`id`), + KEY `keyword` (`keyword`(191)) +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4; + +-- ---------------------------- +-- Table structure for linkis_cs_context_id +-- ---------------------------- +DROP TABLE IF EXISTS `linkis_cs_context_id`; +CREATE TABLE `linkis_cs_context_id` ( + `id` int(11) NOT NULL AUTO_INCREMENT, + `user` varchar(32) DEFAULT NULL, + `application` varchar(32) DEFAULT NULL, + `source` varchar(255) DEFAULT NULL, + `expire_type` varchar(32) DEFAULT NULL, + `expire_time` datetime DEFAULT NULL, + `instance` varchar(32) DEFAULT NULL, + `backup_instance` varchar(255) DEFAULT NULL, + PRIMARY KEY (`id`), + KEY `instance` (`instance`), + KEY `backup_instance` (`backup_instance`(191)), + KEY `instance_2` (`instance`,`backup_instance`(191)) +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4; + +-- ---------------------------- +-- Table structure for linkis_cs_context_listener +-- ---------------------------- +DROP TABLE IF EXISTS `linkis_cs_context_listener`; +CREATE TABLE `linkis_cs_context_listener` ( + `id` int(11) NOT NULL AUTO_INCREMENT, + `listener_source` varchar(255) DEFAULT NULL, + `context_id` int(11) DEFAULT NULL, + PRIMARY KEY (`id`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4; + + drop table if exists `linkis_resources`; CREATE TABLE if not exists `linkis_resources` ( `id` bigint(20) NOT NULL AUTO_INCREMENT COMMENT '主键', @@ -566,153 +489,256 @@ CREATE TABLE if not exists `linkis_resources_task` ( PRIMARY KEY (`id`) ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4; --- ---------------------------- --- Table structure for linkis_cs_context_map --- ---------------------------- -DROP TABLE IF EXISTS `linkis_cs_context_map`; -CREATE TABLE `linkis_cs_context_map` ( + + +drop table if exists linkis_bml_project; +create table if not 
exists linkis_bml_project( + `id` int(10) NOT NULL AUTO_INCREMENT, + `name` varchar(128) DEFAULT NULL, + `system` varchar(64) not null default 'dss', + `source` varchar(1024) default null, + `description` varchar(1024) default null, + `creator` varchar(128) not null, + `enabled` tinyint default 1, + `create_time` datetime DEFAULT now(), + unique key(`name`), +PRIMARY KEY (`id`) +)ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin ROW_FORMAT=COMPACT; + + + +drop table if exists linkis_bml_project_user; +create table if not exists linkis_bml_project_user( + `id` int(10) NOT NULL AUTO_INCREMENT, + `project_id` int(10) NOT NULL, + `username` varchar(64) DEFAULT NULL, + `priv` int(10) not null default 7, -- rwx 421 相加, 8是管理员,可以为其他用户授权 + `creator` varchar(128) not null, + `create_time` datetime DEFAULT now(), + `expire_time` datetime default null, + unique key user_project(`username`, `project_id`), +PRIMARY KEY (`id`) +)ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin ROW_FORMAT=COMPACT; + + +drop table if exists linkis_bml_project_resource; +create table if not exists linkis_bml_project_resource( + `id` int(10) NOT NULL AUTO_INCREMENT, + `project_id` int(10) NOT NULL, + `resource_id` varchar(128) DEFAULT NULL, +PRIMARY KEY (`id`) +)ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin ROW_FORMAT=COMPACT; + +DROP TABLE IF EXISTS `linkis_manager_service_instance`; + +CREATE TABLE `linkis_manager_service_instance` ( `id` int(11) NOT NULL AUTO_INCREMENT, - `key` varchar(128) DEFAULT NULL, - `context_scope` varchar(32) DEFAULT NULL, - `context_type` varchar(32) DEFAULT NULL, - `props` text, - `value` text, - `context_id` int(11) DEFAULT NULL, - `keywords` varchar(255) DEFAULT NULL, + `instance` varchar(64) COLLATE utf8_bin DEFAULT NULL, + `name` varchar(32) COLLATE utf8_bin DEFAULT NULL, + `owner` varchar(32) COLLATE utf8_bin DEFAULT NULL, + `mark` varchar(32) COLLATE utf8_bin DEFAULT NULL, + `update_time` datetime DEFAULT CURRENT_TIMESTAMP, + `create_time` datetime 
DEFAULT CURRENT_TIMESTAMP, + `updator` varchar(32) COLLATE utf8_bin DEFAULT NULL, + `creator` varchar(32) COLLATE utf8_bin DEFAULT NULL, PRIMARY KEY (`id`), - UNIQUE KEY `key` (`key`,`context_id`,`context_type`), - KEY `keywords` (`keywords`(191)) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4; + UNIQUE KEY `instance` (`instance`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; --- ---------------------------- --- Table structure for linkis_cs_context_map_listener --- ---------------------------- -DROP TABLE IF EXISTS `linkis_cs_context_map_listener`; -CREATE TABLE `linkis_cs_context_map_listener` ( +DROP TABLE IF EXISTS `linkis_manager_linkis_resources`; + +CREATE TABLE `linkis_manager_linkis_resources` ( `id` int(11) NOT NULL AUTO_INCREMENT, - `listener_source` varchar(255) DEFAULT NULL, - `key_id` int(11) DEFAULT NULL, + `max_resource` varchar(255) COLLATE utf8_bin DEFAULT NULL, + `min_resource` varchar(255) COLLATE utf8_bin DEFAULT NULL, + `used_resource` varchar(255) COLLATE utf8_bin DEFAULT NULL, + `left_resource` varchar(255) COLLATE utf8_bin DEFAULT NULL, + `expected_resource` varchar(255) COLLATE utf8_bin DEFAULT NULL, + `locked_resource` varchar(255) COLLATE utf8_bin DEFAULT NULL, + `resourceType` varchar(255) COLLATE utf8_bin DEFAULT NULL, + `ticketId` varchar(255) COLLATE utf8_bin DEFAULT NULL, + `update_time` datetime DEFAULT CURRENT_TIMESTAMP, + `create_time` datetime DEFAULT CURRENT_TIMESTAMP, + `updator` varchar(255) COLLATE utf8_bin DEFAULT NULL, + `creator` varchar(255) COLLATE utf8_bin DEFAULT NULL, PRIMARY KEY (`id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4; +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; --- ---------------------------- --- Table structure for linkis_cs_context_history --- ---------------------------- -DROP TABLE IF EXISTS `linkis_cs_context_history`; -CREATE TABLE `linkis_cs_context_history` ( - `id` int(11) NOT NULL AUTO_INCREMENT, - `context_id` int(11) DEFAULT NULL, - `source` text, - `context_type` varchar(32) 
DEFAULT NULL, - `history_json` text, - `keyword` varchar(255) DEFAULT NULL, - PRIMARY KEY (`id`), - KEY `keyword` (`keyword`(191)) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4; +DROP TABLE IF EXISTS `linkis_manager_lock`; --- ---------------------------- --- Table structure for linkis_cs_context_id --- ---------------------------- -DROP TABLE IF EXISTS `linkis_cs_context_id`; -CREATE TABLE `linkis_cs_context_id` ( +CREATE TABLE `linkis_manager_lock` ( `id` int(11) NOT NULL AUTO_INCREMENT, - `user` varchar(32) DEFAULT NULL, - `application` varchar(32) DEFAULT NULL, - `source` varchar(255) DEFAULT NULL, - `expire_type` varchar(32) DEFAULT NULL, - `expire_time` datetime DEFAULT NULL, - `instance` varchar(32) DEFAULT NULL, - `backup_instance` varchar(255) DEFAULT NULL, + `lock_object` varchar(255) COLLATE utf8_bin DEFAULT NULL, + `time_out` longtext COLLATE utf8_bin, + `update_time` datetime DEFAULT CURRENT_TIMESTAMP, + `create_time` datetime DEFAULT CURRENT_TIMESTAMP, + PRIMARY KEY (`id`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; + +DROP TABLE IF EXISTS `linkis_external_resource_provider`; +CREATE TABLE `linkis_external_resource_provider` ( + `id` int(10) NOT NULL AUTO_INCREMENT, + `resource_type` varchar(32) NOT NULL, + `name` varchar(32) NOT NULL, + `labels` varchar(32) DEFAULT NULL, + `config` text NOT NULL, + PRIMARY KEY (`id`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8; + +DROP TABLE IF EXISTS `linkis_manager_engine_em`; +CREATE TABLE `linkis_manager_engine_em` ( + `id` int(20) NOT NULL AUTO_INCREMENT, + `engine_instance` varchar(64) COLLATE utf8_bin DEFAULT NULL, + `em_instance` varchar(64) COLLATE utf8_bin DEFAULT NULL, + `update_time` datetime DEFAULT CURRENT_TIMESTAMP, + `create_time` datetime DEFAULT CURRENT_TIMESTAMP, + PRIMARY KEY (`id`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; + +DROP TABLE IF EXISTS `linkis_manager_label`; + +CREATE TABLE `linkis_manager_label` ( + `id` int(20) NOT NULL AUTO_INCREMENT, + `label_key` varchar(32) 
COLLATE utf8_bin NOT NULL, + `label_value` varchar(255) COLLATE utf8_bin NOT NULL, + `label_feature` varchar(16) COLLATE utf8_bin NOT NULL, + `label_value_size` int(20) NOT NULL, + `update_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP, + `create_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP, PRIMARY KEY (`id`), - KEY `instance` (`instance`), - KEY `backup_instance` (`backup_instance`(191)), - KEY `instance_2` (`instance`,`backup_instance`(191)) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4; + UNIQUE KEY `label_key_value` (`label_key`,`label_value`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; --- ---------------------------- --- Table structure for linkis_cs_context_listener --- ---------------------------- -DROP TABLE IF EXISTS `linkis_cs_context_listener`; -CREATE TABLE `linkis_cs_context_listener` ( - `id` int(11) NOT NULL AUTO_INCREMENT, - `listener_source` varchar(255) DEFAULT NULL, - `context_id` int(11) DEFAULT NULL, +DROP TABLE IF EXISTS `linkis_manager_label_value_relation`; + +CREATE TABLE `linkis_manager_label_value_relation` ( + `label_value_key` varchar(255) COLLATE utf8_bin NOT NULL, + `label_value_content` varchar(255) COLLATE utf8_bin DEFAULT NULL, + `label_id` int(20) DEFAULT NULL, + `update_time` datetime DEFAULT CURRENT_TIMESTAMP, + `create_time` datetime DEFAULT CURRENT_TIMESTAMP, + UNIQUE KEY `label_value_key_label_id` (`label_value_key`,`label_id`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; + +DROP TABLE IF EXISTS `linkis_manager_label_resource`; +CREATE TABLE `linkis_manager_label_resource` ( + `id` int(20) NOT NULL AUTO_INCREMENT, + `label_id` int(20) DEFAULT NULL, + `resource_id` int(20) DEFAULT NULL, + `update_time` datetime DEFAULT CURRENT_TIMESTAMP, + `create_time` datetime DEFAULT CURRENT_TIMESTAMP, PRIMARY KEY (`id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4; +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; +DROP TABLE IF EXISTS `linkis_manager_label_service_instance`; +CREATE TABLE 
`linkis_manager_label_service_instance` ( + `id` int(20) NOT NULL AUTO_INCREMENT, + `label_id` int(20) DEFAULT NULL, + `service_instance` varchar(64) COLLATE utf8_bin DEFAULT NULL, + `update_time` datetime DEFAULT CURRENT_TIMESTAMP, + `create_time` datetime DEFAULT CURRENT_TIMESTAMP, + PRIMARY KEY (`id`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; --- ---------------------------- --- Table structure for linkis_datasource --- ---------------------------- -CREATE TABLE IF NOT EXISTS `linkis_datasource` ( - `id` BIGINT(20) NOT NULL AUTO_INCREMENT, - `datasource_name` VARCHAR(100) NOT NULL COMMENT 'Data source name', - `datasource_type_id` BIGINT(20) DEFAULT NULL COMMENT 'Data source type id', - `datasource_desc` VARCHAR(200) DEFAULT NULL COMMENT 'Data source description', - `create_identify` VARCHAR(20) DEFAULT 'BDP' COMMENT 'Example: project name', - `create_system` VARCHAR(20) DEFAULT 'BDP' COMMENT 'Create system', - `create_user` VARCHAR(50) DEFAULT NULL COMMENT 'Creator', - `parameter` TEXT COMMENT 'Connect parameters', - `create_time` TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, - `modify_user` VARCHAR(50) DEFAULT NULL COMMENT 'Modify user', - `modify_time` DATETIME DEFAULT NULL COMMENT 'Modify time', - `datasource_env_id` BIGINT(20) DEFAULT NULL, - PRIMARY KEY (`id`) - ) ENGINE=InnoDB AUTO_INCREMENT=140 DEFAULT CHARSET=utf8; - --- ---------------------------- --- Table structure for linkis_datasource_env --- ---------------------------- -CREATE TABLE IF NOT EXISTS `linkis_datasource_env` ( - `id` BIGINT(20) NOT NULL AUTO_INCREMENT, - `env_name` VARCHAR(100) NOT NULL COMMENT 'Environment name', - `env_desc` VARCHAR(200) DEFAULT NULL COMMENT 'Description', - `create_user` VARCHAR(50) DEFAULT NULL COMMENT 'Creator', - `parameter` TEXT NOT NULL COMMENT 'Connect parameters', - `create_time` TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, - `modify_user` VARCHAR(50) DEFAULT NULL COMMENT 'Modify user', 
- `modify_time` DATETIME DEFAULT NULL COMMENT 'Modify time', - PRIMARY KEY (`id`) - ) ENGINE=InnoDB AUTO_INCREMENT=108 DEFAULT CHARSET=utf8; - --- ---------------------------- --- Table structure for linkis_datasource_type_key --- ---------------------------- -CREATE TABLE IF NOT EXISTS `linkis_datasource_type_key` ( - `id` BIGINT(20) NOT NULL AUTO_INCREMENT, - `key` VARCHAR(50) DEFAULT NULL COMMENT 'Key of variable', - `description` VARCHAR(200) DEFAULT NULL COMMENT 'Description', - `name` VARCHAR(50) DEFAULT NULL COMMENT 'Option name of column in page', - `data_source_type_id` BIGINT(20) DEFAULT NULL COMMENT 'Type id', - `require` TINYINT(1) DEFAULT '0', - `scope` VARCHAR(50) DEFAULT NULL COMMENT 'Scope', - `default_value` VARCHAR(200) DEFAULT NULL COMMENT 'Default value', - `value_type` VARCHAR(50) DEFAULT NULL COMMENT 'Value type', - `value_regex` VARCHAR(100) DEFAULT NULL COMMENT 'Value regex', - `ref_id` BIGINT(20) DEFAULT NULL COMMENT 'Related id', - `ref_value` VARCHAR(100) DEFAULT NULL COMMENT 'Related value', - PRIMARY KEY (`id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4; +DROP TABLE IF EXISTS `linkis_manager_label_user`; +CREATE TABLE `linkis_manager_label_user` ( + `id` int(20) NOT NULL AUTO_INCREMENT, + `username` varchar(255) COLLATE utf8_bin DEFAULT NULL, + `label_id` int(20) DEFAULT NULL, + `update_time` datetime DEFAULT CURRENT_TIMESTAMP, + `create_time` datetime DEFAULT CURRENT_TIMESTAMP, + PRIMARY KEY (`id`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; + + +DROP TABLE IF EXISTS `linkis_manager_metrics_history`; + +CREATE TABLE `linkis_manager_metrics_history` ( + `instance_status` int(20) DEFAULT NULL, + `overload` varchar(255) COLLATE utf8_bin DEFAULT NULL, + `heartbeat_msg` varchar(255) COLLATE utf8_bin DEFAULT NULL, + `healthy_status` int(20) DEFAULT NULL, + `create_time` datetime DEFAULT CURRENT_TIMESTAMP, + `creator` varchar(255) COLLATE utf8_bin DEFAULT NULL, + `ticketID` varchar(255) COLLATE utf8_bin DEFAULT NULL, + `serviceName` 
varchar(255) COLLATE utf8_bin DEFAULT NULL, + `instance` varchar(255) COLLATE utf8_bin DEFAULT NULL +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; + +DROP TABLE IF EXISTS `linkis_manager_service_instance_metrics`; + +CREATE TABLE `linkis_manager_service_instance_metrics` ( + `instance` varchar(32) COLLATE utf8_bin NOT NULL, + `instance_status` int(11) DEFAULT NULL, + `overload` varchar(255) COLLATE utf8_bin DEFAULT NULL, + `heartbeat_msg` text COLLATE utf8_bin DEFAULT NULL, + `healthy_status` varchar(255) COLLATE utf8_bin DEFAULT NULL, + `update_time` datetime DEFAULT CURRENT_TIMESTAMP, + `create_time` datetime DEFAULT CURRENT_TIMESTAMP, + PRIMARY KEY (`instance`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; + +DROP TABLE IF EXISTS `linkis_engine_conn_plugin_bml_resources`; +CREATE TABLE `linkis_engine_conn_plugin_bml_resources` ( + `id` bigint(20) NOT NULL AUTO_INCREMENT COMMENT '主键', + `engine_conn_type` varchar(100) NOT NULL COMMENT '引擎类型', + `version` varchar(100) COMMENT '版本', + `file_name` varchar(255) COMMENT '文件名', + `file_size` bigint(20) DEFAULT 0 NOT NULL COMMENT '文件大小', + `last_modified` bigint(20) COMMENT '文件更新时间', + `bml_resource_id` varchar(100) NOT NULL COMMENT '所属系统', + `bml_resource_version` varchar(200) NOT NULL COMMENT '资源所属者', + `create_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '创建时间', + `last_update_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '更新时间', + PRIMARY KEY (`id`) +) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8mb4; + +DROP TABLE IF EXISTS `linkis_instance_label`; +CREATE TABLE `linkis_instance_label` ( + `id` int(20) NOT NULL AUTO_INCREMENT, + `label_key` varchar(32) COLLATE utf8_bin NOT NULL COMMENT 'string key', + `label_value` varchar(255) COLLATE utf8_bin NOT NULL COMMENT 'string value', + `label_feature` varchar(16) COLLATE utf8_bin NOT NULL COMMENT 'store the feature of label, but it may be redundant', + `label_value_size` int(20) NOT NULL COMMENT 'size of key -> value map', 
+ `update_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'update unix timestamp', + `create_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'update unix timestamp', + PRIMARY KEY (`id`), + UNIQUE KEY `label_key_value` (`label_key`,`label_value`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; + + +DROP TABLE IF EXISTS `linkis_instance_label_value_relation`; +CREATE TABLE `linkis_instance_label_value_relation` ( + `label_value_key` varchar(255) COLLATE utf8_bin NOT NULL COMMENT 'value key', + `label_value_content` varchar(255) COLLATE utf8_bin DEFAULT NULL COMMENT 'value content', + `label_id` int(20) DEFAULT NULL COMMENT 'id reference linkis_instance_label -> id', + `update_time` datetime DEFAULT CURRENT_TIMESTAMP COMMENT 'update unix timestamp', + `create_time` datetime DEFAULT CURRENT_TIMESTAMP COMMENT 'create unix timestamp', + UNIQUE KEY `label_value_key_label_id` (`label_value_key`,`label_id`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; + +DROP TABLE IF EXISTS `linkis_instance_label_relation`; +CREATE TABLE `linkis_instance_label_relation` ( + `id` int(20) NOT NULL AUTO_INCREMENT, + `label_id` int(20) DEFAULT NULL COMMENT 'id reference linkis_instance_label -> id', + `service_instance` varchar(64) NOT NULL COLLATE utf8_bin COMMENT 'structure like ${host|machine}:${port}', + `update_time` datetime DEFAULT CURRENT_TIMESTAMP COMMENT 'update unix timestamp', + `create_time` datetime DEFAULT CURRENT_TIMESTAMP COMMENT 'create unix timestamp', + PRIMARY KEY (`id`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; + + +DROP TABLE IF EXISTS `linkis_instance_info`; +CREATE TABLE `linkis_instance_info` ( + `id` int(11) NOT NULL AUTO_INCREMENT, + `instance` varchar(128) COLLATE utf8_bin DEFAULT NULL COMMENT 'structure like ${host|machine}:${port}', + `name` varchar(128) COLLATE utf8_bin DEFAULT NULL COMMENT 'equal application name in registry', + `update_time` datetime DEFAULT CURRENT_TIMESTAMP COMMENT 'update unix 
timestamp', + `create_time` datetime DEFAULT CURRENT_TIMESTAMP COMMENT 'create unix timestamp', + PRIMARY KEY (`id`), + UNIQUE KEY `instance` (`instance`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; --- ---------------------------- --- Table structure for linkis_datasource_type --- ---------------------------- -CREATE TABLE IF NOT EXISTS `linkis_datasource_type` ( - `id` BIGINT(20) NOT NULL AUTO_INCREMENT, - `icon` VARCHAR(50) DEFAULT NULL COMMENT 'Icon', - `description` VARCHAR(200) DEFAULT NULL COMMENT 'Description', - `name` VARCHAR(50) DEFAULT NULL COMMENT 'Name', - PRIMARY KEY (`id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4; --- ---------------------------- --- Table structure for linkis_datasource_type_env --- ---------------------------- -CREATE TABLE IF NOT EXISTS `linkis_datasource_type_env` ( - `id` BIGINT(20) NOT NULL AUTO_INCREMENT, - `data_source_type_id` BIGINT(20) DEFAULT NULL COMMENT 'Type id', - `env_id` BIGINT(20) DEFAULT NULL COMMENT 'Environment id', - PRIMARY KEY (`id`) -)ENGINE=InnoDB DEFAULT CHARSET=utf8mb4; \ No newline at end of file diff --git a/db/linkis_dml.sql b/db/linkis_dml.sql index cec6d5495a07d59892e73df984a1f045a527afd9..cccd2d8f461aa19d09b7de6b7f5cd7d288f2555c 100644 --- a/db/linkis_dml.sql +++ b/db/linkis_dml.sql @@ -1,240 +1,318 @@ -INSERT INTO `linkis_application` (`id`, `name`, `chinese_name`, `description`) VALUES (0, '通用设置', NULL, NULL); -INSERT INTO `linkis_application` (`id`, `name`, `chinese_name`, `description`) VALUES (0, 'spark', NULL, NULL); -INSERT INTO `linkis_application` (`id`, `name`, `chinese_name`, `description`) VALUES (0, 'IDE', NULL, NULL); -INSERT INTO `linkis_application` (`id`, `name`, `chinese_name`, `description`) VALUES (0, 'hive', NULL, NULL); -INSERT INTO `linkis_application` (`id`, `name`, `chinese_name`, `description`) VALUES (0, 'storage', NULL, NULL); -INSERT INTO `linkis_application` (`id`, `name`, `chinese_name`, `description`) VALUES (0, 'python', NULL, NULL); -INSERT INTO 
`linkis_application` (`id`, `name`, `chinese_name`, `description`) VALUES (0, 'tidb', NULL, NULL); - -SELECT @application_id := id from linkis_application where name = '通用设置'; -INSERT INTO `linkis_config_tree` (`id`, `parent_id`, `name`, `description`, `application_id`) VALUES (0, '0', '队列资源', NULL, @application_id); -INSERT INTO `linkis_config_tree` (`id`, `parent_id`, `name`, `description`, `application_id`) VALUES (0, '0', '预热机制', NULL, @application_id); -INSERT INTO `linkis_config_tree` (`id`, `parent_id`, `name`, `description`, `application_id`) VALUES (0, '0', '清理机制', NULL, @application_id); -INSERT INTO `linkis_config_tree` (`id`, `parent_id`, `name`, `description`, `application_id`) VALUES (0, '0', '引擎设置', NULL, @application_id); -INSERT INTO `linkis_config_tree` (`id`, `parent_id`, `name`, `description`, `application_id`) VALUES (0, '0', '驱动器资源', NULL, @application_id); - -SELECT @application_id := id from linkis_application where name = 'spark'; -INSERT INTO `linkis_config_tree` (`id`, `parent_id`, `name`, `description`, `application_id`) VALUES (0, '0', 'spark资源设置', NULL, @application_id); -INSERT INTO `linkis_config_tree` (`id`, `parent_id`, `name`, `description`, `application_id`) VALUES (0, '0', 'spark引擎设置', NULL, @application_id); - -SELECT @application_id := id from linkis_application where name = 'hive'; -INSERT INTO `linkis_config_tree` (`id`, `parent_id`, `name`, `description`, `application_id`) VALUES (0, '0', 'hive引擎设置', NULL, @application_id); -INSERT INTO `linkis_config_tree` (`id`, `parent_id`, `name`, `description`, `application_id`) VALUES (0, '0', 'hive资源设置', NULL, @application_id); - -SELECT @application_id := id from linkis_application where name = 'python'; -INSERT INTO `linkis_config_tree` (`id`, `parent_id`, `name`, `description`, `application_id`) VALUES (0, '0', 'python引擎设置', NULL, @application_id); -INSERT INTO `linkis_config_tree` (`id`, `parent_id`, `name`, `description`, `application_id`) VALUES (0, '0', 'python资源设置', NULL, 
@application_id); - -SELECT @application_id := id from linkis_application where name = 'tidb'; -INSERT INTO `linkis_config_tree` (`id`, `parent_id`, `name`, `description`, `application_id`) VALUES (0, '0', 'tidb设置', NULL, @application_id); -INSERT INTO `linkis_config_tree` (`id`, `parent_id`, `name`, `description`, `application_id`) VALUES (0, '0', 'spark引擎设置', NULL, @application_id); -INSERT INTO `linkis_config_tree` (`id`, `parent_id`, `name`, `description`, `application_id`) VALUES (0, '0', 'spark资源设置', NULL, @application_id); - - - -SELECT @application_id := id from linkis_application where name = '通用设置'; -INSERT INTO `linkis_config_key` (`id`, `key`, `description`, `name`, `application_id`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`) VALUES (0, 'wds.linkis.yarnqueue', 'yarn队列名', 'yarn队列名', @application_id, 'default', 'None', NULL, '0', '0', '1'); -INSERT INTO `linkis_config_key` (`id`, `key`, `description`, `name`, `application_id`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`) VALUES (0, 'wds.linkis.preheating.time', '预热时间', '预热时间', @application_id, '9:00', 'None', NULL, '0', '0', '1'); -INSERT INTO `linkis_config_key` (`id`, `key`, `description`, `name`, `application_id`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`) VALUES (0, 'wds.linkis.tmpfile.clean.time', 'tmp文件清理时间', 'tmp文件清理时间', @application_id, '10:00', 'None', NULL, '0', '0', '1'); - -INSERT INTO `linkis_config_key` (`id`, `key`, `description`, `name`, `application_id`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`) VALUES (0, 'wds.linkis.yarnqueue.cores.max', '取值范围:1-500,单位:个', '队列CPU使用上限', @application_id, '150', 'Regex', '^(?:[1-9]\\d?|[1234]\\d{2}|500)$', '0', '0', '1'); -INSERT INTO `linkis_config_key` (`id`, `key`, `description`, `name`, `application_id`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, 
`is_advanced`, `level`, `unit`) VALUES (0, 'wds.linkis.yarnqueue.memory.max', '取值范围:1-1000,单位:G', '队列内存使用上限', @application_id, '300G', 'Regex', '^([1-9]\\d{0,2}|1000)(G|g)$', '0', '0', '1', 'G'); -INSERT INTO `linkis_config_key` (`id`, `key`, `description`, `name`, `application_id`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `unit`) VALUES (0, 'wds.linkis.client.memory.max', '取值范围:1-1000,单位:G', '驱动器内存使用上限', @application_id, '20G', 'Regex', '^([1-9]\\d{0,1}|100)(G|g)$', '0', '0', '1', 'G'); -INSERT INTO `linkis_config_key` (`id`, `key`, `description`, `name`, `application_id`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`) VALUES (0, 'wds.linkis.instance', '范围:1-20,单位:个', '引擎最大并发数', @application_id, '10', 'NumInterval', '[1,20]', '0', '0', '1'); - - -SELECT @application_id := id from linkis_application where name = 'IDE'; -INSERT INTO `linkis_config_key` (`id`, `key`, `description`, `name`, `application_id`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`) VALUES (0, 'spark.executor.instances', '取值范围:1-40,单位:个', '执行器实例最大并发数', @application_id, '2', 'NumInterval', '[1,40]', '0', '0', '2'); -INSERT INTO `linkis_config_key` (`id`, `key`, `description`, `name`, `application_id`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`) VALUES (0, 'spark.executor.cores', '取值范围:1-8,单位:个', '执行器核心个数', @application_id, '2', 'NumInterval', '[1,2]', '1', '0', '1'); -INSERT INTO `linkis_config_key` (`id`, `key`, `description`, `name`, `application_id`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`) VALUES (0, 'spark.executor.memory', '取值范围:3-15,单位:G', '执行器内存大小', @application_id, '3', 'NumInterval', '[3,15]', '0', '0', '3'); -INSERT INTO `linkis_config_key` (`id`, `key`, `description`, `name`, `application_id`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, 
`is_advanced`, `level`) VALUES (0, 'spark.driver.cores', '取值范围:只能取1,单位:个', '驱动器核心个数', @application_id, '1', 'NumInterval', '[1,1]', '1', '1', '1'); -INSERT INTO `linkis_config_key` (`id`, `key`, `description`, `name`, `application_id`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`) VALUES (0, 'spark.driver.memory', '取值范围:1-15,单位:G', '驱动器内存大小', @application_id, '2', 'NumInterval', '[1,15]', '0', '0', '1'); -INSERT INTO `linkis_config_key` (`id`, `key`, `description`, `name`, `application_id`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`) VALUES (0, 'hive.client.memory', '取值范围:1-10,单位:G', 'hive引擎初始化内存大小', @application_id, '2', 'NumInterval', '[1,10]', '0', '0', '1'); -INSERT INTO `linkis_config_key` (`id`, `key`, `description`, `name`, `application_id`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`) VALUES (0, 'hive.client.java.opts', 'hive客户端进程参数', 'hive引擎启动时jvm参数', @application_id, '', 'None', NULL, '1', '1', '1'); - -INSERT INTO `linkis_config_key` (`id`, `key`, `description`, `name`, `application_id`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`) VALUES (0, 'wds.linkis.instance', '范围:1-3,单位:个', 'hive引擎最大并发数', @application_id, '3', 'NumInterval', '[1,3]', '0', '0', '1'); -INSERT INTO `linkis_config_key` (`id`, `key`, `description`, `name`, `application_id`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`) VALUES (0, 'wds.linkis.instance', '范围:1-3,单位:个', 'spark引擎最大并发数', @application_id, '3', 'NumInterval', '[1,3]', '0', '0', '1'); -INSERT INTO `linkis_config_key` (`id`, `key`, `description`, `name`, `application_id`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`) VALUES (0, 'wds.linkis.instance', '范围:1-3,单位:个', 'python引擎最大并发数', @application_id, '3', 'NumInterval', '[1,3]', '0', '0', '1'); -INSERT INTO 
`linkis_config_key` (`id`, `key`, `description`, `name`, `application_id`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`) VALUES (0, 'mapred.reduce.tasks', '范围:1-20,单位:个', 'reduce数', @application_id, '10', 'NumInterval', '[1,20]', '0', '1', '1'); -INSERT INTO `linkis_config_key` (`id`, `key`, `description`, `name`, `application_id`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`) VALUES (0, 'dfs.block.size', '取值范围:2-10,单位:G', 'map数据块大小', @application_id, '10', 'NumInterval', '[2,10]', '0', '1', '1'); -INSERT INTO `linkis_config_key` (`id`, `key`, `description`, `name`, `application_id`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`) VALUES (0, 'hive.exec.reduce.bytes.per.reducer', '取值范围:2-10,单位:G', 'reduce处理的数据量', @application_id, '10', 'NumInterval', '[2,10]', '0', '1', '1'); -INSERT INTO `linkis_config_key` (`id`, `key`, `description`, `name`, `application_id`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`) VALUES (0, 'python.java.client.memory', '取值范围:1-2,单位:G', 'python引擎初始化内存大小', @application_id, '1', 'NumInterval', '[1,2]', '0', '0', '1'); -INSERT INTO `linkis_config_key` (`id`, `key`, `description`, `name`, `application_id`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`) VALUES (0, 'wds.linkis.instance', '范围:1-3,单位:个', 'spark引擎最大并发数', @application_id, '1', 'NumInterval', '[1,3]', '0', '0', '1'); -INSERT INTO `linkis_config_key` (`id`, `key`, `description`, `name`, `application_id`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`) VALUES (0, 'spark.executor.instances', '取值范围:1-40,单位:个', '执行器实例最大并发数', @application_id, '2', 'NumInterval', '[1,40]', '0', '0', '2'); -INSERT INTO `linkis_config_key` (`id`, `key`, `description`, `name`, `application_id`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, 
`is_advanced`, `level`) VALUES (0, 'spark.executor.cores', '取值范围:1-8,单位:个', '执行器核心个数', @application_id, '2', 'NumInterval', '[1,2]', '1', '0', '1'); -INSERT INTO `linkis_config_key` (`id`, `key`, `description`, `name`, `application_id`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`) VALUES (0, 'spark.executor.memory', '取值范围:3-15,单位:G', '执行器内存大小', @application_id, '3', 'NumInterval', '[3,15]', '0', '0', '3'); -INSERT INTO `linkis_config_key` (`id`, `key`, `description`, `name`, `application_id`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`) VALUES (0, 'spark.driver.cores', '取值范围:只能取1,单位:个', '驱动器核心个数 ', @application_id, '1', 'NumInterval', '[1,1]', '1', '1', '1'); -INSERT INTO `linkis_config_key` (`id`, `key`, `description`, `name`, `application_id`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`) VALUES (0, 'spark.driver.memory', '取值范围:1-15,单位:G', '驱动器内存大小', @application_id, '2', 'NumInterval', '[1,15]', '0', '0', '1'); - -INSERT INTO `linkis_config_key` (`id`, `key`, `description`, `name`, `application_id`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`) VALUES (0, 'spark.tispark.pd.addresses', NULL, NULL, @application_id, 'pd0:2379', 'None', NULL, '0', '0', '1'); -INSERT INTO `linkis_config_key` (`id`, `key`, `description`, `name`, `application_id`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`) VALUES (0, 'spark.tispark.tidb.addr', NULL, NULL, @application_id, 'tidb', 'None', NULL, '0', '0', '1'); -INSERT INTO `linkis_config_key` (`id`, `key`, `description`, `name`, `application_id`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`) VALUES (0, 'spark.tispark.tidb.password', NULL, NULL, @application_id, NULL, 'None', NULL, '0', '0', '1'); -INSERT INTO `linkis_config_key` (`id`, `key`, `description`, `name`, 
`application_id`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`) VALUES (0, 'spark.tispark.tidb.port', NULL, NULL, @application_id, '4000', 'None', NULL, '0', '0', '1'); -INSERT INTO `linkis_config_key` (`id`, `key`, `description`, `name`, `application_id`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`) VALUES (0, 'spark.tispark.tidb.user', NULL, NULL, @application_id, 'root', 'None', NULL, '0', '0', '1'); - - -#---------------------------------------全局设置------------------ +-- 变量: +SET @SPARK_LABEL="spark-2.4.3"; +SET @HIVE_LABEL="hive-1.2.1"; +SET @PYTHON_LABEL="python-python2"; +SET @PIPELINE_LABEL="pipeline-*"; +SET @JDBC_LABEL="jdbc-4"; + +-- 衍生变量: +SET @SPARK_ALL=CONCAT('*-*,',@SPARK_LABEL); +SET @SPARK_IDE=CONCAT('*-IDE,',@SPARK_LABEL); +SET @SPARK_NODE=CONCAT('*-nodeexecution,',@SPARK_LABEL); +SET @SPARK_VISUALIS=CONCAT('*-Visualis,',@SPARK_LABEL); + +SET @HIVE_ALL=CONCAT('*-*,',@HIVE_LABEL); +SET @HIVE_IDE=CONCAT('*-IDE,',@HIVE_LABEL); +SET @HIVE_NODE=CONCAT('*-nodeexecution,',@HIVE_LABEL); + +SET @PYTHON_ALL=CONCAT('*-*,',@PYTHON_LABEL); +SET @PYTHON_IDE=CONCAT('*-IDE,',@PYTHON_LABEL); +SET @PYTHON_NODE=CONCAT('*-nodeexecution,',@PYTHON_LABEL); + +SET @PIPELINE_ALL=CONCAT('*-*,',@PIPELINE_LABEL); +SET @PIPELINE_IDE=CONCAT('*-IDE,',@PIPELINE_LABEL); + +SET @JDBC_ALL=CONCAT('*-*,',@JDBC_LABEL); +SET @JDBC_IDE=CONCAT('*-IDE,',@JDBC_LABEL); + + + +-- Configuration的默认Key +-- 全局设置 +INSERT INTO `linkis_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`) VALUES ('wds.linkis.rm.yarnqueue', 'yarn队列名', 'yarn队列名', 'default', 'None', NULL, '0', '0', '1', '队列资源'); +-- spark +INSERT INTO `linkis_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES 
('wds.linkis.rm.yarnqueue.instance.max', '取值范围:1-128,单位:个', 'yarn队列实例最大个数', '30', 'Regex', '^(?:[1-9]\\d?|[1234]\\d{2}|128)$', '0', '0', '1', '队列资源', 'spark'); +INSERT INTO `linkis_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.rm.yarnqueue.cores.max', '取值范围:1-500,单位:个', '队列CPU使用上限', '150', 'Regex', '^(?:[1-9]\\d?|[1234]\\d{2}|500)$', '0', '0', '1', '队列资源', 'spark'); +INSERT INTO `linkis_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.rm.yarnqueue.memory.max', '取值范围:1-1000,单位:G', '队列内存使用上限', '300G', 'Regex', '^([1-9]\\d{0,2}|1000)(G|g)$', '0', '0', '1', '队列资源', 'spark'); +INSERT INTO `linkis_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.rm.client.memory.max', '取值范围:1-100,单位:G', '驱动器内存使用上限', '20G', 'Regex', '^([1-9]\\d{0,1}|100)(G|g)$', '0', '0', '1', '队列资源', 'spark'); +INSERT INTO `linkis_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.rm.client.core.max', '取值范围:1-128,单位:个', '驱动器核心个数上限', '10', 'Regex', '^(?:[1-9]\\d?|[1234]\\d{2}|128)$', '0', '0', '1', '队列资源', 'spark'); +INSERT INTO `linkis_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.rm.instance', '范围:1-20,单位:个', '引擎最大并发数', '10', 'NumInterval', '[1,20]', '0', '0', '1', '队列资源', 'spark'); +INSERT INTO `linkis_configuration_config_key` (`key`, `description`, `name`, 
`default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.executor.instances', '取值范围:1-40,单位:个', '执行器实例最大并发数', '2', 'NumInterval', '[1,40]', '0', '0', '2', 'spark资源设置', 'spark'); +INSERT INTO `linkis_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.executor.cores', '取值范围:1-8,单位:个', '执行器核心个数', '2', 'NumInterval', '[1,2]', '1', '0', '1','spark资源设置', 'spark'); +INSERT INTO `linkis_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.executor.memory', '取值范围:3-15,单位:G', '执行器内存大小', '3', 'NumInterval', '[3,15]', '0', '0', '3', 'spark资源设置', 'spark'); +INSERT INTO `linkis_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.driver.cores', '取值范围:只能取1,单位:个', '驱动器核心个数', '1', 'NumInterval', '[1,1]', '1', '1', '1', 'spark资源设置','spark'); +INSERT INTO `linkis_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.driver.memory', '取值范围:1-15,单位:G', '驱动器内存大小','2', 'NumInterval', '[1,15]', '0', '0', '1', 'spark资源设置', 'spark'); +INSERT INTO `linkis_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.tispark.pd.addresses', NULL, NULL, 'pd0:2379', 'None', NULL, '0', '0', '1', 'tidb设置', 'spark'); +INSERT INTO `linkis_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, 
`validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.tispark.tidb.addr', NULL, NULL, 'tidb', 'None', NULL, '0', '0', '1', 'tidb设置', 'spark'); +INSERT INTO `linkis_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.tispark.tidb.password', NULL, NULL, NULL, 'None', NULL, '0', '0', '1', 'tidb设置', 'spark'); +INSERT INTO `linkis_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.tispark.tidb.port', NULL, NULL, '4000', 'None', NULL, '0', '0', '1', 'tidb设置', 'spark'); +INSERT INTO `linkis_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.tispark.tidb.user', NULL, NULL, 'root', 'None', NULL, '0', '0', '1', 'tidb设置', 'spark'); +INSERT INTO `linkis_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.python.version', '取值范围:python2,python3', 'python版本','python2', 'OFT', '[\"python3\",\"python2\"]', '0', '0', '1', 'spark引擎设置', 'spark'); +-- hive +INSERT INTO `linkis_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.rm.client.memory.max', '取值范围:1-100,单位:G', '驱动器内存使用上限', '20G', 'Regex', '^([1-9]\\d{0,1}|100)(G|g)$', '0', '0', '1', '队列资源', 'hive'); +INSERT INTO `linkis_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, 
`engine_conn_type`) VALUES ('wds.linkis.rm.client.core.max', '取值范围:1-128,单位:个', '驱动器核心个数上限', '10', 'Regex', '^(?:[1-9]\\d?|[1234]\\d{2}|128)$', '0', '0', '1', '队列资源', 'hive'); +INSERT INTO `linkis_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.rm.instance', '范围:1-20,单位:个', '引擎最大并发数', '10', 'NumInterval', '[1,20]', '0', '0', '1', '队列资源', 'hive'); +INSERT INTO `linkis_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('hive.client.memory', '取值范围:1-10,单位:G', 'hive引擎初始化内存大小','2', 'NumInterval', '[1,10]', '0', '0', '1', 'hive引擎设置', 'hive'); +INSERT INTO `linkis_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('hive.client.java.opts', 'hive客户端进程参数', 'hive引擎启动时jvm参数','', 'None', NULL, '1', '1', '1', 'hive引擎设置', 'hive'); +INSERT INTO `linkis_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('mapred.reduce.tasks', '范围:1-20,单位:个', 'reduce数', '10', 'NumInterval', '[1,20]', '0', '1', '1', 'hive资源设置', 'hive'); +INSERT INTO `linkis_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('dfs.block.size', '取值范围:2-10,单位:G', 'map数据块大小', '10', 'NumInterval', '[2,10]', '0', '1', '1', 'hive资源设置', 'hive'); +INSERT INTO `linkis_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES 
('hive.exec.reduce.bytes.per.reducer', '取值范围:2-10,单位:G', 'reduce处理的数据量', '10', 'NumInterval', '[2,10]', '0', '1', '1', 'hive资源设置', 'hive'); +-- python +INSERT INTO `linkis_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.rm.client.memory.max', '取值范围:1-100,单位:G', '驱动器内存使用上限', '20G', 'Regex', '^([1-9]\\d{0,1}|100)(G|g)$', '0', '0', '1', '队列资源', 'python'); +INSERT INTO `linkis_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.rm.client.core.max', '取值范围:1-128,单位:个', '驱动器核心个数上限', '10', 'Regex', '^(?:[1-9]\\d?|[1234]\\d{2}|128)$', '0', '0', '1', '队列资源', 'python'); +INSERT INTO `linkis_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.rm.instance', '范围:1-20,单位:个', '引擎最大并发数', '10', 'NumInterval', '[1,20]', '0', '0', '1', '队列资源', 'python'); +INSERT INTO `linkis_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('python.java.client.memory', '取值范围:1-2,单位:G', 'python引擎初始化内存大小', '1', 'NumInterval', '[1,2]', '0', '0', '1', 'python引擎设置', 'python'); +INSERT INTO `linkis_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('python.version', '取值范围:python2,python3', 'python版本','python2', 'OFT', '[\"python3\",\"python2\"]', '0', '0', '1', 'python引擎设置', 'python'); +-- pipeline +INSERT INTO `linkis_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, 
`validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('pipeline.output.mold', '取值范围:csv或excel', '结果集导出类型','csv', 'OFT', '[\"csv\",\"excel\"]', '0', '0', '1', 'pipeline引擎设置', 'pipeline'); +INSERT INTO `linkis_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('pipeline.field.split', '取值范围:,或\\t', 'csv分隔符',',', 'OFT', '[\",\",\"\\\\t\"]', '0', '0', '1', 'pipeline引擎设置', 'pipeline'); +INSERT INTO `linkis_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('pipeline.output.charset', '取值范围:utf-8或gbk', '结果集导出字符集','gbk', 'OFT', '[\"utf-8\",\"gbk\"]', '0', '0', '1', 'pipeline引擎设置', 'pipeline'); +INSERT INTO `linkis_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('pipeline.output.isoverwtite', '取值范围:true或false', '是否覆写','true', 'OFT', '[\"true\",\"false\"]', '0', '0', '1', 'pipeline引擎设置', 'pipeline'); +INSERT INTO `linkis_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.instance', '范围:1-3,单位:个', 'pipeline引擎最大并发数','3', 'NumInterval', '[1,3]', '0', '0', '1', 'pipeline引擎设置', 'pipeline'); +INSERT INTO `linkis_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('pipeline.engine.memory', '取值范围:1-10,单位:G', 'pipeline引擎初始化内存大小','2', 'NumInterval', '[1,10]', '0', '0', '1', 'pipeline资源设置', 'pipeline'); +INSERT INTO `linkis_configuration_config_key` (`key`, 
`description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('pipeline.output.shuffle.null.type', '取值范围:NULL或者BLANK', '空值替换','NULL', 'OFT', '[\"NULL\",\"BLANK\"]', '0', '0', '1', 'pipeline引擎设置', 'pipeline'); +-- jdbc +INSERT INTO `linkis_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.jdbc.connect.url', '例如:jdbc:hive2://127.0.0.1:10000', 'jdbc连接地址', 'jdbc:hive2://127.0.0.1:10000', 'Regex', '^\\s*jdbc:\\w+://([^:]+)(:\\d+)(/[^\\?]+)?(\\?\\S*)?$', '0', '0', '1', '数据源配置', 'jdbc'); +INSERT INTO `linkis_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.jdbc.version', '取值范围:jdbc3,jdbc4', 'jdbc版本','jdbc4', 'OFT', '[\"jdbc3\",\"jdbc4\"]', '0', '0', '1', '数据源配置', 'jdbc'); +INSERT INTO `linkis_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.jdbc.username', 'username', '数据库连接用户名', '', '', '', '0', '0', '1', '用户配置', 'jdbc'); +INSERT INTO `linkis_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.jdbc.password', 'password', '数据库连接密码', '', '', '', '0', '0', '1', '用户配置', 'jdbc'); +INSERT INTO `linkis_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.jdbc.connect.max', '范围:1-20,单位:个', 'jdbc引擎最大连接数', '10', 'NumInterval', '[1,20]', '0', '0', '1', '数据源配置', 'jdbc'); +--
Configuration一级目录 +INSERT INTO `linkis_manager_label` (`label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES ('combined_engineType_userCreator','*-全局设置,*-*', 'OPTIONAL', 2, now(), now()); +INSERT INTO `linkis_manager_label` (`label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES ('combined_engineType_userCreator','*-IDE,*-*', 'OPTIONAL', 2, now(), now()); +INSERT INTO `linkis_manager_label` (`label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES ('combined_engineType_userCreator','*-Visualis,*-*', 'OPTIONAL', 2, now(), now()); +INSERT INTO `linkis_manager_label` (`label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES ('combined_engineType_userCreator','*-nodeexecution,*-*', 'OPTIONAL', 2, now(), now()); + + +-- 引擎级别默认配置 +INSERT INTO `linkis_manager_label` (`label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES ('combined_engineType_userCreator',@SPARK_ALL, 'OPTIONAL', 2, now(), now()); +INSERT INTO `linkis_manager_label` (`label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES ('combined_engineType_userCreator',@HIVE_ALL, 'OPTIONAL', 2, now(), now()); +INSERT INTO `linkis_manager_label` (`label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES ('combined_engineType_userCreator',@PYTHON_ALL, 'OPTIONAL', 2, now(), now()); +INSERT INTO `linkis_manager_label` (`label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES ('combined_engineType_userCreator',@PIPELINE_ALL, 'OPTIONAL', 2, now(), now()); +INSERT INTO `linkis_manager_label` (`label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES ('combined_engineType_userCreator',@JDBC_ALL, 'OPTIONAL', 2, now(), 
now()); + + +-- Configuration二级目录(creator 级别的默认配置) +-- IDE +INSERT INTO `linkis_manager_label` (`label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES ('combined_engineType_userCreator',@SPARK_IDE, 'OPTIONAL', 2, now(), now()); +INSERT INTO `linkis_manager_label` (`label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES ('combined_engineType_userCreator',@HIVE_IDE, 'OPTIONAL', 2, now(), now()); +INSERT INTO `linkis_manager_label` (`label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES ('combined_engineType_userCreator',@PYTHON_IDE, 'OPTIONAL', 2, now(), now()); +INSERT INTO `linkis_manager_label` (`label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES ('combined_engineType_userCreator',@PIPELINE_IDE, 'OPTIONAL', 2, now(), now()); +INSERT INTO `linkis_manager_label` (`label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES ('combined_engineType_userCreator',@JDBC_IDE, 'OPTIONAL', 2, now(), now()); + +-- Visualis +INSERT INTO `linkis_manager_label` (`label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES ('combined_engineType_userCreator',@SPARK_VISUALIS, 'OPTIONAL', 2, now(), now()); +-- nodeexecution +INSERT INTO `linkis_manager_label` (`label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES ('combined_engineType_userCreator',@SPARK_NODE, 'OPTIONAL', 2, now(), now()); +INSERT INTO `linkis_manager_label` (`label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES ('combined_engineType_userCreator',@HIVE_NODE, 'OPTIONAL', 2, now(), now()); +INSERT INTO `linkis_manager_label` (`label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES 
('combined_engineType_userCreator',@PYTHON_NODE, 'OPTIONAL', 2, now(), now()); + + +-- 关联一级二级目录 +SELECT @label_id := id from linkis_manager_label WHERE `label_value` = '*-全局设置,*-*'; +INSERT INTO linkis_configuration_category (`label_id`, `level`) VALUES (@label_id, 1); + +SELECT @label_id := id from linkis_manager_label WHERE `label_value` = '*-IDE,*-*'; +INSERT INTO linkis_configuration_category (`label_id`, `level`) VALUES (@label_id, 1); + +SELECT @label_id := id from linkis_manager_label WHERE `label_value` = '*-Visualis,*-*'; +INSERT INTO linkis_configuration_category (`label_id`, `level`) VALUES (@label_id, 1); + +SELECT @label_id := id from linkis_manager_label WHERE `label_value` = '*-nodeexecution,*-*'; +INSERT INTO linkis_configuration_category (`label_id`, `level`) VALUES (@label_id, 1); + +SELECT @label_id := id from linkis_manager_label WHERE `label_value` = @SPARK_IDE; +INSERT INTO linkis_configuration_category (`label_id`, `level`) VALUES (@label_id, 2); + +SELECT @label_id := id from linkis_manager_label WHERE `label_value` = @HIVE_IDE; +INSERT INTO linkis_configuration_category (`label_id`, `level`) VALUES (@label_id, 2); + +SELECT @label_id := id from linkis_manager_label WHERE `label_value` = @PYTHON_IDE; +INSERT INTO linkis_configuration_category (`label_id`, `level`) VALUES (@label_id, 2); + +SELECT @label_id := id from linkis_manager_label WHERE `label_value` = @PIPELINE_IDE; +INSERT INTO linkis_configuration_category (`label_id`, `level`) VALUES (@label_id, 2); + +SELECT @label_id := id from linkis_manager_label WHERE `label_value` = @JDBC_IDE; +INSERT INTO linkis_configuration_category (`label_id`, `level`) VALUES (@label_id, 2); + +SELECT @label_id := id from linkis_manager_label WHERE `label_value` = @SPARK_VISUALIS; +INSERT INTO linkis_configuration_category (`label_id`, `level`) VALUES (@label_id, 2); + +SELECT @label_id := id from linkis_manager_label WHERE `label_value` = @SPARK_NODE; +INSERT INTO linkis_configuration_category 
(`label_id`, `level`) VALUES (@label_id, 2); + +SELECT @label_id := id from linkis_manager_label WHERE `label_value` = @HIVE_NODE; +INSERT INTO linkis_configuration_category (`label_id`, `level`) VALUES (@label_id, 2); + +SELECT @label_id := id from linkis_manager_label WHERE `label_value` = @PYTHON_NODE; +INSERT INTO linkis_configuration_category (`label_id`, `level`) VALUES (@label_id, 2); + + +---- 关联label和默认配置 +-- 全局默认配置(此处的'*-*,*-*'与一级目录'*-全局设置,*-*'相同,真正查询全局设置的label时应当查询*-*,*-*,而不是*-全局设置,*-*) +INSERT INTO `linkis_manager_label` (`label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES ('combined_engineType_userCreator','*-*,*-*', 'OPTIONAL', 2, now(), now()); + +SELECT @configkey_id := id from linkis_configuration_config_key WHERE `key` = 'wds.linkis.rm.yarnqueue' AND `engine_conn_type` IS NULL; +SELECT @config_label_id := id from linkis_manager_label WHERE `label_value` = '*-*,*-*'; +INSERT INTO `linkis_configuration_config_value` (`configkey_id`, `config_value`, `config_label_id`) VALUES (@configkey_id, '', @config_label_id); + +-- spark默认配置 +SELECT @configkey_id := id from linkis_configuration_config_key WHERE `key` = 'wds.linkis.rm.yarnqueue.instance.max' AND `engine_conn_type` = 'spark'; +SELECT @config_label_id := id from linkis_manager_label WHERE `label_value` = @SPARK_ALL; +INSERT INTO `linkis_configuration_config_value` (`configkey_id`, `config_value`, `config_label_id`) VALUES (@configkey_id, '', @config_label_id); + +SELECT @configkey_id := id from linkis_configuration_config_key WHERE `key` = 'wds.linkis.rm.yarnqueue.cores.max' AND `engine_conn_type` = 'spark'; +SELECT @config_label_id := id from linkis_manager_label WHERE `label_value` = @SPARK_ALL; +INSERT INTO `linkis_configuration_config_value` (`configkey_id`, `config_value`, `config_label_id`) VALUES (@configkey_id, '', @config_label_id); + +SELECT @configkey_id := id from linkis_configuration_config_key WHERE `key` = 
'wds.linkis.rm.yarnqueue.memory.max' AND `engine_conn_type` = 'spark'; +SELECT @config_label_id := id from linkis_manager_label WHERE `label_value` = @SPARK_ALL; +INSERT INTO `linkis_configuration_config_value` (`configkey_id`, `config_value`, `config_label_id`) VALUES (@configkey_id, '', @config_label_id); -SELECT @key_id := id from linkis_config_key WHERE `key` = 'wds.linkis.yarnqueue'; -SELECT @tree_id := id from linkis_config_tree WHERE `name` = '队列资源'; -INSERT INTO `linkis_config_key_tree` (`id`, `key_id`, `tree_id`) VALUES (0, @key_id, @tree_id); - -SELECT @key_id := id from linkis_config_key WHERE `key` = 'wds.linkis.yarnqueue.cores.max'; -SELECT @tree_id := id from linkis_config_tree WHERE `name` = '队列资源'; -INSERT INTO `linkis_config_key_tree` (`id`, `key_id`, `tree_id`) VALUES (0, @key_id, @tree_id); - -SELECT @key_id := id from linkis_config_key WHERE `key` = 'wds.linkis.yarnqueue.memory.max'; -SELECT @tree_id := id from linkis_config_tree WHERE `name` = '队列资源'; -INSERT INTO `linkis_config_key_tree` (`id`, `key_id`, `tree_id`) VALUES (0, @key_id, @tree_id); - -SELECT @key_id := id from linkis_config_key WHERE `key` = 'wds.linkis.preheating.time'; -SELECT @tree_id := id from linkis_config_tree WHERE `name` = '预热机制'; -INSERT INTO `linkis_config_key_tree` (`id`, `key_id`, `tree_id`) VALUES (0, @key_id, @tree_id); - -SELECT @key_id := id from linkis_config_key WHERE `key` = 'wds.linkis.tmpfile.clean.time'; -SELECT @tree_id := id from linkis_config_tree WHERE `name` = '清理机制'; -INSERT INTO `linkis_config_key_tree` (`id`, `key_id`, `tree_id`) VALUES (0, @key_id, @tree_id); - -SELECT @key_id := id from linkis_config_key WHERE `name` = '引擎最大并发数'; -SELECT @tree_id := id from linkis_config_tree WHERE `name` = '引擎设置'; -INSERT INTO `linkis_config_key_tree` (`id`, `key_id`, `tree_id`) VALUES (0, @key_id, @tree_id); - -SELECT @key_id := id from linkis_config_key WHERE `key` = 'wds.linkis.client.memory.max'; -SELECT @tree_id := id from linkis_config_tree WHERE `name` = 
'驱动器资源'; -INSERT INTO `linkis_config_key_tree` (`id`, `key_id`, `tree_id`) VALUES (0, @key_id, @tree_id); +SELECT @configkey_id := id from linkis_configuration_config_key WHERE `key` = 'wds.linkis.rm.client.memory.max' AND `engine_conn_type` = 'spark'; +SELECT @config_label_id := id from linkis_manager_label WHERE `label_value` = @SPARK_ALL; +INSERT INTO `linkis_configuration_config_value` (`configkey_id`, `config_value`, `config_label_id`) VALUES (@configkey_id, '', @config_label_id); -#---------------------------------------spark--------------- +SELECT @configkey_id := id from linkis_configuration_config_key WHERE `key` = 'wds.linkis.rm.client.core.max' AND `engine_conn_type` = 'spark'; +SELECT @config_label_id := id from linkis_manager_label WHERE `label_value` = @SPARK_ALL; +INSERT INTO `linkis_configuration_config_value` (`configkey_id`, `config_value`, `config_label_id`) VALUES (@configkey_id, '', @config_label_id); -SELECT @key_id:=id from(SELECT k.id as 'id'from linkis_config_key k left join linkis_application a on k.application_id = a.id WHERE k.`key` = 'spark.executor.instances' and a.name = 'IDE' ORDER BY k.id limit 1) as tmp; -SELECT @tree_id := t.id from linkis_config_tree t left join linkis_application a on t.application_id = a.id WHERE t.`name` = 'spark资源设置' and a.name = 'spark' ; -INSERT INTO `linkis_config_key_tree` (`id`, `key_id`, `tree_id`) VALUES (0, @key_id, @tree_id); +SELECT @configkey_id := id from linkis_configuration_config_key WHERE `key` = 'wds.linkis.rm.instance' AND `engine_conn_type` = 'spark'; +SELECT @config_label_id := id from linkis_manager_label WHERE `label_value` = @SPARK_ALL; +INSERT INTO `linkis_configuration_config_value` (`configkey_id`, `config_value`, `config_label_id`) VALUES (@configkey_id, '', @config_label_id); -SELECT @key_id:=id from(SELECT k.id as 'id'from linkis_config_key k left join linkis_application a on k.application_id = a.id WHERE k.`name` = 'spark引擎最大并发数' and a.name = 'IDE' ORDER BY k.id limit 1) as tmp; 
-SELECT @tree_id := t.id from linkis_config_tree t left join linkis_application a on t.application_id = a.id WHERE t.`name` = 'spark引擎设置' and a.name = 'spark' ; -INSERT INTO `linkis_config_key_tree` (`id`, `key_id`, `tree_id`) VALUES (0, @key_id, @tree_id); +SELECT @configkey_id := id from linkis_configuration_config_key WHERE `key` = 'spark.executor.instances' AND `engine_conn_type` = 'spark'; +SELECT @config_label_id := id from linkis_manager_label WHERE `label_value` = @SPARK_ALL; +INSERT INTO `linkis_configuration_config_value` (`configkey_id`, `config_value`, `config_label_id`) VALUES (@configkey_id, '', @config_label_id); -SELECT @key_id:=id from(SELECT k.id as 'id'from linkis_config_key k left join linkis_application a on k.application_id = a.id WHERE k.`key` = 'spark.executor.cores' and a.name = 'IDE' ORDER BY k.id limit 1) as tmp; -SELECT @tree_id := t.id from linkis_config_tree t left join linkis_application a on t.application_id = a.id WHERE t.`name` = 'spark资源设置' and a.name = 'spark' ; -INSERT INTO `linkis_config_key_tree` (`id`, `key_id`, `tree_id`) VALUES (0, @key_id, @tree_id); +SELECT @configkey_id := id from linkis_configuration_config_key WHERE `key` = 'spark.executor.cores' AND `engine_conn_type` = 'spark'; +SELECT @config_label_id := id from linkis_manager_label WHERE `label_value` = @SPARK_ALL; +INSERT INTO `linkis_configuration_config_value` (`configkey_id`, `config_value`, `config_label_id`) VALUES (@configkey_id, '', @config_label_id); -SELECT @key_id:=id from(SELECT k.id as 'id'from linkis_config_key k left join linkis_application a on k.application_id = a.id WHERE k.`key` = 'spark.executor.memory' and a.name = 'IDE' ORDER BY k.id limit 1) as tmp; -SELECT @tree_id := t.id from linkis_config_tree t left join linkis_application a on t.application_id = a.id WHERE t.`name` = 'spark资源设置' and a.name = 'spark' ; -INSERT INTO `linkis_config_key_tree` (`id`, `key_id`, `tree_id`) VALUES (0, @key_id, @tree_id); +SELECT @configkey_id := id from 
linkis_configuration_config_key WHERE `key` = 'spark.executor.memory' AND `engine_conn_type` = 'spark'; +SELECT @config_label_id := id from linkis_manager_label WHERE `label_value` = @SPARK_ALL; +INSERT INTO `linkis_configuration_config_value` (`configkey_id`, `config_value`, `config_label_id`) VALUES (@configkey_id, '', @config_label_id); -SELECT @key_id:=id from(SELECT k.id as 'id'from linkis_config_key k left join linkis_application a on k.application_id = a.id WHERE k.`key` = 'spark.driver.cores' and a.name = 'IDE' ORDER BY k.id limit 1) as tmp; -SELECT @tree_id := t.id from linkis_config_tree t left join linkis_application a on t.application_id = a.id WHERE t.`name` = 'spark资源设置' and a.name = 'spark' ; -INSERT INTO `linkis_config_key_tree` (`id`, `key_id`, `tree_id`) VALUES (0, @key_id, @tree_id); +SELECT @configkey_id := id from linkis_configuration_config_key WHERE `key` = 'spark.driver.cores' AND `engine_conn_type` = 'spark'; +SELECT @config_label_id := id from linkis_manager_label WHERE `label_value` = @SPARK_ALL; +INSERT INTO `linkis_configuration_config_value` (`configkey_id`, `config_value`, `config_label_id`) VALUES (@configkey_id, '', @config_label_id); -SELECT @key_id:=id from(SELECT k.id as 'id'from linkis_config_key k left join linkis_application a on k.application_id = a.id WHERE k.`key` = 'spark.driver.memory' and a.name = 'IDE' ORDER BY k.id limit 1) as tmp; -SELECT @tree_id := t.id from linkis_config_tree t left join linkis_application a on t.application_id = a.id WHERE t.`name` = 'spark资源设置' and a.name = 'spark' ; -INSERT INTO `linkis_config_key_tree` (`id`, `key_id`, `tree_id`) VALUES (0, @key_id, @tree_id); +SELECT @configkey_id := id from linkis_configuration_config_key WHERE `key` = 'spark.driver.memory' AND `engine_conn_type` = 'spark'; +SELECT @config_label_id := id from linkis_manager_label WHERE `label_value` = @SPARK_ALL; +INSERT INTO `linkis_configuration_config_value` (`configkey_id`, `config_value`, `config_label_id`) VALUES 
(@configkey_id, '', @config_label_id); -#---------------------------------------tidb------------------ +SELECT @configkey_id := id from linkis_configuration_config_key WHERE `key` = 'spark.python.version' AND `engine_conn_type` = 'spark'; +SELECT @config_label_id := id from linkis_manager_label WHERE `label_value` = @SPARK_ALL; +INSERT INTO `linkis_configuration_config_value` (`configkey_id`, `config_value`, `config_label_id`) VALUES (@configkey_id, '', @config_label_id); -SELECT @key_id:=id from(SELECT k.id as 'id'from linkis_config_key k left join linkis_application a on k.application_id = a.id WHERE k.`key` = 'spark.executor.instances' and a.name = 'IDE' ORDER BY k.id desc limit 1) as tmp; -SELECT @tree_id := t.id from linkis_config_tree t left join linkis_application a on t.application_id = a.id WHERE t.`name` = 'spark资源设置' and a.name = 'tidb' ; -INSERT INTO `linkis_config_key_tree` (`id`, `key_id`, `tree_id`) VALUES (0, @key_id, @tree_id); +-- hive默认配置 +SELECT @configkey_id := id from linkis_configuration_config_key WHERE `key` = 'wds.linkis.rm.client.memory.max' AND `engine_conn_type` = 'hive'; +SELECT @config_label_id := id from linkis_manager_label WHERE `label_value` = @HIVE_ALL; +INSERT INTO `linkis_configuration_config_value` (`configkey_id`, `config_value`, `config_label_id`) VALUES (@configkey_id, '', @config_label_id); -SELECT @key_id:=id from(SELECT k.id as 'id'from linkis_config_key k left join linkis_application a on k.application_id = a.id WHERE k.`name` = 'spark引擎最大并发数' and a.name = 'IDE' ORDER BY k.id desc limit 1) as tmp; -SELECT @tree_id := t.id from linkis_config_tree t left join linkis_application a on t.application_id = a.id WHERE t.`name` = 'spark引擎设置' and a.name = 'tidb' ; -INSERT INTO `linkis_config_key_tree` (`id`, `key_id`, `tree_id`) VALUES (0, @key_id, @tree_id); +SELECT @configkey_id := id from linkis_configuration_config_key WHERE `key` = 'wds.linkis.rm.client.core.max' AND `engine_conn_type` = 'hive'; +SELECT @config_label_id := 
id from linkis_manager_label WHERE `label_value` = @HIVE_ALL; +INSERT INTO `linkis_configuration_config_value` (`configkey_id`, `config_value`, `config_label_id`) VALUES (@configkey_id, '', @config_label_id); -SELECT @key_id:=id from(SELECT k.id as 'id'from linkis_config_key k left join linkis_application a on k.application_id = a.id WHERE k.`key` = 'spark.executor.cores' and a.name = 'IDE' ORDER BY k.id desc limit 1) as tmp; -SELECT @tree_id := t.id from linkis_config_tree t left join linkis_application a on t.application_id = a.id WHERE t.`name` = 'spark资源设置' and a.name = 'tidb' ; -INSERT INTO `linkis_config_key_tree` (`id`, `key_id`, `tree_id`) VALUES (0, @key_id, @tree_id); +SELECT @configkey_id := id from linkis_configuration_config_key WHERE `key` = 'wds.linkis.rm.instance' AND `engine_conn_type` = 'hive'; +SELECT @config_label_id := id from linkis_manager_label WHERE `label_value` = @HIVE_ALL; +INSERT INTO `linkis_configuration_config_value` (`configkey_id`, `config_value`, `config_label_id`) VALUES (@configkey_id, '', @config_label_id); -SELECT @key_id:=id from(SELECT k.id as 'id'from linkis_config_key k left join linkis_application a on k.application_id = a.id WHERE k.`key` = 'spark.executor.memory' and a.name = 'IDE' ORDER BY k.id desc limit 1) as tmp; -SELECT @tree_id := t.id from linkis_config_tree t left join linkis_application a on t.application_id = a.id WHERE t.`name` = 'spark资源设置' and a.name = 'tidb' ; -INSERT INTO `linkis_config_key_tree` (`id`, `key_id`, `tree_id`) VALUES (0, @key_id, @tree_id); +SELECT @configkey_id := id from linkis_configuration_config_key WHERE `key` = 'hive.client.memory' AND `engine_conn_type` = 'hive'; +SELECT @config_label_id := id from linkis_manager_label WHERE `label_value` = @HIVE_ALL; +INSERT INTO `linkis_configuration_config_value` (`configkey_id`, `config_value`, `config_label_id`) VALUES (@configkey_id, '', @config_label_id); -SELECT @key_id:=id from(SELECT k.id as 'id'from linkis_config_key k left join 
linkis_application a on k.application_id = a.id WHERE k.`key` = 'spark.driver.cores' and a.name = 'IDE' ORDER BY k.id desc limit 1) as tmp; -SELECT @tree_id := t.id from linkis_config_tree t left join linkis_application a on t.application_id = a.id WHERE t.`name` = 'spark资源设置' and a.name = 'tidb' ; -INSERT INTO `linkis_config_key_tree` (`id`, `key_id`, `tree_id`) VALUES (0, @key_id, @tree_id); +SELECT @configkey_id := id from linkis_configuration_config_key WHERE `key` = 'hive.client.java.opts' AND `engine_conn_type` = 'hive'; +SELECT @config_label_id := id from linkis_manager_label WHERE `label_value` = @HIVE_ALL; +INSERT INTO `linkis_configuration_config_value` (`configkey_id`, `config_value`, `config_label_id`) VALUES (@configkey_id, '', @config_label_id); -SELECT @key_id:=id from(SELECT k.id as 'id'from linkis_config_key k left join linkis_application a on k.application_id = a.id WHERE k.`key` = 'spark.driver.memory' and a.name = 'IDE' ORDER BY k.id desc limit 1) as tmp; -SELECT @tree_id := t.id from linkis_config_tree t left join linkis_application a on t.application_id = a.id WHERE t.`name` = 'spark资源设置' and a.name = 'tidb' ; -INSERT INTO `linkis_config_key_tree` (`id`, `key_id`, `tree_id`) VALUES (0, @key_id, @tree_id); +SELECT @configkey_id := id from linkis_configuration_config_key WHERE `key` = 'mapred.reduce.tasks' AND `engine_conn_type` = 'hive'; +SELECT @config_label_id := id from linkis_manager_label WHERE `label_value` = @HIVE_ALL; +INSERT INTO `linkis_configuration_config_value` (`configkey_id`, `config_value`, `config_label_id`) VALUES (@configkey_id, '', @config_label_id); -SELECT @key_id := k.id from linkis_config_key k left join linkis_application a on k.application_id = a.id WHERE k.`key` = 'spark.tispark.pd.addresses' and a.name = 'IDE' ; -SELECT @tree_id := t.id from linkis_config_tree t left join linkis_application a on t.application_id = a.id WHERE t.`name` = 'tidb设置' and a.name = 'tidb' ; -INSERT INTO `linkis_config_key_tree` (`id`, 
`key_id`, `tree_id`) VALUES (0, @key_id, @tree_id); +SELECT @configkey_id := id from linkis_configuration_config_key WHERE `key` = 'dfs.block.size' AND `engine_conn_type` = 'hive'; +SELECT @config_label_id := id from linkis_manager_label WHERE `label_value` = @HIVE_ALL; +INSERT INTO `linkis_configuration_config_value` (`configkey_id`, `config_value`, `config_label_id`) VALUES (@configkey_id, '', @config_label_id); -SELECT @key_id := k.id from linkis_config_key k left join linkis_application a on k.application_id = a.id WHERE k.`key` = 'spark.tispark.tidb.addr' and a.name = 'IDE' ; -SELECT @tree_id := t.id from linkis_config_tree t left join linkis_application a on t.application_id = a.id WHERE t.`name` = 'tidb设置' and a.name = 'tidb' ; -INSERT INTO `linkis_config_key_tree` (`id`, `key_id`, `tree_id`) VALUES (0, @key_id, @tree_id); +SELECT @configkey_id := id from linkis_configuration_config_key WHERE `key` = 'hive.exec.reduce.bytes.per.reducer' AND `engine_conn_type` = 'hive'; +SELECT @config_label_id := id from linkis_manager_label WHERE `label_value` = @HIVE_ALL; +INSERT INTO `linkis_configuration_config_value` (`configkey_id`, `config_value`, `config_label_id`) VALUES (@configkey_id, '', @config_label_id); -SELECT @key_id := k.id from linkis_config_key k left join linkis_application a on k.application_id = a.id WHERE k.`key` = 'spark.tispark.tidb.password' and a.name = 'IDE' ; -SELECT @tree_id := t.id from linkis_config_tree t left join linkis_application a on t.application_id = a.id WHERE t.`name` = 'tidb设置' and a.name = 'tidb' ; -INSERT INTO `linkis_config_key_tree` (`id`, `key_id`, `tree_id`) VALUES (0, @key_id, @tree_id); +-- python默认配置 +SELECT @configkey_id := id from linkis_configuration_config_key WHERE `key` = 'wds.linkis.rm.client.memory.max' AND `engine_conn_type` = 'python'; +SELECT @config_label_id := id from linkis_manager_label WHERE `label_value` = @PYTHON_ALL; +INSERT INTO `linkis_configuration_config_value` (`configkey_id`, `config_value`, 
`config_label_id`) VALUES (@configkey_id, '', @config_label_id); -SELECT @key_id := k.id from linkis_config_key k left join linkis_application a on k.application_id = a.id WHERE k.`key` = 'spark.tispark.tidb.port' and a.name = 'IDE' ; -SELECT @tree_id := t.id from linkis_config_tree t left join linkis_application a on t.application_id = a.id WHERE t.`name` = 'tidb设置' and a.name = 'tidb' ; -INSERT INTO `linkis_config_key_tree` (`id`, `key_id`, `tree_id`) VALUES (0, @key_id, @tree_id); +SELECT @configkey_id := id from linkis_configuration_config_key WHERE `key` = 'wds.linkis.rm.client.core.max' AND `engine_conn_type` = 'python'; +SELECT @config_label_id := id from linkis_manager_label WHERE `label_value` = @PYTHON_ALL; +INSERT INTO `linkis_configuration_config_value` (`configkey_id`, `config_value`, `config_label_id`) VALUES (@configkey_id, '', @config_label_id); -SELECT @key_id := k.id from linkis_config_key k left join linkis_application a on k.application_id = a.id WHERE k.`key` = 'spark.tispark.tidb.user' and a.name = 'IDE' ; -SELECT @tree_id := t.id from linkis_config_tree t left join linkis_application a on t.application_id = a.id WHERE t.`name` = 'tidb设置' and a.name = 'tidb' ; -INSERT INTO `linkis_config_key_tree` (`id`, `key_id`, `tree_id`) VALUES (0, @key_id, @tree_id); +SELECT @configkey_id := id from linkis_configuration_config_key WHERE `key` = 'wds.linkis.rm.instance' AND `engine_conn_type` = 'python'; +SELECT @config_label_id := id from linkis_manager_label WHERE `label_value` = @PYTHON_ALL; +INSERT INTO `linkis_configuration_config_value` (`configkey_id`, `config_value`, `config_label_id`) VALUES (@configkey_id, '', @config_label_id); -#---------------------------hive----------------- -SELECT @key_id := k.id from linkis_config_key k left join linkis_application a on k.application_id = a.id WHERE k.`name` = 'hive引擎最大并发数' and a.name = 'IDE' ; -SELECT @tree_id := t.id from linkis_config_tree t left join linkis_application a on t.application_id = a.id 
WHERE t.`name` = 'hive引擎设置' and a.name = 'hive' ; -INSERT INTO `linkis_config_key_tree` (`id`, `key_id`, `tree_id`) VALUES (0, @key_id, @tree_id); +SELECT @configkey_id := id from linkis_configuration_config_key WHERE `key` = 'python.java.client.memory' AND `engine_conn_type` = 'python'; +SELECT @config_label_id := id from linkis_manager_label WHERE `label_value` = @PYTHON_ALL; +INSERT INTO `linkis_configuration_config_value` (`configkey_id`, `config_value`, `config_label_id`) VALUES (@configkey_id, '', @config_label_id); -SELECT @key_id := k.id from linkis_config_key k left join linkis_application a on k.application_id = a.id WHERE k.`key` = 'hive.client.memory' and a.name = 'IDE' ; -SELECT @tree_id := t.id from linkis_config_tree t left join linkis_application a on t.application_id = a.id WHERE t.`name` = 'hive资源设置' and a.name = 'hive' ; -INSERT INTO `linkis_config_key_tree` (`id`, `key_id`, `tree_id`) VALUES (0, @key_id, @tree_id); +SELECT @configkey_id := id from linkis_configuration_config_key WHERE `key` = 'python.version' AND `engine_conn_type` = 'python'; +SELECT @config_label_id := id from linkis_manager_label WHERE `label_value` = @PYTHON_ALL; +INSERT INTO `linkis_configuration_config_value` (`configkey_id`, `config_value`, `config_label_id`) VALUES (@configkey_id, '', @config_label_id); -SELECT @key_id := k.id from linkis_config_key k left join linkis_application a on k.application_id = a.id WHERE k.`key` = 'mapred.reduce.tasks' and a.name = 'IDE'; -SELECT @tree_id := t.id from linkis_config_tree t left join linkis_application a on t.application_id = a.id WHERE t.`name` = 'hive资源设置' and a.name = 'hive' ; -INSERT INTO `linkis_config_key_tree` (`id`, `key_id`, `tree_id`) VALUES (0, @key_id, @tree_id); +-- pipeline默认配置 +SELECT @configkey_id := id from linkis_configuration_config_key WHERE `key` = 'pipeline.output.mold' AND `engine_conn_type` = 'pipeline'; +SELECT @config_label_id := id from linkis_manager_label WHERE `label_value` = @PIPELINE_ALL; +INSERT 
INTO `linkis_configuration_config_value` (`configkey_id`, `config_value`, `config_label_id`) VALUES (@configkey_id, '', @config_label_id); -SELECT @key_id := k.id from linkis_config_key k left join linkis_application a on k.application_id = a.id WHERE k.`key` = 'dfs.block.size' and a.name = 'IDE' ; -SELECT @tree_id := t.id from linkis_config_tree t left join linkis_application a on t.application_id = a.id WHERE t.`name` = 'hive资源设置' and a.name = 'hive' ; -INSERT INTO `linkis_config_key_tree` (`id`, `key_id`, `tree_id`) VALUES (0, @key_id, @tree_id); +SELECT @configkey_id := id from linkis_configuration_config_key WHERE `key` = 'pipeline.field.split' AND `engine_conn_type` = 'pipeline'; +SELECT @config_label_id := id from linkis_manager_label WHERE `label_value` = @PIPELINE_ALL; +INSERT INTO `linkis_configuration_config_value` (`configkey_id`, `config_value`, `config_label_id`) VALUES (@configkey_id, '', @config_label_id); -SELECT @key_id := k.id from linkis_config_key k left join linkis_application a on k.application_id = a.id WHERE k.`key` = 'hive.exec.reduce.bytes.per.reducer' and a.name = 'IDE' ; -SELECT @tree_id := t.id from linkis_config_tree t left join linkis_application a on t.application_id = a.id WHERE t.`name` = 'hive资源设置' and a.name = 'hive' ; -INSERT INTO `linkis_config_key_tree` (`id`, `key_id`, `tree_id`) VALUES (0, @key_id, @tree_id); +SELECT @configkey_id := id from linkis_configuration_config_key WHERE `key` = 'pipeline.output.charset' AND `engine_conn_type` = 'pipeline'; +SELECT @config_label_id := id from linkis_manager_label WHERE `label_value` = @PIPELINE_ALL; +INSERT INTO `linkis_configuration_config_value` (`configkey_id`, `config_value`, `config_label_id`) VALUES (@configkey_id, '', @config_label_id); -SELECT @key_id := k.id from linkis_config_key k left join linkis_application a on k.application_id = a.id WHERE k.`key` = 'hive.client.java.opts' and a.name = 'IDE'; -SELECT @tree_id := t.id from linkis_config_tree t left join 
linkis_application a on t.application_id = a.id WHERE t.`name` = 'hive资源设置' and a.name = 'hive' ; -INSERT INTO `linkis_config_key_tree` (`id`, `key_id`, `tree_id`) VALUES (0, @key_id, @tree_id); +SELECT @configkey_id := id from linkis_configuration_config_key WHERE `key` = 'pipeline.output.isoverwtite' AND `engine_conn_type` = 'pipeline'; +SELECT @config_label_id := id from linkis_manager_label WHERE `label_value` = @PIPELINE_ALL; +INSERT INTO `linkis_configuration_config_value` (`configkey_id`, `config_value`, `config_label_id`) VALUES (@configkey_id, '', @config_label_id); -#------------------------python--------------------- +SELECT @configkey_id := id from linkis_configuration_config_key WHERE `key` = 'pipeline.engine.memory' AND `engine_conn_type` = 'pipeline'; +SELECT @config_label_id := id from linkis_manager_label WHERE `label_value` = @PIPELINE_ALL; +INSERT INTO `linkis_configuration_config_value` (`configkey_id`, `config_value`, `config_label_id`) VALUES (@configkey_id, '', @config_label_id); -SELECT @key_id := k.id from linkis_config_key k left join linkis_application a on k.application_id = a.id WHERE k.`name` = 'python引擎最大并发数' and a.name = 'IDE' ; -SELECT @tree_id := t.id from linkis_config_tree t left join linkis_application a on t.application_id = a.id WHERE t.`name` = 'python引擎设置' and a.name = 'python' ; -INSERT INTO `linkis_config_key_tree` (`id`, `key_id`, `tree_id`) VALUES (0, @key_id, @tree_id); +SELECT @configkey_id := id from linkis_configuration_config_key WHERE `key` = 'pipeline.output.shuffle.null.type' AND `engine_conn_type` = 'pipeline'; +SELECT @config_label_id := id from linkis_manager_label WHERE `label_value` = @PIPELINE_ALL; +INSERT INTO `linkis_configuration_config_value` (`configkey_id`, `config_value`, `config_label_id`) VALUES (@configkey_id, '', @config_label_id); -SELECT @key_id := k.id from linkis_config_key k left join linkis_application a on k.application_id = a.id WHERE k.`key` = 'python.java.client.memory' and a.name = 
'IDE' ; -SELECT @tree_id := t.id from linkis_config_tree t left join linkis_application a on t.application_id = a.id WHERE t.`name` = 'python资源设置' and a.name = 'python' ; -INSERT INTO `linkis_config_key_tree` (`id`, `key_id`, `tree_id`) VALUES (0, @key_id, @tree_id); +SELECT @configkey_id := id from linkis_configuration_config_key WHERE `key` = 'wds.linkis.instance' AND `engine_conn_type` = 'pipeline'; +SELECT @config_label_id := id from linkis_manager_label WHERE `label_value` = @PIPELINE_ALL; +INSERT INTO `linkis_configuration_config_value` (`configkey_id`, `config_value`, `config_label_id`) VALUES (@configkey_id, '', @config_label_id); +-- jdbc默认配置 +SELECT @configkey_id := id from linkis_configuration_config_key WHERE `key` = 'wds.linkis.jdbc.connect.url' AND `engine_conn_type` = 'jdbc'; +SELECT @config_label_id := id from linkis_manager_label WHERE `label_value` = @JDBC_ALL; +INSERT INTO `linkis_configuration_config_value` (`configkey_id`, `config_value`, `config_label_id`) VALUES (@configkey_id, '', @config_label_id); -#------------控制台-jdbc--------- -INSERT INTO `linkis_application` (`id`, `name`, `chinese_name`, `description`) VALUES (NULL, 'jdbc', NULL, NULL); +SELECT @configkey_id := id from linkis_configuration_config_key WHERE `key` = 'wds.linkis.jdbc.version' AND `engine_conn_type` = 'jdbc'; +SELECT @config_label_id := id from linkis_manager_label WHERE `label_value` = @JDBC_ALL; +INSERT INTO `linkis_configuration_config_value` (`configkey_id`, `config_value`, `config_label_id`) VALUES (@configkey_id, '', @config_label_id); -select @application_id:=id from `linkis_application` where `name` = 'IDE'; -select @jdbc_id:=id from `linkis_application` where `name` = 'jdbc'; +SELECT @configkey_id := id from linkis_configuration_config_key WHERE `key` = 'wds.linkis.jdbc.username' AND `engine_conn_type` = 'jdbc'; +SELECT @config_label_id := id from linkis_manager_label WHERE `label_value` = @JDBC_ALL; +INSERT INTO `linkis_configuration_config_value` 
(`configkey_id`, `config_value`, `config_label_id`) VALUES (@configkey_id, '', @config_label_id); -INSERT INTO `linkis_config_key` (`id`, `key`, `description`, `name`, `application_id`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`) VALUES (NULL, 'jdbc.url', '格式:', 'jdbc连接地址', @application_id, NULL , 'None', NULL , '0', '0', '1'); -INSERT INTO `linkis_config_key` (`id`, `key`, `description`, `name`, `application_id`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`) VALUES (NULL, 'jdbc.username', NULL , 'jdbc连接用户名', @application_id, NULL, 'None', NULL , '0', '0', '1'); -INSERT INTO `linkis_config_key` (`id`, `key`, `description`, `name`, `application_id`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`) VALUES (NULL, 'jdbc.password', NULL , 'jdbc连接密码', @application_id, NULL , 'None', NULL , '0', '0', '1'); +SELECT @configkey_id := id from linkis_configuration_config_key WHERE `key` = 'wds.linkis.jdbc.password' AND `engine_conn_type` = 'jdbc'; +SELECT @config_label_id := id from linkis_manager_label WHERE `label_value` = @JDBC_ALL; +INSERT INTO `linkis_configuration_config_value` (`configkey_id`, `config_value`, `config_label_id`) VALUES (@configkey_id, '', @config_label_id); -INSERT INTO `linkis_config_tree` (`id`, `parent_id`, `name`, `description`, `application_id`) VALUES (NULL, '0', 'jdbc连接设置', NULL, @jdbc_id); +SELECT @configkey_id := id from linkis_configuration_config_key WHERE `key` = 'wds.linkis.jdbc.connect.max' AND `engine_conn_type` = 'jdbc'; +SELECT @config_label_id := id from linkis_manager_label WHERE `label_value` = @JDBC_ALL; +INSERT INTO `linkis_configuration_config_value` (`configkey_id`, `config_value`, `config_label_id`) VALUES (@configkey_id, '', @config_label_id); -select @key_id1:=id from `linkis_config_key` where `application_id` = @application_id and `key` = 'jdbc.url'; -select @key_id2:=id from `linkis_config_key` where 
`application_id` = @application_id and `key` = 'jdbc.username'; -select @key_id3:=id from `linkis_config_key` where `application_id` = @application_id and `key` = 'jdbc.password'; -SELECT @tree_id1:=t.id from linkis_config_tree t LEFT JOIN linkis_application a on t.application_id = a.id WHERE t.`name` = 'jdbc连接设置' and a.`name` = 'jdbc'; -insert into `linkis_config_key_tree` VALUES(NULL,@key_id1,@tree_id1); -insert into `linkis_config_key_tree` VALUES(NULL,@key_id2,@tree_id1); -insert into `linkis_config_key_tree` VALUES(NULL,@key_id3,@tree_id1); - -INSERT INTO `linkis_datasource_type`(`icon`, `name`) VALUES('0x001', 'ElasticSearch'); -INSERT INTO `linkis_datasource_type`(`icon`, `name`) VALUES('0x001', 'Hive'); -INSERT INTO `linkis_datasource_type`(`icon`, `name`) VALUES('0x001', 'MySql'); \ No newline at end of file +insert into `linkis_external_resource_provider`(`id`,`resource_type`,`name`,`labels`,`config`) values +(1,'Yarn','sit',NULL,'{\r\n\"rmWebAddress\": \"@YARN_RESTFUL_URL\",\r\n\"hadoopVersion\": \"2.7.2\",\r\n\"authorEnable\":true,\r\n\"user\":\"hadoop\",\r\n\"pwd\":\"897ede66a860\"\r\n}'); diff --git a/db/module/linkis-bml.sql b/db/module/linkis-bml.sql index b2355bfb0241b3fa23ea6b371c1539dd6e0b1be1..d5f4b3dfab2c28cc4865dfacd61a24dfd8df10b5 100644 --- a/db/module/linkis-bml.sql +++ b/db/module/linkis-bml.sql @@ -17,10 +17,10 @@ CREATE TABLE `linkis_resources` ( PRIMARY KEY (`id`) ) ENGINE=InnoDB AUTO_INCREMENT=9 DEFAULT CHARSET=utf8mb4; --- 修改expire_type的默认值为NULL +--修改expire_type的默认值为NULL alter table linkis_resources alter column expire_type set default null; --- 修改expire_time的默认值为NULL +--修改expire_time的默认值为NULL alter table linkis_resources alter column expire_time set default null; @@ -40,15 +40,15 @@ CREATE TABLE `linkis_resources_version` ( PRIMARY KEY (`id`) ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4; --- 添加start_byte 和 end_byte 字段 +--添加start_byte 和 end_byte 字段 ALTER TABLE `linkis_resources_version` ADD COLUMN `start_byte` BIGINT(20) UNSIGNED NOT NULL 
DEFAULT 0 AFTER `size`; ALTER TABLE `linkis_resources_version` ADD COLUMN `end_byte` BIGINT(20) UNSIGNED NOT NULL DEFAULT 0 AFTER `start_byte`; --- version字段修改 +--version字段修改 alter table `linkis_resources_version` modify column `version` varchar(20) not null; --- 给resource_id 和 version 加上联合唯一约束 +--给resource_id 和 version 加上联合唯一约束 alter table `linkis_resources_version` add unique key `resource_id_version`(`resource_id`, `version`); @@ -75,13 +75,13 @@ CREATE TABLE `linkis_resources_download_history` ( ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4; --- 删除resources_version_id 字段 +--删除resources_version_id 字段 alter table `linkis_resources_download_history` drop column `resources_version_id`; --- 添加resource_id 字段 +--添加resource_id 字段 alter table `linkis_resources_download_history` add column `resource_id` varchar(50) not null after `state`; --- 添加version字段 +--添加version字段 alter table `linkis_resources_download_history` add column `version` varchar(20) not null after `resource_id`; create table dws_bml_resources_contentType ( @@ -93,7 +93,7 @@ create table dws_bml_resources_contentType ( UNIQUE KEY `whitelist_contentType_uindex` (`content_type`) USING BTREE ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4; --- 创建资源任务表,包括上传,更新,下载 +--创建资源任务表,包括上传,更新,下载 CREATE TABLE `linkis_resources_task` ( `id` bigint(20) NOT NULL AUTO_INCREMENT, `resource_id` varchar(50) DEFAULT NULL COMMENT '资源id,资源的uuid', diff --git a/db/module/linkis-datasource.sql b/db/module/linkis-datasource.sql deleted file mode 100644 index 40a7de947c7ef50620632388a97d7b6376754738..0000000000000000000000000000000000000000 --- a/db/module/linkis-datasource.sql +++ /dev/null @@ -1,67 +0,0 @@ -CREATE TABLE IF NOT EXISTS `linkis_datasource` ( - `id` BIGINT(20) NOT NULL AUTO_INCREMENT, - `datasource_name` VARCHAR(100) NOT NULL COMMENT 'Data source name', - `datasource_type_id` BIGINT(20) DEFAULT NULL COMMENT 'Data source type id', - `datasource_desc` VARCHAR(200) DEFAULT NULL COMMENT 'Data source description', - `create_identify` 
VARCHAR(20) DEFAULT 'BDP' COMMENT 'Example: project name', - `create_system` VARCHAR(20) DEFAULT 'BDP' COMMENT 'Create system', - `create_user` VARCHAR(50) DEFAULT NULL COMMENT 'Creator', - `parameter` TEXT COMMENT 'Connect parameters', - `create_time` TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, - `modify_user` VARCHAR(50) DEFAULT NULL COMMENT 'Modify user', - `modify_time` DATETIME DEFAULT NULL COMMENT 'Modify time', - `datasource_env_id` BIGINT(20) DEFAULT NULL, - PRIMARY KEY (`id`) - ) ENGINE=InnoDB AUTO_INCREMENT=140 DEFAULT CHARSET=utf8; - - -CREATE TABLE IF NOT EXISTS `linkis_datasource_env` ( - `id` BIGINT(20) NOT NULL AUTO_INCREMENT, - `env_name` VARCHAR(100) NOT NULL COMMENT 'Environment name', - `env_desc` VARCHAR(200) DEFAULT NULL COMMENT 'Description', - `create_user` VARCHAR(50) DEFAULT NULL COMMENT 'Creator', - `parameter` TEXT NOT NULL COMMENT 'Connect parameters', - `create_time` TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, - `modify_user` VARCHAR(50) DEFAULT NULL COMMENT 'Modify user', - `modify_time` DATETIME DEFAULT NULL COMMENT 'Modify time', - PRIMARY KEY (`id`) - ) ENGINE=InnoDB AUTO_INCREMENT=108 DEFAULT CHARSET=utf8; - - -CREATE TABLE IF NOT EXISTS `linkis_datasource_type_key` ( - `id` BIGINT(20) NOT NULL AUTO_INCREMENT, - `key` VARCHAR(50) DEFAULT NULL COMMENT 'Key of variable', - `description` VARCHAR(200) DEFAULT NULL COMMENT 'Description', - `name` VARCHAR(50) DEFAULT NULL COMMENT 'Option name of column in page', - `data_source_type_id` BIGINT(20) DEFAULT NULL COMMENT 'Type id', - `require` TINYINT(1) DEFAULT '0', - `scope` VARCHAR(50) DEFAULT NULL COMMENT 'Scope', - `default_value` VARCHAR(200) DEFAULT NULL COMMENT 'Default value', - `value_type` VARCHAR(50) DEFAULT NULL COMMENT 'Value type', - `value_regex` VARCHAR(100) DEFAULT NULL COMMENT 'Value regex', - `ref_id` BIGINT(20) DEFAULT NULL COMMENT 'Related id', - `ref_value` VARCHAR(100) DEFAULT NULL COMMENT 'Related value', - 
PRIMARY KEY (`id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4; - - -CREATE TABLE IF NOT EXISTS `linkis_datasource_type` ( - `id` BIGINT(20) NOT NULL AUTO_INCREMENT, - `icon` VARCHAR(50) DEFAULT NULL COMMENT 'Icon', - `description` VARCHAR(200) DEFAULT NULL COMMENT 'Description', - `name` VARCHAR(50) DEFAULT NULL COMMENT 'Name', - PRIMARY KEY (`id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4; - - -CREATE TABLE IF NOT EXISTS `linkis_datasource_type_env` ( - `id` BIGINT(20) NOT NULL AUTO_INCREMENT, - `data_source_type_id` BIGINT(20) DEFAULT NULL COMMENT 'Type id', - `env_id` BIGINT(20) DEFAULT NULL COMMENT 'Environment id', - PRIMARY KEY (`id`) -)ENGINE=InnoDB DEFAULT CHARSET=utf8mb4; - - -INSERT INTO `linkis_datasource_type`(`icon`, `name`) VALUES('0x001', 'ElasticSearch'); -INSERT INTO `linkis_datasource_type`(`icon`, `name`) VALUES('0x001', 'Hive'); -INSERT INTO `linkis_datasource_type`(`icon`, `name`) VALUES('0x001', 'MySql'); \ No newline at end of file diff --git a/db/module/linkis_application.sql b/db/module/linkis_application.sql deleted file mode 100644 index 2f739ccaa65854d0d1b13c891b3475840849a89b..0000000000000000000000000000000000000000 --- a/db/module/linkis_application.sql +++ /dev/null @@ -1,85 +0,0 @@ -SET FOREIGN_KEY_CHECKS=0; - --- ---------------------------- --- Table structure for linkis_develop_application --- ---------------------------- -DROP TABLE IF EXISTS `linkis_develop_application`; -CREATE TABLE `linkis_develop_application` ( - `id` bigint(20) NOT NULL AUTO_INCREMENT, - `name` varchar(200) DEFAULT NULL, - `source` varchar(50) DEFAULT NULL COMMENT 'Source of the development application', - `version` varchar(50) DEFAULT NULL, - `description` text, - `user_id` bigint(20) DEFAULT NULL, - `is_published` bit(1) DEFAULT NULL, - `create_time` datetime DEFAULT NULL, - `update_time` datetime DEFAULT NULL, - `org_id` bigint(20) DEFAULT NULL COMMENT 'Organization ID', - `visibility` bit(1) DEFAULT NULL, - `is_transfer` bit(1) DEFAULT NULL COMMENT 
'Reserved word', - `initial_org_id` bigint(20) DEFAULT NULL, - `json_path` varchar(255) DEFAULT NULL COMMENT 'Path of the jason file which is used for data development in the front-end.', - `isAsh` bit(1) DEFAULT NULL COMMENT 'If it is active', - `pic` varchar(255) DEFAULT NULL, - `star_num` int(11) DEFAULT '0', - PRIMARY KEY (`id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8; - - --- ---------------------------- --- Table structure for linkis_project_list --- ---------------------------- -DROP TABLE IF EXISTS `linkis_project_list`; -CREATE TABLE `linkis_project_list` ( - `id` bigint(20) NOT NULL AUTO_INCREMENT, - `name` varchar(200) DEFAULT NULL COMMENT 'Project service name which needs to be initialized', - `is_project_need_init` bit(1) DEFAULT NULL, - `url` varchar(255) DEFAULT NULL COMMENT 'URL used to initialize a project', - `is_user_need_init` bit(1) DEFAULT NULL, - `is_project_inited` bit(1) DEFAULT NULL, - `json` text COMMENT 'Data provided by project to the front-end would be jsonized after initialization.', - `level` tinyint(255) DEFAULT NULL COMMENT 'Marks the importance of the project. 
When encounter initialization failure, if a user tried to log in, the project would report an error if its level is greater than 4, otherwise, grey the corresponding function button', - `user_init_url` varchar(255) DEFAULT NULL COMMENT 'URL used to initialize a user', - PRIMARY KEY (`id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8; - - --- ---------------------------- --- Table structure for linkis_project_user --- ---------------------------- -DROP TABLE IF EXISTS `linkis_project_user`; -CREATE TABLE `linkis_project_user` ( - `id` bigint(20) NOT NULL AUTO_INCREMENT, - `project_id` bigint(20) DEFAULT NULL, - `json` varchar(255) DEFAULT NULL COMMENT 'Data returned by initializing a user would be jsonized', - `user_id` bigint(20) DEFAULT NULL, - `is_init_success` bit(1) DEFAULT NULL, - `is_new_feature` bit(1) DEFAULT NULL COMMENT 'If this project is a new function to the user', - PRIMARY KEY (`id`), - UNIQUE KEY `project_id` (`project_id`,`user_id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8; - - --- ---------------------------- --- Table structure for linkis_user --- ---------------------------- -DROP TABLE IF EXISTS `linkis_user`; -CREATE TABLE `linkis_user` ( - `id` bigint(20) NOT NULL AUTO_INCREMENT, - `email` varchar(255) DEFAULT NULL, - `username` varchar(255) NOT NULL, - `password` varchar(255) DEFAULT NULL, - `admin` tinyint(1) DEFAULT NULL COMMENT 'If it is an administrator', - `active` tinyint(1) DEFAULT NULL COMMENT 'If it is active', - `name` varchar(255) DEFAULT NULL COMMENT 'User name', - `description` varchar(255) DEFAULT NULL, - `department` varchar(255) DEFAULT NULL, - `avatar` varchar(255) DEFAULT NULL COMMENT 'Path of the avator', - `create_time` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, - `create_by` bigint(20) DEFAULT '0', - `update_time` timestamp NOT NULL DEFAULT '1970-01-01 08:00:01', - `update_by` bigint(20) DEFAULT '0', - `is_first_login` bit(1) DEFAULT NULL COMMENT 'If it is the first time to log in', - PRIMARY KEY 
(`id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8; - diff --git a/db/module/linkis_configuration.sql b/db/module/linkis_configuration.sql index 1f433220fc8c0b4a6133f3c127930b1b27727f80..021a39cb483b49b0e2e69ac6c87b9fdbfb54a378 100644 --- a/db/module/linkis_configuration.sql +++ b/db/module/linkis_configuration.sql @@ -1,82 +1,42 @@ -SET FOREIGN_KEY_CHECKS=0; - - --- ---------------------------- --- Table structure for linkis_application --- ---------------------------- -DROP TABLE IF EXISTS `linkis_application`; -CREATE TABLE `linkis_application` ( - `id` bigint(20) NOT NULL AUTO_INCREMENT, - `name` varchar(50) DEFAULT NULL COMMENT 'Can be one of the following: execute_application_name(in table linkis_task), request_application_name(i.e. creator), general configuration', - `chinese_name` varchar(50) DEFAULT NULL, - `description` varchar(200) DEFAULT NULL, - PRIMARY KEY (`id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4; - - --- ---------------------------- --- Table structure for linkis_config_key_tree --- ---------------------------- -DROP TABLE IF EXISTS `linkis_config_key_tree`; -CREATE TABLE `linkis_config_key_tree` ( - `id` bigint(20) NOT NULL AUTO_INCREMENT, - `key_id` bigint(20) DEFAULT NULL, - `tree_id` bigint(20) DEFAULT NULL, - PRIMARY KEY (`id`), - KEY `key_id` (`key_id`), - KEY `tree_id` (`tree_id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4; - - --- ---------------------------- --- Table structure for linkis_config_key_user --- ---------------------------- -DROP TABLE IF EXISTS `linkis_config_key_user`; -CREATE TABLE `linkis_config_key_user` ( - `id` bigint(20) NOT NULL AUTO_INCREMENT, - `application_id` bigint(20) DEFAULT NULL COMMENT 'Same as id in tale linkis_application, except that it cannot be the id of creator', - `key_id` bigint(20) DEFAULT NULL, - `user_name` varchar(50) DEFAULT NULL, - `value` varchar(200) DEFAULT NULL COMMENT 'Value of the key', - PRIMARY KEY (`id`), - UNIQUE KEY `application_id_2` (`application_id`,`key_id`,`user_name`), - KEY 
`key_id` (`key_id`), - KEY `application_id` (`application_id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4; - - --- ---------------------------- --- Table structure for linkis_config_key --- ---------------------------- -DROP TABLE IF EXISTS `linkis_config_key`; -CREATE TABLE `linkis_config_key` ( +DROP TABLE IF EXISTS `linkis_configuration_config_key`; +CREATE TABLE `linkis_configuration_config_key`( `id` bigint(20) NOT NULL AUTO_INCREMENT, `key` varchar(50) DEFAULT NULL COMMENT 'Set key, e.g. spark.executor.instances', `description` varchar(200) DEFAULT NULL, `name` varchar(50) DEFAULT NULL, - `application_id` bigint(20) DEFAULT NULL COMMENT 'Correlate with id in table linkis_application', + `engine_conn_type` varchar(50) DEFAULT NULL COMMENT 'engine type,such as spark,hive etc', `default_value` varchar(200) DEFAULT NULL COMMENT 'Adopted when user does not set key', `validate_type` varchar(50) DEFAULT NULL COMMENT 'Validate type, one of the following: None, NumInterval, FloatInterval, Include, Regex, OPF, Custom Rules', - `validate_range` varchar(100) DEFAULT NULL COMMENT 'Validate range', + `validate_range` varchar(50) DEFAULT NULL COMMENT 'Validate range', `is_hidden` tinyint(1) DEFAULT NULL COMMENT 'Whether it is hidden from user. If set to 1(true), then user cannot modify, however, it could still be used in back-end', `is_advanced` tinyint(1) DEFAULT NULL COMMENT 'Whether it is an advanced parameter. If set to 1(true), parameters would be displayed only when user choose to do so', `level` tinyint(1) DEFAULT NULL COMMENT 'Basis for displaying sorting in the front-end. 
Higher the level is, higher the rank the parameter gets', - `unit` varchar(64) DEFAULT NULL, - PRIMARY KEY (`id`), - KEY `application_id` (`application_id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4; + `treeName` varchar(20) DEFAULT NULL COMMENT 'Reserved field, representing the subdirectory of engineType', + PRIMARY KEY (`id`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; --- ---------------------------- --- Table structure for linkis_config_tree --- ---------------------------- -DROP TABLE IF EXISTS `linkis_config_tree`; -CREATE TABLE `linkis_config_tree` ( +DROP TABLE IF EXISTS `linkis_configuration_config_value`; +CREATE TABLE linkis_configuration_config_value( `id` bigint(20) NOT NULL AUTO_INCREMENT, - `parent_id` bigint(20) DEFAULT NULL COMMENT 'Parent ID', - `name` varchar(50) DEFAULT NULL COMMENT 'Application name or category name under general configuration', - `description` varchar(200) DEFAULT NULL, - `application_id` bigint(20) DEFAULT NULL COMMENT 'Same as id(in table linkis_application), except that it cannot be the id of creator', + `configkey_id` bigint(20), + `config_value` varchar(50), + `config_label_id`int(20), + PRIMARY KEY (`id`), + UNIQUE INDEX(`configkey_id`, `config_label_id`) +)ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; + +DROP TABLE IF EXISTS `linkis_configuration_category`; +CREATE TABLE `linkis_configuration_category` ( + `id` int(20) NOT NULL AUTO_INCREMENT, + `label_id` int(20) NOT NULL, + `level` int(20) NOT NULL, + `description` varchar(200), + `tag` varchar(200), + `update_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP, + `create_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP, PRIMARY KEY (`id`), - KEY `application_id` (`application_id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4; + UNIQUE INDEX(`label_id`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; + + diff --git a/db/module/linkis_configuration_dml.sql b/db/module/linkis_configuration_dml.sql index 
4e845aca3d64ae6e5a87ddeb335806e7edb3dc19..ca31ec74caa2eddabc1a16992d4633a7ce302ca8 100644 --- a/db/module/linkis_configuration_dml.sql +++ b/db/module/linkis_configuration_dml.sql @@ -1,246 +1,313 @@ -INSERT INTO `linkis_application` (`id`, `name`, `chinese_name`, `description`) VALUES (0, '通用设置', NULL, NULL); -INSERT INTO `linkis_application` (`id`, `name`, `chinese_name`, `description`) VALUES (0, 'spark', NULL, NULL); -INSERT INTO `linkis_application` (`id`, `name`, `chinese_name`, `description`) VALUES (0, 'IDE', NULL, NULL); -INSERT INTO `linkis_application` (`id`, `name`, `chinese_name`, `description`) VALUES (0, 'hive', NULL, NULL); -INSERT INTO `linkis_application` (`id`, `name`, `chinese_name`, `description`) VALUES (0, 'storage', NULL, NULL); -INSERT INTO `linkis_application` (`id`, `name`, `chinese_name`, `description`) VALUES (0, 'python', NULL, NULL); -INSERT INTO `linkis_application` (`id`, `name`, `chinese_name`, `description`) VALUES (0, 'tidb', NULL, NULL); -INSERT INTO `linkis_application` (`id`, `name`, `chinese_name`, `description`) VALUES (0, 'pipeline', NULL, NULL); - -SELECT @application_id := id from linkis_application where name = '通用设置'; -INSERT INTO `linkis_config_tree` (`id`, `parent_id`, `name`, `description`, `application_id`) VALUES (0, '0', '队列资源', NULL, @application_id); -#INSERT INTO `linkis_config_tree` (`id`, `parent_id`, `name`, `description`, `application_id`) VALUES (0, '0', '预热机制', NULL, @application_id); -#INSERT INTO `linkis_config_tree` (`id`, `parent_id`, `name`, `description`, `application_id`) VALUES (0, '0', '清理机制', NULL, @application_id); -#INSERT INTO `linkis_config_tree` (`id`, `parent_id`, `name`, `description`, `application_id`) VALUES (0, '0', '引擎设置', NULL, @application_id); -INSERT INTO `linkis_config_tree` (`id`, `parent_id`, `name`, `description`, `application_id`) VALUES (0, '0', '驱动器资源', NULL, @application_id); - -SELECT @application_id := id from linkis_application where name = 'spark'; -INSERT INTO 
`linkis_config_tree` (`id`, `parent_id`, `name`, `description`, `application_id`) VALUES (0, '0', 'spark资源设置', NULL, @application_id); -#INSERT INTO `linkis_config_tree` (`id`, `parent_id`, `name`, `description`, `application_id`) VALUES (0, '0', 'spark引擎设置', NULL, @application_id); - -SELECT @application_id := id from linkis_application where name = 'hive'; -#INSERT INTO `linkis_config_tree` (`id`, `parent_id`, `name`, `description`, `application_id`) VALUES (0, '0', 'hive引擎设置', NULL, @application_id); -INSERT INTO `linkis_config_tree` (`id`, `parent_id`, `name`, `description`, `application_id`) VALUES (0, '0', 'hive资源设置', NULL, @application_id); - -SELECT @application_id := id from linkis_application where name = 'python'; -#INSERT INTO `linkis_config_tree` (`id`, `parent_id`, `name`, `description`, `application_id`) VALUES (0, '0', 'python引擎设置', NULL, @application_id); -INSERT INTO `linkis_config_tree` (`id`, `parent_id`, `name`, `description`, `application_id`) VALUES (0, '0', 'python资源设置', NULL, @application_id); - -SELECT @application_id := id from linkis_application where name = 'tidb'; -INSERT INTO `linkis_config_tree` (`id`, `parent_id`, `name`, `description`, `application_id`) VALUES (0, '0', 'tidb设置', NULL, @application_id); -#INSERT INTO `linkis_config_tree` (`id`, `parent_id`, `name`, `description`, `application_id`) VALUES (0, '0', 'spark引擎设置', NULL, @application_id); -INSERT INTO `linkis_config_tree` (`id`, `parent_id`, `name`, `description`, `application_id`) VALUES (0, '0', 'spark资源设置', NULL, @application_id); - -SELECT @application_id := id from linkis_application where name = 'pipeline'; -INSERT INTO `linkis_config_tree` (`id`, `parent_id`, `name`, `description`, `application_id`) VALUES (0, '0', '导入导出设置', NULL, @application_id); -INSERT INTO `linkis_config_tree` (`id`, `parent_id`, `name`, `description`, `application_id`) VALUES (0, '0', '导入导出资源设置', NULL, @application_id); - - - -SELECT @application_id := id from linkis_application where name = 
'通用设置'; -INSERT INTO `linkis_config_key` (`id`, `key`, `description`, `name`, `application_id`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`) VALUES (0, 'wds.linkis.yarnqueue', 'yarn队列名', 'yarn队列名', @application_id, 'ide', 'None', NULL, '0', '0', '1'); -#INSERT INTO `linkis_config_key` (`id`, `key`, `description`, `name`, `application_id`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`) VALUES (0, 'wds.linkis.preheating.time', '预热时间', '预热时间', @application_id, '9:00', 'None', NULL, '0', '0', '1'); -#INSERT INTO `linkis_config_key` (`id`, `key`, `description`, `name`, `application_id`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`) VALUES (0, 'wds.linkis.tmpfile.clean.time', 'tmp文件清理时间', 'tmp文件清理时间', @application_id, '10:00', 'None', NULL, '0', '0', '1'); - -#INSERT INTO `linkis_config_key` (`id`, `key`, `description`, `name`, `application_id`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`) VALUES (0, 'wds.linkis.yarnqueue.cores.max', '取值范围:1-500,单位:个', '队列CPU使用上限', @application_id, '150', 'Regex', '^(?:[1-9]\\d?|[1234]\\d{2}|500)$', '0', '0', '1'); -#INSERT INTO `linkis_config_key` (`id`, `key`, `description`, `name`, `application_id`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `unit`) VALUES (0, 'wds.linkis.yarnqueue.memory.max', '取值范围:1-1000,单位:G', '队列内存使用上限', @application_id, '300G', 'Regex', '^([1-9]\\d{0,2}|1000)(G|g)$', '0', '0', '1','G'); -INSERT INTO `linkis_config_key` (`id`, `key`, `description`, `name`, `application_id`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `unit`) VALUES (0, 'wds.linkis.client.memory.max', '取值范围:1-100,单位:G', '驱动器内存使用上限', @application_id, '20G', 'Regex', '^([1-9]\\d{0,1}|100)(G|g)$', '0', '0', '1', 'G'); -#INSERT INTO `linkis_config_key` (`id`, `key`, `description`, `name`, 
`application_id`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`) VALUES (0, 'wds.linkis.instance', '范围:1-20,单位:个', '引擎最大并发数', @application_id, '10', 'NumInterval', '[1,20]', '0', '0', '1'); - - -SELECT @application_id := id from linkis_application where name = 'IDE'; -INSERT INTO `linkis_config_key` (`id`, `key`, `description`, `name`, `application_id`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`) VALUES (0, 'spark.executor.instances', '取值范围:1-40,单位:个', '执行器实例最大并发数', @application_id, '2', 'NumInterval', '[1,40]', '0', '0', '2'); -INSERT INTO `linkis_config_key` (`id`, `key`, `description`, `name`, `application_id`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`) VALUES (0, 'spark.executor.cores', '取值范围:1-8,单位:个', '执行器核心个数', @application_id, '2', 'NumInterval', '[1,2]', '1', '0', '1'); -INSERT INTO `linkis_config_key` (`id`, `key`, `description`, `name`, `application_id`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`) VALUES (0, 'spark.executor.memory', '取值范围:3-15,单位:G', '执行器内存大小', @application_id, '3', 'NumInterval', '[3,15]', '0', '0', '3'); -INSERT INTO `linkis_config_key` (`id`, `key`, `description`, `name`, `application_id`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`) VALUES (0, 'spark.driver.cores', '取值范围:只能取1,单位:个', '驱动器核心个数', @application_id, '1', 'NumInterval', '[1,1]', '1', '1', '1'); -INSERT INTO `linkis_config_key` (`id`, `key`, `description`, `name`, `application_id`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`) VALUES (0, 'spark.driver.memory', '取值范围:1-15,单位:G', '驱动器内存大小', @application_id, '2', 'NumInterval', '[1,15]', '0', '0', '1'); -INSERT INTO `linkis_config_key` (`id`, `key`, `description`, `name`, `application_id`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, 
`level`) VALUES (0, 'hive.client.memory', '取值范围:1-10,单位:G', 'hive引擎初始化内存大小', @application_id, '2', 'NumInterval', '[1,10]', '0', '0', '1'); -#INSERT INTO `linkis_config_key` (`id`, `key`, `description`, `name`, `application_id`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`) VALUES (0, 'hive.client.java.opts', 'hive客户端进程参数', 'hive引擎启动时jvm参数', @application_id, '', 'None', NULL, '1', '1', '1'); -#INSERT INTO `linkis_config_key` (`id`, `key`, `description`, `name`, `application_id`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`) VALUES (0, 'wds.linkis.instance', '范围:1-3,单位:个', 'hive引擎最大并发数', @application_id, '3', 'NumInterval', '[1,3]', '0', '0', '1'); -#INSERT INTO `linkis_config_key` (`id`, `key`, `description`, `name`, `application_id`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`) VALUES (0, 'wds.linkis.instance', '范围:1-3,单位:个', 'spark引擎最大并发数', @application_id, '3', 'NumInterval', '[1,3]', '0', '0', '1'); -#INSERT INTO `linkis_config_key` (`id`, `key`, `description`, `name`, `application_id`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`) VALUES (0, 'wds.linkis.instance', '范围:1-3,单位:个', 'python引擎最大并发数', @application_id, '3', 'NumInterval', '[1,3]', '0', '0', '1'); -#INSERT INTO `linkis_config_key` (`id`, `key`, `description`, `name`, `application_id`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`) VALUES (0, 'mapred.reduce.tasks', '范围:1-20,单位:个', 'reduce数', @application_id, '10', 'NumInterval', '[1,20]', '0', '1', '1'); -#INSERT INTO `linkis_config_key` (`id`, `key`, `description`, `name`, `application_id`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`) VALUES (0, 'dfs.block.size', '取值范围:2-10,单位:G', 'map数据块大小', @application_id, '10', 'NumInterval', '[2,10]', '0', '1', '1'); -#INSERT INTO `linkis_config_key` (`id`, 
`key`, `description`, `name`, `application_id`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`) VALUES (0, 'hive.exec.reduce.bytes.per.reducer', '取值范围:2-10,单位:G', 'reduce处理的数据量', @application_id, '10', 'NumInterval', '[2,10]', '0', '1', '1'); -INSERT INTO `linkis_config_key` (`id`, `key`, `description`, `name`, `application_id`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`) VALUES (0, 'python.java.client.memory', '取值范围:1-2,单位:G', 'python引擎初始化内存大小', @application_id, '1', 'NumInterval', '[1,2]', '0', '0', '1'); +--变量: +SET @SPARK_LABEL="spark-2.4.3"; +SET @HIVE_LABEL="hive-1.2.1"; +SET @PYTHON_LABEL="python-2"; +SET @PIPELINE_LABEL="pipeline-*"; +SET @JDBC_LABEL="jdbc-4"; + +--衍生变量: +SET @SPARK_ALL=CONCAT('*-*,',@SPARK_LABEL); +SET @SPARK_IDE=CONCAT('*-IDE,',@SPARK_LABEL); +SET @SPARK_NODE=CONCAT('*-nodeexecution,',@SPARK_LABEL); +SET @SPARK_VISUALIS=CONCAT('*-Visualis,',@SPARK_LABEL); + +SET @HIVE_ALL=CONCAT('*-*,',@HIVE_LABEL); +SET @HIVE_IDE=CONCAT('*-IDE,',@HIVE_LABEL); +SET @HIVE_NODE=CONCAT('*-nodeexecution,',@HIVE_LABEL); + +SET @PYTHON_ALL=CONCAT('*-*,',@PYTHON_LABEL); +SET @PYTHON_IDE=CONCAT('*-IDE,',@PYTHON_LABEL); +SET @PYTHON_NODE=CONCAT('*-nodeexecution,',@PYTHON_LABEL); + +SET @PIPELINE_ALL=CONCAT('*-*,',@PIPELINE_LABEL); +SET @PIPELINE_IDE=CONCAT('*-IDE,',@PIPELINE_LABEL); + +SET @JDBC_ALL=CONCAT('*-*,',@JDBC_LABEL); +SET @JDBC_IDE=CONCAT('*-IDE,',@JDBC_LABEL); + + + +---- Configuration的默认Key +-- 全局设置 +INSERT INTO `linkis_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`) VALUES ('wds.linkis.rm.yarnqueue', 'yarn队列名', 'yarn队列名', 'default', 'None', NULL, '0', '0', '1', '队列资源'); +-- spark +INSERT INTO `linkis_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, 
`engine_conn_type`) VALUES ('wds.linkis.rm.yarnqueue.instance.max', '取值范围:1-128,单位:个', 'yarn队列实例最大个数', '30', 'Regex', '^(?:[1-9]\\d?|[1234]\\d{2}|128)$', '0', '0', '1', '队列资源', 'spark'); +INSERT INTO `linkis_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.rm.yarnqueue.cores.max', '取值范围:1-500,单位:个', '队列CPU使用上限', '150', 'Regex', '^(?:[1-9]\\d?|[1234]\\d{2}|500)$', '0', '0', '1', '队列资源', 'spark'); +INSERT INTO `linkis_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.rm.yarnqueue.memory.max', '取值范围:1-1000,单位:G', '队列内存使用上限', '300G', 'Regex', '^([1-9]\\d{0,2}|1000)(G|g)$', '0', '0', '1', '队列资源', 'spark'); +INSERT INTO `linkis_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.rm.client.memory.max', '取值范围:1-100,单位:G', '驱动器内存使用上限', '20G', 'Regex', '^([1-9]\\d{0,1}|100)(G|g)$', '0', '0', '1', '队列资源', 'spark'); +INSERT INTO `linkis_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.rm.client.core.max', '取值范围:1-128,单位:个', '驱动器核心个数上限', '10', 'Regex', '^(?:[1-9]\\d?|[1234]\\d{2}|128)$', '0', '0', '1', '队列资源', 'spark'); +INSERT INTO `linkis_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.rm.instance', '范围:1-20,单位:个', '引擎最大并发数', '10', 'NumInterval', '[1,20]', '0', '0', '1', '队列资源', 'spark'); +INSERT INTO `linkis_configuration_config_key` (`key`, 
`description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.executor.instances', '取值范围:1-40,单位:个', '执行器实例最大并发数', '2', 'NumInterval', '[1,40]', '0', '0', '2', 'spark资源设置', 'spark'); +INSERT INTO `linkis_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.executor.cores', '取值范围:1-8,单位:个', '执行器核心个数', '2', 'NumInterval', '[1,2]', '1', '0', '1','spark资源设置', 'spark'); +INSERT INTO `linkis_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.executor.memory', '取值范围:3-15,单位:G', '执行器内存大小', '3', 'NumInterval', '[3,15]', '0', '0', '3', 'spark资源设置', 'spark'); +INSERT INTO `linkis_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.driver.cores', '取值范围:只能取1,单位:个', '驱动器核心个数', '1', 'NumInterval', '[1,1]', '1', '1', '1', 'spark资源设置','spark'); +INSERT INTO `linkis_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.driver.memory', '取值范围:1-15,单位:G', '驱动器内存大小','2', 'NumInterval', '[1,15]', '0', '0', '1', 'spark资源设置', 'spark'); +INSERT INTO `linkis_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.tispark.pd.addresses', NULL, NULL, 'pd0:2379', 'None', NULL, '0', '0', '1', 'tidb设置', 'spark'); +INSERT INTO `linkis_configuration_config_key` (`key`, `description`, `name`, `default_value`, 
`validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.tispark.tidb.addr', NULL, NULL, 'tidb', 'None', NULL, '0', '0', '1', 'tidb设置', 'spark'); +INSERT INTO `linkis_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.tispark.tidb.password', NULL, NULL, NULL, 'None', NULL, '0', '0', '1', 'tidb设置', 'spark'); +INSERT INTO `linkis_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.tispark.tidb.port', NULL, NULL, '4000', 'None', NULL, '0', '0', '1', 'tidb设置', 'spark'); +INSERT INTO `linkis_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.tispark.tidb.user', NULL, NULL, 'root', 'None', NULL, '0', '0', '1', 'tidb设置', 'spark'); +INSERT INTO `linkis_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.python.version', '取值范围:python2,python3', 'python版本','python2', 'OFT', '[\"python3\",\"python2\"]', '0', '0', '1', 'spark引擎设置', 'spark'); +-- hive +INSERT INTO `linkis_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.rm.client.memory.max', '取值范围:1-100,单位:G', '驱动器内存使用上限', '20G', 'Regex', '^([1-9]\\d{0,1}|100)(G|g)$', '0', '0', '1', '队列资源', 'hive'); +INSERT INTO `linkis_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, 
`treeName`, `engine_conn_type`) VALUES ('wds.linkis.rm.client.core.max', '取值范围:1-128,单位:个', '驱动器核心个数上限', '10', 'Regex', '^(?:[1-9]\\d?|[1234]\\d{2}|128)$', '0', '0', '1', '队列资源', 'hive'); +INSERT INTO `linkis_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.rm.instance', '范围:1-20,单位:个', '引擎最大并发数', '10', 'NumInterval', '[1,20]', '0', '0', '1', '队列资源', 'hive'); +INSERT INTO `linkis_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('hive.client.memory', '取值范围:1-10,单位:G', 'hive引擎初始化内存大小','2', 'NumInterval', '[1,10]', '0', '0', '1', 'hive引擎设置', 'hive'); +INSERT INTO `linkis_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('hive.client.java.opts', 'hive客户端进程参数', 'hive引擎启动时jvm参数','', 'None', NULL, '1', '1', '1', 'hive引擎设置', 'hive'); +INSERT INTO `linkis_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('mapred.reduce.tasks', '范围:1-20,单位:个', 'reduce数', '10', 'NumInterval', '[1,20]', '0', '1', '1', 'hive资源设置', 'hive'); +INSERT INTO `linkis_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('dfs.block.size', '取值范围:2-10,单位:G', 'map数据块大小', '10', 'NumInterval', '[2,10]', '0', '1', '1', 'hive资源设置', 'hive'); +INSERT INTO `linkis_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, 
`engine_conn_type`) VALUES ('hive.exec.reduce.bytes.per.reducer', '取值范围:2-10,单位:G', 'reduce处理的数据量', '10', 'NumInterval', '[2,10]', '0', '1', '1', 'hive资源设置', 'hive'); +--python +INSERT INTO `linkis_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.rm.client.memory.max', '取值范围:1-100,单位:G', '驱动器内存使用上限', '20G', 'Regex', '^([1-9]\\d{0,1}|100)(G|g)$', '0', '0', '1', '队列资源', 'python'); +INSERT INTO `linkis_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.rm.client.core.max', '取值范围:1-128,单位:个', '驱动器核心个数上限', '10', 'Regex', '^(?:[1-9]\\d?|[1234]\\d{2}|128)$', '0', '0', '1', '队列资源', 'python'); +INSERT INTO `linkis_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.rm.instance', '范围:1-20,单位:个', '引擎最大并发数', '10', 'NumInterval', '[1,20]', '0', '0', '1', '队列资源', 'python'); +INSERT INTO `linkis_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('python.java.client.memory', '取值范围:1-2,单位:G', 'python引擎初始化内存大小', '1', 'NumInterval', '[1,2]', '0', '0', '1', 'python引擎设置', 'python'); +INSERT INTO `linkis_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('python.version', '取值范围:python2,python3', 'python版本','python2', 'OFT', '[\"python3\",\"python2\"]', '0', '0', '1', 'python引擎设置', 'python'); +--pipeline +INSERT INTO `linkis_configuration_config_key` (`key`, `description`, `name`, 
`default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('pipeline.output.mold', '取值范围:csv或excel', '结果集导出类型','csv', 'OFT', '[\"csv\",\"excel\"]', '0', '0', '1', 'pipeline引擎设置', 'pipeline'); +INSERT INTO `linkis_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('pipeline.field.split', '取值范围:,或\\t', 'csv分隔符',',', 'OFT', '[\",\",\"\\\\t\"]', '0', '0', '1', 'pipeline引擎设置', 'pipeline'); +INSERT INTO `linkis_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('pipeline.output.charset', '取值范围:utf-8或gbk', '结果集导出字符集','gbk', 'OFT', '[\"utf-8\",\"gbk\"]', '0', '0', '1', 'pipeline引擎设置', 'pipeline'); +INSERT INTO `linkis_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('pipeline.output.isoverwtite', '取值范围:true或false', '是否覆写','true', 'OFT', '[\"true\",\"false\"]', '0', '0', '1', 'pipeline引擎设置', 'pipeline'); +INSERT INTO `linkis_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.instance', '范围:1-3,单位:个', 'pipeline引擎最大并发数','3', 'NumInterval', '[1,3]', '0', '0', '1', 'pipeline引擎设置', 'pipeline'); +INSERT INTO `linkis_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('pipeline.engine.memory', '取值范围:1-10,单位:G', 'pipeline引擎初始化内存大小','2', 'NumInterval', '[1,10]', '0', '0', '1', 'pipeline资源设置', 'pipeline'); +INSERT INTO 
`linkis_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('pipeline.output.shuffle.null.type', '取值范围:NULL或者BLANK', '空值替换','NULL', 'OFT', '[\"NULL\",\"BLANK\"]', '0', '0', '1', 'pipeline引擎设置', 'pipeline'); +--jdbc +INSERT INTO `linkis_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.jdbc.connect.url', '例如:jdbc:hive2://127.0.0.1:10000', 'jdbc连接地址', 'jdbc:hive2://127.0.0.1:10000', 'Regex', '^\s*jdbc:\w+://([^:]+)(:\d+)(/[^\?]+)?(\?\S*)?$', '0', '0', '1', '数据源配置', 'jdbc'); +INSERT INTO `linkis_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.jdbc.version', '取值范围:jdbc3,jdbc4', 'jdbc版本','jdbc4', 'OFT', '[\"jdbc3\",\"jdbc4\"]', '0', '0', '1', '数据源配置', 'jdbc'); +INSERT INTO `linkis_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.jdbc.username', 'username', '数据库连接用户名', '', '', '', '0', '0', '1', '用户配置', 'jdbc'); +INSERT INTO `linkis_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.jdbc.password', 'password', '数据库连接密码', '', '', '', '0', '0', '1', '用户配置', 'jdbc'); +INSERT INTO `linkis_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.jdbc.connect.max', '范围:1-20,单位:个', 'jdbc引擎最大连接数', '10', 'NumInterval', 
'[1,20]', '0', '0', '1', '数据源配置', 'jdbc'); +---- Configuration一级目录 +INSERT INTO `linkis_manager_label` (`label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES ('combined_engineType_userCreator','*-全局设置,*-*', 'OPTIONAL', 2, now(), now()); +INSERT INTO `linkis_manager_label` (`label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES ('combined_engineType_userCreator','*-IDE,*-*', 'OPTIONAL', 2, now(), now()); +INSERT INTO `linkis_manager_label` (`label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES ('combined_engineType_userCreator','*-Visualis,*-*', 'OPTIONAL', 2, now(), now()); +INSERT INTO `linkis_manager_label` (`label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES ('combined_engineType_userCreator','*-nodeexecution,*-*', 'OPTIONAL', 2, now(), now()); + + +---- 引擎级别默认配置 +INSERT INTO `linkis_manager_label` (`label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES ('combined_engineType_userCreator',@SPARK_ALL, 'OPTIONAL', 2, now(), now()); +INSERT INTO `linkis_manager_label` (`label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES ('combined_engineType_userCreator',@HIVE_ALL, 'OPTIONAL', 2, now(), now()); +INSERT INTO `linkis_manager_label` (`label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES ('combined_engineType_userCreator',@PYTHON_ALL, 'OPTIONAL', 2, now(), now()); +INSERT INTO `linkis_manager_label` (`label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES ('combined_engineType_userCreator',@PIPELINE_ALL, 'OPTIONAL', 2, now(), now()); +INSERT INTO `linkis_manager_label` (`label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES 
('combined_engineType_userCreator',@JDBC_ALL, 'OPTIONAL', 2, now(), now()); + + +---- Configuration二级目录(creator 级别的默认配置) +-- IDE +INSERT INTO `linkis_manager_label` (`label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES ('combined_engineType_userCreator',@SPARK_IDE, 'OPTIONAL', 2, now(), now()); +INSERT INTO `linkis_manager_label` (`label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES ('combined_engineType_userCreator',@HIVE_IDE, 'OPTIONAL', 2, now(), now()); +INSERT INTO `linkis_manager_label` (`label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES ('combined_engineType_userCreator',@PYTHON_IDE, 'OPTIONAL', 2, now(), now()); +INSERT INTO `linkis_manager_label` (`label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES ('combined_engineType_userCreator',@PIPELINE_IDE, 'OPTIONAL', 2, now(), now()); +INSERT INTO `linkis_manager_label` (`label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES ('combined_engineType_userCreator',@JDBC_IDE, 'OPTIONAL', 2, now(), now()); + +-- Visualis +INSERT INTO `linkis_manager_label` (`label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES ('combined_engineType_userCreator',@SPARK_VISUALIS, 'OPTIONAL', 2, now(), now()); +-- nodeexecution +INSERT INTO `linkis_manager_label` (`label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES ('combined_engineType_userCreator',@SPARK_NODE, 'OPTIONAL', 2, now(), now()); +INSERT INTO `linkis_manager_label` (`label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES ('combined_engineType_userCreator',@HIVE_NODE, 'OPTIONAL', 2, now(), now()); +INSERT INTO `linkis_manager_label` (`label_key`, `label_value`, `label_feature`, `label_value_size`, 
`update_time`, `create_time`) VALUES ('combined_engineType_userCreator',@PYTHON_NODE, 'OPTIONAL', 2, now(), now()); + + +-- 关联一级二级目录 +SELECT @label_id := id from linkis_manager_label WHERE `label_value` = '*-全局设置,*-*'; +INSERT INTO linkis_configuration_category (`label_id`, `level`) VALUES (@label_id, 1); + +SELECT @label_id := id from linkis_manager_label WHERE `label_value` = '*-IDE,*-*'; +INSERT INTO linkis_configuration_category (`label_id`, `level`) VALUES (@label_id, 1); + +SELECT @label_id := id from linkis_manager_label WHERE `label_value` = '*-Visualis,*-*'; +INSERT INTO linkis_configuration_category (`label_id`, `level`) VALUES (@label_id, 1); + +SELECT @label_id := id from linkis_manager_label WHERE `label_value` = '*-nodeexecution,*-*'; +INSERT INTO linkis_configuration_category (`label_id`, `level`) VALUES (@label_id, 1); + +SELECT @label_id := id from linkis_manager_label WHERE `label_value` = @SPARK_IDE; +INSERT INTO linkis_configuration_category (`label_id`, `level`) VALUES (@label_id, 2); + +SELECT @label_id := id from linkis_manager_label WHERE `label_value` = @HIVE_IDE; +INSERT INTO linkis_configuration_category (`label_id`, `level`) VALUES (@label_id, 2); + +SELECT @label_id := id from linkis_manager_label WHERE `label_value` = @PYTHON_IDE; +INSERT INTO linkis_configuration_category (`label_id`, `level`) VALUES (@label_id, 2); + +SELECT @label_id := id from linkis_manager_label WHERE `label_value` = @PIPELINE_IDE; +INSERT INTO linkis_configuration_category (`label_id`, `level`) VALUES (@label_id, 2); + +SELECT @label_id := id from linkis_manager_label WHERE `label_value` = @JDBC_IDE; +INSERT INTO linkis_configuration_category (`label_id`, `level`) VALUES (@label_id, 2); + +SELECT @label_id := id from linkis_manager_label WHERE `label_value` = @SPARK_VISUALIS; +INSERT INTO linkis_configuration_category (`label_id`, `level`) VALUES (@label_id, 2); + +SELECT @label_id := id from linkis_manager_label WHERE `label_value` = @SPARK_NODE; +INSERT INTO 
linkis_configuration_category (`label_id`, `level`) VALUES (@label_id, 2); + +SELECT @label_id := id from linkis_manager_label WHERE `label_value` = @HIVE_NODE; +INSERT INTO linkis_configuration_category (`label_id`, `level`) VALUES (@label_id, 2); + +SELECT @label_id := id from linkis_manager_label WHERE `label_value` = @PYTHON_NODE; +INSERT INTO linkis_configuration_category (`label_id`, `level`) VALUES (@label_id, 2); + + +---- 关联label和默认配置 +-- 全局默认配置(此处的'*-*,*-*'与一级目录'*-全局设置,*-*'相同,真正查询全局设置的label时应当查询*-*,*-*,而不是*-全局设置,*-*) +INSERT INTO `linkis_manager_label` (`label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES ('combined_engineType_userCreator','*-*,*-*', 'OPTIONAL', 2, now(), now()); + +SELECT @configkey_id := id from linkis_configuration_config_key WHERE `key` = 'wds.linkis.rm.yarnqueue' AND `engine_conn_type` IS NULL; +SELECT @config_label_id := id from linkis_manager_label WHERE `label_value` = '*-*,*-*'; +INSERT INTO `linkis_configuration_config_value` (`configkey_id`, `config_value`, `config_label_id`) VALUES (@configkey_id, '', @config_label_id); + +-- spark默认配置 +SELECT @configkey_id := id from linkis_configuration_config_key WHERE `key` = 'wds.linkis.rm.yarnqueue.instance.max' AND `engine_conn_type` = 'spark'; +SELECT @config_label_id := id from linkis_manager_label WHERE `label_value` = @SPARK_ALL; +INSERT INTO `linkis_configuration_config_value` (`configkey_id`, `config_value`, `config_label_id`) VALUES (@configkey_id, '', @config_label_id); + +SELECT @configkey_id := id from linkis_configuration_config_key WHERE `key` = 'wds.linkis.rm.yarnqueue.cores.max' AND `engine_conn_type` = 'spark'; +SELECT @config_label_id := id from linkis_manager_label WHERE `label_value` = @SPARK_ALL; +INSERT INTO `linkis_configuration_config_value` (`configkey_id`, `config_value`, `config_label_id`) VALUES (@configkey_id, '', @config_label_id); + +SELECT @configkey_id := id from linkis_configuration_config_key WHERE `key` = 
'wds.linkis.rm.yarnqueue.memory.max' AND `engine_conn_type` = 'spark'; +SELECT @config_label_id := id from linkis_manager_label WHERE `label_value` = @SPARK_ALL; +INSERT INTO `linkis_configuration_config_value` (`configkey_id`, `config_value`, `config_label_id`) VALUES (@configkey_id, '', @config_label_id); -#INSERT INTO `linkis_config_key` (`id`, `key`, `description`, `name`, `application_id`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`) VALUES (0, 'wds.linkis.instance', '范围:1-3,单位:个', 'spark引擎最大并发数', @application_id, '1', 'NumInterval', '[1,3]', '0', '0', '1'); -INSERT INTO `linkis_config_key` (`id`, `key`, `description`, `name`, `application_id`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`) VALUES (0, 'spark.executor.instances', '取值范围:1-40,单位:个', '执行器实例最大并发数', @application_id, '2', 'NumInterval', '[1,40]', '0', '0', '2'); -INSERT INTO `linkis_config_key` (`id`, `key`, `description`, `name`, `application_id`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`) VALUES (0, 'spark.executor.cores', '取值范围:1-8,单位:个', '执行器核心个数', @application_id, '2', 'NumInterval', '[1,2]', '1', '0', '1'); -INSERT INTO `linkis_config_key` (`id`, `key`, `description`, `name`, `application_id`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`) VALUES (0, 'spark.executor.memory', '取值范围:3-15,单位:G', '执行器内存大小', @application_id, '3', 'NumInterval', '[3,15]', '0', '0', '3'); -INSERT INTO `linkis_config_key` (`id`, `key`, `description`, `name`, `application_id`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`) VALUES (0, 'spark.driver.cores', '取值范围:只能取1,单位:个', '驱动器核心个数 ', @application_id, '1', 'NumInterval', '[1,1]', '1', '1', '1'); -INSERT INTO `linkis_config_key` (`id`, `key`, `description`, `name`, `application_id`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, 
`is_advanced`, `level`) VALUES (0, 'spark.driver.memory', '取值范围:1-15,单位:G', '驱动器内存大小', @application_id, '2', 'NumInterval', '[1,15]', '0', '0', '1'); - -INSERT INTO `linkis_config_key` (`id`, `key`, `description`, `name`, `application_id`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`) VALUES (0, 'pipeline.output.mold', '取值范围:csv或excel', '结果集导出类型', @application_id, 'csv', 'OFT', '[csv,excel]', '0', '0', '1'); -INSERT INTO `linkis_config_key` (`id`, `key`, `description`, `name`, `application_id`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`) VALUES (0, 'pipeline.field.split', '取值范围:,或\\t', 'csv分隔符', @application_id, ',', 'OFT', '[\",\",\"\\\\t\"]', '0', '0', '1'); -INSERT INTO `linkis_config_key` (`id`, `key`, `description`, `name`, `application_id`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`) VALUES (0, 'pipeline.output.charset', '取值范围:utf-8或gbk', '结果集导出字符集', @application_id, 'gbk', 'OFT', '[utf-8,gbk]', '0', '0', '1'); -INSERT INTO `linkis_config_key` (`id`, `key`, `description`, `name`, `application_id`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`) VALUES (0, 'pipeline.output.isoverwtite', '取值范围:true或false', '是否覆写', @application_id, 'true', 'OFT', '[true,false]', '0', '0', '1'); -INSERT INTO `linkis_config_key` (`id`, `key`, `description`, `name`, `application_id`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`) VALUES (0, 'pipeline.engine.memory', '取值范围:1-10,单位:G', 'pipeline引擎初始化内存大小', @application_id, '2', 'NumInterval', '[1,10]', '0', '0', '1'); - - -INSERT INTO `linkis_config_key` (`id`, `key`, `description`, `name`, `application_id`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`) VALUES (0, 'spark.tispark.pd.addresses', NULL, NULL, @application_id, 'pd0:2379', 'None', NULL, '0', '0', '1'); -INSERT INTO 
`linkis_config_key` (`id`, `key`, `description`, `name`, `application_id`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`) VALUES (0, 'spark.tispark.tidb.addr', NULL, NULL, @application_id, 'tidb', 'None', NULL, '0', '0', '1'); -INSERT INTO `linkis_config_key` (`id`, `key`, `description`, `name`, `application_id`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`) VALUES (0, 'spark.tispark.tidb.password', NULL, NULL, @application_id, NULL, 'None', NULL, '0', '0', '1'); -INSERT INTO `linkis_config_key` (`id`, `key`, `description`, `name`, `application_id`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`) VALUES (0, 'spark.tispark.tidb.port', NULL, NULL, @application_id, '4000', 'None', NULL, '0', '0', '1'); -INSERT INTO `linkis_config_key` (`id`, `key`, `description`, `name`, `application_id`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`) VALUES (0, 'spark.tispark.tidb.user', NULL, NULL, @application_id, 'root', 'None', NULL, '0', '0', '1'); - - -#---------------------------------------全局设置------------------ +SELECT @configkey_id := id from linkis_configuration_config_key WHERE `key` = 'wds.linkis.rm.client.memory.max' AND `engine_conn_type` = 'spark'; +SELECT @config_label_id := id from linkis_manager_label WHERE `label_value` = @SPARK_ALL; +INSERT INTO `linkis_configuration_config_value` (`configkey_id`, `config_value`, `config_label_id`) VALUES (@configkey_id, '', @config_label_id); -SELECT @key_id := id from linkis_config_key WHERE `key` = 'wds.linkis.yarnqueue'; -SELECT @tree_id := id from linkis_config_tree WHERE `name` = '队列资源'; -INSERT INTO `linkis_config_key_tree` (`id`, `key_id`, `tree_id`) VALUES (0, @key_id, @tree_id); - -#SELECT @key_id := id from linkis_config_key WHERE `key` = 'wds.linkis.yarnqueue.cores.max'; -#SELECT @tree_id := id from linkis_config_tree WHERE `name` = '队列资源'; -#INSERT 
INTO `linkis_config_key_tree` (`id`, `key_id`, `tree_id`) VALUES (0, @key_id, @tree_id); - -#SELECT @key_id := id from linkis_config_key WHERE `key` = 'wds.linkis.yarnqueue.memory.max'; -#SELECT @tree_id := id from linkis_config_tree WHERE `name` = '队列资源'; -#INSERT INTO `linkis_config_key_tree` (`id`, `key_id`, `tree_id`) VALUES (0, @key_id, @tree_id); - -#SELECT @key_id := id from linkis_config_key WHERE `key` = 'wds.linkis.preheating.time'; -#SELECT @tree_id := id from linkis_config_tree WHERE `name` = '预热机制'; -#INSERT INTO `linkis_config_key_tree` (`id`, `key_id`, `tree_id`) VALUES (0, @key_id, @tree_id); +SELECT @configkey_id := id from linkis_configuration_config_key WHERE `key` = 'wds.linkis.rm.client.core.max' AND `engine_conn_type` = 'spark'; +SELECT @config_label_id := id from linkis_manager_label WHERE `label_value` = @SPARK_ALL; +INSERT INTO `linkis_configuration_config_value` (`configkey_id`, `config_value`, `config_label_id`) VALUES (@configkey_id, '', @config_label_id); -#SELECT @key_id := id from linkis_config_key WHERE `key` = 'wds.linkis.tmpfile.clean.time'; -#SELECT @tree_id := id from linkis_config_tree WHERE `name` = '清理机制'; -#INSERT INTO `linkis_config_key_tree` (`id`, `key_id`, `tree_id`) VALUES (0, @key_id, @tree_id); +SELECT @configkey_id := id from linkis_configuration_config_key WHERE `key` = 'wds.linkis.rm.instance' AND `engine_conn_type` = 'spark'; +SELECT @config_label_id := id from linkis_manager_label WHERE `label_value` = @SPARK_ALL; +INSERT INTO `linkis_configuration_config_value` (`configkey_id`, `config_value`, `config_label_id`) VALUES (@configkey_id, '', @config_label_id); -#SELECT @key_id := id from linkis_config_key WHERE `name` = '引擎最大并发数'; -#SELECT @tree_id := id from linkis_config_tree WHERE `name` = '引擎设置'; -#INSERT INTO `linkis_config_key_tree` (`id`, `key_id`, `tree_id`) VALUES (0, @key_id, @tree_id); +SELECT @configkey_id := id from linkis_configuration_config_key WHERE `key` = 'spark.executor.instances' AND 
`engine_conn_type` = 'spark'; +SELECT @config_label_id := id from linkis_manager_label WHERE `label_value` = @SPARK_ALL; +INSERT INTO `linkis_configuration_config_value` (`configkey_id`, `config_value`, `config_label_id`) VALUES (@configkey_id, '', @config_label_id); -SELECT @key_id := id from linkis_config_key WHERE `key` = 'wds.linkis.client.memory.max'; -SELECT @tree_id := id from linkis_config_tree WHERE `name` = '驱动器资源'; -INSERT INTO `linkis_config_key_tree` (`id`, `key_id`, `tree_id`) VALUES (0, @key_id, @tree_id); +SELECT @configkey_id := id from linkis_configuration_config_key WHERE `key` = 'spark.executor.cores' AND `engine_conn_type` = 'spark'; +SELECT @config_label_id := id from linkis_manager_label WHERE `label_value` = @SPARK_ALL; +INSERT INTO `linkis_configuration_config_value` (`configkey_id`, `config_value`, `config_label_id`) VALUES (@configkey_id, '', @config_label_id); -#---------------------------------------spark--------------- +SELECT @configkey_id := id from linkis_configuration_config_key WHERE `key` = 'spark.executor.memory' AND `engine_conn_type` = 'spark'; +SELECT @config_label_id := id from linkis_manager_label WHERE `label_value` = @SPARK_ALL; +INSERT INTO `linkis_configuration_config_value` (`configkey_id`, `config_value`, `config_label_id`) VALUES (@configkey_id, '', @config_label_id); -SELECT @key_id:=id from(SELECT k.id as 'id'from linkis_config_key k left join linkis_application a on k.application_id = a.id WHERE k.`key` = 'spark.executor.instances' and a.name = 'IDE' ORDER BY k.id limit 1) as tmp; -SELECT @tree_id := t.id from linkis_config_tree t left join linkis_application a on t.application_id = a.id WHERE t.`name` = 'spark资源设置' and a.name = 'spark' ; -INSERT INTO `linkis_config_key_tree` (`id`, `key_id`, `tree_id`) VALUES (0, @key_id, @tree_id); +SELECT @configkey_id := id from linkis_configuration_config_key WHERE `key` = 'spark.driver.cores' AND `engine_conn_type` = 'spark'; +SELECT @config_label_id := id from 
linkis_manager_label WHERE `label_value` = @SPARK_ALL; +INSERT INTO `linkis_configuration_config_value` (`configkey_id`, `config_value`, `config_label_id`) VALUES (@configkey_id, '', @config_label_id); -#SELECT @key_id:=id from(SELECT k.id as 'id'from linkis_config_key k left join linkis_application a on k.application_id = a.id WHERE k.`name` = 'spark引擎最大并发数' and a.name = 'IDE' ORDER BY k.id limit 1) as tmp; -#SELECT @tree_id := t.id from linkis_config_tree t left join linkis_application a on t.application_id = a.id WHERE t.`name` = 'spark引擎设置' and a.name = 'spark' ; -#INSERT INTO `linkis_config_key_tree` (`id`, `key_id`, `tree_id`) VALUES (0, @key_id, @tree_id); +SELECT @configkey_id := id from linkis_configuration_config_key WHERE `key` = 'spark.driver.memory' AND `engine_conn_type` = 'spark'; +SELECT @config_label_id := id from linkis_manager_label WHERE `label_value` = @SPARK_ALL; +INSERT INTO `linkis_configuration_config_value` (`configkey_id`, `config_value`, `config_label_id`) VALUES (@configkey_id, '', @config_label_id); -SELECT @key_id:=id from(SELECT k.id as 'id'from linkis_config_key k left join linkis_application a on k.application_id = a.id WHERE k.`key` = 'spark.executor.cores' and a.name = 'IDE' ORDER BY k.id limit 1) as tmp; -SELECT @tree_id := t.id from linkis_config_tree t left join linkis_application a on t.application_id = a.id WHERE t.`name` = 'spark资源设置' and a.name = 'spark' ; -INSERT INTO `linkis_config_key_tree` (`id`, `key_id`, `tree_id`) VALUES (0, @key_id, @tree_id); +SELECT @configkey_id := id from linkis_configuration_config_key WHERE `key` = 'spark.python.version' AND `engine_conn_type` = 'spark'; +SELECT @config_label_id := id from linkis_manager_label WHERE `label_value` = @SPARK_ALL; +INSERT INTO `linkis_configuration_config_value` (`configkey_id`, `config_value`, `config_label_id`) VALUES (@configkey_id, '', @config_label_id); -SELECT @key_id:=id from(SELECT k.id as 'id'from linkis_config_key k left join linkis_application a on 
k.application_id = a.id WHERE k.`key` = 'spark.executor.memory' and a.name = 'IDE' ORDER BY k.id limit 1) as tmp; -SELECT @tree_id := t.id from linkis_config_tree t left join linkis_application a on t.application_id = a.id WHERE t.`name` = 'spark资源设置' and a.name = 'spark' ; -INSERT INTO `linkis_config_key_tree` (`id`, `key_id`, `tree_id`) VALUES (0, @key_id, @tree_id); +-- hive默认配置 +SELECT @configkey_id := id from linkis_configuration_config_key WHERE `key` = 'wds.linkis.rm.client.memory.max' AND `engine_conn_type` = 'hive'; +SELECT @config_label_id := id from linkis_manager_label WHERE `label_value` = @HIVE_ALL; +INSERT INTO `linkis_configuration_config_value` (`configkey_id`, `config_value`, `config_label_id`) VALUES (@configkey_id, '', @config_label_id); -SELECT @key_id:=id from(SELECT k.id as 'id'from linkis_config_key k left join linkis_application a on k.application_id = a.id WHERE k.`key` = 'spark.driver.cores' and a.name = 'IDE' ORDER BY k.id limit 1) as tmp; -SELECT @tree_id := t.id from linkis_config_tree t left join linkis_application a on t.application_id = a.id WHERE t.`name` = 'spark资源设置' and a.name = 'spark' ; -INSERT INTO `linkis_config_key_tree` (`id`, `key_id`, `tree_id`) VALUES (0, @key_id, @tree_id); +SELECT @configkey_id := id from linkis_configuration_config_key WHERE `key` = 'wds.linkis.rm.client.core.max' AND `engine_conn_type` = 'hive'; +SELECT @config_label_id := id from linkis_manager_label WHERE `label_value` = @HIVE_ALL; +INSERT INTO `linkis_configuration_config_value` (`configkey_id`, `config_value`, `config_label_id`) VALUES (@configkey_id, '', @config_label_id); -SELECT @key_id:=id from(SELECT k.id as 'id'from linkis_config_key k left join linkis_application a on k.application_id = a.id WHERE k.`key` = 'spark.driver.memory' and a.name = 'IDE' ORDER BY k.id limit 1) as tmp; -SELECT @tree_id := t.id from linkis_config_tree t left join linkis_application a on t.application_id = a.id WHERE t.`name` = 'spark资源设置' and a.name = 'spark' ; 
-INSERT INTO `linkis_config_key_tree` (`id`, `key_id`, `tree_id`) VALUES (0, @key_id, @tree_id); +SELECT @configkey_id := id from linkis_configuration_config_key WHERE `key` = 'wds.linkis.rm.instance' AND `engine_conn_type` = 'hive'; +SELECT @config_label_id := id from linkis_manager_label WHERE `label_value` = @HIVE_ALL; +INSERT INTO `linkis_configuration_config_value` (`configkey_id`, `config_value`, `config_label_id`) VALUES (@configkey_id, '', @config_label_id); -#---------------------------------------tidb------------------ +SELECT @configkey_id := id from linkis_configuration_config_key WHERE `key` = 'hive.client.memory' AND `engine_conn_type` = 'hive'; +SELECT @config_label_id := id from linkis_manager_label WHERE `label_value` = @HIVE_ALL; +INSERT INTO `linkis_configuration_config_value` (`configkey_id`, `config_value`, `config_label_id`) VALUES (@configkey_id, '', @config_label_id); -SELECT @key_id:=id from(SELECT k.id as 'id'from linkis_config_key k left join linkis_application a on k.application_id = a.id WHERE k.`key` = 'spark.executor.instances' and a.name = 'IDE' ORDER BY k.id desc limit 1) as tmp; -SELECT @tree_id := t.id from linkis_config_tree t left join linkis_application a on t.application_id = a.id WHERE t.`name` = 'spark资源设置' and a.name = 'tidb' ; -INSERT INTO `linkis_config_key_tree` (`id`, `key_id`, `tree_id`) VALUES (0, @key_id, @tree_id); - -#SELECT @key_id:=id from(SELECT k.id as 'id'from linkis_config_key k left join linkis_application a on k.application_id = a.id WHERE k.`name` = 'spark引擎最大并发数' and a.name = 'IDE' ORDER BY k.id desc limit 1) as tmp; -#SELECT @tree_id := t.id from linkis_config_tree t left join linkis_application a on t.application_id = a.id WHERE t.`name` = 'spark引擎设置' and a.name = 'tidb' ; -#INSERT INTO `linkis_config_key_tree` (`id`, `key_id`, `tree_id`) VALUES (0, @key_id, @tree_id); +SELECT @configkey_id := id from linkis_configuration_config_key WHERE `key` = 'hive.client.java.opts' AND `engine_conn_type` = 'hive'; 
+SELECT @config_label_id := id from linkis_manager_label WHERE `label_value` = @HIVE_ALL; +INSERT INTO `linkis_configuration_config_value` (`configkey_id`, `config_value`, `config_label_id`) VALUES (@configkey_id, '', @config_label_id); -SELECT @key_id:=id from(SELECT k.id as 'id'from linkis_config_key k left join linkis_application a on k.application_id = a.id WHERE k.`key` = 'spark.executor.cores' and a.name = 'IDE' ORDER BY k.id desc limit 1) as tmp; -SELECT @tree_id := t.id from linkis_config_tree t left join linkis_application a on t.application_id = a.id WHERE t.`name` = 'spark资源设置' and a.name = 'tidb' ; -INSERT INTO `linkis_config_key_tree` (`id`, `key_id`, `tree_id`) VALUES (0, @key_id, @tree_id); +SELECT @configkey_id := id from linkis_configuration_config_key WHERE `key` = 'mapred.reduce.tasks' AND `engine_conn_type` = 'hive'; +SELECT @config_label_id := id from linkis_manager_label WHERE `label_value` = @HIVE_ALL; +INSERT INTO `linkis_configuration_config_value` (`configkey_id`, `config_value`, `config_label_id`) VALUES (@configkey_id, '', @config_label_id); -SELECT @key_id:=id from(SELECT k.id as 'id'from linkis_config_key k left join linkis_application a on k.application_id = a.id WHERE k.`key` = 'spark.executor.memory' and a.name = 'IDE' ORDER BY k.id desc limit 1) as tmp; -SELECT @tree_id := t.id from linkis_config_tree t left join linkis_application a on t.application_id = a.id WHERE t.`name` = 'spark资源设置' and a.name = 'tidb' ; -INSERT INTO `linkis_config_key_tree` (`id`, `key_id`, `tree_id`) VALUES (0, @key_id, @tree_id); +SELECT @configkey_id := id from linkis_configuration_config_key WHERE `key` = 'dfs.block.size' AND `engine_conn_type` = 'hive'; +SELECT @config_label_id := id from linkis_manager_label WHERE `label_value` = @HIVE_ALL; +INSERT INTO `linkis_configuration_config_value` (`configkey_id`, `config_value`, `config_label_id`) VALUES (@configkey_id, '', @config_label_id); -SELECT @key_id:=id from(SELECT k.id as 'id'from linkis_config_key k 
left join linkis_application a on k.application_id = a.id WHERE k.`key` = 'spark.driver.cores' and a.name = 'IDE' ORDER BY k.id desc limit 1) as tmp; -SELECT @tree_id := t.id from linkis_config_tree t left join linkis_application a on t.application_id = a.id WHERE t.`name` = 'spark资源设置' and a.name = 'tidb' ; -INSERT INTO `linkis_config_key_tree` (`id`, `key_id`, `tree_id`) VALUES (0, @key_id, @tree_id); +SELECT @configkey_id := id from linkis_configuration_config_key WHERE `key` = 'hive.exec.reduce.bytes.per.reducer' AND `engine_conn_type` = 'hive'; +SELECT @config_label_id := id from linkis_manager_label WHERE `label_value` = @HIVE_ALL; +INSERT INTO `linkis_configuration_config_value` (`configkey_id`, `config_value`, `config_label_id`) VALUES (@configkey_id, '', @config_label_id); -SELECT @key_id:=id from(SELECT k.id as 'id'from linkis_config_key k left join linkis_application a on k.application_id = a.id WHERE k.`key` = 'spark.driver.memory' and a.name = 'IDE' ORDER BY k.id desc limit 1) as tmp; -SELECT @tree_id := t.id from linkis_config_tree t left join linkis_application a on t.application_id = a.id WHERE t.`name` = 'spark资源设置' and a.name = 'tidb' ; -INSERT INTO `linkis_config_key_tree` (`id`, `key_id`, `tree_id`) VALUES (0, @key_id, @tree_id); +-- python默认配置 +SELECT @configkey_id := id from linkis_configuration_config_key WHERE `key` = 'wds.linkis.rm.client.memory.max' AND `engine_conn_type` = 'python'; +SELECT @config_label_id := id from linkis_manager_label WHERE `label_value` = @PYTHON_ALL; +INSERT INTO `linkis_configuration_config_value` (`configkey_id`, `config_value`, `config_label_id`) VALUES (@configkey_id, '', @config_label_id); -SELECT @key_id := k.id from linkis_config_key k left join linkis_application a on k.application_id = a.id WHERE k.`key` = 'spark.tispark.pd.addresses' and a.name = 'IDE' ; -SELECT @tree_id := t.id from linkis_config_tree t left join linkis_application a on t.application_id = a.id WHERE t.`name` = 'tidb设置' and a.name = 'tidb' 
; -INSERT INTO `linkis_config_key_tree` (`id`, `key_id`, `tree_id`) VALUES (0, @key_id, @tree_id); +SELECT @configkey_id := id from linkis_configuration_config_key WHERE `key` = 'wds.linkis.rm.client.core.max' AND `engine_conn_type` = 'python'; +SELECT @config_label_id := id from linkis_manager_label WHERE `label_value` = @PYTHON_ALL; +INSERT INTO `linkis_configuration_config_value` (`configkey_id`, `config_value`, `config_label_id`) VALUES (@configkey_id, '', @config_label_id); -SELECT @key_id := k.id from linkis_config_key k left join linkis_application a on k.application_id = a.id WHERE k.`key` = 'spark.tispark.tidb.addr' and a.name = 'IDE' ; -SELECT @tree_id := t.id from linkis_config_tree t left join linkis_application a on t.application_id = a.id WHERE t.`name` = 'tidb设置' and a.name = 'tidb' ; -INSERT INTO `linkis_config_key_tree` (`id`, `key_id`, `tree_id`) VALUES (0, @key_id, @tree_id); - -SELECT @key_id := k.id from linkis_config_key k left join linkis_application a on k.application_id = a.id WHERE k.`key` = 'spark.tispark.tidb.password' and a.name = 'IDE' ; -SELECT @tree_id := t.id from linkis_config_tree t left join linkis_application a on t.application_id = a.id WHERE t.`name` = 'tidb设置' and a.name = 'tidb' ; -INSERT INTO `linkis_config_key_tree` (`id`, `key_id`, `tree_id`) VALUES (0, @key_id, @tree_id); - -SELECT @key_id := k.id from linkis_config_key k left join linkis_application a on k.application_id = a.id WHERE k.`key` = 'spark.tispark.tidb.port' and a.name = 'IDE' ; -SELECT @tree_id := t.id from linkis_config_tree t left join linkis_application a on t.application_id = a.id WHERE t.`name` = 'tidb设置' and a.name = 'tidb' ; -INSERT INTO `linkis_config_key_tree` (`id`, `key_id`, `tree_id`) VALUES (0, @key_id, @tree_id); - -SELECT @key_id := k.id from linkis_config_key k left join linkis_application a on k.application_id = a.id WHERE k.`key` = 'spark.tispark.tidb.user' and a.name = 'IDE' ; -SELECT @tree_id := t.id from linkis_config_tree t left join 
linkis_application a on t.application_id = a.id WHERE t.`name` = 'tidb设置' and a.name = 'tidb' ; -INSERT INTO `linkis_config_key_tree` (`id`, `key_id`, `tree_id`) VALUES (0, @key_id, @tree_id); - -#---------------------------hive----------------- -#SELECT @key_id := k.id from linkis_config_key k left join linkis_application a on k.application_id = a.id WHERE k.`name` = 'hive引擎最大并发数' and a.name = 'IDE' ; -#SELECT @tree_id := t.id from linkis_config_tree t left join linkis_application a on t.application_id = a.id WHERE t.`name` = 'hive引擎设置' and a.name = 'hive' ; -#INSERT INTO `linkis_config_key_tree` (`id`, `key_id`, `tree_id`) VALUES (0, @key_id, @tree_id); - -SELECT @key_id := k.id from linkis_config_key k left join linkis_application a on k.application_id = a.id WHERE k.`key` = 'hive.client.memory' and a.name = 'IDE' ; -SELECT @tree_id := t.id from linkis_config_tree t left join linkis_application a on t.application_id = a.id WHERE t.`name` = 'hive资源设置' and a.name = 'hive' ; -INSERT INTO `linkis_config_key_tree` (`id`, `key_id`, `tree_id`) VALUES (0, @key_id, @tree_id); - -#SELECT @key_id := k.id from linkis_config_key k left join linkis_application a on k.application_id = a.id WHERE k.`key` = 'mapred.reduce.tasks' and a.name = 'IDE'; -#SELECT @tree_id := t.id from linkis_config_tree t left join linkis_application a on t.application_id = a.id WHERE t.`name` = 'hive资源设置' and a.name = 'hive' ; -#INSERT INTO `linkis_config_key_tree` (`id`, `key_id`, `tree_id`) VALUES (0, @key_id, @tree_id); - -#SELECT @key_id := k.id from linkis_config_key k left join linkis_application a on k.application_id = a.id WHERE k.`key` = 'dfs.block.size' and a.name = 'IDE' ; -#SELECT @tree_id := t.id from linkis_config_tree t left join linkis_application a on t.application_id = a.id WHERE t.`name` = 'hive资源设置' and a.name = 'hive' ; -#INSERT INTO `linkis_config_key_tree` (`id`, `key_id`, `tree_id`) VALUES (0, @key_id, @tree_id); - -#SELECT @key_id := k.id from linkis_config_key k left join 
linkis_application a on k.application_id = a.id WHERE k.`key` = 'hive.exec.reduce.bytes.per.reducer' and a.name = 'IDE' ; -#SELECT @tree_id := t.id from linkis_config_tree t left join linkis_application a on t.application_id = a.id WHERE t.`name` = 'hive资源设置' and a.name = 'hive' ; -#INSERT INTO `linkis_config_key_tree` (`id`, `key_id`, `tree_id`) VALUES (0, @key_id, @tree_id); - -#SELECT @key_id := k.id from linkis_config_key k left join linkis_application a on k.application_id = a.id WHERE k.`key` = 'hive.client.java.opts' and a.name = 'IDE'; -#SELECT @tree_id := t.id from linkis_config_tree t left join linkis_application a on t.application_id = a.id WHERE t.`name` = 'hive资源设置' and a.name = 'hive' ; -#INSERT INTO `linkis_config_key_tree` (`id`, `key_id`, `tree_id`) VALUES (0, @key_id, @tree_id); - -#------------------------python--------------------- - -#SELECT @key_id := k.id from linkis_config_key k left join linkis_application a on k.application_id = a.id WHERE k.`name` = 'python引擎最大并发数' and a.name = 'IDE' ; -#SELECT @tree_id := t.id from linkis_config_tree t left join linkis_application a on t.application_id = a.id WHERE t.`name` = 'python引擎设置' and a.name = 'python' ; -#INSERT INTO `linkis_config_key_tree` (`id`, `key_id`, `tree_id`) VALUES (0, @key_id, @tree_id); - -SELECT @key_id := k.id from linkis_config_key k left join linkis_application a on k.application_id = a.id WHERE k.`key` = 'python.java.client.memory' and a.name = 'IDE' ; -SELECT @tree_id := t.id from linkis_config_tree t left join linkis_application a on t.application_id = a.id WHERE t.`name` = 'python资源设置' and a.name = 'python' ; -INSERT INTO `linkis_config_key_tree` (`id`, `key_id`, `tree_id`) VALUES (0, @key_id, @tree_id); - -#---------------------------pipeline----------------- -SELECT @key_id := k.id from linkis_config_key k left join linkis_application a on k.application_id = a.id WHERE k.`key` = 'pipeline.output.mold' and a.name = 'IDE' ; -SELECT @tree_id := t.id from linkis_config_tree t 
left join linkis_application a on t.application_id = a.id WHERE t.`name` = '导入导出设置' and a.name = 'pipeline' ; -INSERT INTO `linkis_config_key_tree` (`id`, `key_id`, `tree_id`) VALUES (0, @key_id, @tree_id); - -SELECT @key_id := k.id from linkis_config_key k left join linkis_application a on k.application_id = a.id WHERE k.`key` = 'pipeline.field.split' and a.name = 'IDE' ; -SELECT @tree_id := t.id from linkis_config_tree t left join linkis_application a on t.application_id = a.id WHERE t.`name` = '导入导出设置' and a.name = 'pipeline' ; -INSERT INTO `linkis_config_key_tree` (`id`, `key_id`, `tree_id`) VALUES (0, @key_id, @tree_id); - -SELECT @key_id := k.id from linkis_config_key k left join linkis_application a on k.application_id = a.id WHERE k.`key` = 'pipeline.output.charset' and a.name = 'IDE' ; -SELECT @tree_id := t.id from linkis_config_tree t left join linkis_application a on t.application_id = a.id WHERE t.`name` = '导入导出设置' and a.name = 'pipeline' ; -INSERT INTO `linkis_config_key_tree` (`id`, `key_id`, `tree_id`) VALUES (0, @key_id, @tree_id); - -SELECT @key_id := k.id from linkis_config_key k left join linkis_application a on k.application_id = a.id WHERE k.`key` = 'pipeline.output.isoverwtite' and a.name = 'IDE' ; -SELECT @tree_id := t.id from linkis_config_tree t left join linkis_application a on t.application_id = a.id WHERE t.`name` = '导入导出设置' and a.name = 'pipeline' ; -INSERT INTO `linkis_config_key_tree` (`id`, `key_id`, `tree_id`) VALUES (0, @key_id, @tree_id); - -SELECT @key_id := k.id from linkis_config_key k left join linkis_application a on k.application_id = a.id WHERE k.`key` = 'pipeline.engine.memory' and a.name = 'IDE' ; -SELECT @tree_id := t.id from linkis_config_tree t left join linkis_application a on t.application_id = a.id WHERE t.`name` = '导入导出资源设置' and a.name = 'pipeline' ; -INSERT INTO `linkis_config_key_tree` (`id`, `key_id`, `tree_id`) VALUES (0, @key_id, @tree_id); \ No newline at end of file +SELECT @configkey_id := id from 
linkis_configuration_config_key WHERE `key` = 'wds.linkis.rm.instance' AND `engine_conn_type` = 'python'; +SELECT @config_label_id := id from linkis_manager_label WHERE `label_value` = @PYTHON_ALL; +INSERT INTO `linkis_configuration_config_value` (`configkey_id`, `config_value`, `config_label_id`) VALUES (@configkey_id, '', @config_label_id); + +SELECT @configkey_id := id from linkis_configuration_config_key WHERE `key` = 'python.java.client.memory' AND `engine_conn_type` = 'python'; +SELECT @config_label_id := id from linkis_manager_label WHERE `label_value` = @PYTHON_ALL; +INSERT INTO `linkis_configuration_config_value` (`configkey_id`, `config_value`, `config_label_id`) VALUES (@configkey_id, '', @config_label_id); + +SELECT @configkey_id := id from linkis_configuration_config_key WHERE `key` = 'python.version' AND `engine_conn_type` = 'python'; +SELECT @config_label_id := id from linkis_manager_label WHERE `label_value` = @PYTHON_ALL; +INSERT INTO `linkis_configuration_config_value` (`configkey_id`, `config_value`, `config_label_id`) VALUES (@configkey_id, '', @config_label_id); + +-- pipeline默认配置 +SELECT @configkey_id := id from linkis_configuration_config_key WHERE `key` = 'pipeline.output.mold' AND `engine_conn_type` = 'pipeline'; +SELECT @config_label_id := id from linkis_manager_label WHERE `label_value` = @PIPELINE_ALL; +INSERT INTO `linkis_configuration_config_value` (`configkey_id`, `config_value`, `config_label_id`) VALUES (@configkey_id, '', @config_label_id); + +SELECT @configkey_id := id from linkis_configuration_config_key WHERE `key` = 'pipeline.field.split' AND `engine_conn_type` = 'pipeline'; +SELECT @config_label_id := id from linkis_manager_label WHERE `label_value` = @PIPELINE_ALL; +INSERT INTO `linkis_configuration_config_value` (`configkey_id`, `config_value`, `config_label_id`) VALUES (@configkey_id, '', @config_label_id); + +SELECT @configkey_id := id from linkis_configuration_config_key WHERE `key` = 'pipeline.output.charset' AND 
`engine_conn_type` = 'pipeline'; +SELECT @config_label_id := id from linkis_manager_label WHERE `label_value` = @PIPELINE_ALL; +INSERT INTO `linkis_configuration_config_value` (`configkey_id`, `config_value`, `config_label_id`) VALUES (@configkey_id, '', @config_label_id); + +SELECT @configkey_id := id from linkis_configuration_config_key WHERE `key` = 'pipeline.output.isoverwtite' AND `engine_conn_type` = 'pipeline'; +SELECT @config_label_id := id from linkis_manager_label WHERE `label_value` = @PIPELINE_ALL; +INSERT INTO `linkis_configuration_config_value` (`configkey_id`, `config_value`, `config_label_id`) VALUES (@configkey_id, '', @config_label_id); + +SELECT @configkey_id := id from linkis_configuration_config_key WHERE `key` = 'pipeline.engine.memory' AND `engine_conn_type` = 'pipeline'; +SELECT @config_label_id := id from linkis_manager_label WHERE `label_value` = @PIPELINE_ALL; +INSERT INTO `linkis_configuration_config_value` (`configkey_id`, `config_value`, `config_label_id`) VALUES (@configkey_id, '', @config_label_id); + +SELECT @configkey_id := id from linkis_configuration_config_key WHERE `key` = 'pipeline.output.shuffle.null.type' AND `engine_conn_type` = 'pipeline'; +SELECT @config_label_id := id from linkis_manager_label WHERE `label_value` = @PIPELINE_ALL; +INSERT INTO `linkis_configuration_config_value` (`configkey_id`, `config_value`, `config_label_id`) VALUES (@configkey_id, '', @config_label_id); + +SELECT @configkey_id := id from linkis_configuration_config_key WHERE `key` = 'wds.linkis.instance' AND `engine_conn_type` = 'pipeline'; +SELECT @config_label_id := id from linkis_manager_label WHERE `label_value` = @PIPELINE_ALL; +INSERT INTO `linkis_configuration_config_value` (`configkey_id`, `config_value`, `config_label_id`) VALUES (@configkey_id, '', @config_label_id); + +-- jdbc默认配置 +SELECT @configkey_id := id from linkis_configuration_config_key WHERE `key` = 'wds.linkis.jdbc.connect.url' AND `engine_conn_type` = 'jdbc'; +SELECT 
@config_label_id := id from linkis_manager_label WHERE `label_value` = @JDBC_ALL; +INSERT INTO `linkis_configuration_config_value` (`configkey_id`, `config_value`, `config_label_id`) VALUES (@configkey_id, '', @config_label_id); + +SELECT @configkey_id := id from linkis_configuration_config_key WHERE `key` = 'wds.linkis.jdbc.version' AND `engine_conn_type` = 'jdbc'; +SELECT @config_label_id := id from linkis_manager_label WHERE `label_value` = @JDBC_ALL; +INSERT INTO `linkis_configuration_config_value` (`configkey_id`, `config_value`, `config_label_id`) VALUES (@configkey_id, '', @config_label_id); + +SELECT @configkey_id := id from linkis_configuration_config_key WHERE `key` = 'wds.linkis.jdbc.username' AND `engine_conn_type` = 'jdbc'; +SELECT @config_label_id := id from linkis_manager_label WHERE `label_value` = @JDBC_ALL; +INSERT INTO `linkis_configuration_config_value` (`configkey_id`, `config_value`, `config_label_id`) VALUES (@configkey_id, '', @config_label_id); + +SELECT @configkey_id := id from linkis_configuration_config_key WHERE `key` = 'wds.linkis.jdbc.password' AND `engine_conn_type` = 'jdbc'; +SELECT @config_label_id := id from linkis_manager_label WHERE `label_value` = @JDBC_ALL; +INSERT INTO `linkis_configuration_config_value` (`configkey_id`, `config_value`, `config_label_id`) VALUES (@configkey_id, '', @config_label_id); + +SELECT @configkey_id := id from linkis_configuration_config_key WHERE `key` = 'wds.linkis.jdbc.connect.max' AND `engine_conn_type` = 'jdbc'; +SELECT @config_label_id := id from linkis_manager_label WHERE `label_value` = @JDBC_ALL; +INSERT INTO `linkis_configuration_config_value` (`configkey_id`, `config_value`, `config_label_id`) VALUES (@configkey_id, '', @config_label_id); diff --git a/db/module/linkis_ecp.sql b/db/module/linkis_ecp.sql new file mode 100644 index 0000000000000000000000000000000000000000..c310c37a3b3c76c8713562a0de865a249f2f7b5a --- /dev/null +++ b/db/module/linkis_ecp.sql @@ -0,0 +1,14 @@ +DROP TABLE IF 
EXISTS `linkis_engine_conn_plugin_bml_resources`; +CREATE TABLE `linkis_engine_conn_plugin_bml_resources` ( + `id` bigint(20) NOT NULL AUTO_INCREMENT COMMENT '主键', + `engine_conn_type` varchar(100) NOT NULL COMMENT '引擎类型', + `version` varchar(100) COMMENT '版本', + `file_name` varchar(255) COMMENT '文件名', + `file_size` bigint(20) DEFAULT 0 NOT NULL COMMENT '文件大小', + `last_modified` bigint(20) COMMENT '文件更新时间', + `bml_resource_id` varchar(100) NOT NULL COMMENT '所属系统', + `bml_resource_version` varchar(200) NOT NULL COMMENT '资源所属者', + `create_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '创建时间', + `last_update_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '更新时间', + PRIMARY KEY (`id`) +) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8mb4; \ No newline at end of file diff --git a/db/module/linkis_instance_label.sql b/db/module/linkis_instance_label.sql new file mode 100644 index 0000000000000000000000000000000000000000..cdf8f8a1d2b1724133061c449f9898e99a41fc42 --- /dev/null +++ b/db/module/linkis_instance_label.sql @@ -0,0 +1,42 @@ +CREATE TABLE `linkis_instance_label` ( + `id` int(20) NOT NULL AUTO_INCREMENT, + `label_key` varchar(32) COLLATE utf8_bin NOT NULL COMMENT 'string key', + `label_value` varchar(255) COLLATE utf8_bin NOT NULL COMMENT 'string value', + `label_feature` varchar(16) COLLATE utf8_bin NOT NULL COMMENT 'store the feature of label, but it may be redundant', + `label_value_size` int(20) NOT NULL COMMENT 'size of key -> value map', + `update_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'update unix timestamp', + `create_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'update unix timestamp', + PRIMARY KEY (`id`), + UNIQUE KEY `label_key_value` (`label_key`,`label_value`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; + +CREATE TABLE `linkis_instance_label_value_relation` ( + `label_value_key` varchar(255) COLLATE utf8_bin NOT NULL COMMENT 'value key', + `label_value_content` varchar(255) COLLATE 
utf8_bin DEFAULT NULL COMMENT 'value content', + `label_id` int(20) DEFAULT NULL COMMENT 'id reference linkis_instance_label -> id', + `update_time` datetime DEFAULT CURRENT_TIMESTAMP COMMENT 'update unix timestamp', + `create_time` datetime DEFAULT CURRENT_TIMESTAMP COMMENT 'create unix timestamp', + UNIQUE KEY `label_value_key_label_id` (`label_value_key`,`label_id`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; + +CREATE TABLE `linkis_instance_label_relation` ( + `id` int(20) NOT NULL AUTO_INCREMENT, + `label_id` int(20) DEFAULT NULL COMMENT 'id reference linkis_instance_label -> id', + `service_instance` varchar(64) NOT NULL COLLATE utf8_bin COMMENT 'structure like ${host|machine}:${port}', + `update_time` datetime DEFAULT CURRENT_TIMESTAMP COMMENT 'update unix timestamp', + `create_time` datetime DEFAULT CURRENT_TIMESTAMP COMMENT 'create unix timestamp', + PRIMARY KEY (`id`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; + +CREATE TABLE `linkis_instance_info` ( + `id` int(11) NOT NULL AUTO_INCREMENT, + `instance` varchar(64) COLLATE utf8_bin DEFAULT NULL COMMENT 'structure like ${host|machine}:${port}', + `name` varchar(32) COLLATE utf8_bin DEFAULT NULL COMMENT 'equal application name in registry', + `update_time` datetime DEFAULT CURRENT_TIMESTAMP COMMENT 'update unix timestamp', + `create_time` datetime DEFAULT CURRENT_TIMESTAMP COMMENT 'create unix timestamp', + PRIMARY KEY (`id`), + UNIQUE KEY `instance` (`instance`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; + + + diff --git a/db/module/linkis_manager.sql b/db/module/linkis_manager.sql new file mode 100644 index 0000000000000000000000000000000000000000..255a04d1add1de831ff5b2386632421b204f7f44 --- /dev/null +++ b/db/module/linkis_manager.sql @@ -0,0 +1,151 @@ +DROP TABLE IF EXISTS `linkis_manager_service_instance`; + +CREATE TABLE `linkis_manager_service_instance` ( + `id` int(11) NOT NULL AUTO_INCREMENT, + `instance` varchar(64) COLLATE utf8_bin DEFAULT NULL, + `name` 
varchar(32) COLLATE utf8_bin DEFAULT NULL, + `owner` varchar(32) COLLATE utf8_bin DEFAULT NULL, + `mark` varchar(32) COLLATE utf8_bin DEFAULT NULL, + `update_time` datetime DEFAULT CURRENT_TIMESTAMP, + `create_time` datetime DEFAULT CURRENT_TIMESTAMP, + `updator` varchar(32) COLLATE utf8_bin DEFAULT NULL, + `creator` varchar(32) COLLATE utf8_bin DEFAULT NULL, + PRIMARY KEY (`id`), + UNIQUE KEY `instance` (`instance`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; + +DROP TABLE IF EXISTS `linkis_manager_linkis_resources`; + +CREATE TABLE `linkis_manager_linkis_resources` ( + `id` int(11) NOT NULL AUTO_INCREMENT, + `max_resource` varchar(255) COLLATE utf8_bin DEFAULT NULL, + `min_resource` varchar(255) COLLATE utf8_bin DEFAULT NULL, + `used_resource` varchar(255) COLLATE utf8_bin DEFAULT NULL, + `left_resource` varchar(255) COLLATE utf8_bin DEFAULT NULL, + `expected_resource` varchar(255) COLLATE utf8_bin DEFAULT NULL, + `locked_resource` varchar(255) COLLATE utf8_bin DEFAULT NULL, + `resourceType` varchar(255) COLLATE utf8_bin DEFAULT NULL, + `ticketId` varchar(255) COLLATE utf8_bin DEFAULT NULL, + `update_time` datetime DEFAULT CURRENT_TIMESTAMP, + `create_time` datetime DEFAULT CURRENT_TIMESTAMP, + `updator` varchar(255) COLLATE utf8_bin DEFAULT NULL, + `creator` varchar(255) COLLATE utf8_bin DEFAULT NULL, + PRIMARY KEY (`id`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; + +DROP TABLE IF EXISTS `linkis_manager_lock`; + +CREATE TABLE `linkis_manager_lock` ( + `id` int(11) NOT NULL AUTO_INCREMENT, + `lock_object` varchar(255) COLLATE utf8_bin DEFAULT NULL, + `time_out` longtext COLLATE utf8_bin, + `update_time` datetime DEFAULT CURRENT_TIMESTAMP, + `create_time` datetime DEFAULT CURRENT_TIMESTAMP, + PRIMARY KEY (`id`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; + +DROP TABLE IF EXISTS `linkis_external_resource_provider`; +CREATE TABLE `linkis_external_resource_provider` ( + `id` int(10) NOT NULL AUTO_INCREMENT, + `resource_type` 
varchar(32) NOT NULL, + `name` varchar(32) NOT NULL, + `labels` varchar(32) DEFAULT NULL, + `config` text NOT NULL, + PRIMARY KEY (`id`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8; + + +DROP TABLE IF EXISTS `linkis_manager_engine_em`; +CREATE TABLE `linkis_manager_engine_em` ( + `id` int(20) NOT NULL AUTO_INCREMENT, + `engine_instance` varchar(64) COLLATE utf8_bin DEFAULT NULL, + `em_instance` varchar(64) COLLATE utf8_bin DEFAULT NULL, + `update_time` datetime DEFAULT CURRENT_TIMESTAMP, + `create_time` datetime DEFAULT CURRENT_TIMESTAMP, + PRIMARY KEY (`id`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; + +DROP TABLE IF EXISTS `linkis_manager_label`; + +CREATE TABLE `linkis_manager_label` ( + `id` int(20) NOT NULL AUTO_INCREMENT, + `label_key` varchar(32) COLLATE utf8_bin NOT NULL, + `label_value` varchar(255) COLLATE utf8_bin NOT NULL, + `label_feature` varchar(16) COLLATE utf8_bin NOT NULL, + `label_value_size` int(20) NOT NULL, + `update_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP, + `create_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP, + PRIMARY KEY (`id`), + UNIQUE KEY `label_key_value` (`label_key`,`label_value`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; + +DROP TABLE IF EXISTS `linkis_manager_label_value_relation`; + +CREATE TABLE `linkis_manager_label_value_relation` ( + `label_value_key` varchar(255) COLLATE utf8_bin NOT NULL, + `label_value_content` varchar(255) COLLATE utf8_bin DEFAULT NULL, + `label_id` int(20) DEFAULT NULL, + `update_time` datetime DEFAULT CURRENT_TIMESTAMP, + `create_time` datetime DEFAULT CURRENT_TIMESTAMP, + UNIQUE KEY `label_value_key_label_id` (`label_value_key`,`label_id`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; + +DROP TABLE IF EXISTS `linkis_manager_label_resource`; +CREATE TABLE `linkis_manager_label_resource` ( + `id` int(20) NOT NULL AUTO_INCREMENT, + `label_id` int(20) DEFAULT NULL, + `resource_id` int(20) DEFAULT NULL, + `update_time` datetime DEFAULT CURRENT_TIMESTAMP, + 
`create_time` datetime DEFAULT CURRENT_TIMESTAMP, + PRIMARY KEY (`id`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; + + +DROP TABLE IF EXISTS `linkis_manager_label_service_instance`; +CREATE TABLE `linkis_manager_label_service_instance` ( + `id` int(20) NOT NULL AUTO_INCREMENT, + `label_id` int(20) DEFAULT NULL, + `service_instance` varchar(64) COLLATE utf8_bin DEFAULT NULL, + `update_time` datetime DEFAULT CURRENT_TIMESTAMP, + `create_time` datetime DEFAULT CURRENT_TIMESTAMP, + PRIMARY KEY (`id`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; + + +DROP TABLE IF EXISTS `linkis_manager_label_user`; +CREATE TABLE `linkis_manager_label_user` ( + `id` int(20) NOT NULL AUTO_INCREMENT, + `username` varchar(255) COLLATE utf8_bin DEFAULT NULL, + `label_id` int(20) DEFAULT NULL, + `update_time` datetime DEFAULT CURRENT_TIMESTAMP, + `create_time` datetime DEFAULT CURRENT_TIMESTAMP, + PRIMARY KEY (`id`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; + + +DROP TABLE IF EXISTS `linkis_manager_metrics_history`; + +CREATE TABLE `linkis_manager_metrics_history` ( + `instance_status` int(20) DEFAULT NULL, + `overload` varchar(255) COLLATE utf8_bin DEFAULT NULL, + `heartbeat_msg` varchar(255) COLLATE utf8_bin DEFAULT NULL, + `healthy_status` int(20) DEFAULT NULL, + `create_time` datetime DEFAULT CURRENT_TIMESTAMP, + `creator` varchar(255) COLLATE utf8_bin DEFAULT NULL, + `ticketID` varchar(255) COLLATE utf8_bin DEFAULT NULL, + `serviceName` varchar(255) COLLATE utf8_bin DEFAULT NULL, + `instance` varchar(255) COLLATE utf8_bin DEFAULT NULL +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; + +DROP TABLE IF EXISTS `linkis_manager_service_instance_metrics`; + +CREATE TABLE `linkis_manager_service_instance_metrics` ( + `instance` varchar(32) COLLATE utf8_bin NOT NULL, + `instance_status` int(11) DEFAULT NULL, + `overload` varchar(255) COLLATE utf8_bin DEFAULT NULL, + `heartbeat_msg` varchar(255) COLLATE utf8_bin DEFAULT NULL, + `healthy_status` 
varchar(255) COLLATE utf8_bin DEFAULT NULL, + `update_time` datetime DEFAULT CURRENT_TIMESTAMP, + `create_time` datetime DEFAULT CURRENT_TIMESTAMP, + PRIMARY KEY (`instance`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; diff --git a/db/module/linkis_query.sql b/db/module/linkis_query.sql index 1262fba7b83c977b22f7d6ccc697ca9b24de17ac..af76c6bc799308e6d00221f1f8633faab08885a6 100644 --- a/db/module/linkis_query.sql +++ b/db/module/linkis_query.sql @@ -9,22 +9,25 @@ CREATE TABLE `linkis_task` ( `instance` varchar(50) DEFAULT NULL COMMENT 'An instance of Entrance, consists of IP address of the entrance server and port', `exec_id` varchar(50) DEFAULT NULL COMMENT 'execution ID, consists of jobID(generated by scheduler), executeApplicationName , creator and instance', `um_user` varchar(50) DEFAULT NULL COMMENT 'User name', + `submit_user` varchar(50) DEFAULT NULL COMMENT 'submitUser name', `execution_code` text COMMENT 'Run script. When exceeding 6000 lines, script would be stored in HDFS and its file path would be stored in database', `progress` float DEFAULT NULL COMMENT 'Script execution progress, between zero and one', `log_path` varchar(200) DEFAULT NULL COMMENT 'File path of the log files', `result_location` varchar(200) DEFAULT NULL COMMENT 'File path of the result', - `status` varchar(50) DEFAULT NULL COMMENT 'Script execution status, must be one of the following: Inited, WaitForRetry, Scheduled, Running, Succeed, Failed, Cancelled, Timeout', + `status` varchar(50) DEFAULT NULL COMMENT 'Script execution status, must be one of the following: Inited, WaitForRetry, Scheduled, Running, Succeed, Failed, Cancelled, Timeout', `created_time` datetime DEFAULT NULL COMMENT 'Creation time', `updated_time` datetime DEFAULT NULL COMMENT 'Update time', `run_type` varchar(50) DEFAULT NULL COMMENT 'Further refinement of execution_application_time, e.g, specifying whether to run pySpark or SparkR', `err_code` int(11) DEFAULT NULL COMMENT 'Error code. 
Generated when the execution of the script fails', - `err_desc` text COMMENT 'Execution description. Generated when the execution of script fails', + `err_desc` text COMMENT 'Execution description. Generated when the execution of script fails', `execute_application_name` varchar(200) DEFAULT NULL COMMENT 'The service a user selects, e.g, Spark, Python, R, etc', `request_application_name` varchar(200) DEFAULT NULL COMMENT 'Parameter name for creator', `script_path` varchar(200) DEFAULT NULL COMMENT 'Path of the script in workspace', - `params` text COMMENT 'Configuration item of the parameters', + `params` text COMMENT 'Configuration item of the parameters', `engine_instance` varchar(50) DEFAULT NULL COMMENT 'An instance of engine, consists of IP address of the engine server and port', + `task_resource` varchar(1024) DEFAULT NULL, `engine_start_time` time DEFAULT NULL, + `label_json` varchar(200) DEFAULT NULL COMMENT 'label json', PRIMARY KEY (`id`), KEY `created_time` (`created_time`), KEY `um_user` (`um_user`) diff --git a/docs/zh_CN/README.md b/docs/zh_CN/README.md deleted file mode 100644 index e4f330a13c88de3d2277cdfb1a74a58bafd2bf45..0000000000000000000000000000000000000000 --- a/docs/zh_CN/README.md +++ /dev/null @@ -1,141 +0,0 @@ -Linkis -============ - -[![License](https://img.shields.io/badge/license-Apache%202-4EB1BA.svg)](https://www.apache.org/licenses/LICENSE-2.0.html) - -[English](../../README.md) | 中文 - -# 引言: - -Linkis是一个打通了多个计算存储引擎如:Spark、TiSpark、Hive、Python和HBase等,对外提供统一REST/WebSocket/JDBC接口,提交执行SQL、Pyspark、HiveQL、Scala等脚本的计算中间件。 - -Linkis基于微服务架构,提供了金融级多租户隔离、资源管控、权限隔离等企业级特性,支持统一变量、UDF、函数、用户资源文件管理,具备高并发、高性能、高可用的大数据作业/请求全生命周期管理能力。 - - -![Linkis效果图](images/ch3/解决方案.jpg) -
-
- -基于Linkis计算中间件的架构设计理念,我们在上层构建了很多的应用系统。目前已开源的有: -- [**DataSphere Studio - 数据应用开发管理门户**](https://github.com/WeBankFinTech/DataSphereStudio) -- [**Qualitis - 数据质量工具**](https://github.com/WeBankFinTech/Qualitis) -- [**Scriptis - 数据研发IDE工具**](https://github.com/WeBankFinTech/Scriptis) -- [**Visualis - 数据可视化工具**](https://github.com/WeBankFinTech/Visualis) - -更多工具准备开源中,敬请期待! - ----- - -# 核心特点: - -- **统一作业执行服务**:一个分布式的REST/WebSocket服务,用于接收用户提交的各种脚本请求。 - - **目前支持的计算引擎有**:Spark、Python、TiSpark、Hive和Shell等。 - - **支持的脚本语言有**:SparkSQL、Spark Scala、Pyspark、R、Python、HQL和Shell等; - - - -- **资源管理服务**: 支持实时管控每个系统和用户的资源使用情况,限制系统和用户的资源使用量和并发数,并提供实时的资源动态图表,方便查看和管理系统和用户的资源; - - **目前已支持的资源类型**:Yarn队列资源、服务器(CPU和内存)、用户并发个数等。 - - - -- **应用管理服务**:管理所有系统的所有用户应用,包括离线批量应用、交互式查询应用和实时流式应用,为离线和交互式应用提供强大的复用能力,并提供应用全生命周期管理,自动释放用户多余的空闲应用; - - - -- **统一存储服务**:通用的IO架构,能快速对接各种存储系统,提供统一调用入口,支持所有常用格式数据,集成度高,简单易用; - - - -- **统一上下文服务**:统一用户和系统资源文件(JAR、ZIP、Properties等),用户、系统、计算引擎的参数和变量统一管理,一处设置,处处自动引用; - - - -- **物料库**:系统和用户级物料管理,可分享和流转,支持全生命周期自动管理; - - - -- **元数据服务**:实时的库表结构和分区情况展示。 - -## 与类似系统的对比: - -Linkis跟Apache Livy和Apache Zeppelin Interpreter的定位不一样,但是由于开源社区目前尚没有同类竞品,所以这里做一个简单对比供大家参考。 - -![comparison_table](images/introduction/comparison_table.png) - - - ----- - -# 文档列表 - -[Linkis快速安装使用文档](https://github.com/WeBankFinTech/Linkis/wiki/%E5%A6%82%E4%BD%95%E5%BF%AB%E9%80%9F%E5%AE%89%E8%A3%85%E4%BD%BF%E7%94%A8Linkis) - -[上层前端应用的HTTP接入文档](https://github.com/WeBankFinTech/Linkis/wiki/%E4%B8%8A%E5%B1%82%E5%89%8D%E7%AB%AF%E5%BA%94%E7%94%A8HTTP%E6%8E%A5%E5%85%A5%E6%96%87%E6%A1%A3) - -[上层前端应用的WebSocket接入文档](https://github.com/WeBankFinTech/Linkis/wiki/%E4%B8%8A%E5%B1%82%E5%89%8D%E7%AB%AF%E5%BA%94%E7%94%A8WebSocket%E6%8E%A5%E5%85%A5%E6%96%87%E6%A1%A3) - -[Linkis架构文档](https://github.com/WeBankFinTech/Linkis/wiki/%E6%80%BB%E4%BD%93%E6%9E%B6%E6%9E%84%E8%AE%BE%E8%AE%A1%E4%BB%8B%E7%BB%8D) - -**更多文档,请[点我访问](https://github.com/WeBankFinTech/Linkis/wiki)** - - ----- - -# Architecture: - 
-![introduction_new](images/introduction/introduction_new.png) - ----- - -# RoadMap - -### 1. Linkis管理台 - -- 用户可以查看和管理自己的Job -- 用户可以查看和管理自己的引擎 -- 用户可以查看Yarn的资源使用情况和用户资源使用排名 -- 用户可以配置引擎的启动参数 -- 用户可以设置变量 - -### 2. 支持标签功能 - -- 支持给EngineManager打标签 -- 启动引擎时,支持为引擎打标签 -- 支持通过标签访问目标引擎 - -### 3. AppGovernance应用治理实现 - -- 系统级、用户级应用全生命周期管理,包括离线批量、交互式查询和实时流式应用 -- 为离线和交互式应用提供复用能力和智能切换能力 -- 智能限流、应用智能保护 - -### 4. 支持Flink和Spark Structured Streaming - - -
- -**如果您有任何需求,欢迎给我们提issue,我们将会及时给您回复。** - ----- - -# Contributing - -非常欢迎广大的社区伙伴给我们贡献新引擎和代码! - ----- - -# Communication - -如果您想得到最快的响应,请给我们提issue,或者您也可以扫码进群: - -![introduction05](images/introduction/introduction05.png) - ----- - -# License - -**Linkis is under the Apache 2.0 license. See the [LICENSE](/LICENSE) file for details.** diff --git a/docs/zh_CN/SUMMARY.md b/docs/zh_CN/SUMMARY.md index 391a4e1e366c2b1a10ca344c9bf8ac3e570c8dae..dca2f1fdc128220fa39dc24ead5dfff79b92d3f6 100644 --- a/docs/zh_CN/SUMMARY.md +++ b/docs/zh_CN/SUMMARY.md @@ -1,16 +1,8 @@ # Summary -* [Introduction](README.md) * [系统部署文档](ch1/deploy.md) * [快速使用文档](ch2/linkis快速使用文档.md) * [系统使用手册](ch3/linkis使用文档.md) -* [架构设计]() +* 架构设计 * [UJES架构设计](ch4/Linkis-UJES设计文档.md) - * [RM架构设计](ch4/Linkis-RM设计文档.md) - * [AM架构设计]() -* [开发文档]() -* [FAQ]() -* [版本升级文档]() -* [历次版本发布内容]() - - + * [RM架构设计](ch4/Linkis-RM设计文档.md) \ No newline at end of file diff --git a/emr-base/Dockerfile b/emr-base/Dockerfile deleted file mode 100644 index c42b7315b3e22c5a05b0f42e7d04a83597b48c43..0000000000000000000000000000000000000000 --- a/emr-base/Dockerfile +++ /dev/null @@ -1,73 +0,0 @@ -# -# Copyright 2019 WeBank -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -FROM centos:latest -MAINTAINER wedatasphere@webank.com - -RUN yum -y install wget tar && \ - mkdir /opt/{flink,hadoop,spark,hive} - -WORKDIR /tmp - -RUN wget -O jdk-8u221-linux-x64.tar.gz \ - -c --content-disposition \ - "https://javadl.oracle.com/webapps/download/AutoDL?BundleId=239835_230deb18db3e4014bb8e3e8324f81b43" && \ - wget "https://archive.apache.org/dist/flink/flink-1.7.2/flink-1.7.2-bin-hadoop27-scala_2.11.tgz" && \ - wget "https://downloads.lightbend.com/scala/2.11.8/scala-2.11.8.tgz" && \ - wget "http://archive.apache.org/dist/spark/spark-2.4.4/spark-2.4.4-bin-hadoop2.7.tgz" && \ - wget "http://archive.apache.org/dist/hive/hive-2.3.6/apache-hive-2.3.6-bin.tar.gz" && \ - wget "http://archive.apache.org/dist/hadoop/common/hadoop-2.7.7/hadoop-2.7.7.tar.gz" - - -#COPY flink-1.7.2-bin-hadoop27-scala_2.11.tgz . -#COPY scala-2.11.8.tgz . -#COPY jdk-8u221-linux-x64.tar.gz . -#COPY hadoop-2.7.7.tar.gz . -#COPY spark-2.4.4-bin-hadoop2.7.tgz . -#COPY apache-hive-2.3.6-bin.tar.gz . - -RUN tar xf flink-1.7.2-bin-hadoop27-scala_2.11.tgz -C /opt/flink/ && \ - tar xf scala-2.11.8.tgz -C /usr/local/ && \ - tar xf jdk-8u221-linux-x64.tar.gz -C /usr/local/ && \ - tar xf hadoop-2.7.7.tar.gz -C /opt/hadoop/ && \ - tar xf spark-2.4.4-bin-hadoop2.7.tgz -C /opt/spark/ && \ - tar xf apache-hive-2.3.6-bin.tar.gz -C /opt/hive/ && \ - rm -f *.tgz *.tar.gz - -#COPY hive/conf /opt/hive/apache-hive-2.3.6-bin/conf -#COPY hadoop/conf /opt/hadoop/hadoop-2.7.7/etc/hadoop -#COPY jdk-8u221-linux-x64.tar.gz /tmp -#COPY scala-2.11.8.tgz /tmp -#COPY spark-2.4.4-bin-hadoop2.7.tgz /tmp -# -#RUN tar xf jdk-8u221-linux-x64.tar.gz -C /usr/local/ && \ -# tar xf scala-2.11.8.tgz -C /usr/local/ && \ -# tar xf spark-2.4.4-bin-hadoop2.7.tgz -C /opt/spark/ && \ -# rm -f *.tgz *.tar.gz - -ENV TIME_ZONE Asia/Shanghai -ENV SPARK_HOME=/opt/spark/spark-2.4.4-bin-hadoop2.7 -ENV SPARK_CONF_DIR=$SPARK_HOME/conf -ENV PYSPARK_ALLOW_INSECURE_GATEWAY=1 -ENV HIVE_HOME=/opt/hive/apache-hive-2.3.6-bin -ENV 
FLINK_HOME=/opt/flink/flink-1.7.2 -ENV HIVE_CONF_DIR=$HIVE_HOME/conf -ENV SCALA_HOME=/usr/local/scala-2.11.8 -ENV HADOOP_HOME=/opt/hadoop/hadoop-2.7.7 -ENV HADOOP_CONF_PATH=$HADOOP_HOME/etc/hadoop -ENV HADOOP_CONF_DIR=$HADOOP_HOME/etc/hadoop -ENV JAVA_HOME /usr/local/jdk1.8.0_221 -ENV CLASSPATH $JAVA_HOME/lib/dt.jar:$JAVA_HOME/lib/tools.jar -ENV PATH ${JAVA_HOME}/bin/:${SPARK_HOME}/bin:${HIVE_HOME}/bin:${SCALA_HOME}/bin:${FLINK_HOME}/bin:${HADOOP_HOME}/sbin:${HADOOP_HOME}/bin:$PATH \ No newline at end of file diff --git a/emr-base/README.md b/emr-base/README.md deleted file mode 100644 index ff758fdb6e9d417a1a74005cf6fcdbf4839286ae..0000000000000000000000000000000000000000 --- a/emr-base/README.md +++ /dev/null @@ -1,4 +0,0 @@ -该目录下放入jdk,scala,spark二进制压缩包编译基础镜像使用,其他linkis module都是基于基础镜像做扩展 -spark需要二进制包,因为spark-engine调用了spark-submit脚本 - -hadoop,hive 配置文件core-site.xml,hdfs-sit.xml,hive-site.xml等也放在同一级目录hive/conf,hadoop/conf,按Dockerfile的指定层级 \ No newline at end of file diff --git a/eurekaServer/bin/start-eureka.sh b/eurekaServer/bin/start-eureka.sh deleted file mode 100755 index fd70203b43aee69960b340f7d769395ba8631945..0000000000000000000000000000000000000000 --- a/eurekaServer/bin/start-eureka.sh +++ /dev/null @@ -1,37 +0,0 @@ -#!/bin/bash - -cd `dirname $0` -cd .. -HOME=`pwd` - -export EUREKA_SERVER_PID=$HOME/bin/linkis.pid - -if [[ -f "${EUREKA_SERVER_PID}" ]]; then - pid=$(cat ${EUREKA_SERVER_PID}) - if kill -0 ${pid} >/dev/null 2>&1; then - echo "EUREKA_SERVER Remote Server is already running." 
- exit 1 - fi -fi - -export EUREKA_SERVER_LOG_PATH=$HOME/logs -export EUREKA_SERVER_HEAP_SIZE="512M" -export EUREKA_SERVER_CLASS=${EUREKA_SERVER_CLASS:-com.webank.wedatasphere.linkis.eureka.SpringCloudEurekaApplication} - -profiles='eureka' -if [ $1 ];then - type=$1 -fi - -export EUREKA_SERVER_JAVA_OPTS=" -Xmx$EUREKA_SERVER_HEAP_SIZE -XX:+UseG1GC -Xloggc:$HOME/logs/linkis-gc.log" - -java $EUREKA_SERVER_JAVA_OPTS -cp $HOME/conf:$HOME/lib/* $EUREKA_SERVER_CLASS --spring.profiles.active=$profiles 2>&1 > $EUREKA_SERVER_LOG_PATH/linkis.out & -pid=$! -sleep 2 -if [[ -z "${pid}" ]]; then - echo "EUREKA SERVER start failed!" - exit 1 -else - echo "EUREKA SERVER start succeed!" - echo $pid > $EUREKA_SERVER_PID -fi diff --git a/eurekaServer/bin/stop-eureka.sh b/eurekaServer/bin/stop-eureka.sh deleted file mode 100755 index f032887111aae2a915bd8302af30617599a506e2..0000000000000000000000000000000000000000 --- a/eurekaServer/bin/stop-eureka.sh +++ /dev/null @@ -1,47 +0,0 @@ -#!/bin/bash - -cd `dirname $0` -cd .. -HOME=`pwd` - -export SERVER_PID=$HOME/bin/linkis.pid - -function wait_for_server_to_die() { - local pid - local count - pid=$1 - timeout=$2 - count=0 - timeoutTime=$(date "+%s") - let "timeoutTime+=$timeout" - currentTime=$(date "+%s") - forceKill=1 - - while [[ $currentTime -lt $timeoutTime ]]; do - $(kill ${pid} > /dev/null 2> /dev/null) - if kill -0 ${pid} > /dev/null 2>&1; then - sleep 3 - else - forceKill=0 - break - fi - currentTime=$(date "+%s") - done - - if [[ forceKill -ne 0 ]]; then - $(kill -9 ${pid} > /dev/null 2> /dev/null) - fi -} - -if [[ ! -f "${SERVER_PID}" ]]; then - echo "server $SERVER_NAME is not running" -else - pid=$(cat ${SERVER_PID}) - if [[ -z "${pid}" ]]; then - echo "server $SERVER_NAME is not running" - else - wait_for_server_to_die $pid 40 - $(rm -f ${SERVER_PID}) - echo "server $SERVER_NAME is stopped." 
- fi -fi \ No newline at end of file diff --git a/eurekaServer/pom.xml b/eurekaServer/pom.xml deleted file mode 100644 index 2c1c1a01b689375710f2c8e3e2b1a2cb245aa02f..0000000000000000000000000000000000000000 --- a/eurekaServer/pom.xml +++ /dev/null @@ -1,107 +0,0 @@ - - - - - - linkis - com.webank.wedatasphere.linkis - 0.10.0 - ../pom.xml - - - linkis-eureka-server - 4.0.0 - jar - - - - org.springframework.cloud - spring-cloud-starter - - - org.springframework.boot - spring-boot-starter - - - - - org.springframework.cloud - spring-cloud-starter-eureka-server - ${spring.eureka.version} - - - hibernate-validator - org.hibernate.validator - - - - - - - - org.apache.maven.plugins - maven-deploy-plugin - - - - net.alchim31.maven - scala-maven-plugin - - - org.apache.maven.plugins - maven-jar-plugin - - - org.apache.maven.plugins - maven-assembly-plugin - 2.3 - false - - - make-assembly - package - - single - - - - src/main/assembly/distribution.xml - - - - - - false - eureka - false - false - - src/main/assembly/distribution.xml - - - - - - - ${basedir}/src/main/resources - - - ${project.artifactId}-${project.version} - - \ No newline at end of file diff --git a/eurekaServer/src/main/assembly/distribution.xml b/eurekaServer/src/main/assembly/distribution.xml deleted file mode 100644 index 6e9cb5c1f5b6dd97d5e90c5db21dcf85dc2e6cdc..0000000000000000000000000000000000000000 --- a/eurekaServer/src/main/assembly/distribution.xml +++ /dev/null @@ -1,70 +0,0 @@ - - - - eurekaServer - - zip - - true - eureka - - - - - - lib - true - true - false - true - true - - - - - - ${basedir}/src/main/resources - - * - - 0777 - conf - unix - - - ${basedir}/bin - - * - - 0777 - bin - unix - - - . 
- - */** - - logs - - - - - diff --git a/extensions/spark-excel/pom.xml b/extensions/spark-excel/pom.xml deleted file mode 100644 index dbde02f216203136b03b7291f72753990b2a4c5b..0000000000000000000000000000000000000000 --- a/extensions/spark-excel/pom.xml +++ /dev/null @@ -1,190 +0,0 @@ - - - - linkis - com.webank.wedatasphere.linkis - 0.10.0 - ../../pom.xml - - 4.0.0 - - com.webank.weDataSphere - spark-excel_2.11 - - - UTF-8 - UTF-8 - 1.7 - 3.3.3 - 2.11.7 - 2.11 - 2.1.0 - 2.7.2 - 0.8.2.0 - 1.7.12 - 1.2.2 - 2.7.0 - 1.1.2 - 1.2.17 - 3.2.11 - 12.0.1 - provided - provided - - - - - com.webank.wedatasphere.linkis - linkis-common - provided - - - org.slf4j - slf4j-log4j12 - ${slf4j.version} - - - org.apache.poi - poi-ooxml - 3.17 - - - com.norbitltd - spoiwo_${scala.compat.version} - 1.2.0 - - - com.monitorjbl - xlsx-streamer - 1.2.1 - - - org.apache.poi - ooxml-schemas - - - - - com.fasterxml.jackson.module - jackson-module-scala_${scala.compat.version} - 2.8.8 - ${spark-scope} - - - org.scalatest - scalatest_${scala.compat.version} - 3.0.1 - ${spark-scope} - - - org.scalacheck - scalacheck_${scala.compat.version} - 1.13.4 - ${spark-scope} - - - com.github.alexarchambault - scalacheck-shapeless_1.13_${scala.compat.version} - 1.1.6 - ${spark-scope} - - - com.holdenkarau - spark-testing-base_${scala.compat.version} - 2.1.0_0.9.0 - ${spark-scope} - - - org.scalamock - scalamock-scalatest-support_${scala.compat.version} - 3.5.0 - ${spark-scope} - - - - org.apache.spark - spark-core_${scala.compat.version} - ${spark.version} - ${spark-scope} - - - - org.apache.spark - spark-sql_${scala.compat.version} - ${spark.version} - ${spark-scope} - - - - org.apache.hadoop - hadoop-hdfs - ${hadoop.version} - ${spark-scope} - - - servlet-api - javax.servlet - - - com.google.guava - guava - - - - - - - - - org.apache.maven.plugins - maven-deploy-plugin - 2.8.2 - - - org.apache.maven.plugins - maven-compiler-plugin - 3.2 - - ${java.version} - ${java.version} - UTF-8 - - - - 
net.alchim31.maven - scala-maven-plugin - 3.2.2 - - incremental - - -target:jvm-${java.version} - - - -source - ${java.version} - -target - ${java.version} - - - - - process-resources - - compile - - - - scala-test-compile - process-test-resources - - testCompile - - - - - - - - \ No newline at end of file diff --git a/extensions/spark-excel/src/main/scala/com/webank/wedatasphere/spark/excel/DefaultSource15.scala b/extensions/spark-excel/src/main/scala/com/webank/wedatasphere/spark/excel/DefaultSource15.scala deleted file mode 100644 index 27d04ef709516a4cb760e8c2322d2a0b3b7cbbc7..0000000000000000000000000000000000000000 --- a/extensions/spark-excel/src/main/scala/com/webank/wedatasphere/spark/excel/DefaultSource15.scala +++ /dev/null @@ -1,7 +0,0 @@ -package com.webank.wedatasphere.spark.excel - -import org.apache.spark.sql.sources.DataSourceRegister - -class DefaultSource15 extends DefaultSource with DataSourceRegister { - override def shortName(): String = "excel" -} diff --git a/extensions/spark-excel/src/main/scala/com/webank/wedatasphere/spark/excel/package.scala b/extensions/spark-excel/src/main/scala/com/webank/wedatasphere/spark/excel/package.scala deleted file mode 100644 index d9e3733af6072656cd173d51df09087f3eeb3db8..0000000000000000000000000000000000000000 --- a/extensions/spark-excel/src/main/scala/com/webank/wedatasphere/spark/excel/package.scala +++ /dev/null @@ -1,30 +0,0 @@ -package com.webank.wedatasphere.spark - -import org.apache.poi.ss.usermodel.Row.MissingCellPolicy -import org.apache.poi.ss.usermodel.{Cell, Row} - -package object excel { - - implicit class RichRow(val row: Row) extends AnyVal { - - def eachCellIterator(startColumn: Int, endColumn: Int): Iterator[Option[Cell]] = new Iterator[Option[Cell]] { - private val lastCellInclusive = row.getLastCellNum - 1 - private val endCol = Math.min(endColumn, Math.max(startColumn, lastCellInclusive)) - require(startColumn >= 0 && startColumn <= endCol) - - private var nextCol = startColumn - - 
override def hasNext: Boolean = nextCol <= endCol && nextCol <= lastCellInclusive - - override def next(): Option[Cell] = { - val next = - if (nextCol > endCol) throw new NoSuchElementException(s"column index = $nextCol") - else Option(row.getCell(nextCol, MissingCellPolicy.RETURN_NULL_AND_BLANK)) - nextCol += 1 - next - } - } - - } - -} diff --git a/gateway/core/pom.xml b/gateway/core/pom.xml deleted file mode 100644 index c424ba590714d921e6e2ad9bb52f778766d92369..0000000000000000000000000000000000000000 --- a/gateway/core/pom.xml +++ /dev/null @@ -1,109 +0,0 @@ - - - - 4.0.0 - - - linkis - com.webank.wedatasphere.linkis - 0.10.0 - ../../pom.xml - - linkis-gateway-core - - - - com.webank.wedatasphere.linkis - linkis-cloudRPC - - - org.springframework.boot - spring-boot-starter-web - - - org.springframework.boot - spring-boot-starter-jetty - - - org.eclipse.jetty - jetty-server - - - org.eclipse.jetty - jetty-webapp - - - websocket-server - org.eclipse.jetty.websocket - - - io.netty - netty-all - - - json4s-jackson_2.11 - org.json4s - - - - - org.eclipse.jetty.websocket - websocket-client - ${jetty.version} - - - org.eclipse.jetty.websocket - websocket-servlet - ${jetty.version} - - - - - - - org.apache.maven.plugins - maven-deploy-plugin - - - - net.alchim31.maven - scala-maven-plugin - - - org.apache.maven.plugins - maven-jar-plugin - - - **/*.yml - **/*.properties - **/*.sh - - true - - - - - - ${basedir}/src/main/resources - - - ${project.artifactId}-${project.version} - - - diff --git a/gateway/core/src/main/scala/com/webank/wedatasphere/linkis/gateway/security/ProxyUserUtils.scala b/gateway/core/src/main/scala/com/webank/wedatasphere/linkis/gateway/security/ProxyUserUtils.scala deleted file mode 100644 index c223ebe793eaa555bf3fc71d68a12e7e351b3883..0000000000000000000000000000000000000000 --- a/gateway/core/src/main/scala/com/webank/wedatasphere/linkis/gateway/security/ProxyUserUtils.scala +++ /dev/null @@ -1,49 +0,0 @@ -/* - * Copyright 2019 WeBank - * - * 
Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package com.webank.wedatasphere.linkis.gateway.security - -import java.util.Properties -import java.util.concurrent.TimeUnit - -import com.webank.wedatasphere.linkis.common.utils.{Logging, Utils} -import com.webank.wedatasphere.linkis.gateway.config.GatewayConfiguration._ -import org.apache.commons.lang.StringUtils - -object ProxyUserUtils extends Logging { - - private val props = new Properties - if(ENABLE_PROXY_USER.getValue){ - Utils.defaultScheduler.scheduleAtFixedRate(new Runnable { - override def run(): Unit = { - info("loading proxy users.") - val newProps = new Properties - newProps.load(this.getClass.getResourceAsStream(PROXY_USER_CONFIG.getValue)) - props.clear() - props.putAll(newProps) - } - }, 0, PROXY_USER_SCAN_INTERVAL.getValue, TimeUnit.MILLISECONDS) - } - - def getProxyUser(umUser: String): String = if(ENABLE_PROXY_USER.getValue) { - val proxyUser = props.getProperty(umUser) - if(StringUtils.isBlank(proxyUser)) umUser else { - info(s"switched to proxy user $proxyUser for umUser $umUser.") - proxyUser - } - } else umUser - -} diff --git a/gateway/core/src/main/scala/com/webank/wedatasphere/linkis/gateway/security/SecurityFilter.scala b/gateway/core/src/main/scala/com/webank/wedatasphere/linkis/gateway/security/SecurityFilter.scala deleted file mode 100644 index 04209acbe362a81ee37e4225411a698fe977b3c1..0000000000000000000000000000000000000000 --- 
a/gateway/core/src/main/scala/com/webank/wedatasphere/linkis/gateway/security/SecurityFilter.scala +++ /dev/null @@ -1,134 +0,0 @@ -/* - * Copyright 2019 WeBank - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package com.webank.wedatasphere.linkis.gateway.security - -import java.text.DateFormat -import java.util.{Date, Locale} - -import com.webank.wedatasphere.linkis.common.conf.Configuration -import com.webank.wedatasphere.linkis.common.exception.DWCException -import com.webank.wedatasphere.linkis.common.utils.Utils -import com.webank.wedatasphere.linkis.gateway.config.GatewayConfiguration -import com.webank.wedatasphere.linkis.gateway.http.GatewayContext -import com.webank.wedatasphere.linkis.gateway.security.sso.SSOInterceptor -import com.webank.wedatasphere.linkis.gateway.security.token.TokenAuthentication -import com.webank.wedatasphere.linkis.server.conf.ServerConfiguration -import com.webank.wedatasphere.linkis.server.exception.{LoginExpireException, NonLoginException} -import com.webank.wedatasphere.linkis.server.{Message, validateFailed} -import org.apache.commons.lang.StringUtils -import org.apache.commons.lang.exception.ExceptionUtils - -/** - * created by cooperyang on 2019/1/9. 
- */ -object SecurityFilter { - - private val refererValidate = ServerConfiguration.BDP_SERVER_SECURITY_REFERER_VALIDATE.getValue - private val localAddress = ServerConfiguration.BDP_SERVER_ADDRESS.getValue - protected val testUser: String = ServerConfiguration.BDP_TEST_USER.getValue - - private var userRestful: UserRestful = _ - def setUserRestful(userRestful: UserRestful): Unit = this.userRestful = userRestful - - def filterResponse(gatewayContext: GatewayContext, message: Message): Unit = { - gatewayContext.getResponse.setStatus(Message.messageToHttpStatus(message)) - gatewayContext.getResponse.write(message) - gatewayContext.getResponse.sendResponse() - } - - def doFilter(gatewayContext: GatewayContext): Boolean = { - addAccessHeaders(gatewayContext) - if(refererValidate) { - //Security certification support, referer limited(安全认证支持,referer限定) - val referer = gatewayContext.getRequest.getHeaders.get("Referer") - if(referer != null && referer.nonEmpty && StringUtils.isNotEmpty(referer.head) && !referer.head.trim.contains(localAddress)) { - filterResponse(gatewayContext, validateFailed("Unallowed cross-site request(不允许的跨站请求)!")) - return false - } - //Security certification support, solving verb tampering(安全认证支持,解决动词篡改) - gatewayContext.getRequest.getMethod.toUpperCase match { - case "GET" | "POST" | "PUT" | "DELETE" | "HEAD" | "TRACE" | "CONNECT" | "OPTIONS" => - case _ => - filterResponse(gatewayContext, validateFailed("Do not use HTTP verbs to tamper with(不可使用HTTP动词篡改)!")) - return false - } - } - val isPassAuthRequest = GatewayConfiguration.PASS_AUTH_REQUEST_URI.exists(gatewayContext.getRequest.getRequestURI.startsWith) - if(gatewayContext.getRequest.getRequestURI.startsWith(ServerConfiguration.BDP_SERVER_USER_URI.getValue)) { - Utils.tryCatch(userRestful.doUserRequest(gatewayContext)){ t => - val message = t match { - case dwc: DWCException => dwc.getMessage - case _ => "login failed! 
reason: " + ExceptionUtils.getRootCauseMessage(t) - } - GatewaySSOUtils.error("login failed!", t) - filterResponse(gatewayContext, Message.error(message).<<(gatewayContext.getRequest.getRequestURI)) - } - false - } else if(isPassAuthRequest && !GatewayConfiguration.ENABLE_SSO_LOGIN.getValue) { - GatewaySSOUtils.info("No login needed for proxy uri: " + gatewayContext.getRequest.getRequestURI) - true - } else if(TokenAuthentication.isTokenRequest(gatewayContext)) { - TokenAuthentication.tokenAuth(gatewayContext) - } else { - val userName = Utils.tryCatch(GatewaySSOUtils.getLoginUser(gatewayContext)){ - case n @ (_: NonLoginException | _: LoginExpireException )=> - if(Configuration.IS_TEST_MODE.getValue) None else { - filterResponse(gatewayContext, Message.noLogin(n.getMessage) << gatewayContext.getRequest.getRequestURI) - return false - } - case t: Throwable => - GatewaySSOUtils.warn("", t) - throw t - } - if(userName.isDefined) { - true - } else if(Configuration.IS_TEST_MODE.getValue) { - GatewaySSOUtils.info("test mode! 
login for uri: " + gatewayContext.getRequest.getRequestURI) - GatewaySSOUtils.setLoginUser(gatewayContext, testUser) - true - } else if(GatewayConfiguration.ENABLE_SSO_LOGIN.getValue) { - val user = SSOInterceptor.getSSOInterceptor.getUser(gatewayContext) - if(StringUtils.isNotBlank(user)) { - GatewaySSOUtils.setLoginUser(gatewayContext.getRequest, user) - true - } else if(isPassAuthRequest) { - gatewayContext.getResponse.redirectTo(SSOInterceptor.getSSOInterceptor.redirectTo(gatewayContext.getRequest.getURI)) - gatewayContext.getResponse.sendResponse() - false - } else { - filterResponse(gatewayContext, Message.noLogin("You are not logged in, please login first(您尚未登录,请先登录)!") - .data("enableSSO", true).data("SSOURL", SSOInterceptor.getSSOInterceptor.redirectTo(gatewayContext.getRequest.getURI)) << gatewayContext.getRequest.getRequestURI) - false - } - } else { - filterResponse(gatewayContext, Message.noLogin("You are not logged in, please login first(您尚未登录,请先登录)!") << gatewayContext.getRequest.getRequestURI) - false - } - } - } - - protected def addAccessHeaders(gatewayContext: GatewayContext) { - val response = gatewayContext.getResponse - response.setHeader("Access-Control-Allow-Origin", "*") - response.setHeader("Access-Control-Allow-Credentials", "true") - response.setHeader("Access-Control-Allow-Headers", "authorization,Content-Type") - response.setHeader("Access-Control-Allow-Methods", "POST, GET, OPTIONS, PUT, HEAD, DELETE") - val fullDateFormatEN = DateFormat.getDateTimeInstance(DateFormat.FULL, DateFormat.FULL, new Locale("EN", "en")) - response.setHeader("Date", fullDateFormatEN.format(new Date)) - } - -} diff --git a/gateway/core/src/main/scala/com/webank/wedatasphere/linkis/gateway/security/UserRestful.scala b/gateway/core/src/main/scala/com/webank/wedatasphere/linkis/gateway/security/UserRestful.scala deleted file mode 100644 index d01b9dbf0785ee3de313e5babfdc5cde23248e0b..0000000000000000000000000000000000000000 --- 
a/gateway/core/src/main/scala/com/webank/wedatasphere/linkis/gateway/security/UserRestful.scala +++ /dev/null @@ -1,231 +0,0 @@ -/* - * Copyright 2019 WeBank - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package com.webank.wedatasphere.linkis.gateway.security - -import com.google.gson.Gson -import com.webank.wedatasphere.linkis.common.utils.{Logging, RSAUtils, Utils} -import com.webank.wedatasphere.linkis.gateway.config.GatewayConfiguration -import com.webank.wedatasphere.linkis.gateway.http.GatewayContext -import com.webank.wedatasphere.linkis.gateway.security.sso.SSOInterceptor -import com.webank.wedatasphere.linkis.protocol.usercontrol.{RequestLogin, RequestRegister, ResponseLogin, ResponseRegister} -import com.webank.wedatasphere.linkis.rpc.Sender -import com.webank.wedatasphere.linkis.server.conf.ServerConfiguration -import com.webank.wedatasphere.linkis.server.security.SSOUtils -import com.webank.wedatasphere.linkis.server.{Message, _} -import org.apache.commons.lang.StringUtils - -/** - * created by cooperyang on 2019/1/9. 
- */ -trait UserRestful { - - def doUserRequest(gatewayContext: GatewayContext): Unit - -} -abstract class AbstractUserRestful extends UserRestful with Logging { - - private var securityHooks: Array[SecurityHook] = Array.empty - - def setSecurityHooks(securityHooks: Array[SecurityHook]): Unit = this.securityHooks = securityHooks - - private val userRegex = { - var userURI = ServerConfiguration.BDP_SERVER_USER_URI.getValue - if(!userURI.endsWith("/")) userURI += "/" - userURI - } - - override def doUserRequest(gatewayContext: GatewayContext): Unit = { - val path = gatewayContext.getRequest.getRequestURI.replace(userRegex, "") - val message = path match { - case "register" => register(gatewayContext) - case "login" => - Utils.tryCatch { - val loginUser = GatewaySSOUtils.getLoginUsername(gatewayContext) - Message.error(loginUser + "Already logged in, please log out before signing in(已经登录,请先退出再进行登录)!").data("redirectToIndex", true) - }(_ => login(gatewayContext)) - case "logout" => logout(gatewayContext) - case "userInfo" => userInfo(gatewayContext) - case "publicKey" => publicKey(gatewayContext) - case "heartbeat" => heartbeat(gatewayContext) - case _ => - warn(s"Unknown request URI" + path) - Message.error("unknown request URI " + path) - } - gatewayContext.getResponse.write(message) - gatewayContext.getResponse.setStatus(Message.messageToHttpStatus(message)) - gatewayContext.getResponse.sendResponse() - } - - def login(gatewayContext: GatewayContext): Message = { - val message = tryLogin(gatewayContext) - if(securityHooks != null) securityHooks.foreach(_.postLogin(gatewayContext)) - message - } - - def register(gatewayContext: GatewayContext): Message = { - val message = tryRegister(gatewayContext) - message - } - - protected def tryLogin(context: GatewayContext): Message - - def logout(gatewayContext: GatewayContext): Message = { - GatewaySSOUtils.removeLoginUser(gatewayContext) - if(GatewayConfiguration.ENABLE_SSO_LOGIN.getValue) 
SSOInterceptor.getSSOInterceptor.logout(gatewayContext) - if(securityHooks != null) securityHooks.foreach(_.preLogout(gatewayContext)) - "Logout successful(退出登录成功)!" - } - - def userInfo(gatewayContext: GatewayContext): Message = { - "get user information succeed!".data("userName", GatewaySSOUtils.getLoginUsername(gatewayContext)) - } - - def publicKey(gatewayContext: GatewayContext): Message = { - val message = Message.ok("Gain success(获取成功)!").data("enable", SSOUtils.sslEnable) - if(SSOUtils.sslEnable) message.data("publicKey", RSAUtils.getDefaultPublicKey()) - message - } - - def heartbeat(gatewayContext: GatewayContext): Message = Utils.tryCatch { - GatewaySSOUtils.getLoginUsername(gatewayContext) - "Maintain heartbeat success(维系心跳成功)!" - }(t => Message.noLogin(t.getMessage)) - - protected def tryRegister(context: GatewayContext): Message -} -abstract class UserPwdAbstractUserRestful extends AbstractUserRestful with Logging{ - - private val sender: Sender = Sender.getSender(GatewayConfiguration.USERCONTROL_SPRING_APPLICATION_NAME.getValue) - private val LINE_DELIMITER = "
" - - override protected def tryLogin(gatewayContext: GatewayContext): Message = { - val userNameArray = gatewayContext.getRequest.getQueryParams.get("userName") - val passwordArray = gatewayContext.getRequest.getQueryParams.get("password") - val (userName, password) = if(userNameArray != null && userNameArray.nonEmpty && - passwordArray != null && passwordArray.nonEmpty) - (userNameArray.head, passwordArray.head) - else if(StringUtils.isNotBlank(gatewayContext.getRequest.getRequestBody)){ - val json = BDPJettyServerHelper.gson.fromJson(gatewayContext.getRequest.getRequestBody, classOf[java.util.Map[String, Object]]) - (json.get("userName"), json.get("password")) - } else (null, null) - if(userName == null || StringUtils.isBlank(userName.toString)) { - Message.error("Username can not be empty(用户名不能为空)!") - } else if(password == null || StringUtils.isBlank(password.toString)) { - Message.error("Password can not be blank(密码不能为空)!") - } else { - //warn: For easy to useing linkis,Admin skip login - if(GatewayConfiguration.ADMIN_USER.getValue.equals(userName.toString) && userName.toString.equals(password.toString)){ - GatewaySSOUtils.setLoginUser(gatewayContext, userName.toString) - "login successful(登录成功)!".data("userName", userName) - .data("isAdmin", true) - } else { - // firstly for test user - var message = Message.ok() - if (GatewayConfiguration.USERCONTROL_SWITCH_ON.getValue) { - message = userControlLogin(userName.toString, password.toString, gatewayContext) - } else { - // standard login - val lowerCaseUserName = userName.toString.toLowerCase - message = login(lowerCaseUserName, password.toString) - if(message.getStatus == 0) GatewaySSOUtils.setLoginUser(gatewayContext, lowerCaseUserName) - } - if (message.getData.containsKey("errmsg")) { - message.setMessage(message.getMessage + LINE_DELIMITER + message.getData.get("errmsg").toString) - } - message - } - } - } - - protected def login(userName: String, password: String): Message - - protected def 
register(gatewayContext: GatewayContext) : Message - - def userControlLogin(userName: String, password: String, gatewayContext: GatewayContext): Message = { - var message = Message.ok() - // usercontrol switch on(开启了用户控制开关) - val requestLogin = new RequestLogin - requestLogin.setUserName(userName.toString).setPassword(password.toString) - Utils.tryCatch(sender.ask(requestLogin) match { - case r: ResponseLogin => - message.setStatus(r.getStatus) - if (StringUtils.isNotBlank(r.getErrMsg)) { - message.data("errmsg", r.getErrMsg) - } - if (0 == r.getStatus) { - GatewaySSOUtils.setLoginUser(gatewayContext, userName.toString) - message.setStatus(0) - message.setMessage("Login successful(登录成功)") - } else { - message = Message.error("Invalid username or password, please check and try again later(用户名或密码无效,请稍后再试)") - } - }) { - t => { - warn(s"Login rpc request error, err message ", t) - message.setStatus(1) - message.setMessage("System error, please try again later(系统异常,请稍后再试)") - message.data("errmsg", t.getMessage) - } - } - message - } - - override def tryRegister(gatewayContext: GatewayContext): Message = { - var message = Message.ok() - if (GatewayConfiguration.USERCONTROL_SWITCH_ON.getValue) { - message = userControlRegister(gatewayContext) - } else { - // TODO use normal register only when it's implemented(仅当实现了通用注册,才可以调注册接口) - message = register(gatewayContext) - } - message - } - - /** - * userControl register(用户控制模块登录) - * @param gatewayContext - * @return - */ - private def userControlRegister(gatewayContext: GatewayContext): Message = { - val message = Message.ok() - val gson = new Gson - val requestRegister = new RequestRegister - val requestBody: String = gatewayContext.getRequest.getRequestBody - Utils.tryCatch({ - requestRegister.setParams(requestBody) - sender.ask(requestRegister) match { - case r: ResponseRegister => - message.setStatus(r.getStatus) - message.setMessage(r.getMessage) - var map = r.getData - message.setData(map) - 
message.setMethod(r.getMethod) - info(s"Register rpc success. requestRegister=" + gson.toJson(requestRegister) + ", response=" + gson.toJson(r)) - } - }) { - e => - warn(s"Register rpc request error. err message ", e) - message.setStatus(1) - message.setMessage("System, please try again later(系统异常,请稍后再试)") - } - if (message.getData.containsKey("errmsg")) { - // for frontend display - message.setMessage(message.getMessage + LINE_DELIMITER + message.getData.get("errmsg").toString) - } - message - } -} \ No newline at end of file diff --git a/gateway/gateway-httpclient-support/pom.xml b/gateway/gateway-httpclient-support/pom.xml deleted file mode 100644 index e6afd8c6f78c2f8a0b9e402e9974e2d4eb2a7512..0000000000000000000000000000000000000000 --- a/gateway/gateway-httpclient-support/pom.xml +++ /dev/null @@ -1,89 +0,0 @@ - - - - - 4.0.0 - - - linkis - com.webank.wedatasphere.linkis - 0.10.0 - ../../pom.xml - - linkis-gateway-httpclient-support - - - - com.webank.wedatasphere.linkis - linkis-httpclient - - - com.webank.wedatasphere.linkis - linkis-storage - provided - - - commons-beanutils - commons-beanutils - 1.9.4 - - - org.reflections - reflections - 0.9.10 - - - - - - - org.apache.maven.plugins - maven-deploy-plugin - - - - net.alchim31.maven - scala-maven-plugin - - - org.apache.maven.plugins - maven-jar-plugin - - - org.apache.maven.plugins - maven-source-plugin - - - attach-sources - - jar - - - - - - - - ${basedir}/src/main/resources - - - ${project.artifactId}-${project.version} - - - \ No newline at end of file diff --git a/gateway/gateway-ujes-support/Dockerfile b/gateway/gateway-ujes-support/Dockerfile deleted file mode 100644 index e7016a123624b9a31bf3bf0229b22b7e557f1156..0000000000000000000000000000000000000000 --- a/gateway/gateway-ujes-support/Dockerfile +++ /dev/null @@ -1,27 +0,0 @@ -# -# Copyright 2019 WeBank -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -FROM wedatasphere/linkis:emr-base-spark2.4.4 - -MAINTAINER wedatasphere@webank.com - -RUN yum install -y unzip -WORKDIR /opt/linkis - -COPY target/linkis-gateway.zip /opt/linkis -RUN unzip linkis-gateway.zip - -WORKDIR /opt/linkis/linkis-gateway/bin -ENTRYPOINT ["/opt/linkis/linkis-gateway/bin/startup.sh"] diff --git a/gateway/gateway-ujes-support/bin/start-gateway.sh b/gateway/gateway-ujes-support/bin/start-gateway.sh deleted file mode 100755 index 56cab6df00b2460cb6816d660ee4c9e9e98df205..0000000000000000000000000000000000000000 --- a/gateway/gateway-ujes-support/bin/start-gateway.sh +++ /dev/null @@ -1,38 +0,0 @@ -#!/bin/bash - -cd `dirname $0` -cd .. -HOME=`pwd` - -export SERVER_PID=$HOME/bin/linkis.pid -export SERVER_LOG_PATH=$HOME/logs -export SERVER_CLASS=com.webank.wedatasphere.linkis.DataWorkCloudApplication - -if test -z "$SERVER_HEAP_SIZE" -then - export SERVER_HEAP_SIZE="512M" -fi - -if test -z "$SERVER_JAVA_OPTS" -then - export SERVER_JAVA_OPTS=" -Xmx$SERVER_HEAP_SIZE -XX:+UseG1GC -Xloggc:$HOME/logs/linkis-gc.log" -fi - -if [[ -f "${SERVER_PID}" ]]; then - pid=$(cat ${SERVER_PID}) - if kill -0 ${pid} >/dev/null 2>&1; then - echo "Server is already running." - exit 1 - fi -fi - -nohup java $SERVER_JAVA_OPTS -cp $HOME/conf:$HOME/lib/* $SERVER_CLASS 2>&1 > $SERVER_LOG_PATH/linkis.out & -pid=$! -if [[ -z "${pid}" ]]; then - echo "server $SERVER_NAME start failed!" - exit 1 -else - echo "server $SERVER_NAME start succeeded!" 
- echo $pid > $SERVER_PID - sleep 1 -fi \ No newline at end of file diff --git a/gateway/gateway-ujes-support/bin/startup.sh b/gateway/gateway-ujes-support/bin/startup.sh deleted file mode 100755 index 2b531e25e9ec9a9022803800e8c1be8151fa0fce..0000000000000000000000000000000000000000 --- a/gateway/gateway-ujes-support/bin/startup.sh +++ /dev/null @@ -1,49 +0,0 @@ -#!/bin/bash -cd `dirname $0` -cd .. -HOME=`pwd` - -export SERVER_LOG_PATH=$HOME/logs -export SERVER_CLASS=com.webank.wedatasphere.linkis.DataWorkCloudApplication - - -if test -z "$SERVER_HEAP_SIZE" -then - export SERVER_HEAP_SIZE="512M" -fi - -if test -z "$SERVER_JAVA_OPTS" -then - export SERVER_JAVA_OPTS=" -Xmx$SERVER_HEAP_SIZE -XX:+UseG1GC -Xloggc:$HOME/logs/linkis-gateway-gc.log" -fi - -if test -z "$START_PORT" -then - export START_PORT=14001 -fi - -export SERVER_PID=$HOME/bin/linkis.pid - -if [[ -f "${SERVER_PID}" ]]; then - pid=$(cat ${SERVER_PID}) - if kill -0 ${pid} >/dev/null 2>&1; then - echo "Server is already running." - exit 1 - fi -fi - -cp -f /opt/linkis/conf/linkis.properties /opt/linkis/linkis-gateway/conf - -nohup java $SERVER_JAVA_OPTS -Deurekaurl=$EUREKA_URL -Duser.timezone=Asia/Shanghai -cp $HOME/conf:$HOME/lib/* $SERVER_CLASS --server.port=$START_PORT 2>&1 > $SERVER_LOG_PATH/linkis-gateway.log & - -pid=$! -if [[ -z "${pid}" ]]; then - echo "server $SERVER_NAME start failed!" - exit 1 -else - echo "server $SERVER_NAME start succeeded!" - echo $pid > $SERVER_PID - sleep 1 -fi - -tail -f /dev/null diff --git a/gateway/gateway-ujes-support/bin/stop-gateway.sh b/gateway/gateway-ujes-support/bin/stop-gateway.sh deleted file mode 100755 index f032887111aae2a915bd8302af30617599a506e2..0000000000000000000000000000000000000000 --- a/gateway/gateway-ujes-support/bin/stop-gateway.sh +++ /dev/null @@ -1,47 +0,0 @@ -#!/bin/bash - -cd `dirname $0` -cd .. 
-HOME=`pwd` - -export SERVER_PID=$HOME/bin/linkis.pid - -function wait_for_server_to_die() { - local pid - local count - pid=$1 - timeout=$2 - count=0 - timeoutTime=$(date "+%s") - let "timeoutTime+=$timeout" - currentTime=$(date "+%s") - forceKill=1 - - while [[ $currentTime -lt $timeoutTime ]]; do - $(kill ${pid} > /dev/null 2> /dev/null) - if kill -0 ${pid} > /dev/null 2>&1; then - sleep 3 - else - forceKill=0 - break - fi - currentTime=$(date "+%s") - done - - if [[ forceKill -ne 0 ]]; then - $(kill -9 ${pid} > /dev/null 2> /dev/null) - fi -} - -if [[ ! -f "${SERVER_PID}" ]]; then - echo "server $SERVER_NAME is not running" -else - pid=$(cat ${SERVER_PID}) - if [[ -z "${pid}" ]]; then - echo "server $SERVER_NAME is not running" - else - wait_for_server_to_die $pid 40 - $(rm -f ${SERVER_PID}) - echo "server $SERVER_NAME is stopped." - fi -fi \ No newline at end of file diff --git a/gateway/gateway-ujes-support/bin/stop.sh b/gateway/gateway-ujes-support/bin/stop.sh deleted file mode 100755 index f032887111aae2a915bd8302af30617599a506e2..0000000000000000000000000000000000000000 --- a/gateway/gateway-ujes-support/bin/stop.sh +++ /dev/null @@ -1,47 +0,0 @@ -#!/bin/bash - -cd `dirname $0` -cd .. -HOME=`pwd` - -export SERVER_PID=$HOME/bin/linkis.pid - -function wait_for_server_to_die() { - local pid - local count - pid=$1 - timeout=$2 - count=0 - timeoutTime=$(date "+%s") - let "timeoutTime+=$timeout" - currentTime=$(date "+%s") - forceKill=1 - - while [[ $currentTime -lt $timeoutTime ]]; do - $(kill ${pid} > /dev/null 2> /dev/null) - if kill -0 ${pid} > /dev/null 2>&1; then - sleep 3 - else - forceKill=0 - break - fi - currentTime=$(date "+%s") - done - - if [[ forceKill -ne 0 ]]; then - $(kill -9 ${pid} > /dev/null 2> /dev/null) - fi -} - -if [[ ! 
-f "${SERVER_PID}" ]]; then - echo "server $SERVER_NAME is not running" -else - pid=$(cat ${SERVER_PID}) - if [[ -z "${pid}" ]]; then - echo "server $SERVER_NAME is not running" - else - wait_for_server_to_die $pid 40 - $(rm -f ${SERVER_PID}) - echo "server $SERVER_NAME is stopped." - fi -fi \ No newline at end of file diff --git a/gateway/gateway-ujes-support/dockerinfo/image-id b/gateway/gateway-ujes-support/dockerinfo/image-id deleted file mode 100644 index 4cbd2fafaff58659a75a9bc47668303b571f3e6f..0000000000000000000000000000000000000000 --- a/gateway/gateway-ujes-support/dockerinfo/image-id +++ /dev/null @@ -1 +0,0 @@ -4a6d1f2107f5 diff --git a/gateway/gateway-ujes-support/dockerinfo/image-name b/gateway/gateway-ujes-support/dockerinfo/image-name deleted file mode 100644 index c17be8314f4ae4cc9453abda27ffb74e9f573c98..0000000000000000000000000000000000000000 --- a/gateway/gateway-ujes-support/dockerinfo/image-name +++ /dev/null @@ -1 +0,0 @@ -wedatasphere/linkis:linkis-gateway diff --git a/gateway/gateway-ujes-support/dockerinfo/repository b/gateway/gateway-ujes-support/dockerinfo/repository deleted file mode 100644 index d2b33f60b5f96d71637a4790e8c094b4692212b7..0000000000000000000000000000000000000000 --- a/gateway/gateway-ujes-support/dockerinfo/repository +++ /dev/null @@ -1 +0,0 @@ -wedatasphere/linkis diff --git a/gateway/gateway-ujes-support/dockerinfo/tag b/gateway/gateway-ujes-support/dockerinfo/tag deleted file mode 100644 index 865786b3c5516b4d77a2d1f6d7319cd9cf58ef7f..0000000000000000000000000000000000000000 --- a/gateway/gateway-ujes-support/dockerinfo/tag +++ /dev/null @@ -1 +0,0 @@ -linkis-gateway diff --git a/gateway/gateway-ujes-support/pom.xml b/gateway/gateway-ujes-support/pom.xml deleted file mode 100644 index a20e20b89dabe12aa264dc42bb2d7390f262ca64..0000000000000000000000000000000000000000 --- a/gateway/gateway-ujes-support/pom.xml +++ /dev/null @@ -1,112 +0,0 @@ - - - - - 4.0.0 - - - linkis - com.webank.wedatasphere.linkis - 0.10.0 - 
../../pom.xml - - - linkis-gateway-ujes-support - - - - com.webank.wedatasphere.linkis - linkis-gateway-springcloudgateway - - - org.eclipse.jetty - jetty-webapp - - - org.eclipse.jetty - jetty-server - - - - - - javax.servlet.jsp - jsp-api - 2.1 - - - com.webank.wedatasphere.linkis - linkis-cs-common - - - - - - - org.apache.maven.plugins - maven-deploy-plugin - - - - net.alchim31.maven - scala-maven-plugin - - - org.apache.maven.plugins - maven-jar-plugin - - - org.apache.maven.plugins - maven-assembly-plugin - 2.3 - false - - - make-assembly - package - - single - - - - ../gateway-ujes-support/src/main/assembly/distribution.xml - - - - - - false - linkis-gateway - false - false - - ../gateway-ujes-support/src/main/assembly/distribution.xml - - - - - - - ${basedir}/src/main/resources - - - ${project.artifactId}-${project.version} - - - \ No newline at end of file diff --git a/gateway/gateway-ujes-support/pom_k8s.xml b/gateway/gateway-ujes-support/pom_k8s.xml deleted file mode 100644 index 6e9aefa5fefcfcd704b87114658342136c1df294..0000000000000000000000000000000000000000 --- a/gateway/gateway-ujes-support/pom_k8s.xml +++ /dev/null @@ -1,152 +0,0 @@ - - - - - 4.0.0 - - - linkis - com.webank.wedatasphere.linkis - 0.10.0 - ../../pom.xml - - - linkis-gateway-ujes-support - - - - com.webank.wedatasphere.linkis - linkis-gateway-springcloudgateway - - - org.eclipse.jetty - jetty-webapp - - - org.eclipse.jetty - jetty-server - - - - - - javax.servlet.jsp - jsp-api - 2.1 - - - org.json4s - json4s-jackson_2.11 - 3.2.11 - - - com.fasterxml.jackson.core - jackson-databind - 2.9.6 - - - io.netty - netty-all - 4.1.25.Final - - - io.netty - netty-transport-native-epoll - 4.1.25.Final - - - com.webank.wedatasphere.linkis - linkis-cs-common - - - - - - - org.apache.maven.plugins - maven-deploy-plugin - - - - net.alchim31.maven - scala-maven-plugin - - - org.apache.maven.plugins - maven-jar-plugin - - - org.apache.maven.plugins - maven-assembly-plugin - 2.3 - false - - - 
make-assembly - package - - single - - - - ../gateway-ujes-support/src/main/assembly/distribution.xml - - - - - - false - linkis-gateway - false - false - - ../gateway-ujes-support/src/main/assembly/distribution.xml - - - - - - - - - - - - - - - - - - - - - - - - - - - ${basedir}/src/main/resources - - - ${project.artifactId}-${project.version} - - - \ No newline at end of file diff --git a/gateway/gateway-ujes-support/src/main/assembly/distribution.xml b/gateway/gateway-ujes-support/src/main/assembly/distribution.xml deleted file mode 100644 index bd07b723c6e7d371b73a5619dcfdc42acc8ddf27..0000000000000000000000000000000000000000 --- a/gateway/gateway-ujes-support/src/main/assembly/distribution.xml +++ /dev/null @@ -1,70 +0,0 @@ - - - - linkis-gateway - - zip - - true - linkis-gateway - - - - - - lib - true - true - false - true - true - - - - - - ${basedir}/src/main/resources - - * - - 0777 - conf - unix - - - ${basedir}/bin - - * - - 0777 - bin - unix - - - . - - */** - - logs - - - - - diff --git a/gateway/gateway-ujes-support/src/main/resources/application.yml b/gateway/gateway-ujes-support/src/main/resources/application.yml deleted file mode 100644 index 60b63ee6ca228c162106b6815ed37940424870b0..0000000000000000000000000000000000000000 --- a/gateway/gateway-ujes-support/src/main/resources/application.yml +++ /dev/null @@ -1,39 +0,0 @@ -server: - port: 9001 -spring: - application: - name: DataWorkCloud-Gateway - cloud: - gateway: - routes: - - id: dws - uri: http://localhost:port/ - predicates: - - Path=/dws/ - - id: dws/vg - uri: http://localhost:port/ - predicates: - - Path=/dws/vg/ - - id: dws/easyide - uri: http://localhost:port/ - predicates: - - Path=/dws/easyide/ - -eureka: - client: - serviceUrl: - defaultZone: ${eurekaurl} - registry-fetch-interval-seconds: 5 - instance: - lease-renewal-interval-in-second: 5 - lease-expiration-duration-in-second: 10 - prefer-ip-address: true - instance-id: ${spring.cloud.client.ip-address}:${server.port} - 
metadata-map: - test: wedatasphere - -management: - endpoints: - web: - exposure: - include: refresh,info diff --git a/gateway/gateway-ujes-support/src/main/resources/linkis.properties b/gateway/gateway-ujes-support/src/main/resources/linkis.properties deleted file mode 100644 index 786777975e1066d78ad482f4c6f458b824b5a77a..0000000000000000000000000000000000000000 --- a/gateway/gateway-ujes-support/src/main/resources/linkis.properties +++ /dev/null @@ -1,34 +0,0 @@ -# -# Copyright 2019 WeBank -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -#wds.linkis.test.mode=true - -wds.linkis.server.version=v1 - -wds.linkis.ldap.proxy.url= -wds.linkis.ldap.proxy.baseDN= - -wds.linkis.server.restful.uri=/ - -wds.linkis.server.web.session.timeout=1h - -wds.linkis.gateway.conf.enable.proxy.user=false - -wds.linkis.gateway.conf.url.pass.auth=/dss/ - -wds.linkis.gateway.admin.user=hadoop - -wds.linkis.gateway.conf.enable.token.auth=true \ No newline at end of file diff --git a/gateway/gateway-ujes-support/src/main/resources/log4j.properties b/gateway/gateway-ujes-support/src/main/resources/log4j.properties deleted file mode 100644 index 178f8dfa2605eeaab7020c5b4792f3fa6984ae6f..0000000000000000000000000000000000000000 --- a/gateway/gateway-ujes-support/src/main/resources/log4j.properties +++ /dev/null @@ -1,36 +0,0 @@ -# -# Copyright 2019 WeBank -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -### set log levels ### - -log4j.rootCategory=INFO,console - -log4j.appender.console=org.apache.log4j.ConsoleAppender -log4j.appender.console.Threshold=INFO -log4j.appender.console.layout=org.apache.log4j.PatternLayout -#log4j.appender.console.layout.ConversionPattern= %d{ISO8601} %-5p (%t) [%F:%M(%L)] - %m%n -log4j.appender.console.layout.ConversionPattern= %d{ISO8601} %-5p (%t) %p %c{1} - %m%n - - -log4j.appender.com.webank.bdp.ide.core=org.apache.log4j.DailyRollingFileAppender -log4j.appender.com.webank.bdp.ide.core.Threshold=INFO -log4j.additivity.com.webank.bdp.ide.core=false -log4j.appender.com.webank.bdp.ide.core.layout=org.apache.log4j.PatternLayout -log4j.appender.com.webank.bdp.ide.core.Append=true -log4j.appender.com.webank.bdp.ide.core.File=logs/linkis.log -log4j.appender.com.webank.bdp.ide.core.layout.ConversionPattern= %d{ISO8601} %-5p (%t) [%F:%M(%L)] - %m%n - -log4j.logger.org.springframework=INFO \ No newline at end of file diff --git a/gateway/gateway-ujes-support/src/main/resources/token.properties b/gateway/gateway-ujes-support/src/main/resources/token.properties deleted file mode 100644 index 136141cebfaf6e048fceba893d5244abc6f5610f..0000000000000000000000000000000000000000 --- a/gateway/gateway-ujes-support/src/main/resources/token.properties +++ /dev/null @@ -1,5 +0,0 @@ -QML-AUTH=* -BML-AUTH=* -WS-AUTH=* -dss-AUTH=* -QUALITIS-AUTH=* \ No newline at end of file diff --git a/gateway/gateway-ujes-support/src/main/scala/com/webank/wedatasphere/linkis/gateway/ujes/parser/EntranceExecutionGatewayParser.scala 
b/gateway/gateway-ujes-support/src/main/scala/com/webank/wedatasphere/linkis/gateway/ujes/parser/EntranceExecutionGatewayParser.scala deleted file mode 100644 index 939c89ac73b68693c0b222be417050e5f0e88261..0000000000000000000000000000000000000000 --- a/gateway/gateway-ujes-support/src/main/scala/com/webank/wedatasphere/linkis/gateway/ujes/parser/EntranceExecutionGatewayParser.scala +++ /dev/null @@ -1,71 +0,0 @@ -/* - * Copyright 2019 WeBank - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package com.webank.wedatasphere.linkis.gateway.ujes.parser - -import com.webank.wedatasphere.linkis.common.ServiceInstance -import com.webank.wedatasphere.linkis.gateway.http.GatewayContext -import com.webank.wedatasphere.linkis.gateway.parser.AbstractGatewayParser -import com.webank.wedatasphere.linkis.gateway.springcloud.SpringCloudGatewayConfiguration._ -import com.webank.wedatasphere.linkis.protocol.constants.TaskConstant -import com.webank.wedatasphere.linkis.server.BDPJettyServerHelper -import org.apache.commons.lang.StringUtils -import org.springframework.stereotype.Component - -/** - * created by cooperyang on 2019/5/15. 
- */ -@Component -class EntranceExecutionGatewayParser extends AbstractGatewayParser { - - override def shouldContainRequestBody(gatewayContext: GatewayContext): Boolean = gatewayContext.getRequest.getRequestURI match { - case EntranceExecutionGatewayParser.ENTRANCE_EXECUTION_REGEX(_, _) => true - case _ => false - } - - override def parse(gatewayContext: GatewayContext): Unit = gatewayContext.getRequest.getRequestURI match { - case EntranceExecutionGatewayParser.ENTRANCE_EXECUTION_REGEX(version, _) => - if(sendResponseWhenNotMatchVersion(gatewayContext, version)) return - //var (creator, executeApplicationName): (String, String) = null - var creator:String = null - var executeApplicationName:String = null - if(StringUtils.isNotBlank(gatewayContext.getRequest.getRequestBody)) { - val json = BDPJettyServerHelper.gson.fromJson(gatewayContext.getRequest.getRequestBody, classOf[java.util.Map[String, Object]]) - json.get(TaskConstant.EXECUTEAPPLICATIONNAME) match { - case s: String => executeApplicationName = s - case _ => - } - json.get(TaskConstant.REQUESTAPPLICATIONNAME) match { - case s: String => creator = s - case _ => - } - } - val path = gatewayContext.getRequest.getRequestURI - if(StringUtils.isBlank(executeApplicationName)) { - sendErrorResponse(s"requestUri $path need request parameter " + TaskConstant.EXECUTEAPPLICATIONNAME, gatewayContext) - } else { - info(s"GatewayParser parse requestUri $path to service $creator or $executeApplicationName.") - if(StringUtils.isNotBlank(creator)) gatewayContext.getGatewayRoute.getParams.put(TaskConstant.REQUESTAPPLICATIONNAME, creator) - gatewayContext.getGatewayRoute.setServiceInstance(ServiceInstance(executeApplicationName, null)) - } - case _ => - } -} - -object EntranceExecutionGatewayParser { - val ENTRANCE_HEADER = normalPath(API_URL_PREFIX) + "rest_[a-zA-Z][a-zA-Z_0-9]*/(v\\d+)/entrance/" - val ENTRANCE_EXECUTION_REGEX = (ENTRANCE_HEADER + "(execute|backgroundservice)").r -} \ No newline at end of file diff --git 
a/gateway/springcloudgateway/pom.xml b/gateway/springcloudgateway/pom.xml deleted file mode 100644 index cf574e69b0670657ac0fbb1032db4b302928d3e4..0000000000000000000000000000000000000000 --- a/gateway/springcloudgateway/pom.xml +++ /dev/null @@ -1,97 +0,0 @@ - - - - - 4.0.0 - - - linkis - com.webank.wedatasphere.linkis - 0.10.0 - ../../pom.xml - - linkis-gateway-springcloudgateway - - - - com.webank.wedatasphere.linkis - linkis-gateway-core - - - json4s-jackson_2.11 - org.json4s - - - - - org.springframework.cloud - spring-cloud-starter-gateway - 2.0.1.RELEASE - - - hibernate-validator - org.hibernate.validator - - - spring-boot-starter-logging - org.springframework.boot - - - jackson-databind - com.fasterxml.jackson.core - - - - - org.springframework.boot - spring-boot-starter-reactor-netty - 2.0.3.RELEASE - - - io.projectreactor - reactor-core - 3.2.2.RELEASE - - - - - - - org.apache.maven.plugins - maven-deploy-plugin - - - - net.alchim31.maven - scala-maven-plugin - - - org.apache.maven.plugins - maven-jar-plugin - - - - - ${basedir}/src/main/resources - - - ${project.artifactId}-${project.version} - - - \ No newline at end of file diff --git a/gateway/springcloudgateway/src/main/resources/application.yml b/gateway/springcloudgateway/src/main/resources/application.yml deleted file mode 100644 index 3af5bcba993039bb5620f308fa4a3da903de4fc9..0000000000000000000000000000000000000000 --- a/gateway/springcloudgateway/src/main/resources/application.yml +++ /dev/null @@ -1,35 +0,0 @@ -server: - port: 9001 #该模块提供服务的端口(必须) -spring: - application: - name: DataWorkCloud-Gateway #模块名,用于做高可用(必须) - cloud: - gateway: - routes: - - id: dws - uri: locahost - predicates: - - Path=/dws/ - - id: dws/vg - uri: locahost - predicates: - - Path=/dws/vg/ - - id: dws/easyide - uri: locahost - predicates: - - Path=/dws/easyide/ -#指定eureka Server的地址,用于注册(必须) -eureka: - client: - serviceUrl: - defaultZone: ${eurekaurl} - registry-fetch-interval-seconds: 5 - instance: - metadata-map: - 
test: wedatasphere -#(必须) -management: - endpoints: - web: - exposure: - include: refresh,info diff --git a/gateway/springcloudgateway/src/main/resources/linkis.properties b/gateway/springcloudgateway/src/main/resources/linkis.properties deleted file mode 100644 index e47babfa634aa71fc1225fce9708a05cd0c32d4e..0000000000000000000000000000000000000000 --- a/gateway/springcloudgateway/src/main/resources/linkis.properties +++ /dev/null @@ -1,31 +0,0 @@ -# -# Copyright 2019 WeBank -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# - -#wds.linkis.test.mode=true - - -wds.linkis.ldap.proxy.url= -wds.linkis.ldap.proxy.baseDN= - -wds.linkis.server.restful.uri=/ - -wds.linkis.server.web.session.timeout=1h - -wds.linkis.gateway.conf.enable.proxy.user=false - -wds.linkis.gateway.conf.url.pass.auth=/dws/ - -wds.linkis.gateway.admin.user=hadoop \ No newline at end of file diff --git a/gateway/springcloudgateway/src/main/resources/log4j2.xml b/gateway/springcloudgateway/src/main/resources/log4j2.xml deleted file mode 100644 index ad88ea570e7faf06d2ccd711b52f22b8487540a6..0000000000000000000000000000000000000000 --- a/gateway/springcloudgateway/src/main/resources/log4j2.xml +++ /dev/null @@ -1,38 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - diff --git a/gateway/springcloudgateway/src/main/resources/token.properties b/gateway/springcloudgateway/src/main/resources/token.properties deleted file mode 100644 index 136141cebfaf6e048fceba893d5244abc6f5610f..0000000000000000000000000000000000000000 --- a/gateway/springcloudgateway/src/main/resources/token.properties +++ /dev/null @@ -1,5 +0,0 @@ -QML-AUTH=* -BML-AUTH=* -WS-AUTH=* -dss-AUTH=* -QUALITIS-AUTH=* \ No newline at end of file diff --git a/images/Linkis_1.0_architecture.png b/images/Linkis_1.0_architecture.png new file mode 100644 index 0000000000000000000000000000000000000000..9b6cc90e54341f01dcbf119c346c0f6cfbd2c752 Binary files /dev/null and b/images/Linkis_1.0_architecture.png differ diff --git a/images/after_linkis_cn.png b/images/after_linkis_cn.png new file mode 100644 index 0000000000000000000000000000000000000000..b94beab5b00bc2aafb5dc268d1294957ee58ca17 Binary files /dev/null and b/images/after_linkis_cn.png differ diff --git a/images/before_linkis_cn.png b/images/before_linkis_cn.png new file mode 100644 index 0000000000000000000000000000000000000000..914d38b08592db4e9904e3429f66479ecf138ae5 Binary files /dev/null and b/images/before_linkis_cn.png differ diff --git a/images/en_US/Linkis1.0/architecture/Linkis1.0-architecture.png 
b/images/en_US/Linkis1.0/architecture/Linkis1.0-architecture.png new file mode 100644 index 0000000000000000000000000000000000000000..825672bc2332c83f13a85a2f219dfc69c997f93d Binary files /dev/null and b/images/en_US/Linkis1.0/architecture/Linkis1.0-architecture.png differ diff --git a/images/wedatasphere_contact_01.png b/images/wedatasphere_contact_01.png new file mode 100644 index 0000000000000000000000000000000000000000..5a3d80eba9062e834a758f6219fa5921054822ed Binary files /dev/null and b/images/wedatasphere_contact_01.png differ diff --git a/images/wedatasphere_stack_Linkis.png b/images/wedatasphere_stack_Linkis.png new file mode 100644 index 0000000000000000000000000000000000000000..36060b9ccd118013f416439a9f02b2af156aa95f Binary files /dev/null and b/images/wedatasphere_stack_Linkis.png differ diff --git a/images/zh_CN/Linkis1.0/architecture/Linkis0.X-NewEngine-architecture.png b/images/zh_CN/Linkis1.0/architecture/Linkis0.X-NewEngine-architecture.png new file mode 100644 index 0000000000000000000000000000000000000000..57c83b3a588890887995e454f8e0398e0627998d Binary files /dev/null and b/images/zh_CN/Linkis1.0/architecture/Linkis0.X-NewEngine-architecture.png differ diff --git a/images/zh_CN/Linkis1.0/architecture/Linkis1.0-EngineConn-architecture.png b/images/zh_CN/Linkis1.0/architecture/Linkis1.0-EngineConn-architecture.png new file mode 100644 index 0000000000000000000000000000000000000000..d95da891277497fc4f7faf505dd56718a0e3da57 Binary files /dev/null and b/images/zh_CN/Linkis1.0/architecture/Linkis1.0-EngineConn-architecture.png differ diff --git a/images/zh_CN/Linkis1.0/architecture/Linkis1.0-NewEngine-architecture.png b/images/zh_CN/Linkis1.0/architecture/Linkis1.0-NewEngine-architecture.png new file mode 100644 index 0000000000000000000000000000000000000000..b1d60bf9efdc636fd924606c81f27e1b6637ce40 Binary files /dev/null and b/images/zh_CN/Linkis1.0/architecture/Linkis1.0-NewEngine-architecture.png differ diff --git 
a/images/zh_CN/Linkis1.0/architecture/Linkis1.0-architecture.png b/images/zh_CN/Linkis1.0/architecture/Linkis1.0-architecture.png new file mode 100644 index 0000000000000000000000000000000000000000..c3cb24ceccfebba27e3f56682dcf25dcb40fe343 Binary files /dev/null and b/images/zh_CN/Linkis1.0/architecture/Linkis1.0-architecture.png differ diff --git a/images/zh_CN/Linkis1.0/architecture/Linkis1.0-newEngine-initialization.png b/images/zh_CN/Linkis1.0/architecture/Linkis1.0-newEngine-initialization.png new file mode 100644 index 0000000000000000000000000000000000000000..003b38eda5dc2487b31b711618dab7b724c0f15c Binary files /dev/null and b/images/zh_CN/Linkis1.0/architecture/Linkis1.0-newEngine-initialization.png differ diff --git a/images/zh_CN/Linkis1.0/installation/Linkis0.X-services-list.png b/images/zh_CN/Linkis1.0/installation/Linkis0.X-services-list.png new file mode 100644 index 0000000000000000000000000000000000000000..c669abfb685ed462ac512771648a43275cd8df4e Binary files /dev/null and b/images/zh_CN/Linkis1.0/installation/Linkis0.X-services-list.png differ diff --git a/images/zh_CN/Linkis1.0/installation/Linkis1.0-services-list.png b/images/zh_CN/Linkis1.0/installation/Linkis1.0-services-list.png new file mode 100644 index 0000000000000000000000000000000000000000..f7685452fef9c2f8d84314c596dbf5799f4482e6 Binary files /dev/null and b/images/zh_CN/Linkis1.0/installation/Linkis1.0-services-list.png differ diff --git a/k8s/README.MD b/k8s/README.MD deleted file mode 100644 index f21d236297bdb2d81bc5647f5db5d711041cb945..0000000000000000000000000000000000000000 --- a/k8s/README.MD +++ /dev/null @@ -1,104 +0,0 @@ -Linkis on kubernetes - -#重点解决问题 -1.每个人在部署过程中,都会遇到包冲突,操作系统不兼容,openjdk,hotspot jdk兼容问题,docker交付可以从根本上解决这些问题 - -2.docker基于cgroup资源隔离,共享文件系统,namespace 可以轻量限制计算资源,秒级扩缩容 - - 例如:限制某个进程只能使用CPU一个核的20%资源 - cd /sys/fs/cgroup/cpu - touch test - $ echo 10000 > cpu.cfs_quota_us - $ echo 50000 > cpu.cfs_period_us - 启动一个进程,请不要在生产做这样操作!!!! 
- $ while :; do :; done & - 记录当前进程Id - $ echo pid >> tasks - -#部署文档 - -##基础镜像及各服务镜像编译方法 - - 目前所有微服务镜像都上传到dockerhub,https://hub.docker.com/repository/docker/wedatasphere/linkis - - 出于对虚机部署版本的考虑,新增了pom_k8s.xml,默认打包方式跟之前保持一致,如果想要自己编译镜像,需要引用pom_k8s.xml - - mvn clean package -f gateway/gateway-ujes-support/pom_k8s.xml - - 一.创建kubernetes docker secret - - k8s/registry.yaml 修改对应harbor的账号,密码,镜像仓库地址 - 或者手动创建secret,后续拉取镜像使用,私有docker仓库请修改成私有reposity地址 - $kubectl create secret docker-registry registry-key \ - --docker-server=https://hub.docker.com \ - --docker-username=wedatasphere \ - --docker-password=***** \ - --docker-email=wedatasphere@webank.com - - 二.NFS/ceph 搭建 - - 搭建NFS或者ceph完成后,publicservice等微服务的deployment需要修改nfs的地址 - - 三.基础镜像创建文件(使用官方维护的镜像可以跳过该步骤) - 自己制作基础镜像方法如下: - * 环境变量设置 - - > 设置harbor的地址环境变量 - ``` - export HARBOR_LINKIS= - ``` - - * 基础镜像编译打包 - > 打包 - ``` - cd Linkis/emr-base - docker build -t $HARBOR_LINKIS/linkis:emr-base-spark2.4.4 . - ``` - > 推送 - ``` - docker push $HARBOR_LINKIS/linkis:emr-base-spark2.4.4 - ``` - * 基础依赖包编译 - ``` - mvn clean install -Dmaven.test.skip=true - ``` - - * 替换各个微服务Dockerfile的基础镜像为自己的私服地址 - ``` - find . -name Dockerfile | xargs grep -rl "FROM wedatasphere/linkis" | xargs sed -i "" "s?FROM wedatasphere/linkis?FROM $HARBOR_LINKIS/linkis?" - find . -name "*.yaml" | xargs grep -rl "image: wedatasphere/linkis" | xargs sed -i "" "s?image: wedatasphere/linkis?image: $HARBOR_LINKIS/linkis?" 
- ``` - - * 编译各微服务镜像 - - > 各个微服务basedir下面有对应 module 的 Dockerfile - 与公司内部的CI/CD集成,可以通过mvn docker插件直接编译docker镜像 - ``` - sh k8s/package.sh - sh k8s/build.sh - sh k8s/push.sh - ``` - - 四、K8S部署 - - 1.大数据环境配置 - > 主要涉及 hadoop,hive,spark环境配置,需要将几个组件的配置在k8s各个节点上 - - | 组件 | 版本 | 节点目录 | - | --- | --- | --- | - | hadoop | 2.7.7 | /opt/hadoop/hadoop-2.7.7 | - | hive | 2.3.6 | /opt/hive/apache-hive-2.3.6-bin | - | spark | 2.4 | /opt/spark/spark-2.4.4-bin-hadoop2.7 | - - 2.启动eureka - eureka部署在k8s的任意一个node节点上就可以 - 启动eureka成功后 - 修改Linkis/k8s/linkis-eureka-configmap.yaml中的eurekaUrl - - 3.修改k8s目录下的linkis-*- configmap.yaml - 特别注意配置文件的正确性,避免后续部署的低级问题出现,如数据库连接不上,hdfs地址不正确等等 - 例如gateway/linkis.properties 复制到k8s/linkis-gateway-configmap.yaml - - 4.运行k8s目录下的 - sh init.sh - 观察eureka上注册的服务,一共22个,都启动成功基本就没有问题 diff --git a/k8s/build.sh b/k8s/build.sh deleted file mode 100644 index abdf1857ce55902ecc78670530e66c334ed408a4..0000000000000000000000000000000000000000 --- a/k8s/build.sh +++ /dev/null @@ -1,43 +0,0 @@ -#!/bin/bash - -# docker build -t $HARBOR_LINKIS/linkis:emr-base-spark2.4.4 -f emr-base/Dockerfile . 
- -docker build -t $HARBOR_LINKIS/linkis:linkis-bml-0.10.0 -f bml/bmlserver/Dockerfile bml/bmlserver - -docker build -t $HARBOR_LINKIS/linkis:linkis-dsm-server-0.10.0 -f datasource/datasourcemanager/server/Dockerfile datasource/datasourcemanager/server - -docker build -t $HARBOR_LINKIS/linkis:linkis-mdm-server-0.10.0 -f datasource/metadatamanager/server/Dockerfile datasource/metadatamanager/server -docker build -t $HARBOR_LINKIS/linkis:linkis-mdm-service-mysql-0.10.0 -f datasource/metadatamanager/service/mysql/Dockerfile datasource/metadatamanager/service/mysql -docker build -t $HARBOR_LINKIS/linkis:linkis-mdm-service-es-0.10.0 -f datasource/metadatamanager/service/elasticsearch/Dockerfile datasource/metadatamanager/service/elasticsearch -docker build -t $HARBOR_LINKIS/linkis:linkis-mdm-service-hive-0.10.0 -f datasource/metadatamanager/service/hive/Dockerfile datasource/metadatamanager/service/hive - -docker build -t $HARBOR_LINKIS/linkis:linkis-gateway-0.10.0 -f gateway/gateway-ujes-support/Dockerfile gateway/gateway-ujes-support - -docker build -t $HARBOR_LINKIS/linkis:linkis-resourcemanager-0.10.0 -f resourceManager/resourcemanagerserver/Dockerfile resourceManager/resourcemanagerserver - -docker build -t $HARBOR_LINKIS/linkis:linkis-cs-server-0.10.0 -f contextservice/cs-server/Dockerfile contextservice/cs-server - -docker build -t $HARBOR_LINKIS/linkis:linkis-metadata-0.10.0 -f metadata/Dockerfile metadata - -docker build -t $HARBOR_LINKIS/linkis:linkis-publicservice-0.10.0 -f publicService/Dockerfile publicService - -docker build -t $HARBOR_LINKIS/linkis:linkis-ujes-spark-enginemanager-0.10.0 -f ujes/definedEngines/spark/enginemanager/Dockerfile ujes/definedEngines/spark/enginemanager -docker build -t $HARBOR_LINKIS/linkis:linkis-ujes-spark-entrance-0.10.0 -f ujes/definedEngines/spark/entrance/Dockerfile ujes/definedEngines/spark/entrance - -docker build -t $HARBOR_LINKIS/linkis:linkis-ujes-hive-enginemanager-0.10.0 -f 
ujes/definedEngines/hive/enginemanager/Dockerfile ujes/definedEngines/hive/enginemanager -docker build -t $HARBOR_LINKIS/linkis:linkis-ujes-hive-entrance-0.10.0 -f ujes/definedEngines/hive/entrance/Dockerfile ujes/definedEngines/hive/entrance - -docker build -t $HARBOR_LINKIS/linkis:linkis-ujes-python-enginemanager-0.10.0 -f ujes/definedEngines/python/enginemanager/Dockerfile ujes/definedEngines/python/enginemanager -docker build -t $HARBOR_LINKIS/linkis:linkis-ujes-python-entrance-0.10.0 -f ujes/definedEngines/python/entrance/Dockerfile ujes/definedEngines/python/entrance - -docker build -t $HARBOR_LINKIS/linkis:linkis-ujes-pipeline-enginemanager-0.10.0 -f ujes/definedEngines/pipeline/enginemanager/Dockerfile ujes/definedEngines/pipeline/enginemanager -docker build -t $HARBOR_LINKIS/linkis:linkis-ujes-pipeline-entrance-0.10.0 -f ujes/definedEngines/pipeline/entrance/Dockerfile ujes/definedEngines/pipeline/entrance - -docker build -t $HARBOR_LINKIS/linkis:linkis-ujes-jdbc-enginemanager-0.10.0 -f ujes/definedEngines/jdbc/entrance/Dockerfile ujes/definedEngines/jdbc/entrance - -docker build -t $HARBOR_LINKIS/linkis:linkis-ujes-mlsql-entrance-0.10.0 -f ujes/definedEngines/mlsql/entrance/Dockerfile ujes/definedEngines/mlsql/entrance - -docker build -t $HARBOR_LINKIS/linkis:linkis-ujes-shell-enginemanager-0.10.0 -f ujes/definedEngines/shell/entrance/Dockerfile ujes/definedEngines/shell/entrance -docker build -t $HARBOR_LINKIS/linkis:linkis-ujes-shell-entrance-0.10.0 -f ujes/definedEngines/shell/enginemanager/Dockerfile ujes/definedEngines/shell/enginemanager - - diff --git a/k8s/delete.sh b/k8s/delete.sh deleted file mode 100644 index e7179f87907d6c7dec15d226f454b87a90d2824b..0000000000000000000000000000000000000000 --- a/k8s/delete.sh +++ /dev/null @@ -1,54 +0,0 @@ -kubectl delete -f register.yaml - -kubectl delete -f linkis-gateway-configmap.yaml -kubectl delete -f linkis-metadata-configmap.yaml -kubectl delete -f linkis-publicservice-configmap.yaml -kubectl delete -f 
linkis-resourcemanager-configmap.yaml -kubectl delete -f linkis-bml-configmap.yaml -kubectl delete -f linkis-ujes-hive-enginemanager-configmap.yaml -kubectl delete -f linkis-ujes-hive-entrance-configmap.yaml -kubectl delete -f linkis-ujes-jdbc-entrance-configmap.yaml -kubectl delete -f linkis-ujes-mlsql-entrance-configmap.yaml -kubectl delete -f linkis-ujes-pipeline-enginemanager-configmap.yaml -kubectl delete -f linkis-ujes-pipeline-entrance-configmap.yaml -kubectl delete -f linkis-ujes-python-enginemanager-configmap.yaml -kubectl delete -f linkis-ujes-python-entrance-configmap.yaml -kubectl delete -f linkis-ujes-shell-enginemanager-configmap.yaml -kubectl delete -f linkis-ujes-shell-entrance-configmap.yaml -kubectl delete -f linkis-ujes-spark-entrance-configmap.yaml -kubectl delete -f linkis-ujes-spark-enginemanager-configmap.yaml - -kubectl delete -f linkis-dsm-server-configmap.yaml -kubectl delete -f linkis-mdm-server-configmap.yaml -kubectl delete -f linkis-mdm-service-es-configmap.yaml -kubectl delete -f linkis-mdm-service-hive-configmap.yaml -kubectl delete -f linkis-mdm-service-mysql-configmap.yaml - -kubectl delete -f linkis-gateway-deployment.yaml -kubectl delete -f linkis-bml-deployment.yaml -kubectl delete -f linkis-metadata-deployment.yaml -kubectl delete -f linkis-publicservice-deployment.yaml -kubectl delete -f linkis-resourcemanager-deployment.yaml -kubectl delete -f linkis-ujes-jdbc-entrance-deployment.yaml -kubectl delete -f linkis-ujes-hive-entrance-deployment.yaml -kubectl delete -f linkis-ujes-hive-enginemanager-deployment.yaml -kubectl delete -f linkis-ujes-mlsql-entrance-deployment.yaml -kubectl delete -f linkis-ujes-pipeline-entrance-deployment.yaml -kubectl delete -f linkis-ujes-pipeline-enginemanager-deployment.yaml -kubectl delete -f linkis-ujes-python-entrance-deployment.yaml -kubectl delete -f linkis-ujes-python-enginemanager-deployment.yaml -kubectl delete -f linkis-ujes-shell-entrance-deployment.yaml -kubectl delete -f 
linkis-ujes-shell-enginemanager-deployment.yaml -kubectl delete -f linkis-ujes-spark-entrance-deployment.yaml -kubectl delete -f linkis-ujes-spark-enginemanager-deployment.yaml - -kubectl delete -f linkis-gateway-service.yaml - -kubectl delete -f linkis-dsm-server-deployment.yaml -kubectl delete -f linkis-mdm-server-deployment.yaml -kubectl delete -f linkis-mdm-service-es-deployment.yaml -kubectl delete -f linkis-mdm-service-hive-deployment.yaml -kubectl delete -f linkis-mdm-service-mysql-deployment.yaml - - - diff --git a/k8s/ingress-nginx-controller.yaml b/k8s/ingress-nginx-controller.yaml deleted file mode 100644 index 910531082b2c4e8d468882b269d6bc8ee2b592bf..0000000000000000000000000000000000000000 --- a/k8s/ingress-nginx-controller.yaml +++ /dev/null @@ -1,678 +0,0 @@ -# -# Copyright 2019 WeBank -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -apiVersion: v1 -kind: Namespace -metadata: - name: ingress-nginx - labels: - app.kubernetes.io/name: ingress-nginx - app.kubernetes.io/instance: ingress-nginx - ---- -# Source: ingress-nginx/templates/controller-serviceaccount.yaml -apiVersion: v1 -kind: ServiceAccount -metadata: - labels: - helm.sh/chart: ingress-nginx-2.4.0 - app.kubernetes.io/name: ingress-nginx - app.kubernetes.io/instance: ingress-nginx - app.kubernetes.io/version: 0.33.0 - app.kubernetes.io/managed-by: Helm - app.kubernetes.io/component: controller - name: ingress-nginx - namespace: ingress-nginx ---- -# Source: ingress-nginx/templates/controller-configmap.yaml -apiVersion: v1 -kind: ConfigMap -metadata: - labels: - helm.sh/chart: ingress-nginx-2.4.0 - app.kubernetes.io/name: ingress-nginx - app.kubernetes.io/instance: ingress-nginx - app.kubernetes.io/version: 0.33.0 - app.kubernetes.io/managed-by: Helm - app.kubernetes.io/component: controller - name: ingress-nginx-controller - namespace: ingress-nginx -data: ---- -# Source: ingress-nginx/templates/clusterrole.yaml -apiVersion: rbac.authorization.k8s.io/v1 -kind: ClusterRole -metadata: - labels: - helm.sh/chart: ingress-nginx-2.4.0 - app.kubernetes.io/name: ingress-nginx - app.kubernetes.io/instance: ingress-nginx - app.kubernetes.io/version: 0.33.0 - app.kubernetes.io/managed-by: Helm - name: ingress-nginx - namespace: ingress-nginx -rules: - - apiGroups: - - '' - resources: - - configmaps - - endpoints - - nodes - - pods - - secrets - verbs: - - list - - watch - - apiGroups: - - '' - resources: - - nodes - verbs: - - get - - apiGroups: - - '' - resources: - - services - verbs: - - get - - list - - update - - watch - - apiGroups: - - extensions - - networking.k8s.io # k8s 1.14+ - resources: - - ingresses - verbs: - - get - - list - - watch - - apiGroups: - - '' - resources: - - events - verbs: - - create - - patch - - apiGroups: - - extensions - - networking.k8s.io # k8s 1.14+ - resources: - - ingresses/status - verbs: - - update - - 
apiGroups: - - networking.k8s.io # k8s 1.14+ - resources: - - ingressclasses - verbs: - - get - - list - - watch ---- -# Source: ingress-nginx/templates/clusterrolebinding.yaml -apiVersion: rbac.authorization.k8s.io/v1 -kind: ClusterRoleBinding -metadata: - labels: - helm.sh/chart: ingress-nginx-2.4.0 - app.kubernetes.io/name: ingress-nginx - app.kubernetes.io/instance: ingress-nginx - app.kubernetes.io/version: 0.33.0 - app.kubernetes.io/managed-by: Helm - name: ingress-nginx - namespace: ingress-nginx -roleRef: - apiGroup: rbac.authorization.k8s.io - kind: ClusterRole - name: ingress-nginx -subjects: - - kind: ServiceAccount - name: ingress-nginx - namespace: ingress-nginx ---- -# Source: ingress-nginx/templates/controller-role.yaml -apiVersion: rbac.authorization.k8s.io/v1 -kind: Role -metadata: - labels: - helm.sh/chart: ingress-nginx-2.4.0 - app.kubernetes.io/name: ingress-nginx - app.kubernetes.io/instance: ingress-nginx - app.kubernetes.io/version: 0.33.0 - app.kubernetes.io/managed-by: Helm - app.kubernetes.io/component: controller - name: ingress-nginx - namespace: ingress-nginx -rules: - - apiGroups: - - '' - resources: - - namespaces - verbs: - - get - - apiGroups: - - '' - resources: - - configmaps - - pods - - secrets - - endpoints - verbs: - - get - - list - - watch - - apiGroups: - - '' - resources: - - services - verbs: - - get - - list - - update - - watch - - apiGroups: - - extensions - - networking.k8s.io # k8s 1.14+ - resources: - - ingresses - verbs: - - get - - list - - watch - - apiGroups: - - extensions - - networking.k8s.io # k8s 1.14+ - resources: - - ingresses/status - verbs: - - update - - apiGroups: - - networking.k8s.io # k8s 1.14+ - resources: - - ingressclasses - verbs: - - get - - list - - watch - - apiGroups: - - '' - resources: - - configmaps - resourceNames: - - ingress-controller-leader-nginx - verbs: - - get - - update - - apiGroups: - - '' - resources: - - configmaps - verbs: - - create - - apiGroups: - - '' - resources: - - 
endpoints - verbs: - - create - - get - - update - - apiGroups: - - '' - resources: - - events - verbs: - - create - - patch ---- -# Source: ingress-nginx/templates/controller-rolebinding.yaml -apiVersion: rbac.authorization.k8s.io/v1 -kind: RoleBinding -metadata: - labels: - helm.sh/chart: ingress-nginx-2.4.0 - app.kubernetes.io/name: ingress-nginx - app.kubernetes.io/instance: ingress-nginx - app.kubernetes.io/version: 0.33.0 - app.kubernetes.io/managed-by: Helm - app.kubernetes.io/component: controller - name: ingress-nginx - namespace: ingress-nginx -roleRef: - apiGroup: rbac.authorization.k8s.io - kind: Role - name: ingress-nginx -subjects: - - kind: ServiceAccount - name: ingress-nginx - namespace: ingress-nginx ---- -# Source: ingress-nginx/templates/controller-service-webhook.yaml -apiVersion: v1 -kind: Service -metadata: - labels: - helm.sh/chart: ingress-nginx-2.4.0 - app.kubernetes.io/name: ingress-nginx - app.kubernetes.io/instance: ingress-nginx - app.kubernetes.io/version: 0.33.0 - app.kubernetes.io/managed-by: Helm - app.kubernetes.io/component: controller - name: ingress-nginx-controller-admission - namespace: ingress-nginx -spec: - type: ClusterIP - ports: - - name: https-webhook - port: 443 - targetPort: webhook - selector: - app.kubernetes.io/name: ingress-nginx - app.kubernetes.io/instance: ingress-nginx - app.kubernetes.io/component: controller ---- -# Source: ingress-nginx/templates/controller-service.yaml -apiVersion: v1 -kind: Service -metadata: - labels: - helm.sh/chart: ingress-nginx-2.4.0 - app.kubernetes.io/name: ingress-nginx - app.kubernetes.io/instance: ingress-nginx - app.kubernetes.io/version: 0.33.0 - app.kubernetes.io/managed-by: Helm - app.kubernetes.io/component: controller - name: ingress-nginx-controller - namespace: ingress-nginx -spec: - type: NodePort - ports: - - name: http - port: 80 - protocol: TCP - #targetPort: http - nodePort: 31234 - - name: https - port: 443 - protocol: TCP - #targetPort: https - nodePort: 31235 - 
selector: - app.kubernetes.io/name: ingress-nginx - app.kubernetes.io/instance: ingress-nginx - app.kubernetes.io/component: controller ---- -# Source: ingress-nginx/templates/controller-deployment.yaml -apiVersion: apps/v1 -kind: Deployment -metadata: - labels: - helm.sh/chart: ingress-nginx-2.4.0 - app.kubernetes.io/name: ingress-nginx - app.kubernetes.io/instance: ingress-nginx - app.kubernetes.io/version: 0.33.0 - app.kubernetes.io/managed-by: Helm - app.kubernetes.io/component: controller - nginx.ingress.kubernetes.io/proxy-connect-timeout: "600" - nginx.ingress.kubernetes.io/proxy-read-timeout: "600" - nginx.ingress.kubernetes.io/proxy-send-timeout: "600" - name: ingress-nginx-controller - namespace: ingress-nginx -spec: - selector: - matchLabels: - app.kubernetes.io/name: ingress-nginx - app.kubernetes.io/instance: ingress-nginx - app.kubernetes.io/component: controller - revisionHistoryLimit: 10 - strategy: - rollingUpdate: - maxUnavailable: 1 - type: RollingUpdate - minReadySeconds: 0 - template: - metadata: - labels: - app.kubernetes.io/name: ingress-nginx - app.kubernetes.io/instance: ingress-nginx - app.kubernetes.io/component: controller - spec: - dnsPolicy: ClusterFirst - containers: - - name: controller - image: quay.io/kubernetes-ingress-controller/nginx-ingress-controller:0.33.0 - imagePullPolicy: IfNotPresent - lifecycle: - preStop: - exec: - command: - - /wait-shutdown - args: - - /nginx-ingress-controller - - --election-id=ingress-controller-leader - - --ingress-class=nginx - - --configmap=ingress-nginx/ingress-nginx-controller - - --validating-webhook=:8443 - - --validating-webhook-certificate=/usr/local/certificates/cert - - --validating-webhook-key=/usr/local/certificates/key - - --publish-status-address=localhost - securityContext: - capabilities: - drop: - - ALL - add: - - NET_BIND_SERVICE - runAsUser: 101 - allowPrivilegeEscalation: true - env: - - name: POD_NAME - valueFrom: - fieldRef: - fieldPath: metadata.name - - name: POD_NAMESPACE - 
valueFrom: - fieldRef: - fieldPath: metadata.namespace - livenessProbe: - httpGet: - path: /healthz - port: 10254 - scheme: HTTP - initialDelaySeconds: 10 - periodSeconds: 10 - timeoutSeconds: 1 - successThreshold: 1 - failureThreshold: 3 - readinessProbe: - httpGet: - path: /healthz - port: 10254 - scheme: HTTP - initialDelaySeconds: 10 - periodSeconds: 10 - timeoutSeconds: 1 - successThreshold: 1 - failureThreshold: 3 - ports: - - name: http - containerPort: 80 - protocol: TCP - hostPort: 80 - - name: https - containerPort: 443 - protocol: TCP - hostPort: 443 - - name: webhook - containerPort: 8443 - protocol: TCP - volumeMounts: - - name: webhook-cert - mountPath: /usr/local/certificates/ - readOnly: true - resources: - requests: - cpu: 100m - memory: 90Mi - nodeSelector: - ingress-ready: 'true' - tolerations: - - effect: NoSchedule - key: node-role.kubernetes.io/master - operator: Equal - serviceAccountName: ingress-nginx - terminationGracePeriodSeconds: 0 - volumes: - - name: webhook-cert - secret: - secretName: ingress-nginx-admission ---- -# Source: ingress-nginx/templates/admission-webhooks/validating-webhook.yaml -apiVersion: admissionregistration.k8s.io/v1beta1 -kind: ValidatingWebhookConfiguration -metadata: - labels: - helm.sh/chart: ingress-nginx-2.4.0 - app.kubernetes.io/name: ingress-nginx - app.kubernetes.io/instance: ingress-nginx - app.kubernetes.io/version: 0.33.0 - app.kubernetes.io/managed-by: Helm - app.kubernetes.io/component: admission-webhook - name: ingress-nginx-admission - namespace: ingress-nginx -webhooks: - - name: validate.nginx.ingress.kubernetes.io - rules: - - apiGroups: - - extensions - - networking.k8s.io - apiVersions: - - v1beta1 - operations: - - CREATE - - UPDATE - resources: - - ingresses - failurePolicy: Fail - clientConfig: - service: - namespace: ingress-nginx - name: ingress-nginx-controller-admission - path: /extensions/v1beta1/ingresses ---- -# Source: 
ingress-nginx/templates/admission-webhooks/job-patch/clusterrole.yaml -apiVersion: rbac.authorization.k8s.io/v1 -kind: ClusterRole -metadata: - name: ingress-nginx-admission - annotations: - helm.sh/hook: pre-install,pre-upgrade,post-install,post-upgrade - helm.sh/hook-delete-policy: before-hook-creation,hook-succeeded - labels: - helm.sh/chart: ingress-nginx-2.4.0 - app.kubernetes.io/name: ingress-nginx - app.kubernetes.io/instance: ingress-nginx - app.kubernetes.io/version: 0.33.0 - app.kubernetes.io/managed-by: Helm - app.kubernetes.io/component: admission-webhook - namespace: ingress-nginx -rules: - - apiGroups: - - admissionregistration.k8s.io - resources: - - validatingwebhookconfigurations - verbs: - - get - - update ---- -# Source: ingress-nginx/templates/admission-webhooks/job-patch/clusterrolebinding.yaml -apiVersion: rbac.authorization.k8s.io/v1 -kind: ClusterRoleBinding -metadata: - name: ingress-nginx-admission - annotations: - helm.sh/hook: pre-install,pre-upgrade,post-install,post-upgrade - helm.sh/hook-delete-policy: before-hook-creation,hook-succeeded - labels: - helm.sh/chart: ingress-nginx-2.4.0 - app.kubernetes.io/name: ingress-nginx - app.kubernetes.io/instance: ingress-nginx - app.kubernetes.io/version: 0.33.0 - app.kubernetes.io/managed-by: Helm - app.kubernetes.io/component: admission-webhook - namespace: ingress-nginx -roleRef: - apiGroup: rbac.authorization.k8s.io - kind: ClusterRole - name: ingress-nginx-admission -subjects: - - kind: ServiceAccount - name: ingress-nginx-admission - namespace: ingress-nginx ---- -# Source: ingress-nginx/templates/admission-webhooks/job-patch/job-createSecret.yaml -apiVersion: batch/v1 -kind: Job -metadata: - name: ingress-nginx-admission-create - annotations: - helm.sh/hook: pre-install,pre-upgrade - helm.sh/hook-delete-policy: before-hook-creation,hook-succeeded - labels: - helm.sh/chart: ingress-nginx-2.4.0 - app.kubernetes.io/name: ingress-nginx - app.kubernetes.io/instance: ingress-nginx - 
app.kubernetes.io/version: 0.33.0 - app.kubernetes.io/managed-by: Helm - app.kubernetes.io/component: admission-webhook - namespace: ingress-nginx -spec: - template: - metadata: - name: ingress-nginx-admission-create - labels: - helm.sh/chart: ingress-nginx-2.4.0 - app.kubernetes.io/name: ingress-nginx - app.kubernetes.io/instance: ingress-nginx - app.kubernetes.io/version: 0.33.0 - app.kubernetes.io/managed-by: Helm - app.kubernetes.io/component: admission-webhook - spec: - containers: - - name: create - image: jettech/kube-webhook-certgen:v1.2.0 - imagePullPolicy: IfNotPresent - args: - - create - - --host=ingress-nginx-controller-admission,ingress-nginx-controller-admission.ingress-nginx.svc - - --namespace=ingress-nginx - - --secret-name=ingress-nginx-admission - restartPolicy: OnFailure - serviceAccountName: ingress-nginx-admission - securityContext: - runAsNonRoot: true - runAsUser: 2000 ---- -# Source: ingress-nginx/templates/admission-webhooks/job-patch/job-patchWebhook.yaml -apiVersion: batch/v1 -kind: Job -metadata: - name: ingress-nginx-admission-patch - annotations: - helm.sh/hook: post-install,post-upgrade - helm.sh/hook-delete-policy: before-hook-creation,hook-succeeded - labels: - helm.sh/chart: ingress-nginx-2.4.0 - app.kubernetes.io/name: ingress-nginx - app.kubernetes.io/instance: ingress-nginx - app.kubernetes.io/version: 0.33.0 - app.kubernetes.io/managed-by: Helm - app.kubernetes.io/component: admission-webhook - namespace: ingress-nginx -spec: - template: - metadata: - name: ingress-nginx-admission-patch - labels: - helm.sh/chart: ingress-nginx-2.4.0 - app.kubernetes.io/name: ingress-nginx - app.kubernetes.io/instance: ingress-nginx - app.kubernetes.io/version: 0.33.0 - app.kubernetes.io/managed-by: Helm - app.kubernetes.io/component: admission-webhook - spec: - containers: - - name: patch - image: jettech/kube-webhook-certgen:v1.2.0 - imagePullPolicy: IfNotPresent - args: - - patch - - --webhook-name=ingress-nginx-admission - - 
--namespace=ingress-nginx - - --patch-mutating=false - - --secret-name=ingress-nginx-admission - - --patch-failure-policy=Fail - restartPolicy: OnFailure - serviceAccountName: ingress-nginx-admission - securityContext: - runAsNonRoot: true - runAsUser: 2000 ---- -# Source: ingress-nginx/templates/admission-webhooks/job-patch/role.yaml -apiVersion: rbac.authorization.k8s.io/v1 -kind: Role -metadata: - name: ingress-nginx-admission - annotations: - helm.sh/hook: pre-install,pre-upgrade,post-install,post-upgrade - helm.sh/hook-delete-policy: before-hook-creation,hook-succeeded - labels: - helm.sh/chart: ingress-nginx-2.4.0 - app.kubernetes.io/name: ingress-nginx - app.kubernetes.io/instance: ingress-nginx - app.kubernetes.io/version: 0.33.0 - app.kubernetes.io/managed-by: Helm - app.kubernetes.io/component: admission-webhook - namespace: ingress-nginx -rules: - - apiGroups: - - '' - resources: - - secrets - verbs: - - get - - create ---- -# Source: ingress-nginx/templates/admission-webhooks/job-patch/rolebinding.yaml -apiVersion: rbac.authorization.k8s.io/v1 -kind: RoleBinding -metadata: - name: ingress-nginx-admission - annotations: - helm.sh/hook: pre-install,pre-upgrade,post-install,post-upgrade - helm.sh/hook-delete-policy: before-hook-creation,hook-succeeded - labels: - helm.sh/chart: ingress-nginx-2.4.0 - app.kubernetes.io/name: ingress-nginx - app.kubernetes.io/instance: ingress-nginx - app.kubernetes.io/version: 0.33.0 - app.kubernetes.io/managed-by: Helm - app.kubernetes.io/component: admission-webhook - namespace: ingress-nginx -roleRef: - apiGroup: rbac.authorization.k8s.io - kind: Role - name: ingress-nginx-admission -subjects: - - kind: ServiceAccount - name: ingress-nginx-admission - namespace: ingress-nginx ---- -# Source: ingress-nginx/templates/admission-webhooks/job-patch/serviceaccount.yaml -apiVersion: v1 -kind: ServiceAccount -metadata: - name: ingress-nginx-admission - annotations: - helm.sh/hook: pre-install,pre-upgrade,post-install,post-upgrade 
- helm.sh/hook-delete-policy: before-hook-creation,hook-succeeded - labels: - helm.sh/chart: ingress-nginx-2.4.0 - app.kubernetes.io/name: ingress-nginx - app.kubernetes.io/instance: ingress-nginx - app.kubernetes.io/version: 0.33.0 - app.kubernetes.io/managed-by: Helm - app.kubernetes.io/component: admission-webhook - namespace: ingress-nginx \ No newline at end of file diff --git a/k8s/ingress.yaml b/k8s/ingress.yaml deleted file mode 100644 index 6811557df7410f00f2b0ce5f261049f6c58f45ec..0000000000000000000000000000000000000000 --- a/k8s/ingress.yaml +++ /dev/null @@ -1,36 +0,0 @@ -# -# Copyright 2019 WeBank -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -apiVersion: extensions/v1beta1 -kind: Ingress -metadata: - name: linkis-gateway-ingress -spec: - rules: - - host: - http: - paths: - - path: /api - backend: - serviceName: linkis-gateway-service - servicePort: 14001 - - path: /ws - backend: - serviceName: linkis-gateway-service - servicePort: 14001 - - path: / - backend: - serviceName: scriptis-service - servicePort: 80 \ No newline at end of file diff --git a/k8s/init.sh b/k8s/init.sh deleted file mode 100644 index 61748af2ed8b14c57e352d6b1cbf13845dc64322..0000000000000000000000000000000000000000 --- a/k8s/init.sh +++ /dev/null @@ -1,58 +0,0 @@ -kubectl create -f register.yaml - -kubectl apply -f linkis-eureka-configmap.yaml - -kubectl create -f linkis-gateway-configmap.yaml -kubectl create -f linkis-metadata-configmap.yaml -kubectl create -f linkis-publicservice-configmap.yaml -kubectl create -f linkis-resourcemanager-configmap.yaml -kubectl create -f linkis-cs-server-configmap.yaml -kubectl create -f linkis-bml-configmap.yaml -kubectl create -f linkis-ujes-hive-enginemanager-configmap.yaml -kubectl create -f linkis-ujes-hive-entrance-configmap.yaml -kubectl create -f linkis-ujes-jdbc-entrance-configmap.yaml -kubectl create -f linkis-ujes-mlsql-entrance-configmap.yaml -kubectl create -f linkis-ujes-pipeline-enginemanager-configmap.yaml -kubectl create -f linkis-ujes-pipeline-entrance-configmap.yaml -kubectl create -f linkis-ujes-python-enginemanager-configmap.yaml -kubectl create -f linkis-ujes-python-entrance-configmap.yaml -kubectl create -f linkis-ujes-shell-enginemanager-configmap.yaml -kubectl create -f linkis-ujes-shell-entrance-configmap.yaml -kubectl create -f linkis-ujes-spark-entrance-configmap.yaml -kubectl create -f linkis-ujes-spark-enginemanager-configmap.yaml - -kubectl create -f linkis-dsm-server-configmap.yaml -kubectl create -f linkis-mdm-server-configmap.yaml -kubectl create -f linkis-mdm-service-es-configmap.yaml -kubectl create -f linkis-mdm-service-hive-configmap.yaml -kubectl create -f 
linkis-mdm-service-mysql-configmap.yaml - - -kubectl create -f linkis-gateway-deployment.yaml -kubectl create -f linkis-bml-deployment.yaml -kubectl create -f linkis-metadata-deployment.yaml -kubectl create -f linkis-publicservice-deployment.yaml -kubectl create -f linkis-resourcemanager-deployment.yaml -kubectl create -f linkis-cs-server-deployment.yaml -kubectl create -f linkis-ujes-jdbc-entrance-deployment.yaml -kubectl create -f linkis-ujes-hive-entrance-deployment.yaml -kubectl create -f linkis-ujes-hive-enginemanager-deployment.yaml -kubectl create -f linkis-ujes-mlsql-entrance-deployment.yaml -kubectl create -f linkis-ujes-pipeline-entrance-deployment.yaml -kubectl create -f linkis-ujes-pipeline-enginemanager-deployment.yaml -kubectl create -f linkis-ujes-python-entrance-deployment.yaml -kubectl create -f linkis-ujes-python-enginemanager-deployment.yaml -kubectl create -f linkis-ujes-shell-entrance-deployment.yaml -kubectl create -f linkis-ujes-shell-enginemanager-deployment.yaml -kubectl create -f linkis-ujes-spark-entrance-deployment.yaml -kubectl create -f linkis-ujes-spark-enginemanager-deployment.yaml - -kubectl create -f linkis-gateway-service.yaml - - -kubectl create -f linkis-dsm-server-deployment.yaml -kubectl create -f linkis-mdm-server-deployment.yaml -kubectl create -f linkis-mdm-service-es-deployment.yaml -kubectl create -f linkis-mdm-service-hive-deployment.yaml -kubectl create -f linkis-mdm-service-mysql-deployment.yaml - diff --git a/k8s/linkis-bml-configmap.yaml b/k8s/linkis-bml-configmap.yaml deleted file mode 100644 index 0ced16555dc2b06e2dd9cfb102e03a2c1ff6ca91..0000000000000000000000000000000000000000 --- a/k8s/linkis-bml-configmap.yaml +++ /dev/null @@ -1,46 +0,0 @@ -# -# Copyright 2019 WeBank -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -apiVersion: v1 -data: - linkis.properties: |+ - wds.linkis.server.mybatis.mapperLocations=classpath:com/webank/wedatasphere/linkis/bml/dao/impl/*.xml - wds.linkis.server.mybatis.typeAliasesPackage=com.webank.wedatasphere.linkis.bml.dao - wds.linkis.server.mybatis.BasePackage=com.webank.wedatasphere.linkis.bml.dao - wds.test.mode=true - wds.linkis.server.restful.scan.packages=com.webank.wedatasphere.linkis.bml.restful - - #sit - wds.linkis.server.mybatis.datasource.url= - wds.linkis.server.mybatis.datasource.username= - wds.linkis.server.mybatis.datasource.password= - wds.linkis.server.version=v1 - - #hadoop.config.dir - -kind: ConfigMap -metadata: - managedFields: - - apiVersion: v1 - fieldsType: FieldsV1 - fieldsV1: - f:data: - .: {} - f:linkis-bml.properties: {} - manager: kubectl - operation: Update - name: linkis-bml-config - namespace: default - diff --git a/k8s/linkis-bml-deployment.yaml b/k8s/linkis-bml-deployment.yaml deleted file mode 100644 index 3fe08553bd7179237575d7d12f8d27fd666fd0ac..0000000000000000000000000000000000000000 --- a/k8s/linkis-bml-deployment.yaml +++ /dev/null @@ -1,111 +0,0 @@ -# -# Copyright 2019 WeBank -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -apiVersion: apps/v1 -kind: Deployment -metadata: - name: linkis-bml-deployment -spec: - replicas: 1 - selector: - matchLabels: - app: linkis-bml - # release: dev - strategy: - rollingUpdate: - maxSurge: 1 - maxUnavailable: 1 - type: RollingUpdate - - template: - metadata: - labels: - app: linkis-bml - # release: dev - spec: - affinity: - podAntiAffinity: - requiredDuringSchedulingIgnoredDuringExecution: - - labelSelector: - matchExpressions: - - key: "app" - operator: In - values: - - linkis-bml - topologyKey: "kubernetes.io/hostname" - containers: - - name: linkis-bml - image: wedatasphere/linkis:linkis-bml-0.10.0 - lifecycle: - preStop: - exec: - command: ["sh","/opt/linkis/linkis-bml/bin/stop.sh"] - imagePullPolicy: Always - ports: - - name: http - containerPort: 14009 - livenessProbe: - tcpSocket: - port: 14009 - initialDelaySeconds: 15 - periodSeconds: 20 - env: - - name: EUREKA_URL - valueFrom: - configMapKeyRef: - name: eureka-config - key: eurekaUrl - - name: SERVER_HEAP_SIZE - value: 1024M - - name: START_PORT - value: "14009" - volumeMounts: - - name: linkis-bml-config - mountPath: /opt/linkis/conf - # - name: resultset - # mountPath: /opt/linkis/data - - name: varlog - mountPath: /opt/linkis/linkis-bml/logs - - name: hadoop-config - mountPath: /opt/hadoop/hadoop-2.7.7/etc/hadoop - - name: hive-config - mountPath: /opt/hive/apache-hive-2.3.6-bin/conf - - name: spark-config - mountPath: /opt/spark/spark-2.4.4-bin-hadoop2.7/conf - imagePullSecrets: - - name: registry-key - volumes: - - name: eureka-config - configMap: - name: eureka-config - - 
name: linkis-bml-config - configMap: - name: linkis-bml-config - # - name: resultset - # nfs: - # path: /data/k8s - # server: 10.206.0.12 - - name: varlog - hostPath: - path: /var/log - - name: hadoop-config - hostPath: - path: /opt/hadoop/hadoop-2.7.7/etc/hadoop - - name: hive-config - hostPath: - path: /opt/hive/apache-hive-2.3.6-bin/conf - - name: spark-config - hostPath: - path: /opt/spark/spark-2.4.4-bin-hadoop2.7/conf diff --git a/k8s/linkis-cs-server-configmap.yaml b/k8s/linkis-cs-server-configmap.yaml deleted file mode 100644 index 4ed38082ef06b818d90a99bc6e69c14feb291a10..0000000000000000000000000000000000000000 --- a/k8s/linkis-cs-server-configmap.yaml +++ /dev/null @@ -1,44 +0,0 @@ -# -# Copyright 2019 WeBank -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-apiVersion: v1 -data: - linkis.properties: |+ - #wds.linkis.test.mode=true - wds.linkis.server.mybatis.datasource.url=jdbc:mysql://127.0.0.1:3306/ide_gz_bdap_sit_01?characterEncoding=UTF-8 - wds.linkis.server.mybatis.datasource.username= - wds.linkis.server.mybatis.datasource.password= - wds.linkis.log.clear=true - wds.linkis.server.version=v1 - ##restful - wds.linkis.server.restful.scan.packages=com.webank.wedatasphere.linkis.cs.server.restful - ##mybatis - wds.linkis.server.mybatis.mapperLocations=classpath*:com\\webank\\wedatasphere\\linkis\\cs\\persistence\\dao\\impl\\*.xml - wds.linkis.server.mybatis.typeAliasesPackage=com.webank.wedatasphere.linkis.cs.persistence.entity - wds.linkis.server.mybatis.BasePackage=com.webank.wedatasphere.linkis.cs.persistence.dao - -kind: ConfigMap -metadata: - managedFields: - - apiVersion: v1 - fieldsType: FieldsV1 - fieldsV1: - f:data: - .: {} - f:linkis-cs-server.properties: {} - manager: kubectl - operation: Update - name: linkis-cs-server-config - namespace: default - diff --git a/k8s/linkis-cs-server-deployment.yaml b/k8s/linkis-cs-server-deployment.yaml deleted file mode 100644 index 1b1eb80ebfc51c6a9a5a8ba9c065257723c82075..0000000000000000000000000000000000000000 --- a/k8s/linkis-cs-server-deployment.yaml +++ /dev/null @@ -1,115 +0,0 @@ -# -# Copyright 2019 WeBank -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -apiVersion: apps/v1 -kind: Deployment -metadata: - name: linkis-cs-server-deployment -spec: - replicas: 1 - selector: - matchLabels: - app: linkis-cs-server - # release: dev - strategy: - rollingUpdate: - maxSurge: 1 - maxUnavailable: 1 - type: RollingUpdate - template: - metadata: - labels: - app: linkis-cs-server - release: dev - spec: - affinity: - podAntiAffinity: - requiredDuringSchedulingIgnoredDuringExecution: - - labelSelector: - matchExpressions: - - key: "app" - operator: In - values: - - linkis-cs-server - topologyKey: "kubernetes.io/hostname" - containers: - - name: linkis-cs-server - image: wedatasphere/linkis:linkis-cs-server-0.10.0 - lifecycle: - preStop: - exec: - command: ["sh","/opt/linkis/linkis-cs-server/bin/stop.sh"] - imagePullPolicy: Always - ports: - - name: http - containerPort: 14004 - livenessProbe: - tcpSocket: - port: 14004 - initialDelaySeconds: 15 - periodSeconds: 20 - env: - - name: eurekaurl - valueFrom: - configMapKeyRef: - name: eureka-config - key: eurekaUrl - - name: EUREKA_URL - valueFrom: - configMapKeyRef: - name: eureka-config - key: eurekaUrl - - name: SERVER_HEAP_SIZE - value: 1024M - - name: START_PORT - value: "14004" - volumeMounts: - - name: linkis-cs-server-config - mountPath: /opt/linkis/conf - # - name: resultset - # mountPath: /opt/linkis/data - - name: varlog - mountPath: /opt/linkis/linkis-cs-server/logs - - name: hadoop-config - mountPath: /opt/hadoop/hadoop-2.7.7/etc/hadoop - - name: hive-config - mountPath: /opt/hive/apache-hive-2.3.6-bin/conf - - name: spark-config - mountPath: /opt/spark/spark-2.4.4-bin-hadoop2.7/conf - imagePullSecrets: - - name: registry-key - volumes: - - name: linkis-cs-server-config - configMap: - name: linkis-cs-server-config - # - name: resultset - # nfs: - # path: /data/k8s - # server: 10.206.0.12 - - name: varlog - hostPath: - path: /var/log - - name: eureka-config - configMap: - name: eureka-config - - name: hadoop-config - hostPath: - path: /opt/hadoop/hadoop-2.7.7/etc/hadoop - 
- name: hive-config - hostPath: - path: /opt/hive/apache-hive-2.3.6-bin/conf - - name: spark-config - hostPath: - path: /opt/spark/spark-2.4.4-bin-hadoop2.7/conf diff --git a/k8s/linkis-dsm-server-configmap.yaml b/k8s/linkis-dsm-server-configmap.yaml deleted file mode 100644 index 835fdb9cfd0abb80009406b8cb6e799769f4b072..0000000000000000000000000000000000000000 --- a/k8s/linkis-dsm-server-configmap.yaml +++ /dev/null @@ -1,68 +0,0 @@ -# -# Copyright 2019 WeBank -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -apiVersion: v1 -data: - linkis.properties: |+ - # - # Copyright 2019 WeBank - # Licensed under the Apache License, Version 2.0 (the "License"); - # you may not use this file except in compliance with the License. - # You may obtain a copy of the License at - # http://www.apache.org/licenses/LICENSE-2.0 - # Unless required by applicable law or agreed to in writing, software - # distributed under the License is distributed on an "AS IS" BASIS, - # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - # See the License for the specific language governing permissions and - # limitations under the License. 
- # - - wds.linkis.server.mybatis.mapperLocations=classpath:com/webank/wedatasphere/linkis/datasourcemanager/core/dao/mapper/*.xml - wds.linkis.server.mybatis.typeAliasesPackage=com.webank.wedatasphere.linkis.datasourcemanager.common.domain - wds.linkis.server.mybatis.BasePackage=com.webank.wedatasphere.linkis.datasourcemanager.core.dao - - wds.linkis.server.restful.scan.packages=com.webank.wedatasphere.linkis.datasourcemanager.core.restful - - #sit - wds.linkis.server.version=v1 - - #test - wds.linkis.test.mode=true - wds.linkis.test.user= - wds.linkis.server.mybatis.datasource.url= - wds.linkis.server.mybatis.datasource.username= - wds.linkis.server.mybatis.datasource.password= - - #dsm - wds.linkis.server.dsm.admin.users= - - #bml - wds.linkis.gateway.ip= - wds.linkis.gateway.port= - -kind: ConfigMap -metadata: - managedFields: - - apiVersion: v1 - fieldsType: FieldsV1 - fieldsV1: - f:data: - .: {} - f:linkis-bml.properties: {} - manager: kubectl - operation: Update - name: linkis-dsm-server-config - namespace: default - diff --git a/k8s/linkis-dsm-server-deployment.yaml b/k8s/linkis-dsm-server-deployment.yaml deleted file mode 100644 index e01377b1fc5f7be0c9a77b4c8b6995a6f02ab2c9..0000000000000000000000000000000000000000 --- a/k8s/linkis-dsm-server-deployment.yaml +++ /dev/null @@ -1,114 +0,0 @@ -# -# Copyright 2019 WeBank -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-apiVersion: apps/v1 -kind: Deployment -metadata: - name: linkis-dsm-server-deployment -spec: - replicas: 1 - selector: - matchLabels: - app: linkis-dsm-server - # release: dev - strategy: - rollingUpdate: - maxSurge: 1 - maxUnavailable: 1 - type: RollingUpdate - template: - metadata: - labels: - app: linkis-dsm-server - # release: dev - spec: - affinity: - podAntiAffinity: - requiredDuringSchedulingIgnoredDuringExecution: - - labelSelector: - matchExpressions: - - key: "app" - operator: In - values: - - linkis-dsm-server - topologyKey: "kubernetes.io/hostname" - containers: - - name: linkis-dsm-server - image: wedatasphere/linkis:linkis-dsm-server-0.10.0 - lifecycle: - preStop: - exec: - command: ["sh","/opt/linkis/linkis-dsm-server/bin/stop.sh"] - imagePullPolicy: Always - ports: - - name: http - containerPort: 22000 - livenessProbe: - tcpSocket: - port: 22000 - initialDelaySeconds: 15 - periodSeconds: 20 - env: - - name: eurekaurl - valueFrom: - configMapKeyRef: - name: eureka-config - key: eurekaUrl - - name: EUREKA_URL - valueFrom: - configMapKeyRef: - name: eureka-config - key: eurekaUrl - - name: SERVER_HEAP_SIZE - value: 1024M - - name: START_PORT - value: "22000" - volumeMounts: - - name: linkis-dsm-server-config - mountPath: /opt/linkis/conf - # - name: resultset - # mountPath: /opt/linkis/data - - name: varlog - mountPath: /opt/linkis/linkis-dsm-server/logs - - name: hadoop-config - mountPath: /opt/hadoop/hadoop-2.7.7/etc/hadoop - - name: hive-config - mountPath: /opt/hive/apache-hive-2.3.6-bin/conf - - name: spark-config - mountPath: /opt/spark/spark-2.4.4-bin-hadoop2.7/conf - imagePullSecrets: - - name: registry-key - volumes: - - name: eureka-config - configMap: - name: eureka-config - - name: linkis-dsm-server-config - configMap: - name: linkis-dsm-server-config - # - name: resultset - # nfs: - # path: /data/k8s - # server: 10.206.0.12 - - name: varlog - hostPath: - path: /var/log - - name: hadoop-config - hostPath: - path: 
/opt/hadoop/hadoop-2.7.7/etc/hadoop - - name: hive-config - hostPath: - path: /opt/hive/apache-hive-2.3.6-bin/conf - - name: spark-config - hostPath: - path: /opt/spark/spark-2.4.4-bin-hadoop2.7/conf diff --git a/k8s/linkis-eureka-configmap.yaml b/k8s/linkis-eureka-configmap.yaml deleted file mode 100644 index 03d4c1d7cf6377e7a87e8e657aee50dd4919c332..0000000000000000000000000000000000000000 --- a/k8s/linkis-eureka-configmap.yaml +++ /dev/null @@ -1,22 +0,0 @@ -# -# Copyright 2019 WeBank -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -apiVersion: v1 -kind: ConfigMap -metadata: - name: eureka-config -data: - eurekaUrl: http://10.206.0.12:20230/eureka/ - diff --git a/k8s/linkis-gateway-configmap.yaml b/k8s/linkis-gateway-configmap.yaml deleted file mode 100644 index cdfb4fe47ba93eda3b6e8503f39fb3c5f2038eb1..0000000000000000000000000000000000000000 --- a/k8s/linkis-gateway-configmap.yaml +++ /dev/null @@ -1,45 +0,0 @@ -# -# Copyright 2019 WeBank -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. - -apiVersion: v1 -data: - linkis.properties: |+ - wds.linkis.test.mode=true - wds.linkis.server.version=v1 - wds.linkis.ldap.proxy.url= - wds.linkis.ldap.proxy.baseDN= - wds.linkis.server.restful.uri=/ - wds.linkis.server.web.session.timeout=1h - wds.linkis.gateway.conf.enable.proxy.user=false - wds.linkis.gateway.conf.url.pass.auth=/dws/ - wds.linkis.gateway.admin.user=root - wds.linkis.gateway.conf.enable.token.auth=true - #wds.linkis.gateway.conf.enable.sso=true - wds.linkis.gateway.conf.enable.cloud-publicservice=true - -kind: ConfigMap -metadata: - managedFields: - - apiVersion: v1 - fieldsType: FieldsV1 - fieldsV1: - f:data: - .: {} - f:linkis-gateway.properties: {} - manager: kubectl - operation: Update - name: linkis-gateway-config - namespace: default - diff --git a/k8s/linkis-gateway-deployment.yaml b/k8s/linkis-gateway-deployment.yaml deleted file mode 100644 index 9d2690b5af47382d75dbffef00c8b00583fdba15..0000000000000000000000000000000000000000 --- a/k8s/linkis-gateway-deployment.yaml +++ /dev/null @@ -1,110 +0,0 @@ -# -# Copyright 2019 WeBank -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-apiVersion: apps/v1 -kind: Deployment -metadata: - name: linkis-gateway-deployment -spec: - replicas: 1 - selector: - matchLabels: - app: linkis-gateway - strategy: - rollingUpdate: - maxSurge: 1 - maxUnavailable: 1 - type: RollingUpdate - - template: - metadata: - labels: - app: linkis-gateway - spec: - affinity: - podAntiAffinity: - requiredDuringSchedulingIgnoredDuringExecution: - - labelSelector: - matchExpressions: - - key: "app" - operator: In - values: - - linkis-gateway - topologyKey: "kubernetes.io/hostname" - - containers: - - name: linkis-gateway - image: wedatasphere/linkis:linkis-gateway-0.10.0 - lifecycle: - preStop: - exec: - command: ["sh","/opt/linkis/linkis-gateway/bin/stop.sh"] - imagePullPolicy: Always - ports: - - name: http - containerPort: 14001 - protocol: TCP - livenessProbe: - tcpSocket: - port: 14001 - initialDelaySeconds: 15 - periodSeconds: 20 - env: - - name: EUREKA_URL - valueFrom: - configMapKeyRef: - name: eureka-config - key: eurekaUrl - - name: SERVER_HEAP_SIZE - value: 1024M - - name: START_PORT - value: "14001" - volumeMounts: - - name: linkis-gateway-config - mountPath: /opt/linkis/conf -# - name: resultset -# mountPath: /opt/linkis/data - - name: varlog - mountPath: /opt/linkis/linkis-gateway/logs - - name: hadoop-config - mountPath: /opt/hadoop/hadoop-2.7.7/etc/hadoop - - name: hive-config - mountPath: /opt/hive/apache-hive-2.3.6-bin/conf - - name: spark-config - mountPath: /opt/spark/spark-2.4.4-bin-hadoop2.7/conf - imagePullSecrets: - - name: registry-key - volumes: - - name: linkis-gateway-config - configMap: - name: linkis-gateway-config -# - name: resultset -# nfs: -# path: /data/k8s -# server: 10.206.0.12 - - name: varlog - hostPath: - path: /var/log - - name: eureka-config - configMap: - name: eureka-config - - name: hadoop-config - hostPath: - path: /opt/hadoop/hadoop-2.7.7/etc/hadoop - - name: hive-config - hostPath: - path: /opt/hive/apache-hive-2.3.6-bin/conf - - name: spark-config - hostPath: - path: 
/opt/spark/spark-2.4.4-bin-hadoop2.7/conf diff --git a/k8s/linkis-gateway-service.yaml b/k8s/linkis-gateway-service.yaml deleted file mode 100644 index 73b2aeb28ac8623bc152db0c5a46464d28296e15..0000000000000000000000000000000000000000 --- a/k8s/linkis-gateway-service.yaml +++ /dev/null @@ -1,28 +0,0 @@ -# -# Copyright 2019 WeBank -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -apiVersion: v1 -kind: Service -metadata: - name: linkis-gateway-service -spec: - selector: - app: linkis-gateway - ports: - - name: http - port: 14001 - protocol: TCP - targetPort: 14001 - nodePort: 32111 - type: NodePort diff --git a/k8s/linkis-mdm-server-configmap.yaml b/k8s/linkis-mdm-server-configmap.yaml deleted file mode 100644 index 6631a493475b8efc7a8eb5ff69cc1300bf5c6e7b..0000000000000000000000000000000000000000 --- a/k8s/linkis-mdm-server-configmap.yaml +++ /dev/null @@ -1,57 +0,0 @@ -# -# Copyright 2019 WeBank -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -apiVersion: v1 -data: - linkis.properties: |+ - # - # Copyright 2019 WeBank - # Licensed under the Apache License, Version 2.0 (the "License"); - # you may not use this file except in compliance with the License. - # You may obtain a copy of the License at - # http://www.apache.org/licenses/LICENSE-2.0 - # Unless required by applicable law or agreed to in writing, software - # distributed under the License is distributed on an "AS IS" BASIS, - # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - # See the License for the specific language governing permissions and - # limitations under the License. - # - - wds.linkis.server.mybatis.mapperLocations= - wds.linkis.server.mybatis.typeAliasesPackage= - wds.linkis.server.mybatis.BasePackage= - wds.linkis.server.restful.scan.packages=com.webank.wedatasphere.linkis.metadatamanager.server.restful - - #sit - wds.linkis.server.version=v1 - - #test - wds.linkis.test.mode=true - wds.linkis.test.user=davidhua - -kind: ConfigMap -metadata: - managedFields: - - apiVersion: v1 - fieldsType: FieldsV1 - fieldsV1: - f:data: - .: {} - f:linkis-bml.properties: {} - manager: kubectl - operation: Update - name: linkis-mdm-server-config - namespace: default - diff --git a/k8s/linkis-mdm-server-deployment.yaml b/k8s/linkis-mdm-server-deployment.yaml deleted file mode 100644 index 202aee5e98e196a9cf60e0631e884b3798685e5e..0000000000000000000000000000000000000000 --- a/k8s/linkis-mdm-server-deployment.yaml +++ /dev/null @@ -1,114 +0,0 @@ -# -# Copyright 2019 WeBank -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -apiVersion: apps/v1 -kind: Deployment -metadata: - name: linkis-mdm-server-deployment -spec: - replicas: 1 - selector: - matchLabels: - app: linkis-mdm-server - # release: dev - strategy: - rollingUpdate: - maxSurge: 1 - maxUnavailable: 1 - type: RollingUpdate - template: - metadata: - labels: - app: linkis-mdm-server - # release: dev - spec: - affinity: - podAntiAffinity: - requiredDuringSchedulingIgnoredDuringExecution: - - labelSelector: - matchExpressions: - - key: "app" - operator: In - values: - - linkis-mdm-server - topologyKey: "kubernetes.io/hostname" - containers: - - name: linkis-mdm-server - image: wedatasphere/linkis:linkis-mdm-server-0.10.0 - lifecycle: - preStop: - exec: - command: ["sh","/opt/linkis/linkis-mdm-server/bin/stop.sh"] - imagePullPolicy: Always - ports: - - name: http - containerPort: 22001 - livenessProbe: - tcpSocket: - port: 22001 - initialDelaySeconds: 15 - periodSeconds: 20 - env: - - name: eurekaurl - valueFrom: - configMapKeyRef: - name: eureka-config - key: eurekaUrl - - name: EUREKA_URL - valueFrom: - configMapKeyRef: - name: eureka-config - key: eurekaUrl - - name: SERVER_HEAP_SIZE - value: 1024M - - name: START_PORT - value: "22001" - volumeMounts: - - name: linkis-mdm-server-config - mountPath: /opt/linkis/conf - # - name: resultset - # mountPath: /opt/linkis/data - - name: varlog - mountPath: /opt/linkis/linkis-mdm-server/logs - - name: hadoop-config - mountPath: /opt/hadoop/hadoop-2.7.7/etc/hadoop - - name: hive-config - mountPath: /opt/hive/apache-hive-2.3.6-bin/conf - - name: spark-config - mountPath: /opt/spark/spark-2.4.4-bin-hadoop2.7/conf - imagePullSecrets: - - name: registry-key - volumes: - - name: linkis-mdm-server-config - configMap: - name: linkis-mdm-server-config - # - name: resultset - # nfs: - # path: /data/k8s - # server: 10.206.0.12 - - name: varlog - hostPath: - path: /var/log - - name: eureka-config 
- configMap: - name: eureka-config - - name: hadoop-config - hostPath: - path: /opt/hadoop/hadoop-2.7.7/etc/hadoop - - name: hive-config - hostPath: - path: /opt/hive/apache-hive-2.3.6-bin/conf - - name: spark-config - hostPath: - path: /opt/spark/spark-2.4.4-bin-hadoop2.7/conf diff --git a/k8s/linkis-mdm-service-es-configmap.yaml b/k8s/linkis-mdm-service-es-configmap.yaml deleted file mode 100644 index 6ea3ee0b39d2fdbf8d6783b8183adb7e129ce11f..0000000000000000000000000000000000000000 --- a/k8s/linkis-mdm-service-es-configmap.yaml +++ /dev/null @@ -1,55 +0,0 @@ -# -# Copyright 2019 WeBank -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -apiVersion: v1 -data: - linkis.properties: |+ - # - # Copyright 2019 WeBank - # Licensed under the Apache License, Version 2.0 (the "License"); - # you may not use this file except in compliance with the License. - # You may obtain a copy of the License at - # http://www.apache.org/licenses/LICENSE-2.0 - # Unless required by applicable law or agreed to in writing, software - # distributed under the License is distributed on an "AS IS" BASIS, - # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - # See the License for the specific language governing permissions and - # limitations under the License. 
- # - - wds.linkis.server.mybatis.mapperLocations= - wds.linkis.server.mybatis.typeAliasesPackage= - wds.linkis.server.mybatis.BasePackage= - wds.linkis.server.restful.scan.packages= - - #sit - wds.linkis.server.version=v1 - - - -kind: ConfigMap -metadata: - managedFields: - - apiVersion: v1 - fieldsType: FieldsV1 - fieldsV1: - f:data: - .: {} - f:linkis-bml.properties: {} - manager: kubectl - operation: Update - name: linkis-mdm-service-es-config - namespace: default - diff --git a/k8s/linkis-mdm-service-es-deployment.yaml b/k8s/linkis-mdm-service-es-deployment.yaml deleted file mode 100644 index 01a91abea5132357bc0e6bdbc4e00d512057c76b..0000000000000000000000000000000000000000 --- a/k8s/linkis-mdm-service-es-deployment.yaml +++ /dev/null @@ -1,114 +0,0 @@ -# -# Copyright 2019 WeBank -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-apiVersion: apps/v1 -kind: Deployment -metadata: - name: linkis-mdm-service-es-deployment -spec: - replicas: 1 - selector: - matchLabels: - app: linkis-mdm-service-es - # release: dev - strategy: - rollingUpdate: - maxSurge: 1 - maxUnavailable: 1 - type: RollingUpdate - template: - metadata: - labels: - app: linkis-mdm-service-es - # release: dev - spec: - affinity: - podAntiAffinity: - requiredDuringSchedulingIgnoredDuringExecution: - - labelSelector: - matchExpressions: - - key: "app" - operator: In - values: - - linkis-mdm-service-es - topologyKey: "kubernetes.io/hostname" - containers: - - name: linkis-mdm-service-es - image: wedatasphere/linkis:linkis-mdm-service-es-0.10.0 - lifecycle: - preStop: - exec: - command: ["sh","/opt/linkis/linkis-mdm-service-es/bin/stop.sh"] - imagePullPolicy: Always - ports: - - name: http - containerPort: 22002 - livenessProbe: - tcpSocket: - port: 22002 - initialDelaySeconds: 15 - periodSeconds: 20 - env: - - name: eurekaurl - valueFrom: - configMapKeyRef: - name: eureka-config - key: eurekaUrl - - name: EUREKA_URL - valueFrom: - configMapKeyRef: - name: eureka-config - key: eurekaUrl - - name: SERVER_HEAP_SIZE - value: 1024M - - name: START_PORT - value: "22002" - volumeMounts: - - name: linkis-mdm-service-es-config - mountPath: /opt/linkis/conf - # - name: resultset - # mountPath: /opt/linkis/data - - name: varlog - mountPath: /opt/linkis/linkis-mdm-service-es/logs - - name: hadoop-config - mountPath: /opt/hadoop/hadoop-2.7.7/etc/hadoop - - name: hive-config - mountPath: /opt/hive/apache-hive-2.3.6-bin/conf - - name: spark-config - mountPath: /opt/spark/spark-2.4.4-bin-hadoop2.7/conf - imagePullSecrets: - - name: registry-key - volumes: - - name: linkis-mdm-service-es-config - configMap: - name: linkis-mdm-service-es-config - # - name: resultset - # nfs: - # path: /data/k8s - # server: 10.206.0.12 - - name: varlog - hostPath: - path: /var/log - - name: eureka-config - configMap: - name: eureka-config - - name: hadoop-config - 
hostPath: - path: /opt/hadoop/hadoop-2.7.7/etc/hadoop - - name: hive-config - hostPath: - path: /opt/hive/apache-hive-2.3.6-bin/conf - - name: spark-config - hostPath: - path: /opt/spark/spark-2.4.4-bin-hadoop2.7/conf diff --git a/k8s/linkis-mdm-service-hive-configmap.yaml b/k8s/linkis-mdm-service-hive-configmap.yaml deleted file mode 100644 index 518b2f480dac48fcb737f8481eb482fd75280c78..0000000000000000000000000000000000000000 --- a/k8s/linkis-mdm-service-hive-configmap.yaml +++ /dev/null @@ -1,59 +0,0 @@ -# -# Copyright 2019 WeBank -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -apiVersion: v1 -data: - linkis.properties: |+ - # - # Copyright 2019 WeBank - # Licensed under the Apache License, Version 2.0 (the "License"); - # you may not use this file except in compliance with the License. - # You may obtain a copy of the License at - # http://www.apache.org/licenses/LICENSE-2.0 - # Unless required by applicable law or agreed to in writing, software - # distributed under the License is distributed on an "AS IS" BASIS, - # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - # See the License for the specific language governing permissions and - # limitations under the License. 
- # - - wds.linkis.server.mybatis.mapperLocations= - wds.linkis.server.mybatis.typeAliasesPackage= - wds.linkis.server.mybatis.BasePackage= - wds.linkis.server.restful.scan.packages= - - #sit - wds.linkis.server.version=v1 - - #bml - wds.linkis.gateway.ip= - wds.linkis.gateway.port= - - - -kind: ConfigMap -metadata: - managedFields: - - apiVersion: v1 - fieldsType: FieldsV1 - fieldsV1: - f:data: - .: {} - f:linkis-bml.properties: {} - manager: kubectl - operation: Update - name: linkis-mdm-service-hive-config - namespace: default - diff --git a/k8s/linkis-mdm-service-hive-deployment.yaml b/k8s/linkis-mdm-service-hive-deployment.yaml deleted file mode 100644 index 05efdff2f8564ffb2ca390b8cbb6414ea087c1e4..0000000000000000000000000000000000000000 --- a/k8s/linkis-mdm-service-hive-deployment.yaml +++ /dev/null @@ -1,114 +0,0 @@ -# -# Copyright 2019 WeBank -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-apiVersion: apps/v1 -kind: Deployment -metadata: - name: linkis-mdm-service-hive-deployment -spec: - replicas: 1 - selector: - matchLabels: - app: linkis-mdm-service-hive - # release: dev - strategy: - rollingUpdate: - maxSurge: 1 - maxUnavailable: 1 - type: RollingUpdate - template: - metadata: - labels: - app: linkis-mdm-service-hive - # release: dev - spec: - affinity: - podAntiAffinity: - requiredDuringSchedulingIgnoredDuringExecution: - - labelSelector: - matchExpressions: - - key: "app" - operator: In - values: - - linkis-mdm-service-hive - topologyKey: "kubernetes.io/hostname" - containers: - - name: linkis-mdm-service-hive - image: wedatasphere/linkis:linkis-mdm-service-hive-0.10.0 - lifecycle: - preStop: - exec: - command: ["sh","/opt/linkis/linkis-mdm-service-hive/bin/stop.sh"] - imagePullPolicy: Always - ports: - - name: http - containerPort: 22003 - livenessProbe: - tcpSocket: - port: 22003 - initialDelaySeconds: 15 - periodSeconds: 20 - env: - - name: eurekaurl - valueFrom: - configMapKeyRef: - name: eureka-config - key: eurekaUrl - - name: EUREKA_URL - valueFrom: - configMapKeyRef: - name: eureka-config - key: eurekaUrl - - name: SERVER_HEAP_SIZE - value: 1024M - - name: START_PORT - value: "22003" - volumeMounts: - - name: linkis-mdm-service-hive-config - mountPath: /opt/linkis/conf - # - name: resultset - # mountPath: /opt/linkis/data - - name: varlog - mountPath: /opt/linkis/linkis-mdm-service-hive/logs - - name: hadoop-config - mountPath: /opt/hadoop/hadoop-2.7.7/etc/hadoop - - name: hive-config - mountPath: /opt/hive/apache-hive-2.3.6-bin/conf - - name: spark-config - mountPath: /opt/spark/spark-2.4.4-bin-hadoop2.7/conf - imagePullSecrets: - - name: registry-key - volumes: - - name: linkis-mdm-service-hive-config - configMap: - name: linkis-mdm-service-hive-config - # - name: resultset - # nfs: - # path: /data/k8s - # server: 10.206.0.12 - - name: varlog - hostPath: - path: /var/log - - name: eureka-config - configMap: - name: eureka-config - - 
name: hadoop-config - hostPath: - path: /opt/hadoop/hadoop-2.7.7/etc/hadoop - - name: hive-config - hostPath: - path: /opt/hive/apache-hive-2.3.6-bin/conf - - name: spark-config - hostPath: - path: /opt/spark/spark-2.4.4-bin-hadoop2.7/conf diff --git a/k8s/linkis-mdm-service-mysql-configmap.yaml b/k8s/linkis-mdm-service-mysql-configmap.yaml deleted file mode 100644 index 096d8ee88cdb3d04b105f5b3d8a7ac8b8129025f..0000000000000000000000000000000000000000 --- a/k8s/linkis-mdm-service-mysql-configmap.yaml +++ /dev/null @@ -1,53 +0,0 @@ -# -# Copyright 2019 WeBank -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -apiVersion: v1 -data: - linkis.properties: |+ - # - # Copyright 2019 WeBank - # Licensed under the Apache License, Version 2.0 (the "License"); - # you may not use this file except in compliance with the License. - # You may obtain a copy of the License at - # http://www.apache.org/licenses/LICENSE-2.0 - # Unless required by applicable law or agreed to in writing, software - # distributed under the License is distributed on an "AS IS" BASIS, - # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - # See the License for the specific language governing permissions and - # limitations under the License. 
- # - - wds.linkis.server.mybatis.mapperLocations= - wds.linkis.server.mybatis.typeAliasesPackage= - wds.linkis.server.mybatis.BasePackage= - wds.linkis.server.restful.scan.packages= - - #sit - wds.linkis.server.version=v1 - -kind: ConfigMap -metadata: - managedFields: - - apiVersion: v1 - fieldsType: FieldsV1 - fieldsV1: - f:data: - .: {} - f:linkis-bml.properties: {} - manager: kubectl - operation: Update - name: linkis-mdm-service-mysql-config - namespace: default - diff --git a/k8s/linkis-mdm-service-mysql-deployment.yaml b/k8s/linkis-mdm-service-mysql-deployment.yaml deleted file mode 100644 index a1a4778e2168665f8a71ec7f4ec1b4abc2947b4c..0000000000000000000000000000000000000000 --- a/k8s/linkis-mdm-service-mysql-deployment.yaml +++ /dev/null @@ -1,114 +0,0 @@ -# -# Copyright 2019 WeBank -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-apiVersion: apps/v1 -kind: Deployment -metadata: - name: linkis-mdm-service-mysql-deployment -spec: - replicas: 1 - selector: - matchLabels: - app: linkis-mdm-service-mysql - # release: dev - strategy: - rollingUpdate: - maxSurge: 1 - maxUnavailable: 1 - type: RollingUpdate - template: - metadata: - labels: - app: linkis-mdm-service-mysql - # release: dev - spec: - affinity: - podAntiAffinity: - requiredDuringSchedulingIgnoredDuringExecution: - - labelSelector: - matchExpressions: - - key: "app" - operator: In - values: - - linkis-mdm-service-mysql - topologyKey: "kubernetes.io/hostname" - containers: - - name: linkis-mdm-service-mysql - image: wedatasphere/linkis:linkis-mdm-service-mysql-0.10.0 - lifecycle: - preStop: - exec: - command: ["sh","/opt/linkis/linkis-mdm-service-mysql/bin/stop.sh"] - imagePullPolicy: Always - ports: - - name: http - containerPort: 22004 - livenessProbe: - tcpSocket: - port: 22004 - initialDelaySeconds: 15 - periodSeconds: 20 - env: - - name: eurekaurl - valueFrom: - configMapKeyRef: - name: eureka-config - key: eurekaUrl - - name: EUREKA_URL - valueFrom: - configMapKeyRef: - name: eureka-config - key: eurekaUrl - - name: SERVER_HEAP_SIZE - value: 1024M - - name: START_PORT - value: "22004" - volumeMounts: - - name: linkis-mdm-service-mysql-config - mountPath: /opt/linkis/conf - # - name: resultset - # mountPath: /opt/linkis/data - - name: varlog - mountPath: /opt/linkis/linkis-mdm-service-mysql/logs - - name: hadoop-config - mountPath: /opt/hadoop/hadoop-2.7.7/etc/hadoop - - name: hive-config - mountPath: /opt/hive/apache-hive-2.3.6-bin/conf - - name: spark-config - mountPath: /opt/spark/spark-2.4.4-bin-hadoop2.7/conf - imagePullSecrets: - - name: registry-key - volumes: - - name: linkis-mdm-service-mysql-config - configMap: - name: linkis-mdm-service-mysql-config - # - name: resultset - # nfs: - # path: /data/k8s - # server: 10.206.0.12 - - name: varlog - hostPath: - path: /var/log - - name: eureka-config - configMap: - name: 
eureka-config - - name: hadoop-config - hostPath: - path: /opt/hadoop/hadoop-2.7.7/etc/hadoop - - name: hive-config - hostPath: - path: /opt/hive/apache-hive-2.3.6-bin/conf - - name: spark-config - hostPath: - path: /opt/spark/spark-2.4.4-bin-hadoop2.7/conf diff --git a/k8s/linkis-metadata-configmap.yaml b/k8s/linkis-metadata-configmap.yaml deleted file mode 100644 index 7f1c6623eb05013788f0906c394f9e195d7788de..0000000000000000000000000000000000000000 --- a/k8s/linkis-metadata-configmap.yaml +++ /dev/null @@ -1,64 +0,0 @@ -# -# Copyright 2019 WeBank -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-apiVersion: v1 -data: - linkis.properties: |+ - - #wds.linkis.test.mode=true - - wds.linkis.server.mybatis.datasource.url=${mysql.url} - - wds.linkis.server.mybatis.datasource.username=${mysql.username} - - wds.linkis.server.mybatis.datasource.password=${mysql.password} - - - wds.linkis.log.clear=true - wds.linkis.server.version=v1 - - ##restful - wds.linkis.server.restful.scan.packages=com.webank.wedatasphere.linkis.metadata.restful.api - - ##mybatis - wds.linkis.server.mybatis.mapperLocations=classpath:com/webank/wedatasphere/linkis/metadata/hive/dao/impl/*.xml,com/webank/wedatasphere/linkis/metadata/dao/impl/*.xml - - wds.linkis.server.mybatis.typeAliasesPackage=com.webank.wedatasphere.linkis.metadata.domain - - wds.linkis.server.mybatis.BasePackage=com.webank.wedatasphere.linkis.metadata.hive.dao,com.webank.wedatasphere.linkis.metadata.dao - - ##datasource - hive.meta.url=${hivemeta.url} - hive.meta.user=${hivemeta.username} - hive.meta.password=${hivemeta.password} - - #hive.config.dir - - - wds.linkis.engine.creation.wait.time.max=2m - -kind: ConfigMap -metadata: - managedFields: - - apiVersion: v1 - fieldsType: FieldsV1 - fieldsV1: - f:data: - .: {} - f:linkis-metadata.properties: {} - manager: kubectl - operation: Update - name: linkis-metadata-config - namespace: default - diff --git a/k8s/linkis-metadata-deployment.yaml b/k8s/linkis-metadata-deployment.yaml deleted file mode 100644 index 203b5abdef70bd058fe7c880e1b75affc32aeaa1..0000000000000000000000000000000000000000 --- a/k8s/linkis-metadata-deployment.yaml +++ /dev/null @@ -1,114 +0,0 @@ -# -# Copyright 2019 WeBank -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -apiVersion: apps/v1 -kind: Deployment -metadata: - name: linkis-metadata-deployment -spec: - replicas: 1 - selector: - matchLabels: - app: linkis-metadata - # release: dev - strategy: - rollingUpdate: - maxSurge: 1 - maxUnavailable: 1 - type: RollingUpdate - template: - metadata: - labels: - app: linkis-metadata - # release: dev - spec: - affinity: - podAntiAffinity: - requiredDuringSchedulingIgnoredDuringExecution: - - labelSelector: - matchExpressions: - - key: "app" - operator: In - values: - - linkis-metadata - topologyKey: "kubernetes.io/hostname" - containers: - - name: linkis-metadata - image: wedatasphere/linkis:linkis-metadata-0.10.0 - lifecycle: - preStop: - exec: - command: ["sh","/opt/linkis/linkis-metadata/bin/stop.sh"] - imagePullPolicy: Always - ports: - - name: http - containerPort: 14002 - livenessProbe: - tcpSocket: - port: 14002 - initialDelaySeconds: 15 - periodSeconds: 20 - env: - - name: eurekaurl - valueFrom: - configMapKeyRef: - name: eureka-config - key: eurekaUrl - - name: EUREKA_URL - valueFrom: - configMapKeyRef: - name: eureka-config - key: eurekaUrl - - name: SERVER_HEAP_SIZE - value: 1024M - - name: START_PORT - value: "14002" - volumeMounts: - - name: linkis-metadata-config - mountPath: /opt/linkis/conf - # - name: resultset - # mountPath: /opt/linkis/data - - name: varlog - mountPath: /opt/linkis/linkis-metadata/logs - - name: hadoop-config - mountPath: /opt/hadoop/hadoop-2.7.7/etc/hadoop - - name: hive-config - mountPath: /opt/hive/apache-hive-2.3.6-bin/conf - - name: spark-config - mountPath: 
/opt/spark/spark-2.4.4-bin-hadoop2.7/conf - imagePullSecrets: - - name: registry-key - volumes: - - name: linkis-metadata-config - configMap: - name: linkis-metadata-config - # - name: resultset - # nfs: - # path: /data/k8s - # server: 10.206.0.12 - - name: varlog - hostPath: - path: /var/log - - name: eureka-config - configMap: - name: eureka-config - - name: hadoop-config - hostPath: - path: /opt/hadoop/hadoop-2.7.7/etc/hadoop - - name: hive-config - hostPath: - path: /opt/hive/apache-hive-2.3.6-bin/conf - - name: spark-config - hostPath: - path: /opt/spark/spark-2.4.4-bin-hadoop2.7/conf diff --git a/k8s/linkis-publicservice-configmap.yaml b/k8s/linkis-publicservice-configmap.yaml deleted file mode 100644 index d3ae969ee7d007d8f1206553beb76902b12971db..0000000000000000000000000000000000000000 --- a/k8s/linkis-publicservice-configmap.yaml +++ /dev/null @@ -1,69 +0,0 @@ -# -# Copyright 2019 WeBank -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-apiVersion: v1 -data: - linkis.properties: |+ - - - #wds.linkis.test.mode=true - - wds.linkis.server.mybatis.datasource.url=${mysql.url} - - wds.linkis.server.mybatis.datasource.username=${mysql.username} - - wds.linkis.server.mybatis.datasource.password=${mysql.password} - - - wds.linkis.log.clear=true - wds.linkis.server.version=v1 - - ##restful - wds.linkis.server.restful.scan.packages=com.webank.wedatasphere.linkis.jobhistory.restful,com.webank.wedatasphere.linkis.variable.restful,com.webank.wedatasphere.linkis.application.restful,com.webank.wedatasphere.linkis.configuration.restful,com.webank.wedatasphere.linkis.udf.api,com.webank.wedatasphere.linkis.filesystem.restful - - ##mybatis - wds.linkis.server.mybatis.mapperLocations=classpath:com/webank/wedatasphere/linkis/jobhistory/dao/impl/*.xml,classpath:com/webank/wedatasphere/linkis/variable/dao/impl/*.xml,classpath:com/webank/wedatasphere/linkis/application/dao/impl/*.xml,classpath:com/webank/wedatasphere/linkis/configuration/dao/impl/*.xml,classpath:com/webank/wedatasphere/linkis/udf/dao/impl/*.xml - - wds.linkis.server.mybatis.typeAliasesPackage=com.webank.wedatasphere.linkis.application.entity,com.webank.wedatasphere.linkis.configuration.entity,com.webank.wedatasphere.linkis.query.entity,com.webank.wedatasphere.linkis.udf.entity,com.webank.wedatasphere.linkis.variable.entity - - wds.linkis.server.mybatis.BasePackage=com.webank.wedatasphere.linkis.jobhistory.dao,com.webank.wedatasphere.linkis.variable.dao,com.webank.wedatasphere.linkis.application.dao,com.webank.wedatasphere.linkis.configuration.dao,com.webank.wedatasphere.linkis.udf.dao - - ##workspace - wds.linkis.workspace.filesystem.localuserrootpath=hdfs:///tmp/ - wds.linkis.workspace.filesystem.hdfsuserrootpath.prefix=hdfs:///tmp/ - wds.linkis.workspace.filesystem.hdfsuserrootpath.suffix=/linkis/ - - #hadoopconfig - #hadoop.config.dir - - ##UDF - wds.linkis.storage.is.share.node=true - wds.linkis.gateway.ip=linkis-gateway.sit.ihomefnt.org - 
wds.linkis.gateway.port=13429 - - -kind: ConfigMap -metadata: - managedFields: - - apiVersion: v1 - fieldsType: FieldsV1 - fieldsV1: - f:data: - .: {} - f:linkis-publicservice.properties: {} - manager: kubectl - operation: Update - name: linkis-publicservice-config - namespace: default - diff --git a/k8s/linkis-publicservice-deployment.yaml b/k8s/linkis-publicservice-deployment.yaml deleted file mode 100644 index a3865feea67ade2e841a3a6e820b40111146f4f9..0000000000000000000000000000000000000000 --- a/k8s/linkis-publicservice-deployment.yaml +++ /dev/null @@ -1,113 +0,0 @@ -# -# Copyright 2019 WeBank -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-apiVersion: apps/v1 -kind: Deployment -metadata: - name: linkis-publicservice-deployment -spec: - replicas: 1 - selector: - matchLabels: - app: linkis-publicservice - strategy: - rollingUpdate: - maxSurge: 1 - maxUnavailable: 1 - type: RollingUpdate - template: - metadata: - labels: - app: linkis-publicservice - release: dev - spec: - affinity: - podAntiAffinity: - requiredDuringSchedulingIgnoredDuringExecution: - - labelSelector: - matchExpressions: - - key: "app" - operator: In - values: - - linkis-publicservice - topologyKey: "kubernetes.io/hostname" - containers: - - name: linkis-publicservice - image: wedatasphere/linkis:linkis-publicservice-0.10.0 - imagePullPolicy: Always - lifecycle: - preStop: - exec: - command: ["sh","/opt/linkis/linkis-publicservice/bin/stop.sh"] - ports: - - name: http - containerPort: 14003 - livenessProbe: - tcpSocket: - port: 14003 - initialDelaySeconds: 30 - periodSeconds: 60 - env: - - name: eurekaurl - valueFrom: - configMapKeyRef: - name: eureka-config - key: eurekaUrl - - name: EUREKA_URL - valueFrom: - configMapKeyRef: - name: eureka-config - key: eurekaUrl - - name: SERVER_HEAP_SIZE - value: 1024M - - name: START_PORT - value: "14003" - volumeMounts: - - name: linkis-publicservice-config - mountPath: /opt/linkis/conf - # - name: resultset - # mountPath: /opt/linkis/data - - name: varlog - mountPath: /opt/linkis/linkis-publicservice/logs - - name: hadoop-config - mountPath: /opt/hadoop/hadoop-2.7.7/etc/hadoop - - name: hive-config - mountPath: /opt/hive/apache-hive-2.3.6-bin/conf - - name: spark-config - mountPath: /opt/spark/spark-2.4.4-bin-hadoop2.7/conf - imagePullSecrets: - - name: registry-key - volumes: - - name: linkis-publicservice-config - configMap: - name: linkis-publicservice-config - # - name: resultset - # nfs: - # path: /data/k8s - # server: 10.206.0.12 - - name: varlog - hostPath: - path: /var/log - - name: eureka-config - configMap: - name: eureka-config - - name: hadoop-config - hostPath: - path: 
/opt/hadoop/hadoop-2.7.7/etc/hadoop - - name: hive-config - hostPath: - path: /opt/hive/apache-hive-2.3.6-bin/conf - - name: spark-config - hostPath: - path: /opt/spark/spark-2.4.4-bin-hadoop2.7/conf diff --git a/k8s/linkis-resourcemanager-configmap.yaml b/k8s/linkis-resourcemanager-configmap.yaml deleted file mode 100644 index 0c498bf78bb8f772feafc951d5c6141cf547af30..0000000000000000000000000000000000000000 --- a/k8s/linkis-resourcemanager-configmap.yaml +++ /dev/null @@ -1,46 +0,0 @@ -# -# Copyright 2019 WeBank -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-apiVersion: v1 -data: - linkis.properties: |+ - #wds.linkis.test.mode=true - wds.linkis.server.mybatis.datasource.url=${mysql.url} - wds.linkis.server.mybatis.datasource.username=${mysql.username} - wds.linkis.server.mybatis.datasource.password=${mysql.password} - wds.linkis.log.clear=true - wds.linkis.server.version=v1 - ##restful - wds.linkis.server.restful.scan.packages=com.webank.wedatasphere.linkis.resourcemanager.restful - ##mybatis - wds.linkis.server.mybatis.mapperLocations=classpath:com/webank/wedatasphere/linkis/resourcemanager/dao/impl/*.xml - wds.linkis.server.mybatis.typeAliasesPackage=com.webank.wedatasphere.linkis.resourcemanager.dao - wds.linkis.server.mybatis.BasePackage=com.webank.wedatasphere.linkis.resourcemanager.dao - wds.linkis.yarn.rm.web.address=http://zt-hadoop.sit.ihomefnt.org:18088 - #hadoop.config.dir= - -kind: ConfigMap -metadata: - managedFields: - - apiVersion: v1 - fieldsType: FieldsV1 - fieldsV1: - f:data: - .: {} - f:linkis-resourcemanager.properties: {} - manager: kubectl - operation: Update - name: linkis-resourcemanager-config - namespace: default - diff --git a/k8s/linkis-resourcemanager-deployment.yaml b/k8s/linkis-resourcemanager-deployment.yaml deleted file mode 100644 index c229fdf0227edc73fcb1d7e08b094bce76d0c09a..0000000000000000000000000000000000000000 --- a/k8s/linkis-resourcemanager-deployment.yaml +++ /dev/null @@ -1,115 +0,0 @@ -# -# Copyright 2019 WeBank -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -apiVersion: apps/v1 -kind: Deployment -metadata: - name: linkis-resourcemanager-deployment -spec: - replicas: 1 - selector: - matchLabels: - app: linkis-resourcemanager - # release: dev - strategy: - rollingUpdate: - maxSurge: 1 - maxUnavailable: 1 - type: RollingUpdate - template: - metadata: - labels: - app: linkis-resourcemanager - release: dev - spec: - affinity: - podAntiAffinity: - requiredDuringSchedulingIgnoredDuringExecution: - - labelSelector: - matchExpressions: - - key: "app" - operator: In - values: - - linkis-resourcemanager - topologyKey: "kubernetes.io/hostname" - containers: - - name: linkis-resourcemanager - image: wedatasphere/linkis:linkis-resourcemanager-0.10.0 - lifecycle: - preStop: - exec: - command: ["sh","/opt/linkis/linkis-resourcemanager/bin/stop.sh"] - imagePullPolicy: Always - ports: - - name: http - containerPort: 14004 - livenessProbe: - tcpSocket: - port: 14004 - initialDelaySeconds: 15 - periodSeconds: 20 - env: - - name: eurekaurl - valueFrom: - configMapKeyRef: - name: eureka-config - key: eurekaUrl - - name: EUREKA_URL - valueFrom: - configMapKeyRef: - name: eureka-config - key: eurekaUrl - - name: SERVER_HEAP_SIZE - value: 1024M - - name: START_PORT - value: "14004" - volumeMounts: - - name: linkis-resourcemanager-config - mountPath: /opt/linkis/conf - # - name: resultset - # mountPath: /opt/linkis/data - - name: varlog - mountPath: /opt/linkis/linkis-resourcemanager/logs - - name: hadoop-config - mountPath: /opt/hadoop/hadoop-2.7.7/etc/hadoop - - name: hive-config - mountPath: /opt/hive/apache-hive-2.3.6-bin/conf - - name: spark-config - mountPath: /opt/spark/spark-2.4.4-bin-hadoop2.7/conf - imagePullSecrets: - - name: registry-key - volumes: - - name: linkis-resourcemanager-config - configMap: - name: linkis-resourcemanager-config - # - name: resultset - # nfs: - # path: /data/k8s - # server: 10.206.0.12 - - name: varlog - hostPath: - path: /var/log - - name: eureka-config - configMap: - name: eureka-config - - name: 
hadoop-config - hostPath: - path: /opt/hadoop/hadoop-2.7.7/etc/hadoop - - name: hive-config - hostPath: - path: /opt/hive/apache-hive-2.3.6-bin/conf - - name: spark-config - hostPath: - path: /opt/spark/spark-2.4.4-bin-hadoop2.7/conf diff --git a/k8s/linkis-ujes-hive-enginemanager-configmap.yaml b/k8s/linkis-ujes-hive-enginemanager-configmap.yaml deleted file mode 100644 index f83fc225f1f1313e7005abdc622060b65a293291..0000000000000000000000000000000000000000 --- a/k8s/linkis-ujes-hive-enginemanager-configmap.yaml +++ /dev/null @@ -1,48 +0,0 @@ -# -# Copyright 2019 WeBank -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -apiVersion: v1 -data: - linkis.properties: |+ - - - wds.linkis.server.restful.scan.packages=com.webank.wedatasphere.linkis.entrance.restful - wds.linkis.engine.application.name=hiveEngine - wds.linkis.server.component.exclude.packages=com.webank.wedatasphere.linkis.engine.,com.webank.wedatasphere.linkis.udf. 
- wds.linkis.server.version=v1 - - #sudo script - wds.linkis.enginemanager.sudo.script=/appcom/Install/WillinkInstall/linkis-ujes-spark-enginemanager/bin/rootScript.sh - - #hadoop config - #hadoop.config.dir=/appcom/config/hadoop-config - - #hive config - #hive.config.dir= - -kind: ConfigMap -metadata: - managedFields: - - apiVersion: v1 - fieldsType: FieldsV1 - fieldsV1: - f:data: - .: {} - f:linkis-ujes-hive-enginemanager.properties: {} - manager: kubectl - operation: Update - name: linkis-ujes-hive-enginemanager-config - namespace: default - diff --git a/k8s/linkis-ujes-hive-enginemanager-deployment.yaml b/k8s/linkis-ujes-hive-enginemanager-deployment.yaml deleted file mode 100644 index fb748a69c4b28ba5d8268a4f4dcf7f97e0613374..0000000000000000000000000000000000000000 --- a/k8s/linkis-ujes-hive-enginemanager-deployment.yaml +++ /dev/null @@ -1,124 +0,0 @@ -# -# Copyright 2019 WeBank -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-apiVersion: apps/v1 -kind: Deployment -metadata: - name: linkis-ujes-hive-enginemanager-deployment -spec: - replicas: 1 - selector: - matchLabels: - app: linkis-ujes-hive-enginemanager - # release: dev - strategy: - rollingUpdate: - maxSurge: 1 - maxUnavailable: 1 - type: RollingUpdate - template: - metadata: - labels: - app: linkis-ujes-hive-enginemanager - # release: dev - spec: - affinity: - podAntiAffinity: - requiredDuringSchedulingIgnoredDuringExecution: - - labelSelector: - matchExpressions: - - key: "app" - operator: In - values: - - linkis-ujes-hive-enginemanager - - linkis-ujes-spark-enginemanager - topologyKey: "kubernetes.io/hostname" - nodeAffinity: - preferredDuringSchedulingIgnoredDuringExecution: - - weight: 1 - preference: - matchExpressions: - - key: kubernetes.io/hostname - operator: In - values: - - node1 - containers: - - name: linkis-ujes-hive-enginemanager - image: wedatasphere/linkis:linkis-ujes-hive-enginemanager-0.10.0 - lifecycle: - preStop: - exec: - command: ["sh","/opt/linkis/linkis-ujes-hive-enginemanager/bin/stop.sh"] - imagePullPolicy: Always - ports: - - name: http - containerPort: 16001 - livenessProbe: - tcpSocket: - port: 16001 - initialDelaySeconds: 15 - periodSeconds: 20 - env: - - name: eurekaurl - valueFrom: - configMapKeyRef: - name: eureka-config - key: eurekaUrl - - name: EUREKA_URL - valueFrom: - configMapKeyRef: - name: eureka-config - key: eurekaUrl - - name: SERVER_HEAP_SIZE - value: 1024M - - name: START_PORT - value: "16001" - volumeMounts: - - name: linkis-ujes-hive-enginemanager-config - mountPath: /opt/linkis/conf - # - name: resultset - # mountPath: /opt/linkis/data - - name: varlog - mountPath: /opt/linkis/linkis-ujes-hive-enginemanager/logs - - name: hadoop-config - mountPath: /opt/hadoop/hadoop-2.7.7/etc/hadoop - - name: hive-config - mountPath: /opt/hive/apache-hive-2.3.6-bin/conf - - name: spark-config - mountPath: /opt/spark/spark-2.4.4-bin-hadoop2.7/conf - imagePullSecrets: - - name: registry-key - 
volumes: - - name: linkis-ujes-hive-enginemanager-config - configMap: - name: linkis-ujes-hive-enginemanager-config - # - name: resultset - # nfs: - # path: /data/k8s - # server: 10.206.0.12 - - name: varlog - hostPath: - path: /var/log - - name: eureka-config - configMap: - name: eureka-config - - name: hadoop-config - hostPath: - path: /opt/hadoop/hadoop-2.7.7/etc/hadoop - - name: hive-config - hostPath: - path: /opt/hive/apache-hive-2.3.6-bin/conf - - name: spark-config - hostPath: - path: /opt/spark/spark-2.4.4-bin-hadoop2.7/conf diff --git a/k8s/linkis-ujes-hive-entrance-configmap.yaml b/k8s/linkis-ujes-hive-entrance-configmap.yaml deleted file mode 100644 index af81b47c7f6680dec0354f7742944b5cdb6373e2..0000000000000000000000000000000000000000 --- a/k8s/linkis-ujes-hive-entrance-configmap.yaml +++ /dev/null @@ -1,50 +0,0 @@ -# -# Copyright 2019 WeBank -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-apiVersion: v1 -data: - linkis.properties: |+ - - wds.linkis.server.restful.scan.packages=com.webank.wedatasphere.linkis.entrance.restful - - wds.linkis.engine.application.name=hiveEngine - wds.linkis.enginemanager.application.name=hiveEngineManager - - wds.linkis.query.application.name=cloud-publicservice - - wds.linkis.console.config.application.name=cloud-publicservice - wds.linkis.engine.creation.wait.time.max=20m - wds.linkis.server.version=v1 - #hadoop config dir - #hadoop.config.dir=/appcom/config/hadoop-config - wds.linkis.entrance.config.logPath=file:///appcom/logs/dataworkcloud/dwc/ - - wds.linkis.resultSet.store.path=hdfs:/// - - wds.linkis.server.socket.mode=true - -kind: ConfigMap -metadata: - managedFields: - - apiVersion: v1 - fieldsType: FieldsV1 - fieldsV1: - f:data: - .: {} - f:linkis-ujes-hive-entrance.properties: {} - manager: kubectl - operation: Update - name: linkis-ujes-hive-entrance-config - namespace: default - diff --git a/k8s/linkis-ujes-hive-entrance-deployment.yaml b/k8s/linkis-ujes-hive-entrance-deployment.yaml deleted file mode 100644 index ce30c4942543dcfe8437c4d40213f301beec8659..0000000000000000000000000000000000000000 --- a/k8s/linkis-ujes-hive-entrance-deployment.yaml +++ /dev/null @@ -1,123 +0,0 @@ -# -# Copyright 2019 WeBank -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-apiVersion: apps/v1 -kind: Deployment -metadata: - name: linkis-ujes-hive-entrance-deployment -spec: - replicas: 1 - selector: - matchLabels: - app: linkis-ujes-hive-entrance - # release: dev - strategy: - rollingUpdate: - maxSurge: 1 - maxUnavailable: 1 - type: RollingUpdate - template: - metadata: - labels: - app: linkis-ujes-hive-entrance - # release: dev - spec: - affinity: - podAntiAffinity: - requiredDuringSchedulingIgnoredDuringExecution: - - labelSelector: - matchExpressions: - - key: "app" - operator: In - values: - - linkis-ujes-hive-entrance - topologyKey: "kubernetes.io/hostname" - nodeAffinity: - preferredDuringSchedulingIgnoredDuringExecution: - - weight: 1 - preference: - matchExpressions: - - key: kubernetes.io/hostname - operator: In - values: - - node1 - containers: - - name: linkis-ujes-hive-entrance - image: wedatasphere/linkis:linkis-ujes-hive-entrance-0.10.0 - lifecycle: - preStop: - exec: - command: ["sh","/opt/linkis/linkis-ujes-hive-entrance/bin/stop.sh"] - imagePullPolicy: Always - ports: - - name: http - containerPort: 16000 - livenessProbe: - tcpSocket: - port: 16000 - initialDelaySeconds: 15 - periodSeconds: 20 - env: - - name: eurekaurl - valueFrom: - configMapKeyRef: - name: eureka-config - key: eurekaUrl - - name: EUREKA_URL - valueFrom: - configMapKeyRef: - name: eureka-config - key: eurekaUrl - - name: SERVER_HEAP_SIZE - value: 1024M - - name: START_PORT - value: "16000" - volumeMounts: - - name: linkis-ujes-hive-entrance-config - mountPath: /opt/linkis/conf - # - name: resultset - # mountPath: /opt/linkis/data - - name: varlog - mountPath: /opt/linkis/linkis-ujes-hive-entrance/logs - - name: hadoop-config - mountPath: /opt/hadoop/hadoop-2.7.7/etc/hadoop - - name: hive-config - mountPath: /opt/hive/apache-hive-2.3.6-bin/conf - - name: spark-config - mountPath: /opt/spark/spark-2.4.4-bin-hadoop2.7/conf - imagePullSecrets: - - name: registry-key - volumes: - - name: linkis-ujes-hive-entrance-config - configMap: - name: 
linkis-ujes-hive-entrance-config - # - name: resultset - # nfs: - # path: /data/k8s - # server: 10.206.0.12 - - name: varlog - hostPath: - path: /var/log - - name: eureka-config - configMap: - name: eureka-config - - name: hadoop-config - hostPath: - path: /opt/hadoop/hadoop-2.7.7/etc/hadoop - - name: hive-config - hostPath: - path: /opt/hive/apache-hive-2.3.6-bin/conf - - name: spark-config - hostPath: - path: /opt/spark/spark-2.4.4-bin-hadoop2.7/conf diff --git a/k8s/linkis-ujes-jdbc-entrance-configmap.yaml b/k8s/linkis-ujes-jdbc-entrance-configmap.yaml deleted file mode 100644 index 614659cfec544e89107dc7d0ee470b3b7cc3a783..0000000000000000000000000000000000000000 --- a/k8s/linkis-ujes-jdbc-entrance-configmap.yaml +++ /dev/null @@ -1,51 +0,0 @@ -# -# Copyright 2019 WeBank -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-apiVersion: v1 -data: - linkis.properties: |+ - - - wds.linkis.server.restful.scan.packages=com.webank.wedatasphere.linkis.entrance.restful - - wds.linkis.engine.application.name=jdbcEngine - wds.linkis.enginemanager.application.name=jdbc - - wds.linkis.query.application.name=cloud-publicservice - - wds.linkis.console.config.application.name=cloud-publicservice - wds.linkis.engine.creation.wait.time.max=2m - wds.linkis.server.version=v1 - #hadoop.config.dir= - wds.linkis.entrance.config.logPath=hdfs://zt-hadoop1.sit.ihomefnt.org:9000/tmp/linkis/log - - wds.linkis.resultSet.store.path=hdfs://zt-hadoop.sit.ihomefnt.org:9000/tmp/linkis/resultset - - wds.linkis.server.socket.mode=true - - -kind: ConfigMap -metadata: - managedFields: - - apiVersion: v1 - fieldsType: FieldsV1 - fieldsV1: - f:data: - .: {} - f:linkis-ujes-jdbc-entrance.properties: {} - manager: kubectl - operation: Update - name: linkis-ujes-jdbc-entrance-config - namespace: default - diff --git a/k8s/linkis-ujes-jdbc-entrance-deployment.yaml b/k8s/linkis-ujes-jdbc-entrance-deployment.yaml deleted file mode 100644 index bbd832063217d7769b281defd5f74fcb8bd875ca..0000000000000000000000000000000000000000 --- a/k8s/linkis-ujes-jdbc-entrance-deployment.yaml +++ /dev/null @@ -1,123 +0,0 @@ -# -# Copyright 2019 WeBank -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-apiVersion: apps/v1 -kind: Deployment -metadata: - name: linkis-ujes-jdbc-entrance-deployment -spec: - replicas: 1 - selector: - matchLabels: - app: linkis-ujes-jdbc-entrance - # release: dev - strategy: - rollingUpdate: - maxSurge: 1 - maxUnavailable: 1 - type: RollingUpdate - template: - metadata: - labels: - app: linkis-ujes-jdbc-entrance - # release: dev - spec: - affinity: - podAntiAffinity: - requiredDuringSchedulingIgnoredDuringExecution: - - labelSelector: - matchExpressions: - - key: "app" - operator: In - values: - - linkis-ujes-jdbc-entrance - topologyKey: "kubernetes.io/hostname" - nodeAffinity: - preferredDuringSchedulingIgnoredDuringExecution: - - weight: 1 - preference: - matchExpressions: - - key: kubernetes.io/hostname - operator: In - values: - - node3 - containers: - - name: linkis-ujes-jdbc-entrance - image: wedatasphere/linkis:linkis-ujes-jdbc-entrance-0.10.0 - lifecycle: - preStop: - exec: - command: ["sh","/opt/linkis/linkis-ujes-jdbc-entrance/bin/stop.sh"] - imagePullPolicy: Always - ports: - - name: http - containerPort: 14008 - livenessProbe: - tcpSocket: - port: 14008 - initialDelaySeconds: 15 - periodSeconds: 20 - env: - - name: eurekaurl - valueFrom: - configMapKeyRef: - name: eureka-config - key: eurekaUrl - - name: EUREKA_URL - valueFrom: - configMapKeyRef: - name: eureka-config - key: eurekaUrl - - name: SERVER_HEAP_SIZE - value: 1024M - - name: START_PORT - value: "14008" - volumeMounts: - - name: linkis-ujes-jdbc-entrance-config - mountPath: /opt/linkis/conf - # - name: resultset - # mountPath: /opt/linkis/data - - name: varlog - mountPath: /opt/linkis/linkis-ujes-jdbc-entrance/logs - - name: hadoop-config - mountPath: /opt/hadoop/hadoop-2.7.7/etc/hadoop - - name: hive-config - mountPath: /opt/hive/apache-hive-2.3.6-bin/conf - - name: spark-config - mountPath: /opt/spark/spark-2.4.4-bin-hadoop2.7/conf - imagePullSecrets: - - name: registry-key - volumes: - - name: linkis-ujes-jdbc-entrance-config - configMap: - name: 
linkis-ujes-jdbc-entrance-config - # - name: resultset - # nfs: - # path: /data/k8s - # server: 10.206.0.12 - - name: varlog - hostPath: - path: /var/log - - name: eureka-config - configMap: - name: eureka-config - - name: hadoop-config - hostPath: - path: /opt/hadoop/hadoop-2.7.7/etc/hadoop - - name: hive-config - hostPath: - path: /opt/hive/apache-hive-2.3.6-bin/conf - - name: spark-config - hostPath: - path: /opt/spark/spark-2.4.4-bin-hadoop2.7/conf diff --git a/k8s/linkis-ujes-mlsql-entrance-configmap.yaml b/k8s/linkis-ujes-mlsql-entrance-configmap.yaml deleted file mode 100644 index f2fc29d62d1f3deb2774f673f6dccaf715e3c0f3..0000000000000000000000000000000000000000 --- a/k8s/linkis-ujes-mlsql-entrance-configmap.yaml +++ /dev/null @@ -1,51 +0,0 @@ -# -# Copyright 2019 WeBank -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-apiVersion: v1 -data: - linkis.properties: |+ - - wds.linkis.server.restful.scan.packages=com.webank.wedatasphere.linkis.entrance.restful - - wds.linkis.engine.application.name=mlsqlEngine - wds.linkis.enginemanager.application.name=mlsql - - wds.linkis.query.application.name=cloud-publicservice - - wds.linkis.console.config.application.name=cloud-publicservice - wds.linkis.engine.creation.wait.time.max=20m - wds.linkis.server.version=v1 - # hadoop.config.dir= - wds.linkis.entrance.config.logPath=file:///tmp/linkis/ - - wds.linkis.resultSet.store.path=file:///tmp/linkis - - wds.linkis.server.socket.mode=true - # wds.linkis.test.mode=true - - -kind: ConfigMap -metadata: - managedFields: - - apiVersion: v1 - fieldsType: FieldsV1 - fieldsV1: - f:data: - .: {} - f:linkis-ujes-mlsql-entrance.properties: {} - manager: kubectl - operation: Update - name: linkis-ujes-mlsql-entrance-config - namespace: default - diff --git a/k8s/linkis-ujes-mlsql-entrance-deployment.yaml b/k8s/linkis-ujes-mlsql-entrance-deployment.yaml deleted file mode 100644 index a94f5a81e7de1efa355ac4694062bde080968935..0000000000000000000000000000000000000000 --- a/k8s/linkis-ujes-mlsql-entrance-deployment.yaml +++ /dev/null @@ -1,123 +0,0 @@ -# -# Copyright 2019 WeBank -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-apiVersion: apps/v1 -kind: Deployment -metadata: - name: linkis-ujes-mlsql-entrance-deployment -spec: - replicas: 1 - selector: - matchLabels: - app: linkis-ujes-mlsql-entrance - # release: dev - strategy: - rollingUpdate: - maxSurge: 1 - maxUnavailable: 1 - type: RollingUpdate - template: - metadata: - labels: - app: linkis-ujes-mlsql-entrance - # release: dev - spec: - affinity: - podAntiAffinity: - requiredDuringSchedulingIgnoredDuringExecution: - - labelSelector: - matchExpressions: - - key: "app" - operator: In - values: - - linkis-ujes-mlsql-entrance - topologyKey: "kubernetes.io/hostname" - nodeAffinity: - preferredDuringSchedulingIgnoredDuringExecution: - - weight: 1 - preference: - matchExpressions: - - key: kubernetes.io/hostname - operator: In - values: - - node3 - containers: - - name: linkis-ujes-mlsql-entrance - image: wedatasphere/linkis:linkis-ujes-mlsql-entrance-0.10.0 - lifecycle: - preStop: - exec: - command: ["sh","/opt/linkis/linkis-ujes-mlsql-entrance/bin/stop.sh"] - imagePullPolicy: Always - ports: - - name: http - containerPort: 17000 - livenessProbe: - tcpSocket: - port: 17000 - initialDelaySeconds: 15 - periodSeconds: 20 - env: - - name: eurekaurl - valueFrom: - configMapKeyRef: - name: eureka-config - key: eurekaUrl - - name: EUREKA_URL - valueFrom: - configMapKeyRef: - name: eureka-config - key: eurekaUrl - - name: SERVER_HEAP_SIZE - value: 1024M - - name: START_PORT - value: "17000" - volumeMounts: - - name: linkis-ujes-mlsql-entrance-config - mountPath: /opt/linkis/conf - # - name: resultset - # mountPath: /opt/linkis/data - - name: varlog - mountPath: /opt/linkis/linkis-ujes-mlsql-entrance/logs - - name: hadoop-config - mountPath: /opt/hadoop/hadoop-2.7.7/etc/hadoop - - name: hive-config - mountPath: /opt/hive/apache-hive-2.3.6-bin/conf - - name: spark-config - mountPath: /opt/spark/spark-2.4.4-bin-hadoop2.7/conf - imagePullSecrets: - - name: registry-key - volumes: - - name: linkis-ujes-mlsql-entrance-config - configMap: - name: 
linkis-ujes-mlsql-entrance-config - # - name: resultset - # nfs: - # path: /data/k8s - # server: 10.206.0.12 - - name: varlog - hostPath: - path: /var/log - - name: eureka-config - configMap: - name: eureka-config - - name: hadoop-config - hostPath: - path: /opt/hadoop/hadoop-2.7.7/etc/hadoop - - name: hive-config - hostPath: - path: /opt/hive/apache-hive-2.3.6-bin/conf - - name: spark-config - hostPath: - path: /opt/spark/spark-2.4.4-bin-hadoop2.7/conf diff --git a/k8s/linkis-ujes-pipeline-enginemanager-configmap.yaml b/k8s/linkis-ujes-pipeline-enginemanager-configmap.yaml deleted file mode 100644 index 5c5c7924262b7c3330072de8fb5933f8630ac023..0000000000000000000000000000000000000000 --- a/k8s/linkis-ujes-pipeline-enginemanager-configmap.yaml +++ /dev/null @@ -1,46 +0,0 @@ -# -# Copyright 2019 WeBank -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -apiVersion: v1 -data: - linkis.properties: |+ - - - wds.linkis.server.restful.scan.packages=com.webank.wedatasphere.linkis.entrance.restful - wds.linkis.engine.application.name=pipeLineEngine - wds.linkis.server.component.exclude.packages=com.webank.wedatasphere.linkis.engine.,com.webank.wedatasphere.linkis.udf. 
- - wds.linkis.console.config.application.name=cloud-publicservice - #hadoop.config.dir=/appcom/config/hadoop-config - #spark.config.dir=/appcom/config/spark-config - - #rootScript shell path - wds.linkis.enginemanager.sudo.script=/appcom/tmp/johnnwang/Install/linkis-ujes-spark-enginemanager/bin/rootScript.sh - wds.linkis.server.version=v1 - - -kind: ConfigMap -metadata: - managedFields: - - apiVersion: v1 - fieldsType: FieldsV1 - fieldsV1: - f:data: - .: {} - f:linkis-ujes-pipeline-enginemanager.properties: {} - manager: kubectl - operation: Update - name: linkis-ujes-pipeline-enginemanager-config - namespace: default - diff --git a/k8s/linkis-ujes-pipeline-enginemanager-deployment.yaml b/k8s/linkis-ujes-pipeline-enginemanager-deployment.yaml deleted file mode 100644 index 71375c141a64a028b05e5f72e7a5f7ab45297016..0000000000000000000000000000000000000000 --- a/k8s/linkis-ujes-pipeline-enginemanager-deployment.yaml +++ /dev/null @@ -1,123 +0,0 @@ -# -# Copyright 2019 WeBank -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-apiVersion: apps/v1 -kind: Deployment -metadata: - name: linkis-ujes-pipeline-enginemanager-deployment -spec: - replicas: 1 - selector: - matchLabels: - app: linkis-ujes-pipeline-enginemanager - # release: dev - strategy: - rollingUpdate: - maxSurge: 1 - maxUnavailable: 1 - type: RollingUpdate - template: - metadata: - labels: - app: linkis-ujes-pipeline-enginemanager - # release: dev - spec: - affinity: - podAntiAffinity: - requiredDuringSchedulingIgnoredDuringExecution: - - labelSelector: - matchExpressions: - - key: "app" - operator: In - values: - - linkis-ujes-pipeline-enginemanager - topologyKey: "kubernetes.io/hostname" - nodeAffinity: - preferredDuringSchedulingIgnoredDuringExecution: - - weight: 1 - preference: - matchExpressions: - - key: kubernetes.io/hostname - operator: In - values: - - node3 - containers: - - name: linkis-ujes-mlsql-entrance - image: wedatasphere/linkis:linkis-ujes-pipeline-enginemanager-0.10.0 - lifecycle: - preStop: - exec: - command: ["sh","/opt/linkis/linkis-ujes-pipeline-enginemanager/bin/stop.sh"] - imagePullPolicy: Always - ports: - - name: http - containerPort: 18001 - livenessProbe: - tcpSocket: - port: 18001 - initialDelaySeconds: 15 - periodSeconds: 20 - env: - - name: eurekaurl - valueFrom: - configMapKeyRef: - name: eureka-config - key: eurekaUrl - - name: EUREKA_URL - valueFrom: - configMapKeyRef: - name: eureka-config - key: eurekaUrl - - name: SERVER_HEAP_SIZE - value: 1024M - - name: START_PORT - value: "18001" - volumeMounts: - - name: linkis-ujes-pipeline-enginemanager-config - mountPath: /opt/linkis/conf - # - name: resultset - # mountPath: /opt/linkis/data - - name: varlog - mountPath: /opt/linkis/linkis-ujes-pipeline-enginemanager/logs - - name: hadoop-config - mountPath: /opt/hadoop/hadoop-2.7.7/etc/hadoop - - name: hive-config - mountPath: /opt/hive/apache-hive-2.3.6-bin/conf - - name: spark-config - mountPath: /opt/spark/spark-2.4.4-bin-hadoop2.7/conf - imagePullSecrets: - - name: registry-key - volumes: - - 
name: linkis-ujes-pipeline-enginemanager-config - configMap: - name: linkis-ujes-pipeline-enginemanager-config - # - name: resultset - # nfs: - # path: /data/k8s - # server: 10.206.0.12 - - name: varlog - hostPath: - path: /var/log - - name: eureka-config - configMap: - name: eureka-config - - name: hadoop-config - hostPath: - path: /opt/hadoop/hadoop-2.7.7/etc/hadoop - - name: hive-config - hostPath: - path: /opt/hive/apache-hive-2.3.6-bin/conf - - name: spark-config - hostPath: - path: /opt/spark/spark-2.4.4-bin-hadoop2.7/conf diff --git a/k8s/linkis-ujes-pipeline-entrance-configmap.yaml b/k8s/linkis-ujes-pipeline-entrance-configmap.yaml deleted file mode 100644 index 9c36b03f70dd924453bf87bfe5e56c354b4cfe34..0000000000000000000000000000000000000000 --- a/k8s/linkis-ujes-pipeline-entrance-configmap.yaml +++ /dev/null @@ -1,57 +0,0 @@ -# -# Copyright 2019 WeBank -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-apiVersion: v1 -data: - linkis.properties: |+ - - - wds.linkis.server.restful.scan.packages=com.webank.wedatasphere.linkis.entrance.restful - - wds.linkis.engine.application.name=pipeLineEngine - wds.linkis.enginemanager.application.name=pipeLineEngineManager - - wds.linkis.query.application.name=cloud-publicservice - - wds.linkis.console.config.application.name=cloud-publicservice - wds.linkis.engine.creation.wait.time.max=20m - - - wds.linkis.resultSet.store.path=hdfs:/// - - wds.linkis.server.version=v1 - wds.linkis.server.socket.mode=true - - bdp.server.distinct.mode=true - - #hadoop.config.dir=/appcom/config/hadoop-config - #spark.config.dir=/appcom/config/spark-config - - - - -kind: ConfigMap -metadata: - managedFields: - - apiVersion: v1 - fieldsType: FieldsV1 - fieldsV1: - f:data: - .: {} - f:linkis-ujes-pipeline-entrance.properties: {} - manager: kubectl - operation: Update - name: linkis-ujes-pipeline-entrance-config - namespace: default - diff --git a/k8s/linkis-ujes-pipeline-entrance-deployment.yaml b/k8s/linkis-ujes-pipeline-entrance-deployment.yaml deleted file mode 100644 index 15fa3d815ae255176e8cdc7f8bdf4b85057cacff..0000000000000000000000000000000000000000 --- a/k8s/linkis-ujes-pipeline-entrance-deployment.yaml +++ /dev/null @@ -1,123 +0,0 @@ -# -# Copyright 2019 WeBank -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-apiVersion: apps/v1 -kind: Deployment -metadata: - name: linkis-ujes-pipeline-entrance-deployment -spec: - replicas: 1 - selector: - matchLabels: - app: linkis-ujes-pipeline-entrance - # release: dev - strategy: - rollingUpdate: - maxSurge: 1 - maxUnavailable: 1 - type: RollingUpdate - template: - metadata: - labels: - app: linkis-ujes-pipeline-entrance - # release: dev - spec: - affinity: - podAntiAffinity: - requiredDuringSchedulingIgnoredDuringExecution: - - labelSelector: - matchExpressions: - - key: "app" - operator: In - values: - - linkis-ujes-pipeline-entrance - topologyKey: "kubernetes.io/hostname" - nodeAffinity: - preferredDuringSchedulingIgnoredDuringExecution: - - weight: 1 - preference: - matchExpressions: - - key: kubernetes.io/hostname - operator: In - values: - - node3 - containers: - - name: linkis-ujes-pipeline-entrance - image: wedatasphere/linkis:linkis-ujes-pipeline-entrance-0.10.0 - lifecycle: - preStop: - exec: - command: ["sh","/opt/linkis/linkis-ujes-pipeline-entrance/bin/stop.sh"] - imagePullPolicy: Always - ports: - - name: http - containerPort: 18000 - livenessProbe: - tcpSocket: - port: 18000 - initialDelaySeconds: 15 - periodSeconds: 20 - env: - - name: eurekaurl - valueFrom: - configMapKeyRef: - name: eureka-config - key: eurekaUrl - - name: EUREKA_URL - valueFrom: - configMapKeyRef: - name: eureka-config - key: eurekaUrl - - name: SERVER_HEAP_SIZE - value: 1024M - - name: START_PORT - value: "18000" - volumeMounts: - - name: linkis-ujes-pipeline-entrance-config - mountPath: /opt/linkis/conf - # - name: resultset - # mountPath: /opt/linkis/data - - name: varlog - mountPath: /opt/linkis/linkis-ujes-pipeline-entrance/logs - - name: hadoop-config - mountPath: /opt/hadoop/hadoop-2.7.7/etc/hadoop - - name: hive-config - mountPath: /opt/hive/apache-hive-2.3.6-bin/conf - - name: spark-config - mountPath: /opt/spark/spark-2.4.4-bin-hadoop2.7/conf - imagePullSecrets: - - name: registry-key - volumes: - - name: 
linkis-ujes-pipeline-entrance-config - configMap: - name: linkis-ujes-pipeline-entrance-config - # - name: resultset - # nfs: - # path: /data/k8s - # server: 10.206.0.12 - - name: varlog - hostPath: - path: /var/log - - name: eureka-config - configMap: - name: eureka-config - - name: hadoop-config - hostPath: - path: /opt/hadoop/hadoop-2.7.7/etc/hadoop - - name: hive-config - hostPath: - path: /opt/hive/apache-hive-2.3.6-bin/conf - - name: spark-config - hostPath: - path: /opt/spark/spark-2.4.4-bin-hadoop2.7/conf diff --git a/k8s/linkis-ujes-python-enginemanager-configmap.yaml b/k8s/linkis-ujes-python-enginemanager-configmap.yaml deleted file mode 100644 index b6af7f059326f2152cdd0f2cf1083c7009c8aabb..0000000000000000000000000000000000000000 --- a/k8s/linkis-ujes-python-enginemanager-configmap.yaml +++ /dev/null @@ -1,40 +0,0 @@ -# -# Copyright 2019 WeBank -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -apiVersion: v1 -data: - linkis.properties: |+ - - - - wds.linkis.server.restful.scan.packages=com.webank.wedatasphere.linkis.entrance.restful - wds.linkis.engine.application.name=pythonEngine - wds.linkis.server.component.exclude.packages=com.webank.wedatasphere.linkis.engine.,com.webank.wedatasphere.linkis.udf. 
- wds.linkis.server.version=v1 - wds.linkis.enginemanager.sudo.script=rootScript.sh - -kind: ConfigMap -metadata: - managedFields: - - apiVersion: v1 - fieldsType: FieldsV1 - fieldsV1: - f:data: - .: {} - f:linkis-ujes-python-enginemanager.properties: {} - manager: kubectl - operation: Update - name: linkis-ujes-python-enginemanager-config - namespace: default - diff --git a/k8s/linkis-ujes-python-enginemanager-deployment.yaml b/k8s/linkis-ujes-python-enginemanager-deployment.yaml deleted file mode 100644 index 98b3f4c0d23d5cf915329f51383733ce972bd20a..0000000000000000000000000000000000000000 --- a/k8s/linkis-ujes-python-enginemanager-deployment.yaml +++ /dev/null @@ -1,123 +0,0 @@ -# -# Copyright 2019 WeBank -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-apiVersion: apps/v1 -kind: Deployment -metadata: - name: linkis-ujes-python-enginemanager-deployment -spec: - replicas: 1 - selector: - matchLabels: - app: linkis-ujes-python-enginemanager - # release: dev - strategy: - rollingUpdate: - maxSurge: 1 - maxUnavailable: 1 - type: RollingUpdate - template: - metadata: - labels: - app: linkis-ujes-python-enginemanager - # release: dev - spec: - affinity: - podAntiAffinity: - requiredDuringSchedulingIgnoredDuringExecution: - - labelSelector: - matchExpressions: - - key: "app" - operator: In - values: - - linkis-ujes-python-enginemanager - topologyKey: "kubernetes.io/hostname" - nodeAffinity: - preferredDuringSchedulingIgnoredDuringExecution: - - weight: 1 - preference: - matchExpressions: - - key: kubernetes.io/hostname - operator: In - values: - - node3 - containers: - - name: linkis-ujes-python-enginemanager - image: wedatasphere/linkis:linkis-ujes-python-enginemanager-0.10.0 - lifecycle: - preStop: - exec: - command: ["sh","/opt/linkis/linkis-ujes-python-enginemanager/bin/stop.sh"] - imagePullPolicy: Always - ports: - - name: http - containerPort: 19001 - livenessProbe: - tcpSocket: - port: 19001 - initialDelaySeconds: 15 - periodSeconds: 20 - env: - - name: eurekaurl - valueFrom: - configMapKeyRef: - name: eureka-config - key: eurekaUrl - - name: EUREKA_URL - valueFrom: - configMapKeyRef: - name: eureka-config - key: eurekaUrl - - name: SERVER_HEAP_SIZE - value: 1024M - - name: START_PORT - value: "19001" - volumeMounts: - - name: linkis-ujes-python-enginemanager-config - mountPath: /opt/linkis/conf - # - name: resultset - # mountPath: /opt/linkis/data - - name: varlog - mountPath: /opt/linkis/linkis-ujes-python-enginemanager/logs - - name: hadoop-config - mountPath: /opt/hadoop/hadoop-2.7.7/etc/hadoop - - name: hive-config - mountPath: /opt/hive/apache-hive-2.3.6-bin/conf - - name: spark-config - mountPath: /opt/spark/spark-2.4.4-bin-hadoop2.7/conf - imagePullSecrets: - - name: registry-key - volumes: - - name: 
linkis-ujes-python-enginemanager-config - configMap: - name: linkis-ujes-python-enginemanager-config - # - name: resultset - # nfs: - # path: /data/k8s - # server: 10.206.0.12 - - name: varlog - hostPath: - path: /var/log - - name: eureka-config - configMap: - name: eureka-config - - name: hadoop-config - hostPath: - path: /opt/hadoop/hadoop-2.7.7/etc/hadoop - - name: hive-config - hostPath: - path: /opt/hive/apache-hive-2.3.6-bin/conf - - name: spark-config - hostPath: - path: /opt/spark/spark-2.4.4-bin-hadoop2.7/conf diff --git a/k8s/linkis-ujes-python-entrance-configmap.yaml b/k8s/linkis-ujes-python-entrance-configmap.yaml deleted file mode 100644 index c6f3e61a449a00f64071174f0620bd29e05f5c28..0000000000000000000000000000000000000000 --- a/k8s/linkis-ujes-python-entrance-configmap.yaml +++ /dev/null @@ -1,53 +0,0 @@ -# -# Copyright 2019 WeBank -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-apiVersion: v1 -data: - linkis.properties: |+ - - - wds.linkis.server.restful.scan.packages=com.webank.wedatasphere.linkis.entrance.restful - - wds.linkis.engine.application.name=pythonEngine - wds.linkis.enginemanager.application.name=pythonEngineManager - - wds.linkis.query.application.name=cloud-publicservice - - wds.linkis.console.configuration.application.name=cloud-publicservice - wds.linkis.engine.creation.wait.time.max=2m - wds.linkis.server.version=v1 - - wds.linkis.entrance.config.logPath=file:///appcom/logs/dataworkcloud/dwc/ - - wds.linkis.server.socket.mode=true - - - #hadoop.config.dir=/appcom/config/hadoop-config - - wds.linkis.resultSet.store.path=hdfs:/// - -kind: ConfigMap -metadata: - managedFields: - - apiVersion: v1 - fieldsType: FieldsV1 - fieldsV1: - f:data: - .: {} - f:linkis-ujes-python-entrance.properties: {} - manager: kubectl - operation: Update - name: linkis-ujes-python-entrance-config - namespace: default - diff --git a/k8s/linkis-ujes-python-entrance-deployment.yaml b/k8s/linkis-ujes-python-entrance-deployment.yaml deleted file mode 100644 index ac3d3ed3b93ac8bed0f705140fb42331e4f0a628..0000000000000000000000000000000000000000 --- a/k8s/linkis-ujes-python-entrance-deployment.yaml +++ /dev/null @@ -1,123 +0,0 @@ -# -# Copyright 2019 WeBank -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-apiVersion: apps/v1 -kind: Deployment -metadata: - name: linkis-ujes-python-entrance-deployment -spec: - replicas: 1 - selector: - matchLabels: - app: linkis-ujes-python-entrance - # release: dev - strategy: - rollingUpdate: - maxSurge: 1 - maxUnavailable: 1 - type: RollingUpdate - template: - metadata: - labels: - app: linkis-ujes-python-entrance - # release: dev - spec: - affinity: - podAntiAffinity: - requiredDuringSchedulingIgnoredDuringExecution: - - labelSelector: - matchExpressions: - - key: "app" - operator: In - values: - - linkis-ujes-python-entrance - topologyKey: "kubernetes.io/hostname" - nodeAffinity: - preferredDuringSchedulingIgnoredDuringExecution: - - weight: 1 - preference: - matchExpressions: - - key: kubernetes.io/hostname - operator: In - values: - - node3 - containers: - - name: linkis-ujes-python-entrance - image: wedatasphere/linkis:linkis-ujes-python-entrance-0.10.0 - lifecycle: - preStop: - exec: - command: ["sh","/opt/linkis/linkis-ujes-python-entrance/bin/stop.sh"] - imagePullPolicy: Always - ports: - - name: http - containerPort: 19000 - livenessProbe: - tcpSocket: - port: 19000 - initialDelaySeconds: 15 - periodSeconds: 20 - env: - - name: eurekaurl - valueFrom: - configMapKeyRef: - name: eureka-config - key: eurekaUrl - - name: EUREKA_URL - valueFrom: - configMapKeyRef: - name: eureka-config - key: eurekaUrl - - name: SERVER_HEAP_SIZE - value: 1024M - - name: START_PORT - value: "19000" - volumeMounts: - - name: linkis-ujes-python-entrance-config - mountPath: /opt/linkis/conf - # - name: resultset - # mountPath: /opt/linkis/data - - name: varlog - mountPath: /opt/linkis/linkis-ujes-python-entrance/logs - - name: hadoop-config - mountPath: /opt/hadoop/hadoop-2.7.7/etc/hadoop - - name: hive-config - mountPath: /opt/hive/apache-hive-2.3.6-bin/conf - - name: spark-config - mountPath: /opt/spark/spark-2.4.4-bin-hadoop2.7/conf - imagePullSecrets: - - name: registry-key - volumes: - - name: linkis-ujes-python-entrance-config - configMap: - 
name: linkis-ujes-python-entrance-config - # - name: resultset - # nfs: - # path: /data/k8s - # server: 10.206.0.12 - - name: varlog - hostPath: - path: /var/log - - name: eureka-config - configMap: - name: eureka-config - - name: hadoop-config - hostPath: - path: /opt/hadoop/hadoop-2.7.7/etc/hadoop - - name: hive-config - hostPath: - path: /opt/hive/apache-hive-2.3.6-bin/conf - - name: spark-config - hostPath: - path: /opt/spark/spark-2.4.4-bin-hadoop2.7/conf diff --git a/k8s/linkis-ujes-shell-enginemanager-configmap.yaml b/k8s/linkis-ujes-shell-enginemanager-configmap.yaml deleted file mode 100644 index c8cee63a48b376e1af50589cd075ee8c5343d1ed..0000000000000000000000000000000000000000 --- a/k8s/linkis-ujes-shell-enginemanager-configmap.yaml +++ /dev/null @@ -1,38 +0,0 @@ -# -# Copyright 2019 WeBank -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -apiVersion: v1 -data: - linkis.properties: |+ - wds.linkis.server.restful.scan.packages=com.webank.wedatasphere.linkis.entrance.restful - wds.linkis.engine.application.name=shellEngine - wds.linkis.server.component.exclude.packages=com.webank.wedatasphere.linkis.engine.,com.webank.wedatasphere.linkis.udf. 
- wds.linkis.server.version=v1 - #sudo script - wds.linkis.enginemanager.sudo.script=rootScript.sh - -kind: ConfigMap -metadata: - managedFields: - - apiVersion: v1 - fieldsType: FieldsV1 - fieldsV1: - f:data: - .: {} - f:linkis-ujes-shell-enginemanager.properties: {} - manager: kubectl - operation: Update - name: linkis-ujes-shell-enginemanager-config - namespace: default - diff --git a/k8s/linkis-ujes-shell-enginemanager-deployment.yaml b/k8s/linkis-ujes-shell-enginemanager-deployment.yaml deleted file mode 100644 index 890c014adeff374777469975badd23d43e0c49ef..0000000000000000000000000000000000000000 --- a/k8s/linkis-ujes-shell-enginemanager-deployment.yaml +++ /dev/null @@ -1,123 +0,0 @@ -# -# Copyright 2019 WeBank -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-apiVersion: apps/v1 -kind: Deployment -metadata: - name: linkis-ujes-shell-enginemanager-deployment -spec: - replicas: 1 - selector: - matchLabels: - app: linkis-ujes-shell-enginemanager - # release: dev - strategy: - rollingUpdate: - maxSurge: 1 - maxUnavailable: 1 - type: RollingUpdate - template: - metadata: - labels: - app: linkis-ujes-shell-enginemanager - # release: dev - spec: - affinity: - podAntiAffinity: - requiredDuringSchedulingIgnoredDuringExecution: - - labelSelector: - matchExpressions: - - key: "app" - operator: In - values: - - linkis-ujes-shell-enginemanager - topologyKey: "kubernetes.io/hostname" - nodeAffinity: - preferredDuringSchedulingIgnoredDuringExecution: - - weight: 1 - preference: - matchExpressions: - - key: kubernetes.io/hostname - operator: In - values: - - node2 - containers: - - name: linkis-ujes-shell-enginemanager - image: wedatasphere/linkis:linkis-ujes-shell-enginemanager-0.10.0 - lifecycle: - preStop: - exec: - command: ["sh","/opt/linkis/linkis-ujes-shell-enginemanager/bin/stop.sh"] - imagePullPolicy: Always - ports: - - name: http - containerPort: 20001 - livenessProbe: - tcpSocket: - port: 20001 - initialDelaySeconds: 15 - periodSeconds: 20 - env: - - name: eurekaurl - valueFrom: - configMapKeyRef: - name: eureka-config - key: eurekaUrl - - name: EUREKA_URL - valueFrom: - configMapKeyRef: - name: eureka-config - key: eurekaUrl - - name: SERVER_HEAP_SIZE - value: 1024M - - name: START_PORT - value: "20001" - volumeMounts: - - name: linkis-ujes-shell-enginemanager-config - mountPath: /opt/linkis/conf - # - name: resultset - # mountPath: /opt/linkis/data - - name: varlog - mountPath: /opt/linkis/linkis-ujes-shell-enginemanager/logs - - name: hadoop-config - mountPath: /opt/hadoop/hadoop-2.7.7/etc/hadoop - - name: hive-config - mountPath: /opt/hive/apache-hive-2.3.6-bin/conf - - name: spark-config - mountPath: /opt/spark/spark-2.4.4-bin-hadoop2.7/conf - imagePullSecrets: - - name: registry-key - volumes: - - name: 
linkis-ujes-shell-enginemanager-config - configMap: - name: linkis-ujes-shell-enginemanager-config - # - name: resultset - # nfs: - # path: /data/k8s - # server: 10.206.0.12 - - name: varlog - hostPath: - path: /var/log - - name: eureka-config - configMap: - name: eureka-config - - name: hadoop-config - hostPath: - path: /opt/hadoop/hadoop-2.7.7/etc/hadoop - - name: hive-config - hostPath: - path: /opt/hive/apache-hive-2.3.6-bin/conf - - name: spark-config - hostPath: - path: /opt/spark/spark-2.4.4-bin-hadoop2.7/conf diff --git a/k8s/linkis-ujes-shell-entrance-configmap.yaml b/k8s/linkis-ujes-shell-entrance-configmap.yaml deleted file mode 100644 index 26f28010658bb634cc72b73f22ea96927adacd86..0000000000000000000000000000000000000000 --- a/k8s/linkis-ujes-shell-entrance-configmap.yaml +++ /dev/null @@ -1,42 +0,0 @@ -# -# Copyright 2019 WeBank -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-apiVersion: v1 -data: - linkis.properties: |+ - wds.linkis.server.restful.scan.packages=com.webank.wedatasphere.linkis.entrance.restful - wds.linkis.engine.application.name=shellEngine - wds.linkis.enginemanager.application.name=shellEngineManager - wds.linkis.query.application.name=cloud-publicservice - wds.linkis.console.config.application.name=cloud-publicservice - wds.linkis.engine.creation.wait.time.max=20m - wds.linkis.server.version=v1 - wds.linkis.entrance.config.logPath=file:///tmp/linkis/ - wds.linkis.resultSet.store.path=file:///tmp/linkis - wds.linkis.server.socket.mode=true - -kind: ConfigMap -metadata: - managedFields: - - apiVersion: v1 - fieldsType: FieldsV1 - fieldsV1: - f:data: - .: {} - f:linkis-ujes-shell-entrance.properties: {} - manager: kubectl - operation: Update - name: linkis-ujes-shell-entrance-config - namespace: default - diff --git a/k8s/linkis-ujes-shell-entrance-deployment.yaml b/k8s/linkis-ujes-shell-entrance-deployment.yaml deleted file mode 100644 index fa4710cf01d4c5ced26f438a51e2f6e8bd079679..0000000000000000000000000000000000000000 --- a/k8s/linkis-ujes-shell-entrance-deployment.yaml +++ /dev/null @@ -1,123 +0,0 @@ -# -# Copyright 2019 WeBank -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-apiVersion: apps/v1 -kind: Deployment -metadata: - name: linkis-ujes-shell-entrance-deployment -spec: - replicas: 1 - selector: - matchLabels: - app: linkis-ujes-shell-entrance - # release: dev - strategy: - rollingUpdate: - maxSurge: 1 - maxUnavailable: 1 - type: RollingUpdate - template: - metadata: - labels: - app: linkis-ujes-shell-entrance - # release: dev - spec: - affinity: - podAntiAffinity: - requiredDuringSchedulingIgnoredDuringExecution: - - labelSelector: - matchExpressions: - - key: "app" - operator: In - values: - - linkis-ujes-shell-entrance - topologyKey: "kubernetes.io/hostname" - nodeAffinity: - preferredDuringSchedulingIgnoredDuringExecution: - - weight: 1 - preference: - matchExpressions: - - key: kubernetes.io/hostname - operator: In - values: - - node2 - containers: - - name: linkis-ujes-shell-entrance - image: wedatasphere/linkis:linkis-ujes-shell-entrance-0.10.0 - lifecycle: - preStop: - exec: - command: ["sh","/opt/linkis/linkis-ujes-shell-entrance/bin/stop.sh"] - imagePullPolicy: Always - ports: - - name: http - containerPort: 20000 - livenessProbe: - tcpSocket: - port: 20000 - initialDelaySeconds: 15 - periodSeconds: 20 - env: - - name: eurekaurl - valueFrom: - configMapKeyRef: - name: eureka-config - key: eurekaUrl - - name: EUREKA_URL - valueFrom: - configMapKeyRef: - name: eureka-config - key: eurekaUrl - - name: SERVER_HEAP_SIZE - value: 1024M - - name: START_PORT - value: "20000" - volumeMounts: - - name: linkis-ujes-shell-entrance-config - mountPath: /opt/linkis/conf - # - name: resultset - # mountPath: /opt/linkis/data - - name: varlog - mountPath: /opt/linkis/linkis-ujes-shell-entrance/logs - - name: hadoop-config - mountPath: /opt/hadoop/hadoop-2.7.7/etc/hadoop - - name: hive-config - mountPath: /opt/hive/apache-hive-2.3.6-bin/conf - - name: spark-config - mountPath: /opt/spark/spark-2.4.4-bin-hadoop2.7/conf - imagePullSecrets: - - name: registry-key - volumes: - - name: linkis-ujes-shell-entrance-config - configMap: - name: 
linkis-ujes-shell-entrance-config - # - name: resultset - # nfs: - # path: /data/k8s - # server: 10.206.0.12 - - name: varlog - hostPath: - path: /var/log - - name: eureka-config - configMap: - name: eureka-config - - name: hadoop-config - hostPath: - path: /opt/hadoop/hadoop-2.7.7/etc/hadoop - - name: hive-config - hostPath: - path: /opt/hive/apache-hive-2.3.6-bin/conf - - name: spark-config - hostPath: - path: /opt/spark/spark-2.4.4-bin-hadoop2.7/conf diff --git a/k8s/linkis-ujes-spark-enginemanager-configmap.yaml b/k8s/linkis-ujes-spark-enginemanager-configmap.yaml deleted file mode 100644 index 24971c3643b1abb139ddbb739014f6b94ed91bc5..0000000000000000000000000000000000000000 --- a/k8s/linkis-ujes-spark-enginemanager-configmap.yaml +++ /dev/null @@ -1,43 +0,0 @@ -# -# Copyright 2019 WeBank -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -apiVersion: v1 -data: - linkis.properties: |+ - - - wds.linkis.server.restful.scan.packages=com.webank.wedatasphere.linkis.entrance.restful - wds.linkis.engine.application.name=sparkEngine - wds.linkis.server.component.exclude.packages=com.webank.wedatasphere.linkis.engine.,com.webank.wedatasphere.linkis.udf. 
- wds.linkis.server.version=v1 - wds.linkis.console.config.application.name=cloud-publicservice - wds.linkis.engine.udf.app.name=cloud-publicservice - wds.linkis.enginemanager.sudo.script=/appcom/tmp/johnnwang/Install/linkis-ujes-spark-enginemanager/bin/rootScript.sh - wds.linkis.spark.driver.conf.mainjar= - #spark.config.dir=/appcom/config/spark-config - -kind: ConfigMap -metadata: - managedFields: - - apiVersion: v1 - fieldsType: FieldsV1 - fieldsV1: - f:data: - .: {} - f:linkis-ujes-spark-enginemanager.properties: {} - manager: kubectl - operation: Update - name: linkis-ujes-spark-enginemanager-config - namespace: default - diff --git a/k8s/linkis-ujes-spark-enginemanager-deployment.yaml b/k8s/linkis-ujes-spark-enginemanager-deployment.yaml deleted file mode 100644 index d5923b68cd453d2d38ebd73c884b7bbac61dd1aa..0000000000000000000000000000000000000000 --- a/k8s/linkis-ujes-spark-enginemanager-deployment.yaml +++ /dev/null @@ -1,124 +0,0 @@ -# -# Copyright 2019 WeBank -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-apiVersion: apps/v1 -kind: Deployment -metadata: - name: linkis-ujes-spark-enginemanager-deployment -spec: - replicas: 1 - selector: - matchLabels: - app: linkis-ujes-spark-enginemanager - # release: dev - strategy: - rollingUpdate: - maxSurge: 1 - maxUnavailable: 1 - type: RollingUpdate - template: - metadata: - labels: - app: linkis-ujes-spark-enginemanager - # release: dev - spec: - affinity: - podAntiAffinity: - requiredDuringSchedulingIgnoredDuringExecution: - - labelSelector: - matchExpressions: - - key: "app" - operator: In - values: - - linkis-ujes-spark-enginemanager - - linkis-ujes-hive-enginemanager - topologyKey: "kubernetes.io/hostname" - nodeAffinity: - preferredDuringSchedulingIgnoredDuringExecution: - - weight: 1 - preference: - matchExpressions: - - key: kubernetes.io/hostname - operator: In - values: - - node2 - containers: - - name: linkis-ujes-spark-enginemanager - image: wedatasphere/linkis:linkis-ujes-spark-enginemanager-0.10.0 - lifecycle: - preStop: - exec: - command: ["sh","/opt/linkis/linkis-ujes-spark-enginemanager/bin/stop.sh"] - imagePullPolicy: Always - ports: - - name: http - containerPort: 21001 - livenessProbe: - tcpSocket: - port: 21001 - initialDelaySeconds: 15 - periodSeconds: 20 - env: - - name: eurekaurl - valueFrom: - configMapKeyRef: - name: eureka-config - key: eurekaUrl - - name: EUREKA_URL - valueFrom: - configMapKeyRef: - name: eureka-config - key: eurekaUrl - - name: SERVER_HEAP_SIZE - value: 1024M - - name: START_PORT - value: "21001" - volumeMounts: - - name: linkis-ujes-spark-enginemanager-config - mountPath: /opt/linkis/conf - # - name: resultset - # mountPath: /opt/linkis/data - - name: varlog - mountPath: /opt/linkis/linkis-ujes-spark-enginemanager/logs - - name: hadoop-config - mountPath: /opt/hadoop/hadoop-2.7.7/etc/hadoop - - name: hive-config - mountPath: /opt/hive/apache-hive-2.3.6-bin/conf - - name: spark-config - mountPath: /opt/spark/spark-2.4.4-bin-hadoop2.7/conf - imagePullSecrets: - - name: registry-key 
- volumes: - - name: linkis-ujes-spark-enginemanager-config - configMap: - name: linkis-ujes-spark-enginemanager-config - # - name: resultset - # nfs: - # path: /data/k8s - # server: 10.206.0.12 - - name: varlog - hostPath: - path: /var/log - - name: eureka-config - configMap: - name: eureka-config - - name: hadoop-config - hostPath: - path: /opt/hadoop/hadoop-2.7.7/etc/hadoop - - name: hive-config - hostPath: - path: /opt/hive/apache-hive-2.3.6-bin/conf - - name: spark-config - hostPath: - path: /opt/spark/spark-2.4.4-bin-hadoop2.7/conf diff --git a/k8s/linkis-ujes-spark-entrance-configmap.yaml b/k8s/linkis-ujes-spark-entrance-configmap.yaml deleted file mode 100644 index 408c7b222978b61019910dbd22aa8fd7415ec832..0000000000000000000000000000000000000000 --- a/k8s/linkis-ujes-spark-entrance-configmap.yaml +++ /dev/null @@ -1,54 +0,0 @@ -# -# Copyright 2019 WeBank -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-apiVersion: v1 -data: - linkis.properties: |+ - - - wds.linkis.server.restful.scan.packages=com.webank.wedatasphere.linkis.entrance.restful - - wds.linkis.engine.application.name=sparkEngine - wds.linkis.enginemanager.application.name=sparkEngineManager - - wds.linkis.query.application.name=cloud-publicservice - - wds.linkis.console.configuration.application.name=cloud-publicservice - wds.linkis.engine.creation.wait.time.max=20m - wds.linkis.console.variable.application.name=cloud-publicservice - - wds.linkis.server.version=v1 - #hadoop.config.dir=/appcom/config/hadoop-config - #spark.config.dir=/appcom/config/spark-config - - wds.linkis.entrance.config.logPath=file:///appcom/logs/dataworkcloud/dwc/ - - wds.linkis.server.socket.mode=true - - wds.linkis.resultSet.store.path=hdfs:/// - -kind: ConfigMap -metadata: - managedFields: - - apiVersion: v1 - fieldsType: FieldsV1 - fieldsV1: - f:data: - .: {} - f:linkis-ujes-spark-entrance.properties: {} - manager: kubectl - operation: Update - name: linkis-ujes-spark-entrance-config - namespace: default - diff --git a/k8s/linkis-ujes-spark-entrance-deployment.yaml b/k8s/linkis-ujes-spark-entrance-deployment.yaml deleted file mode 100644 index b3c783ce6dc87a04afb3313b5bb23081c149cec0..0000000000000000000000000000000000000000 --- a/k8s/linkis-ujes-spark-entrance-deployment.yaml +++ /dev/null @@ -1,123 +0,0 @@ -# -# Copyright 2019 WeBank -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-apiVersion: apps/v1 -kind: Deployment -metadata: - name: linkis-ujes-spark-entrance-deployment -spec: - replicas: 1 - selector: - matchLabels: - app: linkis-ujes-spark-entrance - # release: dev - strategy: - rollingUpdate: - maxSurge: 1 - maxUnavailable: 1 - type: RollingUpdate - template: - metadata: - labels: - app: linkis-ujes-spark-entrance - # release: dev - spec: - affinity: - podAntiAffinity: - requiredDuringSchedulingIgnoredDuringExecution: - - labelSelector: - matchExpressions: - - key: "app" - operator: In - values: - - linkis-ujes-spark-entrance - topologyKey: "kubernetes.io/hostname" - nodeAffinity: - preferredDuringSchedulingIgnoredDuringExecution: - - weight: 1 - preference: - matchExpressions: - - key: kubernetes.io/hostname - operator: In - values: - - node2 - containers: - - name: linkis-ujes-spark-entrance - image: wedatasphere/linkis:linkis-ujes-spark-entrance-0.10.0 - lifecycle: - preStop: - exec: - command: ["sh","/opt/linkis/linkis-ujes-spark-entrance/bin/stop.sh"] - imagePullPolicy: Always - ports: - - name: http - containerPort: 21000 - livenessProbe: - tcpSocket: - port: 21000 - initialDelaySeconds: 15 - periodSeconds: 20 - env: - - name: eurekaurl - valueFrom: - configMapKeyRef: - name: eureka-config - key: eurekaUrl - - name: EUREKA_URL - valueFrom: - configMapKeyRef: - name: eureka-config - key: eurekaUrl - - name: SERVER_HEAP_SIZE - value: 1024M - - name: START_PORT - value: "21000" - volumeMounts: - - name: linkis-ujes-spark-entrance-config - mountPath: /opt/linkis/conf - # - name: resultset - # mountPath: /opt/linkis/data - - name: varlog - mountPath: /opt/linkis/linkis-ujes-spark-entrance/logs - - name: hadoop-config - mountPath: /opt/hadoop/hadoop-2.7.7/etc/hadoop - - name: hive-config - mountPath: /opt/hive/apache-hive-2.3.6-bin/conf - - name: spark-config - mountPath: /opt/spark/spark-2.4.4-bin-hadoop2.7/conf - imagePullSecrets: - - name: registry-key - volumes: - - name: linkis-ujes-spark-entrance-config - configMap: - name: 
linkis-ujes-spark-entrance-config - # - name: resultset - # nfs: - # path: /data/k8s - # server: 10.206.0.12 - - name: varlog - hostPath: - path: /var/log - - name: eureka-config - configMap: - name: eureka-config - - name: hadoop-config - hostPath: - path: /opt/hadoop/hadoop-2.7.7/etc/hadoop - - name: hive-config - hostPath: - path: /opt/hive/apache-hive-2.3.6-bin/conf - - name: spark-config - hostPath: - path: /opt/spark/spark-2.4.4-bin-hadoop2.7/conf diff --git a/k8s/package.sh b/k8s/package.sh deleted file mode 100644 index c58999817cbbc9902e2b4bcd51f5b125aa254403..0000000000000000000000000000000000000000 --- a/k8s/package.sh +++ /dev/null @@ -1,40 +0,0 @@ -#!/bin/bash - -mvn clean package -f bml/bmlserver/pom_k8s.xml - -mvn clean package -f datasource/datasourcemanager/server/pom_k8s.xml - -mvn clean package -f datasource/metadatamanager/server/pom_k8s.xml -mvn clean package -f datasource/metadatamanager/service/mysql/pom_k8s.xml -mvn clean package -f datasource/metadatamanager/service/elasticsearch/pom_k8s.xml -mvn clean package -f datasource/metadatamanager/service/hive/pom_k8s.xml - -mvn clean package -f gateway/gateway-ujes-support/pom_k8s.xml - -mvn clean package -f resourceManager/resourcemanagerserver/pom_k8s.xml - -mvn clean package -f contextservice/cs-server/pom_k8s.xml - -mvn clean package -f metadata/pom_k8s.xml - -mvn clean package -f publicService/pom_k8s.xml - -mvn clean package -f ujes/definedEngines/spark/enginemanager/pom_k8s.xml -mvn clean package -f ujes/definedEngines/spark/entrance/pom_k8s.xml - -mvn clean package -f ujes/definedEngines/hive/enginemanager/pom_k8s.xml -mvn clean package -f ujes/definedEngines/hive/entrance/pom_k8s.xml - -mvn clean package -f ujes/definedEngines/python/enginemanager/pom_k8s.xml -mvn clean package -f ujes/definedEngines/python/entrance/pom_k8s.xml - -mvn clean package -f ujes/definedEngines/pipeline/enginemanager/pom_k8s.xml -mvn clean package -f ujes/definedEngines/pipeline/entrance/pom_k8s.xml - -mvn clean 
package -f ujes/definedEngines/jdbc/entrance/pom_k8s.xml - -mvn clean package -f ujes/definedEngines/mlsql/entrance/pom_k8s.xml - -mvn clean package -f ujes/definedEngines/shell/entrance/pom_k8s.xml -mvn clean package -f ujes/definedEngines/shell/enginemanager/pom_k8s.xml - diff --git a/k8s/push.sh b/k8s/push.sh deleted file mode 100644 index 49464f74a9b7269485677f9058257a79de596997..0000000000000000000000000000000000000000 --- a/k8s/push.sh +++ /dev/null @@ -1,41 +0,0 @@ -#!/bin/bash - -docker push $HARBOR_LINKIS/linkis:linkis-bml-0.10.0 - -docker push $HARBOR_LINKIS/linkis:linkis-dsm-server-0.10.0 - -docker push $HARBOR_LINKIS/linkis:linkis-mdm-server-0.10.0 -docker push $HARBOR_LINKIS/linkis:linkis-mdm-service-mysql-0.10.0 -docker push $HARBOR_LINKIS/linkis:linkis-mdm-service-es-0.10.0 -docker push $HARBOR_LINKIS/linkis:linkis-mdm-service-hive-0.10.0 - -docker push $HARBOR_LINKIS/linkis:linkis-gateway-0.10.0 - -docker push $HARBOR_LINKIS/linkis:linkis-resourcemanager-0.10.0 - -docker push $HARBOR_LINKIS/linkis:linkis-cs-server-0.10.0 - -docker push $HARBOR_LINKIS/linkis:linkis-metadata-0.10.0 - -docker push $HARBOR_LINKIS/linkis:linkis-publicservice-0.10.0 - -docker push $HARBOR_LINKIS/linkis:linkis-ujes-spark-enginemanager-0.10.0 -docker push $HARBOR_LINKIS/linkis:linkis-ujes-spark-entrance-0.10.0 - -docker push $HARBOR_LINKIS/linkis:linkis-ujes-hive-enginemanager-0.10.0 -docker push $HARBOR_LINKIS/linkis:linkis-ujes-hive-entrance-0.10.0 - -docker push $HARBOR_LINKIS/linkis:linkis-ujes-python-enginemanager-0.10.0 -docker push $HARBOR_LINKIS/linkis:linkis-ujes-python-entrance-0.10.0 - -docker push $HARBOR_LINKIS/linkis:linkis-ujes-pipeline-enginemanager-0.10.0 -docker push $HARBOR_LINKIS/linkis:linkis-ujes-pipeline-entrance-0.10.0 - -docker push $HARBOR_LINKIS/linkis:linkis-ujes-jdbc-enginemanager-0.10.0 - -docker push $HARBOR_LINKIS/linkis:linkis-ujes-mlsql-entrance-0.10.0 - -docker push $HARBOR_LINKIS/linkis:linkis-ujes-shell-enginemanager-0.10.0 -docker push 
$HARBOR_LINKIS/linkis:linkis-ujes-shell-entrance-0.10.0 - - diff --git a/k8s/register.yaml b/k8s/register.yaml deleted file mode 100644 index c4e9abe02218b8f4e6e4b3bdb5e4d9cbdc7a66bc..0000000000000000000000000000000000000000 --- a/k8s/register.yaml +++ /dev/null @@ -1,5 +0,0 @@ -kubectl create secret docker-registry registry-key \ ---docker-server=https://wedatasphere/linkis \ ---docker-username=wedatasphere \ ---docker-password=***** \ ---docker-email=wedatasphere@webank.com \ No newline at end of file diff --git a/k8s/scriptis.yaml b/k8s/scriptis.yaml deleted file mode 100644 index c27089ad746cf6477c4a73feda9ac94734a008bb..0000000000000000000000000000000000000000 --- a/k8s/scriptis.yaml +++ /dev/null @@ -1,81 +0,0 @@ -# -# Copyright 2019 WeBank -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-apiVersion: apps/v1 -kind: Deployment -metadata: - name: scriptis -spec: - replicas: 1 - selector: - matchLabels: - app: scriptis - strategy: - rollingUpdate: - maxSurge: 1 - maxUnavailable: 1 - type: RollingUpdate - template: - metadata: - labels: - app: scriptis - spec: - affinity: - podAntiAffinity: - requiredDuringSchedulingIgnoredDuringExecution: - - labelSelector: - matchExpressions: - - key: "app" - operator: In - values: - - scriptis - topologyKey: "kubernetes.io/hostname" - nodeAffinity: - preferredDuringSchedulingIgnoredDuringExecution: - - weight: 1 - preference: - matchExpressions: - - key: kubernetes.io/hostname - operator: In - values: - - node2 - containers: - - name: scriptis - image: wedatasphere/linkis:scriptis - imagePullPolicy: Always - ports: - - name: http - containerPort: 80 - livenessProbe: - tcpSocket: - port: 80 - initialDelaySeconds: 15 - periodSeconds: 20 - imagePullSecrets: - - name: registry-key ---- - -apiVersion: v1 -kind: Service -metadata: - name: scriptis-service -spec: - selector: - app: scriptis - ports: - - name: http - port: 80 - protocol: TCP - targetPort: 80 - clusterIP: None diff --git a/linkis-commons/linkis-common/pom.xml b/linkis-commons/linkis-common/pom.xml new file mode 100644 index 0000000000000000000000000000000000000000..f58dad3fb5bb816ddad43925468e6193cb0547b3 --- /dev/null +++ b/linkis-commons/linkis-common/pom.xml @@ -0,0 +1,141 @@ + + + + + + linkis + com.webank.wedatasphere.linkis + 1.0.0-RC1 + + 4.0.0 + + linkis-common + jar + + + + + + org.scala-lang + scala-library + + + org.scala-lang + scala-compiler + + + org.scala-lang + scala-reflect + + + org.scala-lang + scalap + + + commons-lang + commons-lang + + + commons-io + commons-io + ${commons-io.version} + + + commons-collections + commons-collections + + + com.google.guava + guava + + + + com.fasterxml.jackson.core + jackson-databind + ${fasterxml.jackson.version} + + + com.fasterxml.jackson.core + jackson-annotations + ${fasterxml.jackson.version} + + + 
com.fasterxml.jackson.module + jackson-module-scala_${scala.binary.version} + ${fasterxml.jackson.version} + + + com.google.guava + guava + + + org.scala-lang + scala-library + + + org.scala-lang + scala-reflect + + + + + + org.slf4j + slf4j-api + ${slf4j.version} + + + + commons-net + commons-net + 3.1 + + + + commons-codec + commons-codec + 1.10 + + + + + + + + + org.apache.maven.plugins + maven-deploy-plugin + + + + net.alchim31.maven + scala-maven-plugin + + + org.apache.maven.plugins + maven-jar-plugin + + + + + src/main/resources + + + ${project.artifactId}-${project.version} + + + + \ No newline at end of file diff --git a/core/common/src/main/java/com/webank/wedatasphere/linkis/common/exception/DWCException.java b/linkis-commons/linkis-common/src/main/java/com/webank/wedatasphere/linkis/common/exception/DWCException.java similarity index 100% rename from core/common/src/main/java/com/webank/wedatasphere/linkis/common/exception/DWCException.java rename to linkis-commons/linkis-common/src/main/java/com/webank/wedatasphere/linkis/common/exception/DWCException.java diff --git a/core/common/src/main/java/com/webank/wedatasphere/linkis/common/exception/DWCRetryException.java b/linkis-commons/linkis-common/src/main/java/com/webank/wedatasphere/linkis/common/exception/DWCRetryException.java similarity index 100% rename from core/common/src/main/java/com/webank/wedatasphere/linkis/common/exception/DWCRetryException.java rename to linkis-commons/linkis-common/src/main/java/com/webank/wedatasphere/linkis/common/exception/DWCRetryException.java diff --git a/core/common/src/main/java/com/webank/wedatasphere/linkis/common/exception/DWCRuntimeException.java b/linkis-commons/linkis-common/src/main/java/com/webank/wedatasphere/linkis/common/exception/DWCRuntimeException.java similarity index 93% rename from core/common/src/main/java/com/webank/wedatasphere/linkis/common/exception/DWCRuntimeException.java rename to 
linkis-commons/linkis-common/src/main/java/com/webank/wedatasphere/linkis/common/exception/DWCRuntimeException.java index a1cae017a94f70ce8392db1102686a4da6b8d63a..2910dcd8708c73128dd425495a81c23835c499b6 100644 --- a/core/common/src/main/java/com/webank/wedatasphere/linkis/common/exception/DWCRuntimeException.java +++ b/linkis-commons/linkis-common/src/main/java/com/webank/wedatasphere/linkis/common/exception/DWCRuntimeException.java @@ -25,9 +25,7 @@ package com.webank.wedatasphere.linkis.common.exception; import java.util.HashMap; import java.util.Map; -import static com.webank.wedatasphere.linkis.common.exception.DWCException.applicationName; -import static com.webank.wedatasphere.linkis.common.exception.DWCException.hostname; -import static com.webank.wedatasphere.linkis.common.exception.DWCException.hostPort; +import static com.webank.wedatasphere.linkis.common.exception.DWCException.*; public abstract class DWCRuntimeException extends RuntimeException{ diff --git a/core/common/src/main/java/com/webank/wedatasphere/linkis/common/exception/DwcCommonErrorException.java b/linkis-commons/linkis-common/src/main/java/com/webank/wedatasphere/linkis/common/exception/DwcCommonErrorException.java similarity index 100% rename from core/common/src/main/java/com/webank/wedatasphere/linkis/common/exception/DwcCommonErrorException.java rename to linkis-commons/linkis-common/src/main/java/com/webank/wedatasphere/linkis/common/exception/DwcCommonErrorException.java diff --git a/core/common/src/main/java/com/webank/wedatasphere/linkis/common/exception/ErrorException.java b/linkis-commons/linkis-common/src/main/java/com/webank/wedatasphere/linkis/common/exception/ErrorException.java similarity index 100% rename from core/common/src/main/java/com/webank/wedatasphere/linkis/common/exception/ErrorException.java rename to linkis-commons/linkis-common/src/main/java/com/webank/wedatasphere/linkis/common/exception/ErrorException.java diff --git 
a/core/common/src/main/java/com/webank/wedatasphere/linkis/common/exception/ExceptionLevel.java b/linkis-commons/linkis-common/src/main/java/com/webank/wedatasphere/linkis/common/exception/ExceptionLevel.java similarity index 100% rename from core/common/src/main/java/com/webank/wedatasphere/linkis/common/exception/ExceptionLevel.java rename to linkis-commons/linkis-common/src/main/java/com/webank/wedatasphere/linkis/common/exception/ExceptionLevel.java diff --git a/core/common/src/main/java/com/webank/wedatasphere/linkis/common/exception/ExceptionManager.java b/linkis-commons/linkis-common/src/main/java/com/webank/wedatasphere/linkis/common/exception/ExceptionManager.java similarity index 100% rename from core/common/src/main/java/com/webank/wedatasphere/linkis/common/exception/ExceptionManager.java rename to linkis-commons/linkis-common/src/main/java/com/webank/wedatasphere/linkis/common/exception/ExceptionManager.java diff --git a/core/common/src/main/java/com/webank/wedatasphere/linkis/common/exception/FatalException.java b/linkis-commons/linkis-common/src/main/java/com/webank/wedatasphere/linkis/common/exception/FatalException.java similarity index 98% rename from core/common/src/main/java/com/webank/wedatasphere/linkis/common/exception/FatalException.java rename to linkis-commons/linkis-common/src/main/java/com/webank/wedatasphere/linkis/common/exception/FatalException.java index 4c66a525a038e496889317ec796e14123bb8069e..9a49e722d1e27cde758c01bb0a9143ebe5f7c9aa 100644 --- a/core/common/src/main/java/com/webank/wedatasphere/linkis/common/exception/FatalException.java +++ b/linkis-commons/linkis-common/src/main/java/com/webank/wedatasphere/linkis/common/exception/FatalException.java @@ -22,8 +22,6 @@ */ package com.webank.wedatasphere.linkis.common.exception; -import java.util.Map; - public class FatalException extends DWCException{ private ExceptionLevel level = ExceptionLevel.FATAL; public FatalException(int errCode, String desc){ diff --git 
a/core/common/src/main/java/com/webank/wedatasphere/linkis/common/exception/WarnException.java b/linkis-commons/linkis-common/src/main/java/com/webank/wedatasphere/linkis/common/exception/WarnException.java similarity index 98% rename from core/common/src/main/java/com/webank/wedatasphere/linkis/common/exception/WarnException.java rename to linkis-commons/linkis-common/src/main/java/com/webank/wedatasphere/linkis/common/exception/WarnException.java index 64aeb27e30b89277225f8e8934c72b42eccbed74..781b24d2fbbb701f8c2b0225855aa6a35008bd0b 100644 --- a/core/common/src/main/java/com/webank/wedatasphere/linkis/common/exception/WarnException.java +++ b/linkis-commons/linkis-common/src/main/java/com/webank/wedatasphere/linkis/common/exception/WarnException.java @@ -22,8 +22,6 @@ */ package com.webank.wedatasphere.linkis.common.exception; -import java.util.Map; - public class WarnException extends DWCRuntimeException{ private ExceptionLevel level = ExceptionLevel.WARN; public WarnException(int errCode, String desc){ diff --git a/core/common/src/main/java/com/webank/wedatasphere/linkis/common/io/Fs.java b/linkis-commons/linkis-common/src/main/java/com/webank/wedatasphere/linkis/common/io/Fs.java similarity index 95% rename from core/common/src/main/java/com/webank/wedatasphere/linkis/common/io/Fs.java rename to linkis-commons/linkis-common/src/main/java/com/webank/wedatasphere/linkis/common/io/Fs.java index 8f522d94d066d7cbedd66651e752db03a97f26e8..7e1d5c2b174be0356011ab08e6bb4e3ec02f4afa 100644 --- a/core/common/src/main/java/com/webank/wedatasphere/linkis/common/io/Fs.java +++ b/linkis-commons/linkis-common/src/main/java/com/webank/wedatasphere/linkis/common/io/Fs.java @@ -16,6 +16,7 @@ package com.webank.wedatasphere.linkis.common.io; +import java.io.Closeable; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; @@ -25,7 +26,7 @@ import java.util.Map; /** * Created by enjoyyin on 2017/2/4. 
*/ -public interface Fs { +public interface Fs extends Closeable { public abstract void init(Map properties) throws IOException; @@ -53,6 +54,4 @@ public interface Fs { public abstract boolean renameTo(FsPath oldDest, FsPath newDest) throws IOException; - public abstract void close() throws IOException; - } diff --git a/core/common/src/main/java/com/webank/wedatasphere/linkis/common/io/FsPath.java b/linkis-commons/linkis-common/src/main/java/com/webank/wedatasphere/linkis/common/io/FsPath.java similarity index 99% rename from core/common/src/main/java/com/webank/wedatasphere/linkis/common/io/FsPath.java rename to linkis-commons/linkis-common/src/main/java/com/webank/wedatasphere/linkis/common/io/FsPath.java index a25efb56324f2bf21c99b8d4c109873e9dd702fc..dac533b5bb7d5386c967442f410ae06cb68d8b28 100644 --- a/core/common/src/main/java/com/webank/wedatasphere/linkis/common/io/FsPath.java +++ b/linkis-commons/linkis-common/src/main/java/com/webank/wedatasphere/linkis/common/io/FsPath.java @@ -1,12 +1,9 @@ /* * Copyright 2019 WeBank - * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * * http://www.apache.org/licenses/LICENSE-2.0 - * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -35,6 +32,7 @@ import java.util.regex.Pattern; */ public class FsPath { + public static final String CUR_DIR = "."; public static final boolean WINDOWS = System.getProperty("os.name").startsWith("Windows"); diff --git a/core/common/src/main/java/com/webank/wedatasphere/linkis/common/utils/ByteTimeUtils.java b/linkis-commons/linkis-common/src/main/java/com/webank/wedatasphere/linkis/common/utils/ByteTimeUtils.java similarity index 98% rename from core/common/src/main/java/com/webank/wedatasphere/linkis/common/utils/ByteTimeUtils.java rename to linkis-commons/linkis-common/src/main/java/com/webank/wedatasphere/linkis/common/utils/ByteTimeUtils.java index 8486ec2fcaa5551c4dd8f16c62a5ca55ced101cc..18f0a54ced4fd035167de121a416da954d2ebf99 100644 --- a/core/common/src/main/java/com/webank/wedatasphere/linkis/common/utils/ByteTimeUtils.java +++ b/linkis-commons/linkis-common/src/main/java/com/webank/wedatasphere/linkis/common/utils/ByteTimeUtils.java @@ -235,16 +235,16 @@ public class ByteTimeUtils { double value; String unit; - if (size >= 2*TB) { + if (size >= 2*TB || -2*TB >= size) { value = size * 1f / TB; unit = "TB"; - } else if (size >= 2*GB) { + } else if (size >= 2*GB || -2*GB >= size) { value = size * 1f / GB; unit = "GB"; - } else if (size >= 2*MB) { + } else if (size >= 2*MB || -2*MB >= size) { value = size * 1f / MB; unit = "MB"; - } else if (size >= 2*KB) { + } else if (size >= 2*KB || -2*KB >= size) { value = size * 1f / KB; unit = "KB"; } else { diff --git a/core/common/src/main/java/com/webank/wedatasphere/linkis/common/utils/DESUtil.java b/linkis-commons/linkis-common/src/main/java/com/webank/wedatasphere/linkis/common/utils/DESUtil.java similarity index 99% rename from core/common/src/main/java/com/webank/wedatasphere/linkis/common/utils/DESUtil.java rename to linkis-commons/linkis-common/src/main/java/com/webank/wedatasphere/linkis/common/utils/DESUtil.java index 14efaf02c1de119b9250447639f82bfe12f93130..885ab4d3a35f696a8f4d0cb9585ada3506186b15 
100644 --- a/core/common/src/main/java/com/webank/wedatasphere/linkis/common/utils/DESUtil.java +++ b/linkis-commons/linkis-common/src/main/java/com/webank/wedatasphere/linkis/common/utils/DESUtil.java @@ -24,7 +24,6 @@ import javax.crypto.Cipher; import javax.crypto.SecretKey; import javax.crypto.SecretKeyFactory; import javax.crypto.spec.DESKeySpec; -import java.io.IOException; import java.security.SecureRandom; public class DESUtil { diff --git a/core/common/src/main/scala/com/webank/wedatasphere/linkis/common/ServiceInstance.scala b/linkis-commons/linkis-common/src/main/scala/com/webank/wedatasphere/linkis/common/ServiceInstance.scala similarity index 100% rename from core/common/src/main/scala/com/webank/wedatasphere/linkis/common/ServiceInstance.scala rename to linkis-commons/linkis-common/src/main/scala/com/webank/wedatasphere/linkis/common/ServiceInstance.scala diff --git a/core/common/src/main/scala/com/webank/wedatasphere/linkis/common/collection/LoopArray.scala b/linkis-commons/linkis-common/src/main/scala/com/webank/wedatasphere/linkis/common/collection/LoopArray.scala similarity index 100% rename from core/common/src/main/scala/com/webank/wedatasphere/linkis/common/collection/LoopArray.scala rename to linkis-commons/linkis-common/src/main/scala/com/webank/wedatasphere/linkis/common/collection/LoopArray.scala diff --git a/core/common/src/main/scala/com/webank/wedatasphere/linkis/common/conf/BDPConfiguration.scala b/linkis-commons/linkis-common/src/main/scala/com/webank/wedatasphere/linkis/common/conf/BDPConfiguration.scala similarity index 66% rename from core/common/src/main/scala/com/webank/wedatasphere/linkis/common/conf/BDPConfiguration.scala rename to linkis-commons/linkis-common/src/main/scala/com/webank/wedatasphere/linkis/common/conf/BDPConfiguration.scala index 41c10f7536fe77f690ee28cc6ca638a37912a552..5b04946b4b1e674dbe640d54e513633678db9b5f 100644 --- 
a/core/common/src/main/scala/com/webank/wedatasphere/linkis/common/conf/BDPConfiguration.scala +++ b/linkis-commons/linkis-common/src/main/scala/com/webank/wedatasphere/linkis/common/conf/BDPConfiguration.scala @@ -16,20 +16,21 @@ package com.webank.wedatasphere.linkis.common.conf -import java.io._ +import java.io.{File, FileInputStream, IOException, InputStream} import java.util.Properties + import com.webank.wedatasphere.linkis.common.utils.Logging import org.apache.commons.io.IOUtils import org.apache.commons.lang.StringUtils + import scala.collection.JavaConversions._ -/** - * Created by enjoyyin on 2018/1/9. - */ + private[conf] object BDPConfiguration extends Logging { val DEFAULT_PROPERTY_FILE_NAME = "linkis.properties" - val CHARSET_NAME = "utf-8" + + val DEFAULT_SERVER_CONF_FILE_NAME = "linkis-server.properties" private val config = new Properties private val sysProps = sys.props @@ -37,24 +38,50 @@ private[conf] object BDPConfiguration extends Logging { private val env = sys.env - val propertyFile = sysProps.getOrElse("wds.linkis.configuration", DEFAULT_PROPERTY_FILE_NAME) - private val configFileURL = getClass.getClassLoader.getResource(propertyFile) - if (configFileURL != null && new File(configFileURL.getPath).exists) initConfig(config, configFileURL.getPath) - else warn(s"******************************** Notice: The dataworkcloud configuration file $propertyFile is not exists! ***************************") + + private def init: Unit = { + + // load pub linkis conf + val propertyFile = sysProps.getOrElse("wds.linkis.configuration", DEFAULT_PROPERTY_FILE_NAME) + val configFileURL = getClass.getClassLoader.getResource(propertyFile) + if (configFileURL != null && new File(configFileURL.getPath).exists) initConfig(config, configFileURL.getPath) + else warn(s"******************************** Notice: The Linkis configuration file $propertyFile is not exists! 
***************************") + + // load pub linkis conf + val serverConf = sysProps.getOrElse("wds.linkis.server.conf", DEFAULT_SERVER_CONF_FILE_NAME) + val serverConfFileURL = getClass.getClassLoader.getResource(serverConf) + if (serverConfFileURL != null && new File(serverConfFileURL.getPath).exists) initConfig(config, serverConfFileURL.getPath) + else warn(s"******************************** Notice: The Linkis serverConf file $serverConfFileURL is not exists! ***************************") + + // load server confs + val propertyFileOptions = sysProps.get("wds.linkis.server.confs") + if (propertyFileOptions.isDefined) { + val propertyFiles = propertyFileOptions.get.split(",") + propertyFiles.foreach { propertyF => + val configFileURL = getClass.getClassLoader.getResource(propertyF) + if (configFileURL != null && new File(configFileURL.getPath).exists) initConfig(config, configFileURL.getPath) + else warn(s"******************************** Notice: The Linkis configuration file $propertyF is not exists! 
***************************") + } + } + + } + + try { + init + } catch { + case e: Throwable => + warn("Failed to init conf", e) + } private def initConfig(config: Properties, filePath: String) { var inputStream: InputStream = null - var inputStreamReader: InputStreamReader = null try { inputStream = new FileInputStream(filePath) - inputStreamReader = new InputStreamReader(inputStream, CHARSET_NAME) - config.load(inputStreamReader) - } catch { case e: IOException => - error("Can't load " + propertyFile, e) - } finally { - IOUtils.closeQuietly(inputStream) - IOUtils.closeQuietly(inputStreamReader) - } + config.load(inputStream) + } catch { + case e: IOException => + error("Can't load " + filePath, e) + } finally IOUtils.closeQuietly(inputStream) } def getOption(key: String): Option[String] = { diff --git a/core/common/src/main/scala/com/webank/wedatasphere/linkis/common/conf/ByteType.scala b/linkis-commons/linkis-common/src/main/scala/com/webank/wedatasphere/linkis/common/conf/ByteType.scala similarity index 100% rename from core/common/src/main/scala/com/webank/wedatasphere/linkis/common/conf/ByteType.scala rename to linkis-commons/linkis-common/src/main/scala/com/webank/wedatasphere/linkis/common/conf/ByteType.scala diff --git a/core/common/src/main/scala/com/webank/wedatasphere/linkis/common/conf/CommonVars.scala b/linkis-commons/linkis-common/src/main/scala/com/webank/wedatasphere/linkis/common/conf/CommonVars.scala similarity index 100% rename from core/common/src/main/scala/com/webank/wedatasphere/linkis/common/conf/CommonVars.scala rename to linkis-commons/linkis-common/src/main/scala/com/webank/wedatasphere/linkis/common/conf/CommonVars.scala diff --git a/core/common/src/main/scala/com/webank/wedatasphere/linkis/common/conf/Configuration.scala b/linkis-commons/linkis-common/src/main/scala/com/webank/wedatasphere/linkis/common/conf/Configuration.scala similarity index 89% rename from 
core/common/src/main/scala/com/webank/wedatasphere/linkis/common/conf/Configuration.scala rename to linkis-commons/linkis-common/src/main/scala/com/webank/wedatasphere/linkis/common/conf/Configuration.scala index 9b9efbda2b2546dd5f1ce05182bb89d52fad67c7..4e094327edb3434b25f2240018777678dec7d492 100644 --- a/core/common/src/main/scala/com/webank/wedatasphere/linkis/common/conf/Configuration.scala +++ b/linkis-commons/linkis-common/src/main/scala/com/webank/wedatasphere/linkis/common/conf/Configuration.scala @@ -1,12 +1,9 @@ /* * Copyright 2019 WeBank - * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * * http://www.apache.org/licenses/LICENSE-2.0 - * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -31,9 +28,9 @@ object Configuration extends Logging { val IS_TEST_MODE = CommonVars("wds.linkis.test.mode", false) - val hadoopConfDir = CommonVars("hadoop.config.dir", CommonVars("HADOOP_CONF_DIR", "").getValue).getValue + val LINKIS_HOME = CommonVars("wds.linkis.home", CommonVars("LINKIS_HOME", "/appcom/Install/LinkisInstall")) - val GATEWAY_URL: CommonVars[String] = CommonVars[String]("wds.linkis.gateway.url", "http://localhost:9001/") + val GATEWAY_URL: CommonVars[String] = CommonVars[String]("wds.linkis.gateway.url", "http://127.0.0.1:9001/") def getGateWayURL(): String = { val url = GATEWAY_URL.getValue.trim diff --git a/core/common/src/main/scala/com/webank/wedatasphere/linkis/common/conf/DWCArgumentsParser.scala b/linkis-commons/linkis-common/src/main/scala/com/webank/wedatasphere/linkis/common/conf/DWCArgumentsParser.scala similarity index 98% rename from core/common/src/main/scala/com/webank/wedatasphere/linkis/common/conf/DWCArgumentsParser.scala rename to 
linkis-commons/linkis-common/src/main/scala/com/webank/wedatasphere/linkis/common/conf/DWCArgumentsParser.scala index edc384728647b4693e5505384d5555a70b606806..a6d691d2161a0bddf76ddd49810ee42bcf156254 100644 --- a/core/common/src/main/scala/com/webank/wedatasphere/linkis/common/conf/DWCArgumentsParser.scala +++ b/linkis-commons/linkis-common/src/main/scala/com/webank/wedatasphere/linkis/common/conf/DWCArgumentsParser.scala @@ -18,14 +18,14 @@ package com.webank.wedatasphere.linkis.common.conf import org.apache.commons.lang.StringUtils -import scala.collection.{JavaConversions, mutable} import scala.collection.mutable.ArrayBuffer +import scala.collection.{JavaConversions, mutable} /** * Created by enjoyyin on 2018/9/26. */ object DWCArgumentsParser { - protected val DWC_CONF = "--dwc-conf" + protected val DWC_CONF = "--engineconn-conf" protected val SPRING_CONF = "--spring-conf" private var dwcOptionMap = Map.empty[String, String] diff --git a/core/common/src/main/scala/com/webank/wedatasphere/linkis/common/conf/TimeType.scala b/linkis-commons/linkis-common/src/main/scala/com/webank/wedatasphere/linkis/common/conf/TimeType.scala similarity index 100% rename from core/common/src/main/scala/com/webank/wedatasphere/linkis/common/conf/TimeType.scala rename to linkis-commons/linkis-common/src/main/scala/com/webank/wedatasphere/linkis/common/conf/TimeType.scala diff --git a/core/common/src/main/scala/com/webank/wedatasphere/linkis/common/io/FsReader.scala b/linkis-commons/linkis-common/src/main/scala/com/webank/wedatasphere/linkis/common/io/FsReader.scala similarity index 100% rename from core/common/src/main/scala/com/webank/wedatasphere/linkis/common/io/FsReader.scala rename to linkis-commons/linkis-common/src/main/scala/com/webank/wedatasphere/linkis/common/io/FsReader.scala diff --git a/core/common/src/main/scala/com/webank/wedatasphere/linkis/common/io/FsWriter.scala b/linkis-commons/linkis-common/src/main/scala/com/webank/wedatasphere/linkis/common/io/FsWriter.scala 
similarity index 100% rename from core/common/src/main/scala/com/webank/wedatasphere/linkis/common/io/FsWriter.scala rename to linkis-commons/linkis-common/src/main/scala/com/webank/wedatasphere/linkis/common/io/FsWriter.scala diff --git a/core/common/src/main/scala/com/webank/wedatasphere/linkis/common/io/MetaData.scala b/linkis-commons/linkis-common/src/main/scala/com/webank/wedatasphere/linkis/common/io/MetaData.scala similarity index 100% rename from core/common/src/main/scala/com/webank/wedatasphere/linkis/common/io/MetaData.scala rename to linkis-commons/linkis-common/src/main/scala/com/webank/wedatasphere/linkis/common/io/MetaData.scala diff --git a/core/common/src/main/scala/com/webank/wedatasphere/linkis/common/io/Record.scala b/linkis-commons/linkis-common/src/main/scala/com/webank/wedatasphere/linkis/common/io/Record.scala similarity index 100% rename from core/common/src/main/scala/com/webank/wedatasphere/linkis/common/io/Record.scala rename to linkis-commons/linkis-common/src/main/scala/com/webank/wedatasphere/linkis/common/io/Record.scala diff --git a/core/common/src/main/scala/com/webank/wedatasphere/linkis/common/io/resultset/ResultDeserializer.scala b/linkis-commons/linkis-common/src/main/scala/com/webank/wedatasphere/linkis/common/io/resultset/ResultDeserializer.scala similarity index 100% rename from core/common/src/main/scala/com/webank/wedatasphere/linkis/common/io/resultset/ResultDeserializer.scala rename to linkis-commons/linkis-common/src/main/scala/com/webank/wedatasphere/linkis/common/io/resultset/ResultDeserializer.scala diff --git a/core/common/src/main/scala/com/webank/wedatasphere/linkis/common/io/resultset/ResultSerializer.scala b/linkis-commons/linkis-common/src/main/scala/com/webank/wedatasphere/linkis/common/io/resultset/ResultSerializer.scala similarity index 100% rename from core/common/src/main/scala/com/webank/wedatasphere/linkis/common/io/resultset/ResultSerializer.scala rename to 
linkis-commons/linkis-common/src/main/scala/com/webank/wedatasphere/linkis/common/io/resultset/ResultSerializer.scala diff --git a/core/common/src/main/scala/com/webank/wedatasphere/linkis/common/io/resultset/ResultSet.scala b/linkis-commons/linkis-common/src/main/scala/com/webank/wedatasphere/linkis/common/io/resultset/ResultSet.scala similarity index 100% rename from core/common/src/main/scala/com/webank/wedatasphere/linkis/common/io/resultset/ResultSet.scala rename to linkis-commons/linkis-common/src/main/scala/com/webank/wedatasphere/linkis/common/io/resultset/ResultSet.scala diff --git a/core/common/src/main/scala/com/webank/wedatasphere/linkis/common/io/resultset/ResultSetReader.scala b/linkis-commons/linkis-common/src/main/scala/com/webank/wedatasphere/linkis/common/io/resultset/ResultSetReader.scala similarity index 100% rename from core/common/src/main/scala/com/webank/wedatasphere/linkis/common/io/resultset/ResultSetReader.scala rename to linkis-commons/linkis-common/src/main/scala/com/webank/wedatasphere/linkis/common/io/resultset/ResultSetReader.scala diff --git a/core/common/src/main/scala/com/webank/wedatasphere/linkis/common/io/resultset/ResultSetWriter.scala b/linkis-commons/linkis-common/src/main/scala/com/webank/wedatasphere/linkis/common/io/resultset/ResultSetWriter.scala similarity index 100% rename from core/common/src/main/scala/com/webank/wedatasphere/linkis/common/io/resultset/ResultSetWriter.scala rename to linkis-commons/linkis-common/src/main/scala/com/webank/wedatasphere/linkis/common/io/resultset/ResultSetWriter.scala diff --git a/core/common/src/main/scala/com/webank/wedatasphere/linkis/common/listener/Event.scala b/linkis-commons/linkis-common/src/main/scala/com/webank/wedatasphere/linkis/common/listener/Event.scala similarity index 100% rename from core/common/src/main/scala/com/webank/wedatasphere/linkis/common/listener/Event.scala rename to 
linkis-commons/linkis-common/src/main/scala/com/webank/wedatasphere/linkis/common/listener/Event.scala diff --git a/core/common/src/main/scala/com/webank/wedatasphere/linkis/common/listener/EventListener.scala b/linkis-commons/linkis-common/src/main/scala/com/webank/wedatasphere/linkis/common/listener/EventListener.scala similarity index 100% rename from core/common/src/main/scala/com/webank/wedatasphere/linkis/common/listener/EventListener.scala rename to linkis-commons/linkis-common/src/main/scala/com/webank/wedatasphere/linkis/common/listener/EventListener.scala diff --git a/core/common/src/main/scala/com/webank/wedatasphere/linkis/common/listener/ListenerEventBus.scala b/linkis-commons/linkis-common/src/main/scala/com/webank/wedatasphere/linkis/common/listener/ListenerEventBus.scala similarity index 98% rename from core/common/src/main/scala/com/webank/wedatasphere/linkis/common/listener/ListenerEventBus.scala rename to linkis-commons/linkis-common/src/main/scala/com/webank/wedatasphere/linkis/common/listener/ListenerEventBus.scala index 035fd665415d25b8358deac5cbc2f94fa583321d..6919e700e6ab1a906801c27e2c78c0c3cb5c695b 100644 --- a/core/common/src/main/scala/com/webank/wedatasphere/linkis/common/listener/ListenerEventBus.scala +++ b/linkis-commons/linkis-common/src/main/scala/com/webank/wedatasphere/linkis/common/listener/ListenerEventBus.scala @@ -17,9 +17,8 @@ package com.webank.wedatasphere.linkis.common.listener import java.util.concurrent.atomic.{AtomicBoolean, AtomicLong} -import java.util.concurrent.{ArrayBlockingQueue, CopyOnWriteArrayList, Future, LinkedBlockingQueue, TimeoutException} +import java.util.concurrent.{ArrayBlockingQueue, CopyOnWriteArrayList, Future, TimeoutException} -import com.webank.wedatasphere.linkis.common.collection.BlockingLoopArray import com.webank.wedatasphere.linkis.common.utils.{ByteTimeUtils, Logging, Utils} import org.apache.commons.lang.time.DateFormatUtils diff --git 
a/core/common/src/main/scala/com/webank/wedatasphere/linkis/common/listener/SingleThreadListenerBus.scala b/linkis-commons/linkis-common/src/main/scala/com/webank/wedatasphere/linkis/common/listener/SingleThreadListenerBus.scala similarity index 100% rename from core/common/src/main/scala/com/webank/wedatasphere/linkis/common/listener/SingleThreadListenerBus.scala rename to linkis-commons/linkis-common/src/main/scala/com/webank/wedatasphere/linkis/common/listener/SingleThreadListenerBus.scala diff --git a/core/common/src/main/scala/com/webank/wedatasphere/linkis/common/log/LogUtils.scala b/linkis-commons/linkis-common/src/main/scala/com/webank/wedatasphere/linkis/common/log/LogUtils.scala similarity index 100% rename from core/common/src/main/scala/com/webank/wedatasphere/linkis/common/log/LogUtils.scala rename to linkis-commons/linkis-common/src/main/scala/com/webank/wedatasphere/linkis/common/log/LogUtils.scala diff --git a/core/common/src/main/scala/com/webank/wedatasphere/linkis/common/utils/ClassUtils.scala b/linkis-commons/linkis-common/src/main/scala/com/webank/wedatasphere/linkis/common/utils/ClassUtils.scala similarity index 100% rename from core/common/src/main/scala/com/webank/wedatasphere/linkis/common/utils/ClassUtils.scala rename to linkis-commons/linkis-common/src/main/scala/com/webank/wedatasphere/linkis/common/utils/ClassUtils.scala diff --git a/core/common/src/main/scala/com/webank/wedatasphere/linkis/common/utils/FileService.scala b/linkis-commons/linkis-common/src/main/scala/com/webank/wedatasphere/linkis/common/utils/FileService.scala similarity index 100% rename from core/common/src/main/scala/com/webank/wedatasphere/linkis/common/utils/FileService.scala rename to linkis-commons/linkis-common/src/main/scala/com/webank/wedatasphere/linkis/common/utils/FileService.scala diff --git a/core/common/src/main/scala/com/webank/wedatasphere/linkis/common/utils/JavaLog.scala 
b/linkis-commons/linkis-common/src/main/scala/com/webank/wedatasphere/linkis/common/utils/JavaLog.scala similarity index 100% rename from core/common/src/main/scala/com/webank/wedatasphere/linkis/common/utils/JavaLog.scala rename to linkis-commons/linkis-common/src/main/scala/com/webank/wedatasphere/linkis/common/utils/JavaLog.scala diff --git a/linkis-commons/linkis-common/src/main/scala/com/webank/wedatasphere/linkis/common/utils/JsonUtils.scala b/linkis-commons/linkis-common/src/main/scala/com/webank/wedatasphere/linkis/common/utils/JsonUtils.scala new file mode 100644 index 0000000000000000000000000000000000000000..ee2f55bb7d14b21be2e948a3f613f5d464d5f701 --- /dev/null +++ b/linkis-commons/linkis-common/src/main/scala/com/webank/wedatasphere/linkis/common/utils/JsonUtils.scala @@ -0,0 +1,30 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.webank.wedatasphere.linkis.common.utils + +import java.text.SimpleDateFormat + +import com.fasterxml.jackson.databind.ObjectMapper + + +object JsonUtils { + + //TODO add gson + + implicit val jackson = new ObjectMapper().setDateFormat(new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ssZ")) + +} diff --git a/core/common/src/main/scala/com/webank/wedatasphere/linkis/common/utils/LDAPUtils.scala b/linkis-commons/linkis-common/src/main/scala/com/webank/wedatasphere/linkis/common/utils/LDAPUtils.scala similarity index 86% rename from core/common/src/main/scala/com/webank/wedatasphere/linkis/common/utils/LDAPUtils.scala rename to linkis-commons/linkis-common/src/main/scala/com/webank/wedatasphere/linkis/common/utils/LDAPUtils.scala index 3905586825e223a0d344f90362463320ad053a50..971bbab6f25c0790338fe24edd9f3bef77832964 100644 --- a/core/common/src/main/scala/com/webank/wedatasphere/linkis/common/utils/LDAPUtils.scala +++ b/linkis-commons/linkis-common/src/main/scala/com/webank/wedatasphere/linkis/common/utils/LDAPUtils.scala @@ -19,8 +19,8 @@ package com.webank.wedatasphere.linkis.common.utils import java.util.Hashtable import javax.naming.Context import javax.naming.ldap.InitialLdapContext - import com.webank.wedatasphere.linkis.common.conf.CommonVars +import org.apache.commons.lang.StringUtils /** @@ -30,9 +30,12 @@ object LDAPUtils extends Logging { val url = CommonVars("wds.linkis.ldap.proxy.url", "").getValue val baseDN = CommonVars("wds.linkis.ldap.proxy.baseDN", "").getValue + val userNameFormat = CommonVars("wds.linkis.ldap.proxy.userNameFormat", "").getValue def login(userID: String, password: String): Unit = { val env = new Hashtable[String, String]() - val bindDN = userID + val bindDN = if (StringUtils.isBlank(userNameFormat)) userID else { + userNameFormat.split("%s", -1).mkString(userID) + } val bindPassword = password env.put(Context.SECURITY_AUTHENTICATION, "simple") env.put(Context.INITIAL_CONTEXT_FACTORY, "com.sun.jndi.ldap.LdapCtxFactory") 
diff --git a/core/common/src/main/scala/com/webank/wedatasphere/linkis/common/utils/Logging.scala b/linkis-commons/linkis-common/src/main/scala/com/webank/wedatasphere/linkis/common/utils/Logging.scala similarity index 100% rename from core/common/src/main/scala/com/webank/wedatasphere/linkis/common/utils/Logging.scala rename to linkis-commons/linkis-common/src/main/scala/com/webank/wedatasphere/linkis/common/utils/Logging.scala diff --git a/core/common/src/main/scala/com/webank/wedatasphere/linkis/common/utils/OverloadUtils.scala b/linkis-commons/linkis-common/src/main/scala/com/webank/wedatasphere/linkis/common/utils/OverloadUtils.scala similarity index 80% rename from core/common/src/main/scala/com/webank/wedatasphere/linkis/common/utils/OverloadUtils.scala rename to linkis-commons/linkis-common/src/main/scala/com/webank/wedatasphere/linkis/common/utils/OverloadUtils.scala index 78624b595208ffbd9874a0dc997e7708f49bba3c..6cc384fec1acb04857fc9acb36ab5d84e5e84de5 100644 --- a/core/common/src/main/scala/com/webank/wedatasphere/linkis/common/utils/OverloadUtils.scala +++ b/linkis-commons/linkis-common/src/main/scala/com/webank/wedatasphere/linkis/common/utils/OverloadUtils.scala @@ -18,12 +18,18 @@ package com.webank.wedatasphere.linkis.common.utils import java.lang.management.ManagementFactory +import com.sun.management.OperatingSystemMXBean + object OverloadUtils { + def getOSBean: OperatingSystemMXBean = ManagementFactory.getOperatingSystemMXBean.asInstanceOf[OperatingSystemMXBean] + def getProcessMaxMemory: Long = ManagementFactory.getMemoryMXBean.getHeapMemoryUsage.getMax def getProcessUsedMemory: Long = ManagementFactory.getMemoryMXBean.getHeapMemoryUsage.getUsed def getSystemCPUUsed: Float = ManagementFactory.getOperatingSystemMXBean.getSystemLoadAverage.toFloat + def getSystemFreeMemory: Long = getOSBean.getFreePhysicalMemorySize + } diff --git a/core/common/src/main/scala/com/webank/wedatasphere/linkis/common/utils/RSAUtils.scala 
b/linkis-commons/linkis-common/src/main/scala/com/webank/wedatasphere/linkis/common/utils/RSAUtils.scala similarity index 90% rename from core/common/src/main/scala/com/webank/wedatasphere/linkis/common/utils/RSAUtils.scala rename to linkis-commons/linkis-common/src/main/scala/com/webank/wedatasphere/linkis/common/utils/RSAUtils.scala index 8ffcf1433ca8b50d3badc8625d6550c237c3e50f..5598ca33d05f7ff2b22ceb07ebf551959ff07262 100644 --- a/core/common/src/main/scala/com/webank/wedatasphere/linkis/common/utils/RSAUtils.scala +++ b/linkis-commons/linkis-common/src/main/scala/com/webank/wedatasphere/linkis/common/utils/RSAUtils.scala @@ -16,9 +16,10 @@ package com.webank.wedatasphere.linkis.common.utils +import java.nio.charset.StandardCharsets import java.security.{KeyPair, KeyPairGenerator, PrivateKey, PublicKey} -import javax.crypto.Cipher +import javax.crypto.Cipher import org.apache.commons.codec.binary.Hex import org.apache.commons.net.util.Base64 @@ -33,7 +34,10 @@ object RSAUtils { keyPair.generateKeyPair() } def getDefaultPublicKey(): String = { - new String(Base64.encodeBase64(keyPair.getPublic.getEncoded)) + new String(Base64.encodeBase64(keyPair.getPublic.getEncoded), StandardCharsets.UTF_8) + } + def getDefaultPrivateKey(): String = { + new String(Base64.encodeBase64(keyPair.getPrivate.getEncoded), StandardCharsets.UTF_8) } def encrypt(data: Array[Byte], publicKey: PublicKey): Array[Byte] = { val cipher = Cipher.getInstance("RSA") diff --git a/core/common/src/main/scala/com/webank/wedatasphere/linkis/common/utils/RefreshUtils.scala b/linkis-commons/linkis-common/src/main/scala/com/webank/wedatasphere/linkis/common/utils/RefreshUtils.scala similarity index 100% rename from core/common/src/main/scala/com/webank/wedatasphere/linkis/common/utils/RefreshUtils.scala rename to linkis-commons/linkis-common/src/main/scala/com/webank/wedatasphere/linkis/common/utils/RefreshUtils.scala diff --git 
a/core/common/src/main/scala/com/webank/wedatasphere/linkis/common/utils/RetryHandler.scala b/linkis-commons/linkis-common/src/main/scala/com/webank/wedatasphere/linkis/common/utils/RetryHandler.scala similarity index 96% rename from core/common/src/main/scala/com/webank/wedatasphere/linkis/common/utils/RetryHandler.scala rename to linkis-commons/linkis-common/src/main/scala/com/webank/wedatasphere/linkis/common/utils/RetryHandler.scala index d3f1153e2dfb152a0a8f216063fb7ecd02411deb..7342b9e87b31dfb9010aba8eb901aa2a12d1df31 100644 --- a/core/common/src/main/scala/com/webank/wedatasphere/linkis/common/utils/RetryHandler.scala +++ b/linkis-commons/linkis-common/src/main/scala/com/webank/wedatasphere/linkis/common/utils/RetryHandler.scala @@ -16,7 +16,7 @@ package com.webank.wedatasphere.linkis.common.utils -import com.webank.wedatasphere.linkis.common.exception.{DWCRetryException, FatalException} +import com.webank.wedatasphere.linkis.common.exception.FatalException import org.apache.commons.lang.{ClassUtils => CommonClassUtils} import scala.collection.mutable.ArrayBuffer diff --git a/core/common/src/main/scala/com/webank/wedatasphere/linkis/common/utils/ShutdownUtils.scala b/linkis-commons/linkis-common/src/main/scala/com/webank/wedatasphere/linkis/common/utils/ShutdownUtils.scala similarity index 100% rename from core/common/src/main/scala/com/webank/wedatasphere/linkis/common/utils/ShutdownUtils.scala rename to linkis-commons/linkis-common/src/main/scala/com/webank/wedatasphere/linkis/common/utils/ShutdownUtils.scala diff --git a/core/common/src/main/scala/com/webank/wedatasphere/linkis/common/utils/Utils.scala b/linkis-commons/linkis-common/src/main/scala/com/webank/wedatasphere/linkis/common/utils/Utils.scala similarity index 99% rename from core/common/src/main/scala/com/webank/wedatasphere/linkis/common/utils/Utils.scala rename to linkis-commons/linkis-common/src/main/scala/com/webank/wedatasphere/linkis/common/utils/Utils.scala index 
61438c2749a9947bf70f421bc1624a4a4cdc6f11..da6e7016557d1f37eba52ae4a0e7ac3c113454ad 100644 --- a/core/common/src/main/scala/com/webank/wedatasphere/linkis/common/utils/Utils.scala +++ b/linkis-commons/linkis-common/src/main/scala/com/webank/wedatasphere/linkis/common/utils/Utils.scala @@ -284,5 +284,6 @@ object Utils extends Logging { } } + def getJvmUser: String = System.getProperty("user.name") } diff --git a/core/common/src/main/scala/com/webank/wedatasphere/linkis/common/utils/ZipUtils.scala b/linkis-commons/linkis-common/src/main/scala/com/webank/wedatasphere/linkis/common/utils/ZipUtils.scala similarity index 100% rename from core/common/src/main/scala/com/webank/wedatasphere/linkis/common/utils/ZipUtils.scala rename to linkis-commons/linkis-common/src/main/scala/com/webank/wedatasphere/linkis/common/utils/ZipUtils.scala diff --git a/linkis-commons/linkis-hadoop-common/pom.xml b/linkis-commons/linkis-hadoop-common/pom.xml new file mode 100644 index 0000000000000000000000000000000000000000..51a74138fc60677c2c1283f924cf8d7eef3a90d1 --- /dev/null +++ b/linkis-commons/linkis-hadoop-common/pom.xml @@ -0,0 +1,221 @@ + + + + + + linkis + com.webank.wedatasphere.linkis + 1.0.0-RC1 + + 4.0.0 + + linkis-hadoop-common + jar + + + + + com.webank.wedatasphere.linkis + linkis-common + ${linkis.version} + provided + + + + org.apache.hadoop + hadoop-common + ${hadoop.version} + + + log4j + log4j + + + org.mortbay.jetty + jetty + + + org.mortbay.jetty + jetty-util + + + com.sun.jersey + jersey-core + + + com.sun.jersey + jersey-server + + + com.sun.jersey + jersey-json + + + jsr311-api + javax.ws.rs + + + net.java.dev.jets3t + jets3t + + + com.jcraft + jsch + + + com.google.code.findbugs + jsr305 + + + xmlenc + xmlenc + + + net.java.dev.jets3t + jets3t + + + org.apache.avro + avro + + + org.apache.hadoop + hadoop-auth + + + com.jcraft + jsch + + + com.google.code.findbugs + jsr305 + + + servlet-api + javax.servlet + + + org.slf4j + slf4j-log4j12 + + + com.sun.jersey + * + + + 
org.codehaus.jackson + * + + + + org.eclipse.jetty + * + + + + + + org.apache.hadoop + hadoop-hdfs + ${hadoop.version} + + + io.netty + netty + + + servlet-api + javax.servlet + + + com.google.guava + guava + + + com.sun.jersey + jersey-core + + + com.sun.jersey + jersey-server + + + org.slf4j + slf4j-log4j12 + + + com.sun.jersey + * + + + org.codehaus.jackson + * + + + org.eclipse.jetty + * + + + + + org.apache.hadoop + hadoop-auth + ${hadoop.version} + + + org.slf4j + slf4j-log4j12 + + + org.apache.httpcomponents + httpclient + + + org.apache.httpcomponents + * + + + org.eclipse.jetty + * + + + + + + + + + org.apache.maven.plugins + maven-deploy-plugin + + + + net.alchim31.maven + scala-maven-plugin + + + org.apache.maven.plugins + maven-jar-plugin + + + + + src/main/resources + + + ${project.artifactId}-${project.version} + + \ No newline at end of file diff --git a/core/hadoop-common/src/main/scala/com/webank/wedatasphere/linkis/hadoop/common/conf/HadoopConf.scala b/linkis-commons/linkis-hadoop-common/src/main/scala/com/webank/wedatasphere/linkis/hadoop/common/conf/HadoopConf.scala similarity index 81% rename from core/hadoop-common/src/main/scala/com/webank/wedatasphere/linkis/hadoop/common/conf/HadoopConf.scala rename to linkis-commons/linkis-hadoop-common/src/main/scala/com/webank/wedatasphere/linkis/hadoop/common/conf/HadoopConf.scala index 24e10df6ca673c16163435ae7cff254133ed1ff1..2cd91543654c9408041640c635a4f7cc87faa662 100644 --- a/core/hadoop-common/src/main/scala/com/webank/wedatasphere/linkis/hadoop/common/conf/HadoopConf.scala +++ b/linkis-commons/linkis-hadoop-common/src/main/scala/com/webank/wedatasphere/linkis/hadoop/common/conf/HadoopConf.scala @@ -15,9 +15,7 @@ package com.webank.wedatasphere.linkis.hadoop.common.conf import com.webank.wedatasphere.linkis.common.conf.CommonVars -/** - * Created by johnnwang on 2019/12/11. 
- */ + object HadoopConf { val HADOOP_ROOT_USER = CommonVars("wds.linkis.hadoop.root.user", "hadoop") @@ -30,4 +28,8 @@ object HadoopConf { val KEYTAB_HOST_ENABLED = CommonVars("wds.linkis.keytab.host.enabled", false) + val hadoopConfDir = CommonVars("hadoop.config.dir", CommonVars("HADOOP_CONF_DIR", "").getValue).getValue + + val HADOOP_EXTERNAL_CONF_DIR_PREFIX = CommonVars("wds.linkis.hadoop.external.conf.dir.prefix", "/appcom/config/external-conf/hadoop") + } diff --git a/core/hadoop-common/src/main/scala/com/webank/wedatasphere/linkis/hadoop/common/utils/HDFSUtils.scala b/linkis-commons/linkis-hadoop-common/src/main/scala/com/webank/wedatasphere/linkis/hadoop/common/utils/HDFSUtils.scala similarity index 78% rename from core/hadoop-common/src/main/scala/com/webank/wedatasphere/linkis/hadoop/common/utils/HDFSUtils.scala rename to linkis-commons/linkis-hadoop-common/src/main/scala/com/webank/wedatasphere/linkis/hadoop/common/utils/HDFSUtils.scala index 3982b1af042973812e8712596ec7a5f5ceb51ca9..f365adb34419ab5ae403b382f70d7c7dd1460f0d 100644 --- a/core/hadoop-common/src/main/scala/com/webank/wedatasphere/linkis/hadoop/common/utils/HDFSUtils.scala +++ b/linkis-commons/linkis-hadoop-common/src/main/scala/com/webank/wedatasphere/linkis/hadoop/common/utils/HDFSUtils.scala @@ -10,35 +10,53 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ + package com.webank.wedatasphere.linkis.hadoop.common.utils import java.io.File import java.nio.file.Paths import java.security.PrivilegedExceptionAction -import com.webank.wedatasphere.linkis.common.conf.Configuration.hadoopConfDir -import com.webank.wedatasphere.linkis.hadoop.common.conf.HadoopConf._ +import com.webank.wedatasphere.linkis.hadoop.common.conf.HadoopConf +import com.webank.wedatasphere.linkis.hadoop.common.conf.HadoopConf.{hadoopConfDir, _} +import org.apache.commons.lang.StringUtils import org.apache.hadoop.conf.Configuration import org.apache.hadoop.fs.{FileSystem, Path} import org.apache.hadoop.security.UserGroupInformation + /** * Created by enjoyyin on 2019/5/27. */ object HDFSUtils { - def getConfiguration(user: String): Configuration = getConfiguration(user, hadoopConfDir) + def getConfigurationByLabel(user: String, label: String): Configuration = { + getConfiguration(user, getHadoopConDirByLabel(label)) + } + + private def getHadoopConDirByLabel(label: String): String = { + if (StringUtils.isBlank(label)) { + hadoopConfDir + } else { + val prefix = if (HadoopConf.HADOOP_EXTERNAL_CONF_DIR_PREFIX.getValue.endsWith("/")) { + HadoopConf.HADOOP_EXTERNAL_CONF_DIR_PREFIX.getValue + } else { + HadoopConf.HADOOP_EXTERNAL_CONF_DIR_PREFIX.getValue + "/" + } + prefix + label + } + } + def getConfiguration(user: String, hadoopConfDir: String): Configuration = { val confPath = new File(hadoopConfDir) - if(!confPath.exists() || confPath.isFile) { + if (!confPath.exists() || confPath.isFile) { throw new RuntimeException(s"Create hadoop configuration failed, path $hadoopConfDir not exists.") } val conf = new Configuration() conf.addResource(new Path(Paths.get(hadoopConfDir, "core-site.xml").toAbsolutePath.toFile.getAbsolutePath)) conf.addResource(new Path(Paths.get(hadoopConfDir, "hdfs-site.xml").toAbsolutePath.toFile.getAbsolutePath)) - conf.addResource(new Path(Paths.get(hadoopConfDir, "yarn-site.xml").toAbsolutePath.toFile.getAbsolutePath)) conf } @@ 
-68,7 +86,7 @@ object HDFSUtils { def getKerberosUser(userName: String): String = { var user = userName if(KEYTAB_HOST_ENABLED.getValue){ - user = user+ "/" + KEYTAB_HOST.getValue + user = user + "/" + KEYTAB_HOST.getValue } user } diff --git a/linkis-commons/linkis-httpclient/pom.xml b/linkis-commons/linkis-httpclient/pom.xml new file mode 100644 index 0000000000000000000000000000000000000000..038ae0591c799b064ed48a1a39c2339d9dcc553a --- /dev/null +++ b/linkis-commons/linkis-httpclient/pom.xml @@ -0,0 +1,94 @@ + + + + + + linkis + com.webank.wedatasphere.linkis + 1.0.0-RC1 + + 4.0.0 + + linkis-httpclient + + + + com.webank.wedatasphere.linkis + linkis-common + ${linkis.version} + + + org.apache.httpcomponents + httpclient + ${httpclient.version} + + + org.apache.httpcomponents + httpmime + ${httpmime.version} + + + + org.json4s + json4s-jackson_${scala.binary.version} + ${json4s.version} + + + org.scala-lang + scala-library + + + com.fasterxml.jackson.core + jackson-databind + + + com.fasterxml.jackson.core + jackson-annotations + + + com.fasterxml.jackson.core + jackson-core + + + + + + + + + + org.apache.maven.plugins + maven-deploy-plugin + + + + net.alchim31.maven + scala-maven-plugin + + + org.apache.maven.plugins + maven-jar-plugin + + + + + ${basedir}/src/main/resources + + + ${project.artifactId}-${project.version} + + + \ No newline at end of file diff --git a/linkis-commons/linkis-httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/AbstractHttpClient.scala b/linkis-commons/linkis-httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/AbstractHttpClient.scala new file mode 100644 index 0000000000000000000000000000000000000000..0f920aaa4c3594d4a6a590155ebc4a4f3a914d2d --- /dev/null +++ b/linkis-commons/linkis-httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/AbstractHttpClient.scala @@ -0,0 +1,392 @@ +/* + * Copyright 2019 WeBank + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may 
not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.webank.wedatasphere.linkis.httpclient + +import java.util + +import com.webank.wedatasphere.linkis.common.conf.{CommonVars, Configuration} +import com.webank.wedatasphere.linkis.common.io.{Fs, FsPath} +import com.webank.wedatasphere.linkis.common.utils.Utils +import com.webank.wedatasphere.linkis.httpclient.authentication.{AbstractAuthenticationStrategy, AuthenticationAction, HttpAuthentication} +import com.webank.wedatasphere.linkis.httpclient.config.ClientConfig +import com.webank.wedatasphere.linkis.httpclient.discovery.{AbstractDiscovery, Discovery, HeartbeatAction} +import com.webank.wedatasphere.linkis.httpclient.exception.{HttpClientResultException, HttpMessageParseException} +import com.webank.wedatasphere.linkis.httpclient.loadbalancer.{AbstractLoadBalancer, DefaultLoadbalancerStrategy, LoadBalancer} +import com.webank.wedatasphere.linkis.httpclient.request._ +import com.webank.wedatasphere.linkis.httpclient.response._ +import org.apache.commons.io.IOUtils +import org.apache.commons.lang.StringUtils +import org.apache.http.client.CookieStore +import org.apache.http.client.config.RequestConfig +import org.apache.http.client.entity.{DeflateDecompressingEntity, GzipDecompressingEntity, UrlEncodedFormEntity} +import org.apache.http.client.methods._ +import org.apache.http.client.utils.URIBuilder +import org.apache.http.entity.mime.MultipartEntityBuilder +import org.apache.http.entity.{ContentType, StringEntity} +import org.apache.http.impl.client.{BasicCookieStore, 
HttpClients} +import org.apache.http.message.BasicNameValuePair +import org.apache.http.util.EntityUtils +import org.apache.http.{HttpException, HttpResponse, _} +import org.json4s.jackson.Serialization.read +import org.json4s.{DefaultFormats, Formats} + +import scala.collection.Iterable +import scala.collection.JavaConversions._ +import scala.concurrent.{ExecutionContext, ExecutionContextExecutorService} + + +/** + * Created by enjoyyin on 2019/5/20. + */ +abstract class AbstractHttpClient(clientConfig: ClientConfig, clientName: String) extends Client { + + protected implicit val formats: Formats = DefaultFormats + protected implicit val executors: ExecutionContext = Utils.newCachedExecutionContext(clientConfig.getMaxConnection, clientName, false) + + protected val CONNECT_TIME_OUT = CommonVars("wds.linkis.httpclient.default.connect.timeOut", 50000).getValue + + protected val httpClient = HttpClients.createDefault() + + if (clientConfig.getAuthenticationStrategy != null) clientConfig.getAuthenticationStrategy match { + case auth: AbstractAuthenticationStrategy => auth.setClient(this) + case _ => + } + protected val (discovery, loadBalancer): (Option[Discovery], Option[LoadBalancer]) = + if (this.clientConfig.isDiscoveryEnabled) { + val discovery = Some(createDiscovery()) + discovery.foreach { + case d: AbstractDiscovery => + d.setServerUrl(clientConfig.getServerUrl) + d.setClient(this) + d.setSchedule(clientConfig.getDiscoveryPeriod, clientConfig.getDiscoveryTimeUnit) + case d => d.setServerUrl(clientConfig.getServerUrl) + } + //如果discovery没有启用,那么启用loadBalancer是没有意义的 + val loadBalancer = if (clientConfig.isLoadbalancerEnabled && this.clientConfig.getLoadbalancerStrategy != null) + Some(this.clientConfig.getLoadbalancerStrategy.createLoadBalancer()) + else if (clientConfig.isLoadbalancerEnabled) Some(DefaultLoadbalancerStrategy.createLoadBalancer()) + else None + loadBalancer match { + case Some(lb: AbstractLoadBalancer) => + 
discovery.foreach(_.addDiscoveryListener(lb)) + case _ => + } + (discovery, loadBalancer) + } else (None, None) + + discovery.foreach(_.start()) + + protected def createDiscovery(): Discovery + + override def execute(requestAction: Action): Result = execute(requestAction, -1) + + override def execute(requestAction: Action, waitTime: Long): Result = { + if (!requestAction.isInstanceOf[HttpAction]) + throw new UnsupportedOperationException("only HttpAction supported, but the fact is " + requestAction.getClass) + val action = prepareAction(requestAction.asInstanceOf[HttpAction]) + val startTime = System.currentTimeMillis + val req = prepareReq(action) + val prepareReqTime = System.currentTimeMillis - startTime + val cookieStore = prepareCookie(action) + val attempts = new util.ArrayList[Long]() + def addAttempt(): CloseableHttpResponse = { + val startTime = System.currentTimeMillis + val response = executeRequest(req, Some(waitTime).filter(_ > 0), cookieStore) + attempts.add(System.currentTimeMillis - startTime) + response + } + val response = if (!clientConfig.isRetryEnabled) addAttempt() + else clientConfig.getRetryHandler.retry(addAttempt(), action.getClass.getSimpleName + "HttpRequest") + val beforeDeserializeTime = System.currentTimeMillis + responseToResult(response, action) match { + case metricResult: MetricResult => + if(metricResult.getMetric == null) metricResult.setMetric(new HttpMetric) + metricResult.getMetric.setPrepareReqTime(prepareReqTime) + metricResult.getMetric.addRetries(attempts) + metricResult.getMetric.setDeserializeTime(System.currentTimeMillis - beforeDeserializeTime) + metricResult.getMetric.setExecuteTotalTime(System.currentTimeMillis - startTime) + metricResult + case result: Result => result + } + } + + + override def execute(requestAction: Action, resultListener: ResultListener): Unit = { + if (!requestAction.isInstanceOf[HttpAction]) { + throw new UnsupportedOperationException("only HttpAction supported, but the fact is " + 
requestAction.getClass) + } + val action = prepareAction(requestAction.asInstanceOf[HttpAction]) + val req = prepareReq(action) + val cookieStore = prepareCookie(action) + val response: CloseableHttpResponse = executeAsyncRequest(req) + //response.onSuccess{case r => resultListener.onSuccess(responseToResult(r, action))} + //response.onFailure{case t => resultListener.onFailure(t)} + } + + protected def getRequestUrl(suffixUrl: String, requestBody: String): String = { + val urlPrefix = loadBalancer.map(_.chooseServerUrl(requestBody)).getOrElse(clientConfig.getServerUrl) + if(suffixUrl.contains(urlPrefix)) suffixUrl else connectUrl(urlPrefix, suffixUrl) + } + + protected def connectUrl(prefix: String, suffix: String): String = { + val prefixEnd = prefix.endsWith("/") + val suffixStart = suffix.startsWith("/") + if (prefixEnd && suffixStart) prefix.substring(0, prefix.length - 1) + suffix + else if (!prefixEnd && !suffixStart) prefix + "/" + suffix + else prefix + suffix + } + + protected def prepareAction(requestAction: HttpAction): HttpAction = requestAction + + protected def prepareCookie(requestAction:HttpAction):CookieStore = { + val cookieStore = new BasicCookieStore() + if (requestAction.getCookies.nonEmpty) requestAction.getCookies.foreach(cookieStore.addCookie) + cookieStore + } + + + protected def prepareReq(requestAction: HttpAction): HttpRequestBase = { + var realURL = "" + requestAction match { + case serverUrlAction: ServerUrlAction => + realURL = connectUrl(serverUrlAction.serverUrl, requestAction.getURL) + case _ => + realURL = getRequestUrl(requestAction.getURL, requestAction.getRequestBody) + } + + if (clientConfig.getAuthenticationStrategy != null) clientConfig.getAuthenticationStrategy.login(requestAction, realURL.replaceAll(requestAction.getURL, "")) match { + case authAction: HttpAuthentication => + val cookies = authAction.authToCookies + if (cookies != null && cookies.nonEmpty) cookies.foreach(requestAction.addCookie) + val headers = 
authAction.authToHeaders + if (headers != null && !headers.isEmpty()) { + headers.foreach { case (k, v) => requestAction.addHeader(k.toString(), v.toString()) } + } + case _ => + } + + val request = requestAction match { + case delete: DeleteAction => + val builder = new URIBuilder(realURL) + if (!delete.getParameters.isEmpty) { + delete.getParameters.foreach { case (k, v) => builder.addParameter(k.toString(), v.toString()) } + } + val httpDelete = new HttpDelete(builder.build()) + if (requestAction.getHeaders.nonEmpty) { + requestAction.getHeaders.foreach { case (k, v) => httpDelete.addHeader(k.toString(), v.toString()) } + } + httpDelete + case put: PutAction => + val httpPut = new HttpPut(realURL) + if (put.getParameters.nonEmpty || put.getFormParams.nonEmpty) { + val nvps = new util.ArrayList[NameValuePair] + if (put.getParameters.nonEmpty) { + put.getParameters.foreach { case (k, v) => nvps.add(new BasicNameValuePair(k, v.toString())) } + } + if (put.getFormParams.nonEmpty) { + put.getFormParams.foreach { case (k, v) => nvps.add(new BasicNameValuePair(k, v.toString())) } + } + httpPut.setEntity(new UrlEncodedFormEntity(nvps)) + } + + if (StringUtils.isNotBlank(put.getRequestPayload)) { + val stringEntity = new StringEntity(put.getRequestPayload, "UTF-8") + stringEntity.setContentEncoding(Configuration.BDP_ENCODING.getValue) + stringEntity.setContentType("application/json") + httpPut.setEntity(stringEntity) + } + + if (requestAction.getHeaders.nonEmpty) { + requestAction.getHeaders.foreach { case (k, v) => httpPut.addHeader(k.toString(), v.toString()) } + } + httpPut + case upload: UploadAction => + val httpPost = new HttpPost(realURL) + val builder = MultipartEntityBuilder.create() + if (upload.inputStreams != null) + upload.inputStreams.foreach { case (k, v) => + builder.addBinaryBody(k, v, ContentType.create("multipart/form-data"), k) + } + upload match { + case get: GetAction => get.getParameters. + retain((k, v) => v != null && k != null). 
+ foreach { case (k, v) => builder.addTextBody(k.toString, v.toString) } + case _ => + } + upload match { + case get: GetAction => get.getHeaders. + retain((k, v) => v != null && k != null). + foreach { case (k, v) => httpPost.addHeader(k.toString, v.toString) } + case _ => + } + val httpEntity = builder.build() + httpPost.setEntity(httpEntity) + httpPost + case post: POSTAction => + val httpPost = new HttpPost(realURL) + if (post.getParameters.nonEmpty || post.getFormParams.nonEmpty) { + val nvps = new util.ArrayList[NameValuePair] + if (post.getParameters.nonEmpty) { + post.getParameters.foreach { case (k, v) => nvps.add(new BasicNameValuePair(k, v.toString())) } + } + if (post.getFormParams.nonEmpty) { + post.getFormParams.foreach { case (k, v) => nvps.add(new BasicNameValuePair(k, v.toString())) } + } + httpPost.setEntity(new UrlEncodedFormEntity(nvps)) + } + + if (StringUtils.isNotBlank(post.getRequestPayload)) { + val stringEntity = new StringEntity(post.getRequestPayload, "UTF-8") + stringEntity.setContentEncoding(Configuration.BDP_ENCODING.getValue) + stringEntity.setContentType("application/json") + httpPost.setEntity(stringEntity) + } + + if (requestAction.getHeaders.nonEmpty) { + requestAction.getHeaders.foreach { case (k, v) => httpPost.addHeader(k.toString(), v.toString()) } + } + httpPost + case get: GetAction => + val builder = new URIBuilder(realURL) + if (!get.getParameters.isEmpty) { + get.getParameters.foreach { case (k, v) => builder.addParameter(k.toString(), v.toString()) } + } + val httpGet = new HttpGet(builder.build()) + if (requestAction.getHeaders.nonEmpty) { + requestAction.getHeaders.foreach { case (k, v) => httpGet.addHeader(k.toString(), v.toString()) } + } + httpGet + case _ => + val httpost = new HttpPost(realURL) + val stringEntity = new StringEntity(requestAction.getRequestBody, "UTF-8") + stringEntity.setContentEncoding(Configuration.BDP_ENCODING.getValue) + stringEntity.setContentType("application/json") + 
httpost.setEntity(stringEntity) + if (requestAction.getHeaders.nonEmpty) { + requestAction.getHeaders.foreach { case (k, v) => httpost.addHeader(k.toString(), v.toString()) } + } + httpost + } + request + } + + protected def getFsByUser(user: String, path: FsPath): Fs + + + protected def executeRequest(req: HttpRequestBase, waitTime: Option[Long]): CloseableHttpResponse = { + val readTimeOut = waitTime.getOrElse(clientConfig.getReadTimeout) + val connectTimeOut = if (clientConfig.getConnectTimeout > 1000 || clientConfig.getConnectTimeout < 0) clientConfig.getConnectTimeout else CONNECT_TIME_OUT + val requestConfig = RequestConfig.custom + .setConnectTimeout(connectTimeOut.toInt) + .setConnectionRequestTimeout(connectTimeOut.toInt) + .setSocketTimeout(readTimeOut.toInt).build + req.setConfig(requestConfig) +// httpClient = HttpClients.createDefault() // todo check + val response = httpClient.execute(req) + response + } + + protected def executeRequest(req: HttpRequestBase, waitTime: Option[Long], cookieStore: CookieStore): CloseableHttpResponse = { + val readTimeOut = waitTime.getOrElse(clientConfig.getReadTimeout) + val connectTimeOut = if (clientConfig.getConnectTimeout > 1000 || clientConfig.getConnectTimeout < 0) clientConfig.getConnectTimeout else CONNECT_TIME_OUT + val requestConfig = RequestConfig.custom + .setConnectTimeout(connectTimeOut.toInt) + .setConnectionRequestTimeout(connectTimeOut.toInt) + .setSocketTimeout(readTimeOut.toInt).build + req.setConfig(requestConfig) + val response = httpClient.execute(req) + response + } + + + + //TODO 20200618 Modify to asynchronous request + protected def executeAsyncRequest(req: HttpRequestBase): CloseableHttpResponse = { + val response = httpClient.execute(req) + response + } + + protected def responseToResult(response: HttpResponse, requestAction: Action): Result = { + val entity = response.getEntity + val result = requestAction match { + case download: DownloadAction => + val statusCode = 
response.getStatusLine.getStatusCode + if (statusCode != 200) { + var responseBody: String = null + if (entity != null) { + responseBody = EntityUtils.toString(entity, "UTF-8") + } + throw new HttpClientResultException(s"request failed! ResponseBody is $responseBody.") + } + val inputStream = if(entity.getContentEncoding != null && StringUtils.isNotBlank(entity.getContentEncoding.getValue)) + entity.getContentEncoding.getValue.toLowerCase match { + case "gzip" => new GzipDecompressingEntity(entity).getContent + case "deflate" => new DeflateDecompressingEntity(entity).getContent + case str => throw new HttpClientResultException(s"request failed! Reason: not support decompress type $str.") + } else entity.getContent + download.write(inputStream) + Result() + case heartbeat: HeartbeatAction => + discovery.map { + case d: AbstractDiscovery => d.getHeartbeatResult(response, heartbeat) + }.getOrElse(throw new HttpMessageParseException("Discovery is not enable, HeartbeatAction is not needed!")) + case auth: AuthenticationAction => + clientConfig.getAuthenticationStrategy match { + case a: AbstractAuthenticationStrategy => a.getAuthenticationResult(response, auth) + case _ => throw new HttpMessageParseException("AuthenticationStrategy is not enable, login is not needed!") + } + case httpAction: HttpAction => + var responseBody: String = null + if (entity != null) { + responseBody = EntityUtils.toString(entity, "UTF-8") + } + httpResponseToResult(response, httpAction, responseBody) + .getOrElse(throw new HttpMessageParseException("cannot parse message: " + responseBody)) + } + result match { + case userAction: UserAction => requestAction match { + case _userAction: UserAction => userAction.setUser(_userAction.getUser) + case _ => + } + case _ => + } + result + } + + protected def httpResponseToResult(response: HttpResponse, requestAction: HttpAction, responseBody: String): Option[Result] + + protected def deserializeResponseBody(response: HttpResponse): Iterable[_] = { + 
var entity = response.getEntity + var responseBody: String = null + if (entity != null) { + responseBody = EntityUtils.toString(entity, "UTF-8") + } + if (responseBody.startsWith("{") && responseBody.endsWith("}")) + read[Map[String, Object]](responseBody) + else if (responseBody.startsWith("[") && responseBody.endsWith("}")) + read[List[Map[String, Object]]](responseBody) + else if (StringUtils.isEmpty(responseBody)) Map.empty[String, Object] + else if (responseBody.length > 200) throw new HttpException(responseBody.substring(0, 200)) + else throw new HttpException(responseBody) + } + + override def close(): Unit = { + discovery.foreach { + case d: AbstractDiscovery => IOUtils.closeQuietly(d) + case _ => + } + httpClient.close() + executors.asInstanceOf[ExecutionContextExecutorService].shutdown() + } +} \ No newline at end of file diff --git a/core/httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/Client.scala b/linkis-commons/linkis-httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/Client.scala similarity index 100% rename from core/httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/Client.scala rename to linkis-commons/linkis-httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/Client.scala diff --git a/linkis-commons/linkis-httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/GenericHttpClient.scala b/linkis-commons/linkis-httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/GenericHttpClient.scala new file mode 100644 index 0000000000000000000000000000000000000000..9d8caf22cc76d1d38857af2fc748c6d8b5318d06 --- /dev/null +++ b/linkis-commons/linkis-httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/GenericHttpClient.scala @@ -0,0 +1,41 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.webank.wedatasphere.linkis.httpclient + +import com.webank.wedatasphere.linkis.common.io.{Fs, FsPath} +import com.webank.wedatasphere.linkis.httpclient.config.ClientConfig +import com.webank.wedatasphere.linkis.httpclient.discovery.Discovery +import com.webank.wedatasphere.linkis.httpclient.exception.HttpMethodNotSupportException +import com.webank.wedatasphere.linkis.httpclient.request.HttpAction +import com.webank.wedatasphere.linkis.httpclient.response.{HashMapHttpResult, Result} +import org.apache.http.HttpResponse + + +class GenericHttpClient(clientConfig: ClientConfig, clientName: String) extends AbstractHttpClient(clientConfig, clientName) { + + override protected def createDiscovery(): Discovery = throw new HttpMethodNotSupportException("GenericHttpClient not support discovery.") + + override protected def httpResponseToResult(response: HttpResponse, requestAction: HttpAction, responseBody: String): Option[Result] = { + val result = new HashMapHttpResult + result.set(responseBody, response.getStatusLine.getStatusCode, requestAction.getURL, response.getEntity.getContentType.getValue) + Some(result) + } + + override protected def getFsByUser(user: String, path: FsPath): Fs = { + null + } +} diff --git a/core/httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/ResultListener.scala b/linkis-commons/linkis-httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/ResultListener.scala similarity index 100% rename from 
core/httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/ResultListener.scala rename to linkis-commons/linkis-httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/ResultListener.scala diff --git a/core/httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/authentication/AbstractAuthenticationStrategy.scala b/linkis-commons/linkis-httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/authentication/AbstractAuthenticationStrategy.scala similarity index 100% rename from core/httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/authentication/AbstractAuthenticationStrategy.scala rename to linkis-commons/linkis-httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/authentication/AbstractAuthenticationStrategy.scala diff --git a/core/httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/authentication/Authentication.scala b/linkis-commons/linkis-httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/authentication/Authentication.scala similarity index 100% rename from core/httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/authentication/Authentication.scala rename to linkis-commons/linkis-httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/authentication/Authentication.scala diff --git a/core/httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/authentication/AuthenticationAction.scala b/linkis-commons/linkis-httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/authentication/AuthenticationAction.scala similarity index 100% rename from core/httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/authentication/AuthenticationAction.scala rename to linkis-commons/linkis-httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/authentication/AuthenticationAction.scala diff --git 
a/core/httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/authentication/AuthenticationStrategy.scala b/linkis-commons/linkis-httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/authentication/AuthenticationStrategy.scala similarity index 100% rename from core/httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/authentication/AuthenticationStrategy.scala rename to linkis-commons/linkis-httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/authentication/AuthenticationStrategy.scala diff --git a/core/httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/authentication/HttpAuthentication.scala b/linkis-commons/linkis-httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/authentication/HttpAuthentication.scala similarity index 100% rename from core/httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/authentication/HttpAuthentication.scala rename to linkis-commons/linkis-httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/authentication/HttpAuthentication.scala diff --git a/core/httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/config/ClientConfig.scala b/linkis-commons/linkis-httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/config/ClientConfig.scala similarity index 100% rename from core/httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/config/ClientConfig.scala rename to linkis-commons/linkis-httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/config/ClientConfig.scala diff --git a/core/httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/config/ClientConfigBuilder.scala b/linkis-commons/linkis-httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/config/ClientConfigBuilder.scala similarity index 98% rename from core/httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/config/ClientConfigBuilder.scala rename to 
linkis-commons/linkis-httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/config/ClientConfigBuilder.scala index a49c9ac6dc5f67adcb47e1e55710ee9212734dd9..a3f9a99351d3e10ad996845eb9085d23ee121d07 100644 --- a/core/httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/config/ClientConfigBuilder.scala +++ b/linkis-commons/linkis-httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/config/ClientConfigBuilder.scala @@ -42,7 +42,7 @@ class ClientConfigBuilder protected() { protected var retryEnabled: Boolean = _ protected var retryHandler: RetryHandler = _ - def addUJESServerUrl(serverUrl: String): this.type = { + def addServerUrl(serverUrl: String): this.type = { this.serverUrl = serverUrl this } diff --git a/core/httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/discovery/AbstractDiscovery.scala b/linkis-commons/linkis-httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/discovery/AbstractDiscovery.scala similarity index 100% rename from core/httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/discovery/AbstractDiscovery.scala rename to linkis-commons/linkis-httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/discovery/AbstractDiscovery.scala diff --git a/core/httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/discovery/Discovery.scala b/linkis-commons/linkis-httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/discovery/Discovery.scala similarity index 100% rename from core/httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/discovery/Discovery.scala rename to linkis-commons/linkis-httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/discovery/Discovery.scala diff --git a/core/httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/discovery/DiscoveryListener.scala 
b/linkis-commons/linkis-httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/discovery/DiscoveryListener.scala similarity index 100% rename from core/httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/discovery/DiscoveryListener.scala rename to linkis-commons/linkis-httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/discovery/DiscoveryListener.scala diff --git a/core/httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/discovery/HeartbeatAction.scala b/linkis-commons/linkis-httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/discovery/HeartbeatAction.scala similarity index 100% rename from core/httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/discovery/HeartbeatAction.scala rename to linkis-commons/linkis-httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/discovery/HeartbeatAction.scala diff --git a/core/httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/exception/DiscoveryException.scala b/linkis-commons/linkis-httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/exception/DiscoveryException.scala similarity index 100% rename from core/httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/exception/DiscoveryException.scala rename to linkis-commons/linkis-httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/exception/DiscoveryException.scala diff --git a/core/httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/exception/HttpClientResultException.scala b/linkis-commons/linkis-httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/exception/HttpClientResultException.scala similarity index 100% rename from core/httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/exception/HttpClientResultException.scala rename to linkis-commons/linkis-httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/exception/HttpClientResultException.scala diff 
--git a/core/httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/exception/HttpMessageParseException.scala b/linkis-commons/linkis-httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/exception/HttpMessageParseException.scala similarity index 87% rename from core/httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/exception/HttpMessageParseException.scala rename to linkis-commons/linkis-httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/exception/HttpMessageParseException.scala index b50629c9bf4edb3024c21442f8f3c2fa37f63bb7..a6b724d7b0d3874a8d9689d56769b05d0db0a985 100644 --- a/core/httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/exception/HttpMessageParseException.scala +++ b/linkis-commons/linkis-httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/exception/HttpMessageParseException.scala @@ -25,4 +25,5 @@ import com.webank.wedatasphere.linkis.common.exception.ErrorException /** * Created by enjoyyin on 2019/5/21. 
*/ -class HttpMessageParseException(errorDesc: String) extends ErrorException(10900, errorDesc) \ No newline at end of file +class HttpMessageParseException(errorDesc: String) extends ErrorException(10900, errorDesc) +class HttpMethodNotSupportException(errorDesc: String) extends ErrorException(10902, errorDesc) \ No newline at end of file diff --git a/core/httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/loadbalancer/AbstractLoadBalancer.scala b/linkis-commons/linkis-httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/loadbalancer/AbstractLoadBalancer.scala similarity index 100% rename from core/httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/loadbalancer/AbstractLoadBalancer.scala rename to linkis-commons/linkis-httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/loadbalancer/AbstractLoadBalancer.scala diff --git a/core/httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/loadbalancer/DefaultLoadbalancerStrategy.scala b/linkis-commons/linkis-httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/loadbalancer/DefaultLoadbalancerStrategy.scala similarity index 100% rename from core/httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/loadbalancer/DefaultLoadbalancerStrategy.scala rename to linkis-commons/linkis-httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/loadbalancer/DefaultLoadbalancerStrategy.scala diff --git a/core/httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/loadbalancer/LoadBalancer.scala b/linkis-commons/linkis-httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/loadbalancer/LoadBalancer.scala similarity index 100% rename from core/httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/loadbalancer/LoadBalancer.scala rename to linkis-commons/linkis-httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/loadbalancer/LoadBalancer.scala diff --git 
a/core/httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/loadbalancer/LoadBalancerStrategy.scala b/linkis-commons/linkis-httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/loadbalancer/LoadBalancerStrategy.scala similarity index 100% rename from core/httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/loadbalancer/LoadBalancerStrategy.scala rename to linkis-commons/linkis-httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/loadbalancer/LoadBalancerStrategy.scala diff --git a/core/httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/request/Action.scala b/linkis-commons/linkis-httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/request/Action.scala similarity index 100% rename from core/httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/request/Action.scala rename to linkis-commons/linkis-httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/request/Action.scala diff --git a/core/httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/request/DownloadAction.scala b/linkis-commons/linkis-httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/request/DownloadAction.scala similarity index 100% rename from core/httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/request/DownloadAction.scala rename to linkis-commons/linkis-httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/request/DownloadAction.scala diff --git a/core/httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/request/GetAction.scala b/linkis-commons/linkis-httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/request/GetAction.scala similarity index 100% rename from core/httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/request/GetAction.scala rename to linkis-commons/linkis-httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/request/GetAction.scala diff --git 
a/core/httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/request/HttpAction.scala b/linkis-commons/linkis-httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/request/HttpAction.scala similarity index 100% rename from core/httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/request/HttpAction.scala rename to linkis-commons/linkis-httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/request/HttpAction.scala diff --git a/core/httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/request/POSTAction.scala b/linkis-commons/linkis-httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/request/POSTAction.scala similarity index 100% rename from core/httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/request/POSTAction.scala rename to linkis-commons/linkis-httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/request/POSTAction.scala diff --git a/linkis-commons/linkis-httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/request/PutAction.scala b/linkis-commons/linkis-httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/request/PutAction.scala new file mode 100644 index 0000000000000000000000000000000000000000..d9113d201ec40ce7ed4481897a04de2673fdf3d8 --- /dev/null +++ b/linkis-commons/linkis-httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/request/PutAction.scala @@ -0,0 +1,26 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.webank.wedatasphere.linkis.httpclient.request + + +abstract class PutAction extends POSTAction + +abstract class DeleteAction extends GetAction + +abstract class HeadAction extends GetAction + +abstract class OptionsAction extends GetAction \ No newline at end of file diff --git a/core/httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/request/ServerUrlAction.scala b/linkis-commons/linkis-httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/request/ServerUrlAction.scala similarity index 100% rename from core/httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/request/ServerUrlAction.scala rename to linkis-commons/linkis-httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/request/ServerUrlAction.scala diff --git a/core/httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/request/UploadAction.scala b/linkis-commons/linkis-httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/request/UploadAction.scala similarity index 100% rename from core/httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/request/UploadAction.scala rename to linkis-commons/linkis-httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/request/UploadAction.scala index 821bd2dd92bb8deadef2856dbda97465ea9f5581..64ca310811eef8a93bb8084facbd76fdfb3fadf8 100644 --- a/core/httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/request/UploadAction.scala +++ b/linkis-commons/linkis-httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/request/UploadAction.scala @@ -16,10 +16,10 @@ package com.webank.wedatasphere.linkis.httpclient.request -import scala.tools.nsc.interpreter.InputStream - import java.util +import scala.tools.nsc.interpreter.InputStream + /** * Created by enjoyyin on 2019/5/20. 
*/ diff --git a/core/httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/request/UserAction.scala b/linkis-commons/linkis-httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/request/UserAction.scala similarity index 100% rename from core/httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/request/UserAction.scala rename to linkis-commons/linkis-httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/request/UserAction.scala diff --git a/core/httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/request/UserPwdAction.scala b/linkis-commons/linkis-httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/request/UserPwdAction.scala similarity index 100% rename from core/httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/request/UserPwdAction.scala rename to linkis-commons/linkis-httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/request/UserPwdAction.scala diff --git a/linkis-commons/linkis-httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/response/HashMapHttpResult.scala b/linkis-commons/linkis-httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/response/HashMapHttpResult.scala new file mode 100644 index 0000000000000000000000000000000000000000..b10b048764092d610086a1efc7e9c42715e7c77c --- /dev/null +++ b/linkis-commons/linkis-httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/response/HashMapHttpResult.scala @@ -0,0 +1,51 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.webank.wedatasphere.linkis.httpclient.response + +import java.util + +import com.webank.wedatasphere.linkis.common.utils.JsonUtils +import com.webank.wedatasphere.linkis.httpclient.exception.HttpClientResultException + + +class HashMapHttpResult extends HttpResult { + + private var resultMap: util.Map[String, Object] = _ + private var responseBody: String = _ + private var statusCode: Int = _ + private var url: String = _ + private var contentType: String = _ + + override def getContentType: String = contentType + + override def getUri: String = url + + override def getStatusCode: Int = statusCode + + def getResultMap: util.Map[String, Object] = resultMap + + override def set(responseBody: String, statusCode: Int, url: String, contentType: String): Unit = { + if(statusCode != 200) throw new HttpClientResultException(s"URL $url request failed! ResponseBody is $responseBody." 
) + resultMap = JsonUtils.jackson.readValue(responseBody, classOf[util.Map[String, Object]]) + this.responseBody = responseBody + this.statusCode = statusCode + this.url = url + this.contentType = contentType + } + + override def getResponseBody: String = responseBody +} diff --git a/core/httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/response/HttpResult.scala b/linkis-commons/linkis-httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/response/HttpResult.scala similarity index 100% rename from core/httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/response/HttpResult.scala rename to linkis-commons/linkis-httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/response/HttpResult.scala diff --git a/core/httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/response/ListResult.scala b/linkis-commons/linkis-httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/response/ListResult.scala similarity index 100% rename from core/httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/response/ListResult.scala rename to linkis-commons/linkis-httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/response/ListResult.scala diff --git a/linkis-commons/linkis-httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/response/MetricResult.scala b/linkis-commons/linkis-httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/response/MetricResult.scala new file mode 100644 index 0000000000000000000000000000000000000000..5e1d4619ae1539665d082020cf01f55b141db5e0 --- /dev/null +++ b/linkis-commons/linkis-httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/response/MetricResult.scala @@ -0,0 +1,60 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.webank.wedatasphere.linkis.httpclient.response + +import java.util + + +trait MetricResult { + + def getMetric: HttpMetric + + def setMetric(metric: HttpMetric): Unit + +} + +trait AbstractMetricResult extends MetricResult { + + private var metric: HttpMetric = _ + + override def getMetric: HttpMetric = metric + + override def setMetric(metric: HttpMetric): Unit = this.metric = metric +} + +class HttpMetric { + + private var prepareReqTime: Long = 0 + private var executeTotalTime: Long = 0 + private var deserializeTime: Long = 0 + private val attempts = new util.ArrayList[Long] + + def setPrepareReqTime(prepareReqTime: Long): Unit = this.prepareReqTime = prepareReqTime + def getPrepareReqTime: Long = prepareReqTime + def setExecuteTotalTime(executeTotalTime: Long): Unit = this.executeTotalTime = executeTotalTime + def getExecuteTotalTime: Long = executeTotalTime + def setDeserializeTime(deserializeTime: Long): Unit = this.deserializeTime = deserializeTime + def getDeserializeTime: Long = deserializeTime + + def addRetry(attemptTime: Long): Unit = attempts.add(attemptTime) + def addRetries(attempts: java.util.List[Long]): Unit = this.attempts.addAll(attempts) + def getAttemptTimes: util.List[Long] = attempts + + def getMetricMap: Map[String, Any] = Map("prepareReqTime" -> prepareReqTime, "executeTotalTime" -> executeTotalTime, "deserializeTime" -> deserializeTime, + "retriedNum" -> attempts.size, "attempts" -> attempts) + +} \ No newline at end of file diff --git 
a/core/httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/response/Result.scala b/linkis-commons/linkis-httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/response/Result.scala similarity index 100% rename from core/httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/response/Result.scala rename to linkis-commons/linkis-httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/response/Result.scala diff --git a/linkis-commons/linkis-httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/response/impl/DefaultHttpResult.scala b/linkis-commons/linkis-httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/response/impl/DefaultHttpResult.scala new file mode 100644 index 0000000000000000000000000000000000000000..6ea135b07d6d3ac7d7c816dedaf293d96766a100 --- /dev/null +++ b/linkis-commons/linkis-httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/response/impl/DefaultHttpResult.scala @@ -0,0 +1,42 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.webank.wedatasphere.linkis.httpclient.response.impl + +import com.webank.wedatasphere.linkis.httpclient.response.HttpResult + +class DefaultHttpResult extends HttpResult { + + var responseBody: String = _ + var statusCode: Int = _ + var uri: String = _ + var contentType: String = _ + + override def getContentType: String = contentType + + override def getUri: String = uri + + override def getStatusCode: Int = statusCode + + override def set(responseBody: String, statusCode: Int, url: String, contentType: String): Unit = { + this.responseBody = responseBody + this.statusCode = statusCode + this.uri = url + this.contentType = contentType + } + + override def getResponseBody: String = responseBody +} diff --git a/linkis-commons/linkis-message-scheduler/pom.xml b/linkis-commons/linkis-message-scheduler/pom.xml new file mode 100644 index 0000000000000000000000000000000000000000..117dcc27a3ad039d534221de390ac7723518981d --- /dev/null +++ b/linkis-commons/linkis-message-scheduler/pom.xml @@ -0,0 +1,86 @@ + + + + + + linkis + com.webank.wedatasphere.linkis + 1.0.0-RC1 + ../../pom.xml + + 4.0.0 + + linkis-message-scheduler + + + + com.webank.wedatasphere.linkis + linkis-rpc + ${linkis.version} + + + org.springframework + spring-tx + ${spring.version} + + + com.webank.wedatasphere.linkis + linkis-scheduler + ${linkis.version} + + + junit + junit + 4.12 + test + + + + + + + + org.apache.maven.plugins + maven-deploy-plugin + + + + net.alchim31.maven + scala-maven-plugin + + + org.apache.maven.plugins + maven-jar-plugin + + + + + src/main/resources + + **/*.properties + **/application.yml + **/bootstrap.yml + **/log4j2.xml + + + + ${project.artifactId}-${project.version} + + + \ No newline at end of file diff --git a/linkis-commons/linkis-message-scheduler/src/main/java/com/webank/wedatasphere/linkis/message/annotation/Chain.java b/linkis-commons/linkis-message-scheduler/src/main/java/com/webank/wedatasphere/linkis/message/annotation/Chain.java new file mode 
100644 index 0000000000000000000000000000000000000000..df766e1329fbacac37936b57fc4785c646f62b4e --- /dev/null +++ b/linkis-commons/linkis-message-scheduler/src/main/java/com/webank/wedatasphere/linkis/message/annotation/Chain.java @@ -0,0 +1,32 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.webank.wedatasphere.linkis.message.annotation; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +/** + * @date 2020/8/4 + */ +@Target({ElementType.METHOD}) +@Retention(RetentionPolicy.RUNTIME) +public @interface Chain { + + String value() default "default"; +} diff --git a/linkis-commons/linkis-message-scheduler/src/main/java/com/webank/wedatasphere/linkis/message/annotation/Implicit.java b/linkis-commons/linkis-message-scheduler/src/main/java/com/webank/wedatasphere/linkis/message/annotation/Implicit.java new file mode 100644 index 0000000000000000000000000000000000000000..a6ab53f33b1d8d112fed81d0e5c72394bf43a648 --- /dev/null +++ b/linkis-commons/linkis-message-scheduler/src/main/java/com/webank/wedatasphere/linkis/message/annotation/Implicit.java @@ -0,0 +1,30 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.webank.wedatasphere.linkis.message.annotation; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +/** + * @date 2020/7/28 + */ +@Target({ElementType.METHOD}) +@Retention(RetentionPolicy.RUNTIME) +public @interface Implicit { +} diff --git a/linkis-commons/linkis-message-scheduler/src/main/java/com/webank/wedatasphere/linkis/message/annotation/Method.java b/linkis-commons/linkis-message-scheduler/src/main/java/com/webank/wedatasphere/linkis/message/annotation/Method.java new file mode 100644 index 0000000000000000000000000000000000000000..9714ad1d37c63bebbf9d1b2b4005293b37b37d4e --- /dev/null +++ b/linkis-commons/linkis-message-scheduler/src/main/java/com/webank/wedatasphere/linkis/message/annotation/Method.java @@ -0,0 +1,31 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.webank.wedatasphere.linkis.message.annotation; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +/** + * @date 2020/7/14 + */ +@Target({ElementType.TYPE}) +@Retention(RetentionPolicy.RUNTIME) +public @interface Method { + String value() default ""; +} diff --git a/linkis-commons/linkis-message-scheduler/src/main/java/com/webank/wedatasphere/linkis/message/annotation/NotImplicit.java b/linkis-commons/linkis-message-scheduler/src/main/java/com/webank/wedatasphere/linkis/message/annotation/NotImplicit.java new file mode 100644 index 0000000000000000000000000000000000000000..28f4356eff899260c21bfe4e5c0cbfc80c5a3ee7 --- /dev/null +++ b/linkis-commons/linkis-message-scheduler/src/main/java/com/webank/wedatasphere/linkis/message/annotation/NotImplicit.java @@ -0,0 +1,30 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.webank.wedatasphere.linkis.message.annotation; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +/** + * @date 2020/8/4 + */ +@Target({ElementType.PARAMETER}) +@Retention(RetentionPolicy.RUNTIME) +public @interface NotImplicit { +} diff --git a/linkis-commons/linkis-message-scheduler/src/main/java/com/webank/wedatasphere/linkis/message/annotation/Order.java b/linkis-commons/linkis-message-scheduler/src/main/java/com/webank/wedatasphere/linkis/message/annotation/Order.java new file mode 100644 index 0000000000000000000000000000000000000000..39a523cb756ac930630ca9a66f37cac4538152fb --- /dev/null +++ b/linkis-commons/linkis-message-scheduler/src/main/java/com/webank/wedatasphere/linkis/message/annotation/Order.java @@ -0,0 +1,31 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.webank.wedatasphere.linkis.message.annotation; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +/** + * @date 2020/7/14 + */ +@Target({ElementType.METHOD}) +@Retention(RetentionPolicy.RUNTIME) +public @interface Order { + int value() default 2147483647; +} diff --git a/linkis-commons/linkis-message-scheduler/src/main/java/com/webank/wedatasphere/linkis/message/annotation/Receiver.java b/linkis-commons/linkis-message-scheduler/src/main/java/com/webank/wedatasphere/linkis/message/annotation/Receiver.java new file mode 100644 index 0000000000000000000000000000000000000000..0d7c39b1d98ce835717b5caea14d60396f9c27a9 --- /dev/null +++ b/linkis-commons/linkis-message-scheduler/src/main/java/com/webank/wedatasphere/linkis/message/annotation/Receiver.java @@ -0,0 +1,30 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.webank.wedatasphere.linkis.message.annotation; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +/** + * @date 2020/7/14 + */ +@Target({ElementType.METHOD}) +@Retention(RetentionPolicy.RUNTIME) +public @interface Receiver { +} diff --git a/linkis-commons/linkis-message-scheduler/src/main/java/com/webank/wedatasphere/linkis/message/builder/DefaultMessageJob.java b/linkis-commons/linkis-message-scheduler/src/main/java/com/webank/wedatasphere/linkis/message/builder/DefaultMessageJob.java new file mode 100644 index 0000000000000000000000000000000000000000..494f16b7d4ca0c8f464d2756942ceddbd4e46d02 --- /dev/null +++ b/linkis-commons/linkis-message-scheduler/src/main/java/com/webank/wedatasphere/linkis/message/builder/DefaultMessageJob.java @@ -0,0 +1,185 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
package com.webank.wedatasphere.linkis.message.builder;

import com.webank.wedatasphere.linkis.message.context.AbstractMessageSchedulerContext;
import com.webank.wedatasphere.linkis.message.scheduler.MethodExecuteWrapper;
import com.webank.wedatasphere.linkis.protocol.message.RequestProtocol;
import com.webank.wedatasphere.linkis.scheduler.executer.ExecuteRequest;
import com.webank.wedatasphere.linkis.scheduler.queue.Job;
import com.webank.wedatasphere.linkis.scheduler.queue.JobInfo;
import com.webank.wedatasphere.linkis.scheduler.queue.SchedulerEventState;

import java.io.IOException;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.locks.LockSupport;

/**
 * Default {@link MessageJob}: a linkis-scheduler {@link Job} that carries a published
 * {@link RequestProtocol} together with the service methods matched for it, and exposes a
 * {@link Future}-style view so the publisher can block until the scheduler completes the job.
 * Completion is signalled by {@link MessageJobListener}, which unparks {@link #getBlockThread()}.
 *
 * @date 2020/7/17
 */
public class DefaultMessageJob extends Job implements MessageJob {

    private RequestProtocol requestProtocol;

    /** Service name -&gt; method wrappers selected to handle {@link #requestProtocol}. */
    private Map<String, List<MethodExecuteWrapper>> methodExecuteWrappers;

    private ServiceMethodContext smc;

    private AbstractMessageSchedulerContext context;

    // ---- MessageJob implementation ----

    @Override
    public RequestProtocol getRequestProtocol() {
        return this.requestProtocol;
    }

    @Override
    public void setRequestProtocol(RequestProtocol requestProtocol) {
        this.requestProtocol = requestProtocol;
    }

    @Override
    public Map<String, List<MethodExecuteWrapper>> getMethodExecuteWrappers() {
        return this.methodExecuteWrappers;
    }

    @Override
    public void setMethodExecuteWrappers(Map<String, List<MethodExecuteWrapper>> methodExecuteWrappers) {
        this.methodExecuteWrappers = methodExecuteWrappers;
    }

    @Override
    public ServiceMethodContext getMethodContext() {
        return this.smc;
    }

    @Override
    public void setMethodContext(ServiceMethodContext smc) {
        this.smc = smc;
    }

    @Override
    public AbstractMessageSchedulerContext getContext() {
        return this.context;
    }

    @Override
    public void setContext(AbstractMessageSchedulerContext context) {
        this.context = context;
    }

    // ---- Job implementation ----

    @Override
    public void init() {
    }

    /** The real work happens in the message-scheduler executor; the request is a placeholder. */
    @Override
    public ExecuteRequest jobToExecuteRequest() {
        return () -> null;
    }

    @Override
    public String getName() {
        return getId();
    }

    @Override
    public JobInfo getJobInfo() {
        return null;
    }

    @Override
    public void close() throws IOException {
    }

    // ---- Future implementation ----

    // TODO: 2020/8/3 replace state + blockThread handshake with a CAS-based one.
    // volatile: written by the waiting caller thread, read by the scheduler thread that
    // unparks it in MessageJobListener#onJobCompleted.
    volatile Thread blockThread = null;

    public Thread getBlockThread() {
        return this.blockThread;
    }

    /**
     * Cancels the underlying scheduler job only when {@code mayInterruptIfRunning} is true;
     * always reports success, mirroring the original best-effort contract.
     */
    @Override
    public boolean cancel(boolean mayInterruptIfRunning) {
        if (mayInterruptIfRunning) {
            cancel();
        }
        return true;
    }

    /** Blocks (uninterruptibly bounded only by interrupt) until the job completes. */
    @Override
    public Object get() throws ExecutionException, InterruptedException {
        if (!this.isCompleted()) {
            waitComplete(false, -1L);
        }
        return handleResult();
    }

    /** Returns whatever result has been produced so far, without waiting. */
    @Override
    public Object getPartial() {
        return this.getMethodContext().getResult();
    }

    /**
     * Maps the terminal job state to a Future result: the context result on success,
     * otherwise an {@link ExecutionException} wrapping the scheduler's error cause.
     */
    public Object handleResult() throws ExecutionException {
        if (this.isSucceed()) {
            return this.getMethodContext().getResult();
        }
        // TODO: 2020/8/3 add dedicated handling for the cancelled state
        throw new ExecutionException(this.getErrorResponse().t());
    }

    @Override
    public Object get(long timeout, TimeUnit unit) throws ExecutionException, InterruptedException, TimeoutException {
        if (unit == null) unit = TimeUnit.NANOSECONDS;
        if (!this.isCompleted()
                && !SchedulerEventState.isCompleted(SchedulerEventState.apply(waitComplete(true, unit.toNanos(timeout))))) {
            throw new TimeoutException();
        }
        return handleResult();
    }

    /**
     * Parks the calling thread until the job completes or the deadline passes.
     *
     * @param timed whether {@code nanos} bounds the wait
     * @param nanos maximum wait in nanoseconds (ignored when {@code timed} is false)
     * @return the scheduler state id observed on exit (callers re-check completion)
     * @throws InterruptedException if the caller was interrupted while waiting
     */
    private int waitComplete(boolean timed, long nanos) throws InterruptedException {
        long endTime = timed ? System.nanoTime() + nanos : -1L;
        for (; ; ) {
            if (Thread.interrupted()) {
                throw new InterruptedException();
            }
            if (this.isCompleted()) {
                return this.getState().id();
            } else if (blockThread == null)
                // first pass: register ourselves so the listener knows whom to unpark
                blockThread = Thread.currentThread();
            else if (timed) {
                nanos = endTime - System.nanoTime();
                if (nanos <= 0) {
                    return this.getState().id();
                }
                LockSupport.parkNanos(this, nanos);
            } else
                LockSupport.park(this);
        }
    }
}
package com.webank.wedatasphere.linkis.message.builder;

import com.webank.wedatasphere.linkis.message.context.AbstractMessageSchedulerContext;
import com.webank.wedatasphere.linkis.message.scheduler.MethodExecuteWrapper;
import com.webank.wedatasphere.linkis.protocol.message.RequestProtocol;

import java.util.List;
import java.util.Map;

/**
 * Default {@link MessageJobBuilder}: collects the request protocol, matched method wrappers,
 * method context and scheduler context, then assembles a {@link DefaultMessageJob}.
 *
 * @date 2020/7/17
 */
public class DefaultMessageJobBuilder implements MessageJobBuilder {

    private RequestProtocol requestProtocol;

    /** Service name -&gt; method wrappers to attach to the built job. */
    private Map<String, List<MethodExecuteWrapper>> methodExecuteWrappers;

    private ServiceMethodContext smc;

    private AbstractMessageSchedulerContext context;

    /** Starts a fresh builder instance (the builder itself is stateful). */
    @Override
    public MessageJobBuilder of() {
        return new DefaultMessageJobBuilder();
    }

    @Override
    public MessageJobBuilder with(RequestProtocol requestProtocol) {
        this.requestProtocol = requestProtocol;
        return this;
    }

    @Override
    public MessageJobBuilder with(Map<String, List<MethodExecuteWrapper>> methodExecuteWrappers) {
        this.methodExecuteWrappers = methodExecuteWrappers;
        return this;
    }

    @Override
    public MessageJobBuilder with(ServiceMethodContext smc) {
        this.smc = smc;
        return this;
    }

    @Override
    public MessageJobBuilder with(AbstractMessageSchedulerContext context) {
        this.context = context;
        return this;
    }

    /** Builds the job from everything collected via {@code with(...)}. */
    @Override
    public MessageJob build() {
        DefaultMessageJob messageJob = new DefaultMessageJob();
        messageJob.setMethodExecuteWrappers(this.methodExecuteWrappers);
        messageJob.setRequestProtocol(this.requestProtocol);
        messageJob.setMethodContext(this.smc);
        messageJob.setContext(this.context);
        return messageJob;
    }
}
package com.webank.wedatasphere.linkis.message.builder;

import com.webank.wedatasphere.linkis.message.context.MessageSchedulerContext;
import com.webank.wedatasphere.linkis.message.exception.MessageWarnException;
import com.webank.wedatasphere.linkis.protocol.message.RequestProtocol;
import com.webank.wedatasphere.linkis.rpc.Sender;
import com.webank.wedatasphere.linkis.scheduler.queue.Job;
import com.webank.wedatasphere.linkis.scheduler.queue.SchedulerEventState;
import scala.concurrent.duration.Duration;

import javax.servlet.http.HttpServletRequest;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;

import static com.webank.wedatasphere.linkis.message.conf.MessageSchedulerConf.*;

/**
 * Default {@link ServiceMethodContext}: a per-message attribute bag shared between the
 * publisher and the invoked service methods, with convenience accessors for well-known
 * keys (user, request, sender, result) and thread-local bookkeeping for the executing job.
 *
 * @date 2020/7/14
 */
public class DefaultServiceMethodContext implements ServiceMethodContext {

    /** Shared attribute store; ConcurrentHashMap, so neither keys nor values may be null. */
    private final Map<String, Object> attributes = new ConcurrentHashMap<>();

    /** Per-thread set of method orders to skip during chained execution. */
    private final ThreadLocal<Set<Integer>> skips = new ThreadLocal<>();

    /** The scheduler job currently executing on this thread. */
    private final ThreadLocal<Job> job = new ThreadLocal<>();

    @Override
    public void putAttribute(String key, Object value) {
        this.attributes.put(key, value);
    }

    @Override
    public void putIfAbsent(String key, Object value) {
        // only set when the key is missing (or mapped to nothing)
        if (!notNull(key)) {
            putAttribute(key, value);
        }
    }

    @Override
    @SuppressWarnings("unchecked")
    public <T> T getAttribute(String key) {
        return (T) this.attributes.get(key);
    }

    @Override
    @SuppressWarnings("unchecked")
    public <T> T getAttributeOrDefault(String key, T defaultValue) {
        return (T) this.attributes.getOrDefault(key, defaultValue);
    }

    @Override
    public String getUser() {
        return getAttribute(USER_KEY);
    }

    @Override
    public HttpServletRequest getRequest() {
        return getAttribute(REQUEST_KEY);
    }

    @Override
    public boolean notNull(String key) {
        return this.attributes.get(key) != null;
    }

    /** Publishes a follow-up protocol through the context's publisher, reusing this context. */
    @Override
    public MessageJob publish(RequestProtocol requestProtocol) throws MessageWarnException {
        MessageSchedulerContext context = getAttribute(CONTEXT_KEY);
        return context.getPublisher().publish(requestProtocol, this);
    }

    @Override
    public void send(Object message) {
        Sender sender = getAttribute(SENDER_KEY);
        sender.send(message);
    }

    @Override
    public Object ask(Object message) {
        Sender sender = getAttribute(SENDER_KEY);
        return sender.ask(message);
    }

    @Override
    public Object ask(Object message, Duration timeout) {
        Sender sender = getAttribute(SENDER_KEY);
        return sender.ask(message, timeout);
    }

    @Override
    public Sender getSender() {
        return getAttribute(SENDER_KEY);
    }

    @Override
    public void setTimeoutPolicy(MessageJobTimeoutPolicy policy) {
        putAttribute(TIMEOUT_POLICY, policy);
    }

    @Override
    public void setResult(Object result) {
        putAttribute(RESULT_KEY, result);
    }

    @Override
    public <T> T getResult() {
        return getAttribute(RESULT_KEY);
    }

    @Override
    public boolean isInterrupted() {
        // linkis-scheduler has no dedicated "interrupted" state; Cancelled is the closest match.
        // NOTE(review): NPE if no job was bound to this thread via setJob — presumably guaranteed
        // by the executor; confirm.
        return SchedulerEventState.Cancelled() == this.job.get().getState();
    }

    @Override
    public boolean isCancel() {
        // linkis-scheduler only exposes Cancelled
        return SchedulerEventState.Cancelled() == this.job.get().getState();
    }

    @Override
    public boolean isSuccess() {
        return SchedulerEventState.Succeed() == this.job.get().getState();
    }

    /** Binds the currently executing job to this thread. */
    public void setJob(Job job) {
        this.job.set(job);
    }

    /** Clears the thread-bound job (call when execution finishes to avoid leaks). */
    public void removeJob() {
        this.job.remove();
    }

    /** Marks the given method orders as skipped for the current thread. */
    public void setSkips(Integer... orders) {
        Set<Integer> oldOrders = skips.get();
        if (oldOrders == null) {
            Set<Integer> newOrders = new HashSet<>(Arrays.asList(orders));
            skips.set(newOrders);
        } else {
            oldOrders.addAll(Arrays.asList(orders));
        }
    }

    /** Clears the thread-local skip set. */
    public void removeSkips() {
        this.skips.remove();
    }

}
package com.webank.wedatasphere.linkis.message.builder;

import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;

/**
 * A minimal Future abstraction for message jobs, mirroring
 * {@link java.util.concurrent.Future} but adding {@link #getPartial()} for retrieving an
 * intermediate result without waiting for completion.
 *
 * @param <V> the result type (restored: the original declaration lost its type parameter)
 * @date 2020/8/4
 */
public interface Future<V> {

    /** Attempts to cancel the job; see implementations for interruption semantics. */
    boolean cancel(boolean mayInterruptIfRunning);

    /** Waits for completion and returns the result. */
    V get() throws InterruptedException, ExecutionException;

    /** Returns whatever result is available right now, without blocking. */
    V getPartial();

    /** Waits at most the given timeout for completion and returns the result. */
    V get(long timeout, TimeUnit unit)
            throws InterruptedException, ExecutionException, TimeoutException;
}
package com.webank.wedatasphere.linkis.message.builder;

import com.webank.wedatasphere.linkis.message.context.AbstractMessageSchedulerContext;
import com.webank.wedatasphere.linkis.message.scheduler.MethodExecuteWrapper;
import com.webank.wedatasphere.linkis.protocol.message.RequestProtocol;

import java.util.List;
import java.util.Map;

/**
 * A schedulable message job: runnable by the scheduler, awaitable as a {@code Future<Object>}
 * by the publisher, and carrying the protocol plus the matched service methods.
 *
 * @date 2020/7/14
 */
public interface MessageJob extends Runnable, Future<Object> {

    RequestProtocol getRequestProtocol();

    void setRequestProtocol(RequestProtocol requestProtocol);

    /** Service name -&gt; method wrappers matched for this job's protocol. */
    Map<String, List<MethodExecuteWrapper>> getMethodExecuteWrappers();

    void setMethodExecuteWrappers(Map<String, List<MethodExecuteWrapper>> methodExecuteWrappers);

    ServiceMethodContext getMethodContext();

    void setMethodContext(ServiceMethodContext smc);

    AbstractMessageSchedulerContext getContext();

    void setContext(AbstractMessageSchedulerContext context);

}
package com.webank.wedatasphere.linkis.message.builder;

import com.webank.wedatasphere.linkis.message.context.AbstractMessageSchedulerContext;
import com.webank.wedatasphere.linkis.message.scheduler.MethodExecuteWrapper;
import com.webank.wedatasphere.linkis.protocol.message.RequestProtocol;

import java.util.List;
import java.util.Map;

/**
 * Fluent builder for {@link MessageJob} instances. Obtain a fresh builder via {@link #of()},
 * supply the parts with the overloaded {@code with(...)} methods, then call {@link #build()}.
 *
 * @date 2020/7/17
 */
public interface MessageJobBuilder {

    /** Returns a new, empty builder instance. */
    MessageJobBuilder of();

    MessageJobBuilder with(RequestProtocol requestProtocol);

    /** Supplies the service name -&gt; method wrappers mapping for the job. */
    MessageJobBuilder with(Map<String, List<MethodExecuteWrapper>> methodExecuteWrappers);

    MessageJobBuilder with(ServiceMethodContext smc);

    MessageJobBuilder with(AbstractMessageSchedulerContext context);

    /** Assembles the job from everything supplied so far. */
    MessageJob build();
}
package com.webank.wedatasphere.linkis.message.builder;

import com.webank.wedatasphere.linkis.common.utils.JavaLog;
import com.webank.wedatasphere.linkis.scheduler.listener.JobListener;
import com.webank.wedatasphere.linkis.scheduler.queue.Job;

import java.util.concurrent.locks.LockSupport;

/**
 * Job lifecycle listener that wakes the thread blocked in
 * {@link DefaultMessageJob#get()} once the job reaches a terminal state.
 * All other lifecycle callbacks are intentionally no-ops.
 *
 * @date 2020/7/17
 */
public class MessageJobListener extends JavaLog implements JobListener {

    @Override
    public void onJobScheduled(Job job) {
        // no-op
    }

    @Override
    public void onJobInited(Job job) {
        // no-op
    }

    @Override
    public void onJobWaitForRetry(Job job) {
        // no-op
    }

    @Override
    public void onJobRunning(Job job) {
        // no-op
    }

    @Override
    public void onJobCompleted(Job job) {
        if (!(job instanceof DefaultMessageJob)) {
            return;
        }
        DefaultMessageJob messageJob = (DefaultMessageJob) job;
        // unpark(null) is a documented no-op, so no null guard is needed here
        LockSupport.unpark(messageJob.getBlockThread());
    }

}
package com.webank.wedatasphere.linkis.message.builder;

/**
 * Policy applied when a {@link MessageJob} exceeds its allotted time.
 *
 * @date 2020/7/27
 */
public enum MessageJobTimeoutPolicy {
    /**
     * Cancel the job, but do not interrupt the running thread.
     */
    CANCEL,
    /**
     * Interrupt the running thread.
     */
    INTERRUPT,
    /**
     * Return whatever partial result has been produced.
     */
    PARTIAL
}
+ */ + +package com.webank.wedatasphere.linkis.message.builder; + + +import com.webank.wedatasphere.linkis.protocol.message.RequestProtocol; +import com.webank.wedatasphere.linkis.rpc.Sender; +import scala.concurrent.duration.Duration; + +import javax.servlet.http.HttpServletRequest; + +/** + * @date 2020/7/14 + */ +public interface ServiceMethodContext { + + void putAttribute(String key, Object value); + + void putIfAbsent(String key, Object value); + + T getAttribute(String key); + + T getAttributeOrDefault(String key, T defaultValue); + + String getUser(); + + HttpServletRequest getRequest(); + + boolean notNull(String key); + + MessageJob publish(RequestProtocol requestProtocol); + + void send(Object message); + + Object ask(Object message); + + Object ask(Object message, Duration timeout); + + Sender getSender(); + + void setTimeoutPolicy(MessageJobTimeoutPolicy policy); + + void setResult(Object result); + + T getResult(); + + /** + * interrupted 状态 + * messageJob执行失败,messageJob 被cancel,并且mayInterruptIfRunning 为true的情况 + * + * @return + */ + boolean isInterrupted(); + + boolean isCancel(); + + boolean isSuccess(); + +} diff --git a/linkis-commons/linkis-message-scheduler/src/main/java/com/webank/wedatasphere/linkis/message/conf/MessageSchedulerConf.java b/linkis-commons/linkis-message-scheduler/src/main/java/com/webank/wedatasphere/linkis/message/conf/MessageSchedulerConf.java new file mode 100644 index 0000000000000000000000000000000000000000..4cc41773479614a0ff518c71602b4f062e8e56e6 --- /dev/null +++ b/linkis-commons/linkis-message-scheduler/src/main/java/com/webank/wedatasphere/linkis/message/conf/MessageSchedulerConf.java @@ -0,0 +1,43 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.webank.wedatasphere.linkis.message.conf; + + +import com.webank.wedatasphere.linkis.common.conf.CommonVars; +import org.reflections.Reflections; +import org.reflections.scanners.MethodAnnotationsScanner; +import org.reflections.scanners.SubTypesScanner; +import org.reflections.scanners.TypeAnnotationsScanner; + +/** + * @date 2020/7/14 + */ +public class MessageSchedulerConf { + + public final static String SERVICE_SCAN_PACKAGE = CommonVars.apply("wds.linkis.ms.service.scan.package", "com.webank.wedatasphere").getValue(); + + public final static Reflections REFLECTIONS = new Reflections(SERVICE_SCAN_PACKAGE, new MethodAnnotationsScanner(), new TypeAnnotationsScanner(), new SubTypesScanner()); + + public final static String USER_KEY = "_username_"; + public final static String REQUEST_KEY = "_req_"; + public final static String RESULT_KEY = "_result_"; + public final static String CONTEXT_KEY = "_context_"; + public final static String SENDER_KEY = "_sender_"; + public final static String TIMEOUT_POLICY = "_timeout_policy_"; + public final static String DURATION_KEY = "_duration_"; + +} diff --git a/linkis-commons/linkis-message-scheduler/src/main/java/com/webank/wedatasphere/linkis/message/context/AbstractMessageSchedulerContext.java b/linkis-commons/linkis-message-scheduler/src/main/java/com/webank/wedatasphere/linkis/message/context/AbstractMessageSchedulerContext.java new file mode 100644 index 0000000000000000000000000000000000000000..4e3fbfa184e6b407f36cb361ac63752cc0c60691 --- /dev/null +++ 
b/linkis-commons/linkis-message-scheduler/src/main/java/com/webank/wedatasphere/linkis/message/context/AbstractMessageSchedulerContext.java @@ -0,0 +1,115 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.webank.wedatasphere.linkis.message.context; + +import com.webank.wedatasphere.linkis.message.builder.MessageJobBuilder; +import com.webank.wedatasphere.linkis.message.parser.ImplicitParser; +import com.webank.wedatasphere.linkis.message.parser.ServiceParser; +import com.webank.wedatasphere.linkis.message.publisher.MessagePublisher; +import com.webank.wedatasphere.linkis.message.registry.AbstractImplicitRegistry; +import com.webank.wedatasphere.linkis.message.registry.AbstractServiceRegistry; +import com.webank.wedatasphere.linkis.message.scheduler.MessageScheduler; +import com.webank.wedatasphere.linkis.message.tx.TransactionManager; + +/** + * @date 2020/7/15 + */ +public abstract class AbstractMessageSchedulerContext implements MessageSchedulerContext { + + private AbstractServiceRegistry serviceRegistry; + + private MessagePublisher messagePublisher; + + private ServiceParser serviceParser; + + private MessageScheduler messageScheduler; + + private MessageJobBuilder messageJobBuilder; + + private TransactionManager txManager; + + private AbstractImplicitRegistry implicitRegistry; + + private ImplicitParser implicitParser; + + @Override + public MessagePublisher getPublisher() { + return 
this.messagePublisher; + } + + public void setPublisher(MessagePublisher messagePublisher) { + this.messagePublisher = messagePublisher; + } + + @Override + public AbstractServiceRegistry getServiceRegistry() { + return this.serviceRegistry; + } + + public void setServiceRegistry(AbstractServiceRegistry serviceRegistry) { + this.serviceRegistry = serviceRegistry; + } + + public void setserviceParser(ServiceParser serviceParser) { + this.serviceParser = serviceParser; + } + + public void setImplicitRegistry(AbstractImplicitRegistry implicitRegistry) { + this.implicitRegistry = implicitRegistry; + } + + public ImplicitParser getImplicitParser() { + return implicitParser; + } + + public void setImplicitParser(ImplicitParser implicitParser) { + this.implicitParser = implicitParser; + } + + public AbstractImplicitRegistry getImplicitRegistry() { + return this.implicitRegistry; + } + + public ServiceParser getservieParser() { + return this.serviceParser; + } + + public void setScheduler(MessageScheduler messageScheduler) { + this.messageScheduler = messageScheduler; + } + + public MessageScheduler getScheduler() { + return this.messageScheduler; + } + + public void setJobBuilder(MessageJobBuilder messageJobBuilder) { + this.messageJobBuilder = messageJobBuilder; + } + + public MessageJobBuilder getJobBuilder() { + return this.messageJobBuilder; + } + + public TransactionManager getTxManager() { + return this.txManager; + } + + public void setTxManager(TransactionManager txManager) { + this.txManager = txManager; + } + +} diff --git a/linkis-commons/linkis-message-scheduler/src/main/java/com/webank/wedatasphere/linkis/message/context/DefaultMessageSchedulerContext.java b/linkis-commons/linkis-message-scheduler/src/main/java/com/webank/wedatasphere/linkis/message/context/DefaultMessageSchedulerContext.java new file mode 100644 index 0000000000000000000000000000000000000000..119c43d1813dcb8f4d370049c63210f248f7a021 --- /dev/null +++ 
b/linkis-commons/linkis-message-scheduler/src/main/java/com/webank/wedatasphere/linkis/message/context/DefaultMessageSchedulerContext.java @@ -0,0 +1,46 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.webank.wedatasphere.linkis.message.context; + +import com.webank.wedatasphere.linkis.message.builder.DefaultMessageJobBuilder; +import com.webank.wedatasphere.linkis.message.parser.DefaultImplicitParser; +import com.webank.wedatasphere.linkis.message.parser.DefaultServiceParser; +import com.webank.wedatasphere.linkis.message.publisher.DefaultMessagePublisher; +import com.webank.wedatasphere.linkis.message.registry.AbstractImplicitRegistry; +import com.webank.wedatasphere.linkis.message.registry.AbstractServiceRegistry; +import com.webank.wedatasphere.linkis.message.scheduler.DefaultMessageScheduler; +import com.webank.wedatasphere.linkis.message.tx.TransactionManager; + +/** + * @date 2020/7/15 + */ +public class DefaultMessageSchedulerContext extends AbstractMessageSchedulerContext { + + { + setImplicitParser(new DefaultImplicitParser()); + setImplicitRegistry(new AbstractImplicitRegistry(this){}); + setserviceParser(new DefaultServiceParser()); + setPublisher(new DefaultMessagePublisher(this)); + setServiceRegistry(new AbstractServiceRegistry(this) { + }); + setScheduler(new DefaultMessageScheduler()); + setJobBuilder(new DefaultMessageJobBuilder()); + setTxManager(new TransactionManager() { + }); + } + +} 
diff --git a/linkis-commons/linkis-message-scheduler/src/main/java/com/webank/wedatasphere/linkis/message/context/MSContextInstance.java b/linkis-commons/linkis-message-scheduler/src/main/java/com/webank/wedatasphere/linkis/message/context/MSContextInstance.java new file mode 100644 index 0000000000000000000000000000000000000000..2b5be0b67e878de1f69c99b3b5c56bcb8322a63b --- /dev/null +++ b/linkis-commons/linkis-message-scheduler/src/main/java/com/webank/wedatasphere/linkis/message/context/MSContextInstance.java @@ -0,0 +1,52 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.webank.wedatasphere.linkis.message.context; + +import com.webank.wedatasphere.linkis.message.utils.MessageUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * @date 2020/9/17 + */ +public class MSContextInstance { + + private static final Logger LOGGER = LoggerFactory.getLogger(MSContextInstance.class); + + private static volatile MessageSchedulerContext context = null; + + public static MessageSchedulerContext get() { + if (context == null) { + synchronized (MSContextInstance.class) { + if (context != null) { + try { + MessageSchedulerContext bean = MessageUtils.getBean(MessageSchedulerContext.class); + if (bean != null) + context = bean; + else + context = new DefaultMessageSchedulerContext(); + } catch (Throwable e) { + LOGGER.warn("can not load message context from ioc container"); + context = new DefaultMessageSchedulerContext(); + } + } + + } + } + return context; + } +} diff --git a/linkis-commons/linkis-message-scheduler/src/main/java/com/webank/wedatasphere/linkis/message/context/MessageSchedulerContext.java b/linkis-commons/linkis-message-scheduler/src/main/java/com/webank/wedatasphere/linkis/message/context/MessageSchedulerContext.java new file mode 100644 index 0000000000000000000000000000000000000000..15aea42fb2f857d0787786a88d6a2a5de923637c --- /dev/null +++ b/linkis-commons/linkis-message-scheduler/src/main/java/com/webank/wedatasphere/linkis/message/context/MessageSchedulerContext.java @@ -0,0 +1,34 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
package com.webank.wedatasphere.linkis.message.context;

import com.webank.wedatasphere.linkis.message.publisher.MessagePublisher;
import com.webank.wedatasphere.linkis.message.registry.ImplicitRegistry;
import com.webank.wedatasphere.linkis.message.registry.ServiceRegistry;

/**
 * Entry point to the message-scheduler subsystem: exposes the publisher used to submit
 * protocols and the registries holding discovered services and implicit conversions.
 * See AbstractMessageSchedulerContext for the standard wiring.
 *
 * @date 2020/7/14
 */
public interface MessageSchedulerContext {

    // Publisher used to submit RequestProtocol instances as jobs.
    MessagePublisher getPublisher();

    // Registry of message service methods.
    ServiceRegistry getServiceRegistry();

    // Registry of implicit protocol conversions.
    ImplicitRegistry getImplicitRegistry();

}
+ */ + +package com.webank.wedatasphere.linkis.message.context; + +import com.webank.wedatasphere.linkis.message.builder.DefaultMessageJobBuilder; +import com.webank.wedatasphere.linkis.message.parser.DefaultImplicitParser; +import com.webank.wedatasphere.linkis.message.parser.DefaultServiceParser; +import com.webank.wedatasphere.linkis.message.publisher.DefaultMessagePublisher; +import com.webank.wedatasphere.linkis.message.registry.SpringImplicitRegistry; +import com.webank.wedatasphere.linkis.message.registry.SpringServiceRegistry; +import com.webank.wedatasphere.linkis.message.scheduler.DefaultMessageScheduler; +import com.webank.wedatasphere.linkis.message.tx.SpringTransactionManager; + +/** + * @date 2020/9/11 + */ +public class SpringMessageSchedulerContext extends AbstractMessageSchedulerContext { + + { + setImplicitParser(new DefaultImplicitParser()); + setImplicitRegistry(new SpringImplicitRegistry(this)); + setserviceParser(new DefaultServiceParser()); + setPublisher(new DefaultMessagePublisher(this)); + setServiceRegistry(new SpringServiceRegistry(this)); + setScheduler(new DefaultMessageScheduler()); + setJobBuilder(new DefaultMessageJobBuilder()); + setTxManager(new SpringTransactionManager()); + } + +} diff --git a/linkis-commons/linkis-message-scheduler/src/main/java/com/webank/wedatasphere/linkis/message/exception/MessageErrorException.java b/linkis-commons/linkis-message-scheduler/src/main/java/com/webank/wedatasphere/linkis/message/exception/MessageErrorException.java new file mode 100644 index 0000000000000000000000000000000000000000..b072b42ec632294bbb25f99546824bacd56e7294 --- /dev/null +++ b/linkis-commons/linkis-message-scheduler/src/main/java/com/webank/wedatasphere/linkis/message/exception/MessageErrorException.java @@ -0,0 +1,35 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.webank.wedatasphere.linkis.message.exception; + +import com.webank.wedatasphere.linkis.common.exception.ErrorException; + +/** + * @date 2020/7/15 + */ +public class MessageErrorException extends ErrorException { + + public MessageErrorException(int errCode, String desc) { + super(errCode, desc); + } + + public MessageErrorException(int errCode, String desc, Throwable t) { + super(errCode, desc); + initCause(t); + } + +} diff --git a/linkis-commons/linkis-message-scheduler/src/main/java/com/webank/wedatasphere/linkis/message/exception/MessageWarnException.java b/linkis-commons/linkis-message-scheduler/src/main/java/com/webank/wedatasphere/linkis/message/exception/MessageWarnException.java new file mode 100644 index 0000000000000000000000000000000000000000..039c97c5310f206725c0f67cf31bd3b89ff89655 --- /dev/null +++ b/linkis-commons/linkis-message-scheduler/src/main/java/com/webank/wedatasphere/linkis/message/exception/MessageWarnException.java @@ -0,0 +1,35 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.webank.wedatasphere.linkis.message.exception; + +import com.webank.wedatasphere.linkis.common.exception.WarnException; + +/** + * @date 2020/6/10 17:43 + */ +public class MessageWarnException extends WarnException { + + public MessageWarnException(int errCode, String desc) { + super(errCode, desc); + } + + public MessageWarnException(int errCode, String desc, Throwable t) { + super(errCode, desc); + initCause(t); + } + +} diff --git a/linkis-commons/linkis-message-scheduler/src/main/java/com/webank/wedatasphere/linkis/message/parser/DefaultImplicitParser.java b/linkis-commons/linkis-message-scheduler/src/main/java/com/webank/wedatasphere/linkis/message/parser/DefaultImplicitParser.java new file mode 100644 index 0000000000000000000000000000000000000000..91e27ea253689bc0e54cc8cc85218ee0fdf34b89 --- /dev/null +++ b/linkis-commons/linkis-message-scheduler/src/main/java/com/webank/wedatasphere/linkis/message/parser/DefaultImplicitParser.java @@ -0,0 +1,69 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.webank.wedatasphere.linkis.message.parser; + +import com.webank.wedatasphere.linkis.message.annotation.Implicit; +import com.webank.wedatasphere.linkis.protocol.message.RequestProtocol; + +import java.lang.reflect.Method; +import java.util.Arrays; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; + +/** + * @date 2020/7/28 + */ +public class DefaultImplicitParser implements ImplicitParser { + @Override + public Map> parse(Object implicitObject) { + Method[] methods = implicitObject.getClass().getMethods(); + return Arrays.stream(methods) + .filter(this::methodFilterPredicate) + .map(m -> this.getImplicitMethod(m, implicitObject)) + .collect(Collectors.groupingBy(ImplicitMethod::getOutput)); + } + + private ImplicitMethod getImplicitMethod(Method method, Object implicitObject) { + ImplicitMethod implicitMethod = new ImplicitMethod(); + implicitMethod.setMethod(method); + implicitMethod.setImplicitObject(implicitObject); + implicitMethod.setInput(method.getParameterTypes()[0].getName()); + implicitMethod.setOutput(method.getReturnType().getName()); + return implicitMethod; + } + + /** + * 标注了@implicit注解 + * 入参数量只有一个,返回值不为void + * 入参需要是RequestProtocol 的子类 + * 排除出参是入参的父类的情况 + * + * @param method + * @return + */ + private boolean methodFilterPredicate(Method method) { + if (method.getAnnotation(Implicit.class) != null + && method.getParameterCount() == 1 + && !void.class.equals(method.getReturnType())) { + // TODO: 2020/8/4 返回值支持集合 ,参数也可以不用是RequestProtocol的子类 + Class input = method.getParameterTypes()[0]; + return RequestProtocol.class.isAssignableFrom(input) && !method.getReturnType().isAssignableFrom(input); + } + return false; + } +} diff --git a/linkis-commons/linkis-message-scheduler/src/main/java/com/webank/wedatasphere/linkis/message/parser/DefaultServiceParser.java b/linkis-commons/linkis-message-scheduler/src/main/java/com/webank/wedatasphere/linkis/message/parser/DefaultServiceParser.java new file mode 
100644 index 0000000000000000000000000000000000000000..e9f2e10db17cde8b52b75a0e560381eb47835b7a --- /dev/null +++ b/linkis-commons/linkis-message-scheduler/src/main/java/com/webank/wedatasphere/linkis/message/parser/DefaultServiceParser.java @@ -0,0 +1,95 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.webank.wedatasphere.linkis.message.parser; + +import com.webank.wedatasphere.linkis.message.annotation.Chain; +import com.webank.wedatasphere.linkis.message.annotation.NotImplicit; +import com.webank.wedatasphere.linkis.message.annotation.Order; +import com.webank.wedatasphere.linkis.message.annotation.Receiver; +import com.webank.wedatasphere.linkis.message.builder.ServiceMethodContext; + +import java.lang.reflect.Method; +import java.lang.reflect.Parameter; +import java.util.Arrays; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; + +/** + * @date 2020/7/15 + */ +public class DefaultServiceParser implements ServiceParser { + + @Override + public Map> parse(Object service) { + // TODO: 2020/7/15 more analysis + Method[] methods = service.getClass().getMethods(); + return Arrays.stream(methods) + .filter(this::methodFilterPredicate) + .map(m -> getServiceMethod(m, service)) + .collect(Collectors.groupingBy(ServiceMethod::getProtocolName)); + } + + private ServiceMethod getServiceMethod(Method method, Object service) { + ServiceMethod serviceMethod = new ServiceMethod(); + 
serviceMethod.setMethod(method); + serviceMethod.setService(service); + serviceMethod.setAlias(String.format("%s.%s", service.getClass().getName(), method.getName())); + Order order = method.getAnnotation(Order.class); + if (order != null) { + serviceMethod.setOrder(order.value()); + } + Chain chain = method.getAnnotation(Chain.class); + if (chain != null) serviceMethod.setChainName(chain.value()); + Parameter[] parameters = method.getParameters(); + if (parameters.length == 2) { + serviceMethod.setHasMethodContext(true); + if (ServiceMethodContext.class.isAssignableFrom(parameters[0].getType())) + serviceMethod.setMethodContextOnLeft(true); + } + @SuppressWarnings("all") + Parameter parameter = Arrays.stream(parameters) + .filter(p -> !ServiceMethodContext.class.isAssignableFrom(p.getType())).findFirst().get(); + NotImplicit annotation = parameter.getAnnotation(NotImplicit.class); + if (annotation != null) serviceMethod.setAllowImplicit(false); + serviceMethod.setProtocolName(parameter.getType().getName()); + return serviceMethod; + } + + /** + * 标注了@Receiver注解,方法至少一个参数 + * 1个参数:非ServiceMethodContext 子类即可 + * 2个参数 其中一个需要是ServiceMethodContext 的子类 && 2个参数都非ServiceMethodContext 子类即可 + * + * @param method + * @return + */ + private boolean methodFilterPredicate(Method method) { + if (method.getAnnotation(Receiver.class) != null) { + Class[] parameterTypes = method.getParameterTypes(); + if (method.getParameterCount() == 1) { + return !ServiceMethodContext.class.isAssignableFrom(parameterTypes[0]); + } else if (method.getParameterCount() == 2) { + boolean hasContext = Arrays.stream(parameterTypes).anyMatch(ServiceMethodContext.class::isAssignableFrom); + boolean allContext = Arrays.stream(parameterTypes).allMatch(ServiceMethodContext.class::isAssignableFrom); + return hasContext && !allContext; + } + } + return false; + } + +} diff --git a/linkis-commons/linkis-message-scheduler/src/main/java/com/webank/wedatasphere/linkis/message/parser/ImplicitMethod.java 
b/linkis-commons/linkis-message-scheduler/src/main/java/com/webank/wedatasphere/linkis/message/parser/ImplicitMethod.java new file mode 100644 index 0000000000000000000000000000000000000000..0c2e05ed7eda02a9125b3e9bf340c1a490a192ad --- /dev/null +++ b/linkis-commons/linkis-message-scheduler/src/main/java/com/webank/wedatasphere/linkis/message/parser/ImplicitMethod.java @@ -0,0 +1,65 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.webank.wedatasphere.linkis.message.parser; + +import java.lang.reflect.Method; + +/** + * @date 2020/7/28 + */ +public class ImplicitMethod { + + private Object implicitObject; + + private Method method; + + private String input; + + private String output; + + public Object getImplicitObject() { + return implicitObject; + } + + public void setImplicitObject(Object implicitObject) { + this.implicitObject = implicitObject; + } + + public Method getMethod() { + return method; + } + + public void setMethod(Method method) { + this.method = method; + } + + public String getInput() { + return input; + } + + public void setInput(String input) { + this.input = input; + } + + public String getOutput() { + return output; + } + + public void setOutput(String output) { + this.output = output; + } +} diff --git a/linkis-commons/linkis-message-scheduler/src/main/java/com/webank/wedatasphere/linkis/message/parser/ImplicitParser.java 
b/linkis-commons/linkis-message-scheduler/src/main/java/com/webank/wedatasphere/linkis/message/parser/ImplicitParser.java new file mode 100644 index 0000000000000000000000000000000000000000..766ea39a6872d264d917c1b4671734e053611f81 --- /dev/null +++ b/linkis-commons/linkis-message-scheduler/src/main/java/com/webank/wedatasphere/linkis/message/parser/ImplicitParser.java @@ -0,0 +1,29 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.webank.wedatasphere.linkis.message.parser; + +import java.util.List; +import java.util.Map; + +/** + * @date 2020/7/28 + */ +public interface ImplicitParser { + + Map> parse(Object implicitObject); + +} diff --git a/linkis-commons/linkis-message-scheduler/src/main/java/com/webank/wedatasphere/linkis/message/parser/ServiceMethod.java b/linkis-commons/linkis-message-scheduler/src/main/java/com/webank/wedatasphere/linkis/message/parser/ServiceMethod.java new file mode 100644 index 0000000000000000000000000000000000000000..14be8e0a4870f70194d336863db53e708f244b4a --- /dev/null +++ b/linkis-commons/linkis-message-scheduler/src/main/java/com/webank/wedatasphere/linkis/message/parser/ServiceMethod.java @@ -0,0 +1,125 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.webank.wedatasphere.linkis.message.parser; + +import java.lang.reflect.Method; + +/** + * @date 2020/7/15 + */ +public class ServiceMethod { + + private Object service; + + private Method method; + + private String alias; + + private String protocolName; + + private int order = 2147483647; + + private boolean allowImplicit = true; + + private boolean hasMethodContext; + + private ImplicitMethod implicitMethod; + + private boolean methodContextOnLeft; + + private String chainName = "default"; + + public String getChainName() { + return chainName; + } + + public void setChainName(String chainName) { + this.chainName = chainName; + } + + public ImplicitMethod getImplicitMethod() { + return implicitMethod; + } + + public void setImplicitMethod(ImplicitMethod implicitMethod) { + this.implicitMethod = implicitMethod; + } + + public boolean isHasMethodContext() { + return hasMethodContext; + } + + public void setHasMethodContext(boolean hasMethodContext) { + this.hasMethodContext = hasMethodContext; + } + + public boolean isAllowImplicit() { + return allowImplicit; + } + + public void setAllowImplicit(boolean allowImplicit) { + this.allowImplicit = allowImplicit; + } + + public int getOrder() { + return order; + } + + public void setOrder(int order) { + this.order = order; + } + + public Object getService() { + return service; + } + + public void setService(Object service) { + this.service = service; + } + + public Method getMethod() { + return method; + } + + public void setMethod(Method method) { + this.method = method; + } + + 
public String getAlias() { + return alias; + } + + public void setAlias(String alias) { + this.alias = alias; + } + + public String getProtocolName() { + return protocolName; + } + + public void setProtocolName(String protocolName) { + this.protocolName = protocolName; + } + + public boolean isMethodContextOnLeft() { + return methodContextOnLeft; + } + + public void setMethodContextOnLeft(boolean methodContextOnLeft) { + this.methodContextOnLeft = methodContextOnLeft; + } +} diff --git a/linkis-commons/linkis-message-scheduler/src/main/java/com/webank/wedatasphere/linkis/message/parser/ServiceParser.java b/linkis-commons/linkis-message-scheduler/src/main/java/com/webank/wedatasphere/linkis/message/parser/ServiceParser.java new file mode 100644 index 0000000000000000000000000000000000000000..39cf158a523a04748561d4e73099074095d33411 --- /dev/null +++ b/linkis-commons/linkis-message-scheduler/src/main/java/com/webank/wedatasphere/linkis/message/parser/ServiceParser.java @@ -0,0 +1,29 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.webank.wedatasphere.linkis.message.parser; + +import java.util.List; +import java.util.Map; + +/** + * @date 2020/7/15 + */ +public interface ServiceParser { + + Map> parse(Object service); + +} \ No newline at end of file diff --git a/linkis-commons/linkis-message-scheduler/src/main/java/com/webank/wedatasphere/linkis/message/publisher/AbstractMessagePublisher.java b/linkis-commons/linkis-message-scheduler/src/main/java/com/webank/wedatasphere/linkis/message/publisher/AbstractMessagePublisher.java new file mode 100644 index 0000000000000000000000000000000000000000..537016cb1eb8e23fbe7cb4dedeccd04d39816d13 --- /dev/null +++ b/linkis-commons/linkis-message-scheduler/src/main/java/com/webank/wedatasphere/linkis/message/publisher/AbstractMessagePublisher.java @@ -0,0 +1,154 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.webank.wedatasphere.linkis.message.publisher; + +import com.webank.wedatasphere.linkis.common.utils.JavaLog; +import com.webank.wedatasphere.linkis.message.builder.DefaultServiceMethodContext; +import com.webank.wedatasphere.linkis.message.builder.MessageJob; +import com.webank.wedatasphere.linkis.message.builder.ServiceMethodContext; +import com.webank.wedatasphere.linkis.message.context.AbstractMessageSchedulerContext; +import com.webank.wedatasphere.linkis.message.exception.MessageWarnException; +import com.webank.wedatasphere.linkis.message.parser.ImplicitMethod; +import com.webank.wedatasphere.linkis.message.parser.ServiceMethod; +import com.webank.wedatasphere.linkis.message.scheduler.MethodExecuteWrapper; +import com.webank.wedatasphere.linkis.message.utils.MessageUtils; +import com.webank.wedatasphere.linkis.protocol.message.RequestProtocol; + +import java.util.*; +import java.util.concurrent.ConcurrentHashMap; +import java.util.stream.Collectors; + +import static com.webank.wedatasphere.linkis.message.conf.MessageSchedulerConf.CONTEXT_KEY; + +/** + * @date 2020/7/15 + */ +public abstract class AbstractMessagePublisher extends JavaLog implements MessagePublisher { + + private AbstractMessageSchedulerContext context; + + public AbstractMessagePublisher(AbstractMessageSchedulerContext context) { + this.context = context; + } + + public void setContext(AbstractMessageSchedulerContext context) { + this.context = context; + } + + /** + * key是requestProtocol的全类名,Map中,key是groupName + */ + private final Map>> protocolServiceMethodCache = new ConcurrentHashMap<>(); + + + @Override + public MessageJob publish(RequestProtocol requestProtocol) { + return publish(requestProtocol, new DefaultServiceMethodContext()); + } + + @Override + public MessageJob publish(RequestProtocol requestProtocol, ServiceMethodContext serviceMethodContext) { + logger().debug(String.format("receive request:%s", requestProtocol.getClass().getName())); + 
serviceMethodContext.putIfAbsent(CONTEXT_KEY, this.context); + Map> methodExecuteWrappers = getMethodExecuteWrappers(requestProtocol); + MessageJob messageJob = this.context.getJobBuilder().of() + .with(serviceMethodContext).with(requestProtocol).with(this.context) + .with(methodExecuteWrappers).build(); + this.context.getScheduler().submit(messageJob); + return messageJob; + } + + private Map> getMethodExecuteWrappers(RequestProtocol requestProtocol) { + String protocolName = requestProtocol.getClass().getName(); + Map> protocolServiceMethods = this.protocolServiceMethodCache.get(protocolName); + //静态信息,无需加锁 + if (protocolServiceMethods == null) { + Map> serviceMethodCache = this.context.getServiceRegistry().getServiceMethodCache(); + Map> implicitMethodCache = this.context.getImplicitRegistry().getImplicitMethodCache(); + //找出注册方法中,参数是当前请求的父类的 + Map> serviceMatchs = serviceMethodCache.entrySet().stream() + .filter(e -> MessageUtils.isAssignableFrom(e.getKey(), protocolName)) + .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); + //找出implicit方法中,参数是当前请求父类的,根据注册规则,implicit的出参必然和上面的servicematchKeys 不会重复 + Map> implicitMatchs = new HashMap<>(); + for (Map.Entry> implicitEntry : implicitMethodCache.entrySet()) { + //当前implicitMehtod中,input需要是protocolName 的父类or同类 + String implicitEntryKey = implicitEntry.getKey(); + List implicitEntryValue = implicitEntry.getValue(); + // 支持隐式 返回值 和service之间的接口继承关系 + Map> implicitServiceMethods = serviceMethodCache.entrySet().stream() + .filter(e -> MessageUtils.isAssignableFrom(e.getKey(), implicitEntryKey)) + .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); + //排除implicit返回值是protocolName的父类,可能存在另外一个不相干protocol的转换方法的返回值是当前protocol的父类 + if (!MessageUtils.isAssignableFrom(implicitEntryKey, protocolName) && !implicitServiceMethods.isEmpty()) { + for (Map.Entry> implicitServiceMethodEntry : implicitServiceMethods.entrySet()) { + String implicitServiceMethodEntryKey = implicitServiceMethodEntry.getKey(); 
+ List implicitServiceMethodEntryValue = implicitServiceMethodEntry.getValue(); + //参数中要支持implicit的 + List filteredServiceMethods = implicitServiceMethodEntryValue.stream() + .filter(ServiceMethod::isAllowImplicit) + .collect(Collectors.toList()); + //隐式方法中参数需要是当前请求protocol的本类或子类 + List filteredImplicitMethods = implicitEntryValue.stream() + .filter(v -> MessageUtils.isAssignableFrom(v.getInput(), protocolName)) + .collect(Collectors.toList()); + if (!filteredServiceMethods.isEmpty() && !filteredImplicitMethods.isEmpty()) { + //针对每个ServiceMethod 选择,因为可能他们处于不同的service之间 + for (ServiceMethod filteredServiceMethod : filteredServiceMethods) { + Object service = filteredServiceMethod.getService(); + //同service优先 + Optional first = filteredImplicitMethods.stream() + .filter(m -> m.getImplicitObject() == service).findFirst(); + if (first.isPresent()) { + filteredServiceMethod.setImplicitMethod(first.get()); + } else { + // TODO: 2020/7/30 入参父子类的判断优先级,和scala一致 + //简单的只取第一个 + filteredServiceMethod.setImplicitMethod(filteredImplicitMethods.get(0)); + } + } + //添加到缓存中 + implicitMatchs.put(implicitServiceMethodEntryKey, filteredServiceMethods); + } + } + } + } + //merge + serviceMatchs.putAll(implicitMatchs); + //group by chain name 扁平化后再group,这时protocol的父类可能和转换的处于同一个chain中 + serviceMatchs = serviceMatchs.values().stream().flatMap(Collection::stream).collect(Collectors.groupingBy(ServiceMethod::getChainName)); + //order判断 + for (List value : serviceMatchs.values()) { + Integer repeatOrder = MessageUtils.repeatOrder(value); + if (repeatOrder != null && !MessageUtils.orderIsLast(repeatOrder, value)) { + throw new MessageWarnException(10000, String.format("repeat order : %s for request %s", repeatOrder, protocolName)); + } + } + this.protocolServiceMethodCache.put(protocolName, serviceMatchs); + } + //clone 对象并返回 + return serviceMethod2Wrapper(this.protocolServiceMethodCache.get(protocolName)); + } + + private Map> serviceMethod2Wrapper(Map> source) { + HashMap> target = new 
HashMap<>(); + source.forEach((k, v) -> target.put(k, v.stream().map(MethodExecuteWrapper::new).collect(Collectors.toList()))); + return target; + } + + +} diff --git a/linkis-commons/linkis-message-scheduler/src/main/java/com/webank/wedatasphere/linkis/message/publisher/DefaultMessagePublisher.java b/linkis-commons/linkis-message-scheduler/src/main/java/com/webank/wedatasphere/linkis/message/publisher/DefaultMessagePublisher.java new file mode 100644 index 0000000000000000000000000000000000000000..d5e6d80ffc6807a3d62c3fce79c96e2e7f7df0d9 --- /dev/null +++ b/linkis-commons/linkis-message-scheduler/src/main/java/com/webank/wedatasphere/linkis/message/publisher/DefaultMessagePublisher.java @@ -0,0 +1,34 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.webank.wedatasphere.linkis.message.publisher; + +import com.webank.wedatasphere.linkis.message.context.AbstractMessageSchedulerContext; + +/** + * @date 2020/7/15 + */ +public class DefaultMessagePublisher extends AbstractMessagePublisher { + + public DefaultMessagePublisher(AbstractMessageSchedulerContext context) { + super(context); + } + + public DefaultMessagePublisher() { + this(null); + } + +} diff --git a/linkis-commons/linkis-message-scheduler/src/main/java/com/webank/wedatasphere/linkis/message/publisher/MessagePublisher.java b/linkis-commons/linkis-message-scheduler/src/main/java/com/webank/wedatasphere/linkis/message/publisher/MessagePublisher.java new file mode 100644 index 0000000000000000000000000000000000000000..d92fe1284390b7782660f70afe9cdefe23dd702f --- /dev/null +++ b/linkis-commons/linkis-message-scheduler/src/main/java/com/webank/wedatasphere/linkis/message/publisher/MessagePublisher.java @@ -0,0 +1,33 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.webank.wedatasphere.linkis.message.publisher; + +import com.webank.wedatasphere.linkis.message.builder.MessageJob; +import com.webank.wedatasphere.linkis.message.builder.ServiceMethodContext; +import com.webank.wedatasphere.linkis.protocol.message.RequestProtocol; + + +/** + * @date 2020/7/14 + */ +public interface MessagePublisher { + + MessageJob publish(RequestProtocol requestProtocol); + + MessageJob publish(RequestProtocol requestProtocol, ServiceMethodContext serviceMethodContext); + +} diff --git a/linkis-commons/linkis-message-scheduler/src/main/java/com/webank/wedatasphere/linkis/message/registry/AbstractImplicitRegistry.java b/linkis-commons/linkis-message-scheduler/src/main/java/com/webank/wedatasphere/linkis/message/registry/AbstractImplicitRegistry.java new file mode 100644 index 0000000000000000000000000000000000000000..cf4f9c617f183ec02ddd0a7173b52a5bd5ae62c8 --- /dev/null +++ b/linkis-commons/linkis-message-scheduler/src/main/java/com/webank/wedatasphere/linkis/message/registry/AbstractImplicitRegistry.java @@ -0,0 +1,88 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
package com.webank.wedatasphere.linkis.message.registry;

import com.google.common.collect.Interner;
import com.google.common.collect.Interners;
import com.webank.wedatasphere.linkis.common.utils.JavaLog;
import com.webank.wedatasphere.linkis.message.context.AbstractMessageSchedulerContext;
import com.webank.wedatasphere.linkis.message.parser.ImplicitMethod;

import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

/**
 * Base {@link ImplicitRegistry}: parses "implicit" conversion methods from a
 * registered object (via the context's implicit parser) and caches them keyed
 * by their output type.
 *
 * @date 2020/7/28
 */
public abstract class AbstractImplicitRegistry extends JavaLog implements ImplicitRegistry {

    private final AbstractMessageSchedulerContext context;

    /**
     * key: output type name -> implicit methods producing that type
     */
    private final Map<String, List<ImplicitMethod>> implicitMethodCache = new ConcurrentHashMap<>();

    public AbstractImplicitRegistry(AbstractMessageSchedulerContext context) {
        this.context = context;
    }

    // weak interner gives a per-string monitor, so registration of different
    // classes / cache keys never contends on a single global lock
    @SuppressWarnings("all")
    public final Interner<String> lock = Interners.newWeakInterner();

    /** Objects already parsed, keyed by fully-qualified class name. */
    private final Map<String, Object> registeredImplicitObjectMap = new ConcurrentHashMap<>();


    /**
     * Parses the given object exactly once and merges its implicit methods into
     * the cache. Concurrent registrations of the same class are serialized on
     * the interned class name.
     *
     * @param implicitObject object whose implicit methods should be registered
     */
    @Override
    @SuppressWarnings("all")
    public void register(Object implicitObject) {
        String implicitObjectName = implicitObject.getClass().getName();
        synchronized (this.lock.intern(implicitObjectName)) {
            // 1. skip if this class has already been parsed
            Object o = this.registeredImplicitObjectMap.get(implicitObjectName);
            if (o != null) return;
            Map<String, List<ImplicitMethod>> implicitMethods = this.context.getImplicitParser().parse(implicitObject);
            implicitMethods.forEach(this::refreshImplicitMethodCache);
            this.registeredImplicitObjectMap.put(implicitObjectName, implicitObject);
        }
    }

    @SuppressWarnings("all")
    private void refreshImplicitMethodCache(String key, List<ImplicitMethod> implicitMethods) {
        synchronized (this.lock.intern(key)) {
            // implicits of the same object with identical input/output are filtered out
            List<ImplicitMethod> cached = this.implicitMethodCache.computeIfAbsent(key, k -> new ArrayList<>());
            for (ImplicitMethod implicitMethod : implicitMethods) {
                // no defensive copy needed: isImplicitRepeat only reads the list,
                // and we hold the per-key lock here
                if (isImplicitRepeat(cached, implicitMethod)) {
                    // TODO: 2020/7/29 logging
                    continue;
                }
                cached.add(implicitMethod);
            }
        }
    }

    /**
     * True when an equivalent implicit (same owner instance — identity compare
     * is intentional — and same input type) is already cached.
     */
    private boolean isImplicitRepeat(List<ImplicitMethod> cached, ImplicitMethod implicitMethod) {
        return cached.stream().anyMatch(im -> im.getImplicitObject() == implicitMethod.getImplicitObject()
                && im.getInput().equals(implicitMethod.getInput()));
    }

    public Map<String, List<ImplicitMethod>> getImplicitMethodCache() {
        return this.implicitMethodCache;
    }
}
package com.webank.wedatasphere.linkis.message.registry;

import com.google.common.collect.Interner;
import com.google.common.collect.Interners;
import com.webank.wedatasphere.linkis.common.utils.JavaLog;
import com.webank.wedatasphere.linkis.message.context.AbstractMessageSchedulerContext;
import com.webank.wedatasphere.linkis.message.exception.MessageWarnException;
import com.webank.wedatasphere.linkis.message.parser.ServiceMethod;
import com.webank.wedatasphere.linkis.message.parser.ServiceParser;

import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

/**
 * Base {@link ServiceRegistry}: parses receiver methods out of a service object
 * (via the context's service parser) and caches them keyed by the protocol /
 * implicit-object class name they accept.
 *
 * @date 2020/7/15
 */
public abstract class AbstractServiceRegistry extends JavaLog implements ServiceRegistry {

    // weak interner gives a per-string monitor, avoiding one global lock for
    // unrelated services / keys
    @SuppressWarnings("all")
    public final Interner<String> lock = Interners.newWeakInterner();

    /**
     * key: request protocol or custom implicit object class name
     */
    private final Map<String, List<ServiceMethod>> serviceMethodCache = new ConcurrentHashMap<>();

    /** Services already registered, keyed by fully-qualified class name. */
    private final Map<String, Object> registeredServiceMap = new ConcurrentHashMap<>();

    private final AbstractMessageSchedulerContext context;

    public AbstractServiceRegistry(AbstractMessageSchedulerContext context) {
        this.context = context;
    }

    /**
     * Registers a service exactly once. Guards against concurrent registration
     * through different paths (e.g. Spring scan and manual registration) and
     * against same-named classes from different registrars, by locking on the
     * interned fully-qualified class name.
     *
     * @param service service object whose receiver methods are registered
     */
    @SuppressWarnings("all")
    @Override
    public void register(Object service) {
        String serviceName = service.getClass().getName();
        synchronized (this.lock.intern(serviceName)) {
            // 1. skip if already registered
            Object o = this.registeredServiceMap.get(serviceName);
            if (o != null) return;
            // 2. parse
            ServiceParser serviceParser = this.context.getservieParser();
            Map<String, List<ServiceMethod>> serviceMethods = serviceParser.parse(service);
            // 3. register
            serviceMethods.forEach(this::register);
            this.registeredServiceMap.put(serviceName, service);
        }
    }

    /**
     * Merges the parsed methods for one key into the cache, serializing
     * concurrent registration of the same key from different services.
     *
     * @param key            protocol / implicit class name
     * @param serviceMethods methods accepting that key
     * @throws MessageWarnException never thrown here; kept for doc parity
     */
    @SuppressWarnings("all")
    private void register(String key, List<ServiceMethod> serviceMethods) {
        synchronized (this.lock.intern(key)) {
            // 1. add to cache
            refreshServiceMethodCache(key, serviceMethods);
        }
    }

    private void refreshServiceMethodCache(String key, List<ServiceMethod> serviceMethods) {
        this.serviceMethodCache.computeIfAbsent(key, k -> new ArrayList<>()).addAll(serviceMethods);
    }

    public Map<String, List<ServiceMethod>> getServiceMethodCache() {
        return this.serviceMethodCache;
    }

}
package com.webank.wedatasphere.linkis.message.registry;

/**
 * Registry of objects that provide "implicit" conversion methods, which adapt
 * an incoming protocol into the parameter type a service method expects.
 *
 * @date 2020/7/28
 */
public interface ImplicitRegistry {

    /**
     * Registers an object whose implicit methods should be parsed and cached.
     *
     * @param implicitObject the provider object (idempotent per class)
     */
    void register(Object implicitObject);

}
package com.webank.wedatasphere.linkis.message.registry;

/**
 * Registry of message-receiving service objects whose annotated methods are
 * dispatched by the message scheduler.
 *
 * @date 2020/7/14
 */
public interface ServiceRegistry {

    /**
     * Registers a service object so its receiver methods can be invoked for
     * matching protocols.
     *
     * @param service the service instance (idempotent per class)
     */
    void register(Object service);

}
package com.webank.wedatasphere.linkis.message.registry;


import com.webank.wedatasphere.linkis.message.annotation.Implicit;
import com.webank.wedatasphere.linkis.message.context.AbstractMessageSchedulerContext;
import com.webank.wedatasphere.linkis.message.utils.MessageUtils;

import java.lang.reflect.Method;
import java.util.Set;
import java.util.stream.Collectors;

import static com.webank.wedatasphere.linkis.message.conf.MessageSchedulerConf.REFLECTIONS;

/**
 * Implicit registry that scans the classpath for {@link Implicit}-annotated
 * methods at construction time. Providers that are Spring beans are taken from
 * the context; others are instantiated reflectively via their no-arg constructor.
 *
 * @date 2020/7/28
 */
public class SpringImplicitRegistry extends AbstractImplicitRegistry {

    public SpringImplicitRegistry(AbstractMessageSchedulerContext context) {
        super(context);
        Set<Method> implicitMethods = REFLECTIONS.getMethodsAnnotatedWith(Implicit.class);
        Set<Class<?>> implicitClasses = implicitMethods.stream().map(Method::getDeclaringClass).collect(Collectors.toSet());
        // distinguish methods on Spring beans from the rest; the rest are
        // instantiated reflectively (requires an accessible no-arg constructor)
        for (Class<?> implicitClass : implicitClasses) {
            Object bean = MessageUtils.getBean(implicitClass);
            if (bean == null) {
                try {
                    // getDeclaredConstructor().newInstance() replaces the
                    // deprecated Class#newInstance; failures are still caught below
                    bean = implicitClass.getDeclaredConstructor().newInstance();
                } catch (Throwable t) {
                    logger().warn(String.format("reflection failed to create object %s", implicitClass.getName()));
                }
            }
            if (bean != null) this.register(bean);
        }
    }
}
package com.webank.wedatasphere.linkis.message.registry;

import com.webank.wedatasphere.linkis.message.annotation.Receiver;
import com.webank.wedatasphere.linkis.message.context.AbstractMessageSchedulerContext;
import com.webank.wedatasphere.linkis.message.utils.MessageUtils;

import java.lang.reflect.Method;
import java.util.Objects;
import java.util.Set;
import java.util.stream.Collectors;

import static com.webank.wedatasphere.linkis.message.conf.MessageSchedulerConf.REFLECTIONS;

/**
 * Service registry that scans the classpath for {@link Receiver}-annotated
 * methods at construction time and registers the declaring classes' Spring
 * beans. Classes with no corresponding bean are skipped silently.
 *
 * @date 2020/7/15
 */
public class SpringServiceRegistry extends AbstractServiceRegistry {

    public SpringServiceRegistry(AbstractMessageSchedulerContext context) {
        super(context);
        Set<Class<?>> services = REFLECTIONS.getMethodsAnnotatedWith(Receiver.class).stream()
                .map(Method::getDeclaringClass).collect(Collectors.toSet());
        // only Spring-managed declaring classes are registered here
        services.stream().map(MessageUtils::getBean).filter(Objects::nonNull).forEach(this::register);
    }

}
package com.webank.wedatasphere.linkis.message.scheduler;

import com.webank.wedatasphere.linkis.common.utils.JavaLog;
import com.webank.wedatasphere.linkis.message.builder.DefaultServiceMethodContext;
import com.webank.wedatasphere.linkis.message.builder.MessageJob;
import com.webank.wedatasphere.linkis.message.builder.ServiceMethodContext;
import com.webank.wedatasphere.linkis.message.exception.MessageWarnException;
import com.webank.wedatasphere.linkis.message.parser.ImplicitMethod;
import com.webank.wedatasphere.linkis.message.utils.MessageUtils;
import com.webank.wedatasphere.linkis.protocol.message.RequestProtocol;
import com.webank.wedatasphere.linkis.scheduler.queue.Job;

import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.*;
import java.util.stream.Collectors;

/**
 * Drives one {@link MessageJob}: the job's service methods are grouped into
 * named chains, and within each chain are executed in ascending "order", with
 * independent methods running in parallel on {@link #getExecutorService()}.
 *
 * NOTE(review): the error field makes an instance single-job; instances appear
 * to be created per event (see DefaultMessageExecutor) — confirm before reuse.
 *
 * @date 2020/7/21
 */
public abstract class AbstractMessageExecutor extends JavaLog implements MessageExecutor {

    // first throwable raised by any service method of the current run
    private Throwable t;

    private void methodErrorHandle(Throwable t) {
        // both branches record the throwable; only cause-less ones get the debug note
        this.t = t;
        if (t.getCause() == null) {
            logger().debug("unexpected error occur");
        }
    }

    /** Collects, across all chains, the methods whose order is minimal within their chain. */
    private List<MethodExecuteWrapper> getMinOrderMethodWrapper(Map<String, List<MethodExecuteWrapper>> methodWrappers) {
        List<MethodExecuteWrapper> minOrderMethodWrapper = new ArrayList<>();
        methodWrappers.forEach((k, v) -> v.forEach(m -> {
            if (MessageUtils.orderIsMin(m, v)) minOrderMethodWrapper.add(m);
        }));
        return minOrderMethodWrapper;
    }

    /** Min-order methods within one chain; usually a single method, the chain tail may yield several. */
    private List<MethodExecuteWrapper> getMinOrderMethodWrapper(List<MethodExecuteWrapper> methodWrappers) {
        return methodWrappers.stream().filter(m -> MessageUtils.orderIsMin(m, methodWrappers)).collect(Collectors.toList());
    }

    /** True once every chain has been drained. */
    private boolean shouldBreak(Map<String, List<MethodExecuteWrapper>> methodWrappers) {
        return methodWrappers.values().stream().allMatch(List::isEmpty);
    }

    private void cleanMethodContextThreadLocal(ServiceMethodContext methodContext) {
        if (methodContext instanceof DefaultServiceMethodContext) {
            ((DefaultServiceMethodContext) methodContext).removeJob();
            ((DefaultServiceMethodContext) methodContext).removeSkips();
        }
    }

    private void setMethodContextThreadLocal(ServiceMethodContext methodContext, MessageJob job) {
        if (methodContext instanceof DefaultServiceMethodContext && job instanceof Job) {
            ((DefaultServiceMethodContext) methodContext).setJob((Job) job);
        }
    }

    /**
     * Runs the job to completion, dispatching ready methods to the executor
     * service and waiting for all of them via a latch.
     *
     * @param job the message job to execute
     * @throws InterruptedException if the dispatching thread is interrupted;
     *                              outstanding method futures are cancelled
     */
    @Override
    public void run(MessageJob job) throws InterruptedException {
        RequestProtocol requestProtocol = job.getRequestProtocol();
        ServiceMethodContext methodContext = job.getMethodContext();
        // TODO: 2020/7/22 data structure optimization of variable methodWrappers
        Map<String, List<MethodExecuteWrapper>> methodWrappers = job.getMethodExecuteWrappers();
        int count = methodWrappers.values().stream().mapToInt(List::size).sum();
        // unbounded deque: the previous 16-slot bound could silently drop
        // offer()s, leaving the latch short and run() hanging forever
        LinkedBlockingDeque<MethodExecuteWrapper> queue = new LinkedBlockingDeque<>();
        CopyOnWriteArrayList<Future<?>> methodFutures = new CopyOnWriteArrayList<>();
        CountDownLatch countDownLatch = new CountDownLatch(count);
        getMinOrderMethodWrapper(methodWrappers).forEach(queue::offer);
        try {
            while (!Thread.interrupted()) {
                if (shouldBreak(methodWrappers)) {
                    break;
                }
                MethodExecuteWrapper methodWrapper = queue.poll(10, TimeUnit.MILLISECONDS);
                if (methodWrapper == null) continue;
                methodWrappers.get(methodWrapper.getChainName()).remove(methodWrapper);
                Future<?> methodFuture = getExecutorService().submit(() -> {
                    Object result = null;
                    try {
                        // TODO: 2020/7/31 move the skip decision elsewhere
                        if (!methodWrapper.shouldSkip) {
                            // expose the job to the service method via thread-local state
                            setMethodContextThreadLocal(methodContext, job);
                            Method method = methodWrapper.getMethod();
                            Object service = methodWrapper.getService();
                            info(String.format("message scheduler executor ===> service: %s,method: %s", service.getClass().getName(), method.getName()));
                            Object implicit;
                            // TODO: 2020/8/4 the implicit conversion result should be reused
                            ImplicitMethod implicitMethod = methodWrapper.getImplicitMethod();
                            if (implicitMethod != null) {
                                implicit = implicitMethod.getMethod().invoke(implicitMethod.getImplicitObject(), requestProtocol);
                            } else {
                                implicit = requestProtocol;
                            }
                            if (methodWrapper.isHasMethodContext()) {
                                if (methodWrapper.isMethodContextOnLeft()) {
                                    result = method.invoke(service, methodContext, implicit);
                                } else {
                                    result = method.invoke(service, implicit, methodContext);
                                }
                            } else {
                                result = method.invoke(service, implicit);
                            }
                            // TODO: 2020/8/5 after execution, check whether the service skipped on purpose
                        }
                    } catch (Throwable t) {
                        logger().error(String.format("method %s call failed", methodWrapper.getAlias()), t);
                        // one failure skips every remaining method of the job
                        methodWrappers.forEach((k, v) -> v.forEach(m -> m.setShouldSkip(true)));
                        methodErrorHandle(t);
                    } finally {
                        if (result != null) {
                            methodContext.setResult(result);
                        }
                        // at the chain tail a duplicate method may occasionally be offered,
                        // but the loop breaks before it is polled, so it has no effect
                        getMinOrderMethodWrapper(methodWrappers.get(methodWrapper.getChainName())).forEach(queue::offer);
                        // clear job/skip thread-local state
                        cleanMethodContextThreadLocal(methodContext);
                        countDownLatch.countDown();
                    }
                });
                methodFutures.add(methodFuture);
            }
            countDownLatch.await();
        } catch (InterruptedException ie) {
            methodFutures.forEach(f -> f.cancel(true));
            throw ie;
        }
        if (this.t != null) {
            throw new MessageWarnException(10000, "method call failed", t);
        }
    }

}
package com.webank.wedatasphere.linkis.message.scheduler;

import com.webank.wedatasphere.linkis.message.builder.MessageJob;
import com.webank.wedatasphere.linkis.message.exception.MessageWarnException;
import com.webank.wedatasphere.linkis.message.tx.TransactionManager;
import com.webank.wedatasphere.linkis.protocol.engine.EngineState;
import com.webank.wedatasphere.linkis.scheduler.executer.*;
import com.webank.wedatasphere.linkis.scheduler.queue.SchedulerEvent;

import java.io.IOException;
import java.util.concurrent.ExecutorService;

/**
 * Scheduler {@link Executor} adapter that runs a single {@link MessageJob}
 * event inside a transaction: commit on success, rollback on any failure.
 *
 * @date 2020/7/29
 */
public class DefaultMessageExecutor extends AbstractMessageExecutor implements Executor {

    private final ExecutorService executorService;

    private final SchedulerEvent event;

    public DefaultMessageExecutor(SchedulerEvent event, ExecutorService executorService) {
        this.event = event;
        this.executorService = executorService;
    }

    @Override
    public ExecutorService getExecutorService() {
        return this.executorService;
    }

    @Override
    public long getId() {
        return 0;
    }

    /**
     * Executes the bound event if it is a {@link MessageJob}, wrapping the run
     * in a transaction obtained from the job context.
     *
     * @param executeRequest unused; the job comes from the bound event
     * @return success, or an error response carrying the failure cause
     */
    @Override
    public ExecuteResponse execute(ExecuteRequest executeRequest) {
        if (event instanceof MessageJob) {
            TransactionManager txManager = ((MessageJob) event).getContext().getTxManager();
            Object o = txManager.begin();
            try {
                run((MessageJob) event);
                txManager.commit(o);
                return new SuccessExecuteResponse();
            } catch (InterruptedException ie) {
                // handle InterruptedException
                logger().error("message job execution interrupted", ie);
                txManager.rollback(o);
                return new ErrorExecuteResponse("message job execution interrupted", ie);
            } catch (MessageWarnException mwe) {
                // handle method call failed
                logger().error("method call normal error return");
                txManager.rollback(o);
                return new ErrorExecuteResponse("method call failed", mwe);
            } catch (Throwable t) {
                // error level: an unexpected failure logged at debug would be lost
                logger().error("unexpected error occur", t);
                txManager.rollback(o);
                return new ErrorExecuteResponse("unexpected error", t);
            }
        }
        MessageWarnException eventNotMatchError = new MessageWarnException(10000, "event is not instance of MessageJob");
        return new ErrorExecuteResponse("event is not instance of MessageJob", eventNotMatchError);

    }

    @Override
    public EngineState state() {
        // no engine state to expose for message jobs
        return null;
    }

    @Override
    public ExecutorInfo getExecutorInfo() {
        return new ExecutorInfo(0, null);
    }

    @Override
    public void close() throws IOException {
        // the executor service is shared and owned by the manager; nothing to close here
    }
}
+ */ + +package com.webank.wedatasphere.linkis.message.scheduler; + +import com.webank.wedatasphere.linkis.message.builder.MessageJob; +import com.webank.wedatasphere.linkis.message.builder.MessageJobListener; +import com.webank.wedatasphere.linkis.scheduler.Scheduler; +import com.webank.wedatasphere.linkis.scheduler.executer.ExecutorManager; +import com.webank.wedatasphere.linkis.scheduler.queue.Group; +import com.webank.wedatasphere.linkis.scheduler.queue.GroupFactory; +import com.webank.wedatasphere.linkis.scheduler.queue.Job; +import com.webank.wedatasphere.linkis.scheduler.queue.parallelqueue.ParallelGroup; +import com.webank.wedatasphere.linkis.scheduler.queue.parallelqueue.ParallelScheduler; +import com.webank.wedatasphere.linkis.scheduler.queue.parallelqueue.ParallelSchedulerContextImpl; + +/** + * @date 2020/7/17 + */ +public class DefaultMessageScheduler implements MessageScheduler { + + // TODO: 2020/7/22 configuration + private static final int MAX_RUNING_JOB = Runtime.getRuntime().availableProcessors() * 2; + + private static final int MAX_PARALLELISM_USERS = Runtime.getRuntime().availableProcessors(); + + private static final int MAX_ASK_EXECUTOR_TIMES = 1000; + + private static final String GROUP_NAME = "message-scheduler"; + + private final Scheduler linkisScheduler; + + { + MessageExecutorExecutionManager messageExecutorManager = new MessageExecutorExecutionManager(); + linkisScheduler = new ParallelScheduler( + new ParallelSchedulerContextImpl(MAX_PARALLELISM_USERS) { + @Override + public ExecutorManager getOrCreateExecutorManager() { + return messageExecutorManager; + } + }); + linkisScheduler.init(); + GroupFactory groupFactory = linkisScheduler.getSchedulerContext().getOrCreateGroupFactory(); + //one consumer group is enough + Group group = groupFactory.getOrCreateGroup(GROUP_NAME); + if (group instanceof ParallelGroup) { + ParallelGroup parallelGroup = (ParallelGroup) group; + if (parallelGroup.getMaxRunningJobs() == 0) { + 
parallelGroup.setMaxRunningJobs(MAX_RUNING_JOB); + } + if (parallelGroup.getMaxAskExecutorTimes() == 0) { + parallelGroup.setMaxAskExecutorTimes(MAX_ASK_EXECUTOR_TIMES); + } + } + } + + @Override + public void submit(MessageJob messageJob) { + if (messageJob instanceof Job) { + ((Job) messageJob).setId(GROUP_NAME); + ((Job) messageJob).setJobListener(new MessageJobListener()); + linkisScheduler.submit((Job) messageJob); + } + } + + +} diff --git a/linkis-commons/linkis-message-scheduler/src/main/java/com/webank/wedatasphere/linkis/message/scheduler/MessageExecutor.java b/linkis-commons/linkis-message-scheduler/src/main/java/com/webank/wedatasphere/linkis/message/scheduler/MessageExecutor.java new file mode 100644 index 0000000000000000000000000000000000000000..0dc353ac67c1ab2d11b67afd1652fcbfce180f1d --- /dev/null +++ b/linkis-commons/linkis-message-scheduler/src/main/java/com/webank/wedatasphere/linkis/message/scheduler/MessageExecutor.java @@ -0,0 +1,33 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
package com.webank.wedatasphere.linkis.message.scheduler;

import com.webank.wedatasphere.linkis.message.builder.MessageJob;

import java.util.concurrent.ExecutorService;

/**
 * Runs the service methods of a {@link MessageJob} on a thread pool.
 *
 * @date 2020/7/20
 */
public interface MessageExecutor {

    /**
     * Executes the job to completion.
     *
     * @param job the job whose methods are dispatched
     * @throws InterruptedException if the dispatching thread is interrupted
     */
    void run(MessageJob job) throws InterruptedException;

    /**
     * @return the pool on which individual service methods are submitted
     */
    ExecutorService getExecutorService();


}
package com.webank.wedatasphere.linkis.message.scheduler;

import com.webank.wedatasphere.linkis.common.utils.Utils;
import com.webank.wedatasphere.linkis.scheduler.executer.Executor;
import com.webank.wedatasphere.linkis.scheduler.executer.ExecutorManager;
import com.webank.wedatasphere.linkis.scheduler.listener.ExecutorListener;
import com.webank.wedatasphere.linkis.scheduler.queue.SchedulerEvent;
import scala.Option;
import scala.Some;
import scala.concurrent.duration.Duration;

import java.util.concurrent.ExecutorService;

/**
 * {@link ExecutorManager} that hands out a fresh {@link DefaultMessageExecutor}
 * per event, all sharing one cached thread pool.
 *
 * @date 2020/7/17
 */
public class MessageExecutorExecutionManager extends ExecutorManager {

    /** Shared pool for every executor created by this manager. */
    private final ExecutorService executorService = Utils.newCachedThreadPool(
            Runtime.getRuntime().availableProcessors() * 2, "message-executor_", false);

    @Override
    public void setExecutorListener(ExecutorListener executorListener) {
        // listeners are not supported for message executors
    }

    @Override
    public Executor createExecutor(SchedulerEvent event) {
        return new DefaultMessageExecutor(event, executorService);
    }

    @Override
    public Option<Executor> askExecutor(SchedulerEvent event) {
        // an executor can always be created, so always Some
        return new Some<>(createExecutor(event));
    }

    @Override
    public Option<Executor> askExecutor(SchedulerEvent event, Duration wait) {
        // no waiting needed; delegate to the immediate variant
        return askExecutor(event);
    }

    @Override
    public Option<Executor> getById(long id) {
        // NOTE(review): Some(null) rather than None — callers apparently expect
        // a present-but-null value; confirm before changing to Option.empty
        return new Some<>(null);
    }

    @Override
    public Executor[] getByGroup(String groupName) {
        return new Executor[0];
    }

    @Override
    public void delete(Executor executor) {
        // executors are stateless wrappers; nothing to delete
    }

    @Override
    public void shutdown() {
        // release the shared pool so worker threads do not outlive the manager
        executorService.shutdown();
    }
}
package com.webank.wedatasphere.linkis.message.scheduler;

import com.webank.wedatasphere.linkis.message.builder.MessageJob;

/**
 * Accepts message jobs for asynchronous execution.
 *
 * @date 2020/7/14
 */
public interface MessageScheduler {

    /**
     * Queues the job for execution.
     *
     * @param messageJob the job to schedule
     */
    void submit(MessageJob messageJob);

}
package com.webank.wedatasphere.linkis.message.scheduler;

import com.webank.wedatasphere.linkis.message.parser.ImplicitMethod;
import com.webank.wedatasphere.linkis.message.parser.ServiceMethod;

import java.lang.reflect.Method;

/**
 * Execution-time view of a {@link ServiceMethod}: delegates all metadata to the
 * wrapped method and adds a mutable skip flag set when an earlier method of the
 * same job fails.
 *
 * @date 2020/7/21
 */
public class MethodExecuteWrapper {

    private final ServiceMethod serviceMethod;

    // public: read directly by AbstractMessageExecutor when deciding to run
    public boolean shouldSkip;

    public MethodExecuteWrapper(ServiceMethod serviceMethod) {
        this.serviceMethod = serviceMethod;
    }

    public boolean isShouldSkip() {
        return this.shouldSkip;
    }

    public void setShouldSkip(boolean shouldSkip) {
        this.shouldSkip = shouldSkip;
    }

    /** The reflective method to invoke. */
    public Method getMethod() {
        return this.serviceMethod.getMethod();
    }

    /** The service instance owning the method. */
    public Object getService() {
        return this.serviceMethod.getService();
    }

    /** Human-readable name used in logs. */
    public String getAlias() {
        return this.serviceMethod.getAlias();
    }

    /** Position within the chain; lower orders run first. */
    public int getOrder() {
        return this.serviceMethod.getOrder();
    }

    /** Name of the execution chain this method belongs to. */
    public String getChainName() {
        return this.serviceMethod.getChainName();
    }

    /** Whether the method takes a ServiceMethodContext parameter. */
    public boolean isHasMethodContext() {
        return this.serviceMethod.isHasMethodContext();
    }

    /** Implicit conversion applied to the protocol before invocation, or null. */
    public ImplicitMethod getImplicitMethod() {
        return this.serviceMethod.getImplicitMethod();
    }

    /** Whether the context parameter precedes the payload parameter. */
    public boolean isMethodContextOnLeft() {
        return this.serviceMethod.isMethodContextOnLeft();
    }

}
b/linkis-commons/linkis-message-scheduler/src/main/java/com/webank/wedatasphere/linkis/message/tx/SpringTransactionManager.java new file mode 100644 index 0000000000000000000000000000000000000000..f203d7eedbdcc6e92a9044163f2821548489bcd6 --- /dev/null +++ b/linkis-commons/linkis-message-scheduler/src/main/java/com/webank/wedatasphere/linkis/message/tx/SpringTransactionManager.java @@ -0,0 +1,58 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.webank.wedatasphere.linkis.message.tx; + +import com.webank.wedatasphere.linkis.common.utils.JavaLog; +import com.webank.wedatasphere.linkis.message.utils.MessageUtils; +import org.springframework.transaction.PlatformTransactionManager; +import org.springframework.transaction.TransactionStatus; +import org.springframework.transaction.interceptor.DefaultTransactionAttribute; + +/** + * @date 2020/7/23 + */ +public class SpringTransactionManager extends JavaLog implements TransactionManager { + + private final PlatformTransactionManager platformTransactionManager; + + public SpringTransactionManager() { + platformTransactionManager = MessageUtils.getBean(PlatformTransactionManager.class); + } + + + @Override + public Object begin() { + if (platformTransactionManager != null) { + return platformTransactionManager.getTransaction(new DefaultTransactionAttribute()); + } + return null; + } + + @Override + public void commit(Object o) { + if (o instanceof TransactionStatus && 
platformTransactionManager != null) { + platformTransactionManager.commit((TransactionStatus) o); + } + } + + @Override + public void rollback(Object o) { + if (o instanceof TransactionStatus && platformTransactionManager != null) { + platformTransactionManager.rollback((TransactionStatus) o); + } + } +} diff --git a/linkis-commons/linkis-message-scheduler/src/main/java/com/webank/wedatasphere/linkis/message/tx/TransactionManager.java b/linkis-commons/linkis-message-scheduler/src/main/java/com/webank/wedatasphere/linkis/message/tx/TransactionManager.java new file mode 100644 index 0000000000000000000000000000000000000000..b0b4af36ec64398ddf991409356749f7350fc6ef --- /dev/null +++ b/linkis-commons/linkis-message-scheduler/src/main/java/com/webank/wedatasphere/linkis/message/tx/TransactionManager.java @@ -0,0 +1,34 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.webank.wedatasphere.linkis.message.tx; + +/** + * @date 2020/7/23 + */ +public interface TransactionManager { + + default Object begin() { + return null; + } + + default void commit(Object o) { + } + + default void rollback(Object o) { + } + +} diff --git a/linkis-commons/linkis-message-scheduler/src/main/java/com/webank/wedatasphere/linkis/message/utils/MessageUtils.java b/linkis-commons/linkis-message-scheduler/src/main/java/com/webank/wedatasphere/linkis/message/utils/MessageUtils.java new file mode 100644 index 0000000000000000000000000000000000000000..55eaa98c30fc3e2e771b72c85a7bfcbb646bf290 --- /dev/null +++ b/linkis-commons/linkis-message-scheduler/src/main/java/com/webank/wedatasphere/linkis/message/utils/MessageUtils.java @@ -0,0 +1,93 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.webank.wedatasphere.linkis.message.utils; + +import com.webank.wedatasphere.linkis.DataWorkCloudApplication; +import com.webank.wedatasphere.linkis.message.parser.ServiceMethod; +import com.webank.wedatasphere.linkis.message.scheduler.MethodExecuteWrapper; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.NoSuchBeanDefinitionException; +import org.springframework.context.ApplicationContext; + +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +/** + * @date 2020/7/28 + */ +public class MessageUtils { + + private static final Logger LOGGER = LoggerFactory.getLogger(MessageUtils.class); + + public static T getBean(Class tClass) { + T t = null; + ApplicationContext applicationContext = DataWorkCloudApplication.getApplicationContext(); + if (applicationContext != null) { + try { + t = applicationContext.getBean(tClass); + } catch (NoSuchBeanDefinitionException e) { + LOGGER.warn(String.format("can not get bean from spring ioc:%s", tClass.getName())); + } + } + return t; + } + + public static boolean isAssignableFrom(String supperClassName, String className) { + try { + return Class.forName(supperClassName).isAssignableFrom(Class.forName(className)); + } catch (ClassNotFoundException e) { + LOGGER.error("class not found", e); + return false; + } + } + + public static boolean orderIsMin(MethodExecuteWrapper methodExecuteWrapper, List methodExecuteWrappers) { + for (MethodExecuteWrapper tmp : methodExecuteWrappers) { + if (tmp.getOrder() < methodExecuteWrapper.getOrder()) { + return false; + } + } + return true; + } + + public static boolean orderIsLast(int order, List serviceMethods) { + // TODO: 2020/8/5 方法判断修改为重复的order 支持头部 + if (order == 2147483647) return true; + for (ServiceMethod serviceMethod : serviceMethods) { + if (serviceMethod.getOrder() > order) { + return false; + } + } + return false; + } + + public static Integer repeatOrder(List serviceMethods) { + Map tmp = 
new HashMap<>(); + for (ServiceMethod serviceMethod : serviceMethods) { + int order = serviceMethod.getOrder(); + if (tmp.get(order) == null) { + tmp.put(order, order); + } else { + return order; + } + } + return null; + } + +} diff --git a/linkis-commons/linkis-message-scheduler/src/main/resources/META-INF/spring.factories b/linkis-commons/linkis-message-scheduler/src/main/resources/META-INF/spring.factories new file mode 100644 index 0000000000000000000000000000000000000000..7dcbee3eca9d5a694c909ca02d1336467e7ae099 --- /dev/null +++ b/linkis-commons/linkis-message-scheduler/src/main/resources/META-INF/spring.factories @@ -0,0 +1,2 @@ +org.springframework.boot.autoconfigure.EnableAutoConfiguration=\ +com.webank.wedatasphere.linkis.rpc.MessageRPCSpringConfiguration diff --git a/linkis-commons/linkis-message-scheduler/src/main/scala/com/webank/wedatasphere/linkis/rpc/MessageConverter.scala b/linkis-commons/linkis-message-scheduler/src/main/scala/com/webank/wedatasphere/linkis/rpc/MessageConverter.scala new file mode 100644 index 0000000000000000000000000000000000000000..06ba68283fc5e828f22be31995bedc3a06427278 --- /dev/null +++ b/linkis-commons/linkis-message-scheduler/src/main/scala/com/webank/wedatasphere/linkis/rpc/MessageConverter.scala @@ -0,0 +1,82 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.webank.wedatasphere.linkis.rpc + +import java.lang.reflect.Modifier +import java.util + +import com.webank.wedatasphere.linkis.common.utils.Utils +import com.webank.wedatasphere.linkis.message.annotation.Method +import com.webank.wedatasphere.linkis.message.conf.MessageSchedulerConf.{REFLECTIONS, _} +import com.webank.wedatasphere.linkis.message.exception.MessageErrorException +import com.webank.wedatasphere.linkis.protocol.message.RequestMethod +import com.webank.wedatasphere.linkis.rpc.exception.DWCURIException +import com.webank.wedatasphere.linkis.server.{BDPJettyServerHelper, Message} + +import scala.collection.JavaConversions._ + +/** + * @date 2020/8/6 + * + */ +class MessageConverter { + + private val protocolNameCache = new util.HashMap[String, String] + + REFLECTIONS.getTypesAnnotatedWith(classOf[Method]).foreach { t => + val method = t.getAnnotation(classOf[Method]) + protocolNameCache.put(method.value(), t.getName) + } + + REFLECTIONS.getSubTypesOf(classOf[RequestMethod]).filter(!_.isInterface).filter(c => !Modifier.isAbstract(c.getModifiers)).foreach { t => + val protocol = try { + t.newInstance() + } catch { + case e: Throwable => + throw new RuntimeException(s"Failed to create new instance of class ${t.getName}", e) + } + val method = t.getMethod("method").invoke(protocol).toString + protocolNameCache.put(method, t.getName) + } + + @throws[MessageErrorException] + def convert(message: Message): util.Map[String, Object] = { + val methodUrl = message.getMethod + val protocolStr = protocolNameCache.get(methodUrl) + if (protocolStr == null) throw new MessageErrorException(10000, s"no suitable protocol was found for method:${methodUrl}") + val returnType = new util.HashMap[String, Object]() + val data = message.getData + returnType += REQUEST_KEY -> data.remove(REQUEST_KEY) + val protocol = Utils.tryThrow(Class.forName(protocolStr)) { + case _: ClassNotFoundException => + new DWCURIException(10003, s"The corresponding anti-sequence 
class $protocolStr was not found.(找不到对应的反序列类$protocolStr.)") + case t: ExceptionInInitializerError => + val exception = new DWCURIException(10004, s"The corresponding anti-sequence class ${protocolStr} failed to initialize.(对应的反序列类${protocolStr}初始化失败.)") + exception.initCause(t) + exception + case t: Throwable => t + } + returnType += "_request_protocol_" -> BDPJettyServerHelper.gson.fromJson(BDPJettyServerHelper.gson.toJson(data), protocol) + //设置一个restful请求的客户端 + // TODO: req中获取到ip和地址 + data.clear() + data.put("name", "") + data.put("instance", "") + returnType + } + +} diff --git a/linkis-commons/linkis-message-scheduler/src/main/scala/com/webank/wedatasphere/linkis/rpc/MessageRPCConsumer.scala b/linkis-commons/linkis-message-scheduler/src/main/scala/com/webank/wedatasphere/linkis/rpc/MessageRPCConsumer.scala new file mode 100644 index 0000000000000000000000000000000000000000..5398d035283d1d38b5689582977d1316fbb079e7 --- /dev/null +++ b/linkis-commons/linkis-message-scheduler/src/main/scala/com/webank/wedatasphere/linkis/rpc/MessageRPCConsumer.scala @@ -0,0 +1,81 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.webank.wedatasphere.linkis.rpc + +import java.util + +import com.webank.wedatasphere.linkis.common.exception.ExceptionManager +import com.webank.wedatasphere.linkis.common.utils.Utils +import com.webank.wedatasphere.linkis.rpc.exception.DWCURIException +import com.webank.wedatasphere.linkis.rpc.serializer.ProtostuffSerializeUtil +import com.webank.wedatasphere.linkis.rpc.transform.RPCProduct.{CLASS_VALUE, OBJECT_VALUE} +import com.webank.wedatasphere.linkis.server.{EXCEPTION_MSG, JMap, Message} + +import scala.runtime.BoxedUnit + +/** + * @date 2020/8/6 + * + */ + +class MessageRPCConsumer { + + private val messageConverter: MessageConverter = new MessageConverter + + def overrideToObject(message: Message): Any = { + message.getStatus match { + case 0 => + val data = message.getData + if (data.isEmpty) return BoxedUnit.UNIT + if (isRPCRequest(data)) { + val objectStr = data.get(OBJECT_VALUE).toString + val objectClass = data.get(CLASS_VALUE).toString + val clazz = Utils.tryThrow(Class.forName(objectClass)) { + case _: ClassNotFoundException => + new DWCURIException(10003, s"The corresponding anti-sequence class $objectClass was not found.(找不到对应的反序列类$objectClass.)") + case t: ExceptionInInitializerError => + val exception = new DWCURIException(10004, s"The corresponding anti-sequence class ${objectClass} failed to initialize.(对应的反序列类${objectClass}初始化失败.)") + exception.initCause(t) + exception + case t: Throwable => t + } +// if (null != data.get(IS_REQUEST_PROTOCOL_CLASS) && data.get(IS_REQUEST_PROTOCOL_CLASS).toString.toBoolean) { + ProtostuffSerializeUtil.deserialize(objectStr, clazz) +// } else if (data.get(IS_SCALA_CLASS).toString.toBoolean) { +// val realClass = getSerializableScalaClass(clazz) +// Serialization.read(objectStr)(formats, ManifestFactory.classType(realClass)) +// } else { +// BDPJettyServerHelper.gson.fromJson(objectStr, clazz) +// } + } else { + messageConverter.convert(message) + } + case 4 => + val errorMsg = 
message.getData.get(EXCEPTION_MSG).asInstanceOf[JMap[String, Object]] + ExceptionManager.generateException(errorMsg) + case _ => + val errorMsg = message.getData.get(EXCEPTION_MSG) + if (errorMsg == null) throw new DWCURIException(10005, message.getMessage) + val realError = ExceptionManager.generateException(errorMsg.asInstanceOf[JMap[String, Object]]) + throw realError; + } + } + + def isRPCRequest(data: util.HashMap[String, Object]): Boolean = { + data.containsKey(OBJECT_VALUE) + } +} diff --git a/linkis-commons/linkis-message-scheduler/src/main/scala/com/webank/wedatasphere/linkis/rpc/MessageRPCReceiveRestful.scala b/linkis-commons/linkis-message-scheduler/src/main/scala/com/webank/wedatasphere/linkis/rpc/MessageRPCReceiveRestful.scala new file mode 100644 index 0000000000000000000000000000000000000000..0bebc3f4fe28bd0adf76bfeb84a5dcd2fd0434e3 --- /dev/null +++ b/linkis-commons/linkis-message-scheduler/src/main/scala/com/webank/wedatasphere/linkis/rpc/MessageRPCReceiveRestful.scala @@ -0,0 +1,124 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.webank.wedatasphere.linkis.rpc + +import java.util.concurrent.TimeUnit + +import com.webank.wedatasphere.linkis.message.conf.MessageSchedulerConf._ +import com.webank.wedatasphere.linkis.rpc.exception.DWCURIException +import com.webank.wedatasphere.linkis.rpc.transform.{RPCConsumer, RPCProduct} +import com.webank.wedatasphere.linkis.server.{Message, catchIt} +import javax.annotation.PostConstruct +import javax.servlet.http.HttpServletRequest +import javax.ws.rs.core.MediaType +import javax.ws.rs.{Consumes, POST, Path, Produces} +import org.apache.commons.lang.StringUtils +import org.springframework.beans.factory.annotation.Autowired +import org.springframework.context.annotation.{Import, Primary} +import org.springframework.stereotype.Component +import org.springframework.web.context.request.{RequestContextHolder, ServletRequestAttributes} + +import scala.concurrent.duration.Duration +import scala.runtime.BoxedUnit + +/** + * @date 2020/8/3 + * + */ +@Component +@Path("/rpc") +@Produces(Array(MediaType.APPLICATION_JSON)) +@Consumes(Array(MediaType.APPLICATION_JSON)) +@Primary +@Import(Array(classOf[MessageRPCConsumer])) +class MessageRPCReceiveRestful extends RPCReceiveRestful { + + @Autowired(required = false) + private var receiverChoosers: Array[ReceiverChooser] = Array.empty + @Autowired(required = false) + private var receiverSenderBuilders: Array[ReceiverSenderBuilder] = Array.empty + @Autowired + private var messageRPCConsumer: MessageRPCConsumer = _ + + private def getFirst[K, T](buildArray: Array[K], buildObj: K => Option[T]): Option[T] = { + var obj: Option[T] = None + for (builder <- buildArray if obj.isEmpty) obj = buildObj(builder) + obj + } + + //广播功能去掉,messageScheduler可以提供这种功能,目前只有entrance有此类方法,后续调整 + + private implicit def getReceiver(event: RPCMessageEvent): Option[Receiver] = getFirst[ReceiverChooser, Receiver](receiverChoosers, _.chooseReceiver(event)) + + private implicit def getSender(event: RPCMessageEvent): Sender = 
getFirst[ReceiverSenderBuilder, Sender](receiverSenderBuilders, _.build(event)).get + + private implicit def getMessageRPCConsumer(rpcConsumer: RPCConsumer): MessageRPCConsumer = messageRPCConsumer + + override def registerReceiverChooser(receiverChooser: ReceiverChooser): Unit = { + info("register a new ReceiverChooser " + receiverChooser) + receiverChoosers = receiverChooser +: receiverChoosers + } + + @PostConstruct + def init(): Unit = { + if (!receiverChoosers.exists(_.isInstanceOf[CommonReceiverChooser])) + receiverChoosers = receiverChoosers :+ new CommonReceiverChooser + info("init all receiverChoosers in spring beans, list => " + receiverChoosers.toList) + if (!receiverSenderBuilders.exists(_.isInstanceOf[CommonReceiverSenderBuilder])) + receiverSenderBuilders = receiverSenderBuilders :+ new CommonReceiverSenderBuilder + receiverSenderBuilders = receiverSenderBuilders.sortBy(_.order) + info("init all receiverSenderBuilders in spring beans, list => " + receiverSenderBuilders.toList) + } + + private implicit def toMessage(obj: Any): Message = obj match { + case Unit | () => + RPCProduct.getRPCProduct.ok() + case _: BoxedUnit => RPCProduct.getRPCProduct.ok() + case _ => + RPCProduct.getRPCProduct.toMessage(obj) + } + + private implicit def getReq: HttpServletRequest = { + RequestContextHolder.getRequestAttributes.asInstanceOf[ServletRequestAttributes].getRequest + } + + @Path("receive") + @POST + override def receive(message: Message): Message = invokeReceiver(message, _.receive(_, _)) + + private def invokeReceiver(message: Message, opEvent: (Receiver, Any, Sender) => Message)(implicit req: HttpServletRequest): Message = catchIt { + message.getData.put(REQUEST_KEY, req) + val obj = RPCConsumer.getRPCConsumer.overrideToObject(message) + val serviceInstance = BaseRPCSender.getInstanceInfo(message.getData) + val event = RPCMessageEvent(obj, serviceInstance) + event.map(opEvent(_, obj, event)).getOrElse(RPCProduct.getRPCProduct.notFound()) + } + + 
@Path("receiveAndReply") + @POST + override def receiveAndReply(message: Message): Message = invokeReceiver(message, _.receiveAndReply(_, _)) + + @Path("replyInMills") + @POST + override def receiveAndReplyInMills(message: Message): Message = catchIt { + val duration = message.getData.get("duration") + if (duration == null || StringUtils.isEmpty(duration.toString)) throw new DWCURIException(10002, "The timeout period is not set!(超时时间未设置!)") + val timeout = Duration(duration.toString.toLong, TimeUnit.MILLISECONDS) + invokeReceiver(message, _.receiveAndReply(_, timeout, _)) + } + +} diff --git a/linkis-commons/linkis-message-scheduler/src/main/scala/com/webank/wedatasphere/linkis/rpc/MessageRPCSpringConfiguration.scala b/linkis-commons/linkis-message-scheduler/src/main/scala/com/webank/wedatasphere/linkis/rpc/MessageRPCSpringConfiguration.scala new file mode 100644 index 0000000000000000000000000000000000000000..9c21b534fd45e1de41364c28f20074117db30dc0 --- /dev/null +++ b/linkis-commons/linkis-message-scheduler/src/main/scala/com/webank/wedatasphere/linkis/rpc/MessageRPCSpringConfiguration.scala @@ -0,0 +1,47 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.webank.wedatasphere.linkis.rpc + +import com.webank.wedatasphere.linkis.message.context.{MessageSchedulerContext, SpringMessageSchedulerContext} +import com.webank.wedatasphere.linkis.message.publisher.{AbstractMessagePublisher, DefaultMessagePublisher, MessagePublisher} +import org.springframework.context.annotation.Bean + +/** + * @date 2020/8/4 + * + */ +class MessageRPCSpringConfiguration { + + @Bean + def getPublisher: AbstractMessagePublisher = { + new DefaultMessagePublisher() + } + + @Bean + def getMessageSchedulerContext(messagePublisher: AbstractMessagePublisher): MessageSchedulerContext = { + val context = new SpringMessageSchedulerContext + messagePublisher.setContext(context) + context.setPublisher(messagePublisher) + context + } + + @Bean + def getReceiverChooser(messagePublisher: MessagePublisher): ReceiverChooser = { + new MessageReceiverChooser(Option(new MessageReceiver(messagePublisher))) + } + +} diff --git a/linkis-commons/linkis-message-scheduler/src/main/scala/com/webank/wedatasphere/linkis/rpc/MessageReceiver.scala b/linkis-commons/linkis-message-scheduler/src/main/scala/com/webank/wedatasphere/linkis/rpc/MessageReceiver.scala new file mode 100644 index 0000000000000000000000000000000000000000..2cf60d0db06f4e0e5d8cdcd2a9427855644427b1 --- /dev/null +++ b/linkis-commons/linkis-message-scheduler/src/main/scala/com/webank/wedatasphere/linkis/rpc/MessageReceiver.scala @@ -0,0 +1,84 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.webank.wedatasphere.linkis.rpc + +import java.util.concurrent.{TimeUnit, TimeoutException} + +import com.webank.wedatasphere.linkis.common.conf.CommonVars +import com.webank.wedatasphere.linkis.common.utils.Utils +import com.webank.wedatasphere.linkis.message.builder.{DefaultServiceMethodContext, MessageJobTimeoutPolicy, ServiceMethodContext} +import com.webank.wedatasphere.linkis.message.conf.MessageSchedulerConf.{SENDER_KEY, _} +import com.webank.wedatasphere.linkis.message.publisher.MessagePublisher +import com.webank.wedatasphere.linkis.protocol.message.RequestProtocol +import com.webank.wedatasphere.linkis.server.security.SecurityFilter +import javax.servlet.http.HttpServletRequest + +import scala.concurrent.duration.Duration +import scala.language.implicitConversions + +/** + * @date 2020/8/3 + * + */ +class MessageReceiver(mesagePublisher: MessagePublisher) extends Receiver { + + private val syncMaxTimeout: Duration = Duration(CommonVars("wds.linkis.ms.rpc.sync.timeout", 60 * 1000 * 5L).getValue, TimeUnit.MILLISECONDS) + + override def receive(message: Any, sender: Sender): Unit = { + mesagePublisher.publish(message, (message, syncMaxTimeout, sender)) + } + + override def receiveAndReply(message: Any, sender: Sender): Any = { + receiveAndReply(message, syncMaxTimeout, sender) + } + + override def receiveAndReply(message: Any, duration: Duration, sender: Sender): Any = { + val job = mesagePublisher.publish(message, (message, duration, sender)) + Utils.tryCatch(job.get(duration._1, duration._2)) { + case t: TimeoutException => + job.getMethodContext.getAttributeOrDefault(TIMEOUT_POLICY, MessageJobTimeoutPolicy.INTERRUPT) match { + case MessageJobTimeoutPolicy.CANCEL => job.cancel(false); throw t + case MessageJobTimeoutPolicy.INTERRUPT => job.cancel(true); throw t + case MessageJobTimeoutPolicy.PARTIAL => job.getPartial + } + case i: 
InterruptedException => job.cancel(true); throw i + case t: Throwable => job.cancel(true); throw t + } + } + + implicit def createMessageMethodScheduler(tunple: (Any, Duration, Sender)): ServiceMethodContext = { + val methodContext = new DefaultServiceMethodContext + methodContext.putAttribute(SENDER_KEY, tunple._3) + methodContext.putAttribute(DURATION_KEY, tunple._2) + tunple._1 match { + case m: java.util.Map[String, Object] => { + val req = m.get(REQUEST_KEY).asInstanceOf[HttpServletRequest] + methodContext.putAttribute(REQUEST_KEY, req) + methodContext.putAttribute(USER_KEY, SecurityFilter.getLoginUser(req)) + } + case _ => + } + methodContext + } + + implicit def any2RequestProtocol(message: Any): RequestProtocol = message match { + case p: RequestProtocol => p + case m: java.util.Map[String, Object] => m.get("_request_protocol_").asInstanceOf[RequestProtocol] + } + + +} diff --git a/linkis-commons/linkis-message-scheduler/src/main/scala/com/webank/wedatasphere/linkis/rpc/MessageReceiverChooser.scala b/linkis-commons/linkis-message-scheduler/src/main/scala/com/webank/wedatasphere/linkis/rpc/MessageReceiverChooser.scala new file mode 100644 index 0000000000000000000000000000000000000000..525ea60a0c6e3a0ba4838250327cf0d1a53f0e85 --- /dev/null +++ b/linkis-commons/linkis-message-scheduler/src/main/scala/com/webank/wedatasphere/linkis/rpc/MessageReceiverChooser.scala @@ -0,0 +1,35 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.webank.wedatasphere.linkis.rpc + +import java.util + +import com.webank.wedatasphere.linkis.protocol.message.RequestProtocol + +/** + * @date 2020/8/3 + * + */ + +class MessageReceiverChooser(receiver: Option[Receiver]) extends ReceiverChooser { + + override def chooseReceiver(event: RPCMessageEvent): Option[Receiver] = event.message match { + case _: util.Map[String, Object] => receiver + case _: RequestProtocol => receiver + case _ => None + } +} diff --git a/linkis-commons/linkis-message-scheduler/src/test/java/com/webank/wedatasphere/linkis/message/DefaultRequestProtocol.java b/linkis-commons/linkis-message-scheduler/src/test/java/com/webank/wedatasphere/linkis/message/DefaultRequestProtocol.java new file mode 100644 index 0000000000000000000000000000000000000000..1cd9175c5e0b84e3c2879db85ee0661c20f0093c --- /dev/null +++ b/linkis-commons/linkis-message-scheduler/src/test/java/com/webank/wedatasphere/linkis/message/DefaultRequestProtocol.java @@ -0,0 +1,26 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.webank.wedatasphere.linkis.message; + + +import com.webank.wedatasphere.linkis.protocol.message.RequestProtocol; + +/** + * @date 2020/7/14 + */ +public class DefaultRequestProtocol implements RequestProtocol { +} diff --git a/linkis-commons/linkis-message-scheduler/src/test/java/com/webank/wedatasphere/linkis/message/ImplicitInterface.java b/linkis-commons/linkis-message-scheduler/src/test/java/com/webank/wedatasphere/linkis/message/ImplicitInterface.java new file mode 100644 index 0000000000000000000000000000000000000000..242aea73b340c11bda7724ec28d74c99678379ac --- /dev/null +++ b/linkis-commons/linkis-message-scheduler/src/test/java/com/webank/wedatasphere/linkis/message/ImplicitInterface.java @@ -0,0 +1,24 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.webank.wedatasphere.linkis.message; + + +/** + * @date 2020/7/22 + */ +public interface ImplicitInterface { +} diff --git a/linkis-commons/linkis-message-scheduler/src/test/java/com/webank/wedatasphere/linkis/message/ImplicitInterfaceImpl.java b/linkis-commons/linkis-message-scheduler/src/test/java/com/webank/wedatasphere/linkis/message/ImplicitInterfaceImpl.java new file mode 100644 index 0000000000000000000000000000000000000000..22cc4a6e50f8abda4cb3cfcc4fe42ccdf5d04b8f --- /dev/null +++ b/linkis-commons/linkis-message-scheduler/src/test/java/com/webank/wedatasphere/linkis/message/ImplicitInterfaceImpl.java @@ -0,0 +1,23 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.webank.wedatasphere.linkis.message; + +/** + * @date 2020/7/30 + */ +public class ImplicitInterfaceImpl implements ImplicitInterface { +} diff --git a/linkis-commons/linkis-message-scheduler/src/test/java/com/webank/wedatasphere/linkis/message/ImplicitObject.java b/linkis-commons/linkis-message-scheduler/src/test/java/com/webank/wedatasphere/linkis/message/ImplicitObject.java new file mode 100644 index 0000000000000000000000000000000000000000..d3e9b00f589c2df79b5a199e6da3eaf81cd82213 --- /dev/null +++ b/linkis-commons/linkis-message-scheduler/src/test/java/com/webank/wedatasphere/linkis/message/ImplicitObject.java @@ -0,0 +1,30 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.webank.wedatasphere.linkis.message; + +import com.webank.wedatasphere.linkis.message.annotation.Implicit; + +/** + * @date 2020/7/29 + */ +public class ImplicitObject { + + @Implicit + public ImplicitInterfaceImpl implicitMethod02(DefaultRequestProtocol requestProtocol) { + return null; + } +} diff --git a/linkis-commons/linkis-message-scheduler/src/test/java/com/webank/wedatasphere/linkis/message/SchedulerMessageTest.java b/linkis-commons/linkis-message-scheduler/src/test/java/com/webank/wedatasphere/linkis/message/SchedulerMessageTest.java new file mode 100644 index 0000000000000000000000000000000000000000..5895d30949ff859e80b346ad7a0f3f3f8b80467a --- /dev/null +++ b/linkis-commons/linkis-message-scheduler/src/test/java/com/webank/wedatasphere/linkis/message/SchedulerMessageTest.java @@ -0,0 +1,109 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.webank.wedatasphere.linkis.message; + +import com.webank.wedatasphere.linkis.message.annotation.Receiver; +import com.webank.wedatasphere.linkis.message.builder.MessageJob; +import com.webank.wedatasphere.linkis.message.conf.MessageSchedulerConf; +import com.webank.wedatasphere.linkis.message.context.AbstractMessageSchedulerContext; +import com.webank.wedatasphere.linkis.message.context.DefaultMessageSchedulerContext; +import com.webank.wedatasphere.linkis.message.parser.ImplicitMethod; +import com.webank.wedatasphere.linkis.message.parser.ServiceMethod; +import com.webank.wedatasphere.linkis.message.registry.AbstractImplicitRegistry; +import com.webank.wedatasphere.linkis.message.registry.AbstractServiceRegistry; +import com.webank.wedatasphere.linkis.protocol.message.RequestProtocol; +import org.junit.Before; +import org.junit.Test; +import org.reflections.Reflections; +import org.reflections.scanners.MethodAnnotationsScanner; + +import java.lang.reflect.Method; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.TimeoutException; +import java.util.stream.Collectors; + +/** + * @date 2020/7/14 + */ +public class SchedulerMessageTest { + + private AbstractMessageSchedulerContext context; + + Reflections reflections = new Reflections(MessageSchedulerConf.SERVICE_SCAN_PACKAGE, new MethodAnnotationsScanner()); + + @Before + public void before() { + + context = new DefaultMessageSchedulerContext(); + } + + + @Test + public void servieParserTest() throws InterruptedException { + Map> parse = context.getservieParser().parse(new TestService()); + System.out.println(parse.size()); + } + + @Test + public void registryTest() throws InterruptedException { + TestService testService = new TestService(); + context.getServiceRegistry().register(testService); + context.getImplicitRegistry().register(testService); + System.out.println("serviceRegistry"); + } + + 
@Test + public void implicitParserTest() throws InterruptedException { + Map> parse = context.getImplicitParser().parse(new TestService()); + System.out.println(parse.size()); + } + + @Test + public void springRegisterTest() { + Set methodsAnnotatedWith = reflections.getMethodsAnnotatedWith(Receiver.class); + Set> collect = methodsAnnotatedWith.stream().map(Method::getDeclaringClass).collect(Collectors.toSet()); + System.out.println(collect.size()); + } + + @Test + public void test() { + System.out.println(RequestProtocol.class.isAssignableFrom(RequestProtocol.class)); + } + + @Test + public void publishTest() throws InterruptedException, ExecutionException, TimeoutException { + TestService testService = new TestService(); + TestService2 testService2 = new TestService2(); + AbstractImplicitRegistry implicitRegistry = this.context.getImplicitRegistry(); + implicitRegistry.register(testService); + implicitRegistry.register(testService2); + implicitRegistry.register(new ImplicitObject()); + AbstractServiceRegistry serviceRegistry = this.context.getServiceRegistry(); + serviceRegistry.register(testService); + serviceRegistry.register(testService2); + long start = System.currentTimeMillis(); + MessageJob publish = context.getPublisher().publish(new DefaultRequestProtocol()); + Object o = publish.get(); + System.out.println(o); + + System.out.println(System.currentTimeMillis() - start); + } + +} diff --git a/linkis-commons/linkis-message-scheduler/src/test/java/com/webank/wedatasphere/linkis/message/TestService.java b/linkis-commons/linkis-message-scheduler/src/test/java/com/webank/wedatasphere/linkis/message/TestService.java new file mode 100644 index 0000000000000000000000000000000000000000..3c0f4eb44264ea30bc2169cd0c424c3c505ffb71 --- /dev/null +++ b/linkis-commons/linkis-message-scheduler/src/test/java/com/webank/wedatasphere/linkis/message/TestService.java @@ -0,0 +1,74 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the 
"License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.webank.wedatasphere.linkis.message; + +import com.webank.wedatasphere.linkis.message.annotation.*; +import com.webank.wedatasphere.linkis.message.builder.ServiceMethodContext; +import com.webank.wedatasphere.linkis.protocol.message.RequestProtocol; + +import java.util.ArrayList; +import java.util.List; + +/** + * @date 2020/7/14 + */ +public class TestService { + @Receiver + @Order(1) + public void method01(ImplicitInterface protocol) throws InterruptedException { + Thread.sleep(5000); + System.out.println("TestService1.method01"); + } + + @Receiver + public void method02(ServiceMethodContext smc, ImplicitInterface protocol) throws InterruptedException { + Thread.sleep(2000); + System.out.println("TestService1.method02"); + } + + @Receiver + @Chain("fgf") + public void method03(ServiceMethodContext smc, ImplicitInterface protocol) throws InterruptedException { + Thread.sleep(3000); + System.out.println("TestService1.method03"); + } + + @Receiver + public List method04( ServiceMethodContext smc, @NotImplicit DefaultRequestProtocol protocol) throws InterruptedException { + Thread.sleep(2000); + System.out.println("TestService1.method04"); + return new ArrayList<>(); + } + + @Implicit + public ImplicitInterfaceImpl implicitMethod02(DefaultRequestProtocol requestProtocol) { + return null; + } + + /** + * 测试 转换方法的优先级 + * + * @param protocol + * @return + */ + @Implicit + public ImplicitInterfaceImpl implicitMetho01(RequestProtocol protocol) { + return 
null; + } + + +} diff --git a/linkis-commons/linkis-message-scheduler/src/test/java/com/webank/wedatasphere/linkis/message/TestService2.java b/linkis-commons/linkis-message-scheduler/src/test/java/com/webank/wedatasphere/linkis/message/TestService2.java new file mode 100644 index 0000000000000000000000000000000000000000..be22ea2787559d5498d88b46102d64b3f10247e4 --- /dev/null +++ b/linkis-commons/linkis-message-scheduler/src/test/java/com/webank/wedatasphere/linkis/message/TestService2.java @@ -0,0 +1,30 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.webank.wedatasphere.linkis.message; + +/** + * @date 2020/7/29 + */ +public class TestService2 { + +/* @Receiver + public void method01(@Implicit SubRequestProtocol protocol) throws InterruptedException { + System.out.println("TestService2.method01"); + }*/ + + +} diff --git a/linkis-commons/linkis-module/pom.xml b/linkis-commons/linkis-module/pom.xml new file mode 100644 index 0000000000000000000000000000000000000000..c28e15487e8b907ddb9f39091be3ce5e049efd0c --- /dev/null +++ b/linkis-commons/linkis-module/pom.xml @@ -0,0 +1,514 @@ + + + + + + linkis + com.webank.wedatasphere.linkis + 1.0.0-RC1 + + 4.0.0 + + linkis-module + + + + com.webank.wedatasphere.linkis + linkis-common + + + + org.springframework + spring-core + ${spring.version} + + + + org.springframework.boot + spring-boot + ${spring.boot.version} + + + org.apache.logging.log4j + log4j-api + + + org.apache.logging.log4j + log4j-core + + + spring-core + org.springframework + + + + + org.springframework.cloud + spring-cloud-starter-netflix-eureka-client + ${spring.eureka.version} + + + jsr311-api + javax.ws.rs + + + spring-boot-autoconfigure + org.springframework.boot + + + spring-boot-starter-aop + org.springframework.boot + + + spring-cloud-starter + org.springframework.cloud + + + spring-cloud-commons + org.springframework.cloud + + + spring-cloud-context + org.springframework.cloud + + + spring-boot-starter + org.springframework.boot + + + spring-boot-starter-cache + org.springframework.boot + + + jackson-core + com.fasterxml.jackson.core + + + jackson-databind + com.fasterxml.jackson.core + + + + + + org.springframework.boot + spring-boot-starter-cache + ${spring.boot.version} + + + spring-core + org.springframework + + + + + + org.springframework.boot + spring-boot-starter-jetty + ${spring.boot.version} + + + asm + org.ow2.asm + + + + + org.springframework.boot + spring-boot-starter-web + + + org.springframework.boot + spring-boot-starter-tomcat + + + hibernate-validator + 
org.hibernate.validator + + + spring-core + org.springframework + + + ${spring.boot.version} + + + + org.springframework.boot + spring-boot-starter + ${spring.boot.version} + + + org.springframework.boot + spring-boot-starter-logging + + + spring-core + org.springframework + + + + + org.springframework.boot + spring-boot-starter-log4j2 + ${spring.boot.version} + + + org.springframework.boot + spring-boot-starter-actuator + ${spring.boot.version} + + + org.springframework.boot + spring-boot-starter-logging + + + + + + org.springframework.cloud + spring-cloud-starter-config + + + org.springframework.boot + spring-boot-starter-logging + + + spring-web + org.springframework + + + spring-boot-starter + org.springframework.boot + + + spring-boot-autoconfigure + org.springframework.boot + + + spring-cloud-config-client + org.springframework.cloud + + + spring-cloud-starter + org.springframework.cloud + + + ${spring.cloud.version} + + + + spring-cloud-config-client + + + spring-cloud-commons + org.springframework.cloud + + + spring-cloud-context + org.springframework.cloud + + + spring-boot-autoconfigure + org.springframework.boot + + + spring-web + org.springframework + + + org.springframework.cloud + ${spring.cloud.version} + + + spring-cloud-starter + + + spring-cloud-commons + org.springframework.cloud + + + spring-cloud-context + org.springframework.cloud + + + spring-boot-starter + org.springframework.boot + + + org.springframework.cloud + ${spring.cloud.version} + + + + org.springframework.cloud + spring-cloud-context + ${spring.cloud.version} + + + spring-security-crypto + org.springframework.security + + + + + + org.springframework.cloud + spring-cloud-commons + ${spring.cloud.version} + + + spring-security-crypto + org.springframework.security + + + + + + org.springframework.security + spring-security-crypto + ${spring.security.cryto.version} + + + + + org.springframework.boot + spring-boot-starter-aop + + + org.springframework.boot + spring-boot-starter-logging 
+ + + spring-core + org.springframework + + + ${spring.boot.version} + + + + mysql + mysql-connector-java + 5.1.49 + + + + org.glassfish.jersey.bundles + jaxrs-ri + 2.21 + + + cglib + cglib + 2.2.2 + + + commons-dbcp + commons-dbcp + 1.4 + + + org.eclipse.jetty + jetty-server + ${jetty.version} + + + org.eclipse.jetty + jetty-webapp + ${jetty.version} + + + org.eclipse.jetty.websocket + websocket-server + ${jetty.version} + + + + org.glassfish.jersey.ext + jersey-spring3 + ${jersey.servlet.version} + + + org.springframework + spring + + + org.springframework + spring-core + + + org.springframework + spring-web + + + org.springframework + spring-beans + + + org.springframework + spring-context + + + + + + + com.sun.jersey + jersey-server + 1.19.1 + + + jsr311-api + javax.ws.rs + + + + + com.sun.jersey + jersey-servlet + 1.19.1 + + + org.glassfish.jersey.containers + jersey-container-servlet + ${jersey.servlet.version} + + + org.glassfish.jersey.containers + jersey-container-servlet-core + ${jersey.servlet.version} + + + javax.ws.rs-api + javax.ws.rs + + + + + + org.glassfish.jersey.media + jersey-media-json-jackson + ${jersey.version} + + + jackson-core + com.fasterxml.jackson.core + + + jackson-databind + com.fasterxml.jackson.core + + + + + org.glassfish.jersey.media + jersey-media-multipart + ${jersey.version} + + + org.glassfish.jersey.ext + jersey-entity-filtering + ${jersey.version} + + + com.sun.jersey + jersey-json + + + jsr311-api + javax.ws.rs + + + jersey-core + com.sun.jersey + + + 1.19 + + + + com.fasterxml.jackson.core + jackson-databind + ${fasterxml.jackson.version} + + + com.fasterxml.jackson.core + jackson-annotations + ${fasterxml.jackson.version} + + + + org.reflections + reflections + ${reflections.version} + + + + com.google.code.gson + gson + ${gson.version} + + + io.netty + netty-all + ${netty.version} + + + org.json4s + json4s-jackson_${scala.binary.version} + ${json4s.version} + + + org.scala-lang + scala-library + + + jackson-databind + 
com.fasterxml.jackson.core + + + + + + + + + + + + org.apache.maven.plugins + maven-deploy-plugin + + + + net.alchim31.maven + scala-maven-plugin + + + org.apache.maven.plugins + maven-jar-plugin + + + + + ${basedir}/src/main/resources + + + ${project.artifactId}-${project.version} + + + diff --git a/core/cloudModule/src/main/java/com/webank/wedatasphere/linkis/DataWorkCloudApplication.java b/linkis-commons/linkis-module/src/main/java/com/webank/wedatasphere/linkis/DataWorkCloudApplication.java similarity index 95% rename from core/cloudModule/src/main/java/com/webank/wedatasphere/linkis/DataWorkCloudApplication.java rename to linkis-commons/linkis-module/src/main/java/com/webank/wedatasphere/linkis/DataWorkCloudApplication.java index 9aac1509165e918abae71ebc76452093d685116f..8500ea5ac3a2ad5d3dbaa7888e00e80dc521a48b 100644 --- a/core/cloudModule/src/main/java/com/webank/wedatasphere/linkis/DataWorkCloudApplication.java +++ b/linkis-commons/linkis-module/src/main/java/com/webank/wedatasphere/linkis/DataWorkCloudApplication.java @@ -32,6 +32,7 @@ import org.eclipse.jetty.server.Server; import org.eclipse.jetty.servlet.FilterHolder; import org.eclipse.jetty.webapp.WebAppContext; import org.springframework.boot.SpringApplication; +import org.springframework.boot.WebApplicationType; import org.springframework.boot.autoconfigure.SpringBootApplication; import org.springframework.boot.builder.SpringApplicationBuilder; import org.springframework.boot.context.event.ApplicationPreparedEvent; @@ -102,6 +103,9 @@ public class DataWorkCloudApplication extends SpringBootServletInitializer { application.addListeners((ApplicationListener) Class.forName(listener).newInstance()); } } + if("true".equals(ServerConfiguration.IS_GATEWAY().getValue())){ + application.setWebApplicationType(WebApplicationType.REACTIVE); + } applicationContext = application.run(args); } @@ -138,9 +142,9 @@ public class DataWorkCloudApplication extends SpringBootServletInitializer { private static void 
initDWCApplication() { serviceInstance = new ServiceInstance(); serviceInstance.setApplicationName(applicationContext.getEnvironment().getProperty("spring.application.name")); - serviceInstance.setInstance(Utils.getLocalHostname() + ":" + applicationContext.getEnvironment().getProperty("server.port")); + serviceInstance.setInstance(Utils.getComputerName() + ":" + applicationContext.getEnvironment().getProperty("server.port")); DWCException.setApplicationName(serviceInstance.getApplicationName()); - DWCException.setHostname(Utils.getLocalHostname()); + DWCException.setHostname(Utils.getComputerName()); DWCException.setHostPort(Integer.parseInt(applicationContext.getEnvironment().getProperty("server.port"))); } @@ -173,7 +177,6 @@ public class DataWorkCloudApplication extends SpringBootServletInitializer { public WebServerFactoryCustomizer jettyFactoryCustomizer() { return new WebServerFactoryCustomizer() { public void customize(JettyServletWebServerFactory jettyServletWebServerFactory) { - jettyServletWebServerFactory.getJsp().setRegistered(false); jettyServletWebServerFactory.addServerCustomizers(new JettyServerCustomizer() { public void customize(Server server) { Handler[] childHandlersByClass = server.getChildHandlersByClass(WebAppContext.class); diff --git a/core/cloudModule/src/main/java/com/webank/wedatasphere/linkis/server/restful/RestfulApplication.java b/linkis-commons/linkis-module/src/main/java/com/webank/wedatasphere/linkis/server/restful/RestfulApplication.java similarity index 95% rename from core/cloudModule/src/main/java/com/webank/wedatasphere/linkis/server/restful/RestfulApplication.java rename to linkis-commons/linkis-module/src/main/java/com/webank/wedatasphere/linkis/server/restful/RestfulApplication.java index b396486608ed2079261616e38882754b1a290116..0a8468c1e5fc72acd9fb352b6807cee9be270721 100644 --- a/core/cloudModule/src/main/java/com/webank/wedatasphere/linkis/server/restful/RestfulApplication.java +++ 
b/linkis-commons/linkis-module/src/main/java/com/webank/wedatasphere/linkis/server/restful/RestfulApplication.java @@ -40,7 +40,7 @@ public class RestfulApplication extends ResourceConfig { if(StringUtils.isNotBlank(registerClasses)) { for(String clazz : registerClasses.split(",")) { logger.info("register " + clazz); - register(Class.forName(clazz)); + register(Class.forName(clazz, true, Thread.currentThread().getContextClassLoader())); } } String packages = ServerConfiguration.BDP_SERVER_RESTFUL_SCAN_PACKAGES().acquireNew(); diff --git a/core/cloudModule/src/main/java/com/webank/wedatasphere/linkis/server/utils/AopTargetUtils.java b/linkis-commons/linkis-module/src/main/java/com/webank/wedatasphere/linkis/server/utils/AopTargetUtils.java similarity index 100% rename from core/cloudModule/src/main/java/com/webank/wedatasphere/linkis/server/utils/AopTargetUtils.java rename to linkis-commons/linkis-module/src/main/java/com/webank/wedatasphere/linkis/server/utils/AopTargetUtils.java diff --git a/core/cloudModule/src/main/resources/mybatis-config.xml b/linkis-commons/linkis-module/src/main/resources/mybatis-config.xml similarity index 100% rename from core/cloudModule/src/main/resources/mybatis-config.xml rename to linkis-commons/linkis-module/src/main/resources/mybatis-config.xml diff --git a/core/cloudModule/src/main/scala/com/webank/wedatasphere/linkis/server/BDPJettyServerHelper.scala b/linkis-commons/linkis-module/src/main/scala/com/webank/wedatasphere/linkis/server/BDPJettyServerHelper.scala similarity index 100% rename from core/cloudModule/src/main/scala/com/webank/wedatasphere/linkis/server/BDPJettyServerHelper.scala rename to linkis-commons/linkis-module/src/main/scala/com/webank/wedatasphere/linkis/server/BDPJettyServerHelper.scala index a83fa66ec8e1f83f3a8ca2a0fe2cdcc8b39b8ebe..f809076b304b645c815373b7c2daa3a9d2edb4a1 100644 --- a/core/cloudModule/src/main/scala/com/webank/wedatasphere/linkis/server/BDPJettyServerHelper.scala +++ 
b/linkis-commons/linkis-module/src/main/scala/com/webank/wedatasphere/linkis/server/BDPJettyServerHelper.scala @@ -23,13 +23,13 @@ import java.text.SimpleDateFormat import java.util.EnumSet import com.fasterxml.jackson.databind.ObjectMapper -import javax.servlet.{DispatcherType, Filter} import com.google.gson._ import com.webank.wedatasphere.linkis.common.utils.Logging import com.webank.wedatasphere.linkis.server.conf.ServerConfiguration._ import com.webank.wedatasphere.linkis.server.restful.RestfulApplication import com.webank.wedatasphere.linkis.server.socket.ControllerServer import com.webank.wedatasphere.linkis.server.socket.controller.{ServerEventService, ServerListenerEventBus} +import javax.servlet.{DispatcherType, Filter} import org.apache.commons.io.FileUtils import org.eclipse.jetty.server.session.SessionHandler import org.eclipse.jetty.servlet.{DefaultServlet, FilterHolder, ServletContextHandler, ServletHolder} diff --git a/core/cloudModule/src/main/scala/com/webank/wedatasphere/linkis/server/Message.scala b/linkis-commons/linkis-module/src/main/scala/com/webank/wedatasphere/linkis/server/Message.scala similarity index 99% rename from core/cloudModule/src/main/scala/com/webank/wedatasphere/linkis/server/Message.scala rename to linkis-commons/linkis-module/src/main/scala/com/webank/wedatasphere/linkis/server/Message.scala index 5f7ac6df9b1eaddff4f1f96ff4b8efd3f0952e89..bb9632bc9f87a5f14efad8997f82bd773da88968 100644 --- a/core/cloudModule/src/main/scala/com/webank/wedatasphere/linkis/server/Message.scala +++ b/linkis-commons/linkis-module/src/main/scala/com/webank/wedatasphere/linkis/server/Message.scala @@ -1,12 +1,9 @@ /* * Copyright 2019 WeBank - * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * * http://www.apache.org/licenses/LICENSE-2.0 - * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -17,10 +14,10 @@ package com.webank.wedatasphere.linkis.server import java.util + import javax.ws.rs.Path import javax.ws.rs.core.Response import javax.xml.bind.annotation.XmlRootElement - import org.apache.commons.lang.StringUtils import org.apache.commons.lang.exception.ExceptionUtils import org.reflections.ReflectionUtils @@ -108,7 +105,9 @@ object Message { message.setMessage(msg) message } + implicit def response(message: Message): String = BDPJettyServerHelper.gson.toJson(message) + def noLogin(msg: String, t: Throwable): Message = { val message = Message(status = -1) message.setMessage(msg) diff --git a/core/cloudModule/src/main/scala/com/webank/wedatasphere/linkis/server/conf/DataWorkCloudCustomExcludeFilter.scala b/linkis-commons/linkis-module/src/main/scala/com/webank/wedatasphere/linkis/server/conf/DataWorkCloudCustomExcludeFilter.scala similarity index 100% rename from core/cloudModule/src/main/scala/com/webank/wedatasphere/linkis/server/conf/DataWorkCloudCustomExcludeFilter.scala rename to linkis-commons/linkis-module/src/main/scala/com/webank/wedatasphere/linkis/server/conf/DataWorkCloudCustomExcludeFilter.scala diff --git a/core/cloudModule/src/main/scala/com/webank/wedatasphere/linkis/server/conf/ServerConfiguration.scala b/linkis-commons/linkis-module/src/main/scala/com/webank/wedatasphere/linkis/server/conf/ServerConfiguration.scala similarity index 99% rename from core/cloudModule/src/main/scala/com/webank/wedatasphere/linkis/server/conf/ServerConfiguration.scala rename to linkis-commons/linkis-module/src/main/scala/com/webank/wedatasphere/linkis/server/conf/ServerConfiguration.scala index 
52c45689b6ab565d75676708c956e16644ed706a..dde2ba7b3a2a0f5b79859076cce10cdf560f0312 100644 --- a/core/cloudModule/src/main/scala/com/webank/wedatasphere/linkis/server/conf/ServerConfiguration.scala +++ b/linkis-commons/linkis-module/src/main/scala/com/webank/wedatasphere/linkis/server/conf/ServerConfiguration.scala @@ -97,4 +97,5 @@ object ServerConfiguration { val BDP_SERVER_RESTFUL_SCAN_PACKAGES = CommonVars("wds.linkis.server.restful.scan.packages", "") val BDP_SERVER_RESTFUL_REGISTER_CLASSES = CommonVars("wds.linkis.server.restful.register.classes", "") // val BDP_SERVER_SOCKET_SERVICE_SCAN_PACKAGES = CommonVars("wds.linkis.server.socket.service.scan.packages", BDP_SERVER_RESTFUL_SCAN_PACKAGES.getValue) + val IS_GATEWAY = CommonVars("wds.linkis.is.gateway", "false") } diff --git a/core/cloudModule/src/main/scala/com/webank/wedatasphere/linkis/server/exception/BDPServerException.scala b/linkis-commons/linkis-module/src/main/scala/com/webank/wedatasphere/linkis/server/exception/BDPServerException.scala similarity index 100% rename from core/cloudModule/src/main/scala/com/webank/wedatasphere/linkis/server/exception/BDPServerException.scala rename to linkis-commons/linkis-module/src/main/scala/com/webank/wedatasphere/linkis/server/exception/BDPServerException.scala diff --git a/core/cloudModule/src/main/scala/com/webank/wedatasphere/linkis/server/exception/FetchMapCacheFailedException.scala b/linkis-commons/linkis-module/src/main/scala/com/webank/wedatasphere/linkis/server/exception/FetchMapCacheFailedException.scala similarity index 100% rename from core/cloudModule/src/main/scala/com/webank/wedatasphere/linkis/server/exception/FetchMapCacheFailedException.scala rename to linkis-commons/linkis-module/src/main/scala/com/webank/wedatasphere/linkis/server/exception/FetchMapCacheFailedException.scala diff --git a/core/cloudModule/src/main/scala/com/webank/wedatasphere/linkis/server/exception/IllegalUserTicketException.scala 
b/linkis-commons/linkis-module/src/main/scala/com/webank/wedatasphere/linkis/server/exception/IllegalUserTicketException.scala similarity index 100% rename from core/cloudModule/src/main/scala/com/webank/wedatasphere/linkis/server/exception/IllegalUserTicketException.scala rename to linkis-commons/linkis-module/src/main/scala/com/webank/wedatasphere/linkis/server/exception/IllegalUserTicketException.scala diff --git a/core/cloudModule/src/main/scala/com/webank/wedatasphere/linkis/server/exception/LoginExpireException.scala b/linkis-commons/linkis-module/src/main/scala/com/webank/wedatasphere/linkis/server/exception/LoginExpireException.scala similarity index 100% rename from core/cloudModule/src/main/scala/com/webank/wedatasphere/linkis/server/exception/LoginExpireException.scala rename to linkis-commons/linkis-module/src/main/scala/com/webank/wedatasphere/linkis/server/exception/LoginExpireException.scala diff --git a/core/cloudModule/src/main/scala/com/webank/wedatasphere/linkis/server/exception/NoApplicationExistsException.scala b/linkis-commons/linkis-module/src/main/scala/com/webank/wedatasphere/linkis/server/exception/NoApplicationExistsException.scala similarity index 100% rename from core/cloudModule/src/main/scala/com/webank/wedatasphere/linkis/server/exception/NoApplicationExistsException.scala rename to linkis-commons/linkis-module/src/main/scala/com/webank/wedatasphere/linkis/server/exception/NoApplicationExistsException.scala diff --git a/core/cloudModule/src/main/scala/com/webank/wedatasphere/linkis/server/exception/NonLoginException.scala b/linkis-commons/linkis-module/src/main/scala/com/webank/wedatasphere/linkis/server/exception/NonLoginException.scala similarity index 100% rename from core/cloudModule/src/main/scala/com/webank/wedatasphere/linkis/server/exception/NonLoginException.scala rename to linkis-commons/linkis-module/src/main/scala/com/webank/wedatasphere/linkis/server/exception/NonLoginException.scala diff --git 
a/linkis-commons/linkis-module/src/main/scala/com/webank/wedatasphere/linkis/server/package.scala b/linkis-commons/linkis-module/src/main/scala/com/webank/wedatasphere/linkis/server/package.scala new file mode 100644 index 0000000000000000000000000000000000000000..50c6a5e653e78706e4fda61cc2da07a30b0e3a5f --- /dev/null +++ b/linkis-commons/linkis-module/src/main/scala/com/webank/wedatasphere/linkis/server/package.scala @@ -0,0 +1,112 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.webank.wedatasphere.linkis + +import java.util + +import com.webank.wedatasphere.linkis.common.exception.{ErrorException, ExceptionManager, FatalException, WarnException} +import com.webank.wedatasphere.linkis.common.utils.Utils +import com.webank.wedatasphere.linkis.server.exception.{BDPServerErrorException, NonLoginException} +import com.webank.wedatasphere.linkis.server.security.SecurityFilter +import javax.servlet.http.HttpServletRequest +import org.apache.commons.lang.StringUtils +import org.apache.commons.lang.exception.ExceptionUtils +import org.slf4j.Logger + +import scala.collection.{JavaConversions, mutable} + +/** + * Created by enjoyyin on 2018/5/2. 
+ */ +package object server { + + val EXCEPTION_MSG = "errorMsg" + type JMap[K, V] = java.util.HashMap[K, V] + + implicit def getUser(req: HttpServletRequest): String = SecurityFilter.getLoginUsername(req) + + def validateFailed(message: String): Message = Message(status = 2).setMessage(message) + def validate[T](json: util.Map[String, T], keys: String*): Unit = { + keys.foreach(k => if(!json.contains(k) || json.get(k) == null || StringUtils.isEmpty(json.get(k).toString)) + throw new BDPServerErrorException(11001, s"Verification failed, $k cannot be empty!(验证失败,$k 不能为空!)")) + } + def error(message: String): Message = Message.error(message) + implicit def ok(msg: String): Message = Message.ok(msg) + implicit def error(t: Throwable): Message = Message.error(t) + implicit def error(e: (String, Throwable)): Message = Message.error(e) + implicit def error(msg: String, t: Throwable): Message = Message.error(msg -> t) + // def tryCatch[T](tryOp: => T)(catchOp: Throwable => T): T = Utils.tryCatch(tryOp)(catchOp) +// def tryCatch(tryOp: => Message)(catchOp: Throwable => Message): Message = Utils.tryCatch(tryOp){ +// case nonLogin: NonLoginException => Message.noLogin(msg = nonLogin.getMessage) +// case t => catchOp(t) +// } + def catchMsg(tryOp: => Message)(msg: String)(implicit log: Logger): Message = Utils.tryCatch(tryOp){ + case fatal: FatalException => + log.error("Fatal Error, system exit...", fatal) + System.exit(fatal.getErrCode) + Message.error("Fatal Error, system exit...") + case nonLogin: NonLoginException => + val message = Message.noLogin(nonLogin.getMessage) + message.data(EXCEPTION_MSG, nonLogin.toMap) + message + case error: ErrorException => + val cause = error.getCause + val errorMsg = cause match { + case t: ErrorException => s"error code(错误码): ${t.getErrCode}, error message(错误信息): ${t.getDesc}." + case _ => s"error code(错误码): ${error.getErrCode}, error message(错误信息): ${error.getDesc}." 
+ } + log.error(errorMsg, error) + val message = Message.error(errorMsg) + message.data(EXCEPTION_MSG, error.toMap) + message + case warn: WarnException => + val warnMsg = s"Warning code(警告码): ${warn.getErrCode}, Warning message(警告信息): ${warn.getDesc}." + log.warn(warnMsg, warn) + val message = Message.warn(warnMsg) + message.data(EXCEPTION_MSG, warn.toMap) + message + case t => + log.error(msg, t) + val errorMsg = ExceptionUtils.getRootCauseMessage(t) + val message = if(StringUtils.isNotEmpty(errorMsg) && "operation failed(操作失败)" != msg) error(msg + "!the reason(原因):" + errorMsg) + else if(StringUtils.isNotEmpty(errorMsg)) error(errorMsg) else error(msg) + message.data(EXCEPTION_MSG, ExceptionManager.unknownException(message.getMessage)) + } + def catchIt(tryOp: => Message)(implicit log: Logger): Message = catchMsg(tryOp)("operation failed(操作失败)s") + implicit def toScalaBuffer[T](list: util.List[T]): mutable.Buffer[T] = JavaConversions.asScalaBuffer(list) + implicit def toScalaMap[K, V](map: util.Map[K, V]): mutable.Map[K, V] = JavaConversions.mapAsScalaMap(map) + implicit def toJavaList[T](list: mutable.Buffer[T]): util.List[T] = { + val arrayList = new util.ArrayList[T] + list.foreach(arrayList.add) + arrayList + } + implicit def toJavaMap[K, V](map: mutable.Map[K, V]): JMap[K, V] = { + val hashMap = new util.HashMap[K, V]() + map.foreach(m => hashMap.put(m._1, m._2)) + hashMap + } + implicit def toJavaMap[K, V](map: Map[K, V]): JMap[K, V] = { + val hashMap = new util.HashMap[K, V]() + map.foreach(m => hashMap.put(m._1, m._2)) + hashMap + } + implicit def asString(mapWithKey: (util.Map[String, Object], String)): String = mapWithKey._1.get(mapWithKey._2).asInstanceOf[String] + implicit def getString(mapWithKey: (util.Map[String, String], String)): String = mapWithKey._1.get(mapWithKey._2) + implicit def asInt(map: util.Map[String, Object], key: String): Int = map.get(key).asInstanceOf[Int] + implicit def asBoolean(mapWithKey: (util.Map[String, Object], String)): 
Boolean = mapWithKey._1.get(mapWithKey._2).asInstanceOf[Boolean] + +} diff --git a/core/cloudModule/src/main/scala/com/webank/wedatasphere/linkis/server/restful/RestfulCatchAOP.scala b/linkis-commons/linkis-module/src/main/scala/com/webank/wedatasphere/linkis/server/restful/RestfulCatchAOP.scala similarity index 99% rename from core/cloudModule/src/main/scala/com/webank/wedatasphere/linkis/server/restful/RestfulCatchAOP.scala rename to linkis-commons/linkis-module/src/main/scala/com/webank/wedatasphere/linkis/server/restful/RestfulCatchAOP.scala index 70d3c0c74f72a195136a2ea4a5ee44206d632516..97b54dad9fa5c2fc5b81200b18effe3a1f54a965 100644 --- a/core/cloudModule/src/main/scala/com/webank/wedatasphere/linkis/server/restful/RestfulCatchAOP.scala +++ b/linkis-commons/linkis-module/src/main/scala/com/webank/wedatasphere/linkis/server/restful/RestfulCatchAOP.scala @@ -16,10 +16,9 @@ package com.webank.wedatasphere.linkis.server.restful -import javax.ws.rs.core.Response - import com.webank.wedatasphere.linkis.common.utils.Logging import com.webank.wedatasphere.linkis.server.{Message, catchIt} +import javax.ws.rs.core.Response import org.aspectj.lang.ProceedingJoinPoint import org.aspectj.lang.annotation.{Around, Aspect, Pointcut} import org.springframework.stereotype.Component diff --git a/core/cloudModule/src/main/scala/com/webank/wedatasphere/linkis/server/security/SSOUtils.scala b/linkis-commons/linkis-module/src/main/scala/com/webank/wedatasphere/linkis/server/security/SSOUtils.scala similarity index 100% rename from core/cloudModule/src/main/scala/com/webank/wedatasphere/linkis/server/security/SSOUtils.scala rename to linkis-commons/linkis-module/src/main/scala/com/webank/wedatasphere/linkis/server/security/SSOUtils.scala diff --git a/linkis-commons/linkis-module/src/main/scala/com/webank/wedatasphere/linkis/server/security/SecurityFilter.scala b/linkis-commons/linkis-module/src/main/scala/com/webank/wedatasphere/linkis/server/security/SecurityFilter.scala new file 
mode 100644 index 0000000000000000000000000000000000000000..bab363bb3e6cbd7912b8b275b722b6d3110abc1c --- /dev/null +++ b/linkis-commons/linkis-module/src/main/scala/com/webank/wedatasphere/linkis/server/security/SecurityFilter.scala @@ -0,0 +1,151 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.webank.wedatasphere.linkis.server.security + +import java.text.DateFormat +import java.util.{Date, Locale} + +import com.webank.wedatasphere.linkis.common.conf.Configuration +import com.webank.wedatasphere.linkis.common.utils.{Logging, RSAUtils, Utils} +import com.webank.wedatasphere.linkis.server.conf.ServerConfiguration +import com.webank.wedatasphere.linkis.server.exception.{IllegalUserTicketException, LoginExpireException, NonLoginException} +import com.webank.wedatasphere.linkis.server.security.SSOUtils.sslEnable +import com.webank.wedatasphere.linkis.server.{Message, _} +import javax.servlet._ +import javax.servlet.http.{Cookie, HttpServletRequest, HttpServletResponse} +import org.apache.commons.lang.StringUtils + +/** + * Created by enjoyyin on 2018/1/9. 
+ */ +class SecurityFilter extends Filter { + + private val refererValidate = ServerConfiguration.BDP_SERVER_SECURITY_REFERER_VALIDATE.getValue + private val localAddress = ServerConfiguration.BDP_SERVER_ADDRESS.getValue + protected val testUser = ServerConfiguration.BDP_TEST_USER.getValue + + + override def init(filterConfig: FilterConfig): Unit = {} + + private def filterResponse(message: Message)(implicit response: HttpServletResponse): Unit = { + response.setHeader("Content-Type", "application/json;charset=UTF-8") + response.setStatus(Message.messageToHttpStatus(message)) + response.getOutputStream.print(message) + response.getOutputStream.flush() + } + + def doFilter(request: HttpServletRequest)(implicit response: HttpServletResponse): Boolean = { + addAccessHeaders(response) + if (refererValidate) { + //Security certification support, referer limited(安全认证支持,referer限定) + val referer = request.getHeader("Referer") + if (StringUtils.isNotEmpty(referer) && !referer.trim.contains(localAddress)) { + filterResponse(validateFailed("不允许的跨站请求!")) + return false + } + //Security certification support, solving verb tampering(安全认证支持,解决动词篡改) + request.getMethod.toUpperCase match { + case "GET" | "POST" | "PUT" | "DELETE" | "HEAD" | "TRACE" | "CONNECT" | "OPTIONS" => + case _ => + filterResponse(validateFailed("Do not use HTTP verbs to tamper with!(不可使用HTTP动词篡改!)")) + return false + } + } + if(request.getRequestURI == ServerConfiguration.BDP_SERVER_SECURITY_SSL_URI.getValue) { + val message = Message.ok("Get success!(获取成功!)").data("enable", SSOUtils.sslEnable) + if(SSOUtils.sslEnable) message.data("publicKey", RSAUtils.getDefaultPublicKey()) + filterResponse(message) + false + } else if(request.getRequestURI == ServerConfiguration.BDP_SERVER_RESTFUL_LOGIN_URI.getValue) { + true + } else { + val userName = Utils.tryCatch(SecurityFilter.getLoginUser(request)){ + case n: NonLoginException => + if(Configuration.IS_TEST_MODE.getValue) None else { + 
filterResponse(Message.noLogin(n.getMessage) << request.getRequestURI) + return false + } + case t: Throwable => + SecurityFilter.warn("", t) + throw t + } + if(userName.isDefined) { + true + } else if(Configuration.IS_TEST_MODE.getValue) { + SecurityFilter.info("test mode! login for uri: " + request.getRequestURI) + SecurityFilter.setLoginUser(response, testUser) + true + } else { + filterResponse(Message.noLogin("You are not logged in, please login first!(您尚未登录,请先登录!)") << request.getRequestURI) + false + } + } + } + + override def doFilter(servletRequest: ServletRequest, servletResponse: ServletResponse, filterChain: FilterChain): Unit = { + val request = servletRequest.asInstanceOf[HttpServletRequest] + implicit val response = servletResponse.asInstanceOf[HttpServletResponse] + if(doFilter(request)) filterChain.doFilter(servletRequest, servletResponse) + if(SecurityFilter.isRequestIgnoreTimeout(request)) SecurityFilter.removeIgnoreTimeoutSignal(response) + } + + protected def addAccessHeaders(response: HttpServletResponse) { + response.setHeader("Access-Control-Allow-Origin", "*") + response.setHeader("Access-Control-Allow-Credentials", "true") + response.setHeader("Access-Control-Allow-Headers", "authorization,Content-Type") + response.setHeader("Access-Control-Allow-Methods", "POST, GET, OPTIONS, PUT, HEAD, DELETE") + val fullDateFormatEN = DateFormat.getDateTimeInstance(DateFormat.FULL, DateFormat.FULL, new Locale("EN", "en")) + response.setHeader("Date", fullDateFormatEN.format(new Date)) + } + + override def destroy(): Unit = {} +} + +object SecurityFilter extends Logging { + private[linkis] val OTHER_SYSTEM_IGNORE_UM_USER = "dataworkcloud_rpc_user" + private[linkis] val ALLOW_ACCESS_WITHOUT_TIMEOUT = "dataworkcloud_inner_request" + def getLoginUserThrowsExceptionWhenTimeout(req: HttpServletRequest): Option[String] = Option(req.getCookies).flatMap(cs => SSOUtils.getLoginUser(cs)) + .orElse(SSOUtils.getLoginUserIgnoreTimeout(key => 
Option(req.getHeader(key))).filter(_ == OTHER_SYSTEM_IGNORE_UM_USER)) + def getLoginUser(req: HttpServletRequest): Option[String] = Utils.tryCatch(getLoginUserThrowsExceptionWhenTimeout(req)) { + case _: LoginExpireException => + SSOUtils.getLoginUserIgnoreTimeout(key => Option(req.getCookies).flatMap(_.find(_.getName == key).map(_.getValue))).filter(user => user != OTHER_SYSTEM_IGNORE_UM_USER && + isRequestIgnoreTimeout(req)) + case t => throw t + } + def isRequestIgnoreTimeout(req: HttpServletRequest): Boolean = Option(req.getCookies).exists(_.exists(c => c.getName == ALLOW_ACCESS_WITHOUT_TIMEOUT && c.getValue == "true")) + def addIgnoreTimeoutSignal(response: HttpServletResponse): Unit = response.addCookie(ignoreTimeoutSignal()) + def ignoreTimeoutSignal(): Cookie = { + val cookie = new Cookie(ALLOW_ACCESS_WITHOUT_TIMEOUT, "true") + cookie.setMaxAge(-1) + cookie.setPath("/") + if(sslEnable) cookie.setSecure(true) + cookie + } + def removeIgnoreTimeoutSignal(response: HttpServletResponse): Unit = { + val cookie = new Cookie(ALLOW_ACCESS_WITHOUT_TIMEOUT, "false") + cookie.setMaxAge(0) + cookie.setPath("/") + if(sslEnable) cookie.setSecure(true) + response.addCookie(cookie) + } + def getLoginUsername(req: HttpServletRequest): String = getLoginUser(req).getOrElse(throw new IllegalUserTicketException( s"Illegal user token information(非法的用户token信息).")) + def setLoginUser(resp: HttpServletResponse, username: String): Unit = SSOUtils.setLoginUser(c => resp.addCookie(c), username) + def removeLoginUser(req: HttpServletRequest, resp: HttpServletResponse): Unit = { + SSOUtils.removeLoginUser(req.getCookies) + SSOUtils.removeLoginUserByAddCookie(s => resp.addCookie(s)) + } +} \ No newline at end of file diff --git a/core/cloudModule/src/main/scala/com/webank/wedatasphere/linkis/server/socket/ControllerServer.scala b/linkis-commons/linkis-module/src/main/scala/com/webank/wedatasphere/linkis/server/socket/ControllerServer.scala similarity index 100% rename from 
core/cloudModule/src/main/scala/com/webank/wedatasphere/linkis/server/socket/ControllerServer.scala rename to linkis-commons/linkis-module/src/main/scala/com/webank/wedatasphere/linkis/server/socket/ControllerServer.scala diff --git a/core/cloudModule/src/main/scala/com/webank/wedatasphere/linkis/server/socket/ServerSocket.scala b/linkis-commons/linkis-module/src/main/scala/com/webank/wedatasphere/linkis/server/socket/ServerSocket.scala similarity index 100% rename from core/cloudModule/src/main/scala/com/webank/wedatasphere/linkis/server/socket/ServerSocket.scala rename to linkis-commons/linkis-module/src/main/scala/com/webank/wedatasphere/linkis/server/socket/ServerSocket.scala index f79b552deff011265c86fadb3adb35604ac3153b..2b195fa3fcc879d230c39b4c5ed5880d2129d748 100644 --- a/core/cloudModule/src/main/scala/com/webank/wedatasphere/linkis/server/socket/ServerSocket.scala +++ b/linkis-commons/linkis-module/src/main/scala/com/webank/wedatasphere/linkis/server/socket/ServerSocket.scala @@ -20,8 +20,8 @@ import java.util.concurrent.TimeUnit import com.webank.wedatasphere.linkis.common.collection.BlockingLoopArray import com.webank.wedatasphere.linkis.common.utils.Utils -import javax.servlet.http.HttpServletRequest import com.webank.wedatasphere.linkis.server.security.SecurityFilter +import javax.servlet.http.HttpServletRequest import org.eclipse.jetty.websocket.api.{Session, WebSocketAdapter} /** diff --git a/core/cloudModule/src/main/scala/com/webank/wedatasphere/linkis/server/socket/SocketListener.scala b/linkis-commons/linkis-module/src/main/scala/com/webank/wedatasphere/linkis/server/socket/SocketListener.scala similarity index 100% rename from core/cloudModule/src/main/scala/com/webank/wedatasphere/linkis/server/socket/SocketListener.scala rename to linkis-commons/linkis-module/src/main/scala/com/webank/wedatasphere/linkis/server/socket/SocketListener.scala diff --git 
a/core/cloudModule/src/main/scala/com/webank/wedatasphere/linkis/server/socket/controller/ServerEvent.scala b/linkis-commons/linkis-module/src/main/scala/com/webank/wedatasphere/linkis/server/socket/controller/ServerEvent.scala similarity index 100% rename from core/cloudModule/src/main/scala/com/webank/wedatasphere/linkis/server/socket/controller/ServerEvent.scala rename to linkis-commons/linkis-module/src/main/scala/com/webank/wedatasphere/linkis/server/socket/controller/ServerEvent.scala diff --git a/core/cloudModule/src/main/scala/com/webank/wedatasphere/linkis/server/socket/controller/ServerEventService.scala b/linkis-commons/linkis-module/src/main/scala/com/webank/wedatasphere/linkis/server/socket/controller/ServerEventService.scala similarity index 100% rename from core/cloudModule/src/main/scala/com/webank/wedatasphere/linkis/server/socket/controller/ServerEventService.scala rename to linkis-commons/linkis-module/src/main/scala/com/webank/wedatasphere/linkis/server/socket/controller/ServerEventService.scala diff --git a/core/cloudModule/src/main/scala/com/webank/wedatasphere/linkis/server/socket/controller/ServerListenerEventBus.scala b/linkis-commons/linkis-module/src/main/scala/com/webank/wedatasphere/linkis/server/socket/controller/ServerListenerEventBus.scala similarity index 100% rename from core/cloudModule/src/main/scala/com/webank/wedatasphere/linkis/server/socket/controller/ServerListenerEventBus.scala rename to linkis-commons/linkis-module/src/main/scala/com/webank/wedatasphere/linkis/server/socket/controller/ServerListenerEventBus.scala diff --git a/linkis-commons/linkis-mybatis/pom.xml b/linkis-commons/linkis-mybatis/pom.xml new file mode 100644 index 0000000000000000000000000000000000000000..c60393013ec900b3fc7a47ad50beb273516a864a --- /dev/null +++ b/linkis-commons/linkis-mybatis/pom.xml @@ -0,0 +1,92 @@ + + + + + + linkis + com.webank.wedatasphere.linkis + 1.0.0-RC1 + ../../pom.xml + + 4.0.0 + jar + + linkis-mybatis + + + + 
com.webank.wedatasphere.linkis + linkis-module + provided + + + com.baomidou + mybatis-plus-boot-starter + ${mybatis-plus.boot.starter.version} + + + spring-boot-starter + org.springframework.boot + + + spring-boot-autoconfigure + org.springframework.boot + + + spring-beans + org.springframework + + + spring-jdbc + org.springframework + + + + + spring-jdbc + org.springframework + ${spring.version} + + + com.github.pagehelper + pagehelper + 5.1.4 + + + + + + org.apache.maven.plugins + maven-deploy-plugin + + + + net.alchim31.maven + scala-maven-plugin + + + org.apache.maven.plugins + maven-jar-plugin + + + + + src/main/resources + + + ${project.artifactId}-${project.version} + + \ No newline at end of file diff --git a/core/cloudMybatis/src/main/java/com/webank/wedatasphere/linkis/mybatis/DataSourceConfig.java b/linkis-commons/linkis-mybatis/src/main/java/com/webank/wedatasphere/linkis/mybatis/DataSourceConfig.java similarity index 100% rename from core/cloudMybatis/src/main/java/com/webank/wedatasphere/linkis/mybatis/DataSourceConfig.java rename to linkis-commons/linkis-mybatis/src/main/java/com/webank/wedatasphere/linkis/mybatis/DataSourceConfig.java diff --git a/core/cloudMybatis/src/main/java/com/webank/wedatasphere/linkis/mybatis/MyBatisMapperScannerConfig.java b/linkis-commons/linkis-mybatis/src/main/java/com/webank/wedatasphere/linkis/mybatis/MyBatisMapperScannerConfig.java similarity index 100% rename from core/cloudMybatis/src/main/java/com/webank/wedatasphere/linkis/mybatis/MyBatisMapperScannerConfig.java rename to linkis-commons/linkis-mybatis/src/main/java/com/webank/wedatasphere/linkis/mybatis/MyBatisMapperScannerConfig.java diff --git a/core/cloudMybatis/src/main/java/com/webank/wedatasphere/linkis/mybatis/MybatisConfigurationFactory.java b/linkis-commons/linkis-mybatis/src/main/java/com/webank/wedatasphere/linkis/mybatis/MybatisConfigurationFactory.java similarity index 95% rename from 
core/cloudMybatis/src/main/java/com/webank/wedatasphere/linkis/mybatis/MybatisConfigurationFactory.java rename to linkis-commons/linkis-mybatis/src/main/java/com/webank/wedatasphere/linkis/mybatis/MybatisConfigurationFactory.java index 131b4127dd55081e6aaa0cd79fa94c9a5a822555..9d6efd2c1d670ce77c1c227c3b83482a39a35541 100644 --- a/core/cloudMybatis/src/main/java/com/webank/wedatasphere/linkis/mybatis/MybatisConfigurationFactory.java +++ b/linkis-commons/linkis-mybatis/src/main/java/com/webank/wedatasphere/linkis/mybatis/MybatisConfigurationFactory.java @@ -16,7 +16,7 @@ package com.webank.wedatasphere.linkis.mybatis; -import com.github.pagehelper.PageHelper; +import com.baomidou.mybatisplus.extension.spring.MybatisSqlSessionFactoryBean; import com.github.pagehelper.PageInterceptor; import com.webank.wedatasphere.linkis.common.utils.JavaLog; import com.webank.wedatasphere.linkis.mybatis.conf.MybatisConfiguration; @@ -59,14 +59,14 @@ public class MybatisConfigurationFactory extends JavaLog { // Provide SqlSeesion(提供SqlSeesion) @Bean(name = "sqlSessionFactory") @Primary - public SqlSessionFactory sqlSessionFactory() { + public MybatisSqlSessionFactoryBean sqlSessionFactory() { String typeAliasesPackage = MybatisConfiguration.BDP_SERVER_MYBATIS_TYPEALIASESPACKAGE.getValue(); //Configure the mapper scan to find all mapper.xml mapping files(配置mapper的扫描,找到所有的mapper.xml映射文件) String mapperLocations = MybatisConfiguration.BDP_SERVER_MYBATIS_MAPPER_LOCATIONS.getValue(); //Load the global configuration file(加载全局的配置文件) String configLocation = MybatisConfiguration.BDP_SERVER_MYBATIS_CONFIGLOCATION.getValue(); try { - SqlSessionFactoryBean sessionFactoryBean = new SqlSessionFactoryBean(); + MybatisSqlSessionFactoryBean sessionFactoryBean = new MybatisSqlSessionFactoryBean(); sessionFactoryBean.setDataSource(dataSource); info("Mybatis typeAliasesPackage=" + typeAliasesPackage); @@ -89,11 +89,12 @@ public class MybatisConfigurationFactory extends JavaLog { // Set the location of the 
mybatis-config.xml configuration file(设置mybatis-config.xml配置文件位置) sessionFactoryBean.setConfigLocation(new DefaultResourceLoader().getResource(configLocation)); + // Add paging plugin, print sql plugin(添加分页插件、打印sql插件) Interceptor[] plugins = new Interceptor[]{pageInterceptor()}; sessionFactoryBean.setPlugins(plugins); - return sessionFactoryBean.getObject(); + return sessionFactoryBean; } catch (IOException e) { error("mybatis resolver mapper*xml is error",e); return null; diff --git a/core/cloudMybatis/src/main/java/com/webank/wedatasphere/linkis/mybatis/conf/MybatisConfiguration.java b/linkis-commons/linkis-mybatis/src/main/java/com/webank/wedatasphere/linkis/mybatis/conf/MybatisConfiguration.java similarity index 100% rename from core/cloudMybatis/src/main/java/com/webank/wedatasphere/linkis/mybatis/conf/MybatisConfiguration.java rename to linkis-commons/linkis-mybatis/src/main/java/com/webank/wedatasphere/linkis/mybatis/conf/MybatisConfiguration.java diff --git a/linkis-commons/linkis-protocol/pom.xml b/linkis-commons/linkis-protocol/pom.xml new file mode 100644 index 0000000000000000000000000000000000000000..ee148de050898d9c2127a15edc312617817f4944 --- /dev/null +++ b/linkis-commons/linkis-protocol/pom.xml @@ -0,0 +1,59 @@ + + + + + + linkis + com.webank.wedatasphere.linkis + 1.0.0-RC1 + + 4.0.0 + + linkis-protocol + + + + com.webank.wedatasphere.linkis + linkis-common + provided + + + + + + + org.apache.maven.plugins + maven-deploy-plugin + + + + net.alchim31.maven + scala-maven-plugin + + + org.apache.maven.plugins + maven-jar-plugin + + + + + ${basedir}/src/main/resources + + + ${project.artifactId}-${project.version} + + + \ No newline at end of file diff --git a/linkis-commons/linkis-protocol/src/main/java/com/webank/wedatasphere/linkis/protocol/AbstractRetryableProtocol.java b/linkis-commons/linkis-protocol/src/main/java/com/webank/wedatasphere/linkis/protocol/AbstractRetryableProtocol.java new file mode 100644 index 
0000000000000000000000000000000000000000..3a161b7c47ce62107b2df16380ae2fd58e66fcc8 --- /dev/null +++ b/linkis-commons/linkis-protocol/src/main/java/com/webank/wedatasphere/linkis/protocol/AbstractRetryableProtocol.java @@ -0,0 +1,41 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.webank.wedatasphere.linkis.protocol; + + +public class AbstractRetryableProtocol implements RetryableProtocol { + + @Override + public long maxPeriod() { + return 3000L; + } + + @Override + public Class[] retryExceptions() { + return new Class[]{}; + } + + @Override + public int retryNum() { + return 2; + } + + @Override + public long period() { + return 1000L; + } + +} diff --git a/core/cloudProtocol/src/main/java/com/webank/wedatasphere/linkis/protocol/constants/TaskConstant.java b/linkis-commons/linkis-protocol/src/main/java/com/webank/wedatasphere/linkis/protocol/constants/TaskConstant.java similarity index 79% rename from core/cloudProtocol/src/main/java/com/webank/wedatasphere/linkis/protocol/constants/TaskConstant.java rename to linkis-commons/linkis-protocol/src/main/java/com/webank/wedatasphere/linkis/protocol/constants/TaskConstant.java index b45d05261a3cd315446f74e72d879a6a91b0ee0f..060ecaf757506d42fd596dbe5a6294c5bea32fc8 100644 --- a/core/cloudProtocol/src/main/java/com/webank/wedatasphere/linkis/protocol/constants/TaskConstant.java +++ 
b/linkis-commons/linkis-protocol/src/main/java/com/webank/wedatasphere/linkis/protocol/constants/TaskConstant.java @@ -16,12 +16,15 @@ package com.webank.wedatasphere.linkis.protocol.constants; -/** - * created by enjoyyin on 2018/10/10 - * Description: - */ + public interface TaskConstant { + String UMUSER = "umUser"; + + String SUBMIT_USER = "submitUser"; + + String EXECUTE_USER = "executeUser"; + String TASKTYPE = "taskType"; String STORAGETYPE = "storageType"; String EXECUTIONCODE = "executionCode"; @@ -34,11 +37,20 @@ public interface TaskConstant { String SCRIPTPATH = "scriptPath"; String SOURCE = "source"; String RUNTYPE = "runType"; + String CACHE = "cache"; + String CACHE_EXPIRE_AFTER = "cacheExpireAfter"; + String READ_FROM_CACHE = "readFromCache"; + String READ_CACHE_BEFORE = "readCacheBefore"; String PARAMS_VARIABLE = "variable"; String PARAMS_CONFIGURATION = "configuration"; String PARAMS_CONFIGURATION_STARTUP = "startup"; String PARAMS_CONFIGURATION_RUNTIME = "runtime"; String PARAMS_CONFIGURATION_SPECIAL = "special"; - String PARAMS_CONFIGURATION_DATASOURCE = "datasource"; + + String LABELS = "labels"; + String EXECUTION_CONTENT = "executionContent"; + String CODE = "code"; + + } diff --git a/linkis-commons/linkis-protocol/src/main/java/com/webank/wedatasphere/linkis/protocol/engine/EngineState.java b/linkis-commons/linkis-protocol/src/main/java/com/webank/wedatasphere/linkis/protocol/engine/EngineState.java new file mode 100644 index 0000000000000000000000000000000000000000..a7f28b0378e901d168f05e8148b71833293e1b9a --- /dev/null +++ b/linkis-commons/linkis-protocol/src/main/java/com/webank/wedatasphere/linkis/protocol/engine/EngineState.java @@ -0,0 +1,50 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.webank.wedatasphere.linkis.protocol.engine; + + +public enum EngineState { + + /** + * 引擎的各种状态 + */ + Starting, Idle, Busy, ShuttingDown, Error, Dead, Success; + + public int id() { + return this.ordinal(); + } + + public static boolean isCompleted(EngineState engineState) { + switch (engineState) { + case Error: + case Dead: + case Success: + return true; + default: + return false; + } + } + + public static boolean isAvailable(EngineState engineState) { + switch (engineState) { + case Idle: + case Busy: + return true; + default: + return false; + } + } +} diff --git a/linkis-commons/linkis-protocol/src/main/java/com/webank/wedatasphere/linkis/protocol/engine/JobProgressInfo.scala b/linkis-commons/linkis-protocol/src/main/java/com/webank/wedatasphere/linkis/protocol/engine/JobProgressInfo.scala new file mode 100644 index 0000000000000000000000000000000000000000..c13f78f9153c5d8fa4c0ddca22fa84fe033c8f78 --- /dev/null +++ b/linkis-commons/linkis-protocol/src/main/java/com/webank/wedatasphere/linkis/protocol/engine/JobProgressInfo.scala @@ -0,0 +1,21 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.webank.wedatasphere.linkis.protocol.engine + + +case class JobProgressInfo(id: String, totalTasks: Int, runningTasks: Int, failedTasks: Int, succeedTasks: Int) + diff --git a/linkis-commons/linkis-protocol/src/main/java/com/webank/wedatasphere/linkis/protocol/label/InsLabelAttachRequest.java b/linkis-commons/linkis-protocol/src/main/java/com/webank/wedatasphere/linkis/protocol/label/InsLabelAttachRequest.java new file mode 100644 index 0000000000000000000000000000000000000000..8f5392a53d4f237ecd75527608905bc5e6c2b41f --- /dev/null +++ b/linkis-commons/linkis-protocol/src/main/java/com/webank/wedatasphere/linkis/protocol/label/InsLabelAttachRequest.java @@ -0,0 +1,60 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.webank.wedatasphere.linkis.protocol.label; + +import com.webank.wedatasphere.linkis.common.ServiceInstance; + +import java.util.HashMap; +import java.util.Map; + + +public class InsLabelAttachRequest implements LabelRequest { + /** + * Service instance + */ + private ServiceInstance serviceInstance; + + /** + * Labels stored as map structure + */ + private Map labels = new HashMap<>(); + + + public InsLabelAttachRequest(){ + + } + + public InsLabelAttachRequest(ServiceInstance serviceInstance, Map labels){ + this.serviceInstance = serviceInstance; + this.labels = labels; + } + public ServiceInstance getServiceInstance() { + return serviceInstance; + } + + public void setServiceInstance(ServiceInstance serviceInstance) { + this.serviceInstance = serviceInstance; + } + + public Map getLabels() { + return labels; + } + + public void setLabels(Map labels) { + this.labels = labels; + } +} diff --git a/linkis-commons/linkis-protocol/src/main/java/com/webank/wedatasphere/linkis/protocol/label/InsLabelRefreshRequest.java b/linkis-commons/linkis-protocol/src/main/java/com/webank/wedatasphere/linkis/protocol/label/InsLabelRefreshRequest.java new file mode 100644 index 0000000000000000000000000000000000000000..0564686d96757ef7600bab8e32d05d2aee165782 --- /dev/null +++ b/linkis-commons/linkis-protocol/src/main/java/com/webank/wedatasphere/linkis/protocol/label/InsLabelRefreshRequest.java @@ -0,0 +1,33 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.webank.wedatasphere.linkis.protocol.label; + +import com.webank.wedatasphere.linkis.common.ServiceInstance; + +import java.util.Map; + + +public class InsLabelRefreshRequest extends InsLabelAttachRequest{ + + public InsLabelRefreshRequest(){ + + } + + public InsLabelRefreshRequest(ServiceInstance serviceInstance, Map labels){ + super(serviceInstance, labels); + } +} diff --git a/linkis-commons/linkis-protocol/src/main/java/com/webank/wedatasphere/linkis/protocol/label/InsLabelRemoveRequest.java b/linkis-commons/linkis-protocol/src/main/java/com/webank/wedatasphere/linkis/protocol/label/InsLabelRemoveRequest.java new file mode 100644 index 0000000000000000000000000000000000000000..7da068c228fd9681e45770bd8392c21d95495b0b --- /dev/null +++ b/linkis-commons/linkis-protocol/src/main/java/com/webank/wedatasphere/linkis/protocol/label/InsLabelRemoveRequest.java @@ -0,0 +1,41 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.webank.wedatasphere.linkis.protocol.label; + +import com.webank.wedatasphere.linkis.common.ServiceInstance; + + +public class InsLabelRemoveRequest implements LabelRequest{ + + private ServiceInstance serviceInstance; + + public InsLabelRemoveRequest(){ + + } + + public InsLabelRemoveRequest(ServiceInstance serviceInstance){ + this.serviceInstance = serviceInstance; + } + + public ServiceInstance getServiceInstance() { + return serviceInstance; + } + + public void setServiceInstance(ServiceInstance serviceInstance) { + this.serviceInstance = serviceInstance; + } +} diff --git a/linkis-commons/linkis-protocol/src/main/java/com/webank/wedatasphere/linkis/protocol/label/LabelRequest.java b/linkis-commons/linkis-protocol/src/main/java/com/webank/wedatasphere/linkis/protocol/label/LabelRequest.java new file mode 100644 index 0000000000000000000000000000000000000000..b3d93eba1045d001ad0cb65e3829b69a7dbca29a --- /dev/null +++ b/linkis-commons/linkis-protocol/src/main/java/com/webank/wedatasphere/linkis/protocol/label/LabelRequest.java @@ -0,0 +1,23 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.webank.wedatasphere.linkis.protocol.label; + +import com.webank.wedatasphere.linkis.protocol.message.RequestProtocol; + + +public interface LabelRequest extends RequestProtocol { +} diff --git a/linkis-commons/linkis-protocol/src/main/java/com/webank/wedatasphere/linkis/protocol/label/NodeLabelAddRequest.java b/linkis-commons/linkis-protocol/src/main/java/com/webank/wedatasphere/linkis/protocol/label/NodeLabelAddRequest.java new file mode 100644 index 0000000000000000000000000000000000000000..cc4b50b1ff2bdbfd8b062d03cc6add01d888b45f --- /dev/null +++ b/linkis-commons/linkis-protocol/src/main/java/com/webank/wedatasphere/linkis/protocol/label/NodeLabelAddRequest.java @@ -0,0 +1,55 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.webank.wedatasphere.linkis.protocol.label; + +import com.webank.wedatasphere.linkis.common.ServiceInstance; + +import java.util.Map; + + +public class NodeLabelAddRequest implements LabelRequest { + + private ServiceInstance serviceInstance; + + private Map labels; + + + public NodeLabelAddRequest() { + + } + + public NodeLabelAddRequest(ServiceInstance serviceInstance, Map labels) { + this.serviceInstance = serviceInstance; + this.labels = labels; + } + + public ServiceInstance getServiceInstance() { + return serviceInstance; + } + + public void setServiceInstance(ServiceInstance serviceInstance) { + this.serviceInstance = serviceInstance; + } + + public Map getLabels() { + return labels; + } + + public void setLabels(Map labels) { + this.labels = labels; + } +} diff --git a/linkis-commons/linkis-protocol/src/main/java/com/webank/wedatasphere/linkis/protocol/label/NodeLabelRemoveRequest.java b/linkis-commons/linkis-protocol/src/main/java/com/webank/wedatasphere/linkis/protocol/label/NodeLabelRemoveRequest.java new file mode 100644 index 0000000000000000000000000000000000000000..79224834fc526097dad75241406472c0cb38c719 --- /dev/null +++ b/linkis-commons/linkis-protocol/src/main/java/com/webank/wedatasphere/linkis/protocol/label/NodeLabelRemoveRequest.java @@ -0,0 +1,52 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.webank.wedatasphere.linkis.protocol.label; + +import com.webank.wedatasphere.linkis.common.ServiceInstance; + + +public class NodeLabelRemoveRequest implements LabelRequest { + + private ServiceInstance serviceInstance; + + private boolean isEngine; + + public NodeLabelRemoveRequest() { + + } + + public NodeLabelRemoveRequest(ServiceInstance serviceInstance, boolean isEngine) { + this.serviceInstance = serviceInstance; + this.isEngine = isEngine; + } + + public ServiceInstance getServiceInstance() { + return serviceInstance; + } + + public void setServiceInstance(ServiceInstance serviceInstance) { + this.serviceInstance = serviceInstance; + } + + public boolean isEngine() { + return isEngine; + } + + public void setEngine(boolean engine) { + isEngine = engine; + } +} diff --git a/linkis-commons/linkis-protocol/src/main/java/com/webank/wedatasphere/linkis/protocol/message/HttpPrototol.java b/linkis-commons/linkis-protocol/src/main/java/com/webank/wedatasphere/linkis/protocol/message/HttpPrototol.java new file mode 100644 index 0000000000000000000000000000000000000000..bf58e339c038c5d184f0b9fec9881c628fe8a334 --- /dev/null +++ b/linkis-commons/linkis-protocol/src/main/java/com/webank/wedatasphere/linkis/protocol/message/HttpPrototol.java @@ -0,0 +1,24 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + + +package com.webank.wedatasphere.linkis.protocol.message; + +import com.webank.wedatasphere.linkis.protocol.Protocol; + + +public interface HttpPrototol extends Protocol { +} diff --git a/linkis-commons/linkis-protocol/src/main/java/com/webank/wedatasphere/linkis/protocol/message/RequestMethod.java b/linkis-commons/linkis-protocol/src/main/java/com/webank/wedatasphere/linkis/protocol/message/RequestMethod.java new file mode 100644 index 0000000000000000000000000000000000000000..7c3dc8ec5f6ed1d66da7f4894c35704bd4fc0f07 --- /dev/null +++ b/linkis-commons/linkis-protocol/src/main/java/com/webank/wedatasphere/linkis/protocol/message/RequestMethod.java @@ -0,0 +1,27 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + + +package com.webank.wedatasphere.linkis.protocol.message; + + +public interface RequestMethod { + + default String method() { + return null; + } + +} diff --git a/linkis-commons/linkis-protocol/src/main/java/com/webank/wedatasphere/linkis/protocol/message/RequestProtocol.java b/linkis-commons/linkis-protocol/src/main/java/com/webank/wedatasphere/linkis/protocol/message/RequestProtocol.java new file mode 100644 index 0000000000000000000000000000000000000000..2027061b4c1f4593f024ddbd7e14805cbe68015b --- /dev/null +++ b/linkis-commons/linkis-protocol/src/main/java/com/webank/wedatasphere/linkis/protocol/message/RequestProtocol.java @@ -0,0 +1,22 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.webank.wedatasphere.linkis.protocol.message; + + +public interface RequestProtocol extends HttpPrototol { + +} diff --git a/core/cloudProtocol/src/main/java/com/webank/wedatasphere/linkis/protocol/query/QueryProtocol.java b/linkis-commons/linkis-protocol/src/main/java/com/webank/wedatasphere/linkis/protocol/query/QueryProtocol.java similarity index 100% rename from core/cloudProtocol/src/main/java/com/webank/wedatasphere/linkis/protocol/query/QueryProtocol.java rename to linkis-commons/linkis-protocol/src/main/java/com/webank/wedatasphere/linkis/protocol/query/QueryProtocol.java diff --git a/linkis-commons/linkis-protocol/src/main/java/com/webank/wedatasphere/linkis/protocol/query/cache/CacheNotFound.java b/linkis-commons/linkis-protocol/src/main/java/com/webank/wedatasphere/linkis/protocol/query/cache/CacheNotFound.java new file mode 100644 index 0000000000000000000000000000000000000000..82def11507c1cc9da2828d9c9e0e8d3dbeb514b6 --- /dev/null +++ b/linkis-commons/linkis-protocol/src/main/java/com/webank/wedatasphere/linkis/protocol/query/cache/CacheNotFound.java @@ -0,0 +1,20 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.webank.wedatasphere.linkis.protocol.query.cache; + +public class CacheNotFound implements ResponseReadCache { +} diff --git a/linkis-commons/linkis-protocol/src/main/java/com/webank/wedatasphere/linkis/protocol/query/cache/CacheTaskResult.java b/linkis-commons/linkis-protocol/src/main/java/com/webank/wedatasphere/linkis/protocol/query/cache/CacheTaskResult.java new file mode 100644 index 0000000000000000000000000000000000000000..dfb066819535cc49cfef5286c9ee0fc1e5a5ace0 --- /dev/null +++ b/linkis-commons/linkis-protocol/src/main/java/com/webank/wedatasphere/linkis/protocol/query/cache/CacheTaskResult.java @@ -0,0 +1,30 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.webank.wedatasphere.linkis.protocol.query.cache; + +public class CacheTaskResult implements ResponseReadCache { + + private String resultLocation; + + public CacheTaskResult(String resultLocation) { + this.resultLocation = resultLocation; + } + + public String getResultLocation() { + return resultLocation; + } +} diff --git a/linkis-commons/linkis-protocol/src/main/java/com/webank/wedatasphere/linkis/protocol/query/cache/FailedToDeleteCache.java b/linkis-commons/linkis-protocol/src/main/java/com/webank/wedatasphere/linkis/protocol/query/cache/FailedToDeleteCache.java new file mode 100644 index 0000000000000000000000000000000000000000..504f469ff6a178e9be512a86d03403ee76daf867 --- /dev/null +++ b/linkis-commons/linkis-protocol/src/main/java/com/webank/wedatasphere/linkis/protocol/query/cache/FailedToDeleteCache.java @@ -0,0 +1,29 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.webank.wedatasphere.linkis.protocol.query.cache; + +public class FailedToDeleteCache { + private String errorMessage; + + public FailedToDeleteCache(String errorMessage) { + this.errorMessage = errorMessage; + } + + public String getErrorMessage() { + return errorMessage; + } +} diff --git a/linkis-commons/linkis-protocol/src/main/java/com/webank/wedatasphere/linkis/protocol/query/cache/RequestDeleteCache.java b/linkis-commons/linkis-protocol/src/main/java/com/webank/wedatasphere/linkis/protocol/query/cache/RequestDeleteCache.java new file mode 100644 index 0000000000000000000000000000000000000000..1a7599f7c84c34bfe55c430464c7fbd27034f4f9 --- /dev/null +++ b/linkis-commons/linkis-protocol/src/main/java/com/webank/wedatasphere/linkis/protocol/query/cache/RequestDeleteCache.java @@ -0,0 +1,44 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.webank.wedatasphere.linkis.protocol.query.cache; + +import com.webank.wedatasphere.linkis.protocol.query.QueryProtocol; + +public class RequestDeleteCache implements QueryProtocol { + + private String executionCode; + private String engineType; + private String user; + + public RequestDeleteCache(String executionCode, String engineType, String user) { + this.executionCode = executionCode; + this.engineType = engineType; + this.user = user; + } + + public String getExecutionCode() { + return executionCode; + } + + public String getEngineType() { + return engineType; + } + + public String getUser() { + return user; + } +} diff --git a/linkis-commons/linkis-protocol/src/main/java/com/webank/wedatasphere/linkis/protocol/query/cache/RequestReadCache.java b/linkis-commons/linkis-protocol/src/main/java/com/webank/wedatasphere/linkis/protocol/query/cache/RequestReadCache.java new file mode 100644 index 0000000000000000000000000000000000000000..aa4d28d93ab7a7d595792f61a7200cf654a8f5f3 --- /dev/null +++ b/linkis-commons/linkis-protocol/src/main/java/com/webank/wedatasphere/linkis/protocol/query/cache/RequestReadCache.java @@ -0,0 +1,49 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.webank.wedatasphere.linkis.protocol.query.cache; + +import com.webank.wedatasphere.linkis.protocol.query.QueryProtocol; + +public class RequestReadCache implements QueryProtocol { + private String executionCode; + private String engineType; + private String user; + private Long readCacheBefore; + + public RequestReadCache(String executionCode, String engineType, String user, Long readCacheBefore) { + this.executionCode = executionCode; + this.engineType = engineType; + this.user = user; + this.readCacheBefore = readCacheBefore; + } + + public String getExecutionCode() { + return executionCode; + } + + public String getEngineType() { + return engineType; + } + + public String getUser() { + return user; + } + + public Long getReadCacheBefore() { + return readCacheBefore; + } +} diff --git a/linkis-commons/linkis-protocol/src/main/java/com/webank/wedatasphere/linkis/protocol/query/cache/ResponseDeleteCache.java b/linkis-commons/linkis-protocol/src/main/java/com/webank/wedatasphere/linkis/protocol/query/cache/ResponseDeleteCache.java new file mode 100644 index 0000000000000000000000000000000000000000..d8a7f8c7d9b713c10b641488bb49d8e2706216f2 --- /dev/null +++ b/linkis-commons/linkis-protocol/src/main/java/com/webank/wedatasphere/linkis/protocol/query/cache/ResponseDeleteCache.java @@ -0,0 +1,22 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.webank.wedatasphere.linkis.protocol.query.cache; + +import com.webank.wedatasphere.linkis.protocol.query.QueryProtocol; + +public interface ResponseDeleteCache extends QueryProtocol { +} diff --git a/linkis-commons/linkis-protocol/src/main/java/com/webank/wedatasphere/linkis/protocol/query/cache/ResponseReadCache.java b/linkis-commons/linkis-protocol/src/main/java/com/webank/wedatasphere/linkis/protocol/query/cache/ResponseReadCache.java new file mode 100644 index 0000000000000000000000000000000000000000..fa6f4d8b92894fec4d8be532bef9b58ba4ce242d --- /dev/null +++ b/linkis-commons/linkis-protocol/src/main/java/com/webank/wedatasphere/linkis/protocol/query/cache/ResponseReadCache.java @@ -0,0 +1,22 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.webank.wedatasphere.linkis.protocol.query.cache; + +import com.webank.wedatasphere.linkis.protocol.query.QueryProtocol; + +public interface ResponseReadCache extends QueryProtocol { +} diff --git a/linkis-commons/linkis-protocol/src/main/java/com/webank/wedatasphere/linkis/protocol/query/cache/SuccessDeletedCache.java b/linkis-commons/linkis-protocol/src/main/java/com/webank/wedatasphere/linkis/protocol/query/cache/SuccessDeletedCache.java new file mode 100644 index 0000000000000000000000000000000000000000..20074949158fe1520d621ff81c62dc6682b9697a --- /dev/null +++ b/linkis-commons/linkis-protocol/src/main/java/com/webank/wedatasphere/linkis/protocol/query/cache/SuccessDeletedCache.java @@ -0,0 +1,20 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.webank.wedatasphere.linkis.protocol.query.cache; + +public class SuccessDeletedCache { +} diff --git a/core/cloudProtocol/src/main/java/com/webank/wedatasphere/linkis/protocol/usercontrol/RequestLogin.java b/linkis-commons/linkis-protocol/src/main/java/com/webank/wedatasphere/linkis/protocol/usercontrol/RequestLogin.java similarity index 97% rename from core/cloudProtocol/src/main/java/com/webank/wedatasphere/linkis/protocol/usercontrol/RequestLogin.java rename to linkis-commons/linkis-protocol/src/main/java/com/webank/wedatasphere/linkis/protocol/usercontrol/RequestLogin.java index 76237ea9f6204f5221a7c4d3c46da4399d86a28f..7f66f199c13f234f555d296a2e07f90152592f9c 100644 --- a/core/cloudProtocol/src/main/java/com/webank/wedatasphere/linkis/protocol/usercontrol/RequestLogin.java +++ b/linkis-commons/linkis-protocol/src/main/java/com/webank/wedatasphere/linkis/protocol/usercontrol/RequestLogin.java @@ -17,9 +17,7 @@ package com.webank.wedatasphere.linkis.protocol.usercontrol; -/** - * Created by alexyang - */ + public class RequestLogin implements UserControlLoginProtocol { private String userName; diff --git a/core/cloudProtocol/src/main/java/com/webank/wedatasphere/linkis/protocol/usercontrol/RequestRegister.java b/linkis-commons/linkis-protocol/src/main/java/com/webank/wedatasphere/linkis/protocol/usercontrol/RequestRegister.java similarity index 93% rename from core/cloudProtocol/src/main/java/com/webank/wedatasphere/linkis/protocol/usercontrol/RequestRegister.java rename to linkis-commons/linkis-protocol/src/main/java/com/webank/wedatasphere/linkis/protocol/usercontrol/RequestRegister.java index f36a0211e2fbfa16a1d2dab368c7a889d1a961c7..08a75ba04445cec13da48d09e146efe0337d98c0 100644 --- a/core/cloudProtocol/src/main/java/com/webank/wedatasphere/linkis/protocol/usercontrol/RequestRegister.java +++ b/linkis-commons/linkis-protocol/src/main/java/com/webank/wedatasphere/linkis/protocol/usercontrol/RequestRegister.java @@ -16,15 +16,13 @@ package 
com.webank.wedatasphere.linkis.protocol.usercontrol; -/** - * Created by alexyang - */ + public class RequestRegister implements UserControlRegtisterProtocol{ // json string private String params; - public RequestRegister() {} + public RequestRegister() {}; public String getParams() { return params; diff --git a/core/cloudProtocol/src/main/java/com/webank/wedatasphere/linkis/protocol/usercontrol/ResponseLogin.java b/linkis-commons/linkis-protocol/src/main/java/com/webank/wedatasphere/linkis/protocol/usercontrol/ResponseLogin.java similarity index 97% rename from core/cloudProtocol/src/main/java/com/webank/wedatasphere/linkis/protocol/usercontrol/ResponseLogin.java rename to linkis-commons/linkis-protocol/src/main/java/com/webank/wedatasphere/linkis/protocol/usercontrol/ResponseLogin.java index e82a4a73f242b0521a253d75b948e46ada1baa3d..cdbae958ea3e4483ff07db0126c8c0fe22cc5221 100644 --- a/core/cloudProtocol/src/main/java/com/webank/wedatasphere/linkis/protocol/usercontrol/ResponseLogin.java +++ b/linkis-commons/linkis-protocol/src/main/java/com/webank/wedatasphere/linkis/protocol/usercontrol/ResponseLogin.java @@ -18,9 +18,7 @@ package com.webank.wedatasphere.linkis.protocol.usercontrol; import java.util.Date; -/** - * Created by alexyang - */ + public class ResponseLogin implements UserControlLoginProtocol { private String userName; diff --git a/core/cloudProtocol/src/main/java/com/webank/wedatasphere/linkis/protocol/usercontrol/ResponseRegister.java b/linkis-commons/linkis-protocol/src/main/java/com/webank/wedatasphere/linkis/protocol/usercontrol/ResponseRegister.java similarity index 97% rename from core/cloudProtocol/src/main/java/com/webank/wedatasphere/linkis/protocol/usercontrol/ResponseRegister.java rename to linkis-commons/linkis-protocol/src/main/java/com/webank/wedatasphere/linkis/protocol/usercontrol/ResponseRegister.java index 5c12d626264834bfdc0ac00dc6cdd66199447ecc..a6b0e4dfac9569408a67efbd3a1d7b0802204d4c 100644 --- 
a/core/cloudProtocol/src/main/java/com/webank/wedatasphere/linkis/protocol/usercontrol/ResponseRegister.java +++ b/linkis-commons/linkis-protocol/src/main/java/com/webank/wedatasphere/linkis/protocol/usercontrol/ResponseRegister.java @@ -18,10 +18,8 @@ package com.webank.wedatasphere.linkis.protocol.usercontrol; import java.util.HashMap; -/** - * Created by alexyang - */ -public class ResponseRegister implements UserControlRegtisterProtocol { + +public class ResponseRegister implements UserControlRegtisterProtocol{ private int status; private String message; diff --git a/core/cloudProtocol/src/main/java/com/webank/wedatasphere/linkis/protocol/usercontrol/UserControlLoginProtocol.java b/linkis-commons/linkis-protocol/src/main/java/com/webank/wedatasphere/linkis/protocol/usercontrol/UserControlLoginProtocol.java similarity index 77% rename from core/cloudProtocol/src/main/java/com/webank/wedatasphere/linkis/protocol/usercontrol/UserControlLoginProtocol.java rename to linkis-commons/linkis-protocol/src/main/java/com/webank/wedatasphere/linkis/protocol/usercontrol/UserControlLoginProtocol.java index 95b00362d2b7d01b46e6fb789ebcfc13f3bdbe61..4cdb39dd86be34f9f355582acc39308a77ed1d70 100644 --- a/core/cloudProtocol/src/main/java/com/webank/wedatasphere/linkis/protocol/usercontrol/UserControlLoginProtocol.java +++ b/linkis-commons/linkis-protocol/src/main/java/com/webank/wedatasphere/linkis/protocol/usercontrol/UserControlLoginProtocol.java @@ -1,7 +1,5 @@ package com.webank.wedatasphere.linkis.protocol.usercontrol; -/** - * Created by alexyang - */ + public interface UserControlLoginProtocol { } diff --git a/core/cloudProtocol/src/main/java/com/webank/wedatasphere/linkis/protocol/usercontrol/UserControlRegtisterProtocol.java b/linkis-commons/linkis-protocol/src/main/java/com/webank/wedatasphere/linkis/protocol/usercontrol/UserControlRegtisterProtocol.java similarity index 78% rename from 
core/cloudProtocol/src/main/java/com/webank/wedatasphere/linkis/protocol/usercontrol/UserControlRegtisterProtocol.java rename to linkis-commons/linkis-protocol/src/main/java/com/webank/wedatasphere/linkis/protocol/usercontrol/UserControlRegtisterProtocol.java index 04a6d1f7c9a1330c94505a2df0989dfdcd4e9f17..1ea4e04542c17abe54f922faada9c3736cb62496 100644 --- a/core/cloudProtocol/src/main/java/com/webank/wedatasphere/linkis/protocol/usercontrol/UserControlRegtisterProtocol.java +++ b/linkis-commons/linkis-protocol/src/main/java/com/webank/wedatasphere/linkis/protocol/usercontrol/UserControlRegtisterProtocol.java @@ -1,7 +1,5 @@ package com.webank.wedatasphere.linkis.protocol.usercontrol; -/** - * Created by alexyang - */ + public interface UserControlRegtisterProtocol { } diff --git a/core/cloudProtocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/BroadcastProtocol.scala b/linkis-commons/linkis-protocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/BroadcastProtocol.scala similarity index 94% rename from core/cloudProtocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/BroadcastProtocol.scala rename to linkis-commons/linkis-protocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/BroadcastProtocol.scala index 6f5e20f1a061a83e439db215394961c944a42498..2ea57147bc443a96f100b1395a16ea21c9435b9c 100644 --- a/core/cloudProtocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/BroadcastProtocol.scala +++ b/linkis-commons/linkis-protocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/BroadcastProtocol.scala @@ -16,9 +16,7 @@ package com.webank.wedatasphere.linkis.protocol -/** - * Created by enjoyyin on 2019/1/14. 
- */ + trait BroadcastProtocol extends Protocol { val throwsIfAnyFailed = false diff --git a/core/cloudProtocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/CacheableProtocol.scala b/linkis-commons/linkis-protocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/CacheableProtocol.scala similarity index 93% rename from core/cloudProtocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/CacheableProtocol.scala rename to linkis-commons/linkis-protocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/CacheableProtocol.scala index b4706a78dd70a9f7ef9bc1abbfdf3a3ea75d4a6b..88b90800f4c44721ea2cdd3990bb535a861ae697 100644 --- a/core/cloudProtocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/CacheableProtocol.scala +++ b/linkis-commons/linkis-protocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/CacheableProtocol.scala @@ -16,9 +16,7 @@ package com.webank.wedatasphere.linkis.protocol -/** - * Created by enjoyyin on 2019/1/14. - */ + trait CacheableProtocol extends Protocol { override def toString: String = super.toString } diff --git a/core/cloudProtocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/IRCommonProtocol.scala b/linkis-commons/linkis-protocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/IRCommonProtocol.scala similarity index 93% rename from core/cloudProtocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/IRCommonProtocol.scala rename to linkis-commons/linkis-protocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/IRCommonProtocol.scala index 218ad3ebf24a356d4e1f71eaaba41c09c4c3841d..bf5612b4c048d9febdd24372e6d9b67b1cfbaac2 100644 --- a/core/cloudProtocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/IRCommonProtocol.scala +++ b/linkis-commons/linkis-protocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/IRCommonProtocol.scala @@ -16,9 +16,7 @@ package com.webank.wedatasphere.linkis.protocol -/** - * Created by enjoyyin on 2019/1/14. 
- */ + trait IRCommonProtocol extends IRProtocol { val rule: String } diff --git a/core/cloudProtocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/IRProtocol.scala b/linkis-commons/linkis-protocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/IRProtocol.scala similarity index 93% rename from core/cloudProtocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/IRProtocol.scala rename to linkis-commons/linkis-protocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/IRProtocol.scala index f086081e1fc5f8b9472d9e94804a6a32af128ec0..885133f7d2beb565c846e19514fecd96644d69ba 100644 --- a/core/cloudProtocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/IRProtocol.scala +++ b/linkis-commons/linkis-protocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/IRProtocol.scala @@ -16,7 +16,5 @@ package com.webank.wedatasphere.linkis.protocol -/** - * Created by enjoyyin on 2019/1/7. - */ + trait IRProtocol \ No newline at end of file diff --git a/core/cloudProtocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/IRServiceGroupProtocol.scala b/linkis-commons/linkis-protocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/IRServiceGroupProtocol.scala similarity index 95% rename from core/cloudProtocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/IRServiceGroupProtocol.scala rename to linkis-commons/linkis-protocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/IRServiceGroupProtocol.scala index 89afccc0ed3addae7cb6c8cc411cb195dd9fbd64..b9f8e4df44108099324f06b43ac06175bf019a5a 100644 --- a/core/cloudProtocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/IRServiceGroupProtocol.scala +++ b/linkis-commons/linkis-protocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/IRServiceGroupProtocol.scala @@ -16,9 +16,7 @@ package com.webank.wedatasphere.linkis.protocol -/** - * Created by enjoyyin on 2019/1/7. 
- */ + trait IRServiceGroupProtocol extends IRProtocol with InstanceProtocol { val userWithCreator: UserWithCreator diff --git a/core/cloudProtocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/InstanceProtocol.scala b/linkis-commons/linkis-protocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/InstanceProtocol.scala similarity index 94% rename from core/cloudProtocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/InstanceProtocol.scala rename to linkis-commons/linkis-protocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/InstanceProtocol.scala index 650476cd3f7c9d53f1d942cb2b46a3bb219d4346..311d0d64a70fcafa25b89ba9491780c70ce50629 100644 --- a/core/cloudProtocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/InstanceProtocol.scala +++ b/linkis-commons/linkis-protocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/InstanceProtocol.scala @@ -16,9 +16,7 @@ package com.webank.wedatasphere.linkis.protocol -/** - * Created by enjoyyin on 2019/1/7. - */ + trait InstanceProtocol extends Protocol { var choseInstance: Option[String] = None diff --git a/core/cloudProtocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/Protocol.scala b/linkis-commons/linkis-protocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/Protocol.scala similarity index 93% rename from core/cloudProtocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/Protocol.scala rename to linkis-commons/linkis-protocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/Protocol.scala index f7501493ca984dd3b1c2a192580cbff82bb50798..2bfa0756838dc3cb0f925f60b278916349dd1854 100644 --- a/core/cloudProtocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/Protocol.scala +++ b/linkis-commons/linkis-protocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/Protocol.scala @@ -16,7 +16,5 @@ package com.webank.wedatasphere.linkis.protocol -/** - * Created by enjoyyin on 2019/1/7. 
- */ + trait Protocol \ No newline at end of file diff --git a/core/cloudProtocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/RetryableProtocol.scala b/linkis-commons/linkis-protocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/RetryableProtocol.scala similarity index 79% rename from core/cloudProtocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/RetryableProtocol.scala rename to linkis-commons/linkis-protocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/RetryableProtocol.scala index 84bf31b28008bb40d1b1d54051c7d1a0479ae437..9fd2f40e96a87193c77326576beb975bdd008422 100644 --- a/core/cloudProtocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/RetryableProtocol.scala +++ b/linkis-commons/linkis-protocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/RetryableProtocol.scala @@ -16,12 +16,10 @@ package com.webank.wedatasphere.linkis.protocol -/** - * Created by enjoyyin on 2019/1/7. - */ + trait RetryableProtocol extends Protocol { - val retryNum = 2 - val period = 1000l - val maxPeriod = 3000l - val retryExceptions = Array.empty[Class[_ <: Throwable]] + def retryNum: Int = 2 + def period: Long = 1000l + def maxPeriod: Long = 3000l + def retryExceptions: Array[Class[_ <: Throwable]] = Array.empty[Class[_ <: Throwable]] } \ No newline at end of file diff --git a/core/cloudProtocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/SingleInstanceProtocol.scala b/linkis-commons/linkis-protocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/SingleInstanceProtocol.scala similarity index 93% rename from core/cloudProtocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/SingleInstanceProtocol.scala rename to linkis-commons/linkis-protocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/SingleInstanceProtocol.scala index 3b25e4318e46668b69c34583e70aeb873c8851a4..3ea12e17e32881922fe76e01376ab6e01402522b 100644 --- 
a/core/cloudProtocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/SingleInstanceProtocol.scala +++ b/linkis-commons/linkis-protocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/SingleInstanceProtocol.scala @@ -16,7 +16,5 @@ package com.webank.wedatasphere.linkis.protocol -/** - * Created by enjoyyin on 2019/1/7. - */ + trait SingleInstanceProtocol extends Protocol \ No newline at end of file diff --git a/linkis-commons/linkis-protocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/callback/LogCallbackProtocol.scala b/linkis-commons/linkis-protocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/callback/LogCallbackProtocol.scala new file mode 100644 index 0000000000000000000000000000000000000000..67d7c7e1423ee57654fb4999064f815a659e03cf --- /dev/null +++ b/linkis-commons/linkis-protocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/callback/LogCallbackProtocol.scala @@ -0,0 +1,11 @@ +package com.webank.wedatasphere.linkis.protocol.callback + +import com.webank.wedatasphere.linkis.protocol.message.RequestProtocol + + +// TODO: log type +case class LogCallbackProtocol(nodeId: String, logs: Array[String]) extends RequestProtocol + +case class YarnAPPIdCallbackProtocol(nodeId: String, applicationId: String) extends RequestProtocol + +case class YarnInfoCallbackProtocol(nodeId: String, uri: String) extends RequestProtocol diff --git a/core/cloudProtocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/engine/EngineCallback.scala b/linkis-commons/linkis-protocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/engine/EngineCallback.scala similarity index 96% rename from core/cloudProtocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/engine/EngineCallback.scala rename to linkis-commons/linkis-protocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/engine/EngineCallback.scala index e5cef4085b6c2eca1d8096817a15376aa2183191..5ba62fc236f949938a89e1784bb91df461d73ad3 100644 --- 
a/core/cloudProtocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/engine/EngineCallback.scala +++ b/linkis-commons/linkis-protocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/engine/EngineCallback.scala @@ -16,9 +16,7 @@ package com.webank.wedatasphere.linkis.protocol.engine -/** - * Created by enjoyyin on 2018/9/26. - */ + object EngineCallback { private val DWC_APPLICATION_NAME = "dwc.application.name" private val DWC_INSTANCE = "dwc.application.instance" diff --git a/core/cloudProtocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/engine/EngineState.scala b/linkis-commons/linkis-protocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/engine/EngineState.scala similarity index 92% rename from core/cloudProtocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/engine/EngineState.scala rename to linkis-commons/linkis-protocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/engine/EngineState.scala index 5e16ca27a0e0b99e2c115ea439d9a7b44cf90551..ddeed2a36f55b704b0607187a80da1a46cfd6af6 100644 --- a/core/cloudProtocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/engine/EngineState.scala +++ b/linkis-commons/linkis-protocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/engine/EngineState.scala @@ -16,10 +16,8 @@ package com.webank.wedatasphere.linkis.protocol.engine -/** - * Created by enjoyyin on 2018/9/27. 
- */ -object EngineState extends Enumeration { + +object EngineState extends Enumeration { type EngineState = Value val Starting, Idle, Busy, ShuttingDown, Error, Dead, Success = Value def isCompleted(executorState: EngineState): Boolean = executorState match { diff --git a/linkis-commons/linkis-protocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/engine/RequestEngineStatus.scala b/linkis-commons/linkis-protocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/engine/RequestEngineStatus.scala new file mode 100644 index 0000000000000000000000000000000000000000..5c089ba75ceec0223589069ef5d8044a7ef6e410 --- /dev/null +++ b/linkis-commons/linkis-protocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/engine/RequestEngineStatus.scala @@ -0,0 +1,31 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ + +package com.webank.wedatasphere.linkis.protocol.engine + +import com.webank.wedatasphere.linkis.protocol.RetryableProtocol +import com.webank.wedatasphere.linkis.protocol.message.RequestProtocol + + +case class RequestEngineStatus(messageType: Int) extends RetryableProtocol with RequestProtocol +object RequestEngineStatus { + val Status_Only = 1 + val Status_Overload = 2 + val Status_Concurrent = 3 + val Status_Overload_Concurrent = 4 + val Status_BasicInfo = 5 + val ALL = 6 +} \ No newline at end of file diff --git a/core/cloudProtocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/engine/RequestUserEngineKill.scala b/linkis-commons/linkis-protocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/engine/RequestUserEngineKill.scala similarity index 85% rename from core/cloudProtocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/engine/RequestUserEngineKill.scala rename to linkis-commons/linkis-protocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/engine/RequestUserEngineKill.scala index c3596c305ecc98d4e5fd94aadec116a7a8f693fb..3808be496d1c94379d41fde9b246af6e69848770 100644 --- a/core/cloudProtocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/engine/RequestUserEngineKill.scala +++ b/linkis-commons/linkis-protocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/engine/RequestUserEngineKill.scala @@ -16,9 +16,9 @@ package com.webank.wedatasphere.linkis.protocol.engine -import com.webank.wedatasphere.linkis.protocol.RetryableProtocol +import com.webank.wedatasphere.linkis.protocol.message.RequestProtocol -case class RequestUserEngineKill(ticketId: String, creator: String, user: String, properties: Map[String, String]) extends RetryableProtocol +case class RequestUserEngineKill(ticketId: String, creator: String, user: String, properties: Map[String, String]) extends RequestProtocol case class ResponseUserEngineKill(ticketId: String, status: String, message: String) object ResponseUserEngineKill{ val Success = 
"Success" diff --git a/core/cloudProtocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/mdq/MDQProtocol.scala b/linkis-commons/linkis-protocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/mdq/MDQProtocol.scala similarity index 100% rename from core/cloudProtocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/mdq/MDQProtocol.scala rename to linkis-commons/linkis-protocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/mdq/MDQProtocol.scala diff --git a/core/cloudProtocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/task/Task.scala b/linkis-commons/linkis-protocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/task/Task.scala similarity index 92% rename from core/cloudProtocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/task/Task.scala rename to linkis-commons/linkis-protocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/task/Task.scala index e2f02f28b0bfadc66b9226ed7a42a154701ca03a..82cb9e7d610845294d4505991486c2a43a784525 100644 --- a/core/cloudProtocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/task/Task.scala +++ b/linkis-commons/linkis-protocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/task/Task.scala @@ -16,10 +16,7 @@ package com.webank.wedatasphere.linkis.protocol.task -/** - * created by enjoyyin on 2018/10/8 - * Description: - */ + trait Task { def getInstance: String diff --git a/core/cloudProtocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/utils/ProtocolUtils.scala b/linkis-commons/linkis-protocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/utils/ProtocolUtils.scala similarity index 96% rename from core/cloudProtocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/utils/ProtocolUtils.scala rename to linkis-commons/linkis-protocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/utils/ProtocolUtils.scala index 63e4e710596e7a696e0618414ed4e15185304245..9449f8746a6a71edd7af91182541eb14d0ff8797 100644 --- 
a/core/cloudProtocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/utils/ProtocolUtils.scala +++ b/linkis-commons/linkis-protocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/utils/ProtocolUtils.scala @@ -18,9 +18,7 @@ package com.webank.wedatasphere.linkis.protocol.utils import com.webank.wedatasphere.linkis.common.conf.CommonVars -/** - * Created by enjoyyin on 2019/1/7. - */ + object ProtocolUtils { val SERVICE_SUFFIX = CommonVars("wds.linkis.service.suffix","engineManager,entrance,engine") diff --git a/core/cloudProtocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/utils/TaskUtils.scala b/linkis-commons/linkis-protocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/utils/TaskUtils.scala similarity index 93% rename from core/cloudProtocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/utils/TaskUtils.scala rename to linkis-commons/linkis-protocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/utils/TaskUtils.scala index e888fe13168d9fb23848a20cc9780ad4aef80854..5ea907f3059985aad675e7608af5c62d105053da 100644 --- a/core/cloudProtocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/utils/TaskUtils.scala +++ b/linkis-commons/linkis-protocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/utils/TaskUtils.scala @@ -79,7 +79,9 @@ object TaskUtils { def addSpecialMap(params: util.Map[String, Any], specialMap: util.Map[String, Any]) = addConfigurationMap(params, specialMap, TaskConstant.PARAMS_CONFIGURATION_SPECIAL) - def addDatasourceMap(params: util.Map[String, Any], datasourceMap: util.Map[String, Any]) = - addConfigurationMap(params, datasourceMap, TaskConstant.PARAMS_CONFIGURATION_DATASOURCE) + // TODO + def getLabelsMap(params: util.Map[String, Any]) = getMap(params, TaskConstant.LABELS) + + def addLabelsMap(params: util.Map[String, Any], labels: util.Map[String, Any]): Unit = addMap(params, labels, TaskConstant.LABELS) } diff --git
a/linkis-commons/linkis-protocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/utils/ZuulEntranceUtils.scala b/linkis-commons/linkis-protocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/utils/ZuulEntranceUtils.scala new file mode 100644 index 0000000000000000000000000000000000000000..0849d31ae0719f424bdb498aaba52a47d6cfef08 --- /dev/null +++ b/linkis-commons/linkis-protocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/utils/ZuulEntranceUtils.scala @@ -0,0 +1,149 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.webank.wedatasphere.linkis.protocol.utils + +import com.webank.wedatasphere.linkis.common.ServiceInstance + + +object ZuulEntranceUtils { + + + private val INSTANCE_SPLIT_TOKEN = "_" + + private val EXEC_ID = "exec_id" + + private val SPLIT_LEN = 3 + + def parseExecID(longExecID: String): Array[String] = { + //Add creator to execID while old code is compatible(添加creator到execID,同时老代码兼容) + if (longExecID.startsWith(EXEC_ID)) { + val content = longExecID.replaceFirst(EXEC_ID, "") + val applicationNameLength = Integer.parseInt(content.substring(0, SPLIT_LEN)) + val instanceLength = Integer.parseInt(content.substring(SPLIT_LEN, SPLIT_LEN * 2)) + val applicationName = content.substring(SPLIT_LEN * 2, SPLIT_LEN * 2 + applicationNameLength) + val instances = content.substring(SPLIT_LEN * 2 + applicationNameLength, SPLIT_LEN * 2 + applicationNameLength + instanceLength) + val shortExecID = content.substring(SPLIT_LEN * 2 + applicationNameLength + instanceLength, content.length) + Array[String](EXEC_ID, applicationName, instances, shortExecID) + } else { + // @Deprecated 将在之后删掉该部分内容 + val creatorLength = Integer.parseInt(longExecID.substring(0, 2)) + val executeLength = Integer.parseInt(longExecID.substring(2, 4)) + val instanceLength = Integer.parseInt(longExecID.substring(4, 6)) + val creator = longExecID.substring(6, 6 + creatorLength) + val executeApplicationName = longExecID.substring(6 + creatorLength, 6 + creatorLength + executeLength) + val instance = longExecID.substring(6 + creatorLength + executeLength, 6 + creatorLength + executeLength + instanceLength) + val shortExecID = longExecID.substring(6 + creatorLength + executeLength + instanceLength, longExecID.length) + Array(creator, executeApplicationName, instance, shortExecID) + } + /*val executeLength = Integer.parseInt(longExecID.substring(0,2)) + val instanceLength = Integer.parseInt(longExecID.substring(2,4)) + val executeApplicationName:String = longExecID.substring(4, 4 + executeLength) + 
val instance:String = longExecID.substring(4 + executeLength, 4 + executeLength + instanceLength) + val shortExecID:String = longExecID.substring(4 + executeLength + instanceLength, longExecID.length) + Array[String](executeApplicationName, instance, shortExecID)*/ + } + + @Deprecated + def generateExecID(shortExecID: String, executeApplicationName: String, instance: String, creator: String): String = { + val creatorLength = getLengthStr(creator) + val executeLength = getLengthStr(executeApplicationName) + val instanceLength = getLengthStr(instance) + creatorLength + executeLength + instanceLength + creator + executeApplicationName + instance + shortExecID + } + + + private def isNumberic(s: String): Boolean = { + s.toCharArray foreach { + c => if (c < 48 || c > 57) return false + } + true + } + + /** + * + * @param shortExecID ExecID generated by the scheduler, such as IDE_neiljianliu_0(scheduler生成的ExecID, 如 IDE_neiljianliu_0) + * @param executeApplicationName {dd}{dd}${executeApplicationName}${instance}${shortExecID} + * @return + */ + @Deprecated + def generateExecID(shortExecID:String, executeApplicationName:String, instance:String):String = { + val executeLength = getLengthStr(executeApplicationName) + val instanceLength = getLengthStr(instance) + if (shortExecID.split("_").length == 3) { + //Backward compatible(向下兼容) + val creator = shortExecID.split("_")(0) + val creatorLength = getLengthStr(creator) + return creatorLength + executeLength + instanceLength + creator + executeApplicationName + instance + shortExecID + } + executeLength + instanceLength + executeApplicationName + instance + shortExecID + } + + def main(args: Array[String]): Unit = { + val str = generateExecID("spark_test_01", "linkis-cg-entrance", Array[String]("172.0.0.1:8080")) + val array = parseServiceInstanceByExecID(str) + println(array(3)) + } + + def parseServiceInstanceByExecID(longExecID: String): Array[ServiceInstance] = { + if (longExecID.startsWith(EXEC_ID)) { + val content = 
longExecID.replaceFirst(EXEC_ID, "") + val applicationNameLength = Integer.parseInt(content.substring(0, SPLIT_LEN)) + val instanceLength = Integer.parseInt(content.substring(SPLIT_LEN, SPLIT_LEN * 2)) + val applicationName = content.substring(SPLIT_LEN * 2, SPLIT_LEN * 2 + applicationNameLength) + val instances = content.substring(SPLIT_LEN * 2 + applicationNameLength, SPLIT_LEN * 2 + applicationNameLength + instanceLength) + val shortExecID = content.substring(SPLIT_LEN * 2 + applicationNameLength + instanceLength, content.length) + instances.split(INSTANCE_SPLIT_TOKEN).map(ServiceInstance(applicationName, _)) + } else { + // @Deprecated 将在之后删掉该部分内容 + val creatorLength = Integer.parseInt(longExecID.substring(0, 2)) + val executeLength = Integer.parseInt(longExecID.substring(2, 4)) + val instanceLength = Integer.parseInt(longExecID.substring(4, 6)) + val executeApplicationName = longExecID.substring(6 + creatorLength, 6 + creatorLength + executeLength) + val instance = longExecID.substring(6 + creatorLength + executeLength, 6 + creatorLength + executeLength + instanceLength) + Array(ServiceInstance(executeApplicationName, instance)) + } + } + + + private def getLengthStr(string: String): String = { + val length = string.length + if (length >= 10) String.valueOf(length) else "0" + String.valueOf(length) + } + + def generateExecID(shortExecID: String, applicationName: String, instances: Array[String]): String = { + if (null == instances || instances.isEmpty) { + throw new RuntimeException("生成ExecID失败,传入的Instance不能为空") + } + val applicationNameLength = getStrFixedLen(applicationName, SPLIT_LEN) + val instanceStr = instances.mkString(INSTANCE_SPLIT_TOKEN) + val instanceStrLength = getStrFixedLen(instanceStr, SPLIT_LEN) + EXEC_ID + applicationNameLength + instanceStrLength + applicationName + instanceStr + shortExecID + } + + + /*private def getLengthStr(string:String):String = { + val length = string.length + if (length >= 10) String.valueOf(length) else "0" + 
String.valueOf(length) + }*/ + + private def getStrFixedLen(string: String, len: Int): String = { + val str = String.valueOf(string.length) + val res = "0" * (len - str.length) + str + res + } + +} diff --git a/core/cloudProtocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/variable/RequestQueryGlobalVariable.scala b/linkis-commons/linkis-protocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/variable/RequestQueryGlobalVariable.scala similarity index 95% rename from core/cloudProtocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/variable/RequestQueryGlobalVariable.scala rename to linkis-commons/linkis-protocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/variable/RequestQueryGlobalVariable.scala index 4629cefc2dd6c1e7ace767130dd5e864343eb107..c920a658510b7713c5389447e093e48d92d17bf5 100644 --- a/core/cloudProtocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/variable/RequestQueryGlobalVariable.scala +++ b/linkis-commons/linkis-protocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/variable/RequestQueryGlobalVariable.scala @@ -18,9 +18,7 @@ package com.webank.wedatasphere.linkis.protocol.variable import com.webank.wedatasphere.linkis.protocol.{CacheableProtocol, RetryableProtocol} -/** - * Created by enjoyyin on 2018/10/18. 
- */ + trait VariableProtocol case class RequestQueryGlobalVariable (userName:String) extends CacheableProtocol with RetryableProtocol with VariableProtocol diff --git a/core/cloudProtocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/variable/ResponseQueryVariable.scala b/linkis-commons/linkis-protocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/variable/ResponseQueryVariable.scala similarity index 95% rename from core/cloudProtocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/variable/ResponseQueryVariable.scala rename to linkis-commons/linkis-protocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/variable/ResponseQueryVariable.scala index fe87ce42aec32c87af5fe3a6276f1607ee009ba6..4a92c75a7e05f05fcb600aa868bbac9d67807a9b 100644 --- a/core/cloudProtocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/variable/ResponseQueryVariable.scala +++ b/linkis-commons/linkis-protocol/src/main/scala/com/webank/wedatasphere/linkis/protocol/variable/ResponseQueryVariable.scala @@ -18,9 +18,7 @@ package com.webank.wedatasphere.linkis.protocol.variable import java.util -/** - * Created by enjoyyin on 2018/10/18. 
- */ + class ResponseQueryVariable extends VariableProtocol { private var keyAndValue: util.Map[String, String] = _ def getKeyAndValue :util.Map[String, String] = keyAndValue diff --git a/linkis-commons/linkis-rpc/pom.xml b/linkis-commons/linkis-rpc/pom.xml new file mode 100644 index 0000000000000000000000000000000000000000..8084bf836962ab333f044c48005e5915dfddb7ae --- /dev/null +++ b/linkis-commons/linkis-rpc/pom.xml @@ -0,0 +1,167 @@ + + + + + + linkis + com.webank.wedatasphere.linkis + 1.0.0-RC1 + + 4.0.0 + + linkis-rpc + + + + com.webank.wedatasphere.linkis + linkis-protocol + ${linkis.version} + + + com.webank.wedatasphere.linkis + linkis-module + + + spring-cloud-commons + org.springframework.cloud + + + + + org.springframework.cloud + spring-cloud-starter-openfeign + ${spring.feign.version} + + + spring-boot-autoconfigure + org.springframework.boot + + + spring-boot-starter-aop + org.springframework.boot + + + spring-web + org.springframework + + + jackson-annotations + com.fasterxml.jackson.core + + + jackson-core + com.fasterxml.jackson.core + + + jackson-databind + com.fasterxml.jackson.core + + + jsr305 + com.google.code.findbugs + + + HdrHistogram + org.hdrhistogram + + + spring-cloud-commons + org.springframework.cloud + + + spring-cloud-starter-openfeign + org.springframework.cloud + + + + + org.springframework.cloud + spring-cloud-starter-openfeign + ${spring.cloud.version} + + + spring-boot-autoconfigure + org.springframework.boot + + + spring-boot-starter-aop + org.springframework.boot + + + spring-cloud-commons + org.springframework.cloud + + + spring-web + org.springframework + + + + + org.springframework.cloud + spring-cloud-commons + ${spring.cloud.version} + + + spring-security-crypto + org.springframework.security + + + + + + io.protostuff + protostuff-core + 1.6.2 + compile + + + + + + io.protostuff + protostuff-runtime + 1.6.2 + + + + + + + + org.apache.maven.plugins + maven-deploy-plugin + + + + net.alchim31.maven + scala-maven-plugin + + 
+ org.apache.maven.plugins + maven-jar-plugin + + + + + + ${basedir}/src/main/resources + + + ${project.artifactId}-${project.version} + + + \ No newline at end of file diff --git a/linkis-commons/linkis-rpc/src/main/java/com/webank/wedatasphere/linkis/rpc/serializer/NoneDelegate.java b/linkis-commons/linkis-rpc/src/main/java/com/webank/wedatasphere/linkis/rpc/serializer/NoneDelegate.java new file mode 100644 index 0000000000000000000000000000000000000000..e2e57467ecb3d807413228c1944a3d6ded02c172 --- /dev/null +++ b/linkis-commons/linkis-rpc/src/main/java/com/webank/wedatasphere/linkis/rpc/serializer/NoneDelegate.java @@ -0,0 +1,79 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.webank.wedatasphere.linkis.rpc.serializer; + +import io.protostuff.*; +import io.protostuff.runtime.Delegate; +import scala.Option; + +import java.io.IOException; + + +public class NoneDelegate implements Delegate