diff --git a/packageship/.gitignore b/packageship/.gitignore
new file mode 100644
index 0000000000000000000000000000000000000000..2027f52cd9ebda8e294616082ebbf3cec258f56a
--- /dev/null
+++ b/packageship/.gitignore
@@ -0,0 +1,4 @@
+.DS_Store
+*/.DS_Store
+*.pyc
+*.vscode
\ No newline at end of file
diff --git a/packageship/README.md b/packageship/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..336a94661c9d1d0cdddb7d5045e5498349ab0491
--- /dev/null
+++ b/packageship/README.md
@@ -0,0 +1,95 @@
+# pkgmnt
+
+#### 介绍
+pkgmnt希望提供软件包依赖,生命周期,补丁查询等功能。
+1.软件包依赖:方便社区人员在新引入、软件包更新和删除的时候能方便的了解软件的影响范围。
+2.生命周期管理:跟踪upstream软件包发布状态,方便维护人员了解当前软件状态,及时升级到合理的版本。
+3.补丁查询:方便社区人员了解openEuler软件包的补丁情况,方便的提取补丁内容(待规划)
+
+
+#### 软件架构
+系统采用flask-restful开发,使用SQLAlchemy ORM查询框架,同时支持mysql和sqlite数据库,通过配置文件的
+形式进行更改
+
+
+#### 安装教程
+
+1. 安装系统的依赖包
+
+ pip install -r requirements.txt
+
+2. 执行打包命令,打包命令行工具,其中(pkgship)为命令行的名称,可以随意更改
+
+ 2.1 打包生成 .spec打包文件
+
+ pyinstaller -F -n pkgship cli.py
+
+ 2.2 修改 .spec打包文件,将hiddenimports中加入如下配置
+
+ hiddenimports=['pkg_resources.py2_warn']
+
+ 2.3 生成二进制命令文件
+
+ pyinstaller pkgship.spec
+
+ 2.4 二进制命令文件拷贝至可运行目录
+
+ cp dist/pkgship /usr/local/bin
+
+3. 系统的部署
+
+ 3.1 安装uwsgi服务器
+
+ pip install uwsgi
+
+ 3.2 修改服务的配置文件
+
+ cd /etc/pkgship/
+
+ vi package.ini
+
+    备注: 配置文件中可以支持sqlite数据库和mysql数据库,可根据相应配置进行修改
+
+    如果需要调整 查询和修改相关端口,请同步更改 manage.ini 和selfpkg.ini 中的配置
+
+ 切记(manage.py为拥有写入权限,selfpkg为拥有查询权限)
+
+ 3.3 启动系统服务
+
+ 单独启动manage服务: pkgshipd start manage
+
+ 单独启动selfpkg服务: pkgshipd start selfpkg
+
+ 同时启动manage和selfpkg服务: pkgshipd start
+
+ 3.4 停止系统服务
+ 停止manage服务: pkgshipd stop manage
+
+ 停止selfpkg服务: pkgshipd stop selfpkg
+
+ 同时停止manage和selfpkg服务: pkgshipd stop
+
+
+
+
+#### 使用说明
+
+1. 命令行使用
+
+ pkgship --help
+
+2. restful接口使用
+
+ 参考接口设计文档中的接口定义,进行相关接口调用
+
+#### 参与贡献
+
+1. Fork 本仓库
+2. 新建 Feat_xxx 分支
+3. 提交代码
+4. 新建 Pull Request
+
+
+#### 会议记录
+1. 2020.5.18:https://etherpad.openeuler.org/p/aHIX4005bTY1OHtOd_Zc
+
diff --git a/packageship/doc/design/packageManagerDesigen.md b/packageship/doc/design/packageManagerDesigen.md
new file mode 100644
index 0000000000000000000000000000000000000000..be6278c637dda3266e02faa494b979d312c7b546
--- /dev/null
+++ b/packageship/doc/design/packageManagerDesigen.md
@@ -0,0 +1,223 @@
+#特性描述
+管理OS软件包依赖关系,提供依赖和被依赖关系的完整图谱查询功能,方便开发者识别软件包范围,减少依赖梳理复杂度。
+##原始需求-软件包依赖管理
+- 输入软件包A,支持查询A的所有编译依赖(新增软件包)
+- 输入软件包A,支持查询A的所有安装依赖(新增软件包)
+- 输入软件包A,支持查询所有安装依赖A的软件(升级,删除软件包场景)
+- 输入软件包A,支持查询所有编译依赖A的软件(升级,删除软件包场景)
+
+#依赖组件
+- createrepo
+
+#License
+Mulan V2
+
+#流程分析
+##软件包依赖管理
+
+###功能清单
+- SR-PKG-MANAGE01-AR01:支持repo数据库导入
+- SR-PKG-MANAGE01-AR02:支持对多个数据库分级查询(内部接口)
+- SR-PKG-MANAGE01-AR03:支持软件包安装/编译依赖查询
+- SR-PKG-MANAGE01-AR04:支持软件包自编译/自安装依赖查询
+- SR-PKG-MANAGE01-AR05:支持被依赖查询
+- SR-PKG-MANAGE01-AR06:支持编译被依赖查询
+- SR-PKG-MANAGE01-AR07:支持前台查询和显示软件依赖关系
+在线评审意见平台,支持查询评审意见及溯源
+
+##外部接口清单
+
+| 序号 | 接口名称 | 类型 | 说明 | 入参 | 出参 | 特性号 |
+| - | - | - | - | - | - | - |
+| 1 | /packages | GET | 支持查看所有软件包信息 | dbName | *packages* | AR01 & AR02 |
+| 2 | /packages | PUT | 支持更新指定软件包的信息 | *packages* | null | AR01 |
+| 3 | /packages/findByPackName | GET | 支持查询指定软件包的信息 | packageName,dbName,version(option) | *packages* | AR01 & AR02 |
+| 4 | /packages/findInstallDepend | POST | 支持查询指定软件包安装依赖(在一个或多个数据库中分级查询) | packageName,version(option),dbPreority | *response* | AR02 & AR03 |
+| 5 | /packages/findBuildDepend | POST | 支持查询指定软件包的编译依赖(在一个或多个数据库中分级查询) | packageName、version、repoPreority | *response* | AR02 & AR03 |
+| 6 | /packages/findSelfDepend | POST | 支持查询指定软件包的自安装/自编译依赖(在一个或多个数据库中分级查询) | packageName、version、repoPreority、withSubPack、withSelfBuild | packageName、installDepend、buildDepend、parentNode | AR02 & AR04 |
+| 7 | /packages/findBeDepend | POST | 支持在数据库中查询指定软件包的所有被依赖 | packageName、version、repoPreority、withSubPack | packageName、installBeDepend、buildBeDepend、parentNode | AR05 |
+| 8 | /repodatas | GET | 支持获取所有引入的版本库 | null | *Repodatas* | AR01 |
+| 9 | /repodatas | POST | 支持repo数据库的导入 | dbName、dbPath、priority、dbStatus、repofiles | null | AR01 |
+| 10 | /repodatas | PUT | 支持版本库的更新 | dbName、dbPath、priority、dbStatus | null | AR01 |
+
+###python函数接口清单
+
+| 序号 | 接口名称 | 说明 | 入参 | 出参 |
+| - | - | - | - | - |
+| 1 | get_packages | 支持查看所有软件包信息 | dbName | *packages* |
+| 2 | update_package | 支持更新指定软件包信息 | *package* | null |
+| 3 | query_package | 支持查询指定软件包的信息 | source_name,dbname,version(option) | *package* |
+| 4 | query_install_depend | 支持查询指定软件包安装依赖(在一个或多个数据库中分级查询) | binary_name,version(option), db_preority | *response* |
+| 5 | query_build_depend | 支持查询指定软件包的编译依赖(在一个或多个数据库中分级查询) | source_name,version(option),db_preority,selfbuild=1/0 | *response* |
+| 6 | query_subpack | 支持查询指定源码包的子包 | source_name,version(option),db_preority | *subpack_list* |
+| 7 | query_srcpack | 支持查询指定二进制包的源码包 | binary_name,version(option),dbname | source_name |
+| 6 | query_self_depend | 支持查询指定软件包的自安装/自编译依赖(在一个或多个数据库中分级查询) | package_name,version(option),db_preority,withsubpack(default:0),withselfbuild(default:0) | *response* |
+| 7 | query_self_be_depend | 支持在数据库中查询指定源码包的所有被依赖(在一个或多个数据库中分级查询) | source_name,version(option),db_preority,withsubpack(default:0) | *response* |
+| 8 | get_db | 支持获取所有引入的版本库 | null | *dbinfo* |
+| 9 | import_db | 支持repo数据库的导入 | *dbinfo* | null |
+| 10 | update_db | 支持版本库的更新 | *dbinfo* | null |
+
+###外部config文件输入格式清单
+1.初始化配置文件(init_db.config)
+```
+#dbname - 数据库名称,unique,不可重复
+# src_db_file - 包含源码包信息的sqlite 文件
+# bin_db_file - 包含二进制包信息的sqlite 文件
+# status - 数据库状态,enable表示可用,disable表示不可用
+# priority - 1~100 default priority for user to query the information in databases
+
+- dbname: openEuler-20.03-LTS
+ src_db_file: /etc/pkgmng/dbname/primary_src.sqlite
+ bin_db_file: /etc/pkgmng/dbname/primary_binary.sqlite
+ status: enable
+ priority: 1
+
+- dbname: openEuler-20.04-LTS
+ src_db_file: testdb/src
+ bin_db_file: testdb/bin
+ status: enable
+ priority: 2
+
+- dbname: openEuler-20.05-LTS
+ src_db_file: testdb/src
+ bin_db_file: testdb/bin
+ status: enable
+ priority: 3
+```
+2.更新数据库信息(update_db.config)
+```
+- dbname: openEuler-20.03-LTS
+ changeDBname: openEuler-LTS
+ addDBFile: /etc/pkgmng/dbname/primary1.sqlite
+ removeDBFile: /etc/pkgmng/dbname/primary2.sqlite
+ status: disable
+ priority: 4
+```
+
+3.更新包的信息(package.config)
+```
+#level: 维护的优先级,1-4
+- dbname: openEuler-20.03-LTS
+ packageName: openssh
+ version: 2.99
+ maintainer: solar-hu
+ level: 3
+```
+###object
+```
+
+ openssh
+ 1.3.2
+ 2-66
+ GLv2
+ solar-hu
+ http://linuxcontainers.org
+ lxc-4.0.1.tar.gz
+ openEuler-20.03-LTS
+
+ zip-devel
+ libmediaart-devel
+
+
+ openssh-devel
+
+ maven-public
+ tomcat
+
+ openssh-static
+ openssh-help
+
+
+```
+
+```
+
+ openEuler
+ 4
+ enable
+
+```
+
+#数据表设计
+- src-pack
+
+| 序号 | 名称 | 说明 | 类型 | 键 | 允许空 | 默认值 |
+| - | - | - | - | - | - | - |
+| 1 | id | 源码包条目序号 | Int | Primary | NO | - |
+| 2 | name | 源码包包名 | String | | NO | - |
+| 3 | version | 版本号 | String | | NO | - |
+| 4 | license | 证书 | String | | NO | - |
+| 5 | sourceURL | 源码包获取地址 | String | | YES | - |
+| 6 | downloadURL | 下载地址获取 | String | | YES | - |
+| 7 | Maintaniner | 维护责任人 | String | | YES | - |
+| 8 | MaintainLevel | 维护优先级 | String | | YES | - |
+
+- bin-pack
+
+| 序号 | 名称 | 说明 | 类型 | 键 | 允许空 | 默认值 |
+| - | - | - | - | - | - | - |
+| 1 | id | 二进制包条目序号 | Int | Primary | NO | - |
+| 2 | name | 二进制包包名 | String | | NO | - |
+| 3 | version | 版本号 | String | | NO | - |
+| 4 | srcIDkey | 源码包包名ID | Int | foreignkey | NO | - |
+
+- pack-requires
+
+| 序号 | 名称 | 说明 | 类型 | 键 | 允许空 | 默认值 |
+| - | - | - | - | - | - | - |
+| 1 | id | 依赖组件条目序号 | Int | Primary | NO | - |
+| 2 | name | 依赖组件名 | String | | NO | - |
+| 3 | depProIDkey | 依赖组件对应的ID | Int | foreignkey | NO | - |
+| 4 | srcIDkey | 若为源码包该值不为空,列出来的是编译依赖 | Int | foreignkey | YES | - |
+| 5 | binIDkey | 若为安装包该值不为空,列出来的是安装依赖 | Int | foreignkey | YES | - |
+
+- pack-provides
+
+| 序号 | 名称 | 说明 | 类型 | 键 | 允许空 | 默认值 |
+| - | - | - | - | - | - | - |
+| 1 | id | 组件条目序号 | Int | Primary | NO | - |
+| 2 | name | 组件名 | Int | Primary | NO | - |
+| 3 | binIDkey | 提供组件的二进制包ID | Int | foreignkey | NO | - |
+
+
+- repoCheckSame
+
+| 序号 | 名称 | 说明 | 类型 | 键 | 允许空 | 默认值 |
+| - | - | - | - | - | - | - |
+| 1 | id | repoFile条目序号 | Int | Primary | NO | - |
+| 2 | name | repoFile名称 | String | | NO | - |
+| 3 | md5sum | md5sum值 | String | | NO | - |
+
+
+
+#功能设计
+##主体流程分析
+
+
+##依赖关系梳理
+findInstallDepend:
+
+findBuildDepend:
+
+findBeDepend(withSubPack = 0):
+删除源码包A造成的影响:
+1.影响他的子包(A1,A2)
+2.安装依赖A1,A2的二进制包
+3.编译依赖A1,A2的源码包
+
+findBeDepend(withSubPack = 1):
+删除源码包A造成的影响:
+1.影响他的子包(A1,A2)
+2.安装依赖A1,A2的二进制包(B1)
+3.编译依赖A1,A2的源码包
+4.删除B1的源码包B,影响B的其他子包B2,B3
+
+
+#遗留问题
+- repo数据库分析,如何做数据组织 汪奕如
+- 嵌套依赖查询流程整理 汪奕如
+- svn/git监控原型验证 陈燕潘
+- gitee机机接口对齐 陈燕潘
+- 版本升级如何更新到补丁获取系统中 陈燕潘
+- web前台拓扑图UCD设计 NA
+- 数据表设计 汪奕如
diff --git a/packageship/doc/design/pkgimg/Package.JPG b/packageship/doc/design/pkgimg/Package.JPG
new file mode 100644
index 0000000000000000000000000000000000000000..775833d113cc0d3c975fe397abb8fa73034f24da
Binary files /dev/null and b/packageship/doc/design/pkgimg/Package.JPG differ
diff --git a/packageship/doc/design/pkgimg/Repodatas.JPG b/packageship/doc/design/pkgimg/Repodatas.JPG
new file mode 100644
index 0000000000000000000000000000000000000000..b835f470ed32da06374a1f3330d5b1b817840fb3
Binary files /dev/null and b/packageship/doc/design/pkgimg/Repodatas.JPG differ
diff --git a/packageship/doc/design/pkgimg/beDepend_1.JPG b/packageship/doc/design/pkgimg/beDepend_1.JPG
new file mode 100644
index 0000000000000000000000000000000000000000..8dfd56a92936f14f2afe587926ca06f5c027f2e5
Binary files /dev/null and b/packageship/doc/design/pkgimg/beDepend_1.JPG differ
diff --git a/packageship/doc/design/pkgimg/beDepend_2.JPG b/packageship/doc/design/pkgimg/beDepend_2.JPG
new file mode 100644
index 0000000000000000000000000000000000000000..4db41a623a7e32f01d1353e4f966a89755b151ac
Binary files /dev/null and b/packageship/doc/design/pkgimg/beDepend_2.JPG differ
diff --git a/packageship/doc/design/pkgimg/buildDepend_1.JPG b/packageship/doc/design/pkgimg/buildDepend_1.JPG
new file mode 100644
index 0000000000000000000000000000000000000000..cd01369fbe5e3a4075c603a62adf5252b24a2401
Binary files /dev/null and b/packageship/doc/design/pkgimg/buildDepend_1.JPG differ
diff --git a/packageship/doc/design/pkgimg/depend_flowchart.png b/packageship/doc/design/pkgimg/depend_flowchart.png
new file mode 100644
index 0000000000000000000000000000000000000000..7c6447afa562b3d981df8a355b043d4ee2dadef9
Binary files /dev/null and b/packageship/doc/design/pkgimg/depend_flowchart.png differ
diff --git a/packageship/doc/design/pkgimg/installDepend.JPG b/packageship/doc/design/pkgimg/installDepend.JPG
new file mode 100644
index 0000000000000000000000000000000000000000..0f35928860281ca6ca249200e6313dc63dab621f
Binary files /dev/null and b/packageship/doc/design/pkgimg/installDepend.JPG differ
diff --git a/packageship/doc/design/pkgimg/packagemanagement.JPG b/packageship/doc/design/pkgimg/packagemanagement.JPG
new file mode 100644
index 0000000000000000000000000000000000000000..df7151a74e051051eaeb3fb16a3abb5f0e0d2e37
Binary files /dev/null and b/packageship/doc/design/pkgimg/packagemanagement.JPG differ
diff --git a/packageship/example/annotation_specifications.py b/packageship/example/annotation_specifications.py
new file mode 100644
index 0000000000000000000000000000000000000000..169f1cc34d664c974212d2c995ec3f02be28786e
--- /dev/null
+++ b/packageship/example/annotation_specifications.py
@@ -0,0 +1,88 @@
+"""
+description: Function and class annotation specifications in the project
+functions: test
+"""
+# In the PY file, if all are functions, the format of the top information is as above,
+# the description information is filled in, and the function name is filled in functions
+# Args:
+# List the name of each parameter with a colon and a space after the name,
+# Separate the description of this parameter.
+# If the description is too long than 80 characters in a single line,
+# use a hanging indent of 2 or 4 spaces (consistent with the rest of the file)
+# The description should include the type and meaning required
+# Returns:
+# Describes the type and semantics of the return value. If the function returns none,
+# this part can be omitted
+# Raises:
+# Possible anomalies
+
+
+def test(name, age):
+ """
+ Description: Function description information
+ Args:
+ name: name information
+ age: age information
+ Returns:
+ Returned information
+ Raises:
+ IOError: An error occurred accessing the bigtable.Table object.
+ """
+ name = 'tom'
+ age = 11
+ return name, age
+
+
+# description: Function and class annotation specifications in the project
+# class: SampleClass
+# In the PY file, if all are classes, the top information format is as above,
+# description fills in the description information, class fills in the class name,
+# uses three quotation marks, does not need#
+# Class should have a document string under its definition that describes
+# the class
+# If your class has attributes,
+# Then there should be an attribute section in the document
+# And it should follow the same format as function parameters
+class SampleClass():
+ """
+ Summary of class here.
+ Longer class information....
+ Attributes:
+ likes_spam: A boolean indicating if we like SPAM or not.
+ eggs: An integer count of the eggs we have laid.
+ """
+
+ def __init__(self, likes_spam=False):
+ """Inits SampleClass with blah."""
+ self.likes_spam = likes_spam
+ self.eggs = "eggs"
+
+ def public_method_one(self, egg, fun):
+ """
+ Description: Function description information
+ Args:
+ egg: egg information
+ fun: fun information
+ Returns:
+ Returned information
+ Raises:
+ AttributeError
+ """
+ self.eggs = "eggs"
+ egg = "egg"
+ fun = "fun"
+ return egg, fun
+
+ def public_method_two(self, tom):
+ """
+ Description: Function description information
+ Args:
+ tom: tom information
+ Returns:
+ Returned information
+ Raises:
+ Error
+ """
+ self.likes_spam = True
+ tom = 'cat'
+ return tom
diff --git a/packageship/packageship/__init__.py b/packageship/packageship/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/packageship/packageship/application/__init__.py b/packageship/packageship/application/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..bc3a6316a89f8f9f41d91387a43444657aab3f5b
--- /dev/null
+++ b/packageship/packageship/application/__init__.py
@@ -0,0 +1,38 @@
+#!/usr/bin/python3
+"""
+ Initial operation and configuration of the flask project
+"""
+from flask import Flask
+from flask_session import Session
+from packageship.application.settings import Config
+from packageship.libs.log import setup_log
+
+OPERATION = None
+
+
+def init_app(operation):
+ """
+ Project initialization function
+ """
+ app = Flask(__name__)
+
+ # log configuration
+ setup_log(Config)
+
+ # Load configuration items
+
+ app.config.from_object(Config)
+
+ # Open session function
+ Session(app)
+
+ global OPERATION
+ OPERATION = operation
+
+ # Register Blueprint
+ from packageship.application.apps import blue_point
+ for blue, api in blue_point:
+ api.init_app(app)
+ app.register_blueprint(blue)
+
+ return app
diff --git a/packageship/packageship/application/app_global.py b/packageship/packageship/application/app_global.py
new file mode 100644
index 0000000000000000000000000000000000000000..25d9dbe0301c55e2856f9fca10b15f143a7f33b1
--- /dev/null
+++ b/packageship/packageship/application/app_global.py
@@ -0,0 +1,29 @@
+#!/usr/bin/python3
+"""
+Description: Interception before request
+"""
+from flask import request
+from packageship import application
+from packageship.application.apps.package.url import urls
+
+
+__all__ = ['identity_verification']
+
+
+def identity_verification():
+ """
+ Description: Requested authentication
+ Args:
+ Returns:
+ Raises:
+ """
+ if request.url_rule:
+ url_rule = request.url_rule.rule
+ for view, url, authentication in urls:
+ if url == url_rule and application.OPERATION in authentication.keys():
+ if request.method not in authentication.get(application.OPERATION):
+ return False
+ break
+ return True
+
+ return False
diff --git a/packageship/packageship/application/apps/__init__.py b/packageship/packageship/application/apps/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..6a86c78c92c6943cd11840e2b114989bc6bf50ab
--- /dev/null
+++ b/packageship/packageship/application/apps/__init__.py
@@ -0,0 +1,11 @@
+#!/usr/bin/python3
+"""
Collection of the application's blueprints and their REST APIs
+"""
+from packageship.application.apps.package import package, api as package_api
+
+blue_point = [
+ (package, package_api)
+]
+
+__all__ = ['blue_point']
diff --git a/packageship/packageship/application/apps/package/__init__.py b/packageship/packageship/application/apps/package/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..987ad6145195cd94d044464c03e277aa73bf270a
--- /dev/null
+++ b/packageship/packageship/application/apps/package/__init__.py
@@ -0,0 +1,16 @@
+from flask.blueprints import Blueprint
+from flask_restful import Api
+from packageship.application.apps.package.url import urls
+from packageship import application
+
+package = Blueprint('package', __name__)
+
+# init restapi
+api = Api()
+
+for view, url, operation in urls:
+ if application.OPERATION and application.OPERATION in operation.keys():
+ api.add_resource(view, url)
+
+
+__all__ = ['package', 'api']
diff --git a/packageship/packageship/application/apps/package/function/__init__.py b/packageship/packageship/application/apps/package/function/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/packageship/packageship/application/apps/package/function/be_depend.py b/packageship/packageship/application/apps/package/function/be_depend.py
new file mode 100644
index 0000000000000000000000000000000000000000..84654af72ec420727fa35fd132f25fd6460305d0
--- /dev/null
+++ b/packageship/packageship/application/apps/package/function/be_depend.py
@@ -0,0 +1,222 @@
+#!/usr/bin/python3
+"""
+Description:The dependencies of the query package
+ Used for package deletion and upgrade scenarios
+ This includes both install and build dependencies
+Class: BeDepend
+"""
+from sqlalchemy import text
+from sqlalchemy.exc import SQLAlchemyError
+from sqlalchemy.sql import literal_column
+from flask import current_app
+from packageship.libs.dbutils import DBHelper
+from packageship.application.models.package import src_pack
+from packageship.application.apps.package.function.constants import ResponseCode
+
+
+class BeDepend():
+ """
+ Description: Find the dependencies of the source package
+ Attributes:
+ source_name: source name
+ db_name: database name
+ with_sub_pack: with_sub_pack
+ source_name_set:Source package lookup set
+ bin_name_set:Bin package lookup set
+ result_dict:return json
+ """
+
+ def __init__(self, source_name, db_name, with_sub_pack):
+ """
+ init class
+ """
+ self.source_name = source_name
+ self.db_name = db_name
+ self.with_sub_pack = with_sub_pack
+ self.source_name_set = set()
+ self.bin_name_set = set()
+ self.result_dict = dict()
+
+ def main(self):
+ """
+ Description: Map the database, if the source
+ package of the query is not in the database,
+ throw an exception. Directly to the end
+ Args:
+ Returns:
+ "source name": [
+ "source",
+ "version",
+ "dbname",
+ [
+ [
+ "root",
+ null
+ ]
+ ]
+ ]
+ Raises:
+ """
+ with DBHelper(db_name=self.db_name) as data_base:
+ src_obj = data_base.session.query(
+ src_pack).filter_by(name=self.source_name).first()
+ if src_obj:
+ # spell dictionary
+ self.result_dict[self.source_name + "_src"] = [
+ "source",
+ src_obj.version,
+ self.db_name,
+ [["root", None]]
+ ]
+ self.source_name_set.add(self.source_name)
+ self.package_bedepend(
+ [src_obj.id], data_base, package_type='src')
+
+ return self.result_dict
+
+ def package_bedepend(self, pkg_id_list, data_base, package_type):
+ """
+ Description: Query the dependent function
+ Args:
+ pkg_id_list:source or binary packages id
+ data_base: database
+ package_type: package type
+ Returns:
+ Raises:
+ SQLAlchemyError: Database connection exception
+ """
+ search_set = set(pkg_id_list)
+ id_in = literal_column('id').in_(search_set)
+ # package_type
+ if package_type == 'src':
+ sql_str = text("""
+ SELECT b1.name AS search_bin_name,
+ b1.version AS search_bin_version,
+ src.NAME AS source_name,
+ b2.name AS bin_name,
+ b2.id AS bin_id,
+ s1.name AS bebuild_src_name,
+ s1.id AS bebuild_src_id,
+ s2.name AS install_depend_src_name,
+ s2.id AS install_depend_src_id
+ FROM
+ ( SELECT id,NAME FROM src_pack WHERE {} ) src
+ LEFT JOIN bin_pack b1 ON b1.srcIDkey = src.id
+ LEFT JOIN pack_provides ON pack_provides.binIDkey = b1.id
+ LEFT JOIN pack_requires ON pack_requires.depProIDkey = pack_provides.id
+ LEFT JOIN src_pack s1 ON s1.id = pack_requires.srcIDkey
+ LEFT JOIN bin_pack b2 ON b2.id = pack_requires.binIDkey
+ LEFT JOIN src_pack s2 ON s2.id = b2.srcIDkey;""".format(id_in))
+ if package_type == 'bin':
+ sql_str = text("""
+ SELECT b1.name AS search_bin_name,
+ b1.version AS search_bin_version,
+ s3.NAME AS source_name,
+ b2.name AS bin_name,
+ b2.id AS bin_id,
+ s1.name AS bebuild_src_name,
+ s1.id AS bebuild_src_id,
+ s2.name AS install_depend_src_name,
+ s2.id AS install_depend_src_id
+ FROM
+ (SELECT id,NAME,version,srcIDkey FROM bin_pack WHERE {} ) b1
+ LEFT JOIN src_pack s3 ON s3.id = b1.srcIDkey
+ LEFT JOIN pack_provides ON pack_provides.binIDkey = b1.id
+ LEFT JOIN pack_requires ON pack_requires.depProIDkey = pack_provides.id
+ LEFT JOIN src_pack s1 ON s1.id = pack_requires.srcIDkey
+ LEFT JOIN bin_pack b2 ON b2.id = pack_requires.binIDkey
+ LEFT JOIN src_pack s2 ON s2.id = b2.srcIDkey;
+ """.format(id_in))
+ try:
+ result = data_base.session.execute(
+ sql_str, {
+ 'id_{}'.format(i): v for i, v in enumerate(
+ search_set, 1)}).fetchall()
+ except SQLAlchemyError as sql_err:
+ current_app.logger.error(sql_err)
+ return ResponseCode.response_json(ResponseCode.CONNECT_DB_ERROR)
+
+ if result is None:
+ return
+ # Source and binary packages that were found to be dependent
+ source_id_list = []
+ bin_id_list = []
+ for obj in result:
+ if obj.source_name is None:
+ source_name = 'NOT FOUND'
+ else:
+ source_name = obj.source_name
+ if obj.bebuild_src_name:
+ # Determine if the source package has been checked
+ parent_node = obj.bebuild_src_name
+ be_type = "build"
+ # Call the spell dictionary function
+ self.make_dicts(
+ obj.search_bin_name,
+ source_name,
+ obj.search_bin_version,
+ parent_node,
+ be_type)
+
+ if obj.bebuild_src_name not in self.source_name_set:
+ self.source_name_set.add(obj.bebuild_src_name)
+ source_id_list.append(obj.bebuild_src_id)
+
+ if obj.bin_name:
+ # Determine if the bin package has been checked
+ parent_node = obj.bin_name
+ be_type = "install"
+ # Call the spell dictionary function
+ self.make_dicts(
+ obj.search_bin_name,
+ source_name,
+ obj.search_bin_version,
+ parent_node,
+ be_type)
+
+ if obj.bin_name not in self.bin_name_set:
+ self.bin_name_set.add(obj.bin_name)
+ bin_id_list.append(obj.bin_id)
+
+ # withsubpack=1
+ if self.with_sub_pack == "1":
+ if obj.install_depend_src_name not in self.source_name_set:
+ self.source_name_set.add(
+ obj.install_depend_src_name)
+ source_id_list.append(obj.install_depend_src_id)
+
+ if len(source_id_list) != 0:
+ self.package_bedepend(
+ source_id_list, data_base, package_type="src")
+ if len(bin_id_list) != 0:
+ self.package_bedepend(bin_id_list, data_base, package_type="bin")
+
+ def make_dicts(self, key, source_name, version, parent_node, be_type):
+ """
+ Description: Splicing dictionary function
+ Args:
+ key: dependent bin name
+ source_name: source name
+ version: version
+ parent_node: Rely on package name
+ be_type: dependent type
+ Returns:
+ Raises:
+ """
+ if key not in self.result_dict:
+ self.result_dict[key] = [
+ source_name,
+ version,
+ self.db_name,
+ [
+ [parent_node,
+ be_type
+ ]
+ ]
+ ]
+ else:
+ if [parent_node, be_type] not in self.result_dict[key][-1]:
+ self.result_dict[key][-1].append([
+ parent_node,
+ be_type
+ ])
diff --git a/packageship/packageship/application/apps/package/function/build_depend.py b/packageship/packageship/application/apps/package/function/build_depend.py
new file mode 100644
index 0000000000000000000000000000000000000000..672cbe60ea27a9e98a8936cac6d58f04d78dd66b
--- /dev/null
+++ b/packageship/packageship/application/apps/package/function/build_depend.py
@@ -0,0 +1,226 @@
+#!/usr/bin/python3
+"""
+Description: Find compilation dependency of source package
+class: BuildDepend
+"""
+from packageship.application.apps.package.function.searchdb import SearchDB
+from packageship.application.apps.package.function.install_depend import InstallDepend
+from packageship.application.apps.package.function.constants import ResponseCode, ListNode
+
+
+class BuildDepend():
+ """
+ Description: Find compilation dependency of source package
+ Attributes:
+ pkg_name_list: List of package names
+ db_list: List of database names
+ self_build: Compile dependency conditions
+ history_dict: Query history dict
+ search_db:Query an instance of a database class
+ result_dict:A dictionary to store the data that needs to be echoed
+ source_dict:A dictionary to store the searched source code package name
+ """
+
+ def __init__(self, pkg_name_list, db_list, self_build=0, history_dict=None):
+ """
+ init class
+ """
+ self.pkg_name_list = pkg_name_list
+ self._self_build = self_build
+
+ self.db_list = db_list
+ self.search_db = SearchDB(db_list)
+
+ self.result_dict = dict()
+ self.source_dict = dict()
+
+ self.history_dicts = history_dict if history_dict else {}
+
+ def build_depend_main(self):
+ """
+ Description: Entry function
+ Args:
+ Returns:
+ ResponseCode: response code
+ result_dict: Dictionary of query results
+ source_dict: Dictionary of source code package
+ Raises:
+ """
+ if not self.search_db.db_object_dict:
+ return ResponseCode.DIS_CONNECTION_DB, None, None
+
+ if self._self_build == 0:
+ code = self.build_depend(self.pkg_name_list)
+ if None in self.result_dict:
+ del self.result_dict[None]
+ return code, self.result_dict, None
+
+ if self._self_build == 1:
+ self.self_build(self.pkg_name_list)
+ if None in self.result_dict:
+ del self.result_dict[None]
+ # There are two reasons for the current status code to return SUCCESS
+ # 1, Other branches return three return values.
+ # Here, a place holder is needed to prevent unpacking errors during call
+ # 2, This function is an auxiliary function of other modules.
+ # The status code is not the final display status code
+ return ResponseCode.SUCCESS, self.result_dict, self.source_dict
+
+ return ResponseCode.PARAM_ERROR, None, None
+
+ def build_depend(self, pkg_list):
+ """
+ Description: Compile dependency query
+ Args:
+ pkg_list:You need to find the dependent source package name
+ Returns:
+ ResponseCode: response code
+ Raises:
+ """
+ res_status, build_list = self.search_db.get_build_depend(pkg_list)
+
+ if not build_list:
+ return res_status if res_status == \
+ ResponseCode.DIS_CONNECTION_DB else \
+ ResponseCode.PACK_NAME_NOT_FOUND
+ # create root node and get next search list
+ search_list = self._create_node_and_get_search_list(build_list, pkg_list)
+
+ code, res_dict = \
+ InstallDepend(self.db_list).query_install_depend(search_list,
+ self.history_dicts)
+ if not res_dict:
+ return code
+
+ for k, values in res_dict.items():
+ if k in self.result_dict:
+ if ['root', None] in values[ListNode.PARENT_LIST]:
+ index = values[ListNode.PARENT_LIST].index(['root', None])
+ del values[ListNode.PARENT_LIST][index]
+
+ self.result_dict[k][ListNode.PARENT_LIST].extend(values[ListNode.PARENT_LIST])
+ else:
+ self.result_dict[k] = values
+
+ return ResponseCode.SUCCESS
+
+ def _create_node_and_get_search_list(self, build_list, pkg_list):
+ """
+ Description: To create root node in self.result_dict and
+ return the name of the source package to be found next time
+ Args:
+ build_list:List of binary package names
+ pkg_list: List of binary package names
+ Returns:
+ the name of the source package to be found next time
+ Raises:
+ """
+ search_set = set()
+ search_list = []
+ for obj in build_list:
+ if not obj.search_name:
+ continue
+
+ if obj.search_name + "_src" not in self.result_dict:
+ self.result_dict[obj.search_name + "_src"] = [
+ 'source',
+ obj.search_version,
+ obj.db_name,
+ [
+ ['root', None]
+ ]
+ ]
+ search_set.add(obj.search_name)
+
+ if not obj.bin_name:
+ continue
+
+ if obj.bin_name in self.history_dicts:
+ self.result_dict[obj.bin_name] = [
+ self.history_dicts[obj.bin_name][ListNode.SOURCE_NAME],
+ self.history_dicts[obj.bin_name][ListNode.VERSION],
+ self.history_dicts[obj.bin_name][ListNode.DBNAME],
+ [
+ [obj.search_name, 'build']
+ ]
+ ]
+ else:
+ if obj.bin_name in search_list:
+ self.result_dict[obj.bin_name][ListNode.PARENT_LIST].append([
+ obj.search_name, 'build'
+ ])
+ else:
+ self.result_dict[obj.bin_name] = [
+ obj.source_name,
+ obj.version,
+ obj.db_name,
+ [
+ [obj.search_name, 'build']
+ ]
+ ]
+ search_list.append(obj.bin_name)
+
+ if search_set and len(search_set) != len(pkg_list):
+ temp_set = set(pkg_list) - search_set
+ for name in temp_set:
+ self.result_dict[name + "_src"] = [
+ None,
+ None,
+ 'NOT_FOUND',
+ [
+ ['root', None]
+ ]
+ ]
+ return search_list
+
+ def self_build(self, pkg_name_li):
+ """
+ Description: Using recursion to find compilation dependencies
+ Args:
+ pkg_name_li: Source package name list
+ Returns:
+ Raises:
+ """
+ if not pkg_name_li:
+ return
+
+ next_src_set = set()
+ _, bin_info_lis = self.search_db.get_build_depend(pkg_name_li)
+
+ if not bin_info_lis:
+ return
+
+ # generate data content
+ for obj in bin_info_lis:
+
+ if not obj.bin_name:
+ continue
+ # for first loop, init the source_dict
+ if not self.source_dict:
+ for src_name in pkg_name_li:
+ self.source_dict[src_name] = [obj.db_name, obj.search_version]
+ if obj.bin_name not in self.result_dict:
+ self.result_dict[obj.bin_name] = [
+ obj.source_name if obj.source_name else None,
+ obj.version if obj.version else None,
+ obj.db_name if obj.db_name else "NOT_FOUND",
+ [
+ [obj.search_name, "build"]
+ ]
+ ]
+ else:
+ node = [obj.search_name, "build"]
+ node_list = self.result_dict[obj.bin_name][-1]
+ if node not in node_list:
+ node_list.append(node)
+
+ if obj.source_name and \
+ obj.source_name not in self.source_dict and \
+ obj.source_name not in self.history_dicts:
+ self.source_dict[obj.source_name] = [obj.db_name,
+ obj.version]
+ next_src_set.add(obj.source_name)
+
+ self.self_build(next_src_set)
+
+ return
diff --git a/packageship/packageship/application/apps/package/function/constants.py b/packageship/packageship/application/apps/package/function/constants.py
new file mode 100644
index 0000000000000000000000000000000000000000..7efe41274fccca7c6e135a6abf0af3bb2a0d08be
--- /dev/null
+++ b/packageship/packageship/application/apps/package/function/constants.py
@@ -0,0 +1,81 @@
+#!/usr/bin/python3
+"""
+Description: Response contain and code ID
+class: ListNode, ResponseCode
+"""
+
+
+class ListNode():
+ """
+    Description: Describe the structure of dict:
+ {package_name: [source_name,
+ dbname,
+ [[parent_node_1, depend_type],[parent_node_2, depend_type],...]],
+ check_tag]
+ }
+ changeLog:
+ """
+
+ SOURCE_NAME = 0
+ VERSION = 1
+ DBNAME = 2
+ PARENT_LIST = 3
+ # FOR PARENT LIST:
+ PARENT_NODE = 0
+ DEPEND_TYPE = 1
+
+# response code
+
+
+class ResponseCode():
+ """
+ Description: response code to web
+ changeLog:
+ """
+ # Four digits are common status codes
+ SUCCESS = "2001"
+ PARAM_ERROR = "4001"
+ DB_NAME_ERROR = "4002"
+ PACK_NAME_NOT_FOUND = "4003"
+ CONNECT_DB_ERROR = "4004"
+ INPUT_NONE = "4005"
+ FILE_NOT_FOUND = "4006"
+ # Database operation module error status code
+ DELETE_DB_ERROR = "40051"
+ CONFIGFILE_PATH_EMPTY = "50001"
+ FAILED_CREATE_DATABASE_TABLE = "50002"
+ TYPE_ERROR = "50003"
+ DATA_MERGE_ERROR = "50004"
+ FILE_NOT_FIND_ERROR = "50005"
+ DIS_CONNECTION_DB = "50006"
+
+ CODE_MSG_MAP = {
+ SUCCESS: "Successful Operation!",
+ PARAM_ERROR: "Parameter error, please check the parameter and query again.",
+ DB_NAME_ERROR: "Database does not exist! Please check the database name",
+ PACK_NAME_NOT_FOUND: "Sorry! The querying package does not exist in the databases",
+ CONNECT_DB_ERROR: "Failed to Connect the database! "
+ "Please check the database connection",
+ INPUT_NONE: "The input is None, please check the input value.",
+ FILE_NOT_FOUND: "Database import success file does not exist",
+ DELETE_DB_ERROR: "Failed to delete database",
+ CONFIGFILE_PATH_EMPTY: "Initialization profile does not exist or cannot be found",
+ FAILED_CREATE_DATABASE_TABLE: "Failed to create database or table",
+ TYPE_ERROR: "The source code and binary path types in the initialization file are abnormal",
+ DATA_MERGE_ERROR: "abnormal multi-file database integration",
+ FILE_NOT_FIND_ERROR: "system initialization configuration file does not exist",
+ DIS_CONNECTION_DB: "Unable to connect to the database, check the database configuration"}
+
+ @classmethod
+ def response_json(cls, code, data=None):
+ """
+ Description: classmethod
+ """
+ return {
+ "code": code,
+ "msg": cls.CODE_MSG_MAP[code],
+ "data": data
+ }
+
+ def __str__(self):
+ return 'ResponseCode'
diff --git a/packageship/packageship/application/apps/package/function/install_depend.py b/packageship/packageship/application/apps/package/function/install_depend.py
new file mode 100644
index 0000000000000000000000000000000000000000..8e5c1e7ecf7a665aa0818ddfd6a115ae673b29cb
--- /dev/null
+++ b/packageship/packageship/application/apps/package/function/install_depend.py
@@ -0,0 +1,164 @@
+#!/usr/bin/python3
+"""
+Description: Querying for install dependencies
+ Querying packages install depend for those package can be installed
+class: InstallDepend, DictionaryOperations
+"""
+from packageship.libs.log import Log
+from .searchdb import SearchDB
+from .constants import ResponseCode
+from .constants import ListNode
+
+LOGGER = Log(__name__)
+
+
class InstallDepend():
    """
    Description: query install depend of package
    Attributes:
        db_list: A list of Database name to show the priority
        __search_list: Contain the binary packages searched in the next loop
        binary_dict: Contain all the binary packages info and operation
        __search_db: A object of database which would be connected
    changeLog:
    """
    #pylint: disable = too-few-public-methods
    def __init__(self, db_list):
        """
        Initialization class

        Args:
            db_list: database names ordered by query priority
        """
        # Accumulates every binary package discovered during the search
        self.binary_dict = DictionaryOperations()
        # Work queue: binary names still to be resolved in the next loop
        self.__search_list = []

        self.db_list = db_list
        self.__search_db = SearchDB(db_list)

    def query_install_depend(self, binary_list, history_dicts=None):
        """
        Description: init result dict and determine the loop end point
        Args:
            binary_list: A list of binary rpm package name
            history_dicts: record the searching install depend history,
                default is None
        Returns:
            a (response_code, binary_dict.dictionary) pair where the dict is
            {binary_name: [
                src,
                dbname,
                version,
                [
                    parent_node_package_name
                    'install'
                ]
            ]}
        Raises:
        """
        if not self.__search_db.db_object_dict:
            # no database connection could be established at all
            return ResponseCode.DIS_CONNECTION_DB, None
        if not binary_list:
            return ResponseCode.INPUT_NONE, None
        for binary in binary_list:
            if binary:
                self.__search_list.append(binary)
            else:
                LOGGER.logger.warning("There is a NONE in input value:" + str(binary_list))
        # each pass drains __search_list and may enqueue newly found depends,
        # so the loop ends only when no unresolved names remain
        while self.__search_list:
            self.__query_single_install_dep(history_dicts)
        return ResponseCode.SUCCESS, self.binary_dict.dictionary

    def __query_single_install_dep(self, history_dicts):
        """
        Description: query one batch of install depends and merge them into
            binary_dict; newly discovered depend names are queued for the
            next loop iteration
        Args:
            history_dicts: previously resolved packages (may be None)
        Returns:
            response_code: response code
        Raises:
        """
        result_list = self.__search_db.get_install_depend(self.__search_list)
        # make sure every queried name has at least a placeholder entry
        for search in self.__search_list:
            if search not in self.binary_dict.dictionary:
                self.binary_dict.init_key(key=search, parent_node=[])
        self.__search_list.clear()
        if result_list:
            for result, dbname in result_list:
                # empty parent list means this entry is still a placeholder:
                # re-init it as a root node now that its db info is known
                if not self.binary_dict.dictionary[result.search_name][ListNode.PARENT_LIST]:
                    self.binary_dict.init_key(key=result.search_name,
                                              src=result.search_src_name,
                                              version=result.search_version,
                                              dbname=dbname)
                else:
                    self.binary_dict.update_value(key=result.search_name,
                                                  src=result.search_src_name,
                                                  version=result.search_version,
                                                  dbname=dbname)

                if result.depend_name:
                    if result.depend_name in self.binary_dict.dictionary:
                        # depend already known: only record the extra parent edge
                        self.binary_dict.update_value(key=result.depend_name,
                                                      parent_node=[result.search_name, 'install'])
                    elif history_dicts is not None and result.depend_name in history_dicts:
                        # depend was resolved in an earlier search: reuse its info
                        # instead of querying the database again
                        self.binary_dict.init_key(
                            key=result.depend_name,
                            src=history_dicts[result.depend_name][ListNode.SOURCE_NAME],
                            version=history_dicts[result.depend_name][ListNode.VERSION],
                            dbname=None,
                            parent_node=[[result.search_name, 'install']]
                        )
                    else:
                        # unseen depend: create a stub and queue it for resolution
                        self.binary_dict.init_key(key=result.depend_name,
                                                  parent_node=[[result.search_name, 'install']])
                        self.__search_list.append(result.depend_name)
+
+
class DictionaryOperations():
    """
    Description: helpers for building and updating the install-depend result
        dictionary
    Attributes:
        dictionary: mapping binary name -> [src, version, dbname, parent_list]
    changeLog:
    """

    def __init__(self):
        """Create an empty result dictionary."""
        self.dictionary = {}

    def init_key(self, key, src=None, version=None, dbname=None, parent_node=None):
        """
        Description: create (or reset) the entry for a binary package
        Args:
            key: binary_name
            src: source_name
            version: version
            dbname: databases name
            parent_node: parent_node
        Returns:
            dictionary[key]: [src, version, dbname, parent_node]
        """
        # a known database marks the package as a search root
        parents = [['root', None]] if dbname else parent_node
        self.dictionary[key] = [src, version, dbname, parents]

    def update_value(self, key, src=None, version=None, dbname=None, parent_node=None):
        """
        Description: update individual fields of an existing entry; only the
            arguments that are truthy are written
        Args:
            key: binary_name
            src: source_name
            version: version
            dbname: database name
            parent_node: parent_node
        Returns:
        Raises:
        """
        entry = self.dictionary[key]
        if src:
            entry[ListNode.SOURCE_NAME] = src
        if version:
            entry[ListNode.VERSION] = version
        if dbname:
            entry[ListNode.DBNAME] = dbname
        if parent_node:
            entry[ListNode.PARENT_LIST].append(parent_node)
diff --git a/packageship/packageship/application/apps/package/function/packages.py b/packageship/packageship/application/apps/package/function/packages.py
new file mode 100644
index 0000000000000000000000000000000000000000..7d3ca4557237716ff4559dbec9de9dce2c647596
--- /dev/null
+++ b/packageship/packageship/application/apps/package/function/packages.py
@@ -0,0 +1,208 @@
+#!/usr/bin/python3
+"""
+Description: Get package information and modify package information
+functions: get_packages, buildep_packages, sub_packages, get_single_package,
+ update_single_package, update_maintaniner_info
+"""
+from flask import current_app
+
+from packageship.libs.dbutils import DBHelper
+from packageship.application.models.package import src_pack
+from packageship.application.models.package import pack_provides
+from packageship.application.models.package import maintenance_info
+from packageship.application.models.package import pack_requires
+from packageship.application.models.package import bin_pack
+from packageship.libs.exception import Error
+
+
def get_packages(dbname):
    """
    Description: Get all packages info
    Args:
        dbname: Database name
    Returns:
        Package information is returned as a list of dicts
    Raises:
        AttributeError: Object does not have this property
        Error: Abnormal error
    """
    with DBHelper(db_name=dbname) as data_base:
        src_pack_queryset = data_base.session.query(src_pack).all()
        resp_list = []
        for src_pack_obj in src_pack_queryset:
            # Fix: the original assigned "maintainlevel" twice; the duplicate
            # is collapsed here — the resulting dict is unchanged.
            package = {
                "sourceName": src_pack_obj.name,
                "version": src_pack_obj.version,
                "license": src_pack_obj.license,
                "maintainer": src_pack_obj.Maintaniner,
                "maintainlevel": src_pack_obj.MaintainLevel,
                "sourceURL": src_pack_obj.sourceURL,
                "downloadURL": src_pack_obj.downloadURL,
                "dbname": dbname,
            }
            resp_list.append(package)
        return resp_list
+
+
def buildep_packages(dbname, src_pack_id):
    """
    Description: Query package layer 1 compilation dependency
    Args:
        dbname: databases name
        src_pack_id: The ID of the source package
    Returns:
        builddep: compile dependencies (binary package names) of the source
        package
    Raises:
        AttributeError: Object does not have this property
    """
    with DBHelper(db_name=dbname) as data_base:
        # requires rows of the source package
        requires_set = data_base.session.query(
            pack_requires).filter_by(srcIDkey=src_pack_id).all()
        # provides ids those requires point at
        provide_ids = [req_obj.depProIDkey for req_obj in requires_set]
        provides_set = data_base.session.query(pack_provides).filter(
            pack_provides.id.in_(provide_ids)).all()
        # binary packages supplying the provides
        binary_ids = [pro_obj.binIDkey for pro_obj in provides_set]
        binary_set = data_base.session.query(bin_pack).filter(
            bin_pack.id.in_(binary_ids)).all()
        return [binary_obj.name for binary_obj in binary_set]
+
+
def sub_packages(dbname, src_pack_id):
    """
    Description: Query package layer 1 installation dependency
    Args:
        dbname: databases name
        src_pack_id: srcpackage id
    Returns:
        subpack: Source package to binary package, then find the installation
        dependencies of the binary package; maps each sub-binary name to the
        names of the binaries it installs-depends on
    Raises:
        AttributeError: Object does not have this property
    """
    with DBHelper(db_name=dbname) as data_base:
        subpack = {}
        # All binary packages built from this source package.
        # Fix: the original rebound the very names it was iterating over
        # (i_bin_pack_set / i_bin_pack_ids) inside the loop body, which only
        # worked by accident of Python's iterator semantics; every collection
        # now has its own name.
        sub_bin_set = data_base.session.query(
            bin_pack).filter_by(srcIDkey=src_pack_id).all()
        for sub_bin_obj in sub_bin_set:
            # name is already on the row — no need to re-query it by id
            req_set = data_base.session.query(
                pack_requires).filter_by(binIDkey=sub_bin_obj.id).all()
            provide_ids = list({req_obj.depProIDkey for req_obj in req_set})
            provides_set = data_base.session.query(pack_provides).filter(
                pack_provides.id.in_(provide_ids)).all()
            depend_bin_ids = [pro_obj.binIDkey for pro_obj in provides_set]
            depend_bin_set = data_base.session.query(bin_pack).filter(
                bin_pack.id.in_(depend_bin_ids)).all()
            subpack[sub_bin_obj.name] = [
                dep_obj.name for dep_obj in depend_bin_set]
        return subpack
+
+
def get_single_package(dbname, sourcename):
    """
    Description: Get a single source package's info, including its first-layer
        build depends and sub packages
    Args:
        dbname: Database name
        sourcename: Source package name
    Returns:
        package info dict
    Raises:
        AttributeError: Object does not have this property
    """
    with DBHelper(db_name=dbname) as data_base:
        src_pack_obj = data_base.session.query(src_pack).filter_by(
            name=sourcename).first()
        package = {
            "sourceName": src_pack_obj.name,
            "version": src_pack_obj.version,
            "license": src_pack_obj.license,
            "maintainer": src_pack_obj.Maintaniner,
            "maintainlevel": src_pack_obj.MaintainLevel,
            "sourceURL": src_pack_obj.sourceURL,
            "downloadURL": src_pack_obj.downloadURL,
            "dbname": dbname,
        }
        # first-layer depends are resolved through the sibling helpers
        package['buildDep'] = buildep_packages(dbname, src_pack_obj.id)
        package['subpack'] = sub_packages(dbname, src_pack_obj.id)
        return package
+
+
def update_single_package(
        package_name,
        dbname,
        maintainer,
        maintain_level):
    """
    Description: change single package management
    Args:
        package_name: package name
        dbname: Database name
        maintainer: maintainer info
        maintain_level: maintain_level info
    Returns:
        message success or failed
    Raises:
        AttributeError: Object does not have this property
        TypeError: Abnormal error
    """
    with DBHelper(db_name=dbname) as data_base:
        target = data_base.session.query(
            src_pack).filter_by(name=package_name).first()
        # overwrite maintainer fields in place and persist
        target.Maintaniner = maintainer
        target.MaintainLevel = maintain_level
        data_base.session.commit()
+
+
def update_maintaniner_info(package_name,
                            dbname,
                            maintaniner,
                            maintainlevel):
    """
    Description: update separately maintaniner info
    Args:
        package_name: package name
        dbname: Database name
        maintaniner: maintainer info
        maintainlevel: maintain level info
    Returns:
        message success or failed
    Raises:
        AttributeError: Object does not have this property
        Error: Abnormal error
    """
    with DBHelper(db_name=dbname) as db_helper:
        src_pack_obj = db_helper.session.query(src_pack).filter_by(
            name=package_name).first()
        # Fix: the original dereferenced the query result without a None
        # check, so an unknown package name raised an uncaught
        # AttributeError instead of being logged like the failures below.
        if src_pack_obj is None:
            current_app.logger.error(
                "package %s does not exist in database %s",
                package_name, dbname)
            return
        name = src_pack_obj.name
        version = src_pack_obj.version
    with DBHelper(db_name='maintenance.information') as dbs_name:
        try:
            information_obj = dbs_name.session.query(maintenance_info).filter_by(
                name=package_name, version=version).first()
            if information_obj is None:
                # no record for this (name, version) yet: insert one
                information = maintenance_info(
                    name=name,
                    version=version,
                    maintaniner=maintaniner,
                    maintainlevel=maintainlevel)
                dbs_name.session.add(information)
            else:
                # record exists: overwrite maintainer fields in place
                information_obj.maintaniner = maintaniner
                information_obj.maintainlevel = maintainlevel
            dbs_name.session.commit()
        except (AttributeError, Error) as attri_error:
            current_app.logger.error(attri_error)
            return
diff --git a/packageship/packageship/application/apps/package/function/searchdb.py b/packageship/packageship/application/apps/package/function/searchdb.py
new file mode 100644
index 0000000000000000000000000000000000000000..bba2994b5dea437de38a69d2bf9d51c24c649527
--- /dev/null
+++ b/packageship/packageship/application/apps/package/function/searchdb.py
@@ -0,0 +1,426 @@
+#!/usr/bin/python3
+"""
+Description: A set for all query databases function
+class: SearchDB
+functions: db_priority
+"""
+from collections import namedtuple
+
+import yaml
+from flask import current_app
+from sqlalchemy import text
+from sqlalchemy.exc import SQLAlchemyError, DisconnectionError
+from sqlalchemy.sql import literal_column
+
+from packageship.libs.dbutils import DBHelper
+from packageship.libs.log import Log
+from packageship.application.models.package import bin_pack
+from packageship.libs.exception import ContentNoneException, Error
+from packageship.system_config import DATABASE_FILE_INFO
+from .constants import ResponseCode
+
+LOGGER = Log(__name__)
+
+
class SearchDB():
    """
    Description: query in database
    Attributes:
        db_list: Database list
        db_object_dict: A dictionary for storing database connection objects
    changeLog:
    """
    def __new__(cls, *args, **kwargs):
        # pylint: disable=w0613
        # Singleton: every SearchDB(...) call shares one instance.
        # NOTE(review): __init__ still runs on every call, so a later
        # db_list silently re-binds the shared instance's connections —
        # confirm this is intended before relying on it.
        if not hasattr(cls, "_instance"):
            cls._instance = super(SearchDB, cls).__new__(cls)
        return cls._instance

    def __init__(self, db_list):
        """
        Open a connection object for every database in db_list; databases
        that cannot be reached are skipped and the error is logged.
        """
        self.db_object_dict = dict()
        for db_name in db_list:
            try:
                with DBHelper(db_name=db_name) as data_base:
                    self.db_object_dict[db_name] = data_base
            except DisconnectionError as connection_error:
                current_app.logger.error(connection_error)

    def get_install_depend(self, binary_list):
        """
        Description: get a package install depend from database:
                     binary_name -> binary_id -> requires_set -> requires_id_set -> provides_set
                     -> install_depend_binary_id_key_list -> install_depend_binary_name_list
        Args:
            binary_list: a list of binary package name
        Returns:
            install depend list: (row, db_name) pairs; names found in no
            database are returned with the marker 'NOT FOUND'
        Raises:
        """
        result_list = []
        get_list = []
        if not self.db_object_dict:
            LOGGER.logger.warning("Unable to connect to the database, \
                check the database configuration")
            return result_list
        if None in binary_list:
            binary_list.remove(None)
        search_set = set(binary_list)
        if not search_set:
            LOGGER.logger.warning(
                "The input is None, please check the input value.")
            return result_list
        # databases are walked in priority order; names found in an earlier
        # database are removed from search_set and not looked up again
        for db_name, data_base in self.db_object_dict.items():
            try:
                name_in = literal_column('name').in_(search_set)
                sql_com = text("""
             SELECT DISTINCT
             bin_pack.NAME AS depend_name,
             bin_pack.version AS depend_version,
             s2.NAME AS depend_src_name,
             bin.NAME AS search_name,
             s1.`name` AS search_src_name,
             s1.version AS search_version
             FROM
             ( SELECT id, NAME,srcIDkey FROM bin_pack WHERE {} ) bin
             LEFT JOIN pack_requires ON bin.id = pack_requires.binIDkey
             LEFT JOIN pack_provides ON pack_provides.id = pack_requires.depProIDkey
             LEFT JOIN bin_pack ON bin_pack.id = pack_provides.binIDkey
             LEFT JOIN src_pack s1 ON s1.id = bin.srcIDkey
             LEFT JOIN src_pack s2 ON s2.id = bin_pack.srcIDkey;""".format(name_in))
                install_set = data_base.session. \
                    execute(sql_com, {'name_{}'.format(i): v
                                      for i, v in enumerate(search_set, 1)}).fetchall()
                if install_set:
                    # find search_name in db_name
                    # depend_name's db_name will be found in next loop
                    for result in install_set:
                        result_list.append((result, db_name))
                        get_list.append(result.search_name)
                    get_set = set(get_list)
                    get_list.clear()
                    search_set.symmetric_difference_update(get_set)
                    if not search_set:
                        return result_list
                else:
                    continue
            except AttributeError as error_msg:
                LOGGER.logger.error(error_msg)
            except SQLAlchemyError as error_msg:
                LOGGER.logger.error(error_msg)
        # anything still in search_set was found in no database
        return_tuple = namedtuple('return_tuple',
                                  'depend_name depend_version depend_src_name \
                                  search_name search_src_name search_version')
        for binary_name in search_set:
            result_list.append((return_tuple(None, None, None,
                                             binary_name, None, None), 'NOT FOUND'))
        return result_list

    def get_src_name(self, binary_name):
        """
        Description: get a package source name from database:
                     bianry_name -> binary_source_name -> source_name
        Args:
            binary_name: search package's name, database preority list
        Returns:
            (response_code, db_name, source_name, source_version) — always a
            4-tuple on every path
        Raises:
            AttributeError: The object does not have this property
            SQLAlchemyError: sqlalchemy error
        """
        for db_name, data_base in self.db_object_dict.items():
            try:
                bin_obj = data_base.session.query(bin_pack).filter_by(
                    name=binary_name
                ).first()
                source_name = bin_obj.src_pack.name
                source_version = bin_obj.src_pack.version
                if source_name is not None:
                    return ResponseCode.SUCCESS, db_name, \
                        source_name, source_version
            except AttributeError as error_msg:
                LOGGER.logger.error(error_msg)
            except SQLAlchemyError as error_msg:
                LOGGER.logger.error(error_msg)
                # Bug fix: this path used to return a 2-tuple while callers
                # unpack four values, which raised ValueError at the call
                # site; pad it to the common 4-tuple shape.
                return ResponseCode.DIS_CONNECTION_DB, None, None, None
        return ResponseCode.PACK_NAME_NOT_FOUND, None, None, None

    def get_sub_pack(self, source_name_list):
        """
        Description: get a subpack list based on source name list:
                     source_name -> source_name_id -> binary_name
        Args:
            source_name_list: search package's name, database preority list
        Returns:
            (response_code, result_list) where result_list holds
            (subpack row, db_name) pairs; unmatched names get 'NOT_FOUND'
        Raises:
            AttributeError: The object does not have this property
            SQLAlchemyError: sqlalchemy error
        """
        if not self.db_object_dict:
            return ResponseCode.DIS_CONNECTION_DB, None

        if None in source_name_list:
            source_name_list.remove(None)
        search_set = set(source_name_list)
        result_list = []
        get_list = []
        if not search_set:
            return ResponseCode.INPUT_NONE, None
        for db_name, data_base in self.db_object_dict.items():
            try:
                name_in = literal_column('name').in_(search_set)
                sql_com = text('''SELECT
                    t1.NAME as subpack_name,
                    t2.version as search_version,
                    t2.NAME as search_name
                    FROM bin_pack t1, src_pack t2
                    WHERE
                    t2.id = t1.srcIDkey
                    AND t2.{}
                    '''.format(name_in))
                subpack_tuple = data_base.session. \
                    execute(sql_com, {'name_{}'.format(i): v
                                      for i, v in enumerate(search_set, 1)}).fetchall()
                if subpack_tuple:
                    for result in subpack_tuple:
                        result_list.append((result, db_name))
                        get_list.append(result.search_name)
                    search_set.symmetric_difference_update(set(get_list))
                    get_list.clear()
                    if not search_set:
                        return ResponseCode.SUCCESS, result_list
                else:
                    continue
            except AttributeError as attr_error:
                current_app.logger.error(attr_error)
            except SQLAlchemyError as sql_error:
                current_app.logger.error(sql_error)
        return_tuple = namedtuple(
            'return_tuple', 'subpack_name search_version search_name')
        for search_name in search_set:
            LOGGER.logger.warning("Can't not find " +
                                  search_name + " subpack in all database")
            result_list.append(
                (return_tuple(None, None, search_name), 'NOT_FOUND'))
        return ResponseCode.SUCCESS, result_list

    def get_binary_in_other_database(self, not_found_binary, db_):
        """
        Description: Binary package name data not found in
                     the current database, go to other databases to try
        Args:
            not_found_binary: not_found_build These data cannot be found in the current database
            db_: current database name
        Returns:
            a list :[(search_name,source_name,bin_name,
                      bin_version,db_name,search_version,req_name),
                     (search_name,source_name,bin_name,
                      bin_version,db_name,search_version,req_name),]
        Raises:
            AttributeError: The object does not have this property
            SQLAlchemyError: sqlalchemy error
        """
        if not not_found_binary:
            return []

        return_tuple = namedtuple("return_tuple", [
            "search_name",
            "source_name",
            "bin_name",
            "version",
            "db_name",
            "search_version",
            "req_name"
        ])
        src_req_map = {req_: src for src, req_ in not_found_binary}

        local_search_set = {req_ for _, req_ in not_found_binary}

        local_dict = {k: v for k, v in self.db_object_dict.items() if k != db_}
        res = []

        for db_name, data_base in local_dict.items():
            try:
                sql_string = text("""
                    SELECT
                    t3.NAME AS source_name,
                    t1.NAME AS bin_name,
                    t1.version,
                    t3.version as search_version,
                    t2.NAME AS req_name
                    FROM
                    bin_pack t1,
                    pack_provides t2,
                    src_pack t3
                    WHERE
                    t2.{}
                    AND t1.id = t2.binIDkey
                    AND t1.srcIDkey = t3.id;
                    """.format(literal_column('name').in_(local_search_set)))
                build_set_2 = data_base.session. \
                    execute(sql_string, {'name_{}'.format(i): v
                                         for i, v in enumerate(local_search_set, 1)}).fetchall()
                if not build_set_2:
                    continue

                # 'row' instead of 'bin_pack': the original comprehension
                # variable shadowed the imported bin_pack model
                new_rows = [return_tuple(
                    src_req_map.get(row.req_name),
                    row.source_name,
                    row.bin_name,
                    row.version,
                    db_name,
                    row.search_version,
                    row.req_name
                ) for row in build_set_2 if row.bin_name]
                res.extend(new_rows)

                # Bug fix: the original iterated the WHOLE accumulated `res`
                # after every database and used set.remove(), which raised
                # KeyError for req_names already removed (duplicate providers
                # in one database, or hits from an earlier database).  Only
                # the fresh rows are processed, and discard() tolerates
                # repeats.
                for obj in new_rows:
                    local_search_set.discard(obj.req_name)

            except AttributeError as attr_error:
                current_app.logger.error(attr_error)
            except SQLAlchemyError as sql_error:
                current_app.logger.error(sql_error)
        return res

    def get_build_depend(self, source_name_li):
        """
        Description: get a package build depend from database
        Args:
            source_name_li: search package's name list
        Returns:
            all source pkg build depend list
            structure :[(search_name,source_name,bin_name,bin_version,db_name,search_version),
                        (search_name,source_name,bin_name,bin_version,db_name,search_version),]

        Raises:
            AttributeError: The object does not have this property
            SQLAlchemyError: sqlalchemy error
        """
        # pylint: disable=R0914
        return_tuple = namedtuple("return_tuple", [
            "search_name",
            "source_name",
            "bin_name",
            "version",
            "db_name",
            "search_version"
        ])

        s_name_set = set(source_name_li)
        if not s_name_set:
            return ResponseCode.PARAM_ERROR, None

        not_found_binary = set()
        build_list = []

        for db_name, data_base in self.db_object_dict.items():
            try:
                sql_com = text("""SELECT DISTINCT
                    src.NAME AS search_name,
                    src.version AS search_version,
                    s2.NAME AS source_name,
                    pack_provides.binIDkey AS bin_id,
                    pack_requires.NAME AS req_name,
                    bin_pack.version AS version,
                    bin_pack.NAME AS bin_name
                    FROM
                    ( SELECT id, NAME,version FROM src_pack WHERE {} ) src
                    LEFT JOIN pack_requires ON src.id = pack_requires.srcIDkey
                    LEFT JOIN pack_provides ON pack_provides.id = pack_requires.depProIDkey
                    LEFT JOIN bin_pack ON bin_pack.id = pack_provides.binIDkey
                    LEFT JOIN src_pack s1 ON s1.id = pack_requires.srcIDkey
                    LEFT JOIN src_pack s2 ON bin_pack.srcIDkey = s2.id;
                    """.format(literal_column("name").in_(s_name_set)))

                build_set = data_base.session. \
                    execute(sql_com, {'name_{}'.format(i): v
                                      for i, v in enumerate(s_name_set, 1)}).fetchall()

                if not build_set:
                    continue

                # When processing source package without compilation dependency
                to_remove_obj_index = []
                for index, b_pack in enumerate(build_set):
                    if not b_pack.source_name and not b_pack.req_name:
                        obj = return_tuple(
                            b_pack.search_name,
                            b_pack.source_name,
                            b_pack.bin_name,
                            b_pack.version,
                            db_name,
                            b_pack.search_version
                        )

                        build_list.append(obj)
                        to_remove_obj_index.append(index)

                # pop from the back so earlier indices stay valid
                for i in reversed(to_remove_obj_index):
                    build_set.pop(i)

                if not build_set:
                    continue

                # 'row' instead of 'bin_pack': avoid shadowing the model
                build_list.extend([
                    return_tuple(
                        row.search_name,
                        row.source_name,
                        row.bin_name,
                        row.version,
                        db_name,
                        row.search_version
                    ) for row in build_set if row.bin_id and row.bin_name
                ])
                # Component name can't find its binary package name
                not_found_binary.update([(row.search_name, row.req_name)
                                         for row in build_set if not row.bin_id])

                s_name_set -= {row.search_name for row in build_set
                               if row.bin_id}

                if not not_found_binary and not s_name_set:
                    return ResponseCode.SUCCESS, build_list

                for obj in self.get_binary_in_other_database(not_found_binary, db_name):
                    build_list.append(obj)

                not_found_binary.clear()

            except AttributeError as attr_error:
                current_app.logger.error(attr_error)
            except SQLAlchemyError as sql_error:
                current_app.logger.error(sql_error)
                return ResponseCode.DIS_CONNECTION_DB, None
        return ResponseCode.SUCCESS, build_list
+
+
def db_priority():
    """
    Description: Read yaml file, return database name, according to priority
    Args:
    Returns:
        db_list: database name list, ordered by ascending priority
    Raises:
        FileNotFoundError: file cannot be found
        Error: abnormal error
    """
    try:
        with open(DATABASE_FILE_INFO, 'r', encoding='utf-8') as file_context:
            init_database_date = yaml.load(
                file_context.read(), Loader=yaml.FullLoader)
            if init_database_date is None:
                raise ContentNoneException(
                    "The content of the database initialization configuration file cannot be empty")
            # smaller priority value means the database is queried first
            init_database_date.sort(key=lambda entry: entry['priority'])
            return [entry.get('database_name') for entry in init_database_date]
    except (FileNotFoundError, Error) as file_not_found:
        current_app.logger.error(file_not_found)
        return None
diff --git a/packageship/packageship/application/apps/package/function/self_depend.py b/packageship/packageship/application/apps/package/function/self_depend.py
new file mode 100644
index 0000000000000000000000000000000000000000..e63d97c77d09fe5b43e3d17a1628b55d091039ec
--- /dev/null
+++ b/packageship/packageship/application/apps/package/function/self_depend.py
@@ -0,0 +1,313 @@
+#!/usr/bin/python3
+"""
+Description: Querying for self dependencies
+ Querying packages install and build depend for those package can be
+ build and install
+class: SelfDepend, DictionaryOperations
+"""
+
+import copy
+from packageship.libs.log import Log
+from .searchdb import SearchDB
+from .constants import ResponseCode
+from .constants import ListNode
+from .install_depend import InstallDepend as install_depend
+from .build_depend import BuildDepend as build_depend
+
+LOGGER = Log(__name__)
+
+
class SelfDepend():
    """
    Description:
        Querying for self dependencies
        Querying packages install and build depend for those package can be
        build and install
    Attributes:
        db_list: list of database names
        binary_dict: Contain all the binary packages info and operation
        source_dicts: Contain all the source packages info and operation
        result_tmp: restore the return result dict
        search_install_list: Contain the binary packages searched install dep in the next loop
        search_build_list: Contain the source packages searched build dep in the next loop
        search_subpack_list: Contain the source packages searched subpack in the next loop
        withsubpack: 1 when subpackages should be expanded, otherwise 0
        search_db: A object of database which would be connected
    """
    def __init__(self, db_list):
        """
        init class
        """
        self.binary_dict = DictionaryOperations()
        self.source_dicts = DictionaryOperations()
        self.result_tmp = dict()
        self.search_install_list = []
        self.search_build_list = []
        self.search_subpack_list = []
        self.withsubpack = 0
        self.db_list = db_list
        self.search_db = SearchDB(db_list)

    def query_depend(self, packname, selfbuild, withsubpack, packtype='binary'):
        """
        Description: init result dict and determine the loop end point
        Args:
            packname: Package name
            selfbuild: selfbuild
            withsubpack: withsubpack
            packtype: package type
        Returns:
            (response_code, binary_dict.dictionary, source_dicts.dictionary)
            — always a 3-tuple
        Raises:
        """
        if not self.search_db.db_object_dict:
            return ResponseCode.DIS_CONNECTION_DB, None, None
        if not packname:
            # Bug fix: this exit used to return a bare status code while
            # every other exit returns a 3-tuple, breaking callers that
            # unpack three values.
            return ResponseCode.INPUT_NONE, None, None

        self.withsubpack = withsubpack
        response_code = self.init_dict(packname, packtype)
        if response_code != ResponseCode.SUCCESS:
            return response_code, self.binary_dict.dictionary, self.source_dicts.dictionary

        # seed the work queues from the initial package(s)
        for key, _ in self.binary_dict.dictionary.items():
            self.search_install_list.append(key)
        for key, _ in self.source_dicts.dictionary.items():
            self.search_build_list.append(key)
            if self.withsubpack == 1:
                self.search_subpack_list.append(key)

        # keep expanding until no queue has unresolved names left
        while self.search_build_list or self.search_install_list or self.search_subpack_list:
            if self.search_install_list:
                self.query_install()
            if self.withsubpack == 1 and self.search_subpack_list:
                self.with_subpack()
            if self.search_build_list:
                self.query_build(selfbuild)
        return response_code, self.binary_dict.dictionary, self.source_dicts.dictionary

    def init_dict(self, packname, packtype):
        """
        Description: init result dict for either a source or a binary package
        Args:
            packname: package name
            packtype: package type ('source' or 'binary')
        Returns:
            response_code
        Raises:
        """
        if packtype == 'source':
            response_code, subpack_list = self.search_db.get_sub_pack([packname])
            if subpack_list:
                for subpack_tuple, dbname in subpack_list:
                    self.source_dicts.append_src(packname, dbname, subpack_tuple.search_version)
                    if dbname != 'NOT_FOUND':
                        self.binary_dict.append_bin(key=subpack_tuple.subpack_name,
                                                    src=packname,
                                                    version=subpack_tuple.search_version,
                                                    dbname=dbname)
                    else:
                        return ResponseCode.PACK_NAME_NOT_FOUND

        else:
            response_code, dbname, source_name, version = \
                self.search_db.get_src_name(packname)
            if response_code != ResponseCode.SUCCESS:
                return response_code
            self.source_dicts.append_src(source_name, dbname, version)
            self.binary_dict.append_bin(key=packname,
                                        src=source_name,
                                        version=version,
                                        dbname=dbname)
        return response_code

    def query_install(self):
        """
        Description: query install depend for the queued binaries and merge
            new packages into the result dictionaries
        Args:
        Returns:
        Raises:
        """
        self.result_tmp.clear()
        _, self.result_tmp = \
            install_depend(self.db_list).query_install_depend(self.search_install_list,
                                                              self.binary_dict.dictionary)
        self.search_install_list.clear()
        for key, values in self.result_tmp.items():
            if key in self.binary_dict.dictionary:
                # drop the artificial root marker before merging parents
                if ['root', None] in values[ListNode.PARENT_LIST]:
                    index = values[ListNode.PARENT_LIST].index(['root', None])
                    del values[ListNode.PARENT_LIST][index]
                self.binary_dict.update_value(key=key, parent_list=values[ListNode.PARENT_LIST])
            else:
                if not key:
                    continue
                self.binary_dict.dictionary[key] = copy.deepcopy(values)
                source_name = values[ListNode.SOURCE_NAME]
                if not source_name:
                    LOGGER.logger.warning("source name is None")
                if source_name and source_name not in self.source_dicts.dictionary:
                    self.source_dicts.append_src(key=source_name,
                                                 dbname=values[ListNode.DBNAME],
                                                 version=values[ListNode.VERSION])
                    self.search_build_list.append(source_name)
                    if self.withsubpack == 1:
                        self.search_subpack_list.append(source_name)

    def with_subpack(self):
        """
        Description: query subpackages of the queued source packages and
            queue the new binaries for install-depend resolution
        Args:
        Returns:
        Raises:
        """
        if None in self.search_subpack_list:
            LOGGER.logger.warning("There is a NONE in input value:" + \
                                  str(self.search_subpack_list))
            self.search_subpack_list.remove(None)
        _, result_list = self.search_db.get_sub_pack(self.search_subpack_list)
        for subpack_tuple, dbname in result_list:
            if dbname != 'NOT_FOUND':
                if subpack_tuple.subpack_name not in self.binary_dict.dictionary:
                    self.binary_dict.append_bin(key=subpack_tuple.subpack_name,
                                                src=subpack_tuple.search_name,
                                                version=subpack_tuple.search_version,
                                                dbname=dbname,
                                                parent_node=[subpack_tuple.search_name, 'Subpack'])
                    self.search_install_list.append(subpack_tuple.subpack_name)
        self.search_subpack_list.clear()

    def query_build(self, selfbuild):
        """
        Description: query build depend, dispatching on the selfbuild flag
        Args:
            selfbuild: selfbuild
        Returns:
        Raises:
        """
        self.result_tmp.clear()
        if selfbuild == 0:
            self.query_builddep()
        else:
            self.query_selfbuild()

    def query_builddep(self):
        """
        Description: for selfbuild == 0, query build depend of the queued
            source packages
        Args:
        Returns:
        Raises:
        """
        _, self.result_tmp, _ = build_depend(
            self.search_build_list,
            self.db_list,
            self_build=0,
            history_dict=self.binary_dict.dictionary
        ).build_depend_main()

        self.search_build_list.clear()
        for key, values in self.result_tmp.items():
            if not key:
                LOGGER.logger.warning("key is NONE for value = " + str(values))
                continue
            if key not in self.binary_dict.dictionary and values[0] != 'source':
                self.binary_dict.dictionary[key] = copy.deepcopy(values)
                if self.withsubpack == 1:
                    source_name = values[ListNode.SOURCE_NAME]
                    if not source_name:
                        LOGGER.logger.warning("source name is None")
                    if source_name and source_name not in self.source_dicts.dictionary:
                        self.source_dicts.append_src(key=source_name,
                                                     dbname=values[ListNode.DBNAME],
                                                     version=values[ListNode.VERSION])
                        self.search_subpack_list.append(source_name)
            elif key in self.binary_dict.dictionary:
                self.binary_dict.update_value(key=key, parent_list=values[ListNode.PARENT_LIST])

    def query_selfbuild(self):
        """
        Description: for selfbuild == 1, query selfbuild depend
        Args:
        Returns:
        """
        _, self.result_tmp, source_dicts_tmp = build_depend(
            self.search_build_list,
            self.db_list,
            self_build=1,
            history_dict=self.source_dicts.dictionary
        ).build_depend_main()

        for key, values in self.result_tmp.items():
            if not key:
                LOGGER.logger.warning("key is NONE for value = " + str(values))
                continue
            if key in self.binary_dict.dictionary:
                self.binary_dict.update_value(key=key, parent_list=values[ListNode.PARENT_LIST])
            else:
                self.binary_dict.dictionary[key] = copy.deepcopy(values)
                self.search_install_list.append(key)
        for key, values in source_dicts_tmp.items():
            if not key:
                LOGGER.logger.warning("key is NONE for value = " + str(values))
                continue
            if key not in self.source_dicts.dictionary:
                self.source_dicts.dictionary[key] = copy.deepcopy(values)
                # Bug fix: the original compared the bound method
                # self.with_subpack to 1 (always False), so subpackages of
                # newly found sources were never queued; the attribute
                # self.withsubpack is the intended flag.
                if self.withsubpack == 1:
                    self.search_subpack_list.append(key)
        self.search_build_list.clear()
+
+
class DictionaryOperations():
    """
    Description: Related to dictionary operations, creating dictionary, append dictionary
    Attributes:
        dictionary: dict holding the accumulated package info
    """

    def __init__(self):
        """Create an empty result dictionary."""
        self.dictionary = {}

    def append_src(self, key, dbname, version):
        """
        Description: Appending source dictionary
        Args:
            key: source name
            dbname: database name
            version: version
        Returns:
        Raises:
        """
        self.dictionary[key] = [dbname, version]

    def append_bin(self, key, src=None, version=None, dbname=None, parent_node=None):
        """
        Description: Appending binary dictionary
        Args:
            key: binary name
            src: source name
            version: version
            dbname: database name
            parent_node: parent node
        Returns:
        Raises:
        """
        # without an explicit parent the entry is treated as a search root
        parents = [parent_node] if parent_node else [['root', None]]
        self.dictionary[key] = [src, version, dbname, parents]

    def update_value(self, key, parent_list=None):
        """
        Description: extend an existing entry's parent list
        Args:
            key: binary name
            parent_list: parent list
        Returns:
        Raises:
        """
        if parent_list:
            self.dictionary[key][ListNode.PARENT_LIST].extend(parent_list)
diff --git a/packageship/packageship/application/apps/package/serialize.py b/packageship/packageship/application/apps/package/serialize.py
new file mode 100644
index 0000000000000000000000000000000000000000..ac86ce9aaeed9629ca35393da5b4656d4608173a
--- /dev/null
+++ b/packageship/packageship/application/apps/package/serialize.py
@@ -0,0 +1,189 @@
+#!/usr/bin/python3
+"""
+Description: marshmallow serialize
+"""
+from marshmallow import Schema
+from marshmallow import fields
+from marshmallow import ValidationError
+from marshmallow import validate
+
+
class PackagesSchema(Schema):
    """
    Description: PackagesSchema serialize
    """
    # Optional target database name, capped at 50 characters
    dbName = fields.Str(
        required=False,
        allow_none=True,
        validate=validate.Length(max=50))
+
+
class GetpackSchema(Schema):
    """
    Description: GetpackSchema serialize
    """
    # Mandatory source package name
    sourceName = fields.Str(
        required=True,
        validate=validate.Length(min=1, max=200))
    # Optional database name to restrict the query to
    dbName = fields.Str(
        required=False,
        allow_none=True,
        validate=validate.Length(max=30))
    # Optional version string
    version = fields.Str(
        required=False,
        allow_none=True,
        validate=validate.Length(max=200))
+
+
def validate_maintainlevel(maintainlevel):
    """
    Description: Validate the maintainLevel request field
    Args:
        maintainlevel: maintain level value taken from the request
    Returns:
        None when the value is one of '1'..'4'
    Raises:
        ValidationError: Test failed
    """
    if maintainlevel not in ('1', '2', '3', '4'):
        raise ValidationError("maintainLevel is illegal data ")
+
+
class PutpackSchema(Schema):
    """
    Description: PutpackSchema serialize
    """
    # Mandatory source package name
    sourceName = fields.Str(
        required=True,
        validate=validate.Length(min=1, max=200))
    # Mandatory database name
    dbName = fields.Str(
        required=True,
        validate=validate.Length(min=1, max=50))
    # Optional maintainer name
    maintainer = fields.Str(
        required=False,
        allow_none=True,
        validate=validate.Length(max=50))
    # Optional maintain level, restricted to '1'..'4'
    maintainlevel = fields.Str(
        required=False,
        allow_none=True,
        validate=validate_maintainlevel)
+
+
class InstallDependSchema(Schema):
    """
    Description: InstallDependSchema
    """
    # Binary package whose install dependencies are queried
    binaryName = fields.Str(
        required=True,
        validate=validate.Length(min=1, max=500))
    # Optional list of database names to search, in priority order
    db_list = fields.List(fields.String(), required=False, allow_none=True)
+
+
class BuildDependSchema(Schema):
    """
    Description: BuildDependSchema serialize
    """
    # Source package whose build dependencies are queried
    sourceName = fields.Str(
        required=True,
        validate=validate.Length(min=1, max=200))
    # Optional list of database names to search, in priority order
    db_list = fields.List(fields.String(), required=False, allow_none=True)
+
+
def validate_withsubpack(withsubpack):
    """
    Description: Validate the withSubpack request field
    Args:
        withsubpack: withsubpack value taken from the request
    Returns:
        None when the value is '0' or '1'
    Raises:
        ValidationError: Test failed
    """
    if withsubpack not in ('0', '1'):
        raise ValidationError("withSubpack is illegal data ")
+
+
class BeDependSchema(Schema):
    """
    Description: BeDependSchema serialize
    """
    # Package whose reverse dependencies are queried
    packagename = fields.Str(
        required=True,
        validate=validate.Length(min=1, max=200))
    # Optional flag ('0'/'1') controlling sub-package expansion
    withsubpack = fields.Str(
        required=False,
        allow_none=True,
        validate=validate_withsubpack)
    # Mandatory single database name
    dbname = fields.Str(
        required=True,
        validate=validate.Length(min=1, max=50))
+
+
def validate_selfbuild(selfbuild):
    """
    Description: Validate the selfbuild request field ('0' or '1')
    Raises:
        ValidationError: Test failed
    """
    if selfbuild not in ('0', '1'):
        raise ValidationError("selfbuild is illegal data ")
+
+
def validate_packtype(packtype):
    """
    Description: Validate the packtype request field ('source' or 'binary')
    Raises:
        ValidationError: Test failed
    """
    if packtype not in ('source', 'binary'):
        raise ValidationError("packtype is illegal data ")
+
+
class SelfDependSchema(Schema):
    """
    Description: SelfDependSchema serialize
    """
    # Package whose full dependency closure is queried
    packagename = fields.Str(
        required=True,
        validate=validate.Length(min=1, max=200))
    # Optional list of database names to search, in priority order
    db_list = fields.List(fields.String(), required=False, allow_none=True)
    # Optional flag ('0'/'1') to include self-build dependencies
    selfbuild = fields.Str(
        required=False,
        allow_none=True,
        validate=validate_selfbuild)
    # Optional flag ('0'/'1') controlling sub-package expansion
    withsubpack = fields.Str(
        required=False,
        allow_none=True,
        validate=validate_withsubpack)
    # Optional package kind: 'source' or 'binary'
    packtype = fields.Str(
        required=False,
        allow_none=True,
        validate=validate_packtype)
+
+
class DeletedbSchema(Schema):
    """
    Description: DeletedbSchema serialize
    """
    # Name of the database to delete
    dbName = fields.Str(
        required=True,
        validate=validate.Length(min=1, max=200))
+
+
def have_err_db_name(db_list, db_priority):
    """
    Description: Check whether the request names an unknown database
    Args:
        db_list: database names supplied by the caller
        db_priority: the configured (known) database names
    Returns:
        True if any name in db_list is not present in db_priority,
        otherwise False
    Raises:
    """
    # Generator expression instead of filter+lambda; any() short-circuits
    # on the first unknown name.
    return any(db_name not in db_priority for db_name in db_list)
+
+
class InitSystemSchema(Schema):
    """
    Description: InitSystemSchema serialize
    """
    # Optional path of the initialization configuration file
    configfile = fields.Str(
        required=False,
        allow_none=True,
        validate=validate.Length(max=50))
diff --git a/packageship/packageship/application/apps/package/url.py b/packageship/packageship/application/apps/package/url.py
new file mode 100644
index 0000000000000000000000000000000000000000..5a08213485915ab4762a6aa7adca453bfa47e89f
--- /dev/null
+++ b/packageship/packageship/application/apps/package/url.py
@@ -0,0 +1,36 @@
+#!/usr/bin/python3
+"""
+Description: url set
+"""
+from . import view
+
# Route table consumed by the app factory: each entry is
# (resource class, url rule, access map).  The access map tags HTTP verbs by
# permission role ('query' = read service, 'write' = manage service).
# NOTE(review): ('GET'), ('PUT'), etc. are plain strings, NOT one-element
# tuples — the trailing comma is missing if tuples were intended.  Confirm how
# consumers unpack these values before changing.
urls = [
    # Get all packages' info
    (view.Packages, '/packages', {'query': ('GET')}),


    # Query and update a package info
    (view.SinglePack, '/packages/findByPackName',
     {'query': ('GET'), 'write': ('PUT')}),

    # Query a package's install depend(support querying in one or more databases)
    (view.InstallDepend, '/packages/findInstallDepend', {'query': ('POST')}),

    # Query a package's build depend(support querying in one or more databases)

    (view.BuildDepend, '/packages/findBuildDepend', {'query': ('POST')}),

    # Query a package's all dependencies including install and build depend
    # (support quering a binary or source package in one or more databases)
    (view.SelfDepend, '/packages/findSelfDepend', {'query': ('POST')}),

    # Query a package's all be dependencies including be installed and built depend
    (view.BeDepend, '/packages/findBeDepend', {'query': ('POST')}),

    # Get all imported databases, import new databases and update existed databases

    (view.Repodatas, '/repodatas', {'query': ('GET'), 'write': ('DELETE')}),

    # Reload database
    (view.InitSystem, '/initsystem', {'write': ('POST')})
]
diff --git a/packageship/packageship/application/apps/package/view.py b/packageship/packageship/application/apps/package/view.py
new file mode 100644
index 0000000000000000000000000000000000000000..2058738f7738dfce5c17d17b4a8ef1e36889b879
--- /dev/null
+++ b/packageship/packageship/application/apps/package/view.py
@@ -0,0 +1,689 @@
+#!/usr/bin/python3
+"""
+description: Interface processing
+class: BeDepend, BuildDepend, InitSystem, InstallDepend, Packages,
+Repodatas, SelfDepend, SinglePack
+"""
+import yaml
+from flask import request
+from flask import jsonify
+from flask import current_app
+from flask_restful import Resource
+from sqlalchemy.exc import DisconnectionError
+
+from packageship.application.initsystem.data_import import InitDataBase
+from packageship.libs.configutils.readconfig import ReadConfig
+from packageship.libs.exception import Error
+from packageship.libs.exception import ContentNoneException
+from packageship.libs.exception import DataMergeException
+from packageship.libs.log import Log
+from packageship.system_config import DATABASE_FILE_INFO
+from .function.constants import ResponseCode
+from .function.packages import get_packages
+from .function.packages import update_single_package
+from .function.packages import update_maintaniner_info
+from .function.packages import get_single_package
+from .function.searchdb import db_priority
+from .serialize import PackagesSchema
+from .serialize import GetpackSchema
+from .serialize import PutpackSchema
+from .serialize import DeletedbSchema
+from .serialize import InitSystemSchema
+from .serialize import BeDependSchema
+from .function.be_depend import BeDepend as be_depend
+from .function.install_depend import InstallDepend as installdepend
+from .function.build_depend import BuildDepend as builddepend
+from .function.self_depend import SelfDepend as self_depend
+from .serialize import InstallDependSchema
+from .serialize import BuildDependSchema
+from .serialize import SelfDependSchema
+from .serialize import have_err_db_name
+
+LOGGER = Log(__name__)
+#pylint: disable = no-self-use
+
+
class Packages(Resource):
    """
    Description: interface for package info management
    Restful API: get
    changeLog:
    """

    def get(self):
        """
        Description: Get all package info from a database
        Args:
            dbName: Data path name, not required parameter
        Returns:
            {
                "code": "",
                "data": [ {package info dict}, ... ],
                "msg": ""
            }
        Raises:
            DisconnectionError: Unable to connect to database exception
            AttributeError: Object does not have this property
            Error: Abnormal error
        """
        # Validate the query-string parameters first
        schema = PackagesSchema()
        data = schema.dump(request.args)
        if schema.validate(data):
            return jsonify(
                ResponseCode.response_json(ResponseCode.PARAM_ERROR)
            )
        dbname = data.get("dbName", None)
        try:
            priority_list = db_priority()
            if priority_list is None:
                return jsonify(
                    ResponseCode.response_json(ResponseCode.FILE_NOT_FOUND)
                )
            if not dbname:
                # No database specified: gather packages from every known one
                response = []
                for name in priority_list:
                    response.extend(get_packages(name))
                return jsonify(
                    ResponseCode.response_json(ResponseCode.SUCCESS, response)
                )
            if dbname not in priority_list:
                return jsonify(
                    ResponseCode.response_json(ResponseCode.DB_NAME_ERROR)
                )
            return jsonify(
                ResponseCode.response_json(
                    ResponseCode.SUCCESS, get_packages(dbname))
            )
        # Database queries data and catches exceptions
        except DisconnectionError as dis_connection_error:
            current_app.logger.error(dis_connection_error)
            return jsonify(
                ResponseCode.response_json(ResponseCode.DIS_CONNECTION_DB))
+
+
class SinglePack(Resource):
    """
    description: single package management
    Restful API: get, put
    ChangeLog:
    """

    def get(self):
        """
        description: Searching a package info
        Args:
            dbName: Database name, not required parameter
            sourceName: Source code package name, must pass
        Returns:
            {
                "code": "",
                "data": [ {package info with buildDep/subpack}, ... ],
                "msg": ""
            }
        Raises:
            DisconnectionError: Unable to connect to database exception
            AttributeError: Object does not have this property
            TypeError: Exception of type
            Error: Abnormal error
        """
        # Get verification parameters
        schema = GetpackSchema()
        data = schema.dump(request.args)
        if schema.validate(data):
            return jsonify(
                ResponseCode.response_json(ResponseCode.PARAM_ERROR)
            )
        dbname = data.get("dbName", None)
        sourcename = data.get("sourceName")

        abnormal = None
        try:
            dbpreority = db_priority()
            # BUG FIX: the original compared the db_priority *function* to
            # None ("if db_priority is None") instead of its return value,
            # so a missing database info file was never detected here.
            if dbpreority is None:
                return jsonify(
                    ResponseCode.response_json(ResponseCode.FILE_NOT_FOUND)
                )
            if not dbname:
                # No database specified: query the package in every database
                response = []
                for dbname in dbpreority:
                    query_result = get_single_package(dbname, sourcename)
                    response.append(query_result)
                return jsonify(
                    ResponseCode.response_json(ResponseCode.SUCCESS, response)
                )

            # Database queries data and catches exceptions
            if dbname not in dbpreority:
                return jsonify(
                    ResponseCode.response_json(ResponseCode.DB_NAME_ERROR)
                )
            response = get_single_package(dbname, sourcename)
            return jsonify(
                ResponseCode.response_json(ResponseCode.SUCCESS, [response])
            )
        except DisconnectionError as dis_connection_error:
            current_app.logger.error(dis_connection_error)
            abnormal = ResponseCode.DIS_CONNECTION_DB
        except (AttributeError, TypeError, Error) as attribute_error:
            current_app.logger.error(attribute_error)
            abnormal = ResponseCode.PACK_NAME_NOT_FOUND
        if abnormal is not None:
            return jsonify(ResponseCode.response_json(abnormal))

    def put(self):
        """
        Description: update a package info
        Args:
            dbName: Database name, parameters are required
            sourceName: The name of the source code package. Parameters are required
            maintainer: Maintainer, parameter not required
            maintainlevel: Maintenance level, parameter not required
        Returns:
            {
                "code": "",
                "data": "",
                "msg": ""
            }
        Raises:
            DisconnectionError: Unable to connect to database exception
            AttributeError: Object does not have this property
            TypeError: Exception of type
            Error: Abnormal error
        """
        # Get verification parameters
        schema = PutpackSchema()
        data = schema.dump(request.get_json())
        if schema.validate(data):
            return jsonify(
                ResponseCode.response_json(ResponseCode.PARAM_ERROR)
            )
        dbname = data.get('dbName')
        sourcename = data.get('sourceName')
        maintainer = data.get('maintainer', None)
        maintain_level = data.get('maintainlevel', None)

        # At least one of the two updatable fields must be supplied
        if not maintainer and not maintain_level:
            return jsonify(
                ResponseCode.response_json(ResponseCode.PARAM_ERROR)
            )

        if dbname not in db_priority():
            return jsonify(
                ResponseCode.response_json(ResponseCode.DB_NAME_ERROR)
            )
        # Database queries data and catches exceptions
        try:
            update_single_package(
                sourcename, dbname, maintainer, maintain_level)
            update_maintaniner_info(
                sourcename, dbname, maintainer, maintain_level)
            return jsonify(
                ResponseCode.response_json(ResponseCode.SUCCESS)
            )
        except DisconnectionError as dis_connection_error:
            current_app.logger.error(dis_connection_error)
            return jsonify(
                ResponseCode.response_json(
                    ResponseCode.DIS_CONNECTION_DB))
        except (AttributeError, TypeError, Error) as attri_error:
            current_app.logger.error(attri_error)
            return jsonify(
                ResponseCode.response_json(ResponseCode.PACK_NAME_NOT_FOUND)
            )
+
+
class InstallDepend(Resource):
    """
    Description: install depend of binary package
    Restful API: post
    changeLog:
    """

    def post(self):
        """
        Description: Query a package's install depend(support
                     querying in one or more databases)
        Args:
            binaryName: binary package name
            db_list: optional array of database names, priority order
        Returns:
            resultDict keyed by binary name: [src, dbname, version,
            [[parent_node, type], ...]]
        Raises:
        """
        schema = InstallDependSchema()
        data = request.get_json()
        if schema.validate(data):
            return jsonify(
                ResponseCode.response_json(ResponseCode.PARAM_ERROR)
            )
        pkg_name = data.get("binaryName")

        db_pri = db_priority()
        if not db_pri:
            return jsonify(
                ResponseCode.response_json(ResponseCode.FILE_NOT_FIND_ERROR)
            )

        # Fall back to the configured priority order when no list is supplied
        db_list = data.get("db_list") or db_pri

        if not pkg_name or not db_list:
            return jsonify(
                ResponseCode.response_json(ResponseCode.PARAM_ERROR)
            )

        if have_err_db_name(db_list, db_pri):
            return jsonify(
                ResponseCode.response_json(ResponseCode.DB_NAME_ERROR)
            )

        response_code, install_dict = installdepend(
            db_list).query_install_depend([pkg_name])

        if not install_dict:
            return jsonify(
                ResponseCode.response_json(response_code)
            )

        return jsonify(
            ResponseCode.response_json(ResponseCode.SUCCESS, data=install_dict)
        )
+
+
class BuildDepend(Resource):
    """
    Description: build depend of binary package
    Restful API: post
    changeLog:
    """

    def post(self):
        """
        Description: Query a package's build depend and
                     build depend package's install depend
                     (support querying in one or more databases)
        Args:
            sourceName: name of the source package
            db_list: optional array of database names, priority order
        Returns:
            {
                "code": "",
                "data": "",
                "msg": ""
            }
        Raises:
        """
        schema = BuildDependSchema()
        data = request.get_json()
        if schema.validate(data):
            return jsonify(
                ResponseCode.response_json(ResponseCode.PARAM_ERROR)
            )
        pkg_name = data.get("sourceName")

        db_pri = db_priority()
        if not db_pri:
            return jsonify(
                ResponseCode.response_json(ResponseCode.FILE_NOT_FIND_ERROR)
            )

        # Fall back to the configured priority order when no list is supplied
        db_list = data.get("db_list") or db_pri

        if have_err_db_name(db_list, db_pri):
            return jsonify(
                ResponseCode.response_json(ResponseCode.DB_NAME_ERROR)
            )

        res_code, res_dict, _ = builddepend(
            [pkg_name], db_list).build_depend_main()

        # An empty result dictionary is reported as None
        return jsonify(
            ResponseCode.response_json(res_code, data=res_dict or None)
        )
+
+
class SelfDepend(Resource):
    """
    Description: querying install and build depend for a package
                 and others which has the same src name
    Restful API: post
    changeLog:
    """

    def post(self):
        """
        Description: Query a package's all dependencies including install and build depend
                     (support quering a binary or source package in one or more databases)
        Args:
            packagename: package name
            packtype: source/binary
            selfbuild: 0/1
            withsubpack: 0/1
            db_list: optional array of database names, priority order
        Returns:
            {
                "code": "",
                "data": "",
                "msg": ""
            }
        Raises:
        """
        schema = SelfDependSchema()
        data = request.get_json()
        if schema.validate(data):
            return jsonify(
                ResponseCode.response_json(ResponseCode.PARAM_ERROR)
            )

        pkg_name = data.get("packagename")
        db_pri = db_priority()
        if not db_pri:
            return jsonify(
                ResponseCode.response_json(ResponseCode.FILE_NOT_FIND_ERROR)
            )
        # Fall back to the configured priority order when no list is supplied
        db_list = data.get("db_list") or db_pri

        self_build = data.get("selfbuild", 0)
        with_sub_pack = data.get("withsubpack", 0)
        pack_type = data.get("packtype", "binary")

        if have_err_db_name(db_list, db_pri):
            return jsonify(
                ResponseCode.response_json(ResponseCode.DB_NAME_ERROR)
            )
        response_code, binary_dicts, source_dicts = self_depend(
            db_list).query_depend(pkg_name, int(self_build),
                                  int(with_sub_pack), pack_type)

        # Both dictionaries must be non-empty for a successful answer
        if not binary_dicts or not source_dicts:
            return jsonify(
                ResponseCode.response_json(response_code)
            )

        return jsonify(
            ResponseCode.response_json(ResponseCode.SUCCESS, data={
                "binary_dicts": binary_dicts,
                "source_dicts": source_dicts
            })
        )
+
+
class BeDepend(Resource):
    """
    Description: querying be installed and built depend for a package
                 and others which has the same src name
    Restful API: post
    changeLog:
    """

    def post(self):
        """
        description: Query a package's all dependencies including
                     be installed and built depend
        Args:
            packagename: package name
            withsubpack: 0/1
            dbname: database name
        Returns:
            resultList of [binaryName, srcName, dbName, type, childNode]
            entries (type is beinstall or bebuild)
        exception:
        changeLog:
        """
        schema = BeDependSchema()
        data = request.get_json()
        if schema.validate(data):
            return jsonify(
                ResponseCode.response_json(ResponseCode.PARAM_ERROR)
            )

        package_name = data.get("packagename")
        with_sub_pack = data.get("withsubpack")
        db_name = data.get("dbname")

        # A single database name is required and must be a known database
        if db_name not in db_priority():
            return jsonify(
                ResponseCode.response_json(ResponseCode.DB_NAME_ERROR)
            )

        res_dict = be_depend(package_name, db_name, with_sub_pack).main()

        if not res_dict:
            return jsonify(
                ResponseCode.response_json(ResponseCode.PACK_NAME_NOT_FOUND)
            )
        return jsonify(
            ResponseCode.response_json(ResponseCode.SUCCESS, data=res_dict)
        )
+
+
class Repodatas(Resource):
    """
    description: Get database information and delete database
    Restful API: get, delete
    ChangeLog:
    """

    def get(self):
        """
        description: get all database
        Args:
        Returns:
            {
                "code": "",
                "data": [
                    {
                        "database_name": "",
                        "priority": "",
                        "status": ""
                    }
                ],
                "msg": ""
            }
        Raises:
            FileNotFoundError: File not found exception
            TypeError: Exception of wrong type
            Error: abnormal Error
        """
        try:
            with open(DATABASE_FILE_INFO, 'r', encoding='utf-8') as file_context:
                init_database_date = yaml.load(
                    file_context.read(), Loader=yaml.FullLoader)
                if init_database_date is None:
                    raise ContentNoneException(
                        "The content of the database initialization configuration "
                        "file cannot be empty ")
                # Present the databases in ascending priority order
                init_database_date.sort(
                    key=lambda x: x['priority'], reverse=False)
                return jsonify(
                    ResponseCode.response_json(
                        ResponseCode.SUCCESS,
                        data=init_database_date))
        except (FileNotFoundError, TypeError, Error) as file_not_found:
            current_app.logger.error(file_not_found)
            return jsonify(
                ResponseCode.response_json(ResponseCode.FILE_NOT_FOUND)
            )

    def delete(self):
        """
        description: delete one imported database
        Args:
            dbName: name of the database to delete, required
        Returns:
            {
                "code": "",
                "data": "",
                "msg": ""
            }
        Raises:
            FileNotFoundError: File not found exception,
            TypeError: Exception of wrong type
            Error: Abnormal error
        """
        schema = DeletedbSchema()
        data = schema.dump(request.args)
        if schema.validate(data):
            return jsonify(
                ResponseCode.response_json(ResponseCode.PARAM_ERROR)
            )
        db_name = data.get("dbName")
        db_list = db_priority()
        if db_list is None:
            return jsonify(
                ResponseCode.response_json(ResponseCode.FILE_NOT_FOUND)
            )
        # Reuse db_list instead of calling db_priority() a second time
        # (the original queried the priority file twice for no reason)
        if db_name not in db_list:
            return jsonify(
                ResponseCode.response_json(ResponseCode.DB_NAME_ERROR)
            )
        try:
            drop_db = InitDataBase()
            drop_db.delete_db(db_name)
            return jsonify(
                ResponseCode.response_json(ResponseCode.SUCCESS)
            )
        except (FileNotFoundError, TypeError, Error) as error:
            current_app.logger.error(error)
            return jsonify(
                ResponseCode.response_json(ResponseCode.DELETE_DB_ERROR)
            )
+
+
class InitSystem(Resource):
    """
    description: Initialize database
    Restful API: post
    ChangeLog:
    """

    def post(self):
        """
        description: InitSystem
        Args:
            configfile: optional path of the initialization configuration
        Returns:
            {
                "code": "",
                "data": "",
                "msg": ""
            }
        Raises:
            ContentNoneException: Unable to connect to the exception of the database
            DisconnectionError: Exception connecting to database
            TypeError: Exception of wrong type
            DataMergeException: Exception of merging data
            FileNotFoundError: File not found exception
            Error: abnormal Error
        """
        schema = InitSystemSchema()
        data = request.get_json()
        if schema.validate(data):
            return jsonify(
                ResponseCode.response_json(ResponseCode.PARAM_ERROR))

        configfile = data.get("configfile", None)
        err_code = None
        try:
            # Fall back to the system-wide configuration when no file is given
            config_path = configfile or ReadConfig().get_system('init_conf_path')
            InitDataBase(config_file_path=config_path).init_data()
        except ContentNoneException as content_none_error:
            LOGGER.logger.error(content_none_error)
            err_code = ResponseCode.CONFIGFILE_PATH_EMPTY
        except DisconnectionError as dis_connection_error:
            LOGGER.logger.error(dis_connection_error)
            err_code = ResponseCode.DIS_CONNECTION_DB
        except TypeError as type_error:
            LOGGER.logger.error(type_error)
            err_code = ResponseCode.TYPE_ERROR
        except DataMergeException as data_merge_error:
            LOGGER.logger.error(data_merge_error)
            err_code = ResponseCode.DATA_MERGE_ERROR
        except FileNotFoundError as file_not_found_error:
            LOGGER.logger.error(file_not_found_error)
            err_code = ResponseCode.FILE_NOT_FIND_ERROR
        except Error as error:
            LOGGER.logger.error(error)
            err_code = ResponseCode.FAILED_CREATE_DATABASE_TABLE

        if err_code is not None:
            return jsonify(ResponseCode.response_json(err_code))
        return jsonify(ResponseCode.response_json(ResponseCode.SUCCESS))
diff --git a/packageship/packageship/application/initsystem/__init__.py b/packageship/packageship/application/initsystem/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/packageship/packageship/application/initsystem/data_import.py b/packageship/packageship/application/initsystem/data_import.py
new file mode 100644
index 0000000000000000000000000000000000000000..ccf4ea6b96903c5231832e68d38f8af61c868f97
--- /dev/null
+++ b/packageship/packageship/application/initsystem/data_import.py
@@ -0,0 +1,951 @@
+#!/usr/bin/python3
+"""
+Description: Initialization of data import
+ Import the data in the sqlite database into the mysql database
+Class: InitDataBase,MysqlDatabaseOperations,SqliteDatabaseOperations
+"""
+import os
+import pathlib
+import yaml
+from sqlalchemy.exc import SQLAlchemyError, InternalError
+from packageship.libs.dbutils.sqlalchemy_helper import DBHelper
+from packageship.libs.exception import ContentNoneException
+from packageship.libs.exception import DatabaseRepeatException
+from packageship.libs.exception import DataMergeException
+from packageship.libs.exception import Error
+from packageship.libs.configutils.readconfig import ReadConfig
+from packageship.libs.log import Log
+from packageship.application.models.package import bin_pack, src_pack, pack_requires, pack_provides
+from packageship.application.initsystem.datamerge import MergeData
+from packageship.application.models.temporarydb import src_package
+from packageship.application.models.temporarydb import src_requires
+from packageship.application.models.temporarydb import bin_package
+from packageship.application.models.temporarydb import bin_requiresment
+from packageship.application.models.temporarydb import bin_provides
+from packageship import system_config
+
+LOGGER = Log(__name__)
+
+
+class InitDataBase():
+ """
+ Description: Database initialization, generate multiple databases and data
+ based on configuration files
+ Attributes:
+ config_file_path: configuration file path
+ config_file_datas: initialize the configuration content of the database
+ db_type: type of database
+ """
+
+ def __init__(self, config_file_path=None):
+ """
+ Description: Class instance initialization
+ Args:
+ config_file_path: Configuration file path
+ """
+ self.config_file_path = config_file_path
+
+ if self.config_file_path:
+ # yaml configuration file content
+ self.config_file_datas = self.__read_config_file()
+
+ self._read_config = ReadConfig()
+
+ self.db_type = self._read_config.get_database('dbtype')
+
+ if self.db_type is None:
+ self.db_type = 'mysql'
+
+ if self.db_type not in ['mysql', 'sqlite']:
+ LOGGER.logger.error("database type configuration error")
+ raise Error('database type configuration error')
+ self._src_requires_dicts = dict()
+ self._bin_package_dicts = dict()
+ self._bin_package_name = dict()
+ self._bin_requires_dicts = dict()
+ self._bin_provides_dicts = dict()
+ self._src_packages = dict()
+ self._src_package_names = dict()
+ self._sqlite_db = None
+ self.requires = dict()
+
+ def __read_config_file(self):
+ """
+ Description: Read the contents of the configuration file load each
+ node data in the yaml configuration file as
+ a list to return
+ Args:
+
+ Returns:
+ Initialize the contents of the database configuration file
+ Raises:
+ FileNotFoundError: The specified file does not exist
+ TypeError: Wrong type of data
+ """
+
+ if not os.path.exists(self.config_file_path):
+ raise FileNotFoundError(
+ 'system initialization configuration file does not exist')
+ # load yaml configuration file
+ with open(self.config_file_path, 'r', encoding='utf-8') as file_context:
+ init_database_config = yaml.load(
+ file_context.read(), Loader=yaml.FullLoader)
+ if init_database_config is None:
+ raise ContentNoneException(
+ 'The content of the database initialization configuration file cannot be empty')
+ if not isinstance(init_database_config, list):
+ raise TypeError('wrong type of configuration file')
+ for config_item in init_database_config:
+ if not isinstance(config_item, dict):
+ raise TypeError('wrong type of configuration file')
+ return init_database_config
+
+ def init_data(self):
+ """
+ Description: Initialization of the database
+ Args:
+
+ Returns:
+
+ Raises:
+ IOError: An error occurred while deleting the database information file
+ """
+ if getattr(self, 'config_file_datas', None) is None or \
+ self.config_file_datas is None:
+ raise ContentNoneException('Initialization file content is empty')
+
+ if self.__exists_repeat_database():
+ raise DatabaseRepeatException(
+ 'There is a duplicate database configuration')
+ if not InitDataBase.delete_settings_file():
+ raise IOError(
+ 'An error occurred while deleting the database configuration file')
+
+ # Create a database maintained by benchmark information
+ if self.db_type == 'mysql':
+ MysqlDatabaseOperations(
+ 'maintenance.information').create_datum_database()
+ else:
+ SqliteDatabaseOperations(
+ 'maintenance.information').create_datum_database()
+
+ for database in self.config_file_datas:
+ if not database.get('dbname'):
+ continue
+ priority = database.get('priority')
+ if not isinstance(priority, int) or priority < 0 or priority > 100:
+ continue
+ status = database.get('status')
+ if status not in ['enable', 'disable']:
+ continue
+
+ # Initialization data
+ self._init_data(database)
+
+ def _create_database(self, database):
+ """
+ Description: create related databases
+ Args:
+ database: Initialize the configuration content of the database
+ Returns:
+ The generated mysql database or sqlite database
+ Raises:
+ SQLAlchemyError: Abnormal database operation
+ """
+
+ db_name = database.get('dbname')
+ self._sqlite_db = SqliteDatabaseOperations(db_name=db_name)
+
+ if self.db_type == 'mysql':
+ creatadatabase = MysqlDatabaseOperations(db_name)
+ if not creatadatabase.create_database():
+ raise SQLAlchemyError("failed to create database or table")
+ return db_name
+
+ sqltedb_file = self._sqlite_db.create_sqlite_database()
+ if sqltedb_file is None:
+ raise SQLAlchemyError(
+ "failed to create database or table")
+ return sqltedb_file
+
+ def _init_data(self, database):
+ """
+ Description: data initialization operation
+ Args:
+ database: Initialize the configuration content of the database
+ Returns:
+
+ Raises:
+ ContentNoneException: Exception with empty content
+ TypeError: Data type error
+ SQLAlchemyError: Abnormal database operation
+ DataMergeException: Error in data integration
+ IOError: An error occurred while deleting the database information file
+ """
+
+ try:
+ db_file = None
+ # 1. create a database and related tables in the database
+ db_name = self._create_database(database)
+ # 2. get the data of binary packages and source packages
+ src_package_paths = database.get('src_db_file')
+ bin_package_paths = database.get('bin_db_file')
+
+ if src_package_paths is None or bin_package_paths is None:
+ raise ContentNoneException(
+ 'The configured database file path is empty')
+ if not isinstance(src_package_paths, list) \
+ or not isinstance(bin_package_paths, list):
+ raise TypeError(
+ 'The source code and binary path types in the initialization file are abnormal')
+ # 3. Obtain temporary source package files and binary package files
+ db_file = self.file_merge(
+ src_package_paths, bin_package_paths)
+ # 4. dependencies between combined data
+ self.data_relationship(db_file)
+ # 5. save data
+ self.save_data(db_name)
+
+ except (SQLAlchemyError, ContentNoneException,
+ DataMergeException, TypeError, Error) as error_msg:
+ # Delete the specified database
+ try:
+ if self.db_type == 'mysql':
+ MysqlDatabaseOperations.drop_database(
+ database.get('dbname'))
+ else:
+ self._sqlite_db.drop_database()
+
+ except (IOError, Error) as exception_msg:
+ LOGGER.logger.error(exception_msg)
+ else:
+ # Update the configuration file of the database
+ database_content = {
+ 'database_name': database.get('dbname'),
+ 'priority': database.get('priority'),
+ 'status': database.get('status')
+ }
+ InitDataBase.__updata_settings_file(
+ database_content=database_content)
+ finally:
+ try:
+ if os.path.exists(db_file):
+ os.remove(db_file)
+ except (IOError, UnboundLocalError) as error_msg:
+ LOGGER.logger.error(error_msg)
+
    def _src_package_relation(self, src_package_data):
        """
        Description: Record one source package row, keeping only the newest
            version per package name, and map its pkgKey to the package name
        Args:
            src_package_data: ORM row of a source package from the temporary db
        Returns:
            None (updates self._src_packages and self._src_package_names)
        Raises:

        """

        _src_package_name = src_package_data.name
        _src_package = {
            "name": src_package_data.name,
            "version": src_package_data.version,
            "license": src_package_data.rpm_license,
            "sourceURL": src_package_data.url,
            "Maintaniner": src_package_data.maintaniner
        }
        if _src_package_name not in self._src_packages.keys():
            self._src_packages[_src_package_name] = _src_package
        else:
            # Determine the version number
            # NOTE(review): this is a plain lexicographic string comparison,
            # not an RPM version comparison ('10.0' < '9.0' lexically) —
            # confirm version strings sort correctly this way
            if src_package_data.version > \
                    self._src_packages[_src_package_name]['version']:

                self._src_packages[_src_package_name] = _src_package
            # Delete previous version
            # NOTE(review): the old pkgKey->name mappings are removed even
            # when the incoming row loses the version comparison above —
            # verify that is intended
            for key in [names[0] for names in self._src_package_names.items()
                        if _src_package_name == names[1]]:
                del self._src_package_names[key]

        self._src_package_names[src_package_data.pkgKey] = _src_package_name
+
+ def _src_requires_relation(self, src_requires_data):
+ """
+ Description: Source package dependent package data relationship mapping
+ Args:
+ src_requires_data: Source package dependent package data
+ Returns:
+
+ Raises:
+
+ """
+
+ _src_package_name = self._src_package_names.get(
+ src_requires_data.pkgKey)
+ if _src_package_name:
+ if _src_package_name not in self._src_requires_dicts.keys():
+ self._src_requires_dicts[_src_package_name] = []
+ self._src_requires_dicts[_src_package_name].append({
+ 'name': src_requires_data.name
+ })
+
    def _bin_package_relation(self, bin_package_data):
        """
        Description: Record one binary package row, group it under its source
            package name and flag which entry relates to the source package
        Args:
            bin_package_data: ORM row of a binary package from the temporary db
        Returns:
            None (updates self._bin_package_name and self._bin_package_dicts)
        Raises:

        """

        _bin_pkg_key = bin_package_data.pkgKey
        # NOTE(review): a later package with the same name silently overwrites
        # the stored pkgKey — confirm binary names are unique across files
        self._bin_package_name[bin_package_data.name] = _bin_pkg_key

        src_package_name = bin_package_data.src_pack_name
        # 'relation' marks whether this entry should be linked to its source
        # package later (see _save_bin_package)
        _bin_package = {
            'name': bin_package_data.name,
            'version': bin_package_data.version,
            'relation': True
        }
        if src_package_name not in self._bin_package_dicts.keys():
            self._bin_package_dicts[src_package_name] = []

        # Determine whether the version number is consistent with the source code package
        # If they are the same, an association relationship is established.
        # NOTE(review): string comparison again, and only older EXISTING
        # entries are demoted — an incoming OLDER duplicate still gets
        # relation=True; verify this is the intended behavior
        for index, bin_package_item in enumerate(self._bin_package_dicts[src_package_name]):
            if bin_package_item.get('name') == bin_package_data.name:
                if bin_package_item.get('version') < bin_package_data.version:
                    self._bin_package_dicts[src_package_name][index]['relation'] = False

        self._bin_package_dicts[src_package_name].append(_bin_package)
+
+ def _bin_requires_relation(self, bin_requires_data):
+ """
+ Description: Binary package dependency package relationship mapping problem
+ Args:
+ bin_requires_data: Binary package dependency package data
+ Returns:
+
+ Raises:
+
+ """
+
+ _bin_pkg_key = bin_requires_data.pkgKey
+ if _bin_pkg_key:
+ if _bin_pkg_key not in self._bin_requires_dicts:
+ self._bin_requires_dicts[_bin_pkg_key] = []
+
+ self._bin_requires_dicts[_bin_pkg_key].append({
+ 'name': bin_requires_data.name
+ })
+
+ def _bin_provides_relation(self, bin_provides_data):
+ """
+ Description: Binary package provided by the relationship mapping problem
+ Args:
+ bin_provides_data: Component data provided by the binary package
+ Returns:
+
+ Raises:
+
+ """
+
+ _bin_pkg_key = bin_provides_data.pkgKey
+ if _bin_pkg_key:
+ if _bin_pkg_key not in self._bin_provides_dicts:
+ self._bin_provides_dicts[_bin_pkg_key] = []
+ self._bin_provides_dicts[_bin_pkg_key].append({
+ 'name': bin_provides_data.name
+ })
+
    def data_relationship(self, db_file):
        """
        Description: Read all rows of the temporary merge database and build
            the in-memory relation maps used later by save_data
        Args:
            db_file: Path of the temporary sqlite database produced by file_merge
        Returns:
            None (populates the seven self._* relation dictionaries)
        Raises:
            Error information
        """

        # reset all relation maps so repeated calls start from a clean state
        self._bin_provides_dicts = dict()
        self._bin_requires_dicts = dict()
        self._bin_package_name = dict()
        self._bin_package_dicts = dict()
        self._src_requires_dicts = dict()
        self._src_packages = dict()
        self._src_package_names = dict()
        try:
            with DBHelper(db_name=db_file, db_type='sqlite:///') as database:
                # source package data
                for src_package_item in database.session.query(src_package).all():
                    self._src_package_relation(src_package_item)

                # combine all dependent packages of source packages
                # (must run after the pkgKey->name map has been built above)
                for src_requires_item in database.session.query(src_requires).all():
                    self._src_requires_relation(src_requires_item)

                # combine all binary packages
                for bin_package_item in database.session.query(bin_package).all():
                    self._bin_package_relation(bin_package_item)

                # combine all dependent packages under the current binary package
                for bin_requires_item in database.session.query(
                        bin_requiresment).all():
                    self._bin_requires_relation(bin_requires_item)

                # combine the packages provided by the current binary package

                for bin_provides_item in database.session.query(bin_provides).all():
                    self._bin_provides_relation(bin_provides_item)

        except Error as error_msg:
            # a failure leaves the maps partially filled; callers proceed to
            # save_data regardless — NOTE(review): confirm that is acceptable
            LOGGER.logger.error(error_msg)
+
    def file_merge(self, src_package_paths, bin_package_paths):
        """
        Description: integration of multiple data files into one temporary
            sqlite database
        Args:
            src_package_paths: Source package database file
            bin_package_paths: Binary package database file
        Returns:
            Path of the generated temporary database file
        Raises:
            DataMergeException: Abnormal data integration
        """
        _db_file = os.path.join(
            self._sqlite_db.database_file_folder, 'temporary_database')

        # start from an empty file so stale data from a previous run is lost
        if os.path.exists(_db_file):
            os.remove(_db_file)
        # create a temporary sqlite database
        with DBHelper(db_name=_db_file, db_type='sqlite:///') as database:
            tables = ['src_package', 'src_requires',
                      'bin_package', 'bin_requiresment', 'bin_provides']
            database.create_table(tables)

        # pkgKey counters continue across files so keys stay unique in the
        # merged database
        _src_package_key = 1
        # load all source package files and import the files into it
        for src_file in src_package_paths:
            load_sqlite_data = MergeData(db_file=src_file)

            # Combine data from all source packages

            _src_package_key, src_merge_result = load_sqlite_data.src_file_merge(
                src_package_key=_src_package_key, db_file=_db_file)
            if not src_merge_result:
                raise DataMergeException(
                    'abnormal multi-file database integration')

        # load binary package file
        _bin_package_key = 1
        for bin_file in bin_package_paths:
            load_sqlite_data = MergeData(db_file=bin_file)

            # Combine all binary package data
            _bin_package_key, bin_merge_result = load_sqlite_data.bin_file_merge(
                bin_package_key=_bin_package_key, db_file=_db_file)
            if not bin_merge_result:
                raise DataMergeException(
                    'abnormal multi-file database integration')
        return _db_file
+
+ def __exists_repeat_database(self):
+ """
+ Description: Determine if the same database name exists
+ Args:
+
+ Returns:
+ True if there are duplicate databases, false otherwise
+ Raises:
+
+ """
+ db_names = [name.get('dbname')
+ for name in self.config_file_datas]
+
+ if len(set(db_names)) != len(self.config_file_datas):
+ return True
+
+ return False
+
    def _save_bin_package(self, src_packages):
        """
        Description: Build the binary-package rows to insert, linking each one
            to its source package id, and collect source requirements into
            self.requires
        Args:
            src_packages: Source package rows already persisted (with ids)
        Returns:
            Binary package data (list of dicts ready for batch insert)
        Raises:

        """
        bin_packages = []
        for package_data in src_packages:
            # pop() consumes the entry so the leftover values in
            # self._bin_package_dicts are the "orphan" binaries handled below
            try:
                bin_package_datas = self._bin_package_dicts.pop(
                    package_data.name)
            except KeyError:
                bin_package_datas = None

            if bin_package_datas:
                for bin_package_item in bin_package_datas:
                    bin_package_dict = {
                        'name': bin_package_item.get('name'),
                        'version': bin_package_item.get('version'),
                        'srcIDkey': None
                    }
                    # only entries flagged in _bin_package_relation get linked
                    if bin_package_item.get('relation'):
                        bin_package_dict['srcIDkey'] = package_data.id
                    bin_packages.append(bin_package_dict)

            # source package dependency package
            # NOTE: self.requires must have been initialized by the caller
            # (save_data) before this method runs
            src_requires_datas = self._src_requires_dicts.get(
                package_data.name)
            if src_requires_datas:
                for src_requires_item in src_requires_datas:
                    requires_name = src_requires_item.get('name')
                    if requires_name:
                        if requires_name not in self.requires.keys():
                            self.requires[requires_name] = []
                        self.requires[requires_name].append({
                            'name': src_requires_item.get('name'),
                            'srcIDkey': package_data.id,
                            'depProIDkey': None,
                            'binIDkey': None
                        })

        # organization independent binary package

        for bin_packs in self._bin_package_dicts.values():
            for bin_pack_item in bin_packs:
                bin_packages.append({
                    'name': bin_pack_item.get('name'),
                    'version': bin_pack_item.get('version'),
                    'srcIDkey': None
                })
        return bin_packages
+
    def _save_bin_provides(self, bin_packages):
        """
        Description: Build the provides rows for the persisted binary packages
            and collect binary requirements into self.requires
        Args:
            bin_packages: Binary package rows already persisted (with ids)
        Returns:
            Package data provided by binary (list of dicts ready for insert)
        Raises:

        """
        bin_provides_list = []
        for bin_pack_entity in bin_packages:

            # Get the pkgKey of the current binary package
            # NOTE(review): _bin_package_name keeps only the LAST pkgKey seen
            # for a given name — duplicate names would mix up requires/provides
            pkg_key = self._bin_package_name.get(bin_pack_entity.name)

            if self._bin_requires_dicts.get(pkg_key):
                for bin_requires_item in self._bin_requires_dicts.get(pkg_key):
                    requires_name = bin_requires_item.get('name')
                    if requires_name:
                        if requires_name not in self.requires.keys():
                            self.requires[requires_name] = []
                        self.requires[requires_name].append({
                            'name': bin_requires_item.get('name'),
                            'binIDkey': bin_pack_entity.id,
                            'depProIDkey': None,
                            'srcIDkey': None
                        })

            if self._bin_provides_dicts.get(pkg_key):
                for bin_provides_item in self._bin_provides_dicts.get(pkg_key):
                    bin_provides_list.append({
                        'name': bin_provides_item.get('name'),
                        'binIDkey': bin_pack_entity.id
                    })
        return bin_provides_list
+
    def save_data(self, db_name):
        """
        Description: Persist source packages, binary packages, provides and
            requires (resolved against provides) into the target database
        Args:
            db_name: The name of the database
        Returns:

        Raises:

        """

        with DBHelper(db_name=db_name) as data_base:
            # Add source package data
            data_base.batch_add(
                [src_package_item[1] for src_package_item in self._src_packages.items()], src_pack)

            # accumulator filled by _save_bin_package / _save_bin_provides,
            # keyed by required component name
            self.requires = dict()

            # Save dependency data of binary packages and source packages

            data_base.batch_add(self._save_bin_package(
                data_base.session.query(src_pack).all()), bin_pack)

            # Save all packages and dependent packages provided by the binary package

            data_base.batch_add(self._save_bin_provides(
                data_base.session.query(bin_pack).all()), pack_provides)

            # resolve each requirement against the provides table;
            # NOTE(review): requirements whose name is never provided by any
            # binary package are silently dropped here — confirm intended
            all_requires = []
            for provides_item in data_base.session.query(pack_provides).all():
                if provides_item.name in self.requires.keys():
                    for requires_item in self.requires[provides_item.name]:
                        requires_item['depProIDkey'] = provides_item.id
                        all_requires.append(requires_item)

            data_base.batch_add(all_requires, pack_requires)
+
    @staticmethod
    def __updata_settings_file(**Kwargs):
        """
        Description: update some configuration files related to the database
            in the system by appending the new node to the YAML list on disk
        Args:
            **Kwargs: data related to configuration file nodes
                database_content: dict describing one imported database
                    (database_name, priority, status)
        Returns:

        Raises:
            FileNotFoundError: The specified file was not found
            IOError: File or network operation io abnormal
        """
        try:
            if not os.path.exists(system_config.DATABASE_FILE_INFO):
                pathlib.Path(system_config.DATABASE_FILE_INFO).touch()
            # opened in append mode: each call dumps a one-element YAML list,
            # which extends the flat list already in the file.
            # NOTE(review): this only parses back correctly while the file
            # stays a flat block-style list — confirm no other writer changes
            # the format
            with open(system_config.DATABASE_FILE_INFO, 'a+', encoding='utf8') as file_context:
                setting_content = []
                if 'database_content' in Kwargs.keys():
                    content = Kwargs.get('database_content')
                    if content:
                        setting_content.append(content)
                yaml.dump(setting_content, file_context)

        except FileNotFoundError as not_found:
            LOGGER.logger.error(not_found)
        except IOError as exception_msg:
            LOGGER.logger.error(exception_msg)
+
+ @staticmethod
+ def delete_settings_file():
+ """
+ Description: Delete the configuration file of the database
+ Args:
+
+ Returns:
+ True if the deletion is successful, otherwise false
+ Raises:
+ IOError: File or network operation io abnormal
+ """
+
+ try:
+ if os.path.exists(system_config.DATABASE_FILE_INFO):
+ os.remove(system_config.DATABASE_FILE_INFO)
+ except (IOError, Error) as exception_msg:
+ LOGGER.logger.error(exception_msg)
+ return False
+ else:
+ return True
+
+ def delete_db(self, db_name):
+ """
+ Description: elete the database
+ Args:
+ db_name: The name of the database
+ Returns:
+
+ Raises:
+ IOError: File or network operation io abnormal
+ """
+ if self.db_type == 'mysql':
+ del_result = MysqlDatabaseOperations.drop_database(db_name)
+ else:
+ if not hasattr(self, '_sqlite_db'):
+ self._sqlite_db = SqliteDatabaseOperations(db_name=db_name)
+ del_result = self._sqlite_db.drop_database()
+
+ if del_result:
+ try:
+ file_read = open(system_config.DATABASE_FILE_INFO, 'r', encoding='utf-8')
+ _databases = yaml.load(
+ file_read.read(), Loader=yaml.FullLoader)
+ for database in _databases:
+ if database.get('database_name') == db_name:
+ _databases.remove(database)
+ # Delete the successfully imported database configuration node
+ with open(system_config.DATABASE_FILE_INFO, 'w+', encoding='utf-8') as file_context:
+ yaml.safe_dump(_databases, file_context)
+ except (IOError, Error) as del_config_error:
+ LOGGER.logger.error(del_config_error)
+ finally:
+ file_read.close()
+
+
class MysqlDatabaseOperations():
    """
    Description: Related to database operations, creating databases, creating tables
    Attributes:
        db_name: The name of the database
        create_database_sql: SQL statement to create a database
        drop_database_sql: Delete the SQL statement of the database
    """

    def __init__(self, db_name):
        """
        Description: Class instance initialization
        Args:
            db_name: Database name
        """
        self.db_name = db_name
        # NOTE(review): db_name is formatted straight into the SQL text; it
        # is expected to come only from the local configuration file —
        # confirm it is never taken from an external request
        self.create_database_sql = ''' CREATE DATABASE if not exists `{db_name}` \
                                    DEFAULT CHARACTER SET utf8mb4; '''.format(db_name=self.db_name)
        self.drop_database_sql = '''drop DATABASE if exists `{db_name}` '''.format(
            db_name=self.db_name)

    def create_database(self):
        """
        Description: create a database
        Args:

        Returns:
            True if successful, otherwise false
        Raises:
            SQLAlchemyError: An exception occurred while creating the database
        """

        with DBHelper(db_name='mysql') as data_base:

            try:
                # create database
                # NOTE: the database is dropped first, so calling this on an
                # existing database erases its contents
                data_base.session.execute(self.drop_database_sql)
                data_base.session.execute(self.create_database_sql)
            except SQLAlchemyError as exception_msg:
                LOGGER.logger.error(exception_msg)
                return False
            else:
                # create tables
                return self.__create_tables()

    @classmethod
    def drop_database(cls, db_name):
        """
        Description: Delete the database according to the specified name
        Args:
            db_name: The name of the database to be deleted
        Returns:
            True if successful, otherwise false
        Raises:
            SQLAlchemyError: An exception occurred while creating the database
        """
        if db_name is None:
            raise IOError(
                "The name of the database to be deleted cannot be empty")
        with DBHelper(db_name='mysql') as data_base:
            drop_database = ''' drop DATABASE if exists `{db_name}` '''.format(
                db_name=db_name)
            try:
                data_base.session.execute(drop_database)
            except SQLAlchemyError as exception_msg:
                LOGGER.logger.error(exception_msg)
                return False
            else:
                return True

    def __create_tables(self):
        """
        Description: Create the specified data table
        Args:

        Returns:
            True if successful, otherwise false
        Raises:
            SQLAlchemyError: An exception occurred while creating the database
        """
        try:
            with DBHelper(db_name=self.db_name) as database:
                tables = ['src_pack', 'bin_pack', 'pack_provides',
                          'pack_requires']
                database.create_table(tables)

        except SQLAlchemyError as exception_msg:
            LOGGER.logger.error(exception_msg)
            return False
        else:
            return True

    def create_datum_database(self):
        """
        Description: Create a benchmark database to save the maintainer's information
        Args:

        Returns:
            True if successful, otherwise false
        Raises:
            SQLAlchemyError: An exception occurred while creating the database
        """
        with DBHelper(db_name='mysql') as data_base:
            # create database
            # NOTE: unlike create_database(), no drop is issued here, so an
            # existing datum database is preserved
            try:
                data_base.session.execute(self.create_database_sql)
            except SQLAlchemyError as exception_msg:
                LOGGER.logger.error(exception_msg)
                return False
            else:
                # create tables
                return self.__create_datum_tables()

    def __create_datum_tables(self):
        """
        Description: Create a data table of maintainer information
        Args:

        Returns:
            True if successful, otherwise false
        Raises:
            SQLAlchemyError: An exception occurred while creating the database
            Error: Error information
        """
        try:
            with DBHelper(db_name=self.db_name) as database:
                tables = ['maintenance_info']
                database.create_table(tables)
        except InternalError as exists_table_err:
            # the table already existing is treated as success
            LOGGER.logger.error(exists_table_err)
            return True
        except (SQLAlchemyError, Error) as exception_msg:
            LOGGER.logger.error(exception_msg)
            return False

        else:
            return True
+
+
class SqliteDatabaseOperations():
    """
    Description: sqlite database related operations
    Attributes:
        db_name: Name of the database (file name without the .db suffix)
        database_file_folder: Folder that holds the sqlite database files
    """

    def __init__(self, db_name, **kwargs):
        """
        Description: Class instance initialization
        Args:
            db_name: Database name
            kwargs: optional settings; 'database_path' overrides the folder
                read from the system configuration
        """
        self.db_name = db_name
        self._read_config = ReadConfig()
        # Bug fix: kwargs is a plain dict, so the original
        # getattr(kwargs, 'database_path', None) always returned None and the
        # 'database_path' keyword argument was silently ignored;
        # dict.get() actually reads it.
        if kwargs.get('database_path') is None:
            self._database_file_path()
        else:
            self.database_file_folder = kwargs.get('database_path')

    def _database_file_path(self):
        """
        Description: Resolve the folder for database files from the system
            configuration, falling back to the built-in default path
        Args:

        Returns:
            None (sets self.database_file_folder; None when creation fails)
        Raises:
            IOError: File or network operation io abnormal
        """
        self.database_file_folder = self._read_config.get_system(
            'data_base_path')
        if not self.database_file_folder:
            self.database_file_folder = system_config.DATABASE_FOLDER_PATH

        if not os.path.exists(self.database_file_folder):
            try:
                os.makedirs(self.database_file_folder)
            except IOError as makedirs_error:
                LOGGER.logger.error(makedirs_error)
                self.database_file_folder = None

    def create_sqlite_database(self):
        """
        Description: create sqlite database and table
        Args:

        Returns:
            After successful generation, return the database file address,
            otherwise return none
        Raises:
            FileNotFoundError: The specified folder path does not exist
            SQLAlchemyError: An error occurred while generating the database
        """
        if self.database_file_folder is None:
            raise FileNotFoundError('Database folder does not exist')

        _db_file = os.path.join(
            self.database_file_folder, self.db_name)

        # recreate from scratch; the existence check uses the '.db'-suffixed
        # path (presumably DBHelper appends the suffix — confirm)
        if os.path.exists(_db_file + '.db'):
            os.remove(_db_file + '.db')

        # create a sqlite database
        with DBHelper(db_name=_db_file) as database:
            tables = ['src_pack', 'bin_pack',
                      'pack_requires', 'pack_provides']
            try:
                database.create_table(tables)
            except SQLAlchemyError as create_table_err:
                LOGGER.logger.error(create_table_err)
                return None

        return _db_file

    def drop_database(self):
        """
        Description: Delete the specified sqlite database file
        Args:

        Returns:
            Return true after successful deletion, otherwise return false
        Raises:
            IOError: An io exception occurred while deleting the specified database file
        """
        try:
            db_path = os.path.join(
                self.database_file_folder, self.db_name + '.db')
            if os.path.exists(db_path):
                os.remove(db_path)
        except IOError as exception_msg:
            LOGGER.logger.error(exception_msg)
            return False
        else:
            return True

    def create_datum_database(self):
        """
        Description: create the maintainer-information sqlite database and
            table, keeping an already existing database file untouched
        Args:

        Returns:
            After successful generation, return the database file address,
            otherwise return none
        Raises:
            FileNotFoundError: The specified database folder does not exist
            SQLAlchemyError: An error occurred while generating the database
        """
        if self.database_file_folder is None:
            raise FileNotFoundError('Database folder does not exist')

        _db_file = os.path.join(
            self.database_file_folder, self.db_name)

        if not os.path.exists(_db_file + '.db'):
            # create a sqlite database
            with DBHelper(db_name=_db_file) as database:
                tables = ['maintenance_info']
                try:
                    database.create_table(tables)
                except SQLAlchemyError as create_table_err:
                    LOGGER.logger.error(create_table_err)
                    return None
        return _db_file
diff --git a/packageship/packageship/application/initsystem/datamerge.py b/packageship/packageship/application/initsystem/datamerge.py
new file mode 100644
index 0000000000000000000000000000000000000000..85a6be7e4e7b0b47c76061537f606826fa411156
--- /dev/null
+++ b/packageship/packageship/application/initsystem/datamerge.py
@@ -0,0 +1,377 @@
+#!/usr/bin/python3
+"""
+Description: Integration of multiple sqlite file data, including reading
+ sqlite database and inserting data
+Class: MergeData
+"""
+from sqlalchemy.exc import SQLAlchemyError
+from packageship.application.models.temporarydb import src_package
+from packageship.application.models.temporarydb import src_requires
+from packageship.application.models.temporarydb import bin_package
+from packageship.application.models.temporarydb import bin_requiresment
+from packageship.application.models.temporarydb import bin_provides
+from packageship.application.models.package import maintenance_info
+from packageship.libs.dbutils import DBHelper
+from packageship.libs.log import Log
+
+LOGGER = Log(__name__)
+
+
class MergeData():
    """
    Description: Load data from sqlite database
    Attributes:
        db_file: Database file
        db_type: Connected database type
        datum_database: Base database name
    """

    def __init__(self, db_file):
        """
        Description: Class instance initialization
        Args:
            db_file: Database file
        """
        self.db_file = db_file
        self.db_type = 'sqlite:///'
        self.datum_database = 'maintenance.information'
        self.src_requires_dicts = dict()
        self.src_package_datas = []
        self.bin_provides_dicts = dict()
        self.bin_package_datas = []
        self.mainter_infos = dict()
        self.bin_requires_dicts = dict()

    @staticmethod
    def __columns(cursor):
        """
        Description: Return the column names of the given cursor's result set
        Args:
            cursor: Cursor of an executed statement

        Returns:
            List of column names, in select order
        Raises:

        """
        return [col[0] for col in cursor.description]

    def get_package_data(self):
        """
        Description: get binary package or source package data
        Args:

        Returns:
            List of row dicts from the 'packages' table, or None when the
            query fails
        Raises:
            SQLAlchemyError: An error occurred while executing the sql statement
        """
        try:
            with DBHelper(db_name=self.db_file, db_type=self.db_type, import_database=True) \
                    as database:
                src_packages_data = database.session.execute(
                    "select pkgKey,name,version,rpm_license,url,rpm_sourcerpm from packages")
                columns = MergeData.__columns(
                    src_packages_data.cursor)
                return [dict(zip(columns, row)) for row in src_packages_data.fetchall()]
        except SQLAlchemyError as sql_error:
            LOGGER.logger.error(sql_error)
            return None

    def get_requires_data(self):
        """
        Description: get dependent package data of binary package or source package
        Args:

        Returns:
            List of row dicts from the 'requires' table, or None when the
            query fails
        Raises:
            SQLAlchemyError: An error occurred while executing the sql statement
        """
        try:
            with DBHelper(db_name=self.db_file, db_type=self.db_type, import_database=True) \
                    as database:
                requires = database.session.execute(
                    "select pkgKey,name from requires")
                columns = MergeData.__columns(requires.cursor)
                return [dict(zip(columns, row)) for row in requires.fetchall()]
        except SQLAlchemyError as sql_error:
            LOGGER.logger.error(sql_error)
            return None

    def get_provides(self):
        """
        Description: get the dependency package provided by the binary package
        Args:

        Returns:
            List of row dicts from the 'provides' table, or None when the
            query fails
        Raises:
            SQLAlchemyError: An error occurred while executing the sql statement
        """
        try:
            with DBHelper(db_name=self.db_file, db_type=self.db_type, import_database=True) \
                    as database:
                requires = database.session.execute(
                    "select pkgKey,name from provides")
                columns = MergeData.__columns(requires.cursor)
                return [dict(zip(columns, row)) for row in requires.fetchall()]
        except SQLAlchemyError as sql_error:
            LOGGER.logger.error(sql_error)
            return None

    def get_maintenance_info(self):
        """
        Description: Obtain the information of the maintainer from the datum
            database, grouped by package name
        Args:

        Returns:
            None (fills self.mainter_infos)
        Raises:
            SQLAlchemyError: An error occurred while executing the sql statement
        """
        try:
            if not hasattr(self, 'mainter_infos'):
                self.mainter_infos = dict()
            with DBHelper(db_name=self.datum_database) as database:
                for info in database.session.query(maintenance_info).all():
                    if info.name not in self.mainter_infos.keys():
                        self.mainter_infos[info.name] = []
                    self.mainter_infos[info.name].append({
                        'version': info.version,
                        'maintaniner': info.maintaniner
                    })
        except SQLAlchemyError as sql_error:
            LOGGER.logger.error(sql_error)

    def src_file_merge(self, src_package_key, db_file):
        """
        Description: Source code related data integration
        Args:
            src_package_key: The relevant key value of the source package
            db_file: Database file
        Returns:
            Key value after successful data combination
            (0, False) or (src_package_key, True)
        Raises:
            SQLAlchemyError: An error occurred while executing the sql statement
        """
        self.get_maintenance_info()

        self.__compose_src_package()

        self.__compose_src_rquires()

        # Combination of relationships between source packages and dependent packages
        src_requires_data = []
        for src_package_item in self.src_package_datas:
            src_package_key += 1
            requires = self.src_requires_dicts.get(
                src_package_item.get('pkgKey'))
            if requires:
                for src_requires_item in requires:
                    src_requires_item['pkgKey'] = src_package_key
                    src_requires_data.append(src_requires_item)
            src_package_item['pkgKey'] = src_package_key

        try:
            with DBHelper(db_name=db_file, db_type=self.db_type) as data_base:
                data_base.batch_add(self.src_package_datas, src_package)
                data_base.batch_add(src_requires_data, src_requires)
        except SQLAlchemyError as sql_error:
            LOGGER.logger.error(sql_error)
            return (0, False)
        else:
            return (src_package_key, True)

    def __compose_src_package(self):
        """
        Description: Combine source package data
        Args:

        Returns:

        Raises:

        """
        if getattr(self, 'src_package_datas', None) is None:
            self.src_package_datas = []

        # Robustness fix: get_package_data() returns None when the query
        # fails; treat that as "no rows" instead of raising TypeError
        for src_package_item in self.get_package_data() or []:
            src_package_name = src_package_item.get('name')
            if src_package_name:
                # Find the maintainer information of the current data
                maintenance_infos = self.mainter_infos.get(src_package_name)
                maintenance = []
                version = src_package_item.get('version')
                if self.mainter_infos.get(src_package_name):
                    for maintenance_item in maintenance_infos:
                        if maintenance_item.get('version') == version:
                            maintenance.append(maintenance_item)

                self.src_package_datas.append(
                    {
                        "name": src_package_item.get('name'),
                        "version": version,
                        "rpm_license": src_package_item.get('rpm_license'),
                        "url": src_package_item.get('url'),
                        "pkgKey": src_package_item.get('pkgKey'),
                        'maintaniner':
                            maintenance[0].get(
                                'maintaniner') if maintenance else None
                    }
                )

    def __compose_src_rquires(self):
        """
        Description: Combine source package dependent package data
        Args:

        Returns:

        Raises:

        """
        if getattr(self, 'src_requires_dicts', None) is None:
            self.src_requires_dicts = dict()

        # Robustness fix: tolerate a failed query (None) as an empty result
        for src_requires_item in self.get_requires_data() or []:
            pkg_key = src_requires_item.get('pkgKey')
            if pkg_key:
                if pkg_key not in self.src_requires_dicts.keys():
                    self.src_requires_dicts[pkg_key] = []
                self.src_requires_dicts[pkg_key].append(
                    {
                        'name': src_requires_item.get('name'),
                        'pkgKey': pkg_key
                    }
                )

    def __compose_bin_package(self):
        """
        Description: Combine binary package data
        Args:

        Returns:

        Raises:
            AttributeError
        """
        if getattr(self, 'bin_package_datas', None) is None:
            self.bin_package_datas = []

        # Robustness fix: tolerate a failed query (None) as an empty result
        for bin_package_item in self.get_package_data() or []:
            try:
                # derive the source package name from e.g.
                # 'name-version-release.src.rpm'; rpm_sourcerpm may be None,
                # which raises AttributeError and skips the row
                src_package_name = bin_package_item.get('rpm_sourcerpm').split(
                    '-' + bin_package_item.get('version'))[0]
            except AttributeError as exception_msg:
                src_package_name = None
                LOGGER.logger.warning(exception_msg)
            else:
                self.bin_package_datas.append(
                    {
                        "name": bin_package_item.get('name'),
                        "version": bin_package_item.get('version'),
                        "license": bin_package_item.get('rpm_license'),
                        "sourceURL": bin_package_item.get('url'),
                        "src_pack_name": src_package_name,
                        "pkgKey": bin_package_item.get('pkgKey')
                    }
                )

    def __compose_bin_requires(self):
        """
        Description: Combining binary dependent package data
        Args:

        Returns:

        Raises:
        """
        if getattr(self, 'bin_requires_dicts', None) is None:
            self.bin_requires_dicts = dict()

        # Robustness fix: tolerate a failed query (None) as an empty result
        for bin_requires_item in self.get_requires_data() or []:
            pkg_key = bin_requires_item.get('pkgKey')
            if pkg_key:
                if pkg_key not in self.bin_requires_dicts.keys():
                    self.bin_requires_dicts[pkg_key] = []
                self.bin_requires_dicts[pkg_key].append({
                    'name': bin_requires_item.get('name'),
                    'pkgKey': 0
                })

    def __compose_bin_provides(self):
        """
        Description: Combine binary package provides data
        Args:

        Returns:

        Raises:

        """
        if getattr(self, 'bin_provides_dicts', None) is None:
            self.bin_provides_dicts = dict()

        # Robustness fix: tolerate a failed query (None) as an empty result
        for bin_provides_item in self.get_provides() or []:
            pkg_key = bin_provides_item.get('pkgKey')
            if pkg_key:
                if pkg_key not in self.bin_provides_dicts.keys():
                    self.bin_provides_dicts[pkg_key] = []
                self.bin_provides_dicts[pkg_key].append({
                    'name': bin_provides_item.get('name'),
                    'pkgKey': 0
                })

    def bin_file_merge(self, bin_package_key, db_file):
        """
        Description: Binary package related data integration
        Args:
            bin_package_key: Primary key of binary package
            db_file: Database file
        Returns:
            Key value after successful data combination
            (0, False) or (bin_package_key, True)
        Raises:
            SQLAlchemyError: An error occurred while executing the sql statement
        """
        self.__compose_bin_package()
        # binary package dependent package integration

        self.__compose_bin_requires()

        self.__compose_bin_provides()

        # integrate the id data of the binary package
        bin_requires_datas = []
        bin_provides_datas = []
        for bin_package_item in self.bin_package_datas:
            bin_package_key += 1
            # dependent packages
            requires = self.bin_requires_dicts.get(
                bin_package_item.get('pkgKey'))
            if requires:
                for bin_requires_item in requires:
                    bin_requires_item['pkgKey'] = bin_package_key
                    bin_requires_datas.append(bin_requires_item)

            provides = self.bin_provides_dicts.get(
                bin_package_item.get('pkgKey'))
            if provides:
                for bin_provides_item in provides:
                    bin_provides_item['pkgKey'] = bin_package_key
                    bin_provides_datas.append(bin_provides_item)
            bin_package_item['pkgKey'] = bin_package_key
        # save binary package related data
        try:
            with DBHelper(db_name=db_file, db_type=self.db_type) as data_base:
                data_base.batch_add(self.bin_package_datas, bin_package)
                data_base.batch_add(bin_requires_datas, bin_requiresment)
                data_base.batch_add(bin_provides_datas, bin_provides)
        except SQLAlchemyError as sql_error:
            LOGGER.logger.error(sql_error)
            return (0, False)
        else:
            return (bin_package_key, True)
diff --git a/packageship/packageship/application/models/__init__.py b/packageship/packageship/application/models/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..79752094b1769eedae0b2069dc8203a17e61c7cb
--- /dev/null
+++ b/packageship/packageship/application/models/__init__.py
@@ -0,0 +1,4 @@
+#!/usr/bin/python3
+"""
+Entity mapping model of database
+"""
diff --git a/packageship/packageship/application/models/package.py b/packageship/packageship/application/models/package.py
new file mode 100644
index 0000000000000000000000000000000000000000..c0f8acb3fdbc369e38ee6f9dc8a4cee26933bdc1
--- /dev/null
+++ b/packageship/packageship/application/models/package.py
@@ -0,0 +1,98 @@
+#!/usr/bin/python3
+"""
+Description: Database entity model mapping
+"""
+from sqlalchemy import Column, Integer, String, ForeignKey
+from sqlalchemy.orm import relationship
+from packageship.libs.dbutils.sqlalchemy_helper import DBHelper
+
+
+class src_pack(DBHelper.BASE): # pylint: disable=C0103,R0903
+    """
+    Description: ORM model for the 'src_pack' table — one row per source package.
+    """
+
+    __tablename__ = 'src_pack'
+
+    # Surrogate primary key
+    id = Column(Integer, primary_key=True)
+
+    # Source package name
+    name = Column(String(500), nullable=True)
+
+    version = Column(String(200), nullable=True)
+
+    license = Column(String(500), nullable=True)
+
+    # Upstream project URL
+    sourceURL = Column(String(200), nullable=True)
+
+    # Source package download location
+    downloadURL = Column(String(200), nullable=True)
+
+    # NOTE(review): "Maintaniner" looks like a typo for "Maintainer", but
+    # renaming it would change the table schema, so it is left untouched here.
+    Maintaniner = Column(String(50), nullable=True)
+
+    MaintainLevel = Column(String(20), nullable=True)
+
+
+class bin_pack(DBHelper.BASE): # pylint: disable=C0103,R0903
+    """
+    Description: ORM model for the 'bin_pack' table — one row per binary package.
+    """
+    __tablename__ = 'bin_pack'
+
+    # Surrogate primary key
+    id = Column(Integer, primary_key=True)
+
+    name = Column(String(500), nullable=True)
+
+    version = Column(String(200), nullable=True)
+
+    # Foreign key to the source package this binary was built from
+    srcIDkey = Column(Integer, ForeignKey('src_pack.id'))
+
+    # ORM relationship; src_pack rows gain a 'bin_pack' backref collection
+    src_pack = relationship('src_pack', backref="bin_pack")
+
+
+class pack_requires(DBHelper.BASE): # pylint: disable=C0103,R0903
+    """
+    Description: ORM model for the 'pack_requires' table — dependency entries
+    for source and binary packages.
+    """
+
+    __tablename__ = 'pack_requires'
+
+    id = Column(Integer, primary_key=True)
+
+    # Name of the required component
+    name = Column(String(500), nullable=True)
+
+    # depProIDkey = Column(Integer, ForeignKey(
+    #     'pack_provides.id'), nullable=True)
+
+    # NOTE(review): the ForeignKey to pack_provides.id is deliberately
+    # commented out above; depProIDkey is kept as a plain Integer, so
+    # referential integrity to pack_provides is NOT enforced by the DB.
+    depProIDkey = Column(Integer)
+    srcIDkey = Column(Integer, ForeignKey('src_pack.id'), nullable=True)
+
+    binIDkey = Column(Integer, ForeignKey('bin_pack.id'), nullable=True)
+
+
+class pack_provides(DBHelper.BASE): # pylint: disable=C0103,R0903
+    """
+    Description: ORM model for the 'pack_provides' table — components
+    provided by binary packages.
+    """
+    __tablename__ = 'pack_provides'
+
+    id = Column(Integer, primary_key=True)
+
+    # Name of the provided component
+    name = Column(String(500), nullable=True)
+
+    # Binary package that provides this component
+    binIDkey = Column(Integer, ForeignKey('bin_pack.id'))
+
+
+class maintenance_info(DBHelper.BASE): # pylint: disable=C0103,R0903
+    """
+    Description: ORM model for the 'maintenance_info' table — maintainer
+    data per package/version.
+    """
+    __tablename__ = 'maintenance_info'
+
+    id = Column(Integer, primary_key=True)
+
+    name = Column(String(500), nullable=True)
+
+    version = Column(String(500), nullable=True)
+
+    # NOTE(review): "maintaniner" looks like a typo for "maintainer";
+    # renaming would change the table schema, so it is left as-is.
+    maintaniner = Column(String(100), nullable=True)
+
+    maintainlevel = Column(String(100), nullable=True)
diff --git a/packageship/packageship/application/models/temporarydb.py b/packageship/packageship/application/models/temporarydb.py
new file mode 100644
index 0000000000000000000000000000000000000000..07a2dd17a77f670038af5e3f30a1d0494ceceead
--- /dev/null
+++ b/packageship/packageship/application/models/temporarydb.py
@@ -0,0 +1,86 @@
+#!/usr/bin/python3
+"""
+Description: Database entity model mapping
+"""
+from sqlalchemy import Column, Integer, String
+from packageship.libs.dbutils.sqlalchemy_helper import DBHelper
+
+
+class src_package(DBHelper.BASE): # pylint: disable=C0103,R0903
+    """
+    Description: Temporary source package model used during data import.
+    """
+
+    __tablename__ = 'src_package'
+
+    # pkgKey is the primary key; presumably carried over from the
+    # repository's sqlite metadata -- TODO confirm against the importer.
+    pkgKey = Column(Integer, primary_key=True)
+
+    name = Column(String(500), nullable=True)
+
+    version = Column(String(200), nullable=True)
+
+    rpm_license = Column(String(500), nullable=True)
+
+    url = Column(String(200), nullable=True)
+
+    maintaniner = Column(String(100), nullable=True)
+
+
+class bin_package(DBHelper.BASE): # pylint: disable=C0103,R0903
+    """
+    Description: Temporary binary package model used during data import.
+    """
+    __tablename__ = 'bin_package'
+
+    pkgKey = Column(Integer, primary_key=True)
+
+    name = Column(String(500), nullable=True)
+
+    version = Column(String(200), nullable=True)
+
+    rpm_license = Column(String(500), nullable=True)
+
+    url = Column(String(500), nullable=True)
+
+    # Full source RPM file name this binary was built from
+    rpm_sourcerpm = Column(String(500), nullable=True)
+
+    # Source package name derived from rpm_sourcerpm
+    src_pack_name = Column(String(200), nullable=True)
+
+
+class src_requires(DBHelper.BASE): # pylint: disable=C0103,R0903
+    """
+    Description: Temporary model of source package build dependencies.
+    """
+    __tablename__ = 'src_requires'
+
+    id = Column(Integer, primary_key=True)
+
+    # Key of the owning src_package row (no FK constraint enforced)
+    pkgKey = Column(Integer)
+
+    name = Column(String(500), nullable=True)
+
+
+class bin_requiresment(DBHelper.BASE): # pylint: disable=C0103,R0903
+    """
+    Description: Temporary model of binary package install dependencies.
+    NOTE(review): "requiresment" looks like a typo for "requirement";
+    renaming would change the table name, so it is left as-is.
+    """
+    __tablename__ = 'bin_requiresment'
+
+    id = Column(Integer, primary_key=True)
+
+    # Key of the owning bin_package row (no FK constraint enforced)
+    pkgKey = Column(Integer)
+
+    name = Column(String(500), nullable=True)
+
+
+class bin_provides(DBHelper.BASE): # pylint: disable=C0103,R0903
+    """
+    Description: Temporary model of components provided by binary packages.
+    """
+    __tablename__ = 'bin_provides'
+
+    id = Column(Integer, primary_key=True)
+
+    # Key of the owning bin_package row (no FK constraint enforced)
+    pkgKey = Column(Integer)
+
+    name = Column(String(500), nullable=True)
diff --git a/packageship/packageship/application/settings.py b/packageship/packageship/application/settings.py
new file mode 100644
index 0000000000000000000000000000000000000000..bc090439dbcae9b160247c04d171e6e0046e4e19
--- /dev/null
+++ b/packageship/packageship/application/settings.py
@@ -0,0 +1,68 @@
+#!/usr/bin/python3
+"""
+Description: Basic configuration of flask framework
+"""
+import random
+from packageship.libs.configutils.readconfig import ReadConfig
+
+
+class Config():
+    """
+    Description: Flask configuration for the production environment.
+
+    Instantiating the class reads package.ini and mutates the CLASS-level
+    attributes (SECRET_KEY, DEBUG, LOG_LEVEL), so settings are shared by
+    every instance and by Flask's `app.config.from_object(Config)`.
+    Attributes:
+        _read_config: ReadConfig instance used to read package.ini
+    """
+    SECRET_KEY = None
+
+    DEBUG = False
+
+    LOG_LEVEL = 'INFO'
+
+    def __init__(self):
+
+        self._read_config = ReadConfig()
+
+        self.set_config_val()
+
+    @classmethod
+    def _random_secret_key(cls, random_len=32):
+        """
+        Description: Generate a random SECRET_KEY of `random_len` characters.
+        NOTE(review): `random` is not cryptographically secure; the stdlib
+        `secrets` module would be preferable for a session secret.
+        """
+        cls.SECRET_KEY = ''.join(
+            [random.choice('abcdefghijklmnopqrstuvwxyz!@#$%^&*()') for index in range(random_len)])
+
+    @classmethod
+    def _set_debug(cls, debug):
+        """
+        Description: Enable debug mode when the config value is the
+        literal string 'true' (any other value leaves DEBUG unchanged).
+        """
+        if debug == 'true':
+            cls.DEBUG = True
+
+    @classmethod
+    def _set_log_level(cls, log_level):
+        """
+        Description: Set the log level class attribute.
+        """
+        cls.LOG_LEVEL = log_level
+
+    def set_config_val(self):
+        """
+        Description: Populate SECRET_KEY, DEBUG and LOG_LEVEL from the
+        system configuration file; missing values keep class defaults.
+        Args:
+        Returns:
+        Raises:
+        """
+        Config._random_secret_key()
+
+        debug = self._read_config.get_system('debug')
+
+        if debug:
+            Config._set_debug(debug)
+
+        log_level = self._read_config.get_config('LOG', 'log_level')
+
+        if log_level:
+            Config._set_log_level(log_level)
diff --git a/packageship/packageship/libs/__init__.py b/packageship/packageship/libs/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..f4f5866b119aefc85e00b88ef42f9bb9b5d5103c
--- /dev/null
+++ b/packageship/packageship/libs/__init__.py
@@ -0,0 +1,4 @@
+#!/usr/bin/python3
+"""
+Encapsulation of public class methods
+"""
diff --git a/packageship/packageship/libs/configutils/__init__.py b/packageship/packageship/libs/configutils/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/packageship/packageship/libs/configutils/readconfig.py b/packageship/packageship/libs/configutils/readconfig.py
new file mode 100644
index 0000000000000000000000000000000000000000..3c9cc2bc374f6bddb466a0aba40ca3cdc4d67f13
--- /dev/null
+++ b/packageship/packageship/libs/configutils/readconfig.py
@@ -0,0 +1,76 @@
+#!/usr/bin/python3
+"""
+ Description:Read the base class of the configuration file in the system
+ which mainly includes obtaining specific node values
+ and obtaining arbitrary node values
+ Class:ReadConfig
+"""
+import configparser
+from configparser import NoSectionError
+from configparser import NoOptionError
+from packageship.system_config import SYS_CONFIG_PATH
+
+
+class ReadConfig():
+    """
+    Description: Reader for the system configuration file (SYS_CONFIG_PATH).
+
+    All getters return the raw string value, or None when the section or
+    option is missing (errors are never raised to the caller).
+    Attributes:
+        conf: configparser.ConfigParser loaded from SYS_CONFIG_PATH
+    """
+
+    def __init__(self):
+        self.conf = configparser.ConfigParser()
+        # ConfigParser.read silently ignores a missing file
+        self.conf.read(SYS_CONFIG_PATH)
+
+    def get_system(self, param):
+        """
+        Description: Get a value from the [SYSTEM] section.
+        Args:
+            param: option name to read
+        Returns:
+            The option value as a string, or None if absent
+        Raises:
+        """
+        if param:
+            try:
+                return self.conf.get("SYSTEM", param)
+            except NoSectionError:
+                return None
+            except NoOptionError:
+                return None
+        return None
+
+    def get_database(self, param):
+        """
+        Description: Get a value from the [DATABASE] section.
+        Args:
+            param: option name to read
+        Returns:
+            The option value as a string, or None if absent
+        Raises:
+        """
+        if param:
+            try:
+                return self.conf.get("DATABASE", param)
+            except NoSectionError:
+                return None
+            except NoOptionError:
+                return None
+        return None
+
+    def get_config(self, node, param):
+        """
+        Description: Get a value from an arbitrary section.
+        NOTE(review): get_system/get_database could delegate to this
+        method to remove the duplicated try/except logic.
+        Args:
+            node: section name
+            param: option name to read
+        Returns:
+            The option value as a string, or None if absent
+        Raises:
+        """
+        if all([node, param]):
+            try:
+                return self.conf.get(node, param)
+            except NoSectionError:
+                return None
+            except NoOptionError:
+                return None
+        return None
diff --git a/packageship/packageship/libs/dbutils/__init__.py b/packageship/packageship/libs/dbutils/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..78ac1617112465680f720d650e779adc48937b22
--- /dev/null
+++ b/packageship/packageship/libs/dbutils/__init__.py
@@ -0,0 +1,7 @@
+#!/usr/bin/python3
+"""
+Database access public class method
+"""
+from .sqlalchemy_helper import DBHelper
+
+__all__ = ['DBHelper']
diff --git a/packageship/packageship/libs/dbutils/sqlalchemy_helper.py b/packageship/packageship/libs/dbutils/sqlalchemy_helper.py
new file mode 100644
index 0000000000000000000000000000000000000000..228aee2252d88c34574d6951650280adec404e0e
--- /dev/null
+++ b/packageship/packageship/libs/dbutils/sqlalchemy_helper.py
@@ -0,0 +1,251 @@
+#!/usr/bin/python3
+"""
+Description: Simple encapsulation of sqlalchemy orm framework operation database
+Class: DBHelper
+"""
+import os
+from sqlalchemy import create_engine
+from sqlalchemy import MetaData
+from sqlalchemy.orm import sessionmaker
+from sqlalchemy.exc import SQLAlchemyError
+from sqlalchemy.exc import DisconnectionError
+from sqlalchemy.ext.declarative import declarative_base
+from sqlalchemy.engine.url import URL
+from packageship.libs.exception.ext import Error
+from packageship.libs.exception.ext import DbnameNoneException
+from packageship.libs.exception.ext import ContentNoneException
+from packageship.libs.configutils.readconfig import ReadConfig
+from packageship import system_config
+
+
+class DBHelper():
+    """
+    Description: Database connection and operation helper; usable as a
+    context manager (`with DBHelper(...) as db:`) that opens a session on
+    enter and closes it on exit.
+    Attributes:
+        user_name: Username
+        password: Password
+        ip_address: Ip address
+        port: Port
+        db_name: Database name
+        db_type: Database type ('mysql+pymysql' or 'sqlite:///')
+        session: Session (None until the context manager is entered)
+    """
+    # The base class inherited by the data model
+    BASE = declarative_base()
+
+    def __init__(self, user_name=None, password=None, ip_address=None, # pylint: disable=R0913
+                 port=None, db_name=None, db_type=None, **kwargs):
+        """
+        Description: Class instance initialization. Every connection
+        parameter not passed explicitly is read from the [DATABASE]
+        section of the configuration file.
+
+        """
+        self.user_name = user_name
+        self._readconfig = ReadConfig()
+        if self.user_name is None:
+            self.user_name = self._readconfig.get_database('user_name')
+
+        self.password = password
+        if self.password is None:
+            self.password = self._readconfig.get_database('password')
+
+        self.ip_address = ip_address
+
+        if self.ip_address is None:
+            self.ip_address = self._readconfig.get_database('host')
+
+        self.port = port
+
+        if self.port is None:
+            self.port = self._readconfig.get_database('port')
+
+        self.db_name = db_name
+
+        if self.db_name is None:
+            self.db_name = self._readconfig.get_database('database')
+
+        self.db_type = db_type
+
+        if self.db_type is None:
+            # read the contents of the configuration file
+            _db_type = self._readconfig.get_database('dbtype')
+            if _db_type is None or _db_type == 'mysql':
+                self.db_type = 'mysql+pymysql'
+            else:
+                self.db_type = 'sqlite:///'
+                # For sqlite, db_name becomes an absolute .db file path,
+                # except when the caller is importing an external database.
+                if 'import_database' not in kwargs.keys():
+                    self._db_file_path()
+                    self.db_name = os.path.join(
+                        self.database_file_path, self.db_name + '.db')
+        self._create_engine()
+        self.session = None
+
+    def _create_engine(self):
+        """
+        Description: Create a database connection object
+        Args:
+
+        Returns:
+        Raises:
+            DbnameNoneException: sqlite selected but no database name given
+            DisconnectionError: A disconnect is detected on a raw DB-API connection.
+
+        """
+        # NOTE(review): convert_unicode was deprecated and later removed in
+        # SQLAlchemy 1.4 -- confirm the pinned SQLAlchemy version accepts it.
+        if self.db_type.startswith('sqlite'):
+            if not self.db_name:
+                raise DbnameNoneException(
+                    'The connected database name is empty')
+            self.engine = create_engine(
+                self.db_type + self.db_name, encoding='utf-8', convert_unicode=True,
+                connect_args={'check_same_thread': False})
+        else:
+            if all([self.user_name, self.password, self.ip_address, self.port, self.db_name]):
+                # create connection object
+                self.engine = create_engine(URL(**{'database': self.db_name,
+                                                   'username': self.user_name,
+                                                   'password': self.password,
+                                                   'host': self.ip_address,
+                                                   'port': self.port,
+                                                   'drivername': self.db_type}),
+                                            encoding='utf-8',
+                                            convert_unicode=True)
+            else:
+                raise DisconnectionError(
+                    'A disconnect is detected on a raw DB-API connection')
+
+    def _db_file_path(self):
+        """
+        Description: Resolve (and create if necessary) the directory where
+        sqlite database files are stored; sets self.database_file_path.
+        Args:
+
+        Returns:
+        Raises:
+
+        """
+        self.database_file_path = self._readconfig.get_system(
+            'data_base_path')
+        if not self.database_file_path:
+            self.database_file_path = system_config.DATABASE_FOLDER_PATH
+        if not os.path.exists(self.database_file_path):
+            os.makedirs(self.database_file_path)
+
+    def __enter__(self):
+        """
+        Description: functional description:Create a context manager for the database connection
+        Args:
+
+        Returns:
+            Class instance
+        Raises:
+            DisconnectionError: engine was never created
+
+        """
+
+        session = sessionmaker()
+        if not hasattr(self, 'engine'):
+            raise DisconnectionError('Abnormal database connection')
+        session.configure(bind=self.engine)
+
+        self.session = session()
+        return self
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        """
+        Description: functional description:Release the database connection pool
+        and close the connection.
+        NOTE(review): the session is closed without an explicit rollback
+        when an exception occurred inside the `with` block -- uncommitted
+        changes are discarded by close(), but a rollback would be clearer.
+        Args:
+            exc_type: Abnormal type
+            exc_val: Abnormal value
+            exc_tb: Abnormal table
+        Returns:
+        Raises:
+
+        """
+        self.session.close()
+
+    @classmethod
+    def create_all(cls, db_name=None):
+        """
+        Description: functional description:Create all database tables
+        registered on BASE in the given database.
+        Args:
+            db_name: Database name
+        Returns:
+
+        Raises:
+
+        """
+
+        cls.BASE.metadata.create_all(bind=cls(db_name=db_name).engine)
+
+    def create_table(self, tables):
+        """
+        Description: Create only the named tables in the current database.
+        Args:
+            tables: iterable of table names to create
+        Returns:
+
+        Raises:
+        """
+        meta = MetaData(self.engine)
+        for table_name in DBHelper.BASE.metadata.tables.keys():
+            if table_name in tables:
+                table = DBHelper.BASE.metadata.tables[table_name]
+                # rebind the table to this engine's metadata before creating
+                table.metadata = meta
+                table.create()
+
+    def add(self, entity):
+        """
+        Description: Insert a single data entity and commit.
+        Args:
+            entity: Data entity
+        Return:
+            If the addition is successful, return the corresponding entity, otherwise return None
+        Raises:
+            ContentNoneException: An exception occurred while content is none
+            Error: wraps any SQLAlchemyError raised by the insert
+        """
+
+        if entity is None:
+            raise ContentNoneException(
+                'The added entity content cannot be empty')
+
+        try:
+            self.session.add(entity)
+
+        except SQLAlchemyError as sql_error:
+            # NOTE(review): `raise Error(sql_error) from sql_error` would
+            # preserve the original traceback chain.
+            raise Error(sql_error)
+        else:
+            self.session.commit()
+            return entity
+
+    def batch_add(self, dicts, model):
+        """
+        Description: Bulk-insert dictionaries via a Core table insert and
+        commit on success.
+        Args:
+            dicts:Entity dictionary data to be added
+            model:Solid model class
+        Returns:
+
+        Raises:
+            ContentNoneException: model or data missing
+            TypeError: An exception occurred while incoming type does not meet expectations
+            Error: wraps any SQLAlchemyError raised by the insert
+        """
+
+        if model is None:
+            raise ContentNoneException('solid model must be specified')
+
+        if dicts is None:
+            raise ContentNoneException(
+                'The inserted data content cannot be empty')
+
+        if not isinstance(dicts, list):
+            raise TypeError(
+                'The input for bulk insertion must be a dictionary \
+                    list with the same fields as the current entity')
+        try:
+            self.session.execute(
+                model.__table__.insert(),
+                dicts
+            )
+        except SQLAlchemyError as sql_error:
+            raise Error(sql_error)
+        else:
+            self.session.commit()
diff --git a/packageship/packageship/libs/exception/__init__.py b/packageship/packageship/libs/exception/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..38fdb8ddc0cac24e21c0095fa3da08afe226cbd0
--- /dev/null
+++ b/packageship/packageship/libs/exception/__init__.py
@@ -0,0 +1,12 @@
+#!/usr/bin/python3
+"""
+Customized exception information class
+"""
+from packageship.libs.exception.ext import ContentNoneException
+from packageship.libs.exception.ext import DatabaseRepeatException
+from packageship.libs.exception.ext import DataMergeException
+from packageship.libs.exception.ext import Error
+from packageship.libs.exception.ext import DbnameNoneException
+
+__all__ = ['ContentNoneException',
+ 'DatabaseRepeatException', 'DataMergeException', 'Error', 'DbnameNoneException']
diff --git a/packageship/packageship/libs/exception/ext.py b/packageship/packageship/libs/exception/ext.py
new file mode 100644
index 0000000000000000000000000000000000000000..56b6dcf1c9b538d0315acdbc231b9d5381a55cd5
--- /dev/null
+++ b/packageship/packageship/libs/exception/ext.py
@@ -0,0 +1,64 @@
+#!/usr/bin/python3
+"""
+Description:System exception information
+Class:Error,ContentNoneException,DbnameNoneException,
+ DatabaseRepeatException,DataMergeException
+"""
+
+
+class Error(Exception):
+
+    """
+    Description: Root of the package's exception hierarchy.
+    Attributes:
+        message:Exception information
+    """
+
+    def __init__(self, msg=''):
+        self.message = msg
+        Exception.__init__(self, msg)
+
+    def __repr__(self):
+        return self.message
+
+    # str and repr render identically: just the message text
+    __str__ = __repr__
+
+
+class ContentNoneException(Error):
+    """
+    Description: Raised when required content is empty/None.
+    The message is prefixed with 'No content: '.
+    Attributes:
+    """
+
+    def __init__(self, message):
+        Error.__init__(self, 'No content: %r' % (message,))
+
+
+class DbnameNoneException(ContentNoneException):
+    """
+    Description: Raised when the database name is empty.
+    Inherits the 'No content: ' prefix from ContentNoneException.
+    Attributes:
+    """
+
+    def __init__(self, message):
+        ContentNoneException.__init__(self, '%r' % (message,))
+
+
+class DatabaseRepeatException(Error):
+    """
+    Description: Raised when duplicate databases are detected.
+    Attributes:
+    """
+
+    def __init__(self, message):
+        Error.__init__(self, 'Database repeat: %r' % (message,))
+
+
+class DataMergeException(Error):
+    """
+    Description: Raised when merging/integrating data fails.
+    Attributes:
+    """
+
+    def __init__(self, message):
+        Error.__init__(self, 'DataMerge exception: %r' % (message,))
diff --git a/packageship/packageship/libs/log/__init__.py b/packageship/packageship/libs/log/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..3decd458bb39dd8edcf986def0957fc4fc6853c0
--- /dev/null
+++ b/packageship/packageship/libs/log/__init__.py
@@ -0,0 +1,8 @@
+#!/usr/bin/python3
+"""
+Common methods for logging
+"""
+from packageship.libs.log.loghelper import setup_log
+from packageship.libs.log.loghelper import Log
+
+__all__ = ['setup_log', 'Log']
diff --git a/packageship/packageship/libs/log/loghelper.py b/packageship/packageship/libs/log/loghelper.py
new file mode 100644
index 0000000000000000000000000000000000000000..190e43a753883685397d865e050111332f9d952b
--- /dev/null
+++ b/packageship/packageship/libs/log/loghelper.py
@@ -0,0 +1,113 @@
+#!/usr/bin/python3
+"""
+Logging related
+"""
+import os
+import pathlib
+import logging
+from logging.handlers import RotatingFileHandler
+from packageship.system_config import LOG_FOLDER_PATH
+from packageship.libs.configutils.readconfig import ReadConfig
+
+READCONFIG = ReadConfig()
+
+
+def setup_log(config=None):
+    """
+    Configure logging for the flask application: set the root level (from
+    the given config object or the [LOG] section of package.ini) and attach
+    a size-rotating file handler (300 MB per file, 10 backups).
+    """
+    if config:
+        logging.basicConfig(level=config.LOG_LEVEL)
+    else:
+        _level = READCONFIG.get_config('LOG', 'log_level')
+        if _level is None:
+            _level = 'INFO'
+        logging.basicConfig(level=_level)
+    path = READCONFIG.get_config('LOG', 'log_path')
+    if path is None:
+        log_name = READCONFIG.get_config('LOG', 'log_name')
+        if log_name is None:
+            log_name = 'log_info.log'
+        path = os.path.join(LOG_FOLDER_PATH, log_name)
+    if not os.path.exists(path):
+        try:
+            os.makedirs(os.path.split(path)[0])
+        except FileExistsError:
+            # directory already exists -- just make sure the file does too;
+            # NOTE(review): when makedirs succeeds the file is NOT touched,
+            # presumably relying on RotatingFileHandler to create it.
+            pathlib.Path(path).touch()
+
+    file_log_handler = RotatingFileHandler(
+        path, maxBytes=1024 * 1024 * 300, backupCount=10)
+
+    formatter = logging.Formatter(
+        '%(levelname)s %(filename)s:%(lineno)d %(message)s')
+
+    file_log_handler.setFormatter(formatter)
+
+    # attach to the root logger so all module loggers propagate here
+    logging.getLogger().addHandler(file_log_handler)
+
+
+class Log():
+    """
+    General file-based logger. Resolves the log file path and level from
+    the configuration file (with LOG_FOLDER_PATH/log_info.log as fallback)
+    and exposes a configured logging.Logger via the `logger` property.
+    """
+
+    def __init__(self, name=__name__, path=None):
+        self.__name = name
+        self.__path = path
+        self.__file_handler = None
+        if self.__path is None:
+            # no explicit path: consult config, then fall back to default
+            self.__path = READCONFIG.get_system('log_path')
+            log_name = READCONFIG.get_config('LOG', 'log_name')
+            if log_name is None:
+                log_name = 'log_info.log'
+            if self.__path is None:
+                self.__path = os.path.join(LOG_FOLDER_PATH, log_name)
+        else:
+            # explicit path is treated as a file name under LOG_FOLDER_PATH
+            self.__path = os.path.join(LOG_FOLDER_PATH, path)
+
+        if not os.path.exists(self.__path):
+            try:
+                os.makedirs(os.path.split(self.__path)[0])
+            except FileExistsError:
+                pathlib.Path(self.__path).touch()
+        self.__level = READCONFIG.get_config('LOG', 'log_level')
+        if self.__level is None:
+            self.__level = 'INFO'
+        self.__logger = logging.getLogger(self.__name)
+        self.__logger.setLevel(self.__level)
+
+    def __ini_handler(self):
+        # create a fresh file handler for this access
+        # self.__stream_handler = logging.StreamHandler()
+        self.__file_handler = logging.FileHandler(
+            self.__path, encoding='utf-8')
+
+    def __set_handler(self):
+        # self.__stream_handler.setLevel(level)
+        self.__file_handler.setLevel(self.__level)
+        # self.__logger.addHandler(self.__stream_handler)
+        self.__logger.addHandler(self.__file_handler)
+
+    def __set_formatter(self):
+        formatter = logging.Formatter('%(asctime)s-%(name)s-%(filename)s-[line:%(lineno)d]'
+                                      '-%(levelname)s-[ log details ]: %(message)s',
+                                      datefmt='%a, %d %b %Y %H:%M:%S')
+        # self.__stream_handler.setFormatter(formatter)
+        self.__file_handler.setFormatter(formatter)
+
+    def close_handler(self):
+        """
+        Turn off log processing
+        """
+        # self.__stream_handler.close()
+        self.__file_handler.close()
+
+    @property
+    def logger(self):
+        """
+        Return the configured logger.
+        NOTE(review): every access creates and attaches a NEW FileHandler to
+        the same named logger (and close_handler() closes it immediately),
+        so repeated access accumulates handlers -- verify for duplicate
+        log lines / handler leaks.
+        """
+        self.__ini_handler()
+        self.__set_handler()
+        self.__set_formatter()
+        self.close_handler()
+        return self.__logger
diff --git a/packageship/packageship/manage.py b/packageship/packageship/manage.py
new file mode 100644
index 0000000000000000000000000000000000000000..dc79873eb00f8de9e87c358bb272deca8e0f99f5
--- /dev/null
+++ b/packageship/packageship/manage.py
@@ -0,0 +1,42 @@
+#!/usr/bin/python3
+"""
+Description: Entry for project initialization and service startup
+(the 'write' service, which has data-modification permission).
+"""
+import os
+from packageship.libs.exception import Error
+try:
+    from packageship.system_config import SYS_CONFIG_PATH
+    if not os.path.exists(SYS_CONFIG_PATH):
+        raise FileNotFoundError(
+            'the system configuration file does not exist and the log cannot be started')
+except FileNotFoundError as file_not_found:
+    from packageship.libs.log.loghelper import Log
+    Log(__name__).logger.error(file_not_found)
+    # NOTE(review): raising a bare Exception here discards the original
+    # cause; `raise ... from file_not_found` would keep the chain.
+    raise Exception(
+        'the system configuration file does not exist and the log cannot be started')
+else:
+    from packageship.libs.configutils.readconfig import ReadConfig
+
+from packageship.application import init_app
+try:
+    # 'write' selects the privileged application configuration
+    app = init_app('write')
+except Error as error:
+    # NOTE(review): `error` is unused and the cause is discarded;
+    # `raise Exception(...) from error` would preserve it.
+    raise Exception('Service failed to start')
+else:
+    from packageship.application.app_global import identity_verification
+
+
+@app.before_request
+def before_request():
+    """
+    Description: Global request interception; rejects requests that fail
+    identity verification.
+    """
+    if not identity_verification():
+        # NOTE(review): returns HTTP 200 with a plain message -- presumably
+        # a 401/403 status code is intended; confirm with API consumers.
+        return 'No right to perform operation'
+
+
+if __name__ == "__main__":
+    _readconfig = ReadConfig()
+    port = _readconfig.get_system('write_port')
+    addr = _readconfig.get_system('write_ip_addr')
+    app.run(port=port, host=addr)
diff --git a/packageship/packageship/package.ini b/packageship/packageship/package.ini
new file mode 100644
index 0000000000000000000000000000000000000000..96d09e2e9fd6eb72c2540abd4dfb8cd94ffe9565
--- /dev/null
+++ b/packageship/packageship/package.ini
@@ -0,0 +1,69 @@
+[SYSTEM]
+
+; Configuration file path for data initialization
+init_conf_path=/etc/pkgship/conf.yaml
+
+; Whether the system is in debug mode
+debug=false
+
+; Where to store data files when using sqlite database
+; data_base_path=/var/run/pkgship_dbs
+
+; Port managed by the administrator, with write permission
+
+write_port=8080
+
+; Ordinary user query port, only the right to query data, no permission to write data
+
+query_port=8090
+
+; IP address path with write permission
+
+write_ip_addr=127.0.0.1
+
+; IP address path with permission to query data
+
+query_ip_addr=127.0.0.1
+
+
+[DATABASE]
+
+; Basic configuration of sqlalchemy to connect to the database
+
+;Username of the database
+user_name=
+
+;connection password
+password=
+
+;host address
+host=
+
+; Port number for the database connection
+port=
+
+;Connected data name
+database=
+
+; dbtype:The type of database is mainly divided into mysql and sqlite
+dbtype=sqlite
+
+[LOG]
+
+; Custom log storage path
+; log_path=/var/run/pkgship
+
+; Logging level
+; The log level option value can only be as follows
+; INFO DEBUG WARNING ERROR CRITICAL
+log_level=INFO
+
+; logging name
+log_name=log_info.log
+
+[UWSGI]
+
+daemonize=/var/log/uwsgi.log
+
+buffer-size=65536
+
diff --git a/packageship/packageship/pkgship.py b/packageship/packageship/pkgship.py
new file mode 100644
index 0000000000000000000000000000000000000000..95b16486a5032ecc0fdb90eef5080afea31da584
--- /dev/null
+++ b/packageship/packageship/pkgship.py
@@ -0,0 +1,1214 @@
+#!/usr/bin/python3
+"""
+Description: Entry method for custom commands
+Class: BaseCommand,PkgshipCommand,RemoveCommand,InitDatabaseCommand,UpdateDatabaseCommand,
+ AllPackageCommand,UpdatePackageCommand,BuildDepCommand,InstallDepCommand,
+ SelfBuildCommand,BeDependCommand,SingleCommand
+"""
+import os
+import json
+
+try:
+ import argparse
+ import requests
+ from requests.exceptions import ConnectionError as ConnErr
+ from requests.exceptions import HTTPError
+ import prettytable
+ from prettytable import PrettyTable
+ from packageship.libs.log import Log
+ from packageship.libs.exception import Error
+ from packageship.libs.configutils.readconfig import ReadConfig
+
+ LOGGER = Log(__name__)
+except ImportError as import_error:
+ print('Error importing related dependencies, \
+ please check if related dependencies are installed')
+else:
+ from packageship.application.apps.package.function.constants import ResponseCode
+ from packageship.application.apps.package.function.constants import ListNode
+
+DB_NAME = 0
+
+
+def main():
+    """
+    Description: Command line tool entry, register related commands
+    Args:
+
+    Returns:
+
+    Raises:
+        Error: An error occurred while executing the command
+    """
+    try:
+        packship_cmd = PkgshipCommand()
+        packship_cmd.parser_args()
+    except Error as error:
+        # log the detail, show the user a short generic message
+        LOGGER.logger.error(error)
+        print('command error')
+
+
+class BaseCommand():
+    """
+    Description: Basic attributes used for command invocation
+    Attributes:
+        write_host: Can write operation single host address
+        read_host: Can read the host address of the operation
+        headers: Send HTTP request header information
+    """
+
+    def __init__(self):
+        """
+        Description: Class instance initialization; builds the read and
+        write base URLs from the configuration file.
+
+        """
+        self._read_config = ReadConfig()
+        self.write_host = None
+        self.read_host = None
+        self.__http = 'http://'
+        self.headers = {"Content-Type": "application/json",
+                        "Accept-Language": "zh-CN,zh;q=0.9"}
+
+        self.load_read_host()
+        self.load_write_host()
+
+    def load_write_host(self):
+        """
+        Description: Build the base URL of the write-permission service.
+        NOTE(review): 'wirte_port' is a typo for 'write_port' (local name
+        only, behavior unaffected); if either config value is missing the
+        string concatenation raises TypeError -- confirm intended handling.
+        Args:
+
+        Returns:
+        Raises:
+
+        """
+        wirte_port = self._read_config.get_system('write_port')
+
+        write_ip = self._read_config.get_system('write_ip_addr')
+
+        _write_host = self.__http + write_ip + ":" + wirte_port
+
+        setattr(self, 'write_host', _write_host)
+
+    def load_read_host(self):
+        """
+        Description: Build the base URL of the read-only query service.
+        Args:
+
+        Returns:
+        Raises:
+
+        """
+        read_port = self._read_config.get_system('query_port')
+
+        read_ip = self._read_config.get_system('query_ip_addr')
+
+        _read_host = self.__http + read_ip + ":" + read_port
+
+        setattr(self, 'read_host', _read_host)
+
+
+class PkgshipCommand(BaseCommand):
+ """
+ Description: PKG package command line
+ Attributes:
+ statistics: Summarized data table
+ table: Output table
+ columns: Calculate the width of the terminal dynamically
+ params: Command parameters
+ """
+ parser = argparse.ArgumentParser(
+ description='package related dependency management')
+ subparsers = parser.add_subparsers(
+ help='package related dependency management')
+
+    def __init__(self):
+        """
+        Description: Class instance initialization; prepares the output
+        table and measures the terminal width for separator lines.
+        """
+        super(PkgshipCommand, self).__init__()
+        self.statistics = dict()
+        self.table = PkgshipCommand.create_table(
+            ['package name', 'src name', 'version', 'database'])
+
+        # Calculate the total width of the current terminal.
+        # NOTE(review): `stty size` fails when stdout is not a tty
+        # (pipes, cron) -- confirm CLI is only run interactively.
+        self.columns = int(os.popen('stty size', 'r').read().split()[1])
+        self.params = []
+
+    @staticmethod
+    def register_command(command):
+        """
+        Description: Register one sub-command by delegating to its
+        own register() method.
+
+        Args:
+            command: Related commands
+
+        Returns:
+        Raises:
+
+        """
+        command.register()
+
+    def register(self):
+        """
+        Description: Inject this command's parameters (self.params tuples of
+        (name, type-string, help, default)) into its argparse sub-parser.
+        `self.parse` is expected to be set by the subclass (hence the
+        pylint E1101 suppression).
+        Args:
+
+        Returns:
+
+        Raises:
+
+        """
+        for command_params in self.params:
+            self.parse.add_argument( # pylint: disable=E1101
+                command_params[0],
+                # NOTE(review): eval() on the type string is flagged W0123;
+                # params are internal literals ('str', 'int'), not user input.
+                type=eval(command_params[1]), # pylint: disable=W0123
+                help=command_params[2],
+                default=command_params[3])
+
+    @classmethod
+    def parser_args(cls):
+        """
+        Description: Register every sub-command, then parse argv and
+        dispatch to the selected command's handler (args.func).
+        Args:
+
+        Returns:
+
+        Raises:
+            Error: An error occurred during command parsing
+        """
+        cls.register_command(RemoveCommand())
+        cls.register_command(InitDatabaseCommand())
+        cls.register_command(UpdateDatabaseCommand())
+        cls.register_command(AllPackageCommand())
+        cls.register_command(UpdatePackageCommand())
+        cls.register_command(BuildDepCommand())
+        cls.register_command(InstallDepCommand())
+        cls.register_command(SelfBuildCommand())
+        cls.register_command(BeDependCommand())
+        cls.register_command(SingleCommand())
+        try:
+            args = cls.parser.parse_args()
+            args.func(args)
+        except Error:
+            print('command error')
+
+    def parse_package(self, response_data):
+        """
+        Description: Parse a package-list response and fill the output table;
+        on a non-success code, print the server's message instead.
+        NOTE(review): the row order (sourceName, dbname, version, license)
+        does not match the table headers created in __init__
+        ('package name', 'src name', 'version', 'database') -- confirm
+        which ordering is intended.
+        Args:
+            response_data: http request response content
+        Returns:
+
+        Raises:
+
+        """
+        if response_data.get('code') == ResponseCode.SUCCESS:
+            package_all = response_data.get('data')
+            if isinstance(package_all, list):
+                for package_item in package_all:
+                    row_data = [package_item.get('sourceName'), package_item.get(
+                        'dbname'), package_item.get('version'), package_item.get('license')]
+                    self.table.add_row(row_data)
+        else:
+            print(response_data.get('msg'))
+
+    def parse_depend_package(self, response_data):
+        """
+        Description: Parse a dependency-query response: add each dependency
+        row to the output table and count distinct binary/source packages
+        per database in self.statistics.
+        Args:
+            response_data: http request response content
+        Returns:
+            Summarized data table
+        Raises:
+
+        """
+        bin_package_count = 0
+        src_package_count = 0
+        if response_data.get('code') == ResponseCode.SUCCESS:
+            package_all = response_data.get('data')
+            if isinstance(package_all, dict):
+
+                for bin_package, package_depend in package_all.items():
+                    # distinguish whether the current data is the data of the root node
+                    if isinstance(package_depend, list) and \
+                            package_depend[ListNode.SOURCE_NAME] != 'source':
+
+                        row_data = [bin_package,
+                                    package_depend[ListNode.SOURCE_NAME],
+                                    package_depend[ListNode.VERSION],
+                                    package_depend[ListNode.DBNAME]]
+                        # Whether the database exists
+                        if package_depend[ListNode.DBNAME] not in self.statistics:
+                            self.statistics[package_depend[ListNode.DBNAME]] = {
+                                'binary': [],
+                                'source': []
+                            }
+                        # Determine whether the current binary package exists
+                        if bin_package not in \
+                                self.statistics[package_depend[ListNode.DBNAME]]['binary']:
+                            self.statistics[package_depend[ListNode.DBNAME]
+                                            ]['binary'].append(bin_package)
+                            bin_package_count += 1
+                        # Determine whether the source package exists
+                        if package_depend[ListNode.SOURCE_NAME] not in \
+                                self.statistics[package_depend[ListNode.DBNAME]]['source']:
+                            self.statistics[package_depend[ListNode.DBNAME]]['source'].append(
+                                package_depend[ListNode.SOURCE_NAME])
+                            src_package_count += 1
+
+                        if hasattr(self, 'table') and self.table:
+                            self.table.add_row(row_data)
+        else:
+            LOGGER.logger.error(response_data.get('msg'))
+            print(response_data.get('msg'))
+        # build the per-database summary table from the collected counts
+        statistics_table = self.statistics_table(
+            bin_package_count, src_package_count)
+        return statistics_table
+
+ def print_(self, content=None, character='=', dividing_line=False):
+ """
+ Description: Output formatted characters
+ Args:
+ content: Output content
+ character: Output separator content
+ dividing_line: Whether to show the separator
+ Returns:
+
+ Raises:
+
+ """
+ # Get the current width of the console
+
+ if dividing_line:
+ print(character * self.columns)
+ if content:
+ print(content)
+ if dividing_line:
+ print(character * self.columns)
+
@staticmethod
def create_table(title):
    """
    Description: Create an ASCII table for terminal display
    Args:
        title: list of column header names
    Returns:
        A PrettyTable instance, left-aligned, framed with '='
        characters and without inner vertical rules
    Raises:

    """
    table = PrettyTable(title)
    table.align = 'l'
    table.horizontal_char = '='
    table.junction_char = '='
    # draw only the outer frame: no vertical rules, no row separators
    table.vrules = prettytable.NONE
    table.hrules = prettytable.FRAME
    return table
+
def statistics_table(self, bin_package_count, src_package_count):
    """
    Description: Build the summary table of binary/source package counts
    Args:
        bin_package_count: Number of binary packages
        src_package_count: Number of source packages
    Returns:
        Summarized data table
    Raises:

    """
    summary = self.create_table(['', 'binary', 'source'])
    summary.add_row(
        ['self depend sum', bin_package_count, src_package_count])
    # one row per database, with its deduplicated package counts
    for database, counted in self.statistics.items():
        summary.add_row([database,
                         len(counted.get('binary')),
                         len(counted.get('source'))])
    return summary
+
@staticmethod
def http_error(response):
    """
    Description: Log and report an http error response
    Args:
        response: Response content of http request
    Returns:

    Raises:
        HTTPError: http request error (caught and reported here)
    """
    try:
        # raise_for_status() returns None on success and raises
        # HTTPError for 4xx/5xx; the old code printed its return value,
        # emitting a spurious 'None' line on success
        response.raise_for_status()
    except HTTPError as http_error:
        LOGGER.logger.error(http_error)
        print('Request failed')
        print(http_error)
+
+
class RemoveCommand(PkgshipCommand):
    """
    Description: Delete database command
    Attributes:
        parse: Command line parsing example
        params: Command line parameters
    """

    def __init__(self):
        """
        Description: Class instance initialization
        """
        super(RemoveCommand, self).__init__()
        self.parse = PkgshipCommand.subparsers.add_parser(
            'rm', help='delete database operation')
        self.params = [('db', 'str', 'name of the database operated', '')]

    def register(self):
        """
        Description: Inject command line arguments and bind the handler
        Args:

        Returns:

        Raises:

        """
        super(RemoveCommand, self).register()
        self.parse.set_defaults(func=self.do_command)

    def do_command(self, params):
        """
        Description: Send the delete request for the given database
        Args:
            params: Command line parameters
        Returns:

        Raises:
            ConnErr: Request connection error
        """
        if params.db is None:
            print('No database specified for deletion')
            return
        _url = self.write_host + '/repodatas?dbName={}'.format(params.db)
        try:
            response = requests.delete(_url)
        except ConnErr as conn_err:
            LOGGER.logger.error(conn_err)
            print(str(conn_err))
            return
        if response.status_code != 200:
            self.http_error(response)
            return
        data = json.loads(response.text)
        if data.get('code') == ResponseCode.SUCCESS:
            print('delete success')
        else:
            LOGGER.logger.error(data.get('msg'))
            print(data.get('msg'))
+
+
class InitDatabaseCommand(PkgshipCommand):
    """
    Description: Initialize database command
    Attributes:
        parse: Command line parsing example
        params: Command line parameters
    """

    def __init__(self):
        """
        Description: Class instance initialization
        """
        super(InitDatabaseCommand, self).__init__()
        self.parse = PkgshipCommand.subparsers.add_parser(
            'init', help='initialization of the database')
        self.params = [
            ('-filepath', 'str', 'name of the database operated', '')]

    def register(self):
        """
        Description: Inject command line arguments and bind the handler
        Args:

        Returns:

        Raises:

        """
        super(InitDatabaseCommand, self).register()
        self.parse.set_defaults(func=self.do_command)

    def do_command(self, params):
        """
        Description: Post the configuration file path to the init service
        Args:
            params: Command line parameters
        Returns:

        Raises:
            ConnErr: Request connection error
        """
        file_path = params.filepath
        try:
            response = requests.post(self.write_host +
                                     '/initsystem', data=json.dumps({'configfile': file_path}),
                                     headers=self.headers)
        except ConnErr as conn_error:
            LOGGER.logger.error(conn_error)
            print(str(conn_error))
            return
        if response.status_code != 200:
            self.http_error(response)
            return
        response_data = json.loads(response.text)
        if response_data.get('code') == ResponseCode.SUCCESS:
            print('Database initialization success ')
        else:
            LOGGER.logger.error(response_data.get('msg'))
            print(response_data.get('msg'))
+
+
class UpdateDatabaseCommand(PkgshipCommand):
    """
    Description: update database command
    Attributes:
        parse: Command line parsing example
        params: Command line parameters
    """

    def __init__(self):
        """
        Description: Class instance initialization
        """
        super(UpdateDatabaseCommand, self).__init__()
        self.parse = PkgshipCommand.subparsers.add_parser(
            'updatedb', help='database update operation')
        self.params = [('db', 'str', 'name of the database operated', '')]

    def register(self):
        """
        Description: Inject command line arguments and bind the handler
        Args:

        Returns:

        Raises:

        """
        super(UpdateDatabaseCommand, self).register()
        self.parse.set_defaults(func=self.do_command)

    def do_command(self, params):
        """
        Description: Placeholder handler; the database update operation
            is not implemented yet, so invoking it is a deliberate no-op
        Args:
            params: Command line parameters
        Returns:

        Raises:

        """
+
+
class AllPackageCommand(PkgshipCommand):
    """
    Description: get all package commands
    Attributes:
        parse: Command line parsing example
        params: Command line parameters
        table: Output table
    """

    def __init__(self):
        """
        Description: Class instance initialization
        """
        super(AllPackageCommand, self).__init__()
        self.parse = PkgshipCommand.subparsers.add_parser(
            'list', help='get all package data')
        self.table = self.create_table(
            ['packagenames', 'database', 'version', 'license'])
        self.params = [('-db', 'str', 'name of the database operated', '')]

    def register(self):
        """
        Description: Inject command line arguments and bind the handler
        Args:

        Returns:

        Raises:

        """
        super(AllPackageCommand, self).register()
        self.parse.set_defaults(func=self.do_command)

    def do_command(self, params):
        """
        Description: Fetch and print every package of the given database
        Args:
            params: Command line parameters
        Returns:

        Raises:
            ConnErr: Request connection error
        """
        _url = self.read_host + '/packages?dbName={}'.format(params.db)
        try:
            response = requests.get(_url)
        except ConnErr as conn_error:
            LOGGER.logger.error(conn_error)
            print(str(conn_error))
            return
        if response.status_code != 200:
            self.http_error(response)
            return
        self.parse_package(json.loads(response.text))
        if self.table:
            print(self.table)
+
+
class UpdatePackageCommand(PkgshipCommand):
    """
    Description: update package data
    Attributes:
        parse: Command line parsing example
        params: Command line parameters
    """

    def __init__(self):
        """
        Description: Class instance initialization
        """
        super(UpdatePackageCommand, self).__init__()
        self.parse = PkgshipCommand.subparsers.add_parser(
            'updatepkg', help='update package data')
        self.params = [
            ('packagename', 'str', 'Source package name', ''),
            ('db', 'str', 'name of the database operated', ''),
            ('-m', 'str', 'Maintainers name', ''),
            ('-l', 'int', 'database priority', 1)
        ]

    def register(self):
        """
        Description: Inject command line arguments and bind the handler
        Args:

        Returns:

        Raises:

        """
        super(UpdatePackageCommand, self).register()
        self.parse.set_defaults(func=self.do_command)

    def do_command(self, params):
        """
        Description: Send the maintainer update for a source package
        Args:
            params: Command line parameters
        Returns:

        Raises:
            ConnErr: Request connection error
        """
        _url = self.write_host + '/packages/findByPackName'
        payload = {'sourceName': params.packagename,
                   'dbName': params.db,
                   'maintainer': params.m,
                   'maintainlevel': params.l}
        try:
            response = requests.put(
                _url, data=json.dumps(payload), headers=self.headers)
        except ConnErr as conn_error:
            LOGGER.logger.error(conn_error)
            print(str(conn_error))
            return
        if response.status_code != 200:
            self.http_error(response)
            return
        data = json.loads(response.text)
        if data.get('code') == ResponseCode.SUCCESS:
            print('update completed')
        else:
            LOGGER.logger.error(data.get('msg'))
            print(data.get('msg'))
+
+
class BuildDepCommand(PkgshipCommand):
    """
    Description: query the compilation dependencies of the specified package
    Attributes:
        parse: Command line parsing example
        params: Command line parameters
        collection: Is there a collection parameter
        collection_params: Command line collection parameters
    """

    def __init__(self):
        """
        Description: Class instance initialization
        """
        super(BuildDepCommand, self).__init__()
        self.parse = PkgshipCommand.subparsers.add_parser(
            'builddep', help='query the compilation dependencies of the specified package')
        self.collection = True
        self.params = [
            ('packagename', 'str', 'source package name', ''),
        ]
        self.collection_params = [
            ('-dbs', 'Operational database collection')
        ]

    def register(self):
        """
        Description: Inject positional and collection arguments, then
            bind the handler
        Args:

        Returns:

        Raises:

        """
        super(BuildDepCommand, self).register()
        # register the variadic database-list option(s)
        for flag, help_text in self.collection_params:
            self.parse.add_argument(
                flag, nargs='*', default=None, help=help_text)
        self.parse.set_defaults(func=self.do_command)

    def do_command(self, params):
        """
        Description: Query build dependencies and print the result tables
        Args:
            params: Command line parameters
        Returns:

        Raises:
            ConnErr: Request connection error
        """
        _url = self.read_host + '/packages/findBuildDepend'
        payload = {'sourceName': params.packagename,
                   'db_list': params.dbs}
        try:
            response = requests.post(
                _url, data=json.dumps(payload), headers=self.headers)
        except ConnErr as conn_error:
            LOGGER.logger.error(conn_error)
            print(str(conn_error))
            return
        if response.status_code != 200:
            self.http_error(response)
            return
        statistics_table = self.parse_depend_package(
            json.loads(response.text))
        if self.table.rowcount:
            self.print_('query {} buildDepend result display:'.format(
                params.packagename))
            print(self.table)
            self.print_('statistics')
            print(statistics_table)
+
+
class InstallDepCommand(PkgshipCommand):
    """
    Description: query the installation dependencies of the specified package
    Attributes:
        parse: Command line parsing example
        params: Command line parameters
        collection: Is there a collection parameter
        collection_params: Command line collection parameters
    """

    def __init__(self):
        """
        Description: Class instance initialization
        """
        super(InstallDepCommand, self).__init__()

        self.parse = PkgshipCommand.subparsers.add_parser(
            'installdep', help='query the installation dependencies of the specified package')
        self.collection = True
        self.params = [
            ('packagename', 'str', 'source package name', ''),
        ]
        self.collection_params = [
            ('-dbs', 'Operational database collection')
        ]

    def register(self):
        """
        Description: Command line parameter injection; adds the variadic
            -dbs option on top of the base-class arguments
        Args:

        Returns:

        Raises:

        """
        super(InstallDepCommand, self).register()
        # collection parameters

        for cmd_params in self.collection_params:
            self.parse.add_argument(
                cmd_params[0], nargs='*', default=None, help=cmd_params[1])
        self.parse.set_defaults(func=self.do_command)

    def parse_package(self, response_data):
        """
        Description: Parse install-dependency results into the display
            table and accumulate per-database statistics
        Args:
            response_data: http response data
        Returns:
            Summarized data table
        Raises:

        """
        # reset statistics accumulated by any previous query so the
        # summary reflects only this response
        if getattr(self, 'statistics'):
            setattr(self, 'statistics', dict())
        bin_package_count = 0
        src_package_count = 0
        if response_data.get('code') == ResponseCode.SUCCESS:
            package_all = response_data.get('data')
            if isinstance(package_all, dict):
                for bin_package, package_depend in package_all.items():
                    # distinguish whether the current data is the data of the root node
                    # (assumes package_depend[-1][0][0] carries the 'root'
                    #  marker set by the server — TODO confirm)
                    if isinstance(package_depend, list) and package_depend[-1][0][0] != 'root':

                        row_data = [bin_package,
                                    package_depend[ListNode.SOURCE_NAME],
                                    package_depend[ListNode.VERSION],
                                    package_depend[ListNode.DBNAME]]
                        # first time this database is seen: create its buckets
                        if package_depend[ListNode.DBNAME] not in self.statistics:
                            self.statistics[package_depend[ListNode.DBNAME]] = {
                                'binary': [],
                                'source': []
                            }
                        # count each binary package only once per database
                        if bin_package not in \
                                self.statistics[package_depend[ListNode.DBNAME]]['binary']:
                            self.statistics[package_depend[ListNode.DBNAME]
                                            ]['binary'].append(bin_package)
                            bin_package_count += 1
                        # count each source package only once per database
                        if package_depend[ListNode.SOURCE_NAME] not in \
                                self.statistics[package_depend[ListNode.DBNAME]]['source']:
                            self.statistics[package_depend[ListNode.DBNAME]]['source'].append(
                                package_depend[ListNode.SOURCE_NAME])
                            src_package_count += 1

                        self.table.add_row(row_data)
        else:
            LOGGER.logger.error(response_data.get('msg'))
            print(response_data.get('msg'))
        # Display of aggregated data
        statistics_table = self.statistics_table(
            bin_package_count, src_package_count)

        return statistics_table

    def do_command(self, params):
        """
        Description: Query install dependencies and print the result tables
        Args:
            params: Command line parameters
        Returns:

        Raises:
            ConnErr: requests connection error
        """
        _url = self.read_host + '/packages/findInstallDepend'
        try:
            response = requests.post(_url, data=json.dumps(
                {
                    'binaryName': params.packagename,
                    'db_list': params.dbs
                }, ensure_ascii=True), headers=self.headers)
        except ConnErr as conn_error:
            LOGGER.logger.error(conn_error)
            print(str(conn_error))
        else:
            if response.status_code == 200:
                statistics_table = self.parse_package(
                    json.loads(response.text))
                if getattr(self.table, 'rowcount'):
                    self.print_('query{} InstallDepend result display:'.format(
                        params.packagename))
                    print(self.table)
                    self.print_('statistics')
                    print(statistics_table)
            else:
                self.http_error(response)
+
+
class SelfBuildCommand(PkgshipCommand):
    """
    Description: self-compiled dependency query
    Attributes:
        parse: Command line parsing example
        params: Command line parameters
        collection: Is there a collection parameter
        collection_params: Command line collection parameters
        bin_package_table: display table for binary-package results
        src_package_table: display table for source-package results
    """

    def __init__(self):
        """
        Description: Class instance initialization
        """
        super(SelfBuildCommand, self).__init__()

        self.parse = PkgshipCommand.subparsers.add_parser(
            'selfbuild', help='query the self-compiled dependencies of the specified package')
        self.collection = True
        self.bin_package_table = self.create_table(
            ['package name', 'src name', 'version', 'database'])
        self.src_package_table = self.create_table([
            'src name', 'version', 'database'])
        self.params = [
            ('packagename', 'str', 'source package name', ''),
            ('-t', 'str', 'Source of data query', 'binary'),
            ('-w', 'str', 'whether to include other subpackages of binary', 0),
            ('-s', 'str', 'whether it is self-compiled', 0)
        ]

        self.collection_params = [
            ('-dbs', 'Operational database collection')
        ]

    def register(self):
        """
        Description: Command line parameter injection; adds the variadic
            -dbs option on top of the base-class arguments
        Args:

        Returns:

        Raises:

        """
        super(SelfBuildCommand, self).register()
        # collection parameters

        for cmd_params in self.collection_params:
            self.parse.add_argument(
                cmd_params[0], nargs='*', default=None, help=cmd_params[1])
        self.parse.set_defaults(func=self.do_command)

    def _parse_bin_package(self, bin_packages):
        """
        Description: Parsing binary result data into the binary table,
            deduplicating per database via self.statistics
        Args:
            bin_packages: Binary package data

        Returns:
            Number of newly counted binary packages
        Raises:

        """
        bin_package_count = 0
        if bin_packages:
            for bin_package, package_depend in bin_packages.items():
                # distinguish whether the current data is the data of the root node
                # (assumes package_depend[-1][0][0] carries the 'root'
                #  marker set by the server — TODO confirm)
                if isinstance(package_depend, list) and package_depend[-1][0][0] != 'root':

                    row_data = [bin_package, package_depend[ListNode.SOURCE_NAME],
                                package_depend[ListNode.VERSION], package_depend[ListNode.DBNAME]]

                    # first time this database is seen: create its buckets
                    if package_depend[ListNode.DBNAME] not in self.statistics:
                        self.statistics[package_depend[ListNode.DBNAME]] = {
                            'binary': [],
                            'source': []
                        }
                    # count each binary package only once per database
                    if bin_package not in \
                            self.statistics[package_depend[ListNode.DBNAME]]['binary']:
                        self.statistics[package_depend[ListNode.DBNAME]
                                        ]['binary'].append(bin_package)
                        bin_package_count += 1
                    self.bin_package_table.add_row(row_data)

        return bin_package_count

    def _parse_src_package(self, src_packages):
        """
        Description: Source package data analysis; fills the source table
            and deduplicates per database via self.statistics
        Args:
            src_packages: Source package

        Returns:
            Number of newly counted source packages
        Raises:

        """
        src_package_count = 0
        if src_packages:
            for src_package, package_depend in src_packages.items():
                # distinguish whether the current data is the data of the root node
                if isinstance(package_depend, list):

                    # NOTE(review): this branch indexes with DB_NAME while the
                    # binary branch uses ListNode.DBNAME — confirm both
                    # constants refer to the same list position
                    row_data = [src_package, package_depend[ListNode.VERSION],
                                package_depend[DB_NAME]]
                    # first time this database is seen: create its buckets
                    if package_depend[DB_NAME] not in self.statistics:
                        self.statistics[package_depend[DB_NAME]] = {
                            'binary': [],
                            'source': []
                        }
                    # count each source package only once per database
                    if src_package not in self.statistics[package_depend[DB_NAME]]['source']:
                        self.statistics[package_depend[DB_NAME]
                                        ]['source'].append(src_package)
                        src_package_count += 1

                    self.src_package_table.add_row(row_data)

        return src_package_count

    def parse_package(self, response_data):
        """
        Description: Parse the self-build query response: binary and
            source sections are handled by the two private helpers
        Args:
            response_data: http response data
        Returns:
            Summarized data table
        Raises:

        """
        # reset statistics accumulated by any previous query
        if getattr(self, 'statistics'):
            setattr(self, 'statistics', dict())
        bin_package_count = 0
        src_package_count = 0

        if response_data.get('code') == ResponseCode.SUCCESS:
            package_all = response_data.get('data')
            if isinstance(package_all, dict):
                # Parsing binary result data
                bin_package_count = self._parse_bin_package(
                    package_all.get('binary_dicts'))

                # Source package data analysis
                src_package_count = self._parse_src_package(
                    package_all.get('source_dicts'))
        else:
            LOGGER.logger.error(response_data.get('msg'))
            print(response_data.get('msg'))
        # Display of aggregated data
        statistics_table = self.statistics_table(
            bin_package_count, src_package_count)
        return statistics_table

    def do_command(self, params):
        """
        Description: Query self-build dependencies and print the binary,
            source and statistics tables
        Args:
            params: commands lines params
        Returns:

        Raises:
            ConnErr: requests connection error
        """
        _url = self.read_host + '/packages/findSelfDepend'
        try:
            response = requests.post(_url,
                                     data=json.dumps({
                                         'packagename': params.packagename,
                                         'db_list': params.dbs,
                                         'packtype': params.t,
                                         'selfbuild': str(params.s),
                                         'withsubpack': str(params.w)}),
                                     headers=self.headers)
        except ConnErr as conn_error:
            LOGGER.logger.error(conn_error)
            print(str(conn_error))
        else:
            if response.status_code == 200:
                statistics_table = self.parse_package(
                    json.loads(response.text))
                if getattr(self.bin_package_table, 'rowcount') \
                        and getattr(self.src_package_table, 'rowcount'):
                    self.print_('query {} selfDepend result display :'.format(
                        params.packagename))
                    print(self.bin_package_table)
                    self.print_(character='=')
                    print(self.src_package_table)
                    self.print_('statistics')
                    print(statistics_table)
            else:
                self.http_error(response)
+
+
class BeDependCommand(PkgshipCommand):
    """
    Description: dependent query
    Attributes:
        parse: Command line parsing example
        params: Command line parameters
    """

    def __init__(self):
        """
        Description: Class instance initialization
        """
        super(BeDependCommand, self).__init__()
        self.parse = PkgshipCommand.subparsers.add_parser(
            'bedepend', help='dependency query for the specified package')
        self.params = [
            ('packagename', 'str', 'source package name', ''),
            ('db', 'str', 'name of the database operated', ''),
            ('-w', 'str', 'whether to include other subpackages of binary', 0),
        ]

    def register(self):
        """
        Description: Inject command line arguments and bind the handler
        Args:

        Returns:

        Raises:

        """
        super(BeDependCommand, self).register()
        self.parse.set_defaults(func=self.do_command)

    def do_command(self, params):
        """
        Description: Query reverse dependencies and print the result tables
        Args:
            params: command lines params
        Returns:

        Raises:
            ConnErr: requests connection error
        """
        _url = self.read_host + '/packages/findBeDepend'
        payload = {'packagename': params.packagename,
                   'dbname': params.db,
                   'withsubpack': str(params.w)}
        try:
            response = requests.post(
                _url, data=json.dumps(payload), headers=self.headers)
        except ConnErr as conn_error:
            LOGGER.logger.error(conn_error)
            print(str(conn_error))
            return
        if response.status_code != 200:
            self.http_error(response)
            return
        statistics_table = self.parse_depend_package(
            json.loads(response.text))
        if self.table.rowcount:
            self.print_('query {} beDepend result display :'.format(
                params.packagename))
            print(self.table)
            self.print_('statistics')
            print(statistics_table)
+
+
class SingleCommand(PkgshipCommand):
    """
    Description: query single package information
    Attributes:
        parse: Command line parsing example
        params: Command line parameters
    """

    def __init__(self):
        """
        Description: Class instance initialization
        """
        super(SingleCommand, self).__init__()
        self.parse = PkgshipCommand.subparsers.add_parser(
            'single', help='query the information of a single package')
        self.params = [
            ('packagename', 'str', 'source package name', ''),
            ('-db', 'str', 'name of the database operated', '')
        ]

    def register(self):
        """
        Description: Inject command line arguments and bind the handler
        Args:

        Returns:

        Raises:

        """
        super(SingleCommand, self).register()
        self.parse.set_defaults(func=self.do_command)

    def parse_package(self, response_data):
        """
        Description: Print the selected fields of each returned package,
            one 'name : value' line per field, separated per package
        Args:
            response_data: http response data
        Returns:

        Raises:

        """
        show_field_name = ('sourceName', 'dbname', 'version',
                           'license', 'maintainer', 'maintainlevel')
        print_contents = []
        if response_data.get('code') != ResponseCode.SUCCESS:
            print(response_data.get('msg'))
        else:
            package_all = response_data.get('data')
            if isinstance(package_all, list):
                for package_item in package_all:
                    for key, value in package_item.items():
                        # render missing values as empty strings
                        if value is None:
                            value = ''
                        if key in show_field_name:
                            print_contents.append('%-15s:%s' % (key, value))
                    # full-width rule between packages
                    print_contents.append('=' * self.columns)
        for content in print_contents:
            self.print_(content=content)

    def do_command(self, params):
        """
        Description: Query a single package and print its details
        Args:
            params: command lines params
        Returns:

        Raises:
            ConnErr: requests connection error
        """
        _url = '{}/packages/findByPackName?dbName={}&sourceName={}'.format(
            self.read_host, params.db, params.packagename)
        try:
            response = requests.get(_url)
        except ConnErr as conn_error:
            LOGGER.logger.error(conn_error)
            print(str(conn_error))
            return
        if response.status_code != 200:
            self.http_error(response)
            return
        self.parse_package(json.loads(response.text))
+
+
# Script entry point: delegate to main(), which registers all
# sub-commands and dispatches the one given on the command line
if __name__ == '__main__':
    main()
diff --git a/packageship/packageship/pkgshipd b/packageship/packageship/pkgshipd
new file mode 100755
index 0000000000000000000000000000000000000000..1425581fd6c7a39ee4aacf25b78c62a75fd9e5ab
--- /dev/null
+++ b/packageship/packageship/pkgshipd
@@ -0,0 +1,151 @@
#!/bin/bash
# pkgshipd: control script for the pkgship uwsgi services (manage/selfpkg)
SYS_PATH=/etc/pkgship
OUT_PATH=/var/run/pkgship_uwsgi
# create the runtime directory for generated .ini and .pid files
if [ ! -d "$OUT_PATH" ]; then
    mkdir $OUT_PATH
fi

# the main configuration file is mandatory
if [ ! -f "$SYS_PATH/package.ini" ]; then
    echo "[ERROR] $SYS_PATH/package.ini does not exist!!!"
    exit 0
fi
+
# Print the value of config key $2 from package.ini, whitespace stripped.
# $1 names the service section but is currently unused: keys are matched
# file-wide by prefix.
function get_config(){
    # grep the key line directly instead of the former useless 'cat | grep'
    grep -E "^$2" "$SYS_PATH/package.ini" | sed s/[[:space:]]//g | awk 'BEGIN{FS="="}{print $2}'
}
+
# Generate the uwsgi .ini file(s) for the requested service. $service is a
# global set by the caller ("manage", "selfpkg" or "all"); values are read
# from package.ini via get_config and written under $OUT_PATH.
function create_config_file(){
    echo "[INFO] config type is: $service"
    daemonize=$(get_config "$service" "daemonize")
    buffer_size=$(get_config "$service" "buffer-size")
    # locate the installed package directory; intentionally a global,
    # reused later by start_service
    wsgi_file_path=$(find /usr/lib/ -name "packageship")
    if [ $service = "manage" -o $service = "all" ];then
        write_port=$(get_config "$service" "write_port")
        write_ip_addr=$(get_config "$service" "write_ip_addr")
        if [[ -z "$daemonize" ]] || [[ -z "$buffer_size" ]] || [[ -z "$write_ip_addr" ]] || [[ -z "$write_port" ]];then
            echo "[ERROR] CAN NOT find all config name in: $SYS_PATH/package.ini, Please check the file"
            echo "[ERROR] The following config name is needed: daemonize, buffer_size, write_port and write_ip_addr"
            exit 1
        fi
        if [ -z "$wsgi_file_path" ];then
            echo "[ERROR] CAN NOT find the wsgi file path under: /usr/lib/"
            exit 1
        fi
        echo "[INFO] manage.ini is saved to $OUT_PATH/manage.ini"
        echo "[uwsgi]
http=$write_ip_addr:$write_port
module=packageship.manage
wsgi-file=$wsgi_file_path/manage.py
callable=app
buffer-size=$buffer_size
pidfile=$OUT_PATH/manage.pid
daemonize=$daemonize" > $OUT_PATH/manage.ini
    fi

    if [ $service = "selfpkg" -o $service = "all" ];then
        query_port=$(get_config "$service" "query_port")
        query_ip_addr=$(get_config "$service" "query_ip_addr")

        if [[ -z "$daemonize" ]] || [[ -z "$buffer_size" ]] || [[ -z "$query_ip_addr" ]] || [[ -z "$query_port" ]];then
            echo "[ERROR] CAN NOT find all config name in: $SYS_PATH/package.ini, Please check the file."
            echo "[ERROR] The following config name is needed: daemonize, buffer_size, query_port and query_ip_addr."
            exit 1
        fi
        if [ -z "$wsgi_file_path" ];then
            echo "[ERROR] CAN NOT find the wsgi file path under: /usr/lib/"
            exit 1
        fi

        echo "[INFO] selfpkg.ini is saved to: $OUT_PATH/selfpkg.ini"
        echo "[uwsgi]
http=$query_ip_addr:$query_port
module=packageship.selfpkg
wsgi-file=$wsgi_file_path/selfpkg.py
callable=app
buffer-size=$buffer_size
pidfile=$OUT_PATH/selfpkg.pid
daemonize=$daemonize" > $OUT_PATH/selfpkg.ini

    fi

    # NOTE(review): 'config_file' is never created in this script; this
    # looks like a leftover from an earlier revision — confirm before removal
    rm -f config_file
}
+
# Start the uwsgi instance for service $1 using $OUT_PATH/$1.ini.
# Relies on the global $wsgi_file_path set earlier by create_config_file.
function start_service(){
    # refuse to start a second instance with the same ini file
    if [ "`ps aux | grep "uwsgi" | grep "$1.ini"`" != "" ];then
        echo "[WARNING] $1 service is running, please STOP it first."
    else
        cd $wsgi_file_path
        uwsgi -d --ini $OUT_PATH/$1.ini
        echo "[INFO] START uwsgi service: $1.ini"
    fi
}
+
# Stop or reload the uwsgi service $1 via its pid file.
# $1: service name (manage|selfpkg); $2: uwsgi action ("stop" or "reload").
function stop_service(){
    if [ ! -f "$OUT_PATH/$1.pid" ]; then
        echo "[ERROR] STOP service FAILED, $OUT_PATH/$1.pid dose not exist."
        echo "[ERROR] Please stop it manually by using [ps -aux] and [uwsgi --stop #PID]"
        exit 0
    fi

    pid=$(cat $OUT_PATH/$1.pid)
    # only signal uwsgi when the recorded pid is alive AND belongs to
    # this service's ini file (guards against stale pid files)
    if [ "`ps aux | awk 'BEGIN{FS=" "}{if ($2=='$pid') print $0}' | grep "$1.ini"`" != "" ];then
        uwsgi --$2 $OUT_PATH/$1.pid
        echo "[INFO] STOP uwsgi service: $1.ini"
    else
        echo "[WARNING] STOP service [FAILED], Please START the service first."
    fi
}
+
# --- entry point: validate arguments, generate config, then dispatch ---
# usage: pkgshipd start|stop|restart [manage|selfpkg]
if [ ! -n "$1" ]
then
    echo "Usages: sh pkgshipd.sh start|stop|restart [manage|selfpkg]"
    exit 0
fi

# second argument selects the service; omitted means both ("all")
if [ X$2 = X ];then
    service="all"
elif [ $2 = "manage" -o $2 = "selfpkg" ];then
    service=$2
else
    echo "[ERROR] Can not phase the input of $2!!!"
    exit 0
fi

# regenerate the uwsgi ini file(s) before every action
create_config_file $service
if [ $? -ne 0 ];then
    exit 0
fi

if [ $1 = start ]
then
    if [ $service = "all" ];then
        start_service "manage"
        start_service "selfpkg"
    else
        start_service $service
    fi
    echo "===The run log is saved into: $daemonize==="

elif [ $1 = stop ];then
    if [ $service = "all" ];then
        stop_service "manage" "stop"
        stop_service "selfpkg" "stop"
    else
        stop_service $service "stop"
    fi
    echo "===The run log is saved into: $daemonize==="

elif [ $1 = restart ];then
    # NOTE(review): restart uses uwsgi's in-place "reload" signal rather
    # than a full stop+start cycle
    if [ $service = "all" ];then
        stop_service "manage" "reload"
        stop_service "selfpkg" "reload"
    else
        stop_service $service "reload"
    fi
    echo "===The run log is saved into: $daemonize==="

else
    echo "Usages: sh pkgshipd.sh start|stop|restart [manage|selfpkg]"
fi
+
diff --git a/packageship/packageship/pylint.conf b/packageship/packageship/pylint.conf
new file mode 100644
index 0000000000000000000000000000000000000000..c2e270f53c4617ee15326e6be669bd97155141d6
--- /dev/null
+++ b/packageship/packageship/pylint.conf
@@ -0,0 +1,583 @@
+[MASTER]
+
+# A comma-separated list of package or module names from where C extensions may
+# be loaded. Extensions are loading into the active Python interpreter and may
+# run arbitrary code.
+extension-pkg-whitelist=
+
+# Add files or directories to the blacklist. They should be base names, not
+# paths.
+ignore=CVS
+
+# Add files or directories matching the regex patterns to the blacklist. The
+# regex matches against base names, not paths.
+ignore-patterns=
+
+# Python code to execute, usually for sys.path manipulation such as
+# pygtk.require().
+#init-hook=
+
+# Use multiple processes to speed up Pylint. Specifying 0 will auto-detect the
+# number of processors available to use.
+jobs=1
+
+# Control the amount of potential inferred values when inferring a single
+# object. This can help the performance when dealing with large functions or
+# complex, nested conditions.
+limit-inference-results=100
+
+# List of plugins (as comma separated values of python module names) to load,
+# usually to register additional checkers.
+load-plugins=
+
+# Pickle collected data for later comparisons.
+persistent=yes
+
+# Specify a configuration file.
+#rcfile=
+
+# When enabled, pylint would attempt to guess common misconfiguration and emit
+# user-friendly hints instead of false-positive error messages.
+suggestion-mode=yes
+
+# Allow loading of arbitrary C extensions. Extensions are imported into the
+# active Python interpreter and may run arbitrary code.
+unsafe-load-any-extension=no
+
+
+[MESSAGES CONTROL]
+
+# Only show warnings with the listed confidence levels. Leave empty to show
+# all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED.
+confidence=
+
+# Disable the message, report, category or checker with the given id(s). You
+# can either give multiple identifiers separated by comma (,) or put this
+# option multiple times (only on the command line, not in the configuration
+# file where it should appear only once). You can also use "--disable=all" to
+# disable everything first and then reenable specific checks. For example, if
+# you want to run only the similarities checker, you can use "--disable=all
+# --enable=similarities". If you want to run only the classes checker, but have
+# no Warning level messages displayed, use "--disable=all --enable=classes
+# --disable=W".
+disable=print-statement,
+ parameter-unpacking,
+ unpacking-in-except,
+ old-raise-syntax,
+ backtick,
+ long-suffix,
+ old-ne-operator,
+ old-octal-literal,
+ import-star-module-level,
+ non-ascii-bytes-literal,
+ raw-checker-failed,
+ bad-inline-option,
+ locally-disabled,
+ file-ignored,
+ suppressed-message,
+ useless-suppression,
+ deprecated-pragma,
+ use-symbolic-message-instead,
+ apply-builtin,
+ basestring-builtin,
+ buffer-builtin,
+ cmp-builtin,
+ coerce-builtin,
+ execfile-builtin,
+ file-builtin,
+ long-builtin,
+ raw_input-builtin,
+ reduce-builtin,
+ standarderror-builtin,
+ unicode-builtin,
+ xrange-builtin,
+ coerce-method,
+ delslice-method,
+ getslice-method,
+ setslice-method,
+ no-absolute-import,
+ old-division,
+ dict-iter-method,
+ dict-view-method,
+ next-method-called,
+ metaclass-assignment,
+ indexing-exception,
+ raising-string,
+ reload-builtin,
+ oct-method,
+ hex-method,
+ nonzero-method,
+ cmp-method,
+ input-builtin,
+ round-builtin,
+ intern-builtin,
+ unichr-builtin,
+ map-builtin-not-iterating,
+ zip-builtin-not-iterating,
+ range-builtin-not-iterating,
+ filter-builtin-not-iterating,
+ using-cmp-argument,
+ eq-without-hash,
+ div-method,
+ idiv-method,
+ rdiv-method,
+ exception-message-attribute,
+ invalid-str-codec,
+ sys-max-int,
+ bad-python3-import,
+ deprecated-string-function,
+ deprecated-str-translate-call,
+ deprecated-itertools-function,
+ deprecated-types-field,
+ next-method-defined,
+ dict-items-not-iterating,
+ dict-keys-not-iterating,
+ dict-values-not-iterating,
+ deprecated-operator-function,
+ deprecated-urllib-function,
+ xreadlines-attribute,
+ deprecated-sys-function,
+ exception-escape,
+ comprehension-escape,
+ attribute-defined-outside-init
+
+# Enable the message, report, category or checker with the given id(s). You can
+# either give multiple identifier separated by comma (,) or put this option
+# multiple time (only on the command line, not in the configuration file where
+# it should appear only once). See also the "--disable" option for examples.
+enable=c-extension-no-member
+
+
+[REPORTS]
+
+# Python expression which should return a score less than or equal to 10. You
+# have access to the variables 'error', 'warning', 'refactor', and 'convention'
+# which contain the number of messages in each category, as well as 'statement'
+# which is the total number of statements analyzed. This score is used by the
+# global evaluation report (RP0004).
+evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)
+
+# Template used to display messages. This is a python new-style format string
+# used to format the message information. See doc for all details.
+#msg-template=
+
+# Set the output format. Available formats are text, parseable, colorized, json
+# and msvs (visual studio). You can also give a reporter class, e.g.
+# mypackage.mymodule.MyReporterClass.
+output-format=text
+
+# Tells whether to display a full report or only the messages.
+reports=no
+
+# Activate the evaluation score.
+score=yes
+
+
+[REFACTORING]
+
+# Maximum number of nested blocks for function / method body
+max-nested-blocks=5
+
+# Complete name of functions that never returns. When checking for
+# inconsistent-return-statements if a never returning function is called then
+# it will be considered as an explicit return statement and no message will be
+# printed.
+never-returning-functions=sys.exit
+
+
+[BASIC]
+
+# Naming style matching correct argument names.
+argument-naming-style=snake_case
+
+# Regular expression matching correct argument names. Overrides argument-
+# naming-style.
+#argument-rgx=
+
+# Naming style matching correct attribute names.
+attr-naming-style=snake_case
+
+# Regular expression matching correct attribute names. Overrides attr-naming-
+# style.
+#attr-rgx=
+
+# Bad variable names which should always be refused, separated by a comma.
+bad-names=foo,
+ bar,
+ baz,
+ toto,
+ tutu,
+ tata
+
+# Naming style matching correct class attribute names.
+class-attribute-naming-style=any
+
+# Regular expression matching correct class attribute names. Overrides class-
+# attribute-naming-style.
+#class-attribute-rgx=
+
+# Naming style matching correct class names.
+class-naming-style=PascalCase
+
+# Regular expression matching correct class names. Overrides class-naming-
+# style.
+#class-rgx=
+
+# Naming style matching correct constant names.
+const-naming-style=UPPER_CASE
+
+# Regular expression matching correct constant names. Overrides const-naming-
+# style.
+#const-rgx=
+
+# Minimum line length for functions/classes that require docstrings, shorter
+# ones are exempt.
+docstring-min-length=-1
+
+# Naming style matching correct function names.
+function-naming-style=snake_case
+
+# Regular expression matching correct function names. Overrides function-
+# naming-style.
+#function-rgx=
+
+# Good variable names which should always be accepted, separated by a comma.
+good-names=i,
+ j,
+ k,
+ ex,
+ Run,
+ _
+
+# Include a hint for the correct naming format with invalid-name.
+include-naming-hint=no
+
+# Naming style matching correct inline iteration names.
+inlinevar-naming-style=any
+
+# Regular expression matching correct inline iteration names. Overrides
+# inlinevar-naming-style.
+#inlinevar-rgx=
+
+# Naming style matching correct method names.
+method-naming-style=snake_case
+
+# Regular expression matching correct method names. Overrides method-naming-
+# style.
+#method-rgx=
+
+# Naming style matching correct module names.
+module-naming-style=snake_case
+
+# Regular expression matching correct module names. Overrides module-naming-
+# style.
+#module-rgx=
+
+# Colon-delimited sets of names that determine each other's naming style when
+# the name regexes allow several styles.
+name-group=
+
+# Regular expression which should only match function or class names that do
+# not require a docstring.
+no-docstring-rgx=^_
+
+# List of decorators that produce properties, such as abc.abstractproperty. Add
+# to this list to register other decorators that produce valid properties.
+# These decorators are taken in consideration only for invalid-name.
+property-classes=abc.abstractproperty
+
+# Naming style matching correct variable names.
+variable-naming-style=snake_case
+
+# Regular expression matching correct variable names. Overrides variable-
+# naming-style.
+#variable-rgx=
+
+
+[FORMAT]
+
+# Expected format of line ending, e.g. empty (any line ending), LF or CRLF.
+expected-line-ending-format=
+
+# Regexp for a line that is allowed to be longer than the limit.
+ignore-long-lines=^\s*(# )??$
+
+# Number of spaces of indent required inside a hanging or continued line.
+indent-after-paren=4
+
+# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1
+# tab).
+indent-string=' '
+
+# Maximum number of characters on a single line.
+max-line-length=100
+
+# Maximum number of lines in a module.
+max-module-lines=1000
+
+# List of optional constructs for which whitespace checking is disabled. `dict-
+# separator` is used to allow tabulation in dicts, etc.: {1 : 1,\n222: 2}.
+# `trailing-comma` allows a space between comma and closing bracket: (a, ).
+# `empty-line` allows space-only lines.
+no-space-check=trailing-comma,
+ dict-separator
+
+# Allow the body of a class to be on the same line as the declaration if body
+# contains single statement.
+single-line-class-stmt=no
+
+# Allow the body of an if to be on the same line as the test if there is no
+# else.
+single-line-if-stmt=no
+
+
+[LOGGING]
+
+# Format style used to check logging format string. `old` means using %
+# formatting, `new` is for `{}` formatting,and `fstr` is for f-strings.
+logging-format-style=old
+
+# Logging modules to check that the string format arguments are in logging
+# function parameter format.
+logging-modules=logging
+
+
+[MISCELLANEOUS]
+
+# List of note tags to take in consideration, separated by a comma.
+notes=FIXME,
+ XXX,
+ TODO
+
+
+[SIMILARITIES]
+
+# Ignore comments when computing similarities.
+ignore-comments=yes
+
+# Ignore docstrings when computing similarities.
+ignore-docstrings=yes
+
+# Ignore imports when computing similarities.
+ignore-imports=no
+
+# Minimum lines number of a similarity.
+min-similarity-lines=4
+
+
+[SPELLING]
+
+# Limits count of emitted suggestions for spelling mistakes.
+max-spelling-suggestions=4
+
+# Spelling dictionary name. Available dictionaries: none. To make it work,
+# install the python-enchant package.
+spelling-dict=
+
+# List of comma separated words that should not be checked.
+spelling-ignore-words=
+
+# A path to a file that contains the private dictionary; one word per line.
+spelling-private-dict-file=
+
+# Tells whether to store unknown words to the private dictionary (see the
+# --spelling-private-dict-file option) instead of raising a message.
+spelling-store-unknown-words=no
+
+
+[STRING]
+
+# This flag controls whether the implicit-str-concat-in-sequence should
+# generate a warning on implicit string concatenation in sequences defined over
+# several lines.
+check-str-concat-over-line-jumps=no
+
+
+[TYPECHECK]
+
+# List of decorators that produce context managers, such as
+# contextlib.contextmanager. Add to this list to register other decorators that
+# produce valid context managers.
+contextmanager-decorators=contextlib.contextmanager
+
+# List of members which are set dynamically and missed by pylint inference
+# system, and so shouldn't trigger E1101 when accessed. Python regular
+# expressions are accepted.
+generated-members=
+
+# Tells whether missing members accessed in mixin class should be ignored. A
+# mixin class is detected if its name ends with "mixin" (case insensitive).
+ignore-mixin-members=yes
+
+# Tells whether to warn about missing members when the owner of the attribute
+# is inferred to be None.
+ignore-none=yes
+
+# This flag controls whether pylint should warn about no-member and similar
+# checks whenever an opaque object is returned when inferring. The inference
+# can return multiple potential results while evaluating a Python object, but
+# some branches might not be evaluated, which results in partial inference. In
+# that case, it might be useful to still emit no-member and other checks for
+# the rest of the inferred objects.
+ignore-on-opaque-inference=yes
+
+# List of class names for which member attributes should not be checked (useful
+# for classes with dynamically set attributes). This supports the use of
+# qualified names.
+ignored-classes=optparse.Values,thread._local,_thread._local
+
+# List of module names for which member attributes should not be checked
+# (useful for modules/projects where namespaces are manipulated during runtime
+# and thus existing member attributes cannot be deduced by static analysis). It
+# supports qualified module names, as well as Unix pattern matching.
+ignored-modules=
+
+# Show a hint with possible names when a member name was not found. The aspect
+# of finding the hint is based on edit distance.
+missing-member-hint=yes
+
+# The minimum edit distance a name should have in order to be considered a
+# similar match for a missing member name.
+missing-member-hint-distance=1
+
+# The total number of similar names that should be taken in consideration when
+# showing a hint for a missing member.
+missing-member-max-choices=1
+
+# List of decorators that change the signature of a decorated function.
+signature-mutators=
+
+
+[VARIABLES]
+
+# List of additional names supposed to be defined in builtins. Remember that
+# you should avoid defining new builtins when possible.
+additional-builtins=
+
+# Tells whether unused global variables should be treated as a violation.
+allow-global-unused-variables=yes
+
+# List of strings which can identify a callback function by name. A callback
+# name must start or end with one of those strings.
+callbacks=cb_,
+ _cb
+
+# A regular expression matching the name of dummy variables (i.e. expected to
+# not be used).
+dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_
+
+# Argument names that match this expression will be ignored. Default to name
+# with leading underscore.
+ignored-argument-names=_.*|^ignored_|^unused_
+
+# Tells whether we should check for unused import in __init__ files.
+init-import=no
+
+# List of qualified module names which can have objects that can redefine
+# builtins.
+redefining-builtins-modules=six.moves,past.builtins,future.builtins,builtins,io
+
+
+[CLASSES]
+
+# List of method names used to declare (i.e. assign) instance attributes.
+defining-attr-methods=__init__,
+ __new__,
+ setUp,
+ __post_init__
+
+# List of member names, which should be excluded from the protected access
+# warning.
+exclude-protected=_asdict,
+ _fields,
+ _replace,
+ _source,
+ _make,
+ _rows
+
+# List of valid names for the first argument in a class method.
+valid-classmethod-first-arg=cls
+
+# List of valid names for the first argument in a metaclass class method.
+valid-metaclass-classmethod-first-arg=cls
+
+
+[DESIGN]
+
+# Maximum number of arguments for function / method.
+max-args=6
+
+# Maximum number of attributes for a class (see R0902).
+max-attributes=15
+
+# Maximum number of boolean expressions in an if statement (see R0916).
+max-bool-expr=5
+
+# Maximum number of branch for function / method body.
+max-branches=12
+
+# Maximum number of locals for function / method body.
+max-locals=15
+
+# Maximum number of parents for a class (see R0901).
+max-parents=7
+
+# Maximum number of public methods for a class (see R0904).
+max-public-methods=20
+
+# Maximum number of return / yield for function / method body.
+max-returns=6
+
+# Maximum number of statements in function / method body.
+max-statements=50
+
+# Minimum number of public methods for a class (see R0903).
+min-public-methods=2
+
+
+[IMPORTS]
+
+# List of modules that can be imported at any level, not just the top level
+# one.
+allow-any-import-level=
+
+# Allow wildcard imports from modules that define __all__.
+allow-wildcard-with-all=no
+
+# Analyse import fallback blocks. This can be used to support both Python 2 and
+# 3 compatible code, which means that the block might have code that exists
+# only in one or another interpreter, leading to false positives when analysed.
+analyse-fallback-blocks=no
+
+# Deprecated modules which should not be used, separated by a comma.
+deprecated-modules=optparse,tkinter.tix
+
+# Create a graph of external dependencies in the given file (report RP0402 must
+# not be disabled).
+ext-import-graph=
+
+# Create a graph of every (i.e. internal and external) dependencies in the
+# given file (report RP0402 must not be disabled).
+import-graph=
+
+# Create a graph of internal dependencies in the given file (report RP0402 must
+# not be disabled).
+int-import-graph=
+
+# Force import order to recognize a module as part of the standard
+# compatibility libraries.
+known-standard-library=
+
+# Force import order to recognize a module as part of a third party library.
+known-third-party=enchant
+
+# Couples of modules and preferred modules, separated by a comma.
+preferred-modules=
+
+
+[EXCEPTIONS]
+
+# Exceptions that will emit a warning when being caught. Defaults to
+# "BaseException, Exception".
+overgeneral-exceptions=BaseException,
+ Exception
diff --git a/packageship/packageship/selfpkg.py b/packageship/packageship/selfpkg.py
new file mode 100644
index 0000000000000000000000000000000000000000..b5878168bafa43ee8128e925b182b324798b30b4
--- /dev/null
+++ b/packageship/packageship/selfpkg.py
@@ -0,0 +1,43 @@
#!/usr/bin/python3
"""
Description: Entry for project initialization and service startup
"""
import os
from flask_script import Manager
from packageship.libs.exception import Error
from packageship.libs.configutils.readconfig import ReadConfig

try:
    from packageship.system_config import SYS_CONFIG_PATH
    if not os.path.exists(SYS_CONFIG_PATH):
        raise FileNotFoundError(
            'the system configuration file does not exist and the log cannot be started')
except FileNotFoundError as file_not_found:
    from packageship.libs.log.loghelper import Log
    Log(__name__).logger.error(file_not_found)
    # Chain the original error so the startup failure keeps its root cause.
    raise Exception(
        'the system configuration file does not exist '
        'and the log cannot be started') from file_not_found

from packageship.application import init_app
try:
    # 'query' builds the read-only (selfpkg) flavour of the application.
    app = init_app('query')
except Error as error:
    # Preserve the underlying initialization error in the traceback.
    raise Exception('Service failed to start') from error
else:
    from packageship.application.app_global import identity_verification
+
+
@app.before_request
def before_request():
    """Reject any request that fails the global identity check."""
    verified = identity_verification()
    if not verified:
        # Flask uses this return value as the response body,
        # short-circuiting the actual view.
        return 'No right to perform operation'
+
+
+if __name__ == "__main__":
+ _readconfig = ReadConfig()
+ port = _readconfig.get_system('query_port')
+ addr = _readconfig.get_system('query_ip_addr')
+ app.run(port=port, host=addr)
diff --git a/packageship/packageship/system_config.py b/packageship/packageship/system_config.py
new file mode 100644
index 0000000000000000000000000000000000000000..14de44a96e3c3dad4c030e2d4a942a63dfb170c0
--- /dev/null
+++ b/packageship/packageship/system_config.py
@@ -0,0 +1,33 @@
#!/usr/bin/python3
"""
Description:System-level file configuration, mainly configure
the address of the operating environment, commonly used variables, etc.
"""

import os
import sys


# The root directory where the system is running.
# When packaged by PyInstaller, sys.frozen is set; the code then runs from a
# temporary bundle, so the directory of the real executable (sys.argv[0]) is
# used instead of this module's package directory.
if getattr(sys, 'frozen', False):
    BASE_PATH = os.path.dirname(os.path.realpath(sys.argv[0]))
else:
    BASE_PATH = os.path.abspath(os.path.dirname(__file__))

# system configuration file path

SYS_CONFIG_PATH = os.path.join('/', 'etc', 'pkgship', 'package.ini')

# data file after successful data import

DATABASE_FILE_INFO = os.path.join(
    '/', 'var', 'run', 'database_file_info.yaml')

# If the path of the imported database is not specified in the configuration file, the
# configuration in the system is used by default
DATABASE_FOLDER_PATH = os.path.join('/', 'var', 'run', 'pkgship_dbs')


# If the directory of log storage is not configured,
# it will be stored in the following directory specified by the system by default
LOG_FOLDER_PATH = os.path.join('/', 'var', 'log', 'pkgship')
diff --git a/packageship/pkgship.spec b/packageship/pkgship.spec
new file mode 100644
index 0000000000000000000000000000000000000000..a48636ab1a91fe0d417401d62725b288499f42e7
--- /dev/null
+++ b/packageship/pkgship.spec
@@ -0,0 +1,74 @@
+Name: pkgship
+Version: 1.0
+Release: 2
+Summary: Pkgship implements rpm package dependence, maintainer, patch query and so on.
+License: Mulan 2.0
+URL: https://gitee.com/openeuler/openEuler-Advisor
+Source0: https://gitee.com/openeuler/openEuler-Advisor/pkgship-%{version}.tar.gz
+
+BuildArch: noarch
+
+Requires: python3-pip python3-flask-restful python3-flask python3 python3-pyyaml
+Requires: python3-sqlalchemy python3-prettytable python3-requests
+#Requires: pyinstaller python3-flask-session python3-flask-script marshmallow uwsgi
+
+%description
+Pkgship implements rpm package dependence, maintainer, patch query and so on.
+
+%prep
+%autosetup -n pkgship-%{version}
+
+%build
+%py3_build
+
+%install
+%py3_install
+
+
+%check
+
+
%post
# Build the pkgship CLI binary with pyinstaller at install time.
if [ -f "/usr/bin/pkgship" ]; then
  rm -rf /usr/bin/pkgship
fi


cd %{python3_sitelib}/packageship/
# Locate pyinstaller once and reuse the same binary for both invocations.
# (Previously the second invocation hard-coded /usr/local/bin/pyinstaller,
# which failed whenever pyinstaller was installed in /usr/bin.)
if [ -f "/usr/bin/pyinstaller" ]; then
  PYINSTALLER=/usr/bin/pyinstaller
elif [ -f "/usr/local/bin/pyinstaller" ]; then
  PYINSTALLER=/usr/local/bin/pyinstaller
else
  echo "pkgship install failed, there is no pyinstaller!"
  exit
fi
$PYINSTALLER -F pkgship.py

# Inject the pkg_resources hidden import required by pyinstaller bundles.
sed -i "s/hiddenimports\=\[\]/hiddenimports\=\['pkg_resources.py2_warn'\]/g" pkgship.spec
$PYINSTALLER pkgship.spec
cp dist/pkgship /usr/bin/
rm -rf %{python3_sitelib}/packageship/build %{python3_sitelib}/packageship/dist
+
+%postun
+
+
+%files
+%doc README.md
+%{python3_sitelib}/*
+%config %{_sysconfdir}/pkgship/*
+%attr(0755,root,root) %{_bindir}/pkgshipd
+
+
+%changelog
+* Sat JUL 4 2020 Yiru Wang <wangyiru1@huawei.com> - 1.0-2
+- change Requires from python3.7 to python3; add a check for the pyinstaller file.
+
+* Tue JUN 30 2020 Yiru Wang - 1.0-1
+- add pkgshipd file
+
+* Tue Jun 11 2020 Feng Hu - 1.0-0
+- add macro to build cli bin when rpm install
+
+* Sat Jun 6 2020 Feng Hu - 1.0-0
+- init package
diff --git a/packageship/setup.py b/packageship/setup.py
new file mode 100644
index 0000000000000000000000000000000000000000..676e6ccb0902b3bc78dca6d0f3be5055fffa0f7e
--- /dev/null
+++ b/packageship/setup.py
@@ -0,0 +1,63 @@
#!/usr/bin/python3
"""
Package management program installation configuration
file for software packaging
"""
from distutils.core import setup
import os

BASE_PATH = os.path.dirname(__file__)

# NOTE(review): 'path' is not referenced anywhere below -- kept only so the
# module-level name survives for any external tooling; confirm and remove.
path = os.path.join(BASE_PATH, 'Lib', 'site-packages', 'package')

# Target directory for the service configuration file.
configpath = "/etc/pkgship/"

# Read the long description with a context manager so the file handle is
# closed deterministically (the original left the handle to the GC).
with open('README.md', encoding='utf-8') as readme_file:
    LONG_DESCRIPTION = readme_file.read()

setup(
    name='packageship',
    version='1.0',
    py_modules=[
        'packageship.application.__init__',
        'packageship.application.app_global',
        'packageship.application.apps.__init__',
        'packageship.application.apps.package.serialize',
        'packageship.application.apps.package.url',
        'packageship.application.apps.package.view',
        'packageship.application.apps.package.function.be_depend',
        'packageship.application.apps.package.function.build_depend',
        'packageship.application.apps.package.function.constants',
        'packageship.application.apps.package.function.install_depend',
        'packageship.application.apps.package.function.packages',
        'packageship.application.apps.package.function.searchdb',
        'packageship.application.apps.package.function.self_depend',
        'packageship.application.initsystem.data_import',
        'packageship.application.initsystem.datamerge',
        'packageship.application.models.package',
        'packageship.application.models.temporarydb',
        'packageship.application.settings',
        'packageship.libs.__init__',
        'packageship.libs.configutils.readconfig',
        'packageship.libs.dbutils.sqlalchemy_helper',
        'packageship.libs.exception.ext',
        'packageship.libs.log.loghelper',
        'packageship.manage',
        'packageship.pkgship',
        'packageship.selfpkg',
        'packageship.system_config'],
    requires=['prettytable (==0.7.2)',
              'Flask_RESTful (==0.3.8)',
              'Flask_Session (==0.3.1)',
              'Flask_Script (==2.0.6)',
              'Flask (==1.1.2)',
              'marshmallow (==3.5.1)',
              'SQLAlchemy (==1.3.16)',
              'PyYAML (==5.3.1)',
              'requests (==2.21.0)',
              # NOTE(review): 'pyinstall' is a different PyPI project from
              # 'pyinstaller' (which the build docs use) -- confirm intent.
              'pyinstall (==0.1.4)',
              'uwsgi (==2.0.18)'],
    license='Dependency package management',
    long_description=LONG_DESCRIPTION,
    author='gongzt',
    data_files=[
        (configpath, ['packageship/package.ini']),
        ('/usr/bin', ['packageship/pkgshipd'])]
)
diff --git a/packageship/test/__init__.py b/packageship/test/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/packageship/test/base_code/__init__.py b/packageship/test/base_code/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..8b137891791fe96927ad78e64b0aad7bded08bdc
--- /dev/null
+++ b/packageship/test/base_code/__init__.py
@@ -0,0 +1 @@
+
diff --git a/packageship/test/base_code/common_test_code.py b/packageship/test/base_code/common_test_code.py
new file mode 100644
index 0000000000000000000000000000000000000000..f92cb9d88a177b827cf52fd1105cec0c85c2ff73
--- /dev/null
+++ b/packageship/test/base_code/common_test_code.py
@@ -0,0 +1,49 @@
+#!/usr/bin/python3
+# -*- coding:utf-8 -*-
+"""
+Compare the values in two Python data types for equality, ignoring the order of values
+"""
+
+import os
+import json
+from packageship.system_config import BASE_PATH
+
+
def compare_two_values(obj1, obj2):
    """
    Loosely compare two Python values, ignoring the order of their contents.

    Args:
        obj1: any value that can be rendered with str()
        obj2: value compared against obj1

    Returns: True when the values are equal, or when they share a type and
        their str() representations contain exactly the same characters
        (i.e. equal up to ordering); False otherwise.
    """
    if obj1 == obj2:
        return True
    # A crude but cheap order-insensitive check: same-type values whose
    # stringified characters match after sorting are treated as equal.
    return (isinstance(obj1, type(obj2))
            and sorted(str(obj1)) == sorted(str(obj2)))
+
+
def get_correct_json_by_filename(filename):
    """
    Load an expected-result fixture from correct_test_result_json.

    Args:
        filename: correct JSON file name without the .json suffix

    Returns: the parsed content of the fixture, or [] when the file
        is missing.
    """
    fixture_dir = os.path.join(os.path.dirname(BASE_PATH),
                               "test",
                               "common_files",
                               "correct_test_result_json")
    json_path = os.path.join(fixture_dir, "{}.json".format(filename))
    try:
        # json.load reads straight from the file object.
        with open(json_path, "r") as json_fp:
            return json.load(json_fp)
    except FileNotFoundError:
        # Missing fixture: callers expect an empty result, not an exception.
        return []
diff --git a/packageship/test/base_code/init_config_path.py b/packageship/test/base_code/init_config_path.py
new file mode 100644
index 0000000000000000000000000000000000000000..19dff752aa384ff80d21b7de6749ec86033e6566
--- /dev/null
+++ b/packageship/test/base_code/init_config_path.py
@@ -0,0 +1,46 @@
+#!/usr/bin/python3
+# -*- coding:utf-8 -*-
+"""
+InitConf
+"""
+import os
+from configparser import ConfigParser
+from packageship import system_config
+import yaml
+
+
class InitConf:
    """
    Point the system configuration at the test fixtures: registers
    conf.yaml as init_conf_path and rewrites each database entry in it to
    reference the sqlite fixtures under test/common_files/db_origin.
    """

    def __init__(self):
        base_path = os.path.join(os.path.dirname(system_config.BASE_PATH),
                                 "test",
                                 "common_files")
        config = ConfigParser()
        config.read(system_config.SYS_CONFIG_PATH)

        conf_path = os.path.join(base_path, "conf.yaml")

        config.set("SYSTEM", "init_conf_path", conf_path)
        # Use a context manager so the ini file is flushed and closed
        # deterministically (previously an anonymous open() handle was
        # passed to config.write and left to the GC).
        with open(system_config.SYS_CONFIG_PATH, "w") as config_fp:
            config.write(config_fp)

        with open(conf_path, 'r', encoding='utf-8') as f:
            origin_yaml = yaml.load(f.read(), Loader=yaml.FullLoader)

        # Entries are numbered from 1 to match data_<n>_{src,bin}.sqlite.
        for index, obj in enumerate(origin_yaml, 1):
            src_path = os.path.join(base_path, "db_origin",
                                    "data_{}_src.sqlite".format(str(index)))
            bin_path = os.path.join(base_path, "db_origin",
                                    "data_{}_bin.sqlite".format(str(index)))
            obj["src_db_file"] = [src_path]
            obj["bin_db_file"] = [bin_path]
        with open(conf_path, 'w', encoding='utf-8') as w_f:
            yaml.dump(origin_yaml, w_f)
+
+
# A simple method of single case model
# Prevent multiple file modifications
# Instantiated at import time: repeated imports reuse this one object, so
# the config files are rewritten at most once per process.

init_config = InitConf()
diff --git a/packageship/test/base_code/my_test_runner.py b/packageship/test/base_code/my_test_runner.py
new file mode 100644
index 0000000000000000000000000000000000000000..e3ce22f32178e441efd0eb15a85b6f9e25da4139
--- /dev/null
+++ b/packageship/test/base_code/my_test_runner.py
@@ -0,0 +1,67 @@
+#!/usr/bin/python3
+"""
+Inherited from unittest.TestResult,
+The simple statistical function is realized.
+"""
+import sys
+import unittest
+
+
class MyTestResult(unittest.TestResult):
    """
    unittest.TestResult subclass that keeps plain success/error/failure
    counters and, when verbosity is positive, echoes each outcome (plus
    the captured traceback text for errors/failures) to stderr.
    """

    def __init__(self, verbosity=0):
        super(MyTestResult, self).__init__()
        self.success_case_count = 0
        self.err_case_count = 0
        self.failure_case_count = 0
        self.verbosity = verbosity

    def _report(self, prefix, test, details=None):
        """Emit one outcome line (and optional traceback text) to stderr."""
        if self.verbosity <= 0:
            return
        sys.stderr.write(prefix)
        sys.stderr.write(str(test))
        sys.stderr.write('\n')
        if details is not None:
            sys.stderr.write(details)
            sys.stderr.write('\n')

    def addSuccess(self, test):
        """Count a passing case."""
        self.success_case_count += 1
        super(MyTestResult, self).addSuccess(test)
        self._report('Success ', test)

    def addError(self, test, err):
        """Count a case aborted by an unexpected exception."""
        self.err_case_count += 1
        super(MyTestResult, self).addError(test, err)
        # super() just appended (test, formatted traceback) to self.errors.
        self._report('Error ', test, self.errors[-1][1])

    def addFailure(self, test, err):
        """Count a case whose assertion was false."""
        self.failure_case_count += 1
        super(MyTestResult, self).addFailure(test, err)
        # super() just appended (test, formatted traceback) to self.failures.
        self._report('Failure ', test, self.failures[-1][1])
+
+
class MyTestRunner():
    """Minimal runner that drives a case or suite with MyTestResult."""

    def __init__(self, verbosity=0):
        # Forwarded verbatim to each MyTestResult this runner creates.
        self.verbosity = verbosity

    def run(self, test):
        """Execute *test* and return the populated MyTestResult."""
        outcome = MyTestResult(self.verbosity)
        test(outcome)
        return outcome
diff --git a/packageship/test/base_code/operate_data_base.py b/packageship/test/base_code/operate_data_base.py
new file mode 100644
index 0000000000000000000000000000000000000000..b5a472a9a128aacf02cd9b3d25a89ca4eb2eeff8
--- /dev/null
+++ b/packageship/test/base_code/operate_data_base.py
@@ -0,0 +1,39 @@
#!/usr/bin/python3
# -*- coding:utf-8 -*-
"""
OperateTestBase
"""
import os
import unittest

from packageship.libs.exception import Error
try:
    from packageship import system_config

    # Redirect every system path constant at the test fixtures BEFORE any
    # application module is imported, so the app reads the test config and
    # databases instead of the real /etc and /var locations.
    system_config.SYS_CONFIG_PATH = os.path.join(os.path.dirname(system_config.BASE_PATH),
                                                 'test',
                                                 'common_files',
                                                 'package.ini')

    system_config.DATABASE_FILE_INFO = os.path.join(os.path.dirname(system_config.BASE_PATH),
                                                    'test',
                                                    'common_files',
                                                    'database_file_info.yaml')
    system_config.DATABASE_FOLDER_PATH = os.path.join(os.path.dirname(system_config.BASE_PATH),
                                                      'test',
                                                      'common_files',
                                                      'operate_dbs')

    # Importing init_config rewrites the fixture config files (singleton);
    # only then is the write-enabled manage app safe to import.
    from test.base_code.init_config_path import init_config
    from packageship.manage import app
except Error:
    raise
+
+
class OperateTestBase(unittest.TestCase):
    """
    Base class for write/operate API tests: provides a Flask test client
    bound to the manage (write-enabled) application.
    """

    def setUp(self):
        # Fresh test client for every test method.
        self.client = app.test_client()
diff --git a/packageship/test/base_code/read_data_base.py b/packageship/test/base_code/read_data_base.py
new file mode 100644
index 0000000000000000000000000000000000000000..0e7a33ddf28dbcc23bf40d257a15a5c199919049
--- /dev/null
+++ b/packageship/test/base_code/read_data_base.py
@@ -0,0 +1,33 @@
+#!/usr/bin/python3
+# -*- coding:utf-8 -*-
+import os
+import unittest
+
+from packageship.libs.exception import Error
+try:
+ from packageship import system_config
+
+ system_config.SYS_CONFIG_PATH = os.path.join(os.path.dirname(system_config.BASE_PATH),
+ 'test',
+ 'common_files',
+ 'package.ini')
+
+ system_config.DATABASE_FILE_INFO = os.path.join(os.path.dirname(system_config.BASE_PATH),
+ 'test',
+ 'common_files',
+ 'database_file_info.yaml')
+ system_config.DATABASE_FOLDER_PATH = os.path.join(os.path.dirname(system_config.BASE_PATH),
+ 'test',
+ 'common_files',
+ 'dbs')
+
+ from test.base_code.init_config_path import init_config
+ from packageship.selfpkg import app
+
+except Error:
+ raise
+
+
+class ReadTestBase(unittest.TestCase):
+ def setUp(self):
+ self.client = app.test_client()
diff --git a/packageship/test/common_files/conf.yaml b/packageship/test/common_files/conf.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..0aeea894e6c09c747bd63cff7d5f7d62485ba16a
--- /dev/null
+++ b/packageship/test/common_files/conf.yaml
@@ -0,0 +1,14 @@
+- bin_db_file:
+  - test/common_files/db_origin/data_1_bin.sqlite
+  dbname: openEuler-20.03-LTS
+  priority: 1
+  src_db_file:
+  - test/common_files/db_origin/data_1_src.sqlite
+  status: enable
+- bin_db_file:
+  - test/common_files/db_origin/data_2_bin.sqlite
+  dbname: openEuler-20.04-LTS
+  priority: 2
+  src_db_file:
+  - test/common_files/db_origin/data_2_src.sqlite
+  status: enable
diff --git a/packageship/test/common_files/correct_test_result_json/be_depend.json b/packageship/test/common_files/correct_test_result_json/be_depend.json
new file mode 100644
index 0000000000000000000000000000000000000000..26f6760fc7e4aa0e07d8e8d3eed3d351225ce4ea
--- /dev/null
+++ b/packageship/test/common_files/correct_test_result_json/be_depend.json
@@ -0,0 +1,131 @@
+[
+ {
+ "input": {
+ "packagename": "A",
+ "dbname":"openEuler-20.03-LTS"
+ },
+ "output": {
+ "code": "2001",
+ "data": {
+ "A1": [
+ "A",
+ "0.0.23b",
+ "openEuler-20.03-LTS",
+ [
+ [
+ "B",
+ "build"
+ ],
+ [
+ "D",
+ "build"
+ ],
+ [
+ "D1",
+ "install"
+ ],
+ [
+ "B1",
+ "install"
+ ]
+ ]
+ ],
+ "A2": [
+ "A",
+ "0.0.23b",
+ "openEuler-20.03-LTS",
+ [
+ [
+ "C",
+ "build"
+ ],
+ [
+ "C1",
+ "install"
+ ],
+ [
+ "A1",
+ "install"
+ ]
+ ]
+ ],
+ "A_src": [
+ "source",
+ "0.0.23b",
+ "openEuler-20.03-LTS",
+ [
+ [
+ "root",
+ null
+ ]
+ ]
+ ],
+ "B1": [
+ "B",
+ "0.0.2",
+ "openEuler-20.03-LTS",
+ [
+ [
+ "A",
+ "build"
+ ],
+ [
+ "D",
+ "build"
+ ]
+ ]
+ ],
+ "B2": [
+ "B",
+ "0.0.2",
+ "openEuler-20.03-LTS",
+ [
+ [
+ "C",
+ "build"
+ ]
+ ]
+ ],
+ "C1": [
+ "C",
+ "0.1",
+ "openEuler-20.03-LTS",
+ [
+ [
+ "A",
+ "build"
+ ],
+ [
+ "B",
+ "build"
+ ],
+ [
+ "A2",
+ "install"
+ ],
+ [
+ "B2",
+ "install"
+ ]
+ ]
+ ],
+ "D1": [
+ "D",
+ "0.11",
+ "openEuler-20.03-LTS",
+ [
+ [
+ "D2",
+ "install"
+ ],
+ [
+ "A2",
+ "install"
+ ]
+ ]
+ ]
+ },
+ "msg": "Successful Operation!"
+}}
+]
+
diff --git a/packageship/test/common_files/correct_test_result_json/build_depend.json b/packageship/test/common_files/correct_test_result_json/build_depend.json
new file mode 100644
index 0000000000000000000000000000000000000000..de32263b90a0fe1c3d2ec0da1727b8d2d68ded75
--- /dev/null
+++ b/packageship/test/common_files/correct_test_result_json/build_depend.json
@@ -0,0 +1,91 @@
+[
+ {
+ "input": {
+ "sourceName": "A"
+ },
+ "output": {
+ "code": "2001",
+ "data": {
+ "A_src": [
+ "source",
+ "0.0.23b",
+ "openEuler-20.03-LTS",
+ [
+ [
+ "root",
+ null
+ ]
+ ]
+ ],
+ "B1": [
+ "B",
+ "0.0.2",
+ "openEuler-20.03-LTS",
+ [
+ [
+ "A",
+ "build"
+ ]
+ ]
+ ],
+ "C1": [
+ "C",
+ "0.1",
+ "openEuler-20.03-LTS",
+ [
+ [
+ "A",
+ "build"
+ ],
+ [
+ "A2",
+ "install"
+ ]
+ ]
+ ],
+ "A1": [
+ "A",
+ "0.0.23b",
+ "openEuler-20.03-LTS",
+ [
+ [
+ "B1",
+ "install"
+ ],
+ [
+ "D1",
+ "install"
+ ]
+ ]
+ ],
+ "A2": [
+ "A",
+ "0.0.23b",
+ "openEuler-20.03-LTS",
+ [
+ [
+ "A1",
+ "install"
+ ],
+ [
+ "C1",
+ "install"
+ ]
+ ]
+ ],
+ "D1": [
+ "D",
+ "0.11",
+ "openEuler-20.03-LTS",
+ [
+ [
+ "A2",
+ "install"
+ ]
+ ]
+ ]
+ },
+ "msg": "Successful Operation!"
+ }
+ }
+]
\ No newline at end of file
diff --git a/packageship/test/common_files/correct_test_result_json/get_repodatas.json b/packageship/test/common_files/correct_test_result_json/get_repodatas.json
new file mode 100644
index 0000000000000000000000000000000000000000..8584ab40b06018e01d5303945d38659ea8b4ed62
--- /dev/null
+++ b/packageship/test/common_files/correct_test_result_json/get_repodatas.json
@@ -0,0 +1,9 @@
+[{
+ "database_name": "openEuler-20.03-LTS",
+ "priority": 1,
+ "status": "enable"
+}, {
+ "database_name": "openEuler-20.04-LTS",
+ "priority": 2,
+ "status": "enable"
+}]
\ No newline at end of file
diff --git a/packageship/test/common_files/correct_test_result_json/get_single_package.json b/packageship/test/common_files/correct_test_result_json/get_single_package.json
new file mode 100644
index 0000000000000000000000000000000000000000..b816f2297720a3397abac8332dd1f09ee548721e
--- /dev/null
+++ b/packageship/test/common_files/correct_test_result_json/get_single_package.json
@@ -0,0 +1,15 @@
+[{
+ "buildDep": ["B1", "C1"],
+ "dbname": "openEuler-20.03-LTS",
+ "downloadURL": null,
+ "license": "GPLv2+ and BSD and MIT and IBM",
+ "maintainer": null,
+ "maintainlevel": null,
+ "sourceName": "A",
+ "sourceURL": "http://play0ad.com",
+ "subpack": {
+ "A1": ["A2"],
+ "A2": ["C1", "D1"]
+ },
+ "version": "0.0.23b"
+}]
\ No newline at end of file
diff --git a/packageship/test/common_files/correct_test_result_json/install_depend.json b/packageship/test/common_files/correct_test_result_json/install_depend.json
new file mode 100644
index 0000000000000000000000000000000000000000..06dfc9e075a1f384836f393049bfba121ebf7e28
--- /dev/null
+++ b/packageship/test/common_files/correct_test_result_json/install_depend.json
@@ -0,0 +1,161 @@
+[
+ {
+ "input": {
+ "binaryName": "A1"
+ },
+ "output": {
+ "code": "2001",
+ "data": {
+ "A1": [
+ "A",
+ "0.0.23b",
+ "openEuler-20.03-LTS",
+ [
+ [
+ "root",
+ null
+ ],
+ [
+ "D1",
+ "install"
+ ]
+ ]
+ ],
+ "A2": [
+ "A",
+ "0.0.23b",
+ "openEuler-20.03-LTS",
+ [
+ [
+ "A1",
+ "install"
+ ],
+ [
+ "C1",
+ "install"
+ ]
+ ]
+ ],
+ "C1": [
+ "C",
+ "0.1",
+ "openEuler-20.03-LTS",
+ [
+ [
+ "A2",
+ "install"
+ ]
+ ]
+ ],
+ "D1": [
+ "D",
+ "0.11",
+ "openEuler-20.03-LTS",
+ [
+ [
+ "A2",
+ "install"
+ ]
+ ]
+ ]
+ },
+ "msg": "Successful Operation!"
+ }
+ },
+ {
+ "input": {
+ "binaryName": "D2"
+ },
+ "output": {
+ "code": "2001",
+ "data": {
+ "A1": [
+ "A",
+ "0.0.23b",
+ "openEuler-20.03-LTS",
+ [
+ [
+ "D1",
+ "install"
+ ]
+ ]
+ ],
+ "A2": [
+ "A",
+ "0.0.23b",
+ "openEuler-20.03-LTS",
+ [
+ [
+ "A1",
+ "install"
+ ],
+ [
+ "C1",
+ "install"
+ ]
+ ]
+ ],
+ "C1": [
+ "C",
+ "0.1",
+ "openEuler-20.03-LTS",
+ [
+ [
+ "A2",
+ "install"
+ ]
+ ]
+ ],
+ "D1": [
+ "D",
+ "0.11",
+ "openEuler-20.03-LTS",
+ [
+ [
+ "D2",
+ "install"
+ ],
+ [
+ "A2",
+ "install"
+ ]
+ ]
+ ],
+ "D2": [
+ "D",
+ "0.11",
+ "openEuler-20.03-LTS",
+ [
+ [
+ "root",
+ null
+ ]
+ ]
+ ]
+ },
+ "msg": "Successful Operation!"
+ }
+ },
+ {
+ "input": {
+ "binaryName": "C2"
+ },
+ "output": {
+ "code": "2001",
+ "data": {
+ "C2": [
+ "C",
+ "0.1",
+ "openEuler-20.03-LTS",
+ [
+ [
+ "root",
+ null
+ ]
+ ]
+ ]
+ },
+ "msg": "Successful Operation!"
+ }
+ }
+]
\ No newline at end of file
diff --git a/packageship/test/common_files/correct_test_result_json/packages.json b/packageship/test/common_files/correct_test_result_json/packages.json
new file mode 100644
index 0000000000000000000000000000000000000000..815fbd358f663e7bdb20058d135ef309a0ca8c48
--- /dev/null
+++ b/packageship/test/common_files/correct_test_result_json/packages.json
@@ -0,0 +1,91 @@
+ [{
+ "dbname": "openEuler-20.03-LTS",
+ "downloadURL": null,
+ "license": "GPLv2+ and BSD and MIT and IBM",
+ "maintainer": null,
+ "maintainlevel": null,
+ "sourceName": "A",
+ "sourceURL": "http://play0ad.com",
+ "version": "0.0.23b"
+ }, {
+ "dbname": "openEuler-20.03-LTS",
+ "downloadURL": null,
+ "license": null,
+ "maintainer": null,
+ "maintainlevel": null,
+ "sourceName": "B",
+ "sourceURL": null,
+ "version": "0.0.2"
+ }, {
+ "dbname": "openEuler-20.03-LTS",
+ "downloadURL": null,
+ "license": null,
+ "maintainer": null,
+ "maintainlevel": null,
+ "sourceName": "C",
+ "sourceURL": null,
+ "version": "0.1"
+ }, {
+ "dbname": "openEuler-20.03-LTS",
+ "downloadURL": null,
+ "license": null,
+ "maintainer": null,
+ "maintainlevel": null,
+ "sourceName": "D",
+ "sourceURL": null,
+ "version": "0.11"
+ }, {
+ "dbname": "openEuler-20.04-LTS",
+ "downloadURL": null,
+ "license": "GPLv2+ and BSD and MIT and IBM",
+ "maintainer": null,
+ "maintainlevel": null,
+ "sourceName": "A",
+ "sourceURL": "http://play0ad.com",
+ "version": "0.0.23b"
+ }, {
+ "dbname": "openEuler-20.04-LTS",
+ "downloadURL": null,
+ "license": null,
+ "maintainer": null,
+ "maintainlevel": null,
+ "sourceName": "B",
+ "sourceURL": null,
+ "version": "0.0.3"
+ }, {
+ "dbname": "openEuler-20.04-LTS",
+ "downloadURL": null,
+ "license": null,
+ "maintainer": null,
+ "maintainlevel": null,
+ "sourceName": "C",
+ "sourceURL": null,
+ "version": "0.1"
+ }, {
+ "dbname": "openEuler-20.04-LTS",
+ "downloadURL": null,
+ "license": null,
+ "maintainer": null,
+ "maintainlevel": null,
+ "sourceName": "D",
+ "sourceURL": null,
+ "version": "0.12"
+ }, {
+ "dbname": "openEuler-20.04-LTS",
+ "downloadURL": null,
+ "license": null,
+ "maintainer": null,
+ "maintainlevel": null,
+ "sourceName": "E",
+ "sourceURL": null,
+ "version": "0.4"
+ }, {
+ "dbname": "openEuler-20.04-LTS",
+ "downloadURL": null,
+ "license": null,
+ "maintainer": null,
+ "maintainlevel": null,
+ "sourceName": "F",
+ "sourceURL": null,
+ "version": "1"
+ }]
diff --git a/packageship/test/common_files/correct_test_result_json/self_depend.json b/packageship/test/common_files/correct_test_result_json/self_depend.json
new file mode 100644
index 0000000000000000000000000000000000000000..b7b16a7f2309113ed67d4c624a26b83411fa4697
--- /dev/null
+++ b/packageship/test/common_files/correct_test_result_json/self_depend.json
@@ -0,0 +1,1036 @@
+[
+ {
+ "input": {
+ "packagename": "A1"
+ },
+ "output": {
+ "code": "2001",
+ "data": {
+ "binary_dicts": {
+ "A1": [
+ "A",
+ "0.0.23b",
+ "openEuler-20.03-LTS",
+ [
+ [
+ "root",
+ null
+ ],
+ [
+ "D1",
+ "install"
+ ],
+ [
+ "B1",
+ "install"
+ ],
+ [
+ "D",
+ "build"
+ ]
+ ]
+ ],
+ "A2": [
+ "A",
+ "0.0.23b",
+ "openEuler-20.03-LTS",
+ [
+ [
+ "A1",
+ "install"
+ ],
+ [
+ "C1",
+ "install"
+ ],
+ [
+ "C",
+ "build"
+ ]
+ ]
+ ],
+ "B1": [
+ "B",
+ "0.0.2",
+ "openEuler-20.03-LTS",
+ [
+ [
+ "A",
+ "build"
+ ],
+ [
+ "D",
+ "build"
+ ]
+ ]
+ ],
+ "B2": [
+ "B",
+ "0.0.2",
+ "openEuler-20.03-LTS",
+ [
+ [
+ "C",
+ "build"
+ ]
+ ]
+ ],
+ "C1": [
+ "C",
+ "0.1",
+ "openEuler-20.03-LTS",
+ [
+ [
+ "A",
+ "build"
+ ],
+ [
+ "A2",
+ "install"
+ ],
+ [
+ "B2",
+ "install"
+ ]
+ ]
+ ],
+ "D1": [
+ "D",
+ "0.11",
+ "openEuler-20.03-LTS",
+ [
+ [
+ "A2",
+ "install"
+ ]
+ ]
+ ]
+ },
+ "source_dicts": {
+ "A": [
+ "openEuler-20.03-LTS",
+ "0.0.23b"
+ ],
+ "C": [
+ "openEuler-20.03-LTS",
+ "0.1"
+ ],
+ "D": [
+ "openEuler-20.03-LTS",
+ "0.11"
+ ]
+ }
+ },
+ "msg": "Successful Operation!"
+ }
+ },
+ {
+ "input": {
+ "packagename": "C",
+ "packtype": "source"
+ },
+ "output": {
+ "code": "2001",
+ "data": {
+ "binary_dicts": {
+ "A1": [
+ "A",
+ "0.0.23b",
+ "openEuler-20.03-LTS",
+ [
+ [
+ "D1",
+ "install"
+ ],
+ [
+ "B1",
+ "install"
+ ],
+ [
+ "D",
+ "build"
+ ]
+ ]
+ ],
+ "A2": [
+ "A",
+ "0.0.23b",
+ "openEuler-20.03-LTS",
+ [
+ [
+ "C1",
+ "install"
+ ],
+ [
+ "A1",
+ "install"
+ ],
+ [
+ "C",
+ "build"
+ ]
+ ]
+ ],
+ "B1": [
+ "B",
+ "0.0.2",
+ "openEuler-20.03-LTS",
+ [
+ [
+ "A",
+ "build"
+ ],
+ [
+ "D",
+ "build"
+ ]
+ ]
+ ],
+ "B2": [
+ "B",
+ "0.0.2",
+ "openEuler-20.03-LTS",
+ [
+ [
+ "C",
+ "build"
+ ]
+ ]
+ ],
+ "C1": [
+ "C",
+ "0.1",
+ "openEuler-20.03-LTS",
+ [
+ [
+ "root",
+ null
+ ],
+ [
+ "A",
+ "build"
+ ],
+ [
+ "A2",
+ "install"
+ ],
+ [
+ "B2",
+ "install"
+ ]
+ ]
+ ],
+ "C2": [
+ "C",
+ "0.1",
+ "openEuler-20.03-LTS",
+ [
+ [
+ "root",
+ null
+ ]
+ ]
+ ],
+ "D1": [
+ "D",
+ "0.11",
+ "openEuler-20.03-LTS",
+ [
+ [
+ "A2",
+ "install"
+ ]
+ ]
+ ]
+ },
+ "source_dicts": {
+ "A": [
+ "openEuler-20.03-LTS",
+ "0.0.23b"
+ ],
+ "C": [
+ "openEuler-20.03-LTS",
+ "0.1"
+ ],
+ "D": [
+ "openEuler-20.03-LTS",
+ "0.11"
+ ]
+ }
+ },
+ "msg": "Successful Operation!"
+ }
+ },
+ {
+ "input": {
+ "packagename": "A2",
+ "selfbuild": "0",
+ "withsubpack": "1"
+ },
+ "output": {
+ "code": "2001",
+ "data": {
+ "binary_dicts": {
+ "A1": [
+ "A",
+ "0.0.23b",
+ "openEuler-20.03-LTS",
+ [
+ [
+ "D1",
+ "install"
+ ],
+ [
+ "B1",
+ "install"
+ ],
+ [
+ "D",
+ "build"
+ ]
+ ]
+ ],
+ "A2": [
+ "A",
+ "0.0.23b",
+ "openEuler-20.03-LTS",
+ [
+ [
+ "root",
+ null
+ ],
+ [
+ "C1",
+ "install"
+ ],
+ [
+ "A1",
+ "install"
+ ],
+ [
+ "C",
+ "build"
+ ]
+ ]
+ ],
+ "B1": [
+ "B",
+ "0.0.2",
+ "openEuler-20.03-LTS",
+ [
+ [
+ "A",
+ "build"
+ ],
+ [
+ "D",
+ "build"
+ ]
+ ]
+ ],
+ "B2": [
+ "B",
+ "0.0.2",
+ "openEuler-20.03-LTS",
+ [
+ [
+ "C",
+ "build"
+ ]
+ ]
+ ],
+ "C1": [
+ "C",
+ "0.1",
+ "openEuler-20.03-LTS",
+ [
+ [
+ "A2",
+ "install"
+ ],
+ [
+ "B2",
+ "install"
+ ],
+ [
+ "A",
+ "build"
+ ]
+ ]
+ ],
+ "D1": [
+ "D",
+ "0.11",
+ "openEuler-20.03-LTS",
+ [
+ [
+ "A2",
+ "install"
+ ],
+ [
+ "D2",
+ "install"
+ ]
+ ]
+ ],
+ "C2": [
+ "C",
+ "0.1",
+ "openEuler-20.03-LTS",
+ [
+ [
+ "C",
+ "Subpack"
+ ]
+ ]
+ ],
+ "D2": [
+ "D",
+ "0.11",
+ "openEuler-20.03-LTS",
+ [
+ [
+ "D",
+ "Subpack"
+ ]
+ ]
+ ]
+ },
+ "source_dicts": {
+ "A": [
+ "openEuler-20.03-LTS",
+ "0.0.23b"
+ ],
+ "B": [
+ "openEuler-20.03-LTS",
+ "0.0.2"
+ ],
+ "C": [
+ "openEuler-20.03-LTS",
+ "0.1"
+ ],
+ "D": [
+ "openEuler-20.03-LTS",
+ "0.11"
+ ]
+ }
+ },
+ "msg": "Successful Operation!"
+ }
+ },
+ {
+ "input": {
+ "packagename": "A",
+ "selfbuild": "0",
+ "withsubpack": "1",
+ "packtype": "source"
+ },
+ "output": {
+ "code": "2001",
+ "data": {
+ "binary_dicts": {
+ "A1": [
+ "A",
+ "0.0.23b",
+ "openEuler-20.03-LTS",
+ [
+ [
+ "root",
+ null
+ ],
+ [
+ "D1",
+ "install"
+ ],
+ [
+ "B1",
+ "install"
+ ],
+ [
+ "D",
+ "build"
+ ]
+ ]
+ ],
+ "A2": [
+ "A",
+ "0.0.23b",
+ "openEuler-20.03-LTS",
+ [
+ [
+ "root",
+ null
+ ],
+ [
+ "C1",
+ "install"
+ ],
+ [
+ "A1",
+ "install"
+ ],
+ [
+ "C",
+ "build"
+ ]
+ ]
+ ],
+ "B1": [
+ "B",
+ "0.0.2",
+ "openEuler-20.03-LTS",
+ [
+ [
+ "A",
+ "build"
+ ],
+ [
+ "D",
+ "build"
+ ]
+ ]
+ ],
+ "B2": [
+ "B",
+ "0.0.2",
+ "openEuler-20.03-LTS",
+ [
+ [
+ "C",
+ "build"
+ ]
+ ]
+ ],
+ "C1": [
+ "C",
+ "0.1",
+ "openEuler-20.03-LTS",
+ [
+ [
+ "A2",
+ "install"
+ ],
+ [
+ "B2",
+ "install"
+ ],
+ [
+ "A",
+ "build"
+ ]
+ ]
+ ],
+ "D1": [
+ "D",
+ "0.11",
+ "openEuler-20.03-LTS",
+ [
+ [
+ "A2",
+ "install"
+ ],
+ [
+ "D2",
+ "install"
+ ]
+ ]
+ ],
+ "C2": [
+ "C",
+ "0.1",
+ "openEuler-20.03-LTS",
+ [
+ [
+ "C",
+ "Subpack"
+ ]
+ ]
+ ],
+ "D2": [
+ "D",
+ "0.11",
+ "openEuler-20.03-LTS",
+ [
+ [
+ "D",
+ "Subpack"
+ ]
+ ]
+ ]
+ },
+ "source_dicts": {
+ "A": [
+ "openEuler-20.03-LTS",
+ "0.0.23b"
+ ],
+ "B": [
+ "openEuler-20.03-LTS",
+ "0.0.2"
+ ],
+ "C": [
+ "openEuler-20.03-LTS",
+ "0.1"
+ ],
+ "D": [
+ "openEuler-20.03-LTS",
+ "0.11"
+ ]
+ }
+ },
+ "msg": "Successful Operation!"
+ }
+ },
+ {
+ "input": {
+ "packagename": "A",
+ "selfbuild": "1",
+ "withsubpack": "1",
+ "packtype": "source"
+ },
+ "output": {
+ "code": "2001",
+ "data": {
+ "binary_dicts": {
+ "A1": [
+ "A",
+ "0.0.23b",
+ "openEuler-20.03-LTS",
+ [
+ [
+ "root",
+ null
+ ],
+ [
+ "D1",
+ "install"
+ ],
+ [
+ "B",
+ "build"
+ ],
+ [
+ "D",
+ "build"
+ ],
+ [
+ "B1",
+ "install"
+ ]
+ ]
+ ],
+ "A2": [
+ "A",
+ "0.0.23b",
+ "openEuler-20.03-LTS",
+ [
+ [
+ "root",
+ null
+ ],
+ [
+ "A1",
+ "install"
+ ],
+ [
+ "C1",
+ "install"
+ ],
+ [
+ "C",
+ "build"
+ ]
+ ]
+ ],
+ "B1": [
+ "B",
+ "0.0.2",
+ "openEuler-20.03-LTS",
+ [
+ [
+ "A",
+ "build"
+ ],
+ [
+ "D",
+ "build"
+ ]
+ ]
+ ],
+ "B2": [
+ "B",
+ "0.0.2",
+ "openEuler-20.03-LTS",
+ [
+ [
+ "C",
+ "build"
+ ]
+ ]
+ ],
+ "C1": [
+ "C",
+ "0.1",
+ "openEuler-20.03-LTS",
+ [
+ [
+ "A2",
+ "install"
+ ],
+ [
+ "B",
+ "build"
+ ],
+ [
+ "A",
+ "build"
+ ],
+ [
+ "B2",
+ "install"
+ ]
+ ]
+ ],
+ "D1": [
+ "D",
+ "0.11",
+ "openEuler-20.03-LTS",
+ [
+ [
+ "A2",
+ "install"
+ ],
+ [
+ "D2",
+ "install"
+ ]
+ ]
+ ],
+ "C2": [
+ "C",
+ "0.1",
+ "openEuler-20.03-LTS",
+ [
+ [
+ "C",
+ "Subpack"
+ ]
+ ]
+ ],
+ "D2": [
+ "D",
+ "0.11",
+ "openEuler-20.03-LTS",
+ [
+ [
+ "D",
+ "Subpack"
+ ]
+ ]
+ ]
+ },
+ "source_dicts": {
+ "A": [
+ "openEuler-20.03-LTS",
+ "0.0.23b"
+ ],
+ "B": [
+ "openEuler-20.03-LTS",
+ "0.0.2"
+ ],
+ "C": [
+ "openEuler-20.03-LTS",
+ "0.1"
+ ],
+ "D": [
+ "openEuler-20.03-LTS",
+ "0.11"
+ ]
+ }
+ },
+ "msg": "Successful Operation!"
+ }
+ },
+ {
+ "input": {
+ "packagename": "A2",
+ "selfbuild": "1",
+ "withsubpack": "0"
+ },
+ "output": {
+ "code": "2001",
+ "data": {
+ "binary_dicts": {
+ "A1": [
+ "A",
+ "0.0.23b",
+ "openEuler-20.03-LTS",
+ [
+ [
+ "D1",
+ "install"
+ ],
+ [
+ "B",
+ "build"
+ ],
+ [
+ "D",
+ "build"
+ ],
+ [
+ "B1",
+ "install"
+ ]
+ ]
+ ],
+ "A2": [
+ "A",
+ "0.0.23b",
+ "openEuler-20.03-LTS",
+ [
+ [
+ "root",
+ null
+ ],
+ [
+ "C1",
+ "install"
+ ],
+ [
+ "A1",
+ "install"
+ ],
+ [
+ "C",
+ "build"
+ ]
+ ]
+ ],
+ "B1": [
+ "B",
+ "0.0.2",
+ "openEuler-20.03-LTS",
+ [
+ [
+ "A",
+ "build"
+ ],
+ [
+ "D",
+ "build"
+ ]
+ ]
+ ],
+ "B2": [
+ "B",
+ "0.0.2",
+ "openEuler-20.03-LTS",
+ [
+ [
+ "C",
+ "build"
+ ]
+ ]
+ ],
+ "C1": [
+ "C",
+ "0.1",
+ "openEuler-20.03-LTS",
+ [
+ [
+ "A2",
+ "install"
+ ],
+ [
+ "B",
+ "build"
+ ],
+ [
+ "A",
+ "build"
+ ],
+ [
+ "B2",
+ "install"
+ ]
+ ]
+ ],
+ "D1": [
+ "D",
+ "0.11",
+ "openEuler-20.03-LTS",
+ [
+ [
+ "A2",
+ "install"
+ ]
+ ]
+ ]
+ },
+ "source_dicts": {
+ "A": [
+ "openEuler-20.03-LTS",
+ "0.0.23b"
+ ],
+ "B": [
+ "openEuler-20.03-LTS",
+ "0.0.2"
+ ],
+ "C": [
+ "openEuler-20.03-LTS",
+ "0.1"
+ ],
+ "D": [
+ "openEuler-20.03-LTS",
+ "0.11"
+ ]
+ }
+ },
+ "msg": "Successful Operation!"
+ }
+ },
+ {
+ "input": {
+ "packagename": "A",
+ "selfbuild": "1",
+ "withsubpack": "0",
+ "packtype": "source"
+ },
+ "output": {
+ "code": "2001",
+ "data": {
+ "binary_dicts": {
+ "A1": [
+ "A",
+ "0.0.23b",
+ "openEuler-20.03-LTS",
+ [
+ [
+ "root",
+ null
+ ],
+ [
+ "D1",
+ "install"
+ ],
+ [
+ "B",
+ "build"
+ ],
+ [
+ "D",
+ "build"
+ ],
+ [
+ "B1",
+ "install"
+ ]
+ ]
+ ],
+ "A2": [
+ "A",
+ "0.0.23b",
+ "openEuler-20.03-LTS",
+ [
+ [
+ "root",
+ null
+ ],
+ [
+ "A1",
+ "install"
+ ],
+ [
+ "C1",
+ "install"
+ ],
+ [
+ "C",
+ "build"
+ ]
+ ]
+ ],
+ "B1": [
+ "B",
+ "0.0.2",
+ "openEuler-20.03-LTS",
+ [
+ [
+ "A",
+ "build"
+ ],
+ [
+ "D",
+ "build"
+ ]
+ ]
+ ],
+ "B2": [
+ "B",
+ "0.0.2",
+ "openEuler-20.03-LTS",
+ [
+ [
+ "C",
+ "build"
+ ]
+ ]
+ ],
+ "C1": [
+ "C",
+ "0.1",
+ "openEuler-20.03-LTS",
+ [
+ [
+ "A2",
+ "install"
+ ],
+ [
+ "B",
+ "build"
+ ],
+ [
+ "A",
+ "build"
+ ],
+ [
+ "B2",
+ "install"
+ ]
+ ]
+ ],
+ "D1": [
+ "D",
+ "0.11",
+ "openEuler-20.03-LTS",
+ [
+ [
+ "A2",
+ "install"
+ ]
+ ]
+ ]
+ },
+ "source_dicts": {
+ "A": [
+ "openEuler-20.03-LTS",
+ "0.0.23b"
+ ],
+ "B": [
+ "openEuler-20.03-LTS",
+ "0.0.2"
+ ],
+ "C": [
+ "openEuler-20.03-LTS",
+ "0.1"
+ ],
+ "D": [
+ "openEuler-20.03-LTS",
+ "0.11"
+ ]
+ }
+ },
+ "msg": "Successful Operation!"
+ }
+ }
+]
\ No newline at end of file
diff --git a/packageship/test/common_files/database_file_info.yaml b/packageship/test/common_files/database_file_info.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..0143967501715db56f6ad82920ca6bd0985e3ba4
--- /dev/null
+++ b/packageship/test/common_files/database_file_info.yaml
@@ -0,0 +1,6 @@
+- database_name: openEuler-20.03-LTS
+ priority: 1
+ status: enable
+- database_name: openEuler-20.04-LTS
+ priority: 2
+ status: enable
diff --git a/packageship/test/common_files/db_origin/data_1_bin.sqlite b/packageship/test/common_files/db_origin/data_1_bin.sqlite
new file mode 100644
index 0000000000000000000000000000000000000000..8d5051866a938530ba81ffb7a1b971e4ef3c8f78
Binary files /dev/null and b/packageship/test/common_files/db_origin/data_1_bin.sqlite differ
diff --git a/packageship/test/common_files/db_origin/data_1_src.sqlite b/packageship/test/common_files/db_origin/data_1_src.sqlite
new file mode 100644
index 0000000000000000000000000000000000000000..2b053b0a8f3166558bfcf63505421984902def50
Binary files /dev/null and b/packageship/test/common_files/db_origin/data_1_src.sqlite differ
diff --git a/packageship/test/common_files/db_origin/data_2_bin.sqlite b/packageship/test/common_files/db_origin/data_2_bin.sqlite
new file mode 100644
index 0000000000000000000000000000000000000000..76533ab871181f22041d56ed879eb4f841e4fd35
Binary files /dev/null and b/packageship/test/common_files/db_origin/data_2_bin.sqlite differ
diff --git a/packageship/test/common_files/db_origin/data_2_src.sqlite b/packageship/test/common_files/db_origin/data_2_src.sqlite
new file mode 100644
index 0000000000000000000000000000000000000000..e4bbb155d066e6ee52f5119563f93fc15cfd7a16
Binary files /dev/null and b/packageship/test/common_files/db_origin/data_2_src.sqlite differ
diff --git a/packageship/test/common_files/dbs/maintenance.information.db b/packageship/test/common_files/dbs/maintenance.information.db
new file mode 100644
index 0000000000000000000000000000000000000000..d43b5e4e10a2b922a2931664afe5cb6aba22852f
Binary files /dev/null and b/packageship/test/common_files/dbs/maintenance.information.db differ
diff --git a/packageship/test/common_files/dbs/openEuler-20.03-LTS.db b/packageship/test/common_files/dbs/openEuler-20.03-LTS.db
new file mode 100644
index 0000000000000000000000000000000000000000..31056e94c6f82eacfaa940cc4d5ce473e7d439af
Binary files /dev/null and b/packageship/test/common_files/dbs/openEuler-20.03-LTS.db differ
diff --git a/packageship/test/common_files/dbs/openEuler-20.04-LTS.db b/packageship/test/common_files/dbs/openEuler-20.04-LTS.db
new file mode 100644
index 0000000000000000000000000000000000000000..1a8981105b6c6d0dd13db4365af338521a3f0d95
Binary files /dev/null and b/packageship/test/common_files/dbs/openEuler-20.04-LTS.db differ
diff --git a/packageship/test/common_files/operate_dbs/maintenance.information.db b/packageship/test/common_files/operate_dbs/maintenance.information.db
new file mode 100644
index 0000000000000000000000000000000000000000..d43b5e4e10a2b922a2931664afe5cb6aba22852f
Binary files /dev/null and b/packageship/test/common_files/operate_dbs/maintenance.information.db differ
diff --git a/packageship/test/common_files/operate_dbs/openEuler-20.03-LTS.db b/packageship/test/common_files/operate_dbs/openEuler-20.03-LTS.db
new file mode 100644
index 0000000000000000000000000000000000000000..31056e94c6f82eacfaa940cc4d5ce473e7d439af
Binary files /dev/null and b/packageship/test/common_files/operate_dbs/openEuler-20.03-LTS.db differ
diff --git a/packageship/test/common_files/operate_dbs/openEuler-20.04-LTS.db b/packageship/test/common_files/operate_dbs/openEuler-20.04-LTS.db
new file mode 100644
index 0000000000000000000000000000000000000000..1a8981105b6c6d0dd13db4365af338521a3f0d95
Binary files /dev/null and b/packageship/test/common_files/operate_dbs/openEuler-20.04-LTS.db differ
diff --git a/packageship/test/common_files/package.ini b/packageship/test/common_files/package.ini
new file mode 100644
index 0000000000000000000000000000000000000000..f7d029c88c4985b8e56190b6066c019cdb32fca5
--- /dev/null
+++ b/packageship/test/common_files/package.ini
@@ -0,0 +1,24 @@
+[SYSTEM]
+init_conf_path = test/common_files/conf.yaml
+debug = false
+write_port = 8080
+query_port = 8090
+write_ip_addr = 127.0.0.1
+query_ip_addr = 127.0.0.1
+
+[DATABASE]
+user_name = root
+password = 123456
+host = localhost
+port = 3306
+database =
+dbtype = sqlite
+
+[LOG]
+log_level = INFO
+log_name = log_info.log
+
+[UWSGI]
+daemonize = /var/run/pkgship/uwsgi.log
+buffer-size = 65536
+
diff --git a/packageship/test/run_tests.py b/packageship/test/run_tests.py
new file mode 100644
index 0000000000000000000000000000000000000000..6d389d6b6870e8cfd1b4c8b019a3bee1286e7716
--- /dev/null
+++ b/packageship/test/run_tests.py
@@ -0,0 +1,83 @@
+#!/usr/bin/python3
+# -*- coding:utf-8 -*-
+"""
+Execute all test cases
+"""
+import unittest
+import datetime
+from test.base_code.my_test_runner import MyTestRunner
+
+RUNNER = MyTestRunner(verbosity=1)
+
+
+def import_data_tests():
+ """Initialize related test cases"""
+
+ from test.test_module.init_system_tests.test_importdata import ImportData
+ suite = unittest.TestSuite()
+ suite.addTests(unittest.TestLoader().loadTestsFromTestCase(ImportData))
+
+ return RUNNER.run(suite)
+
+
+def read_data_tests():
+ """Test cases with read operations on data"""
+
+ from test.test_module.dependent_query_tests.test_install_depend import TestInstallDepend
+ from test.test_module.dependent_query_tests.test_self_depend import TestSelfDepend
+ from test.test_module.dependent_query_tests.test_be_depend import TestBeDepend
+ from test.test_module.repodatas_test.test_get_repodatas import TestGetRepodatas
+ from test.test_module.dependent_query_tests.test_build_depend import TestBuildDepend
+ from test.test_module.packages_tests.test_packages import TestPackages
+ from test.test_module.single_package_tests.test_get_singlepack import TestGetSinglePack
+ suite = unittest.TestSuite()
+
+ classes = [TestInstallDepend, TestSelfDepend, TestBeDepend,
+ TestGetRepodatas, TestBuildDepend, TestPackages, TestGetSinglePack]
+
+ for cls in classes:
+ suite.addTests(unittest.TestLoader().loadTestsFromTestCase(cls))
+ return RUNNER.run(suite)
+
+
+def write_data_tests():
+ """Test cases with write operations on data"""
+
+ from test.test_module.repodatas_test.test_delete_repodatas import TestDeleteRepodatas
+ from test.test_module.single_package_tests.test_update_singlepack import TestUpdatePackage
+ suite = unittest.TestSuite()
+
+ suite.addTests(unittest.TestLoader().loadTestsFromTestCase(TestDeleteRepodatas))
+ suite.addTests(unittest.TestLoader().loadTestsFromTestCase(TestUpdatePackage))
+
+ return RUNNER.run(suite)
+
+
+def main():
+ """Test case execution entry function"""
+
+ start_time = datetime.datetime.now()
+
+ result_4_import = import_data_tests()
+ result_4_read = read_data_tests()
+ result_4_write = write_data_tests()
+
+ stop_time = datetime.datetime.now()
+
+ print('\nA total of %s test cases were run: \nsuccessful:%s\tfailed:%s\terror:%s\n' % (
+ int(result_4_import.testsRun) + int(result_4_read.testsRun) + int(result_4_write.testsRun),
+ int(
+ result_4_import.success_case_count
+ ) + int(result_4_read.success_case_count) + int(result_4_write.success_case_count),
+ int(
+ result_4_import.failure_case_count
+ ) + int(result_4_read.failure_case_count) + int(result_4_write.failure_case_count),
+ int(
+ result_4_import.err_case_count
+ ) + int(result_4_read.err_case_count) + int(result_4_write.err_case_count)
+ ))
+
+ print('Total Time: %s' % (stop_time - start_time))
+
+
+main()
diff --git a/packageship/test/test_module/__init__.py b/packageship/test/test_module/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/packageship/test/test_module/dependent_query_tests/__init__.py b/packageship/test/test_module/dependent_query_tests/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/packageship/test/test_module/dependent_query_tests/test_be_depend.py b/packageship/test/test_module/dependent_query_tests/test_be_depend.py
new file mode 100644
index 0000000000000000000000000000000000000000..572412366b119f27a912666976a7f30dcef903df
--- /dev/null
+++ b/packageship/test/test_module/dependent_query_tests/test_be_depend.py
@@ -0,0 +1,359 @@
+#!/usr/bin/python3
+# -*- coding:utf-8 -*-
+"""
+Less transmission is always parameter transmission
+"""
+import unittest
+import json
+from test.base_code.read_data_base import ReadTestBase
+from test.base_code.common_test_code import compare_two_values, get_correct_json_by_filename
+from packageship.application.apps.package.function.constants import ResponseCode
+
+
class TestBeDepend(ReadTestBase):
    """Unit tests for the /packages/findBeDepend interface."""

    # Single endpoint under test; every request in this class posts here.
    REQUESTS_PATH = "/packages/findBeDepend"

    def _query(self, params):
        """POST *params* (dict) as a JSON body and return the decoded response.

        Args:
            params: request parameters to serialize as the JSON body.

        Returns:
            dict: the decoded JSON response body.
        """
        response = self.client.post(self.REQUESTS_PATH,
                                    data=json.dumps(params),
                                    content_type="application/json")
        return json.loads(response.data)

    def _check_response(self, resp_dict, expected_code, expect_data):
        """Assert the standard response contract for one request.

        Args:
            resp_dict: decoded JSON response body.
            expected_code: ResponseCode expected in the "code" field; the
                "msg" field must carry its mapped message.
            expect_data: True if the "data" field must not be None,
                False if it must be None.
        """
        self.assertIn("code", resp_dict, msg="Error in data format return")
        self.assertEqual(expected_code,
                         resp_dict.get("code"),
                         msg="Error in status code return")

        self.assertIn("msg", resp_dict, msg="Error in data format return")
        self.assertEqual(ResponseCode.CODE_MSG_MAP.get(expected_code),
                         resp_dict.get("msg"),
                         msg="Error in status code return")

        self.assertIn("data", resp_dict, msg="Error in data format return")
        if expect_data:
            self.assertIsNotNone(resp_dict.get("data"),
                                 msg="Error in data information return")
        else:
            self.assertIsNone(resp_dict.get("data"),
                              msg="Error in data information return")

    def test_lack_parameter(self):
        """
        Less transmission is always parameter transmission
        """
        # Any request missing packagename or dbname is a parameter error.
        for params in ({},
                       {"packagename": "CUnit"},
                       {"withsubpack": "0"},
                       {"dbname": "openEuler-20.03-LTS"},
                       {"packagename": "CUnit", "withsubpack": "0"},
                       {"dbname": "openEuler-20.03-LTS", "withsubpack": "0"}):
            self._check_response(self._query(params),
                                 ResponseCode.PARAM_ERROR, False)

        # withsubpack is optional; "0" and "1" are both accepted.
        for params in ({"packagename": "A",
                        "dbname": "openEuler-20.03-LTS"},
                       {"packagename": "A",
                        "dbname": "openEuler-20.03-LTS",
                        "withsubpack": "0"},
                       {"packagename": "A",
                        "dbname": "openEuler-20.03-LTS",
                        "withsubpack": "1"}):
            self._check_response(self._query(params),
                                 ResponseCode.SUCCESS, True)

    def test_wrong_parameter(self):
        """
        Parameter error
        """
        # Unknown package name.
        self._check_response(
            self._query({"packagename": "詹姆斯",
                         "dbname": "openEuler-20.03-LTS",
                         "withsubpack": "0"}),
            ResponseCode.PACK_NAME_NOT_FOUND, False)

        # Unknown database name.
        self._check_response(
            self._query({"packagename": "ATpy",
                         "dbname": "asdfgjhk",
                         "withsubpack": "0"}),
            ResponseCode.DB_NAME_ERROR, False)

        # withsubpack value outside the accepted set ("0"/"1" succeed above).
        self._check_response(
            self._query({"packagename": "CUnit",
                         "dbname": "openEuler-20.03-LTS",
                         "withsubpack": "3"}),
            ResponseCode.PARAM_ERROR, False)

    def test_true_params_result(self):
        """
        Results contrast
        """
        correct_list = get_correct_json_by_filename("be_depend")

        self.assertNotEqual([], correct_list, msg="Error reading JSON file")

        for correct_data in correct_list:
            resp_dict = self._query(correct_data["input"])
            self.assertTrue(
                compare_two_values(correct_data["output"], resp_dict),
                msg="The answer is not correct")
diff --git a/packageship/test/test_module/dependent_query_tests/test_build_depend.py b/packageship/test/test_module/dependent_query_tests/test_build_depend.py
new file mode 100644
index 0000000000000000000000000000000000000000..5e0df7ff3c2eaa70d9b717a303eed383154e49ed
--- /dev/null
+++ b/packageship/test/test_module/dependent_query_tests/test_build_depend.py
@@ -0,0 +1,161 @@
+#!/usr/bin/python3
+# -*- coding:utf-8 -*-
+"""
+ build_depend unittest
+"""
+import json
+import unittest
+
+from test.base_code.read_data_base import ReadTestBase
+from test.base_code.common_test_code import compare_two_values, get_correct_json_by_filename
+from packageship.application.apps.package.function.constants import ResponseCode
+
+
class TestBuildDepend(ReadTestBase):
    """Unit tests for the /packages/findBuildDepend interface."""

    # Single endpoint under test; every request in this class posts here.
    REQUESTS_PATH = "/packages/findBuildDepend"

    def _query(self, params):
        """POST *params* (dict) as a JSON body and return the decoded response.

        Args:
            params: request parameters to serialize as the JSON body.

        Returns:
            dict: the decoded JSON response body.
        """
        response = self.client.post(self.REQUESTS_PATH,
                                    data=json.dumps(params),
                                    content_type="application/json")
        return json.loads(response.data)

    def _check_response(self, resp_dict, expected_code, expect_data):
        """Assert the standard response contract for one request.

        Args:
            resp_dict: decoded JSON response body.
            expected_code: ResponseCode expected in "code"; "msg" must
                carry its mapped message.
            expect_data: True if "data" must not be None, False if it must.
        """
        self.assertIn("code", resp_dict, msg="Error in data format return")
        self.assertEqual(expected_code,
                         resp_dict.get("code"),
                         msg="Error in status code return")

        self.assertIn("msg", resp_dict, msg="Error in data format return")
        self.assertEqual(ResponseCode.CODE_MSG_MAP.get(expected_code),
                         resp_dict.get("msg"),
                         msg="Error in status prompt return")

        self.assertIn("data", resp_dict, msg="Error in data format return")
        if expect_data:
            self.assertIsNotNone(resp_dict.get("data"),
                                 msg="Error in data information return")
        else:
            self.assertIsNone(resp_dict.get("data"),
                              msg="Error in data information return")

    def test_empty_source_name_dblist(self):
        """
        test empty parameters:sourceName,dbList
        :return:
        """
        # Missing sourceName is a parameter error.
        self._check_response(self._query({}),
                             ResponseCode.PARAM_ERROR, False)

        # sourceName alone is sufficient; the database list is optional.
        self._check_response(self._query({"sourceName": "A"}),
                             ResponseCode.SUCCESS, True)

    def test_wrong_source_name_dblist(self):
        """
        test wrong parameters:sourceName,dbList
        :return: None
        """
        # Non-string sourceName is rejected as a parameter error.
        self._check_response(self._query({"sourceName": 0}),
                             ResponseCode.PARAM_ERROR, False)

        # Unknown package name.
        self._check_response(self._query({"sourceName": "qitiandasheng"}),
                             ResponseCode.PACK_NAME_NOT_FOUND, False)

        # db_list entries must be strings.
        self._check_response(
            self._query({"sourceName": "CUnit",
                         "db_list": [12, 3, 4]}),
            ResponseCode.PARAM_ERROR, False)

        # db_list names must match existing databases.
        self._check_response(
            self._query({"sourceName": "CUnit",
                         "db_list": ["shifu", "bajie"]}),
            ResponseCode.DB_NAME_ERROR, False)

    def test_true_params_result(self):
        """
        test_true_params_result
        Returns:

        """
        correct_list = get_correct_json_by_filename("build_depend")

        self.assertNotEqual([], correct_list, msg="Error reading JSON file")

        for correct_data in correct_list:
            resp_dict = self._query(correct_data["input"])
            self.assertTrue(
                compare_two_values(correct_data["output"], resp_dict),
                msg="The answer is not correct")
+
+
# Allow running this test module directly as a script.
if __name__ == '__main__':
    unittest.main()
diff --git a/packageship/test/test_module/dependent_query_tests/test_install_depend.py b/packageship/test/test_module/dependent_query_tests/test_install_depend.py
new file mode 100644
index 0000000000000000000000000000000000000000..81ddf98bd5a7135cdbf4bae74ffddf8ff8b6fb5f
--- /dev/null
+++ b/packageship/test/test_module/dependent_query_tests/test_install_depend.py
@@ -0,0 +1,164 @@
+#!/usr/bin/python3
+# -*- coding:utf-8 -*-
+"""
+TestInstallDepend
+"""
+import unittest
+import json
+
+from test.base_code.common_test_code import get_correct_json_by_filename, compare_two_values
+from test.base_code.read_data_base import ReadTestBase
+from packageship.application.apps.package.function.constants import ResponseCode
+
+
class TestInstallDepend(ReadTestBase):
    """Unit tests for the /packages/findInstallDepend interface."""

    # Single endpoint under test; every request in this class posts here.
    REQUESTS_PATH = "/packages/findInstallDepend"

    def _query(self, params):
        """POST *params* (dict) as a JSON body and return the decoded response.

        Args:
            params: request parameters to serialize as the JSON body.

        Returns:
            dict: the decoded JSON response body.
        """
        response = self.client.post(self.REQUESTS_PATH,
                                    data=json.dumps(params),
                                    content_type="application/json")
        return json.loads(response.data)

    def _check_response(self, resp_dict, expected_code, expect_data):
        """Assert the standard response contract for one request.

        Args:
            resp_dict: decoded JSON response body.
            expected_code: ResponseCode expected in "code"; "msg" must
                carry its mapped message.
            expect_data: True if "data" must not be None, False if it must.
        """
        self.assertIn("code", resp_dict, msg="Error in data format return")
        self.assertEqual(expected_code,
                         resp_dict.get("code"),
                         msg="Error in status code return")

        self.assertIn("msg", resp_dict, msg="Error in data format return")
        self.assertEqual(ResponseCode.CODE_MSG_MAP.get(expected_code),
                         resp_dict.get("msg"),
                         msg="Error in status prompt return")

        self.assertIn("data", resp_dict, msg="Error in data format return")
        if expect_data:
            self.assertIsNotNone(resp_dict.get("data"),
                                 msg="Error in data information return")
        else:
            self.assertIsNone(resp_dict.get("data"),
                              msg="Error in data information return")

    def test_empty_binaryName_dbList(self):
        """
        test_empty_binaryName_dbList
        Returns:

        """
        # Missing binaryName is a parameter error.
        self._check_response(self._query({}),
                             ResponseCode.PARAM_ERROR, False)

        # binaryName alone is sufficient; the database list is optional.
        self._check_response(self._query({"binaryName": "A1"}),
                             ResponseCode.SUCCESS, True)

    def test_wrong_binaryName_dbList(self):
        """
        test_empty_binaryName_dbList
        Returns:

        """
        # Non-string binaryName is rejected as a parameter error.
        self._check_response(self._query({"binaryName": 0}),
                             ResponseCode.PARAM_ERROR, False)

        # NOTE: an unknown binary name is still expected to answer SUCCESS
        # here (unlike build-depend, which answers PACK_NAME_NOT_FOUND) —
        # mirrors the original recorded expectations.
        self._check_response(self._query({"binaryName": "qitiandasheng"}),
                             ResponseCode.SUCCESS, True)

        # db_list entries must be strings.
        self._check_response(
            self._query({"binaryName": "A1",
                         "db_list": [12, 3, 4]}),
            ResponseCode.PARAM_ERROR, False)

        # db_list names must match existing databases.
        self._check_response(
            self._query({"binaryName": "A1",
                         "db_list": ["shifu", "bajie"]}),
            ResponseCode.DB_NAME_ERROR, False)

    def test_true_params_result(self):
        """
        test_empty_binaryName_dbList
        Returns:

        """
        correct_list = get_correct_json_by_filename("install_depend")

        self.assertNotEqual([], correct_list, msg="Error reading JSON file")

        for correct_data in correct_list:
            resp_dict = self._query(correct_data["input"])
            self.assertTrue(
                compare_two_values(correct_data["output"], resp_dict),
                msg="The answer is not correct")
+
+
# Allow running this test module directly as a script.
if __name__ == '__main__':
    unittest.main()
diff --git a/packageship/test/test_module/dependent_query_tests/test_self_depend.py b/packageship/test/test_module/dependent_query_tests/test_self_depend.py
new file mode 100644
index 0000000000000000000000000000000000000000..4a2fcb5d1f794f88b8ed8ab45b279ed1df2acc43
--- /dev/null
+++ b/packageship/test/test_module/dependent_query_tests/test_self_depend.py
@@ -0,0 +1,289 @@
+#!/usr/bin/python3
+# -*- coding:utf-8 -*-
+"""
+TestSelfDepend
+"""
+import unittest
+import json
+
+from test.base_code.common_test_code import get_correct_json_by_filename, compare_two_values
+from test.base_code.read_data_base import ReadTestBase
+from packageship.application.apps.package.function.constants import ResponseCode
+from packageship.application.apps.package.function.searchdb import db_priority
+
+
class TestSelfDepend(ReadTestBase):
    """Unit tests for the /packages/findSelfDepend interface."""

    # Single endpoint under test; every request in this class posts here.
    REQUESTS_PATH = "/packages/findSelfDepend"

    def _query(self, params):
        """POST *params* (dict) as a JSON body and return the decoded response.

        Args:
            params: request parameters to serialize as the JSON body.

        Returns:
            dict: the decoded JSON response body.
        """
        response = self.client.post(self.REQUESTS_PATH,
                                    data=json.dumps(params),
                                    content_type="application/json")
        return json.loads(response.data)

    def _check_response(self, resp_dict, expected_code, expect_data):
        """Assert the standard response contract for one request.

        Args:
            resp_dict: decoded JSON response body.
            expected_code: ResponseCode expected in "code"; "msg" must
                carry its mapped message.
            expect_data: True if "data" must not be None, False if it must.
        """
        self.assertIn("code", resp_dict, msg="Wrong return format!")
        self.assertEqual(expected_code,
                         resp_dict.get("code"),
                         msg="Error in status code return!")
        self.assertIn("msg", resp_dict, msg="Wrong return format!")
        self.assertEqual(ResponseCode.CODE_MSG_MAP.get(expected_code),
                         resp_dict.get("msg"),
                         msg="Error in status information return!")

        self.assertIn("data", resp_dict, msg="Wrong return format!")
        if expect_data:
            self.assertIsNotNone(resp_dict.get("data"),
                                 msg="Data return error!")
        else:
            self.assertIsNone(resp_dict.get("data"),
                              msg="Data return error!")

    def test_empty_parameter(self):
        """
        test_empty_parameter
        Returns:

        """
        # Missing packagename is a parameter error.
        self._check_response(self._query({}),
                             ResponseCode.PARAM_ERROR, False)

        # packagename alone suffices; db_list, selfbuild and withsubpack
        # are optional and may be added incrementally.
        for params in (
                {"packagename": "A1"},
                {"packagename": "A1",
                 "db_list": db_priority()},
                {"packagename": "A1",
                 "db_list": db_priority(),
                 "selfbuild": "0"},
                {"packagename": "A1",
                 "db_list": db_priority(),
                 "selfbuild": "0",
                 "withsubpack": "0"}):
            self._check_response(self._query(params),
                                 ResponseCode.SUCCESS, True)

    def test_wrong_parameter(self):
        """
        test_wrong_parameter
        Returns:

        """
        # Unknown package name.
        self._check_response(self._query({"packagename": "wukong"}),
                             ResponseCode.PACK_NAME_NOT_FOUND, False)

        # db_list entries must be strings.
        self._check_response(
            self._query({"packagename": "A1",
                         "db_list": [1, 2, 3, 4]}),
            ResponseCode.PARAM_ERROR, False)

        # db_list names must match existing databases.
        self._check_response(
            self._query({"packagename": "A1",
                         "db_list": ["bajie", "shifu"]}),
            ResponseCode.DB_NAME_ERROR, False)

        # selfbuild outside the accepted values.
        self._check_response(
            self._query({"packagename": "A1",
                         "db_list": db_priority(),
                         "selfbuild": "nverguo"}),
            ResponseCode.PARAM_ERROR, False)

        # withsubpack outside the accepted values.
        self._check_response(
            self._query({"packagename": "A1",
                         "db_list": db_priority(),
                         "selfbuild": "0",
                         "withsubpack": "pansidong"}),
            ResponseCode.PARAM_ERROR, False)

        # packtype outside the accepted values.
        self._check_response(
            self._query({"packagename": "A1",
                         "db_list": db_priority(),
                         "selfbuild": "0",
                         "withsubpack": "0",
                         "packtype": "pansidaxian"}),
            ResponseCode.PARAM_ERROR, False)

    def test_true_params_result(self):
        """
        test_true_params_result
        Returns:

        """
        correct_list = get_correct_json_by_filename("self_depend")

        self.assertNotEqual([], correct_list, msg="Error reading JSON file")

        for correct_data in correct_list:
            resp_dict = self._query(correct_data["input"])
            self.assertTrue(
                compare_two_values(correct_data["output"], resp_dict),
                msg="The answer is not correct")
+
+
# Allow running this test module directly as a script.
if __name__ == '__main__':
    unittest.main()
diff --git a/packageship/test/test_module/init_system_tests/__init__.py b/packageship/test/test_module/init_system_tests/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/packageship/test/test_module/init_system_tests/test_importdata.py b/packageship/test/test_module/init_system_tests/test_importdata.py
new file mode 100644
index 0000000000000000000000000000000000000000..35e303aa45def44488a6ae98428fd1ad48563f9c
--- /dev/null
+++ b/packageship/test/test_module/init_system_tests/test_importdata.py
@@ -0,0 +1,279 @@
+#!/usr/bin/python3
+# -*- coding:utf-8 -*-
+"""
+test import_databases
+"""
+import os
+import shutil
+import unittest
+import warnings
+from configparser import ConfigParser
+import yaml
+from packageship import system_config
+from packageship.libs.exception import Error
+
+try:
+
+ system_config.SYS_CONFIG_PATH = os.path.join(
+ os.path.dirname(
+ system_config.BASE_PATH),
+ 'test',
+ 'common_files',
+ 'package.ini')
+
+ system_config.DATABASE_FILE_INFO = os.path.join(
+ os.path.dirname(
+ system_config.BASE_PATH),
+ 'test',
+ 'init_system_files',
+ 'database_file_info.yaml')
+
+ system_config.DATABASE_FOLDER_PATH = os.path.join(os.path.dirname(
+ system_config.BASE_PATH), 'test', 'init_system_files', 'dbs')
+
+ from test.base_code.init_config_path import init_config
+
+except Error:
+ raise Error
+
+from packageship.application.initsystem.data_import import InitDataBase
+from packageship.libs.exception import ContentNoneException
+from packageship.libs.exception import DatabaseRepeatException
+from packageship.libs.configutils.readconfig import ReadConfig
+
+
+class ImportData(unittest.TestCase):
+ """
+ test importdatabases
+ """
+
+ def setUp(self):
+
+ warnings.filterwarnings("ignore")
+
+ def test_empty_param(self):
+ """If init is not obtained_ conf_ Path parameter"""
+ try:
+ InitDataBase(config_file_path=None).init_data()
+ except ContentNoneException as error:
+ self.assertEqual(
+ error.__class__,
+ ContentNoneException,
+ msg="No init in package_ conf_ Path parameter, wrong exception type thrown")
+
+ # Yaml file exists but the content is empty
+
+ try:
+ _config_path = ReadConfig().get_system('init_conf_path')
+ shutil.copyfile(_config_path, _config_path + '.bak')
+
+ with open(_config_path, 'w', encoding='utf-8') as w_f:
+ w_f.write("")
+
+ InitDataBase(config_file_path=_config_path).init_data()
+ except ContentNoneException as error:
+ self.assertEqual(
+ error.__class__,
+ ContentNoneException,
+ msg="Yaml file exists, but the content is empty. The exception type is wrong")
+ finally:
+ # Restore files
+ os.remove(_config_path)
+ os.rename(_config_path + '.bak', _config_path)
+
+ # Yaml file exists but DB exists_ The same with name
+ try:
+ _config_path = ReadConfig().get_system('init_conf_path')
+ shutil.copyfile(_config_path, _config_path + '.bak')
+ with open(_config_path, 'r', encoding='utf-8') as file:
+ origin_yaml = yaml.load(file.read(), Loader=yaml.FullLoader)
+ for obj in origin_yaml:
+ obj["dbname"] = "openEuler"
+ with open(_config_path, 'w', encoding='utf-8') as w_f:
+ yaml.dump(origin_yaml, w_f)
+
+ InitDataBase(config_file_path=_config_path).init_data()
+ except DatabaseRepeatException as error:
+
+ self.assertEqual(
+ error.__class__,
+ DatabaseRepeatException,
+ msg="Yaml file exists but DB_ Name duplicate exception type is wrong")
+ finally:
+ # Restore files
+ os.remove(_config_path)
+ os.rename(_config_path + '.bak', _config_path)
+
+ def test_wrong_param(self):
+ """If the corresponding current init_ conf_ The directory
+ specified by path is incorrect"""
+ try:
+ # Back up source files
+ shutil.copyfile(
+ system_config.SYS_CONFIG_PATH,
+ system_config.SYS_CONFIG_PATH + ".bak")
+ # Modify dbtype to "test"_ dbtype"
+ config = ConfigParser()
+ config.read(system_config.SYS_CONFIG_PATH)
+ config.set("SYSTEM", "init_conf_path", "D:\\Users\\conf.yaml")
+ config.write(open(system_config.SYS_CONFIG_PATH, "w"))
+
+ _config_path = ReadConfig().get_system('init_conf_path')
+ InitDataBase(config_file_path=_config_path).init_data()
+ except FileNotFoundError as error:
+ self.assertEqual(
+ error.__class__,
+ FileNotFoundError,
+ msg="init_ conf_ Path specified directory is empty exception type is wrong")
+ finally:
+ # To restore a file, delete the file first and then rename it back
+ os.remove(system_config.SYS_CONFIG_PATH)
+ os.rename(
+ system_config.SYS_CONFIG_PATH + ".bak",
+ system_config.SYS_CONFIG_PATH)
+
+ # Dbtype error
+ try:
+ # Back up source files
+ shutil.copyfile(
+ system_config.SYS_CONFIG_PATH,
+ system_config.SYS_CONFIG_PATH + ".bak")
+ # Modify dbtype to "test"_ dbtype"
+ config = ConfigParser()
+ config.read(system_config.SYS_CONFIG_PATH)
+ config.set("DATABASE", "dbtype", "test_dbtype")
+ config.write(open(system_config.SYS_CONFIG_PATH, "w"))
+
+ _config_path = ReadConfig().get_system('init_conf_path')
+ InitDataBase(config_file_path=None).init_data()
+ except Error as error:
+ self.assertEqual(
+ error.__class__,
+ Error,
+ msg="Wrong exception type thrown when dbtype is wrong")
+ finally:
+ # To restore a file, delete the file first and then rename it back
+ os.remove(system_config.SYS_CONFIG_PATH)
+ os.rename(
+ system_config.SYS_CONFIG_PATH + ".bak",
+ system_config.SYS_CONFIG_PATH)
+
+ def test_dbname(self):
+ """test dbname"""
+ try:
+ _config_path = ReadConfig().get_system('init_conf_path')
+ shutil.copyfile(_config_path, _config_path + '.bak')
+ with open(_config_path, 'r', encoding='utf-8') as file:
+ origin_yaml = yaml.load(file.read(), Loader=yaml.FullLoader)
+ for obj in origin_yaml:
+ obj["dbname"] = ""
+ with open(_config_path, 'w', encoding='utf-8') as w_f:
+ yaml.dump(origin_yaml, w_f)
+
+ InitDataBase(config_file_path=_config_path).init_data()
+ except DatabaseRepeatException as error:
+
+ self.assertEqual(
+ error.__class__,
+ DatabaseRepeatException,
+ msg="Yaml file exists but DB_ Name duplicate exception type is wrong")
+ finally:
+ # Restore files
+ os.remove(_config_path)
+ os.rename(_config_path + '.bak', _config_path)
+
+ def test_src_db_file(self):
+ """test src db file"""
+ try:
+ _config_path = ReadConfig().get_system('init_conf_path')
+ shutil.copyfile(_config_path, _config_path + '.bak')
+ with open(_config_path, 'r', encoding='utf-8') as file:
+ origin_yaml = yaml.load(file.read(), Loader=yaml.FullLoader)
+ for obj in origin_yaml:
+ obj["src_db_file"] = ""
+ with open(_config_path, 'w', encoding='utf-8') as w_f:
+ yaml.dump(origin_yaml, w_f)
+
+ InitDataBase(config_file_path=_config_path).init_data()
+ except TypeError as error:
+
+ self.assertEqual(
+ error.__class__,
+ TypeError,
+ msg="Yaml file exists but DB_ Name duplicate exception type is wrong")
+ finally:
+ # Restore files
+ os.remove(_config_path)
+ os.rename(_config_path + '.bak', _config_path)
+
+ def test_priority(self):
+ """test priority"""
+ try:
+ _config_path = ReadConfig().get_system('init_conf_path')
+ shutil.copyfile(_config_path, _config_path + '.bak')
+ with open(_config_path, 'r', encoding='utf-8') as file:
+ origin_yaml = yaml.load(file.read(), Loader=yaml.FullLoader)
+ for obj in origin_yaml:
+ obj["priority"] = "-1"
+ with open(_config_path, 'w', encoding='utf-8') as w_f:
+ yaml.dump(origin_yaml, w_f)
+ InitDataBase(config_file_path=_config_path).init_data()
+ with open(system_config.DATABASE_FILE_INFO, 'r', encoding='utf-8') as file_context:
+ init_database_date = yaml.load(
+ file_context.read(), Loader=yaml.FullLoader)
+ self.assertEqual(
+ init_database_date,
+ None,
+ msg=" Priority must be a positive integer between 0 and 100 ")
+ except FileNotFoundError:
+ return
+ finally:
+ # Restore files
+ os.remove(_config_path)
+ os.rename(_config_path + '.bak', _config_path)
+
+ def test_true_init_data(self):
+ """
+ Initialization of system data
+ """
+ # Normal configuration
+ _config_path = ReadConfig().get_system('init_conf_path')
+ InitDataBase(config_file_path=_config_path).init_data()
+
+ # In the correct case, an import will be generated under the initsystem
+ # directory_ success_ databse.yaml
+ path = system_config.DATABASE_FILE_INFO
+
+ self.assertTrue(
+ os.path.exists(path),
+ msg="Import was not generated successfully "
+ "after initialization_ success_ databse.yaml file")
+
+ # And there is data in this file, and it comes from the yaml file of
+ # conf
+ with open(_config_path, 'r', encoding='utf-8') as file:
+ yaml_config = yaml.load(file.read(), Loader=yaml.FullLoader)
+
+ with open(path, 'r', encoding='utf-8') as files:
+ yaml_success = yaml.load(files.read(), Loader=yaml.FullLoader)
+
+ self.assertEqual(
+ len(yaml_config),
+ len(yaml_success),
+ msg="The success file is inconsistent with the original yaml file")
+
+ # Compare name and priority
+ success_name_priority = dict()
+ config_name_priority = dict()
+ len_con = len(yaml_config)
+ for i in range(len_con):
+ success_name_priority[yaml_success[i]["database_name"]] = \
+ yaml_success[i]["priority"]
+ config_name_priority[yaml_config[i]["dbname"]] = \
+ yaml_config[i]["priority"]
+
+ self.assertEqual(
+ success_name_priority,
+ config_name_priority,
+ msg="The database and priority after initialization are"
+ "inconsistent with the original file")
diff --git a/packageship/test/test_module/packages_tests/__init__.py b/packageship/test/test_module/packages_tests/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/packageship/test/test_module/packages_tests/test_packages.py b/packageship/test/test_module/packages_tests/test_packages.py
new file mode 100644
index 0000000000000000000000000000000000000000..c549e0b64f329f25fb06151430d9b5ecf7b3a06c
--- /dev/null
+++ b/packageship/test/test_module/packages_tests/test_packages.py
@@ -0,0 +1,82 @@
+#!/usr/bin/python3
+# -*- coding:utf-8 -*-
+"""
+packages test
+"""
+from test.base_code.read_data_base import ReadTestBase
+import unittest
+import json
+
+from packageship.application.apps.package.function.constants import ResponseCode
+
+
+class TestPackages(ReadTestBase):
+ """
+ All package test cases
+ """
+
+ def test_empty_dbname(self):
+ """dbName is none"""
+
+ resp = self.client.get("/packages")
+ resp_dict = json.loads(resp.data)
+
+ self.assertIn("code", resp_dict, msg="Error in data format return")
+ self.assertEqual(ResponseCode.SUCCESS,
+ resp_dict.get("code"),
+ msg="Error in status code return")
+
+ self.assertIn("msg", resp_dict, msg="Error in data format return")
+ self.assertEqual(ResponseCode.CODE_MSG_MAP.get(ResponseCode.SUCCESS),
+ resp_dict.get("msg"),
+ msg="Error in status prompt return")
+
+ self.assertIn("data", resp_dict, msg="Error in data format return")
+ self.assertIsNotNone(
+ resp_dict.get("data"),
+ msg="Error in data information return")
+
+ resp = self.client.get("/packages?dbName=")
+ resp_dict = json.loads(resp.data)
+
+ self.assertIn("code", resp_dict, msg="Error in data format return")
+ self.assertEqual(ResponseCode.SUCCESS,
+ resp_dict.get("code"),
+ msg="Error in status code return")
+
+ self.assertIn("msg", resp_dict, msg="Error in data format return")
+ self.assertEqual(ResponseCode.CODE_MSG_MAP.get(ResponseCode.SUCCESS),
+ resp_dict.get("msg"),
+ msg="Error in status prompt return")
+
+ self.assertIn("data", resp_dict, msg="Error in data format return")
+ self.assertIsNotNone(
+ resp_dict.get("data"),
+ msg="Error in data information return")
+
+ def test_wrong_dbname(self):
+ """dbName is err"""
+
+ resp = self.client.get("/packages?dbName=test")
+ resp_dict = json.loads(resp.data)
+
+ self.assertIn("code", resp_dict, msg="Error in data format return")
+ self.assertEqual(ResponseCode.DB_NAME_ERROR,
+ resp_dict.get("code"),
+ msg="Error in status code return")
+
+ self.assertIn("msg", resp_dict, msg="Error in data format return")
+ self.assertEqual(
+ ResponseCode.CODE_MSG_MAP.get(
+ ResponseCode.DB_NAME_ERROR),
+ resp_dict.get("msg"),
+ msg="Error in data format return")
+
+ self.assertIn("data", resp_dict, msg="Error in data format return")
+ self.assertIsNone(
+ resp_dict.get("data"),
+ msg="Error in data information return")
+
+
+# Allow running this test module directly, outside a test runner.
+if __name__ == '__main__':
+    unittest.main()
diff --git a/packageship/test/test_module/repodatas_test/__init__.py b/packageship/test/test_module/repodatas_test/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/packageship/test/test_module/repodatas_test/test_delete_repodatas.py b/packageship/test/test_module/repodatas_test/test_delete_repodatas.py
new file mode 100644
index 0000000000000000000000000000000000000000..dc396272831db0d0522ad9a1f67e743bf1ab5145
--- /dev/null
+++ b/packageship/test/test_module/repodatas_test/test_delete_repodatas.py
@@ -0,0 +1,99 @@
+#!/usr/bin/python3
+# -*- coding:utf-8 -*-
+"""
+test delete repodatas
+"""
+import os
+import shutil
+
+from test.base_code.operate_data_base import OperateTestBase
+import unittest
+import json
+from packageship import system_config
+from packageship.libs.exception import Error
+from packageship.application.apps.package.function.constants import ResponseCode
+
+
+class TestDeleteRepodatas(OperateTestBase):
+ """
+ test delete repodata
+ """
+
+ def test_wrong_dbname(self):
+ """Test simulation scenario, dbname is not transmitted"""
+
+ # Scenario 1: the value passed by dbname is empty
+ resp = self.client.delete("/repodatas?dbName=")
+ resp_dict = json.loads(resp.data)
+
+ # assert
+ self.assertIn("code", resp_dict, msg="Error in data format return")
+ self.assertEqual(ResponseCode.PARAM_ERROR,
+ resp_dict.get("code"),
+ msg="Error in status code return")
+
+ self.assertIn("msg", resp_dict, msg="Error in data format return")
+ self.assertEqual(
+ ResponseCode.CODE_MSG_MAP.get(
+ ResponseCode.PARAM_ERROR),
+ resp_dict.get("msg"),
+ msg="Error in status prompt return")
+
+ self.assertIn("data", resp_dict, msg="Error in data format return")
+ self.assertIsNone(
+ resp_dict.get("data"),
+ msg="Error in data information return")
+
+ resp = self.client.delete("/repodatas?dbName=rr")
+ resp_dict = json.loads(resp.data)
+
+ self.assertIn("code", resp_dict, msg="Error in data format return")
+ self.assertEqual(ResponseCode.DB_NAME_ERROR,
+ resp_dict.get("code"),
+ msg="Error in status code return")
+
+ self.assertIn("msg", resp_dict, msg="Error in data format return")
+ self.assertEqual(
+ ResponseCode.CODE_MSG_MAP.get(
+ ResponseCode.DB_NAME_ERROR),
+ resp_dict.get("msg"),
+ msg="Error in status prompt return")
+
+ self.assertIn("data", resp_dict, msg="Error in data format return")
+ self.assertIsNone(
+ resp_dict.get("data"),
+ msg="Error in data information return")
+
+ def test_true_dbname(self):
+ """
+ Returns:
+ """
+ try:
+ shutil.copyfile(system_config.DATABASE_FILE_INFO, system_config.DATABASE_FILE_INFO + '.bak')
+ shutil.copytree(system_config.DATABASE_FOLDER_PATH, system_config.DATABASE_FOLDER_PATH + '.bak')
+ resp = self.client.delete("/repodatas?dbName=openEuler-20.04-LTS")
+ resp_dict = json.loads(resp.data)
+
+ self.assertIn("code", resp_dict, msg="Error in data format return")
+ self.assertEqual(ResponseCode.SUCCESS,
+ resp_dict.get("code"),
+ msg="Error in status code return")
+
+ self.assertIn("msg", resp_dict, msg="Error in data format return")
+ self.assertEqual(
+ ResponseCode.CODE_MSG_MAP.get(
+ ResponseCode.SUCCESS),
+ resp_dict.get("msg"),
+ msg="Error in status prompt return")
+
+ self.assertIn("data", resp_dict, msg="Error in data format return")
+ self.assertIsNone(
+ resp_dict.get("data"),
+ msg="Error in data information return")
+ except Error:
+ return None
+ finally:
+ os.remove(system_config.DATABASE_FILE_INFO)
+ os.rename(system_config.DATABASE_FILE_INFO + '.bak', system_config.DATABASE_FILE_INFO)
+ shutil.rmtree(system_config.DATABASE_FOLDER_PATH)
+ os.rename(system_config.DATABASE_FOLDER_PATH + '.bak', system_config.DATABASE_FOLDER_PATH)
diff --git a/packageship/test/test_module/repodatas_test/test_get_repodatas.py b/packageship/test/test_module/repodatas_test/test_get_repodatas.py
new file mode 100644
index 0000000000000000000000000000000000000000..82dc3adb0f34b58ab9b8e01493c745db1c6fccc5
--- /dev/null
+++ b/packageship/test/test_module/repodatas_test/test_get_repodatas.py
@@ -0,0 +1,66 @@
+#!/usr/bin/python3
+# -*- coding:utf-8 -*-
+"""
+test get repodatas
+"""
+from test.base_code.common_test_code import get_correct_json_by_filename
+from test.base_code.common_test_code import compare_two_values
+from test.base_code.read_data_base import ReadTestBase
+import unittest
+import json
+
+from packageship.application.apps.package.function.constants import ResponseCode
+
+
+class TestGetRepodatas(ReadTestBase):
+ """
+ test get repodatas
+ """
+
+ def test_dbname(self):
+ """no dbName"""
+ correct_list = get_correct_json_by_filename("get_repodatas")
+ self.assertNotEqual([], correct_list, msg="Error reading JSON file")
+ resp = self.client.get("/repodatas")
+ resp_dict = json.loads(resp.data)
+
+ self.assertIn("code", resp_dict, msg="Error in data format return")
+ self.assertEqual(ResponseCode.SUCCESS,
+ resp_dict.get("code"),
+ msg="Error in status code return")
+
+ self.assertIn("msg", resp_dict, msg="Error in data format return")
+ self.assertEqual(ResponseCode.CODE_MSG_MAP.get(ResponseCode.SUCCESS),
+ resp_dict.get("msg"),
+ msg="Error in status prompt return")
+
+ self.assertIn("data", resp_dict, msg="Error in data format return")
+ self.assertTrue(
+ compare_two_values(
+ resp_dict.get("data"),
+ correct_list),
+ msg="Error in data information return")
+
+ resp = self.client.get("/repodatas?ddd")
+ resp_dict = json.loads(resp.data)
+
+ self.assertIn("code", resp_dict, msg="Error in data format return")
+ self.assertEqual(ResponseCode.SUCCESS,
+ resp_dict.get("code"),
+ msg="Error in status code return")
+
+ self.assertIn("msg", resp_dict, msg="Error in data format return")
+ self.assertEqual(ResponseCode.CODE_MSG_MAP.get(ResponseCode.SUCCESS),
+ resp_dict.get("msg"),
+ msg="Error in status prompt return")
+
+ self.assertIn("data", resp_dict, msg="Error in data format return")
+ self.assertTrue(
+ compare_two_values(
+ resp_dict.get("data"),
+ correct_list),
+ msg="Error in data information return")
+
+
+# Allow running this test module directly, outside a test runner.
+if __name__ == '__main__':
+    unittest.main()
diff --git a/packageship/test/test_module/single_package_tests/__init__.py b/packageship/test/test_module/single_package_tests/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/packageship/test/test_module/single_package_tests/test_get_singlepack.py b/packageship/test/test_module/single_package_tests/test_get_singlepack.py
new file mode 100644
index 0000000000000000000000000000000000000000..d4702987345a709267c3327c84ccd3bdcc33f258
--- /dev/null
+++ b/packageship/test/test_module/single_package_tests/test_get_singlepack.py
@@ -0,0 +1,154 @@
+#!/usr/bin/python3
+# -*- coding:utf-8 -*-
+"""
+test_get_single_packages
+"""
+from test.base_code.common_test_code import get_correct_json_by_filename
+from test.base_code.common_test_code import compare_two_values
+from test.base_code.read_data_base import ReadTestBase
+import unittest
+import json
+
+from packageship.application.apps.package.function.constants import ResponseCode
+
+
+class TestGetSinglePack(ReadTestBase):
+ """
+ Single package test case
+ """
+
+ def test_error_sourcename(self):
+ """sourceName is none or err"""
+
+ resp = self.client.get("packages/findByPackName?dbName=openeuler-20.03-lts")
+ resp_dict = json.loads(resp.data)
+
+ self.assertIn("code", resp_dict, msg="Error in data format return")
+ self.assertEqual(ResponseCode.PARAM_ERROR,
+ resp_dict.get("code"),
+ msg="Error in status code return")
+
+ self.assertIn("msg", resp_dict, msg="Error in data format return")
+ self.assertEqual(
+ ResponseCode.CODE_MSG_MAP.get(
+ ResponseCode.PARAM_ERROR),
+ resp_dict.get("msg"),
+ msg="Error in status prompt return")
+
+ self.assertIn("data", resp_dict, msg="Error in data format return")
+ self.assertIsNone(
+ resp_dict.get("data"),
+ msg="Error in data information return")
+
+ resp = self.client.get(
+ "packages/findByPackName?sourceName=&dbName=openEuler-20.03-LTS")
+ resp_dict = json.loads(resp.data)
+
+ self.assertIn("code", resp_dict, msg="Error in data format return")
+ self.assertEqual(ResponseCode.PARAM_ERROR,
+ resp_dict.get("code"),
+ msg="Error in status code return")
+
+ self.assertIn("msg", resp_dict, msg="Error in data format return")
+ self.assertEqual(
+ ResponseCode.CODE_MSG_MAP.get(
+ ResponseCode.PARAM_ERROR),
+ resp_dict.get("msg"),
+ msg="Error in status prompt return")
+
+ self.assertIn("data", resp_dict, msg="Error in data format return")
+ self.assertIsNone(
+ resp_dict.get("data"),
+ msg="Error in data information return")
+
+ resp = self.client.get(
+ "packages/findByPackName?sourceName=test&dbName=for")
+ resp_dict = json.loads(resp.data)
+
+ self.assertIn("code", resp_dict, msg="Error in data format return")
+ self.assertEqual(ResponseCode.DB_NAME_ERROR,
+ resp_dict.get("code"),
+ msg="Error in status code return")
+
+ self.assertIn("msg", resp_dict, msg="Error in data format return")
+ self.assertEqual(
+ ResponseCode.CODE_MSG_MAP.get(
+ ResponseCode.DB_NAME_ERROR),
+ resp_dict.get("msg"),
+ msg="Error in status prompt return")
+
+ self.assertIn("data", resp_dict, msg="Error in data format return")
+ self.assertIsNone(
+ resp_dict.get("data"),
+ msg="Error in data information return")
+
+ def test_true_dbname(self):
+ """dbName is null or err"""
+
+ resp = self.client.get("packages/findByPackName?sourceName=A")
+ resp_dict = json.loads(resp.data)
+
+ self.assertIn("code", resp_dict, msg="Error in data format return")
+ self.assertEqual(ResponseCode.SUCCESS,
+ resp_dict.get("code"),
+ msg="Error in status code return")
+
+ self.assertIn("msg", resp_dict, msg="Error in data format return")
+ self.assertEqual(ResponseCode.CODE_MSG_MAP.get(ResponseCode.SUCCESS),
+ resp_dict.get("msg"),
+ msg="Error in status prompt return")
+
+ self.assertIn("data", resp_dict, msg="Error in data format return")
+ self.assertIsNotNone(
+ resp_dict.get("data"),
+ msg="Error in data information return")
+
+ correct_list = get_correct_json_by_filename("get_single_package")
+ self.assertNotEqual([], correct_list, msg="Error reading JSON file")
+ resp = self.client.get(
+ "/packages/findByPackName?sourceName=A&dbName=openEuler-20.03-LTS")
+ resp_dict = json.loads(resp.data)
+
+ self.assertIn("code", resp_dict, msg="Error in data format return")
+ self.assertEqual(ResponseCode.SUCCESS,
+ resp_dict.get("code"),
+ msg="Error in status code return")
+
+ self.assertIn("msg", resp_dict, msg="Error in data format return")
+ self.assertEqual(ResponseCode.CODE_MSG_MAP.get(ResponseCode.SUCCESS),
+ resp_dict.get("msg"),
+ msg="Error in status prompt return")
+
+ self.assertIn("data", resp_dict, msg="Error in data format return")
+ self.assertTrue(
+ compare_two_values(
+ resp_dict.get("data"),
+ correct_list),
+ msg="Error in data information return")
+
+ def test_wrong_dbname(self):
+ """test_wrong_dbname"""
+ resp = self.client.get(
+ "/packages/findByPackName?sourceName=CUnit&dbName=openEuler-20.03-lts")
+ resp_dict = json.loads(resp.data)
+
+ self.assertIn("code", resp_dict, msg="Error in data format return")
+ self.assertEqual(ResponseCode.DB_NAME_ERROR,
+ resp_dict.get("code"),
+ msg="Error in status code return")
+
+ self.assertIn("msg", resp_dict, msg="Error in data format return")
+ self.assertEqual(
+ ResponseCode.CODE_MSG_MAP.get(
+ ResponseCode.DB_NAME_ERROR),
+ resp_dict.get("msg"),
+ msg="Error in status prompt return")
+
+ self.assertIn("data", resp_dict, msg="Error in data format return")
+ self.assertIsNone(
+ resp_dict.get("data"),
+ msg="Error in data information return")
+
+
+# Allow running this test module directly, outside a test runner.
+if __name__ == '__main__':
+    unittest.main()
diff --git a/packageship/test/test_module/single_package_tests/test_update_singlepack.py b/packageship/test/test_module/single_package_tests/test_update_singlepack.py
new file mode 100644
index 0000000000000000000000000000000000000000..2b99c59af984012252dbec3b3ea19a9e2d92b9df
--- /dev/null
+++ b/packageship/test/test_module/single_package_tests/test_update_singlepack.py
@@ -0,0 +1,154 @@
+#!/usr/bin/python3
+"""TestUpdatePackage"""
+# -*- coding:utf-8 -*-
+from test.base_code.operate_data_base import OperateTestBase
+import unittest
+import json
+
+from packageship.application.apps.package.function.constants import ResponseCode
+
+
+class TestUpdatePackage(OperateTestBase):
+ """TestUpdatePackage"""
+
+ def test_empty_dbname(self):
+ """Parameter error"""
+
+ resp = self.client.put("/packages/findByPackName",
+ data=json.dumps({"dbName": "",
+ "sourceName": "xx",
+ "maintainer": "",
+ "maintainlevel": "1"}),
+ content_type="application/json")
+ resp_dict = json.loads(resp.data)
+ resp_dict.get("data")
+
+ self.assertIn("code", resp_dict, msg="Error in data format return")
+ self.assertEqual(ResponseCode.PARAM_ERROR,
+ resp_dict.get("code"),
+ msg="Error in status code return")
+
+ self.assertIn("msg", resp_dict, msg="Error in data format return")
+ self.assertEqual(
+ ResponseCode.CODE_MSG_MAP.get(
+ ResponseCode.PARAM_ERROR),
+ resp_dict.get("msg"),
+ msg="Error in status prompt return")
+
+ self.assertIn("data", resp_dict, msg="Error in data format return")
+ self.assertIsNone(
+ resp_dict.get("data"),
+ msg="Error in data information return")
+
+ # wrong dbname
+ resp = self.client.put("/packages/findByPackName",
+ data=json.dumps({"dbName": "xx",
+ "sourceName": "xx",
+ "maintainer": "",
+ "maintainlevel": "1"}),
+ content_type="application/json")
+ resp_dict = json.loads(resp.data)
+ resp_dict.get("data")
+
+ self.assertIn("code", resp_dict, msg="Error in data format return")
+ self.assertEqual(ResponseCode.DB_NAME_ERROR,
+ resp_dict.get("code"),
+ msg="Error in status code return")
+
+ self.assertIn("msg", resp_dict, msg="Error in data format return")
+ self.assertEqual(
+ ResponseCode.CODE_MSG_MAP.get(
+ ResponseCode.DB_NAME_ERROR),
+ resp_dict.get("msg"),
+ msg="Error in status prompt return")
+
+ self.assertIn("data", resp_dict, msg="Error in data format return")
+ self.assertIsNone(
+ resp_dict.get("data"),
+ msg="Error in data information return")
+
+ def test_empty_sourcename(self):
+ """Parameter error"""
+
+ resp = self.client.put("/packages/findByPackName",
+ data=json.dumps({"dbName": "openEuler-20.04-LTS",
+ "sourceName": "xx",
+ "maintainer": "1"}),
+ content_type="application/json")
+ resp_dict = json.loads(resp.data)
+ resp_dict.get("data")
+
+ self.assertIn("code", resp_dict, msg="Error in data format return")
+ self.assertEqual(ResponseCode.PACK_NAME_NOT_FOUND,
+ resp_dict.get("code"),
+ msg="Error in status code return")
+
+ self.assertIn("msg", resp_dict, msg="Error in data format return")
+ self.assertEqual(
+ ResponseCode.CODE_MSG_MAP.get(
+ ResponseCode.PACK_NAME_NOT_FOUND),
+ resp_dict.get("msg"),
+ msg="Error in status prompt return")
+
+ self.assertIn("data", resp_dict, msg="Error in data format return")
+ self.assertIsNone(
+ resp_dict.get("data"),
+ msg="Error in data information return")
+ # miss maintainer maintainlevel
+ resp = self.client.put("/packages/findByPackName",
+ data=json.dumps({"dbName": "openEuler-20.04-LTS",
+ "sourceName": "xx"}),
+ content_type="application/json")
+ resp_dict = json.loads(resp.data)
+ resp_dict.get("data")
+
+ self.assertIn("code", resp_dict, msg="Error in data format return")
+ self.assertEqual(ResponseCode.PARAM_ERROR,
+ resp_dict.get("code"),
+ msg="Error in status code return")
+
+ self.assertIn("msg", resp_dict, msg="Error in data format return")
+ self.assertEqual(
+ ResponseCode.CODE_MSG_MAP.get(
+ ResponseCode.PARAM_ERROR),
+ resp_dict.get("msg"),
+ msg="Error in status prompt return")
+
+ self.assertIn("data", resp_dict, msg="Error in data format return")
+ self.assertIsNone(
+ resp_dict.get("data"),
+ msg="Error in data information return")
+
+ def test_true_parram(self):
+ """
+ Returns:
+ """
+ resp = self.client.put("/packages/findByPackName",
+ data=json.dumps({"dbName": "openEuler-20.04-LTS",
+ "sourceName": "A",
+ "maintainer": "x",
+ "maintainlevel": "1"}),
+ content_type="application/json")
+ resp_dict = json.loads(resp.data)
+ resp_dict.get("data")
+
+ self.assertIn("code", resp_dict, msg="Error in data format return")
+ self.assertEqual(ResponseCode.SUCCESS,
+ resp_dict.get("code"),
+ msg="Error in status code return")
+
+ self.assertIn("msg", resp_dict, msg="Error in data format return")
+ self.assertEqual(
+ ResponseCode.CODE_MSG_MAP.get(
+ ResponseCode.SUCCESS),
+ resp_dict.get("msg"),
+ msg="Error in status prompt return")
+
+ self.assertIn("data", resp_dict, msg="Error in data format return")
+ self.assertIsNone(
+ resp_dict.get("data"),
+ msg="Error in data information return")
+
+
+# Allow running this test module directly, outside a test runner.
+if __name__ == '__main__':
+    unittest.main()