diff --git a/README-zh-cn.md b/README-zh-cn.md
new file mode 100644
index 0000000000000000000000000000000000000000..c9525c30a95494513fdaa7659e88f7e4e2ddee45
--- /dev/null
+++ b/README-zh-cn.md
@@ -0,0 +1,9 @@
+# openeuler-jenkins
+
+## 简介
+
+此仓库用来存放openEuler社区的Jenkins脚本。
+
+## 许可证
+
+详情请参考[LICENSE](https://gitee.com/openeuler/openeuler-jenkins/blob/ac397ce3e078937c700df6fb8de0e1b065ee4218/LICENSE)文件。
\ No newline at end of file
diff --git a/README.md b/README.md
index b9ed15e4f38e0b5bb26d1f51771bcc031627d08b..70be8755622182fddf5e0169c9c549fd84ca78b1 100644
--- a/README.md
+++ b/README.md
@@ -2,7 +2,7 @@
## Introduction
-This repository is used to store the jenkins scripts in openEuler Community.
+This repository is used to store the Jenkins scripts in the openEuler community.
## License
diff --git a/src/ac/acl/code/check_code_style.py b/src/ac/acl/code/check_code_style.py
index cb574b539577b90d94b12df390ddab4b2cc3a212..7dde4161beac67d4481da64158470af51e80850d 100644
--- a/src/ac/acl/code/check_code_style.py
+++ b/src/ac/acl/code/check_code_style.py
@@ -68,7 +68,7 @@ class CheckCodeStyle(BaseCheck):
"""
gp = GitProxy(self._work_dir)
diff_files = gp.diff_files_between_commits("HEAD~1", "HEAD~0")
- logger.debug("diff files: {}".format(diff_files))
+ logger.debug("diff files: %s", diff_files)
diff_code_files = [] # 仓库中变更的代码文件
diff_patch_code_files = [] # patch内的代码文件
@@ -77,7 +77,7 @@ class CheckCodeStyle(BaseCheck):
diff_code_files.append(diff_file)
elif GiteeRepo.is_patch_file(diff_file):
patch_dir = self._gr.patch_dir_mapping.get(diff_file)
- logger.debug("diff patch {} apply at dir {}".format(diff_file, patch_dir))
+ logger.debug("diff patch %s apply at dir %s", diff_file, patch_dir)
if patch_dir is not None:
files_in_patch = gp.extract_files_path_of_patch(diff_file)
patch_code_files = [os.path.join(patch_dir, file_in_patch)
@@ -88,13 +88,13 @@ class CheckCodeStyle(BaseCheck):
for code_file in patch_code_files
if os.path.exists(code_file)])
- logger.debug("diff code files: {}".format(diff_code_files))
- logger.debug("diff patch code files: {}".format(diff_patch_code_files))
+ logger.debug("diff code files: %s", diff_code_files)
+ logger.debug("diff patch code files: %s", diff_patch_code_files)
rs_1 = self.check_file_under_work_dir(diff_code_files)
- logger.debug("check_file_under_work_dir: {}".format(rs_1))
+ logger.debug("check_file_under_work_dir: %s", rs_1)
rs_2 = self.check_files_inner_patch(diff_patch_code_files)
- logger.debug("check_files_inner_patch: {}".format(rs_2))
+ logger.debug("check_files_inner_patch: %s", rs_2)
return rs_1 + rs_2
@@ -131,10 +131,10 @@ class CheckCodeStyle(BaseCheck):
elif GiteeRepo.is_c_cplusplus_file(file_path):
rs = LinterCheck.check_c_cplusplus(file_path)
else:
- logger.error("error when arrive here, unsupport file {}".format(file_path))
+ logger.error("error when arrive here, unsupport file %s", file_path)
return SUCCESS
- logger.info("Linter: {:<40} {}".format(file_path, rs))
+ logger.info("Linter: %s %s", file_path, rs)
if rs.get("F", 0) > 0:
return FAILED
@@ -150,7 +150,7 @@ class CheckCodeStyle(BaseCheck):
:param kwargs:
:return:
"""
- logger.info("check {} repo ...".format(self._repo))
+ logger.info("check %s repo ...", self._repo)
_ = not os.path.exists(self._work_tar_dir) and os.mkdir(self._work_tar_dir)
try:
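
A pattern repeated throughout this diff: eager str.format() calls in logging statements are replaced with lazy %-style arguments, so the message is only interpolated when the record is actually emitted. A minimal standalone sketch of the difference (logger name assumed):

    import logging

    logging.basicConfig(level=logging.DEBUG)
    logger = logging.getLogger("ac")

    diff_files = ["foo.spec", "0001-fix.patch"]

    # Eager: the argument string is built even if DEBUG is disabled.
    logger.debug("diff files: {}".format(diff_files))

    # Lazy: interpolation happens inside the logging framework, and only
    # when the record is actually emitted.
    logger.debug("diff files: %s", diff_files)
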
diff --git a/src/ac/acl/openlibing/check_code.py b/src/ac/acl/openlibing/check_code.py
index cd0ab58e54b698cd9936746554e3f5c5a8d5154e..d0585092b754901bfd0eaf39254d84beb69caec4 100644
--- a/src/ac/acl/openlibing/check_code.py
+++ b/src/ac/acl/openlibing/check_code.py
@@ -17,6 +17,7 @@
"""
import logging
+import time
from src.ac.framework.ac_base import BaseCheck
from src.ac.framework.ac_result import FAILED, WARNING, SUCCESS
@@ -40,14 +41,45 @@ class CheckCode(BaseCheck):
super(CheckCode, self).__init__(workspace, repo, conf)
@staticmethod
- def get_codecheck_result(pr_url, codecheck_api_url):
+ def get_codecheck_result(pr_url, codecheck_api_url, codecheck_api_key):
"""
通过api调用codecheck
"""
- data = {"pr_url": pr_url}
+ # get a dynamic codecheck API token
+ codecheck_token_api_url = '{}/token/{}'.format(codecheck_api_url, codecheck_api_key)
+ token_resp = {}
+ rs = do_requests("get", codecheck_token_api_url, obj=token_resp)
+ if rs != 0 or token_resp.get("code", "") != "200":
+ logger.error("get dynamic token failed")
+ return -1, {}  # non-zero status marks failure, matching the rs != 0 check downstream
+
+ token = token_resp.get("data")
+ data = {"pr_url": pr_url, "token": token}
response_content = {}
+ # create the codecheck task
+ codecheck_task_api_url = "{}/task".format(codecheck_api_url)
+ rs = do_requests("get", codecheck_task_api_url, querystring=data, obj=response_content)
+ if rs != 0 or response_content.get('code', '') != '200':
+ logger.error("create codecheck task failed; %s", response_content.get('msg', ''))
+ return -1, {}
+
+ uuid = response_content.get('uuid')
+ task_id = response_content.get('task_id')
+ data = {"uuid": uuid, "token": token}
+ codecheck_status_api_url = '{}/{}/status'.format(codecheck_api_url, task_id)
+ current_time = 0
logger.info("codecheck probably need to 3min")
- rs = do_requests("get", codecheck_api_url, querystring=data, timeout=180, obj=response_content)
+ # poll for up to 3 minutes
+ while current_time < 180:
+ time.sleep(10)
+ response_content = {}
+ # query the execution status of the codecheck task
+ rs = do_requests("get", codecheck_status_api_url, querystring=data, obj=response_content)
+ if rs == 0 and response_content.get('code') == '100':
+ current_time = current_time + 10
+ continue
+ else:
+ break
return rs, response_content
def check_code(self):
@@ -55,7 +87,7 @@ class CheckCode(BaseCheck):
开始进行codecheck检查
"""
# 等待计算结果
- rs, response_content = self.get_codecheck_result(self._pr_url, self._codecheck_api_url)
+ rs, response_content = self.get_codecheck_result(self._pr_url, self._codecheck_api_url, self._codecheck_api_key)
# 判断是否计算完成
if rs != 0:
@@ -70,12 +102,12 @@ class CheckCode(BaseCheck):
"state": "pass(通过)/no pass(不通过)"
}
"""
+ logger.warning("click %s view code check detail", response_content.get('data'))
# 只有codecheck完成且codecheck检查的代码中存在bug,返回检查项失败的结果,以detail结尾,会显示具体的代码bug所在位置。
if response_content.get("state") == "no pass":
- logger.warning("click {} view code check detail".format(response_content.get('data')))
return FAILED
else:
- logger.error("code check failed, info :{}".format(response_content.get('msg')))
+ logger.error("code check failed, info : %s", response_content.get('msg'))
return SUCCESS
@@ -86,12 +118,13 @@ class CheckCode(BaseCheck):
:param kwargs:
:return:
"""
- logger.info("check {} code ...".format(self._repo))
- logger.debug("args: {}, kwargs: {}".format(args, kwargs))
+ logger.info("check %s code ...", self._repo)
+ logger.debug("args: %s, kwargs: %s", args, kwargs)
codecheck_conf = kwargs.get("codecheck", {})
self._pr_url = codecheck_conf.get("pr_url", "")
self._pr_number = codecheck_conf.get("pr_number", "")
self._codecheck_api_url = codecheck_conf.get("codecheck_api_url", "")
+ self._codecheck_api_key = codecheck_conf.get('codecheck_api_key', "")
return self.start_check()
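
The new check_code flow fetches a dynamic token, creates a task, then polls the status endpoint every 10 seconds for up to 3 minutes while the task still reports code '100'. A sketch of that polling loop using plain requests in place of the repo's do_requests helper; the URL shape and status code follow the hunk above:

    import time
    import requests

    def poll_codecheck(api_url, task_id, uuid, token, timeout=180, interval=10):
        """Poll the codecheck status endpoint until the task leaves the
        'still running' state (code == '100') or the timeout elapses."""
        status_url = "{}/{}/status".format(api_url, task_id)
        waited = 0
        while waited < timeout:
            time.sleep(interval)
            resp = requests.get(status_url,
                                params={"uuid": uuid, "token": token},
                                timeout=30).json()
            if resp.get("code") == "100":  # still running, keep waiting
                waited += interval
                continue
            return resp                    # finished: pass, no pass, or error
        return {}                          # timed out
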
diff --git a/src/ac/acl/package_license/check_license.py b/src/ac/acl/package_license/check_license.py
index 6502a02f6c4c4c516b15ac1f32147737fa39a276..669c139235c6b872a9055824c6f661a8cd3e7d88 100644
--- a/src/ac/acl/package_license/check_license.py
+++ b/src/ac/acl/package_license/check_license.py
@@ -87,13 +87,14 @@ class CheckLicense(BaseCheck):
check whether the license in spec file and in src file is same
:return
"""
- if self._pkg_license.check_licenses_is_same(self._license_in_spec, self._license_in_src, self._pkg_license._later_support_license):
- logger.info("licenses in src:{} and in spec:{} are same".format(self._license_in_src,
- self._license_in_spec))
+ if self._pkg_license.check_licenses_is_same(self._license_in_spec, self._license_in_src,
+ self._pkg_license._later_support_license):
+ logger.info("licenses in src:%s and in spec:%s are same", self._license_in_src,
+ self._license_in_spec)
return SUCCESS
else:
- logger.error("licenses in src:{} and in spec:{} are not same".format(self._license_in_src,
- self._license_in_spec))
+ logger.error("licenses in src:%s and in spec:%s are not same", self._license_in_src,
+ self._license_in_spec)
return WARNING
def __call__(self, *args, **kwargs):
@@ -103,7 +104,7 @@ class CheckLicense(BaseCheck):
:param kwargs:
:return:
"""
- logger.info("check {} license ...".format(self._repo))
+ logger.info("check %s license ...", self._repo)
_ = not os.path.exists(self._work_tar_dir) and os.mkdir(self._work_tar_dir)
self._gr.decompress_all() # decompress all compressed file into work_tar_dir
diff --git a/src/ac/acl/package_yaml/check_repo.py b/src/ac/acl/package_yaml/check_repo.py
index 839fd0be3d98b4c9ee8fe34e9cbdff87a7c4c362..11cdd96010439cf84cf0d3c471692584b268d50e 100644
--- a/src/ac/acl/package_yaml/check_repo.py
+++ b/src/ac/acl/package_yaml/check_repo.py
@@ -72,7 +72,7 @@ class DefaultReleaseTags(AbsReleaseTags):
通过url获取上游社区的release tags
return: list
"""
- logging.info("unsupported version control: {}".format(self.version_control))
+ logging.info("unsupported version control: %s", self.version_control)
return []
@@ -118,7 +118,7 @@ class HttpReleaseTagsMixin(object):
response = requests.get(url, headers=headers, timeout=timeout)
need_redirect, new_url, cookies = self.get_redirect_resp(url, response)
if tldextract.extract(url).domain != tldextract.extract(new_url).domain: # 判断域名是否一致 预防csrf攻击
- logging.warning("domain of redirection link is different: {}".format(new_url))
+ logging.warning("domain of redirection link is different: %s", new_url)
return ""
if need_redirect:
cookie_dict = {}
@@ -128,13 +128,13 @@ class HttpReleaseTagsMixin(object):
url = new_url
response = requests.get(url, headers=headers, cookies=cookie_dict, timeout=timeout)
except requests.exceptions.SSLError as e:
- logging.warning("requests {} ssl exception, {}".format(url, e))
+ logging.warning("requests %s ssl exception, %s", url, e)
return ""
except requests.exceptions.Timeout as e:
logging.warning("requests timeout")
return ""
except requests.exceptions.RequestException as e:
- logging.warning("requests exception, {}".format(e))
+ logging.warning("requests exception, %s", e)
return ""
return response
@@ -156,7 +156,7 @@ class HgReleaseTags(AbsReleaseTags, HttpReleaseTagsMixin):
return: list
"""
url = self.url(repo)
- logging.debug("{repo} : get {vc} tags".format(repo=url, vc=self.version_control))
+ logging.debug("%s : get %s tags", url, self.version_control)
if not url:
logging.warning("illegal url: \"\"")
return []
@@ -170,7 +170,7 @@ class HgReleaseTags(AbsReleaseTags, HttpReleaseTagsMixin):
temp_tags.sort(reverse=True, key=lambda x: x["date"][0])
release_tags = [tag["tag"] for tag in temp_tags]
except Exception as e:
- logging.error("exception, {}".format(e))
+ logging.error("exception, %s", e)
return []
return release_tags
@@ -192,7 +192,7 @@ class HgRawReleaseTags(AbsReleaseTags, HttpReleaseTagsMixin):
return: list
"""
url = self.url(repo)
- logging.debug("{repo} : get {vc} tags".format(repo=url, vc=self.version_control))
+ logging.debug("%s : get %s tags", url, self.version_control)
if not url:
logging.warning("illegal url: \"\"")
return []
@@ -220,7 +220,7 @@ class MetacpanReleaseTags(AbsReleaseTags, HttpReleaseTagsMixin):
return: list
"""
url = self.url(repo)
- logging.debug("{repo} : get {vc} tags".format(repo=url, vc=self.version_control))
+ logging.debug("%s : get %s tags", url, self.version_control)
if not url:
logging.warning("illegal url: \"\"")
return []
@@ -258,7 +258,7 @@ class PypiReleaseTags(AbsReleaseTags, HttpReleaseTagsMixin):
return: list
"""
url = self.url(repo)
- logging.debug("{repo} : get {vc} tags".format(repo=url, vc=self.version_control))
+ logging.debug("%s : get %s tags", url, self.version_control)
if not url:
logging.warning("illegal url: \"\"")
return []
@@ -267,7 +267,7 @@ class PypiReleaseTags(AbsReleaseTags, HttpReleaseTagsMixin):
tags_json = response.json()
release_tags = [tag for tag in tags_json.get("releases")]
except Exception as e:
- logging.error("exception, {}".format(e))
+ logging.error("exception, %s", e)
return []
return release_tags
@@ -289,7 +289,7 @@ class RubygemReleaseTags(AbsReleaseTags, HttpReleaseTagsMixin):
return: list
"""
url = self.url(repo)
- logging.debug("{repo} : get {vc} tags".format(repo=url, vc=self.version_control))
+ logging.debug("%s : get %s tags", url, self.version_control)
if not url:
logging.warning("illegal url: \"\"")
return []
@@ -301,7 +301,7 @@ class RubygemReleaseTags(AbsReleaseTags, HttpReleaseTagsMixin):
if element.get("number"):
release_tags.append(element.get("number"))
except Exception as e:
- logging.error("exception, {}".format(e))
+ logging.error("exception, %s", e)
return []
return release_tags
@@ -323,7 +323,7 @@ class GnuftpReleaseTags(AbsReleaseTags, HttpReleaseTagsMixin):
return: list
"""
url = self.url(repo)
- logging.debug("{repo} : get {vc} tags".format(repo=url, vc=self.version_control))
+ logging.debug("%s : get %s tags", url, self.version_control)
if not url:
logging.warning("illegal url: \"\"")
return []
@@ -356,7 +356,7 @@ class FtpReleaseTags(AbsReleaseTags, HttpReleaseTagsMixin):
return: list
"""
url = self.url(repo)
- logging.debug("{repo} : get {vc} tags".format(repo=url, vc=self.version_control))
+ logging.debug("%s : get %s tags", url, self.version_control)
if not url:
logging.warning("illegal url: \"\"")
return []
@@ -382,7 +382,7 @@ class CmdReleaseTagsMixin(object):
sub_proc = subprocess.Popen(cmd_list, stdout=subprocess.PIPE)
response = sub_proc.stdout.read().decode("utf-8")
if sub_proc.wait():
- logging.warning("{cmd} > encount errors".format(cmd=" ".join(cmd_list)))
+ logging.warning("%s > encount errors", " ".join(cmd_list))
return response
@@ -411,7 +411,7 @@ class SvnReleaseTags(AbsReleaseTags, CmdReleaseTagsMixin):
return: list
"""
url = self.url(repo)
- logging.debug("{repo} : get svn tags".format(repo=url))
+ logging.debug("%s : get svn tags", url)
if not url:
logging.warning("illegal url: \"\"")
return []
@@ -465,7 +465,7 @@ class GitReleaseTags(AbsReleaseTags, CmdReleaseTagsMixin):
return: list
"""
url = self.url(repo)
- logging.debug("{repo} : get {vc} tags".format(repo=url, vc=self.version_control))
+ logging.debug("%s : get %s tags", url, self.version_control)
if not url:
logging.warning("illegal url: \"\"")
return []
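
get_redirect_resp only follows a redirect when the registered domain is unchanged, which is what the tldextract comparison above enforces. A small illustration with made-up URLs:

    import tldextract

    old_url = "https://pypi.org/project/requests/"
    new_url = "https://evil.example.com/project/requests/"

    # Compare the registered domain only, so pypi.org -> www.pypi.org
    # is accepted while pypi.org -> evil.example.com is rejected.
    if tldextract.extract(old_url).domain != tldextract.extract(new_url).domain:
        print("domain of redirection link is different:", new_url)
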
diff --git a/src/ac/acl/package_yaml/check_yaml.py b/src/ac/acl/package_yaml/check_yaml.py
index dad80acef854db875534990f4134bb0c5637259c..8ac90cc8396fecdccf5dfe228f13b6d9e99cf0f5 100644
--- a/src/ac/acl/package_yaml/check_yaml.py
+++ b/src/ac/acl/package_yaml/check_yaml.py
@@ -73,7 +73,7 @@ class CheckPackageYaml(BaseCheck):
for change_file in diff_files:
if change_file == package_yaml:
- logger.debug("diff files: {}".format(diff_files))
+ logger.debug("diff files: %s", diff_files)
return True
return False
@@ -93,16 +93,16 @@ class CheckPackageYaml(BaseCheck):
with open(os.path.join(self._work_dir, yaml_path), 'r') as yaml_data: # load yaml data
self._yaml_content = yaml.safe_load(yaml_data)
except IOError as e:
- logging.warning("package yaml not exist. {}".format(str(e)))
+ logging.warning("package yaml not exist. %s", str(e))
return WARNING
except yaml.YAMLError as exc:
- logging.warning("Error parsering YAML: {}".format(str(exc)))
+ logging.warning("Error parsering YAML: %s", str(exc))
return WARNING
result = SUCCESS
for keyword in self.PACKAGE_YAML_NEEDED_KEY:
if keyword not in self._yaml_content:
- logger.error("yaml field {} missing".format(keyword))
+ logger.error("yaml field %s missing", keyword)
self._is_standard = True
result = WARNING
return result
@@ -129,7 +129,7 @@ class CheckPackageYaml(BaseCheck):
tags = release_tags.get_tags(sr)
if not tags:
- logger.warning("failed to get version by yaml, version_control: {t1}, src_repo: {t2}".format(t1=vc, t2=sr))
+ logger.warning("failed to get version by yaml, version_control: %s, src_repo: %s", vc, sr)
return WARNING
return SUCCESS
@@ -154,9 +154,9 @@ class CheckPackageYaml(BaseCheck):
if not src_url:
src_url = self._spec.get_source("Source")
vc = self.VERSION_CTRL_TRANS.get(vc, vc) # 对特殊的版本控制对应的域名进行转换
- logger.debug("version control: {vctrl} source url: {url}".format(vctrl=vc, url=src_url))
+ logger.debug("version control: %s source url: %s", vc, src_url)
if vc not in src_url: # 通过判断版本控制字段是否在主页url中 判断一致性
- logger.warning("{vc} is not in url: {url}".format(vc=vc, url=src_url))
+ logger.warning("%s is not in url: %s", vc, src_url)
return WARNING
return SUCCESS
@@ -196,14 +196,14 @@ class CheckPackageYaml(BaseCheck):
src_url = self._spec.get_source("Source0")
if not src_url:
src_url = self._spec.get_source("Source")
- logger.debug("software name: {name} source url: {url}".format(name=software_name, url=src_url))
+ logger.debug("software name: %s source url: %s", software_name, src_url)
if software_name not in src_url:
- logger.warning("{name} is not in source0: {url}".format(name=software_name, url=src_url))
+ logger.warning("%s is not in source0: %s", software_name, src_url)
return WARNING
return SUCCESS
def __call__(self, *args, **kwargs):
- logger.info("check {} yaml ...".format(self._repo))
+ logger.info("check %s yaml ...", self._repo)
self._yaml_changed = self.is_change_package_yaml() # yaml文件变更 进行检查
# 因门禁系统限制外网访问权限,将涉及外网访问的检查功能check_repo暂时关闭
return self.start_check_with_order("fields", "repo_domain", "repo_name")
diff --git a/src/ac/acl/sca/check_sca.py b/src/ac/acl/sca/check_sca.py
index da04991c38809f129cd8d3f30ff415023d72871c..362c1d9ecf549214d7fa12b724487bd11ff4d3bc 100644
--- a/src/ac/acl/sca/check_sca.py
+++ b/src/ac/acl/sca/check_sca.py
@@ -17,6 +17,7 @@
import os
import shutil
import logging
+import json
from src.proxy.git_proxy import GitProxy
from src.ac.framework.ac_base import BaseCheck
@@ -39,50 +40,22 @@ class CheckSCA(BaseCheck):
"""
super(CheckSCA, self).__init__(workspace, repo, conf)
- self._work_diff_dir = os.path.join(workspace, "diff") # 目标目录,保存变更了的代码
-
- def copy_diff_files_to_dest(self, files):
- """
- 拷贝所有diff文件到目标目录
- :param files: 文件列表
- :return:
- """
- for filepath in files:
- try:
- shutil.copy(os.path.join(self._work_dir, filepath), self._work_diff_dir)
- except IOError:
- logger.exception("copy {} to {} exception".format(filepath, self._work_diff_dir))
-
- def save_scanoss_result(self, html):
- """
- 保存结果到本地
- :param html: scanoss 结果,html格式
- :return:
- """
- with open(self._scanoss_result_output, "w") as f:
- f.write(html)
-
def check_scanoss(self):
"""
- scanoss工具检查代码片段引用
- https://osskb.org
- https://github.com/scanoss/scanner.py
- :return:
+ Obtain the scanoss log and result
"""
- gp = GitProxy(self._work_dir)
- diff_files = gp.diff_files_between_commits("HEAD~1", "HEAD~0")
- logger.debug("diff files: {}".format(diff_files))
-
- self.copy_diff_files_to_dest(diff_files)
-
- blacklist_sbom = os.path.realpath(os.path.join(os.path.realpath(__file__), "../../../../conf/deny_list.sbom"))
- scan = ScanOSS(self._scanoss_api_key, self._scanoss_api_url, blacklist_sbom)
- result = scan.scan(self._work_diff_dir)
-
+ # the result file carries the scanoss fields: reportUrl, result, jenkinsJobName, jenkinsBuildNum, prNo, repoUrl
+ try:
+ with open(self._scanoss_result_output, 'r') as f:
+ result_dict = json.load(f)
+ except IOError:
+ logger.error("%s not found, make sure this file exists", self._scanoss_result_output)
+ return FAILED
+
+ result = result_dict.get('result')
+
# 保存详细结果到web server
- if not result:
- self.save_scanoss_result(scan.html)
- logger.warning("click {} view scanoss detail".format(self._scanoss_result_repo_path))
+ logger.warning("click %s view scanoss detail", result_dirt.get('reportUrl'))
return SUCCESS if result else FAILED
@@ -93,17 +66,10 @@ class CheckSCA(BaseCheck):
:param kwargs:
:return:
"""
- logger.info("check {} sca ...".format(self._repo))
+ logger.info("check %s sca ...", self._repo)
- logger.debug("args: {}, kwargs: {}".format(args, kwargs))
+ logger.debug("args: %s, kwargs: %s", args, kwargs)
scanoss_conf = kwargs.get("scanoss", {})
- self._scanoss_api_key = scanoss_conf.get("api_key", "")
- self._scanoss_api_url = scanoss_conf.get("api_url", "https://osskb.org/api/scan/direct")
- self._scanoss_result_output = scanoss_conf.get("output", "scanoss_result") # 保存结果到本地文件
- self._scanoss_result_repo_path = scanoss_conf.get("repo_path", "-lost linker-") # 保存结果到web server的路径
-
- _ = not os.path.exists(self._work_diff_dir) and os.mkdir(self._work_diff_dir)
- try:
- return self.start_check()
- finally:
- shutil.rmtree(self._work_diff_dir)
+ self._scanoss_result_output = scanoss_conf.get("output", "scanoss_result")
+
+ return self.start_check()
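
check_scanoss now consumes a pre-generated result file instead of scanning locally. A sketch of the shape it appears to expect, assuming (per the comment in the hunk) a JSON object carrying result and reportUrl fields:

    import json

    # Hypothetical content of the scanoss_result file:
    # {"result": true, "reportUrl": "https://example.org/report/123", ...}
    with open("scanoss_result") as f:
        result_dict = json.load(f)

    print("report:", result_dict.get("reportUrl"))
    print("check passed:", bool(result_dict.get("result")))
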
diff --git a/src/ac/acl/spec/check_spec.py b/src/ac/acl/spec/check_spec.py
index ca0b0ade766eb45a8170aeccbae1d4a03bb9ede1..540d41fb01b819991d57c818378ea1413f97098d 100644
--- a/src/ac/acl/spec/check_spec.py
+++ b/src/ac/acl/spec/check_spec.py
@@ -53,7 +53,7 @@ class CheckSpec(BaseCheck):
package_yaml = "{}.yaml".format(self._repo) # package yaml file name
if len(diff_files) == 1 and diff_files[0] == package_yaml:
- logger.debug("diff files: {}".format(diff_files))
+ logger.debug("diff files: %s", diff_files)
return True
return False
@@ -94,7 +94,7 @@ class CheckSpec(BaseCheck):
# if lts branch, version update is forbidden
if self._is_lts_branch():
- logger.debug("lts branch {}".format(self._tbranch))
+ logger.debug("lts branch %s", self._tbranch)
if RPMSpecAdapter.compare_version(self._spec.version, spec_o.version) == 1:
logger.error("version update of lts branch is forbidden")
return FAILED
@@ -106,8 +106,8 @@ class CheckSpec(BaseCheck):
logger.debug("revert commit")
return SUCCESS
- logger.error("current version: {}-r{}, last version: {}-r{}".format(
- self._spec.version, self._spec.release, spec_o.version, spec_o.release))
+ logger.error("current version: %s-r%s, last version: %s-r%s",
+ self._spec.version, self._spec.release, spec_o.version, spec_o.release)
return FAILED
def check_homepage(self, timeout=30, retrying=3, interval=1):
@@ -119,7 +119,7 @@ class CheckSpec(BaseCheck):
:return:
"""
homepage = self._spec.url
- logger.debug("homepage: {}".format(homepage))
+ logger.debug("homepage: %s", homepage)
if not homepage:
return SUCCESS
@@ -137,15 +137,15 @@ class CheckSpec(BaseCheck):
"""
patches_spec = set(self._spec.patches)
patches_file = set(self._gr.patch_files_not_recursive())
- logger.debug("spec patches: {}".format(patches_spec))
- logger.debug("file patches: {}".format(patches_file))
+ logger.debug("spec patches: %s", patches_spec)
+ logger.debug("file patches: %s", patches_file)
result = SUCCESS
for patch in patches_spec - patches_file:
- logger.error("patch {} lost".format(patch))
+ logger.error("patch %s lost", patch)
result = FAILED
for patch in patches_file - patches_spec:
- logger.warning("patch {} redundant".format(patch))
+ logger.warning("patch %s redundant", patch)
return result
@@ -164,7 +164,7 @@ class CheckSpec(BaseCheck):
content = "x86-64"
if content is not None:
- logger.info("exclusive arch \"{}\"".format(content))
+ logger.info("exclusive arch \"%s\"", content)
try:
with open("exclusive_arch", "w") as f:
f.write(content)
@@ -189,7 +189,7 @@ class CheckSpec(BaseCheck):
"last_version": spec.version, "last_release": spec.release,
"compare_version": compare_version, "compare_release": compare_release, "compare": compare}
- logger.info("{}".format(rs))
+ logger.info("%s", rs)
try:
with open("pkgship_notify", "w") as f:
yaml.safe_dump(rs, f)
@@ -197,7 +197,7 @@ class CheckSpec(BaseCheck):
logger.exception("save pkgship exception")
def __call__(self, *args, **kwargs):
- logger.info("check {} spec ...".format(self._repo))
+ logger.info("check %s spec ...", self._repo)
self._ex_exclusive_arch()
self._tbranch = kwargs.get("tbranch", None)
diff --git a/src/ac/common/gitee_repo.py b/src/ac/common/gitee_repo.py
index 996bfe4e3a4d3fa62922d3d6bd4cb3e4c086e4d5..c24ca09039c0c6bbe7d01f2d8a0d535c7770299d 100644
--- a/src/ac/common/gitee_repo.py
+++ b/src/ac/common/gitee_repo.py
@@ -53,16 +53,16 @@ class GiteeRepo(object):
for filename in filenames:
rel_file_path = os.path.join(dirpath, filename).replace(self._work_dir, "").lstrip("/")
if self.is_compress_file(filename):
- logger.debug("find compress file: {}".format(rel_file_path))
+ logger.debug("find compress file: %s", rel_file_path)
self._compress_files.append(rel_file_path)
elif self.is_patch_file(filename):
- logger.debug("find patch file: {}".format(rel_file_path))
+ logger.debug("find patch file: %s", rel_file_path)
self._patch_files.append(rel_file_path)
elif self.is_spec_file(filename):
- logger.debug("find spec file: {}".format(rel_file_path))
+ logger.debug("find spec file: %s", rel_file_path)
spec_files.append(filename)
elif self.is_package_yaml_file(filename):
- logger.debug("find yaml file: {}".format(rel_file_path))
+ logger.debug("find yaml file: %s", rel_file_path)
self.yaml_file = rel_file_path
def guess_real_spec_file():
@@ -109,7 +109,7 @@ class GiteeRepo(object):
decompress_cmd = "cd {}; timeout 120s tar -C {} -xavf {}".format(
self._work_dir, self._decompress_dir, file_path)
else:
- logger.warning("unsupport compress file: {}".format(file_path))
+ logger.warning("unsupport compress file: %s", file_path)
return False
ret, _, _ = shell_cmd_live(decompress_cmd)
@@ -136,20 +136,20 @@ class GiteeRepo(object):
:param patch: 补丁
:param max_leading: leading path
"""
- logger.debug("apply patch {}".format(patch))
+ logger.debug("apply patch %s", patch)
for patch_dir in [filename for filename in os.listdir(self._decompress_dir)
if os.path.isdir(os.path.join(self._decompress_dir, filename))] + ["."]:
if patch_dir.startswith(".git"):
continue
for leading in range(max_leading + 1):
- logger.debug("try dir {} -p{}".format(patch_dir, leading))
+ logger.debug("try dir %s -p%s", patch_dir, leading)
if GitProxy.apply_patch_at_dir(os.path.join(self._decompress_dir, patch_dir),
os.path.join(self._work_dir, patch), leading):
logger.debug("patch success")
self.patch_dir_mapping[patch] = os.path.join(self._decompress_dir, patch_dir)
return True
- logger.info("apply patch {} failed".format(patch))
+ logger.info("apply patch %s failed", patch)
return False
def apply_all_patches(self, *patches):
@@ -166,7 +166,7 @@ class GiteeRepo(object):
if patch in set(self._patch_files):
rs.append(self.apply_patch(patch))
else:
- logger.error("patch {} not exist".format(patch))
+ logger.error("patch %s not exist", patch)
rs.append(False)
return 0 if all(rs) else (1 if any(rs) else -1)
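
apply_patch tries every decompressed top-level directory at strip levels 0..max_leading until the patch applies, and apply_all_patches folds the per-patch booleans into a tri-state code: 0 if all applied, 1 if some did, -1 if none did. That fold in isolation:

    def fold_patch_results(rs):
        """rs: list of booleans, one per patch."""
        # 0: every patch applied, 1: some applied, -1: none applied
        return 0 if all(rs) else (1 if any(rs) else -1)

    assert fold_patch_results([True, True]) == 0
    assert fold_patch_results([True, False]) == 1
    assert fold_patch_results([False, False]) == -1
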
diff --git a/src/ac/common/linter.py b/src/ac/common/linter.py
index 0b763037994ffaed1f1676cd02cb7dd0656bb398..95eacc86462c88abb4cc3aab2d688f155bd13833 100644
--- a/src/ac/common/linter.py
+++ b/src/ac/common/linter.py
@@ -73,13 +73,13 @@ class LinterCheck(object):
* (E) error, for probable bugs in the code
* (F) fatal, if an error occurred which prevented pylint from doing
"""
- logger.debug("check python file: {}".format(filepath))
+ logger.debug("check python file: %s", filepath)
# E0401: import module error
pylint_cmd = "pylint3 --disable=E0401 {}".format(filepath)
ret, out, _ = shell_cmd_live(pylint_cmd, cap_out=True, verbose=True)
if ret:
- logger.debug("pylint ret, {}".format(ret))
+ logger.debug("pylint ret, %s", ret)
return cls.get_summary_of_pylint(out)
@@ -88,12 +88,12 @@ class LinterCheck(object):
"""
Check golang code by golint
"""
- logger.debug("check go file: {}".format(filepath))
+ logger.debug("check go file: %s", filepath)
golint_cmd = "golint {}".format(filepath)
ret, out, _ = shell_cmd_live(golint_cmd, cap_out=True, verbose=True)
if ret:
- logger.debug("golint error, {}".format(ret))
+ logger.debug("golint error, %s", ret)
return {}
return cls.get_summary_of_golint(out)
@@ -103,13 +103,13 @@ class LinterCheck(object):
"""
Check c/c++ code by splint
"""
- logger.debug("check c/c++ file: {}".format(filepath))
+ logger.debug("check c/c++ file: %s", filepath)
splint_cmd = "splint {}".format(filepath)
#ret, out, _ = shell_cmd_live(splint_cmd, cap_out=True, verbose=True)
ret, out, _ = shell_cmd(splint_cmd)
if ret:
- logger.debug("splint error, {}".format(ret))
+ logger.debug("splint error, %s", ret)
return {}
return cls.get_summary_of_splint(out)
diff --git a/src/ac/common/rpm_spec_adapter.py b/src/ac/common/rpm_spec_adapter.py
index a714476a2d5254a961818f3839fa620525213c01..f32ddba59983b1eb8a497e48818741c7a7bff6d1 100644
--- a/src/ac/common/rpm_spec_adapter.py
+++ b/src/ac/common/rpm_spec_adapter.py
@@ -70,7 +70,7 @@ class RPMSpecAdapter(object):
"""
try:
value = self.buildarch
- logger.debug("build arch: {}".format(value))
+ logger.debug("build arch: %s", value)
if "x86_64" in value.lower():
return True
@@ -85,7 +85,7 @@ class RPMSpecAdapter(object):
"""
try:
value = self.buildarch
- logger.debug("build arch: {}".format(value))
+ logger.debug("build arch: %s", value)
if "aarch64" in value.lower():
return True
@@ -111,7 +111,7 @@ class RPMSpecAdapter(object):
version_n = "{}{}".format(version_n, '.0' * (len(version_o.split('.')) - len(version_n.split('.'))))
version_o = "{}{}".format(version_o, '.0' * (len(version_n.split('.')) - len(version_o.split('.'))))
- logger.debug("compare versions: {} vs {}".format(version_n, version_o))
+ logger.debug("compare versions: %s vs %s", version_n, version_o)
z = zip(version_n.split("."), version_o.split("."))
for p in z:
@@ -121,7 +121,7 @@ class RPMSpecAdapter(object):
elif int(p[0]) > int(p[1]):
return 1
except ValueError as exc:
- logger.debug("check version exception, {}".format(exc))
+ logger.debug("check version exception, %s", exc)
continue
return 0
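
compare_version pads the shorter version string with '.0' segments, then compares segment pairs as integers and skips non-numeric pairs. A self-contained sketch of the same idea:

    def compare_versions(new, old):
        """Return 1 if new > old, -1 if new < old, 0 otherwise."""
        n, o = new.split("."), old.split(".")
        # pad the shorter list with "0": "1.2" vs "1.2.3" -> "1.2.0" vs "1.2.3"
        n += ["0"] * (len(o) - len(n))
        o += ["0"] * (len(n) - len(o))
        for a, b in zip(n, o):
            try:
                if int(a) != int(b):
                    return 1 if int(a) > int(b) else -1
            except ValueError:  # non-numeric segment, skip like the adapter
                continue
        return 0

    assert compare_versions("1.10", "1.9.1") == 1
    assert compare_versions("2.0", "2.0.0") == 0
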
diff --git a/src/ac/common/scanoss.py b/src/ac/common/scanoss.py
index 39ce0391c02d47116c34cd0d9ba4b3533aa996df..2262702efe90fef902210868977c1030c2f259e4 100644
--- a/src/ac/common/scanoss.py
+++ b/src/ac/common/scanoss.py
@@ -50,7 +50,7 @@ class ScanOSS(object):
try:
json_format = json.loads(result)
except ValueError:
- logger.exception("illegal scanoss result, \"{}\"".format(result))
+ logger.exception("illegal scanoss result, \"%s\"", result)
return True
snippets = 0
@@ -67,7 +67,7 @@ class ScanOSS(object):
detail_trs.append(self.__class__.detail_trs(filename, item))
- logger.debug("snippets: {}, files: {}".format(snippets, files))
+ logger.debug("snippets: %s, files: %s", snippets, files)
detail = "
".format(
th=self.__class__.detail_th(), trs="\n".join(detail_trs))
@@ -161,7 +161,7 @@ class ScanOSS(object):
:param directory: 需要扫描的目录
:return:
"""
- logger.debug("scan dir: {}".format(directory))
+ logger.debug("scan dir: %s", directory)
#scanoss_cmd = "scanner.py --format {} {} --apiurl {} {}".format(
# "plain", "--key {}".format(self._key) if self._key else "", self._api_url, directory)
scanoss_cmd = "scanner.py --blacklist {} --format {} {} --apiurl {} {}".format(
@@ -169,8 +169,8 @@ class ScanOSS(object):
ret, out, err = shell_cmd(scanoss_cmd)
if ret:
- logger.error("scanoss error, {}".format(ret))
- logger.error("{}".format(err))
+ logger.error("scanoss error, %s", ret)
+ logger.error("%s", err)
return True
return self.result_analysis(out)
diff --git a/src/ac/framework/ac.py b/src/ac/framework/ac.py
index 6e32ebbd1e486f6c5d53dca6c47c9f4263ae89a6..f208b007df296d8c101eb01760b9d37d5ba2816f 100644
--- a/src/ac/framework/ac.py
+++ b/src/ac/framework/ac.py
@@ -49,7 +49,7 @@ class AC(object):
self.load_check_elements_from_acl_directory(acl_path)
self.load_check_elements_from_conf(conf, community)
- logger.debug("check list: {}".format(self._ac_check_elements))
+ logger.debug("check list: %s", self._ac_check_elements)
@staticmethod
def is_repo_support_check(repo, check_element):
@@ -74,7 +74,7 @@ class AC(object):
"""
for element in self._ac_check_elements:
check_element = self._ac_check_elements[element]
- logger.debug("check {}".format(element))
+ logger.debug("check %s", element)
# show in gitee, must starts with "check_"
hint = check_element.get("hint", "check_{}".format(element))
@@ -82,25 +82,25 @@ class AC(object):
hint = "check_{}".format(hint)
if not self.__class__.is_repo_support_check(repo, check_element):
- logger.debug("{} not support check".format(repo))
+ logger.debug("%s not support check", repo)
continue
# import module
module_path = check_element.get("module", "{}.check_{}".format(element, element)) # eg: spec.check_spec
try:
module = importlib.import_module("." + module_path, self._acl_package)
- logger.debug("load module {} succeed".format(module_path))
+ logger.debug("load module %s succeed", module_path)
except ImportError as exc:
- logger.exception("import module {} exception, {}".format(module_path, exc))
+ logger.exception("import module %s exception, %s", module_path, exc)
continue
# import entry
entry_name = check_element.get("entry", "Check{}".format(element.capitalize()))
try:
entry = getattr(module, entry_name)
- logger.debug("load entry \"{}\" succeed".format(entry_name))
+ logger.debug("load entry \"%s\" succeed", entry_name)
except AttributeError as exc:
- logger.warning("entry \"{}\" not exist in module {}, {}".format(entry_name, module_path, exc))
+ logger.warning("entry \"%s\" not exist in module %s, %s", entry_name, module_path, exc)
continue
# new a instance
@@ -109,26 +109,26 @@ class AC(object):
entry = entry(workspace, repo, check_element) # new a instance
except Exception as exc:
self._ac_check_result.append({"name": hint, "result": FAILED.val})
- logger.exception("new a instance of class {} exception, {}".format(entry_name, exc))
+ logger.exception("new a instance of class %s exception, %s", entry_name, exc)
continue
if not callable(entry): # check callable
- logger.warning("entry {} not callable".format(entry_name))
+ logger.warning("entry %s not callable", entry_name)
continue
# do ac check
try:
result = entry(**kwargs)
- logger.debug("check result {} {}".format(element, result))
+ logger.debug("check result %s %s", element, result)
except Exception as exc:
- logger.exception("check exception, {} {}".format(element, exc))
+ logger.exception("check exception, %s %s", element, exc)
continue
self._ac_check_result.append({"name": hint, "result": result.val})
dataset.set_attr("access_control.build.acl.{}".format(element), result.hint)
dataset.set_attr("access_control.build.content", self._ac_check_result)
- logger.debug("ac result: {}".format(self._ac_check_result))
+ logger.debug("ac result: %s", self._ac_check_result)
def load_check_elements_from_acl_directory(self, acl_dir):
"""
@@ -150,18 +150,18 @@ class AC(object):
with open(conf_file, "r") as f:
content = yaml.safe_load(f)
except IOError:
- logger.exception("ac conf file {} not exist".format(conf_file))
+ logger.exception("ac conf file %s not exist", conf_file)
return
except YAMLError:
logger.exception("illegal conf file format")
return
elements = content.get(community, {})
- logger.debug("community \"{}\" conf: {}".format(community, elements))
+ logger.debug("community \"%s\" conf: %s", community, elements)
for name in elements:
if name in self._ac_check_elements:
if elements[name].get("exclude"):
- logger.debug("exclude: {}".format(name))
+ logger.debug("exclude: %s", name)
self._ac_check_elements.pop(name)
else:
self._ac_check_elements[name] = elements[name]
@@ -172,7 +172,7 @@ class AC(object):
:param ac_file:
:return:
"""
- logger.debug("save ac result to file {}".format(ac_file))
+ logger.debug("save ac result to file %s", ac_file)
with open(ac_file, "w") as f:
f.write("ACL={}".format(json.dumps(self._ac_check_result)))
@@ -202,15 +202,12 @@ def init_args():
parser.add_argument("-l", type=str, dest="trigger_link", help="job trigger link")
# scanoss
- parser.add_argument("--scanoss-api-key", type=str, dest="scanoss_api_key", help="scanoss api key")
- parser.add_argument("--scanoss-api-url", type=str, dest="scanoss_api_url",
- default="https://osskb.org/api/scan/direct", help="scanoss api url")
parser.add_argument("--scanoss-output", type=str, dest="scanoss_output",
default="scanoss_result", help="scanoss result output")
- parser.add_argument("--scanoss-repo-path", type=str, dest="scanoss_repo", help="scanoss result repo path")
+ parser.add_argument("--codecheck-api-key", type=str, dest="codecheck_api_key", help="codecheck api key")
parser.add_argument("--codecheck-api-url", type=str, dest="codecheck_api_url",
- default="http://124.71.75.234:8384/api/openlibing/codecheck/start", help="codecheck api url")
+ default="https://majun.osinfra.cn:8384/api/openlibing/codecheck", help="codecheck api url")
return parser.parse_args()
@@ -225,10 +222,10 @@ if "__main__" == __name__:
logging.config.fileConfig(logger_conf_path)
logger = logging.getLogger("ac")
- logger.info("using credential {}".format(args.account.split(":")[0]))
- logger.info("cloning repository https://gitee.com/{}/{}.git ".format(args.community, args.repo))
+ logger.info("using credential %s", args.account.split(":")[0])
+ logger.info("cloning repository https://gitee.com/%s/%s.git ", args.community, args.repo)
logger.info("clone depth 4")
- logger.info("checking out pull request {}".format(args.pr))
+ logger.info("checking out pull request %s", args.pr)
# notify gitee
from src.proxy.gitee_proxy import GiteeProxy
@@ -289,11 +286,10 @@ if "__main__" == __name__:
gp.create_tags_of_pr(args.pr, "ci_processing")
# scanoss conf
- scanoss = {"api_key": args.scanoss_api_key, "api_url": args.scanoss_api_url,
- "output": args.scanoss_output, "repo_path": args.scanoss_repo}
+ scanoss = {"output": args.scanoss_output}
codecheck = {"pr_url": "https://gitee.com/{}/{}/pulls/{}".format(args.community, args.repo, args.pr),
- "pr_number": args.pr, "codecheck_api_url": args.codecheck_api_url
+ "pr_number": args.pr, "codecheck_api_url": args.codecheck_api_url, "codecheck_api_key": args.codecheck_api_key
}
# build
diff --git a/src/ac/framework/ac_base.py b/src/ac/framework/ac_base.py
index 7deb3b54b926831c75bf0b8c5ffd39b66f9041e5..9f3e9d73f3b85ed74582244f24c3086b36aac64d 100644
--- a/src/ac/framework/ac_base.py
+++ b/src/ac/framework/ac_base.py
@@ -57,27 +57,27 @@ class BaseCheck(object):
result = SUCCESS
for name in items:
try:
- logger.debug("check {}".format(name))
+ logger.debug("check %s", name)
method = getattr(self, "check_{}".format(name))
rs = method()
- logger.debug("{} -> {}".format(name, rs))
+ logger.debug("%s -> %s", name, rs)
except Exception as e:
# 忽略代码错误
- logger.exception("internal error: {}".format(e))
+ logger.exception("internal error: %s", e)
continue
ignored = True if self._conf and name in self._conf.get("ignored", []) else False
- logger.debug("{} ignore: {}".format(name, ignored))
+ logger.debug("%s ignore: %s", name, ignored)
if rs is SUCCESS:
- logger.info("check {:<30}pass".format(name))
+ logger.info("check %s pass", name)
elif rs is WARNING:
- logger.warning("check {:<30}warning{}".format(name, " [ignored]" if ignored else ""))
+ logger.warning("check %s warning %s", name, " [ignored]" if ignored else "")
elif rs is FAILED:
- logger.error("check {:<30}fail{}".format(name, " [ignored]" if ignored else ""))
+ logger.error("check %s fail %s", name, " [ignored]" if ignored else "")
else:
# never here
- logger.exception("check {:<30}exception{}".format(name, " [ignored]" if ignored else ""))
+ logger.exception("check %s exception %s", name, " [ignored]" if ignored else "")
continue
if not ignored:
@@ -91,6 +91,6 @@ class BaseCheck(object):
"""
members = inspect.getmembers(self, inspect.ismethod)
items = [member[0].replace("check_", "") for member in members if member[0].startswith("check_")]
- logger.debug("check items: {}".format(items))
+ logger.debug("check items: %s", items)
return self.start_check_with_order(*items)
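
start_check discovers every check_* method through inspect and dispatches by name with getattr; start_check_with_order is the explicit-order variant. A minimal sketch of that reflection pattern:

    import inspect

    class DemoCheck:
        def check_spec(self):
            return "pass"

        def check_yaml(self):
            return "pass"

        def start_check(self):
            members = inspect.getmembers(self, inspect.ismethod)
            items = [name.replace("check_", "", 1)
                     for name, _ in members if name.startswith("check_")]
            # dispatch by name, as the framework does with getattr
            return {item: getattr(self, "check_" + item)() for item in items}

    print(DemoCheck().start_check())  # {'spec': 'pass', 'yaml': 'pass'}
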
diff --git a/src/build/extra_work.py b/src/build/extra_work.py
index fbefcc84334d537c19a1ccd4c509bfe73621a02f..149b701fc32767743939f4b639d2b0f5b4838c0b 100755
--- a/src/build/extra_work.py
+++ b/src/build/extra_work.py
@@ -49,7 +49,7 @@ class ExtraWork(object):
try:
with open(pkgship_meta_path, "r") as f:
pkgship_meta = yaml.safe_load(f)
- logger.debug("pkgship meta: {}".format(pkgship_meta))
+ logger.debug("pkgship meta: %s", pkgship_meta)
if pkgship_meta.get("compare_version") == 1: # version upgrade
logger.debug("pkgship: notify")
return True
@@ -93,8 +93,8 @@ class ExtraWork(object):
#get rpms
curr_rpm = self._rpm_package.main_package_local()
last_rpm = self._rpm_package.last_main_package(package_arch, package_url)
- logger.debug("curr_rpm: {}".format(curr_rpm))
- logger.debug("last_rpm: {}".format(last_rpm))
+ logger.debug("curr_rpm: %s", curr_rpm)
+ logger.debug("last_rpm: %s", last_rpm)
if not curr_rpm or not last_rpm:
logger.info("no rpms")
return
@@ -108,8 +108,8 @@ class ExtraWork(object):
debuginfos = None
curr_rpm_debug = self._rpm_package.debuginfo_package_local()
last_rpm_debug = self._rpm_package.last_debuginfo_package(package_arch, package_url)
- logger.debug("curr_rpm_debug: {}".format(curr_rpm_debug))
- logger.debug("last_rpm_debug: {}".format(last_rpm_debug))
+ logger.debug("curr_rpm_debug: %s", curr_rpm_debug)
+ logger.debug("last_rpm_debug: %s", last_rpm_debug)
if curr_rpm_debug and last_rpm_debug:
debuginfos = [last_rpm_debug, curr_rpm_debug]
@@ -123,9 +123,9 @@ class ExtraWork(object):
check_abi = CheckAbi(result_output_file=output, input_rpms_path=related_rpms_url)
ret = check_abi.process_with_rpm(rpms, debuginfos)
if ret == 1:
- logger.error("check abi error: {}".format(ret))
+ logger.error("check abi error: %s", ret)
else:
- logger.debug("check abi ok: {}".format(ret))
+ logger.debug("check abi ok: %s", ret)
if os.path.exists(output):
# change of abi
@@ -135,7 +135,7 @@ class ExtraWork(object):
else:
comment = {"name": "check_abi/{}/{}".format(package_arch, self._repo), "result": "SUCCESS"}
- logger.debug("check abi comment: {}".format(comment))
+ logger.debug("check abi comment: %s", comment)
try:
with open(comment_file, "r") as f: # one repo with multi build package
comments = yaml.safe_load(f)
@@ -151,7 +151,7 @@ class ExtraWork(object):
logger.exception("yaml load check abi comment file exception")
comments.append(comment)
- logger.debug("check abi comments: {}".format(comments))
+ logger.debug("check abi comments: %s", comments)
try:
with open(comment_file, "w") as f:
yaml.safe_dump(comments, f) # list
@@ -170,14 +170,14 @@ class ExtraWork(object):
# 1. prepare install root directory
_ = not os.path.exists(install_root) and os.mkdir(install_root)
- logger.info("create install root directory: {}".format(install_root))
+ logger.info("create install root directory: %s", install_root)
repo_name_prefix = "check_install"
# 2. prepare repo
repo_source = OBSRepoSource("http://119.3.219.20:82") # obs 实时构建repo地址
repo_config = repo_source.generate_repo_info(branch_name, arch, "check_install")
- logger.info("repo source config:\n{}".format(repo_config))
+ logger.info("repo source config:\n%s", repo_config)
# write to /etc/yum.repos.d
with open("obs_realtime.repo", "w+") as f:
@@ -189,18 +189,18 @@ class ExtraWork(object):
for name, package in self._rpm_package.iter_all_rpm():
# ignore debuginfo rpm
if "debuginfo" in name or "debugsource" in name:
- logger.debug("ignore debug rpm: {}".format(name))
+ logger.debug("ignore debug rpm: %s", name)
continue
names.append(name)
packages.append(package)
- logger.info("install rpms: {}".format(names))
+ logger.info("install rpms: %s", names)
if packages:
check_install_cmd = "sudo dnf install -y --installroot={} --setopt=reposdir=. {}".format(
install_root, " ".join(packages))
ret, _, err = shell_cmd_live(check_install_cmd, verbose=True)
if ret:
- logger.error("install rpms error, {}, {}".format(ret, err))
+ logger.error("install rpms error, %s, %s", ret, err)
else:
logger.info("install rpm success")
diff --git a/src/build/gitee_comment.py b/src/build/gitee_comment.py
index 442ebbbe4250f592dca24e1d713be61594d52293..5d778bca4c7cfb32e400170fadd1fb6b3b54e00e 100755
--- a/src/build/gitee_comment.py
+++ b/src/build/gitee_comment.py
@@ -82,7 +82,7 @@ class Comment(object):
base_job_name = os.environ.get("JOB_NAME")
base_build_id = os.environ.get("BUILD_ID")
base_build_id = int(base_build_id)
- logger.debug("base_job_name: {}, base_build_id: {}".format(base_job_name, base_build_id))
+ logger.debug("base_job_name: %s, base_build_id: %s", base_job_name, base_build_id)
base_build = jenkins_proxy.get_build(base_job_name, base_build_id)
logger.debug("get base build")
self._up_builds = jenkins_proxy.get_upstream_builds(base_build)
@@ -120,7 +120,7 @@ class Comment(object):
try:
acl = json.loads(os.environ["ACL"])
- logger.debug("ac result: {}".format(acl))
+ logger.debug("ac result: %s", acl)
except ValueError:
logger.exception("invalid ac result format")
return []
@@ -138,7 +138,7 @@ class Comment(object):
comments.append(self.__class__.comment_html_table_tr_rowspan(
item["name"], ac_result.emoji, ac_result.hint))
- logger.info("ac comment: {}".format(comments))
+ logger.info("ac comment: %s", comments)
return comments
@@ -158,7 +158,7 @@ class Comment(object):
comments.append(self.__class__.comment_html_table_tr(
name, ac_result.emoji, ac_result.hint, "{}{}".format(build_url, "console"), build.buildno))
- logger.info("build comment: {}".format(comments))
+ logger.info("build comment: %s", comments)
return comments
@@ -178,25 +178,25 @@ class Comment(object):
return False
for check_abi_comment_file in self._check_abi_comment_files:
- logger.debug("check abi comment file: {}".format(check_abi_comment_file))
+ logger.debug("check abi comment file: %s", check_abi_comment_file)
if not os.path.exists(check_abi_comment_file): # check abi评论文件存在
continue
for build in builds:
name = build.job._data["fullName"]
- logger.debug("check build {}".format(name))
+ logger.debug("check build %s", name)
if not match(name, check_abi_comment_file): # 找到匹配的jenkins build
continue
- logger.debug("build \"{}\" match".format(name))
+ logger.debug("build \"%s\" match", name)
status = build.get_status()
- logger.debug("build state: {}".format(status))
+ logger.debug("build state: %s", status)
if ACResult.get_instance(status) == SUCCESS: # 保证build状态成功
with open(check_abi_comment_file, "r") as f:
try:
content = yaml.safe_load(f)
except YAMLError: # yaml base exception
logger.exception("illegal yaml format of check abi comment file ")
- logger.debug("comment: {}".format(content))
+ logger.debug("comment: %s", content)
for item in content:
ac_result = ACResult.get_instance(item.get("result"))
comments.append(self.__class__.comment_html_table_tr(
@@ -204,7 +204,7 @@ class Comment(object):
"markdown" if "link" in item else "", hashtag=False))
break
- logger.info("check abi comment: {}".format(comments))
+ logger.info("check abi comment: %s", comments)
return comments
diff --git a/src/build/obs_repo_source.py b/src/build/obs_repo_source.py
index 35483501c5fda24ceab91fb9c10c61c16eaf78f0..638e44c1f69c9c8d7d2b570d6c72665c0bd33a51 100644
--- a/src/build/obs_repo_source.py
+++ b/src/build/obs_repo_source.py
@@ -69,35 +69,35 @@ class OBSRepoSource(object):
# openstack common
url = "{}/{}/standard_{}".format(self._current_repo_host, obs_path_part_common, arch)
if do_requests("GET", url) == 0:
- logger.debug("add openstack common repo: {}".format(url))
+ logger.debug("add openstack common repo: %s", url)
repo_config += self.repo_format("openstack_common", repo_name_prefix + "_openstack_common", url)
# openstack base
url = "{}/{}/standard_{}".format(self._current_repo_host, obs_path_part_base, arch)
if do_requests("GET", url) == 0:
- logger.debug("add openstack base repo: {}".format(url))
+ logger.debug("add openstack base repo: %s", url)
repo_config += self.repo_format("openstack_base", repo_name_prefix + "_openstack_base", url)
else:
obs_path_part = branch.replace("-", ":/")
- logger.debug("branch={}, obs_path_part={}".format(branch, obs_path_part))
+ logger.debug("branch=%s, obs_path_part=%s", branch, obs_path_part)
# main
url = "{}/{}/standard_{}".format(self._current_repo_host, obs_path_part, arch)
if do_requests("GET", url) == 0:
- logger.debug("add main repo: {}".format(url))
+ logger.debug("add main repo: %s", url)
repo_config += self.repo_format(repo_name_prefix + "_main", repo_name_prefix + "_main", url)
# epol
url = "{}/{}/standard_{}".format(self._current_repo_host, obs_path_part + ":/Epol", arch)
if do_requests("GET", url) == 0:
- logger.debug("add epol repo: {}".format(url))
+ logger.debug("add epol repo: %s", url)
repo_config += self.repo_format(repo_name_prefix + "_epol", repo_name_prefix + "_epol", url)
# extras
url = "{}/{}/standard_{}".format(self._current_repo_host, obs_path_part + ":/Extras", arch)
if do_requests("GET", url) == 0:
- logger.debug("add extras repo: {}".format(url))
+ logger.debug("add extras repo: %s", url)
repo_config += self.repo_format(repo_name_prefix + "_extras", repo_name_prefix + "_extras", url)\
return repo_config
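
generate_repo_info probes each candidate OBS URL with a GET and appends a stanza for every reachable one; the result is later written under /etc/yum.repos.d. A sketch of what a repo_format helper might emit — the stanza fields here are an assumption, not taken from the repo:

    def repo_format(repo_id, repo_name, base_url):
        # Minimal yum .repo stanza; fields assumed, not confirmed by the source.
        return ("[{}]\n"
                "name={}\n"
                "baseurl={}\n"
                "enabled=1\n"
                "gpgcheck=0\n\n").format(repo_id, repo_name, base_url)

    print(repo_format("check_install_main", "check_install_main",
                      "http://119.3.219.20:82/openEuler:/20.03:/LTS:/SP3/standard_aarch64"))
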
diff --git a/src/build/osc_build_k8s.py b/src/build/osc_build_k8s.py
index 1e3e354fc08d9bfa14a12eabe997b1e698789082..c9b4d9bc2447d64b7b39ca97a9721c3a9fa53e9f 100755
--- a/src/build/osc_build_k8s.py
+++ b/src/build/osc_build_k8s.py
@@ -46,7 +46,12 @@ class SinglePackageBuild(object):
"oepkg_openstack-rocky_oe-20.03-LTS-SP2": ["openEuler:20.03:LTS:SP2:oepkg:openstack:rocky"],
"oepkg_openstack-common_oe-20.03-LTS-Next": ["openEuler:20.03:LTS:Next:oepkg:openstack:common"],
"oepkg_openstack-queens_oe-20.03-LTS-Next": ["openEuler:20.03:LTS:Next:oepkg:openstack:queens"],
- "oepkg_openstack-rocky_oe-20.03-LTS-Next": ["openEuler:20.03:LTS:Next:oepkg:openstack:rocky"]
+ "oepkg_openstack-rocky_oe-20.03-LTS-Next": ["openEuler:20.03:LTS:Next:oepkg:openstack:rocky"],
+ "oepkg_openstack-common_oe-20.03-LTS-SP3": ["openEuler:20.03:LTS:SP3:oepkg:openstack:common"],
+ "oepkg_openstack-queens_oe-20.03-LTS-SP3": ["openEuler:20.03:LTS:SP3:oepkg:openstack:queens"],
+ "oepkg_openstack-rocky_oe-20.03-LTS-SP3": ["openEuler:20.03:LTS:SP3:oepkg:openstack:rocky"],
+ "openEuler-20.03-LTS-SP3": ["openEuler:20.03:LTS:SP3", "openEuler:20.03:LTS:SP3:Epol"],
+ "openEuler-22.03-LTS-Next": ["openEuler:22.03:LTS:Next", "openEuler:22.03:LTS:Next:Epol"]
}
BUILD_IGNORED_GITEE_BRANCH = ["riscv"]
@@ -70,11 +75,12 @@ class SinglePackageBuild(object):
"""
return OBSProxy.list_repos_of_arch(project, self._package, self._arch, show_exclude=True)
- def build_obs_repos(self, project, repos, work_dir, code_dir):
+ def build_obs_repos(self, project, repos, spec, work_dir, code_dir):
"""
build
:param project: 项目名
:param repos: obs repo
+ :param spec: the spec file to build
:param code_dir: 码云代码在本地路径
:param work_dir:
:return:
@@ -100,16 +106,16 @@ class SinglePackageBuild(object):
# osc build
for repo in repos:
if repo["state"] == "excluded" and repo["mpac"] == "raspberrypi-kernel":
- logger.info("repo {}:{} excluded".format(repo["repo"], repo["mpac"]))
+ logger.info("repo %s:%s excluded", repo["repo"], repo["mpac"])
continue
root_build = repo["mpac"] in self.PACKAGES_USE_ROOT
if not OBSProxy.build_package(
- project, self._package, repo["repo"], self._arch, repo["mpac"],
+ project, self._package, repo["repo"], self._arch, spec, repo["mpac"],
root_build=root_build, disable_cpio=True):
- logger.error("build {} ... failed".format(repo["repo"]))
+ logger.error("build %s ... failed", repo["repo"])
return 3
- logger.info("build {} ... ok".format(repo["repo"]))
+ logger.info("build %s ... ok", repo["repo"])
logger.debug("build all repos ... finished")
@@ -187,44 +193,45 @@ class SinglePackageBuild(object):
ret, _, _ = shell_cmd_live(cmd, verbose=True)
if ret:
- logger.error("prepare build environ error, {}".format(ret))
+ logger.error("prepare build environ error, %s", ret)
return False
return True
- def build(self, work_dir, code_dir):
+ def build(self, spec, work_dir, code_dir):
"""
入口
+ :param spec: the spec file to build
:param work_dir: obs工作目录
:param code_dir: 代码目录
:return:
"""
if self._branch in self.BUILD_IGNORED_GITEE_BRANCH:
- logger.error("branch \"{}\" ignored".format(self._branch))
+ logger.error("branch \"%s\" ignored", self._branch)
return 0
if self._branch not in self.GITEE_BRANCH_PROJECT_MAPPING:
- logger.error("branch \"{}\" not support yet".format(self._branch))
+ logger.error("branch \"%s\" not support yet", self._branch)
return 1
has_any_repo_build = False
for project in self.GITEE_BRANCH_PROJECT_MAPPING.get(self._branch):
- logger.debug("start build project {}".format(project))
+ logger.debug("start build project %s", project)
obs_repos = self.get_need_build_obs_repos(project)
if not obs_repos:
- logger.info("all repos ignored of project {}".format(project))
+ logger.info("all repos ignored of project %s", project)
continue
- logger.debug("build obs repos: {}".format(obs_repos))
+ logger.debug("build obs repos: %s", obs_repos)
has_any_repo_build = True
- ret = self.build_obs_repos(project, obs_repos, work_dir, code_dir)
+ ret = self.build_obs_repos(project, obs_repos, spec, work_dir, code_dir)
if ret > 0:
- logger.debug("build run return {}".format(ret))
- logger.error("build {} {} {} ... {}".format(project, self._package, self._arch, "failed"))
+ logger.debug("build run return %s", ret)
+ logger.error("build %s %s %s ... %s", project, self._package, self._arch, "failed")
return 1 # finish if any error
else:
- logger.info("build {} {} {} ... {}".format(project, self._package, self._arch, "ok"))
+ logger.info("build %s %s %s ... %s", project, self._package, self._arch, "ok")
# if no repo build, regard as fail
if not has_any_repo_build:
@@ -253,6 +260,7 @@ def init_args():
parser.add_argument("-t", type=str, dest="account", help="gitee account")
parser.add_argument("-o", type=str, dest="owner", default="src-openeuler", help="gitee owner")
+ parser.add_argument("--spec", type=str, dest="spec", default="", help="spec files")
return parser.parse_args()
@@ -265,10 +273,10 @@ if "__main__" == __name__:
logging.config.fileConfig(logger_conf_path)
logger = logging.getLogger("build")
- logger.info("using credential {}".format(args.account.split(":")[0]))
- logger.info("cloning repository https://gitee.com/{}/{}.git ".format(args.owner, args.repo))
+ logger.info("using credential %s", args.account.split(":")[0])
+ logger.info("cloning repository https://gitee.com/%s/%s.git ", args.owner, args.repo)
logger.info("clone depth 1")
- logger.info("checking out pull request {}".format(args.pr))
+ logger.info("checking out pull request %s", args.pr)
from src.utils.dist_dataset import DistDataset
from src.proxy.git_proxy import GitProxy
@@ -315,7 +323,7 @@ if "__main__" == __name__:
dd.set_attr_stime("spb.build.stime")
spb = SinglePackageBuild(args.package, args.arch, args.branch)
- rs = spb.build(args.workspace, args.code)
+ rs = spb.build(args.spec, args.workspace, args.code)
dd.set_attr("spb.build.result", "failed" if rs else "successful")
dd.set_attr_etime("spb.build.etime")
diff --git a/src/build/related_rpm_package.py b/src/build/related_rpm_package.py
index 97b891cacf3d275b6a728307d9c0add01573b676..e07e5a5c0a934855885089d7f9379ad9fb57cddd 100755
--- a/src/build/related_rpm_package.py
+++ b/src/build/related_rpm_package.py
@@ -44,7 +44,10 @@ class RelatedRpms(object):
"oepkg_openstack-queens_oe-20.03-LTS-SP2": ["openEuler:20.03:LTS:SP2:oepkg:openstack:queens"],
"oepkg_openstack-common_oe-20.03-LTS-Next": ["openEuler:20.03:LTS:Next:oepkg:openstack:common"],
"oepkg_openstack-rocky_oe-20.03-LTS-Next": ["openEuler:20.03:LTS:Next:oepkg:openstack:rocky"],
- "oepkg_openstack-queens_oe-20.03-LTS-Next": ["openEuler:20.03:LTS:Next:oepkg:openstack:queens"]
+ "oepkg_openstack-queens_oe-20.03-LTS-Next": ["openEuler:20.03:LTS:Next:oepkg:openstack:queens"],
+ "oepkg_openstack-common_oe-20.03-LTS-SP3": ["openEuler:20.03:LTS:SP3:oepkg:openstack:common"],
+ "oepkg_openstack-rocky_oe-20.03-LTS-SP3": ["openEuler:20.03:LTS:SP3:oepkg:openstack:rocky"],
+ "oepkg_openstack-queens_oe-20.03-LTS-SP3": ["openEuler:20.03:LTS:SP3:oepkg:openstack:queens"]
}
def __init__(self, obs_addr, obs_repo_url, branch_name, package_arch):
diff --git a/src/conf/deny_list.sbom b/src/conf/deny_list.sbom
index fd3a23ec5c54d339bfaf2e78d006bffa54186d30..7f1e3ff1399dfabfc4fcd7183c5564c2d470c437 100644
--- a/src/conf/deny_list.sbom
+++ b/src/conf/deny_list.sbom
@@ -61,165 +61,257 @@
"publisher": "src-openeuler",
"name": "isula-transform"
},
+ {
+ "publisher": "openeuler",
+ "name": "kernel"
+ },
{
"publisher": "openeuler-mirror",
"name": "kernel"
},
+ {
+ "publisher": "src-openeuler",
+ "name": "kernel"
+ },
+ {
+ "publisher": "openeuler",
+ "name": "async-libfuse"
+ },
{
"publisher": "openeuler-mirror",
"name": "async-libfuse"
},
+ {
+ "publisher": "src-openeuler",
+ "name": "async-libfuse"
+ },
+ {
+ "publisher": "openeuler",
+ "name": "authz"
+ },
{
"publisher": "openeuler-mirror",
"name": "authz"
},
+ {
+ "publisher": "src-openeuler",
+ "name": "authz"
+ },
+ {
+ "publisher": "openeuler",
+ "name": "clibcni"
+ },
{
"publisher": "openeuler-mirror",
"name": "clibcni"
},
- {
+ {
+ "publisher": "src-openeuler",
+ "name": "clibcni"
+ },
+ {
+ "publisher": "openeuler",
+ "name": "isula-build"
+ },
+ {
"publisher": "openeuler-mirror",
"name": "isula-build"
},
{
+ "publisher": "src-openeuler",
+ "name": "isula-build"
+ },
+ {
+ "publisher": "openeuler",
+ "name": "iSulad"
+ },
+ {
"publisher": "openeuler-mirror",
"name": "iSulad"
},
{
+ "publisher": "src-openeuler",
+ "name": "iSulad"
+ },
+ {
+ "publisher": "openeuler",
+ "name": "iSulad-img"
+ },
+ {
"publisher": "openeuler-mirror",
"name": "iSulad-img"
},
{
+ "publisher": "src-openeuler",
+ "name": "iSulad-img"
+ },
+ {
+ "publisher": "openeuler",
+ "name": "iSula-libutils"
+ },
+ {
"publisher": "openeuler-mirror",
"name": "iSula-libutils"
},
{
+ "publisher": "src-openeuler",
+ "name": "iSula-libutils"
+ },
+ {
+ "publisher": "openeuler",
+ "name": "isula-transform"
+ },
+ {
"publisher": "openeuler-mirror",
"name": "isula-transform"
},
{
+ "publisher": "src-openeuler",
+ "name": "isula-transform"
+ },
+ {
+ "publisher": "openeuler",
+ "name": "lcr"
+ },
+ {
"publisher": "openeuler-mirror",
"name": "lcr"
},
{
- "publisher": "openeuler-mirror",
+ "publisher": "src-openeuler",
+ "name": "lcr"
+ },
+ {
+ "publisher": "openeuler",
"name": "lib-shim-v2"
},
- {
+ {
"publisher": "openeuler-mirror",
- "name": "lxcfs-tools"
+ "name": "lib-shim-v2"
},
{
- "publisher": "openeuler-mirror",
- "name": "syscontainer-tools"
+ "publisher": "src-openeuler",
+ "name": "lib-shim-v2"
},
{
- "publisher": "openeuler-mirror",
- "name": "A-Tune"
+ "publisher": "openeuler",
+ "name": "lxcfs-tools"
},
- {
+ {
"publisher": "openeuler-mirror",
- "name": "A-Tune-Collector"
+ "name": "lxcfs-tools"
},
- {
- "publisher": "openeuler-mirror",
- "name": "A-Tune-UI"
+ {
+ "publisher": "src-openeuler",
+ "name": "lxcfs-tools"
},
- {
- "publisher": "openeuler-mirror",
- "name": "prefetch_tuning"
+ {
+ "publisher": "openeuler",
+ "name": "syscontainer-tools"
},
- {
+ {
"publisher": "openeuler-mirror",
- "name": "wisdom-advisor"
+ "name": "syscontainer-tools"
},
{
- "publisher": "openeuler-mirror",
- "name": "qemu"
+ "publisher": "src-openeuler",
+ "name": "syscontainer-tools"
},
- {
+ {
+ "publisher": "openeuler",
+ "name": "A-Tune"
+ },
+ {
"publisher": "openeuler-mirror",
- "name": "vmtop"
+ "name": "A-Tune"
},
{
"publisher": "src-openeuler",
- "name": "kernel"
+ "name": "A-Tune"
},
- {
- "publisher": "src-openeuler",
- "name": "async-libfuse"
+ {
+ "publisher": "openeuler",
+ "name": "A-Tune-Collector"
},
- {
- "publisher": "src-openeuler",
- "name": "authz"
+ {
+ "publisher": "openeuler-mirror",
+ "name": "A-Tune-Collector"
},
{
"publisher": "src-openeuler",
- "name": "clibcni"
+ "name": "A-Tune-Collector"
},
- {
- "publisher": "src-openeuler",
- "name": "isula-build"
+ {
+ "publisher": "openeuler",
+ "name": "A-Tune-UI"
},
- {
- "publisher": "src-openeuler",
- "name": "iSulad"
+ {
+ "publisher": "openeuler-mirror",
+ "name": "A-Tune-UI"
},
{
"publisher": "src-openeuler",
- "name": "iSulad-img"
+ "name": "A-Tune-UI"
},
- {
- "publisher": "src-openeuler",
- "name": "iSula-libutils"
+ {
+ "publisher": "openeuler",
+ "name": "prefetch_tuning"
},
- {
- "publisher": "src-openeuler",
- "name": "isula-transform"
+ {
+ "publisher": "openeuler-mirror",
+ "name": "prefetch_tuning"
},
{
"publisher": "src-openeuler",
- "name": "lcr"
+ "name": "prefetch_tuning"
},
- {
- "publisher": "src-openeuler",
- "name": "lib-shim-v2"
+ {
+ "publisher": "openeuler",
+ "name": "wisdom-advisor"
},
- {
- "publisher": "src-openeuler",
- "name": "lxcfs-tools"
+ {
+ "publisher": "openeuler-mirror",
+ "name": "wisdom-advisor"
},
{
"publisher": "src-openeuler",
- "name": "syscontainer-tools"
+ "name": "wisdom-advisor"
},
- {
- "publisher": "src-openeuler",
- "name": "A-Tune"
+ {
+ "publisher": "openeuler",
+ "name": "qemu"
},
- {
- "publisher": "src-openeuler",
- "name": "A-Tune-Collector"
+ {
+ "publisher": "openeuler-mirror",
+ "name": "qemu"
},
{
"publisher": "src-openeuler",
- "name": "A-Tune-UI"
+ "name": "qemu"
+ },
+ {
+ "publisher": "openeuler",
+ "name": "vmtop"
+ },
+ {
+ "publisher": "openeuler-mirror",
+ "name": "vmtop"
},
{
"publisher": "src-openeuler",
- "name": "prefetch_tuning"
+ "name": "vmtop"
},
{
- "publisher": "src-openeuler",
- "name": "wisdom-advisor"
+ "publisher": "openeuler",
+ "name": "kunpengsecl"
},
{
- "publisher": "src-openeuler",
- "name": "qemu"
+ "ipublisher": "openeuler-mirror",
+ "name": "kunpengsecl"
},
{
"publisher": "src-openeuler",
- "name": "vmtop"
- }
+ "name": "kunpengsecl"
+ }
]
}
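For context, a hedged sketch of how a deny list shaped like this file might be consumed; the top-level key and the matching rule are assumptions for illustration, not taken from this patch:

    import json

    def is_denied(sbom_path, publisher, name, key="deny_list"):
        # Assumes a JSON object holding a list of {"publisher", "name"}
        # entries under `key`; adjust to the real schema as needed.
        with open(sbom_path, "r") as f:
            entries = json.load(f).get(key, [])
        return any(entry.get("publisher") == publisher and entry.get("name") == name
                   for entry in entries)

    # e.g. is_denied("src/conf/deny_list.sbom", "src-openeuler", "kunpengsecl")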
diff --git a/src/jobs/jenkins_job.py b/src/jobs/jenkins_job.py
index 3978493b7cf94b3e3568e896d5e8a20d148b1472..811058c3ca2d0f8c7db6ff0b3b6489283e86632c 100755
--- a/src/jobs/jenkins_job.py
+++ b/src/jobs/jenkins_job.py
@@ -59,7 +59,7 @@ class JenkinsJobs(object):
:param interval: 每次batch请求后sleep时间(秒),
:return:
"""
- logger.info("{} jobs {}".format(action, jobs))
+ logger.info("%s jobs %s", action, jobs)
real_jobs = self.get_real_target_jobs(jobs, exclude_jobs if exclude_jobs else [])
def run_once(target_jobs):
@@ -71,14 +71,14 @@ class JenkinsJobs(object):
for index in range(batch):
works = [gevent.spawn(self.dispatch, action, job, jenkins_proxy)
for job in target_jobs[index * concurrency: (index + 1) * concurrency]]
- logger.info("{} works, {}/{} ".format(len(works), index + 1, batch))
+ logger.info("%s works, %s/%s ", len(works), index + 1, batch)
gevent.joinall(works)
for work in works:
if work.value["result"]:
- logger.info("{} job {} ... ok".format(action, work.value["job"]))
+ logger.info("%s job %s ... ok", action, work.value["job"])
else:
_failed_jobs.append(work.value["job"])
- logger.error("{} job {} ... failed".format(action, work.value["job"]))
+ logger.error("%s job %s ... failed", action, work.value["job"])
time.sleep(interval)
@@ -89,12 +89,12 @@ class JenkinsJobs(object):
for index in range(retry):
if not failed_jobs:
break
- logger.info("{} jobs failed, retrying {}/{}".format(len(failed_jobs), index + 1, retry))
+ logger.info("%s jobs failed, retrying %s/%s", len(failed_jobs), index + 1, retry)
failed_jobs = run_once(failed_jobs)
if failed_jobs:
- logger.warning("{} failed jobs".format(len(failed_jobs)))
- logger.warning("{}{}".format(",".join(failed_jobs[:100]), "..." if len(failed_jobs) > 100 else ""))
+ logger.warning("%s failed jobs", len(failed_jobs))
+ logger.warning("%s%s", ",".join(failed_jobs[:100]), "..." if len(failed_jobs) > 100 else "")
def dispatch(self, action, job, jenkins_proxy):
"""
@@ -148,7 +148,7 @@ class SrcOpenEulerJenkinsJobs(JenkinsJobs):
with open(os.path.join(exclusive_arch_path, filename), "r") as f:
arches = f.readline()
self._exclusive_arch[filename] = [arch.strip() for arch in arches.split(",")]
- logger.debug("exclusive arch: {}".format(self._exclusive_arch))
+ logger.debug("exclusive arch: %s", self._exclusive_arch)
def get_real_target_jobs(self, jobs, exclude_jobs):
"""
@@ -274,7 +274,7 @@ class OpenEulerJenkinsJobs(SrcOpenEulerJenkinsJobs):
# build
script = self.guess_build_script(buddy["repo"])
- logger.debug("guess build script: {}".format("script"))
+ logger.debug("guess build script: %s", "script")
ele = root.findall("buiders/hudson.task.Shell/command")
if ele:
# replace first command
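The `run_once` helper above fans jobs out in fixed-size batches of greenlets, joins each batch, then sleeps; a standalone sketch of that pattern (requires gevent; the `dispatch` stub and job names are illustrative):

    import time
    import gevent

    def dispatch(job):
        # stand-in for JenkinsJobs.dispatch; returns a result dict
        return {"job": job, "result": True}

    def run_batched(jobs, concurrency=4, interval=1):
        failed = []
        batch = (len(jobs) + concurrency - 1) // concurrency  # ceiling division
        for index in range(batch):
            works = [gevent.spawn(dispatch, job)
                     for job in jobs[index * concurrency:(index + 1) * concurrency]]
            gevent.joinall(works)
            failed.extend(w.value["job"] for w in works if not w.value["result"])
            time.sleep(interval)
        return failed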
diff --git a/src/jobs/obs_meta_strategy.py b/src/jobs/obs_meta_strategy.py
index 6b3062048d90e390c0a35747a27ec1f15919753a..24b0c40c91932c8a8426deedf16d94822ef36475 100755
--- a/src/jobs/obs_meta_strategy.py
+++ b/src/jobs/obs_meta_strategy.py
@@ -79,20 +79,20 @@ class ObsMetaStrategy(object):
if filename == "_service":
_service = os.path.join(dirpath, filename)
try:
- logger.debug("analysis {}".format(_service))
+ logger.debug("analysis %s", _service)
tree = ET.parse(_service)
elements = tree.findall(".//param[@name=\"url\"]") # next/openEuler/zip
except:
- logger.exception("invalid xml format, {}".format(_service))
+ logger.exception("invalid xml format, %s", _service)
continue
_repos = [element.text.strip("/").split("/")[-1] for element in elements] # eg: next/openEuler/zip
- logger.debug("get repos: {}".format(_repos))
+ logger.debug("get repos: %s", _repos)
if any([repo in repos for repo in _repos]):
package = dirpath.strip("/").split("/")[-1] # eg: master/openEuler:Mainline/zip/_services
index += 1
- logger.info("{} {}...ok".format(index, _service))
- logger.info("package: {}, repos: {}".format(package, _repos))
+ logger.info("%s %s...ok", index, _service)
+ logger.info("package: %s, repos: %s", package, _repos)
for repo in _repos:
self._package_repo[package].add(repo)
self._repo_package[repo].add(package)
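The `_service` parsing above extracts repo names from `<param name="url">` elements; a minimal standalone sketch with an illustrative XML snippet:

    import xml.etree.ElementTree as ET

    _service_xml = """
    <services>
      <service name="tar_scm_repo">
        <param name="url">next/openEuler/zip</param>
      </service>
    </services>
    """

    root = ET.fromstring(_service_xml)
    elements = root.findall(".//param[@name='url']")
    repos = [element.text.strip("/").split("/")[-1] for element in elements]
    print(repos)  # ['zip']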
diff --git a/src/jobs/repo_mapping.py b/src/jobs/repo_mapping.py
index 49b1765bd79b1b5aafa4a27a299d540a43b91669..318f2b2ef1be7e189f9d437bde97b96d1df8501a 100755
--- a/src/jobs/repo_mapping.py
+++ b/src/jobs/repo_mapping.py
@@ -38,9 +38,9 @@ class RepoMapping(object):
self._exclude_repos = kwargs.get("exclude_jobs") if kwargs.get("exclude_jobs") else []
self._repo_mapping = {} # 保存结果
self._ignored_repos = self._load_ignore_repo(ignored_repos_path, ignored_repos_key)
- logger.debug("ignored repos: {}".format(self._ignored_repos))
+ logger.debug("ignored repos: %s", self._ignored_repos)
self._community_repos = self._load_community_repo(community_path) # 社区repos
- logger.debug("community repos: {}".format(self._community_repos))
+ logger.debug("community repos: %s", self._community_repos)
@staticmethod
def _load_ignore_repo(conf_file, ignored_repos_key):
@@ -55,7 +55,7 @@ class RepoMapping(object):
handler = yaml.safe_load(f)
return handler.get(ignored_repos_key, [])
except IOError as e:
- logger.warning("{} not exist".format(conf_file))
+ logger.warning("%s not exist", conf_file)
return []
@staticmethod
@@ -69,10 +69,10 @@ class RepoMapping(object):
with open(community_path, "r") as f:
handler = yaml.safe_load(f)
repos = {item["name"]: item["type"] for item in handler["repositories"]}
- logger.info("repos from community: {}".format(len(repos)))
+ logger.info("repos from community: %s", len(repos))
return repos
except IOError as e:
- logger.warning("{} not exist".format(community_path))
+ logger.warning("%s not exist", community_path)
return []
def _is_valid_repo(self, repo):
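`_load_community_repo` above builds a name-to-type map from the community YAML; a hedged standalone sketch (the local path is illustrative, the real file comes from the openeuler/community repository):

    import yaml

    with open("community/repository/src-openeuler.yaml", "r") as f:
        handler = yaml.safe_load(f)

    repos = {item["name"]: item["type"] for item in handler["repositories"]}
    print("repos from community: %s" % len(repos))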
diff --git a/src/proxy/es_proxy.py b/src/proxy/es_proxy.py
index b439951c1fed5de95ebb7c8a6829253b8e321e9c..3b18141e95d30599fe95d6366a04d4cecfc68b4d 100644
--- a/src/proxy/es_proxy.py
+++ b/src/proxy/es_proxy.py
@@ -46,10 +46,10 @@ class ESProxy(object):
:return:
"""
try:
- logger.debug("es insert: {}".format(body))
+ logger.debug("es insert: %s", body)
rs = self._es.index(index, body=body)
- logger.debug("insert result: {}".format(rs))
+ logger.debug("insert result: %s", rs)
return rs["result"] == "created"
except elasticsearch.ElasticsearchException:
logger.exception("elastic search insert document exception")
@@ -62,9 +62,9 @@ class ESProxy(object):
:param body:
:return:
"""
- logger.debug("es search: {}".format(body))
+ logger.debug("es search: %s", body)
rs = self._es.search(index=index, body=body)
- logger.debug("result: {}".format(rs))
+ logger.debug("result: %s", rs)
return rs['hits']['hits']
@@ -89,9 +89,9 @@ class ESProxy(object):
"""
try:
body = {"query": query, "script": script}
- logger.debug("es update: {}".format(body))
+ logger.debug("es update: %s", body)
rs = self._es.update_by_query(index, body=body)
- logger.debug("update result: {}".format(rs))
+ logger.debug("update result: %s", rs)
return True
except elasticsearch.ElasticsearchException:
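The `update_by_query` wrapper above sends a query plus script body; a hedged sketch of the underlying elasticsearch-py call (address, index, and field names are illustrative):

    from elasticsearch import Elasticsearch

    es = Elasticsearch(["http://localhost:9200"])
    body = {"query": {"term": {"package": "zip"}},
            "script": {"source": "ctx._source.state = 'done'"}}
    rs = es.update_by_query("builds", body=body)  # same call shape as ESProxy
    print(rs)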
diff --git a/src/proxy/git_proxy.py b/src/proxy/git_proxy.py
index 87fe23294e550775cd673d8c38f17f7c03722f34..a7bca0b78f44be3a4fdff027e91f4a9c3fd7f897 100644
--- a/src/proxy/git_proxy.py
+++ b/src/proxy/git_proxy.py
@@ -50,7 +50,7 @@ class GitProxy(object):
ret, _, _ = shell_cmd_live(init_cmd)
if ret:
- logger.warning("init repository failed, {}".format(ret))
+ logger.warning("init repository failed, %s", ret)
return None
return cls(repo_dir)
@@ -70,8 +70,8 @@ class GitProxy(object):
self._repo_dir, "--progress" if progress else "", depth, url, pull_request, pull_request)
ret, out, _ = shell_cmd_live(fetch_cmd, cap_out=True, cmd_verbose=False)
if ret:
- logger.error("git fetch failed, {}".format(ret))
- logger.error("{}".format(out))
+ logger.error("git fetch failed, %s", ret)
+ logger.error("%s", out)
return False
return True
@@ -86,7 +86,7 @@ class GitProxy(object):
get_content_cmd = "cd {}; git show {}:{}".format(self._repo_dir, commit, file_path)
ret, out, _ = shell_cmd_live(get_content_cmd, cap_out=True)
if ret:
- logger.warning("get file content of commit failed, {}".format(ret))
+ logger.warning("get file content of commit failed, %s", ret)
return None
f = StringIO()
@@ -106,7 +106,7 @@ class GitProxy(object):
ret, out, _ = shell_cmd_live(diff_files_cmd, cap_out=True)
if ret:
- logger.error("get diff files of commits failed, {}".format(ret))
+ logger.error("get diff files of commits failed, %s", ret)
return []
return out
@@ -121,7 +121,7 @@ class GitProxy(object):
ret, out, _ = shell_cmd_live(extract_file_cmd, cap_out=True)
if ret:
- logger.error("extract diff files of patch failed, {}".format(ret))
+ logger.error("extract diff files of patch failed, %s", ret)
return []
return [line.split()[-1] for line in out]
@@ -137,7 +137,7 @@ class GitProxy(object):
ret, _, _ = shell_cmd_live(apply_patch_cmd)
if ret:
- #logger.error("apply patch failed, {}".format(ret))
+ #logger.error("apply patch failed, %s", ret)
return False
return True
@@ -156,7 +156,7 @@ class GitProxy(object):
ret, _, _ = shell_cmd_live(apply_patch_cmd)
if ret:
- #logger.error("apply patch failed, {}".format(ret))
+ #logger.error("apply patch failed, %s", ret)
return False
return True
@@ -171,7 +171,7 @@ class GitProxy(object):
ret, out, _ = shell_cmd_live(get_commit_cmd, cap_out=True)
if ret:
- logger.error("get commit id of index failed, {}".format(ret))
+ logger.error("get commit id of index failed, %s", ret)
return None
return out[0]
@@ -186,7 +186,7 @@ class GitProxy(object):
ret, _, _ = shell_cmd_live(checkout_cmd)
if ret:
- logger.warning("checkout failed, {}".format(ret))
+ logger.warning("checkout failed, %s", ret)
return False
return True
@@ -201,7 +201,7 @@ class GitProxy(object):
ret, _, _ = shell_cmd_live(checkout_cmd)
if ret:
- logger.warning("checkout failed, {}".format(ret))
+ logger.warning("checkout failed, %s", ret)
return False
return True
@@ -224,7 +224,7 @@ class GitProxy(object):
ret, out, _ = shell_cmd_live(tree_hashes_cmd, cap_out=True)
if ret:
- logger.error("get tree hashes failed, {}".format(ret))
+ logger.error("get tree hashes failed, %s", ret)
return None
return out
@@ -243,7 +243,7 @@ class GitProxy(object):
ret, _, _ = shell_cmd_live(fetch_cmd)
if ret:
- logger.error("fetch failed, {}".format(ret))
+ logger.error("fetch failed, %s", ret)
return False
return True
diff --git a/src/proxy/gitee_proxy.py b/src/proxy/gitee_proxy.py
index 3e30c603638ea27389fe6b7f19126989d2b3bdef..3cfa96f952ee5aaa37150ff05906a490b3e56cc5 100644
--- a/src/proxy/gitee_proxy.py
+++ b/src/proxy/gitee_proxy.py
@@ -35,7 +35,7 @@ class GiteeProxy(object):
:param comment: 评论内容
:return: 0成功,其它失败
"""
- logger.debug("comment pull request {}".format(pr))
+ logger.debug("comment pull request %s", pr)
comment_pr_url = "https://gitee.com/api/v5/repos/{}/{}/pulls/{}/comments".format(self._owner, self._repo, pr)
data = {"access_token": self._token, "body": comment}
@@ -58,7 +58,7 @@ class GiteeProxy(object):
logger.debug("create tags, but no tags")
return True
- logger.debug("create tags {} of pull request {}".format(tags, pr))
+ logger.debug("create tags %s of pull request %s", tags, pr)
pr_tag_url = "https://gitee.com/api/v5/repos/{}/{}/pulls/{}/labels?access_token={}".format(
self._owner, self._repo, pr, self._token)
@@ -81,7 +81,7 @@ class GiteeProxy(object):
logger.debug("replace tags, but no tags")
return True
- logger.debug("replace all tags with {} of pull request {}".format(tags, pr))
+ logger.debug("replace all tags with %s of pull request %s", tags, pr)
pr_tag_url = "https://gitee.com/api/v5/repos/{}/{}/pulls/{}/labels?access_token={}".format(
self._owner, self._repo, pr, self._token)
@@ -99,7 +99,7 @@ class GiteeProxy(object):
:param tag: 标签
:return: 0成功,其它失败
"""
- logger.debug("delete tag {} of pull request {}".format(tag, pr))
+ logger.debug("delete tag %s of pull request %s", tag, pr)
pr_tag_url = "https://gitee.com/api/v5/repos/{}/{}/pulls/{}/labels/{}?access_token={}".format(
self._owner, self._repo, pr, tag, self._token)
@@ -128,7 +128,7 @@ class GiteeProxy(object):
"""
handler = yaml.safe_load(response.text)
repos.update({item["name"]: item["type"] for item in handler["repositories"]})
- logger.info("repos from community: {}".format(len(repos)))
+ logger.info("repos from community: %s", len(repos))
community_repo_url = "https://gitee.com/openeuler/community/raw/master/repository/src-openeuler.yaml"
logger.info("requests repos from community, this will take multi seconds")
@@ -143,7 +143,7 @@ class GiteeProxy(object):
:param state: pr状态
:return: str or None
"""
- logger.debug("get last pull request committer, branch: {}, state: {}".format(branch, state))
+ logger.debug("get last pull request committer, branch: %s, state: %s", branch, state)
pr_url = "https://gitee.com/api/v5/repos/{}/{}/pulls?access_token={}&state={}&base={}" \
"&page=1&per_page=1".format(self._owner, self._repo, self._token, state, branch)
@@ -161,7 +161,7 @@ class GiteeProxy(object):
if handler:
try:
committer[0] = handler[0]["user"]["login"]
- logger.debug("get last pr committer: {}".format(committer))
+ logger.debug("get last pr committer: %s", committer)
except KeyError:
logger.exception("extract committer info from gitee exception")
diff --git a/src/proxy/jenkins_proxy.py b/src/proxy/jenkins_proxy.py
index b4b035bd1cc2a9c1c9629217dacc468a1ea7543b..ea40dfcd68a9da382b089d1811687c72e35f858f 100644
--- a/src/proxy/jenkins_proxy.py
+++ b/src/proxy/jenkins_proxy.py
@@ -55,7 +55,7 @@ class JenkinsProxy(object):
self._jenkins.create_job(job, config)
return True
except Exception as e:
- logger.exception("create job exception, {}".format(e))
+ logger.exception("create job exception, %s", e)
return False
def update_job(self, job, config):
@@ -70,7 +70,7 @@ class JenkinsProxy(object):
jks_job.update_config(config)
return True
except Exception as e:
- logger.exception("update job exception, {}".format(e))
+ logger.exception("update job exception, %s", e)
return False
def get_config(self, job):
@@ -82,7 +82,7 @@ class JenkinsProxy(object):
try:
return self._jenkins[job].get_config()
except Exception as e:
- logger.exception("get config exception, {}".format(e))
+ logger.exception("get config exception, %s", e)
return None
def get_build(self, job, build_no):
@@ -95,7 +95,7 @@ class JenkinsProxy(object):
try:
return self._jenkins[job].get_build(build_no)
except Exception as e:
- logger.exception("get job build exception, {}".format(e))
+ logger.exception("get job build exception, %s", e)
return None
@classmethod
@@ -109,7 +109,7 @@ class JenkinsProxy(object):
parent_build = build.get_upstream_build()
return parent_build.get_upstream_build() if parent_build else None
except Exception as e:
- logger.exception("get grandpa build exception, {}".format(e))
+ logger.exception("get grandpa build exception, %s", e)
return None
def _get_upstream_jobs(self, job):
@@ -119,22 +119,22 @@ class JenkinsProxy(object):
:param job: Jenkins Job object
:return:
"""
- logger.debug("get upstream jobs of {}".format(job._data["fullName"]))
+ logger.debug("get upstream jobs of %s", job._data["fullName"])
jobs = []
for project in job._data["upstreamProjects"]: # but is the only way of get upstream projects info
url = project.get("url")
name = project.get("name")
- logger.debug("upstream project: {} {}".format(url, name))
+ logger.debug("upstream project: %s %s", url, name)
m = re.match("(.*)/job/.*", url) # remove last part of job url, greedy match
base_url = m.group(1)
- logger.debug("base url {}".format(base_url))
+ logger.debug("base url %s", base_url)
try:
j = jenkins.Jenkins(base_url, self._username, self._token, timeout=self._timeout)
jobs.append(j[name])
except Exception as e:
- logger.exception("get job of {} exception".format(url))
+ logger.exception("get job of %s exception", url)
continue
return jobs
@@ -167,17 +167,17 @@ class JenkinsProxy(object):
cause_build = cause_job.get_build(cause_build_id)
cause_cause_build_id = cause_build.get_upstream_build_number()
- logger.debug("cause_build_id: {}, cause_job_name: {}, cause_cause_build_id: {}".format(
- cause_build_id, cause_job_name, cause_cause_build_id))
+ logger.debug("cause_build_id: %s, cause_job_name: %s, cause_cause_build_id: %s",
+ cause_build_id, cause_job_name, cause_cause_build_id)
upstream_builds = []
for upstream_job in upstream_jobs:
- logger.debug("{}".format(upstream_job._data["fullName"]))
+ logger.debug("%s", upstream_job._data["fullName"])
for build_id in upstream_job.get_build_ids():
- logger.debug("try build id {}".format(build_id))
+ logger.debug("try build id %s", build_id)
a_build = upstream_job.get_build(build_id)
if a_build.get_upstream_build_number() == cause_cause_build_id:
- logger.debug("build id {} match".format(build_id))
+ logger.debug("build id %s match", build_id)
upstream_builds.append(a_build)
break
diff --git a/src/proxy/kafka_proxy.py b/src/proxy/kafka_proxy.py
index c7dcde44ca59280cd1bf6ba5a9591a6f853d0aa5..110b46e8b0a70593954c4b9e60881472b1e26e4b 100644
--- a/src/proxy/kafka_proxy.py
+++ b/src/proxy/kafka_proxy.py
@@ -49,12 +49,12 @@ class KafkaProducerProxy(object):
:return:
"""
try:
- logger.debug("kafka send: {}, {}".format(key, value))
+ logger.debug("kafka send: %s, %s", key, value)
future = self._kp.send(topic, value=value, key=key)
rs = future.get(timeout=self._timeout)
- logger.debug("kafka send result: {}".format(rs))
+ logger.debug("kafka send result: %s", rs)
return True
except errors.KafkaTimeoutError:
logger.exception("kafka send timeout exception")
diff --git a/src/proxy/obs_proxy.py b/src/proxy/obs_proxy.py
index 6724748e32bab4e2540615dd2480702635482ed2..98fd978172a057cd4a855ce5d14150e611fa8873 100644
--- a/src/proxy/obs_proxy.py
+++ b/src/proxy/obs_proxy.py
@@ -45,7 +45,7 @@ class OBSProxy(object):
cmd = "osc ll {} {}".format(project, package)
ret, rs, _ = shell_cmd_live(cmd, cap_out=True)
if ret:
- logger.error("list project package error, {}".format(ret))
+ logger.error("list project package error, %s", ret)
return None
return rs
@@ -62,7 +62,7 @@ class OBSProxy(object):
"--show-exclude" if show_exclude else "", project, package, arch)
ret, out, _ = shell_cmd_live(cmd, cap_out=True)
if ret:
- logger.debug("list obs repos of arch error, {}".format(ret))
+ logger.debug("list obs repos of arch error, %s", ret)
return []
rs = []
@@ -88,7 +88,7 @@ class OBSProxy(object):
cmd = "osc results {} --csv |grep {} | awk -F';' '{{print $1}}'".format(project, state)
ret, out, _ = shell_cmd_live(cmd, cap_out=True)
if ret:
- logger.debug("list package of state error, {}".format(ret))
+ logger.debug("list package of state error, %s", ret)
return []
return out
@@ -105,23 +105,24 @@ class OBSProxy(object):
_ = os.path.isdir(project) and shutil.rmtree(project)
cmd = "osc co {} {}".format(project, package)
- logger.info("osc co {} {}".format(project, package))
+ logger.info("osc co %s %s", project, package)
ret, _, _ = shell_cmd_live(cmd, verbose=True)
if ret:
- logger.error("checkout package error, {}".format(ret))
+ logger.error("checkout package error, %s", ret)
return False
return True
@staticmethod
- def build_package(project, package, repo, arch, mpac, debug=False, root_build=False, disable_cpio=False):
+ def build_package(project, package, repo, arch, spec, mpac, debug=False, root_build=False, disable_cpio=False):
"""
build
:param project:
:param package:
:param repo:
:param arch:
+ :param spec:
:param mpac: multibuild package
:param debug:
:return:
@@ -130,15 +131,15 @@ class OBSProxy(object):
root_opt = "--userootforbuild" if root_build else ""
debuginfo_opt = "--disable-debuginfo" if not debug else ""
disable_cpio_bulk = "--disable-cpio-bulk-download" if disable_cpio else ""
- cmd = "cd {}; osc build {} {} {} {} {} --no-verify --clean --noservice -M {}".format(
- package_path, repo, arch, root_opt, debuginfo_opt, disable_cpio_bulk, mpac)
+ cmd = "cd {}; osc build {} {} {} {} {} {} --no-verify --clean --noservice -M {}".format(
+ package_path, repo, arch, spec, root_opt, debuginfo_opt, disable_cpio_bulk, mpac)
- logger.info("osc build {} {} {} {} {} --no-verify --clean --noservice -M {}".format(
- repo, arch, root_opt, debuginfo_opt, disable_cpio_bulk, mpac))
+ logger.info("osc build %s %s %s %s %s %s --no-verify --clean --noservice -M %s",
+ repo, arch, spec, root_opt, debuginfo_opt, disable_cpio_bulk, mpac)
ret, _, _ = shell_cmd_live(cmd, verbose=True)
if ret:
- logger.error("build package error, {}".format(ret))
+ logger.error("build package error, %s", ret)
return False
return True
@@ -156,7 +157,7 @@ class OBSProxy(object):
cmd = "osc api /build/{}/{}/{}/{}/_history".format(project, repo, arch, package)
ret, out, _ = shell_cmd_live(cmd, cap_out=True)
if ret:
- logger.debug("list build history of package error, {}".format(ret))
+ logger.debug("list build history of package error, %s", ret)
return ""
return "\n".join(out)
diff --git a/src/proxy/requests_proxy.py b/src/proxy/requests_proxy.py
index b88129fbc83a20f050f53de528f9a0f32955ca4f..87de8296a93b575e9a666a00c301b0d1f8d8ce4a 100644
--- a/src/proxy/requests_proxy.py
+++ b/src/proxy/requests_proxy.py
@@ -38,9 +38,9 @@ def do_requests(method, url, querystring=None, body=None, auth=None, timeout=30,
:return:
"""
try:
- logger.debug("http requests, {} {} {}".format(method, url, timeout))
- logger.debug("querystring: {}".format(querystring))
- logger.debug("body: {}".format(body))
+ logger.debug("http requests, %s %s %s", method, url, timeout)
+ logger.debug("querystring: %s", querystring)
+ logger.debug("body: %s", body)
if method.lower() not in ["get", "post", "put", "delete"]:
return -1
@@ -60,7 +60,7 @@ def do_requests(method, url, querystring=None, body=None, auth=None, timeout=30,
else:
rs = func(url, timeout=timeout)
- logger.debug("status_code {}".format(rs.status_code))
+ logger.debug("status_code %s", rs.status_code)
if rs.status_code not in [requests.codes.ok, requests.codes.created, requests.codes.no_content]:
return 1
@@ -78,11 +78,11 @@ def do_requests(method, url, querystring=None, body=None, auth=None, timeout=30,
return 0
except requests.exceptions.SSLError as e:
- logger.warning("requests {} ssl exception, {}".format(url, e))
+ logger.warning("requests %s ssl exception, %s", url, e)
return -2
except requests.exceptions.Timeout as e:
logger.warning("requests timeout")
return 2
except requests.exceptions.RequestException as e:
- logger.warning("requests exception, {}".format(e))
+ logger.warning("requests exception, %s", e)
return 3
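Callers branch on the integer code `do_requests` returns; a hedged usage sketch based on the codes visible above (0 success, -1 unsupported method, -2 SSL error, 1 unexpected status, 2 timeout, 3 other request error), assuming it runs from the repository root:

    from src.proxy.requests_proxy import do_requests

    rc = do_requests("get", "https://gitee.com/api/v5/repos/openeuler/community", timeout=10)
    if rc == 0:
        print("request ok")
    elif rc == 2:
        print("timed out, consider retrying")
    else:
        print("request failed, rc=%s" % rc)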
diff --git a/src/requirements b/src/requirements
index d3fd573f2d4135e8280d57b07b7cd8b4112a154b..7d34070afd36a0242b173d47d037f55727678ba0 100644
--- a/src/requirements
+++ b/src/requirements
@@ -11,4 +11,4 @@ chardet
kafka-python
elasticsearch
retrying
-scanoss-scanner
+scanoss
diff --git a/src/tools/obs_package_build_history.py b/src/tools/obs_package_build_history.py
index cd0863785e49dae50e791b254884c10227969e8c..ba9a8d08df1e577d2304594733ab8f98178f574d 100644
--- a/src/tools/obs_package_build_history.py
+++ b/src/tools/obs_package_build_history.py
@@ -46,7 +46,7 @@ class JobBuildHistory(object):
try:
root = ET.fromstring(history)
except ParseError:
- logger.exception("package: {}, build history: {}".format(package, history))
+ logger.exception("package: %s, build history: %s", package, history)
return {"package": package, "max": 0, "min": 0, "average": 0, "times": -1}
duration = [int(ele.get("duration")) for ele in root.findall("entry")]
@@ -74,11 +74,11 @@ class JobBuildHistory(object):
for index in range(batch):
works = [gevent.spawn(JobBuildHistory.get_package_job_duration, project, package, repo, arch)
for package in packages[index * concurrency: (index + 1) * concurrency]]
- logger.info("{} works, {}/{} ".format(len(works), index + 1, batch))
+ logger.info("%s works, %s/%s ", len(works), index + 1, batch)
gevent.joinall(works)
for work in works:
- logger.debug("{}: {}".format(work.value["package"], work.value))
- logger.info("{} ...done".format(work.value["package"]))
+ logger.debug("%s: %s", work.value["package"], work.value)
+ logger.info("{%s} ...done", work.value["package"])
rs.append(work.value)
time.sleep(1)
diff --git a/src/tools/obs_package_build_report.py b/src/tools/obs_package_build_report.py
index cdb50848a9afd643b457b6231727c59b79799170..78e25e4aacd3a9a2c345379ed856ce16aaa8620e 100644
--- a/src/tools/obs_package_build_report.py
+++ b/src/tools/obs_package_build_report.py
@@ -53,7 +53,10 @@ class ObsPackageBuildReport(object):
"openEuler:20.03:LTS:SP2:oepkg:openstack:rocky": "oepkg_openstack-rocky_oe-20.03-LTS-SP2",
"openEuler:20.03:LTS:Next:oepkg:openstack:common": "oepkg_openstack-common_oe-20.03-LTS-Next",
"openEuler:20.03:LTS:Next:oepkg:openstack:queens": "oepkg_openstack-queens_oe-20.03-LTS-Next",
- "openEuler:20.03:LTS:Next:oepkg:openstack:rocky": "oepkg_openstack-rocky_oe-20.03-LTS-Next"
+ "openEuler:20.03:LTS:Next:oepkg:openstack:rocky": "oepkg_openstack-rocky_oe-20.03-LTS-Next",
+ "openEuler:20.03:LTS:SP3:oepkg:openstack:common": "oepkg_openstack-common_oe-20.03-LTS-SP3",
+ "openEuler:20.03:LTS:SP3:oepkg:openstack:queens": "oepkg_openstack-queens_oe-20.03-LTS-SP3",
+ "openEuler:20.03:LTS:SP3:oepkg:openstack:rocky": "oepkg_openstack-rocky_oe-20.03-LTS-SP3"
}
GITEE_OWNER = "src-openeuler"
@@ -97,17 +100,17 @@ class ObsPackageBuildReport(object):
# try:
# branch = self.__class__.PROJECT_BRANCH_MAPPING[self._project]
# except KeyError:
-# logger.exception("project {} not support".format(self._project))
+# logger.exception("project %s not support", self._project)
# return
branch = "master"
# get packages in project of state
packages = OBSProxy.list_packages_of_state(self._project, self._state)
- logger.info("project {} state {}, find {} packages".format(self._project, self._state, len(packages)))
+ logger.info("project %s state %s, find %s packages", self._project, self._state, len(packages))
# get last pr committer
for index, package in enumerate(packages):
- logger.info("{}: {}".format(index, package))
+ logger.info("%s: %s", index, package)
gp = GiteeProxy(self.GITEE_OWNER, package, gitee_api_token)
committer = gp.get_last_pr_committer(branch)
real_name = self._real_name_mapping.get(committer, "N/A")
diff --git a/src/utils/shell_cmd.py b/src/utils/shell_cmd.py
index c68c65cb5fefa61f3cbccffe2722c07a888f9646..e3be542913ca688279031c7a92656c579821eef8 100755
--- a/src/utils/shell_cmd.py
+++ b/src/utils/shell_cmd.py
@@ -8,12 +8,18 @@ no_fmt_logger = logging.getLogger("no_fmt")
def shell_cmd(cmd, inmsg=None):
- logger.debug("exec cmd -- [{}]".format(cmd))
+ """
+ Run the command in a subprocess and return the result
+ :param cmd: command to execute
+ :param inmsg: data written to the subprocess's stdin
+ :return:
+ """
+ logger.debug("exec cmd -- [%s]", cmd)
p = subprocess.Popen(cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
if inmsg:
p.stdin.write(inmsg)
out, err = p.communicate()
- logger.debug("iret: {}, rs: {}, err: {}".format(p.returncode, out, err))
+ logger.debug("iret: %s, rs: %s, err: %s", p.returncode, out, err)
return p.returncode, out, err
@@ -29,7 +35,7 @@ def shell_cmd_live(cmd, cap_in=None, cap_out=False, cap_err=False, verbose=False
:return:
"""
if cmd_verbose:
- logger.debug("exec cmd -- {}".format(cmd))
+ logger.debug("exec cmd -- %s", cmd)
p = subprocess.Popen(cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
if cap_in:
@@ -51,13 +57,13 @@ def shell_cmd_live(cmd, cap_in=None, cap_out=False, cap_err=False, verbose=False
break
if cap_out:
- logger.debug("total {} lines output".format(len(out)))
+ logger.debug("total %s lines output", len(out))
ret = p.poll()
err = None
if ret:
- logger.debug("return code {}".format(ret))
+ logger.debug("return code %s", ret)
while True:
line= p.stderr.readline()
if not line:
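For reference, a usage sketch of `shell_cmd` from this module, assuming it runs from the repository root; note `inmsg` must be bytes because the pipes are opened in binary mode (command and input are illustrative):

    from src.utils.shell_cmd import shell_cmd

    ret, out, err = shell_cmd("grep jenkins", inmsg=b"openeuler jenkins scripts\n")
    print(ret)  # 0 when grep finds a match
    print(out)  # b'openeuler jenkins scripts\n'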
diff --git a/test/ac/acl/package_yaml/test_check_repo.py b/test/ac/acl/package_yaml/test_check_repo.py
index 8bf40f21be2341d48131dc24b219c44e4cd60e22..f1d75224d349a653cc0abd996a09a6899ead29a1 100644
--- a/test/ac/acl/package_yaml/test_check_repo.py
+++ b/test/ac/acl/package_yaml/test_check_repo.py
@@ -51,9 +51,9 @@ class TestGetReleaseTags(unittest.TestCase):
with open(filepath, 'r') as yaml_data: # load yaml data
result = yaml.safe_load(yaml_data)
except IOError as e:
- logging.warning("package yaml not exist. {}".format(str(e)))
+ logging.warning("package yaml not exist. %s", str(e))
except yaml.YAMLError as exc:
- logging.warning("Error parsering YAML: {}".format(str(exc)))
+ logging.warning("Error parsering YAML: %s", str(exc))
finally:
return result