diff --git a/tools/SME_ISTRUCTION.txt b/tools/SME_ISTRUCTION.txt
new file mode 100644
index 0000000000000000000000000000000000000000..43783be93a1655402af5fa2406384c17cb0a832f
--- /dev/null
+++ b/tools/SME_ISTRUCTION.txt
@@ -0,0 +1,137 @@
+SME
+ADDHA
+ADDSPL
+ADDSVL
+ADDVA
+BF1CVT
+BF2CVT
+BF1CVTL
+BF2CVTL
+BFADD
+BFCLAMP
+BFCVT
+BFCVTN
+BFDOT
+BFMAX
+BFMAXNM
+BFMIN
+BFMINNM
+BFMLA
+BFMLAL
+BFMLS
+BFMLSL
+BFMOPA
+BFMOPS
+BFSUB
+BFVDOT
+BMOPA
+BMOPS
+F1CVT
+F2CVT
+F1CVTL
+F2CVTL
+FADD
+FAMAX
+FAMIN
+FCLAMP
+FCVT
+FCVTL
+FCVTN
+FCVTZS
+FCVTZU
+FDOT
+FMAX
+FMAXNM
+FMIN
+FMINNM
+FMLA
+FMLAL
+FMLALL
+FMLS
+FMLSL
+FMOPA
+FMOPS
+FRINTA
+FRINTM
+FRINTN
+FRINTP
+FSCALE
+FSUB
+FVDOT
+FVDOTB
+FVDOTT
+LD1B
+LD1D
+LD1H
+LD1Q
+LD1W
+LDNT1B
+LDNT1D
+LDNT1H
+LDNT1W
+LUTI2
+LUTI4
+MOVA
+MOVAZ
+MOVT
+RDSVL
+SCLAMP
+SCVTF
+SDOT
+SEL
+SMLAL
+SMLALL
+SMLSL
+SMLSLL
+SMOPA
+SMOPS
+SQCVT
+SQCVTN
+SQCVTU
+SQCVTUN
+SQDMULH
+SQRSHR
+SQRSHRN
+SQRSHRU
+SQRSHRUN
+SRSHL
+ST1B
+ST1D
+ST1H
+ST1Q
+ST1W
+STNT1B
+STNT1D
+STNT1H
+STNT1W
+SUDOT
+SUMLALL
+SUMOPA
+SUMOPS
+SUNPK
+SUVDOT
+SVDOT
+UCLAMP
+UCVTF
+UDOT
+UMLAL
+UMLALL
+UMLSL
+UMLSLL
+UMOPA
+UMOPS
+UQCVT
+UQCVTN
+UQRSHR
+UQRSHRN
+URSHL
+USDOT
+USMLALL
+USMOPA
+USMOPS
+USVDOT
+UUNPK
+UVDOT
+UZP
+ZERO
+ZIP
diff --git a/tools/acceptance_command_line.py b/tools/acceptance_command_line.py
index 47f07090a3d0053dbb01a7c9e725987498b15772..b9ba6181312ebe7be92f6010f6981875320ac508 100644
--- a/tools/acceptance_command_line.py
+++ b/tools/acceptance_command_line.py
@@ -2,93 +2,734 @@ import os
import zipfile
import tarfile
import argparse
+import ast
+import subprocess
class AcceptanceTool(object):
def devkit_acceptance_report(self, compressed_report_package):
if not os.path.exists(compressed_report_package):
- print("请输入正确的报告压缩包")
+        print("Please enter a valid report package path.")
return
file_ext = os.path.splitext(compressed_report_package)[1].lower()
if file_ext not in (".zip", ".gz", ".bz2"):
- print("请使用以下格式的压缩包:zip、gz、bz2")
+        print("Please use a compressed package in one of the following formats: .zip, .gz, .bz2")
return
- report_name = compressed_report_package.split("/")[-1].split("_")
- devkit_pipeline_name = ""
- devkit_pipeline_id = ""
- if len(report_name) == 3:
- devkit_pipeline_name = report_name[0]
- devkit_pipeline_id = report_name[1]
decompress = {".zip": decompress_zip, ".gz": decompress_gz_bz, ".bz2": decompress_gz_bz}
current_path = os.getcwd()
- print("开始解压")
+ print("Starting decompression...")
file_names = decompress.get(file_ext)(compressed_report_package)
- command_line_html = {"64-bit-running-mode-check.html": "64位运行模式检查",
- "memory-consistency-check.html": "内存一致性检查",
- "SoftwareMigrationAssessment.html": "软件迁移评估",
- "byte-alignment-check.html": "字节对齐检查", "SourceCodeScanningReport.html": "源码迁移",
- "compatibility_report.html": "云测工具"}
- print("解压完成。")
- print("流水线{}构建{}devkit-pipeline相关工具报告扫描中...".format(devkit_pipeline_name, devkit_pipeline_id))
- html_line = ""
- contents = ""
+ command_line_html = {"64-bit Running Mode Check Report": "64位运行模式检查",
+ "Static Check Report": "内存一致性检查",
+ "Vectorization Check Report": "向量化检查",
+ "Software Porting Assessment Report": "软件迁移评估",
+ "Byte Alignment Check Report": "字节对齐检查",
+ "Source Code Porting Report": "源码迁移", "Compatibility_Application": "云测工具",
+ "Build Affinity Check Report": "构建亲和",
+ "precision": "计算精度分析",
+ "UTgen": "UTgen"}
+ report_type = {"64-bit Running Mode Check Report": "亲和扫描工具", "Static Check Report": "亲和扫描工具",
+ "Vectorization Check Report": "亲和扫描工具", "Build Affinity Check Report": "亲和扫描工具",
+ "Software Porting Assessment Report": "迁移扫描工具",
+ "Byte Alignment Check Report": "亲和扫描工具",
+ "Source Code Porting Report": "迁移扫描工具", "Compatibility_Application": "DevKit测试平台"
+ }
+ print("The decompression is complete.")
+        print("Scanning for devkit-pipeline tool reports...")
+ source_path = ""
+ acceptance_html_list = list()
+ scan_report_info = [{'label': '门禁工具', 'value': '迁移扫描工具'}]
+ core_report_info = [{'label': '门禁工具', 'value': '亲和扫描工具'}, {'label': '源码路径', 'value': ''},
+ {'label': '评估结果', 'value': '不通过'}]
+ test_report_info = [{'label': '门禁工具', 'value': 'DevKit测试平台'}, {'label': '测试应用', 'value': ''},
+ {'label': '评估结果', 'value': '通过'}]
+ tuner_report_info = [{'label': '门禁工具', 'value': '调优工具'},
+ {'label': '评估结果', 'value': '不通过'}]
+ jdk_report_info = [{'label': '门禁工具', 'value': '毕昇JDK'},
+ {'label': '评估结果', 'value': '不通过'}]
+ gcc_report_info = [{'label': '门禁工具', 'value': 'GCC for openEuler'},
+ {'label': '评估结果', 'value': '不通过'}]
+ ut_report_info = [{'label': 'Java测试用例', 'value': 'UTgen'},
+ {'label': '评估结果', 'value': '不通过'}]
+ test_devkit = ["Compatibility_Application_Start", "Compatibility_Application_Stop",
+ "Compatibility_Hardware_Server",
+ "Compatibility_Idle_Cpu", "Compatibility_Idle_Disk", "Compatibility_Idle_Memory",
+ "Compatibility_Idle_Network",
+ "Compatibility_Software_Name", "Reliability_Exception_Kill", "Reliability_Pressure_Cpu"]
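+        # test_devkit: check item names looked up in the git_tb_data table of the DevKit test-platform report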
+ scan_report_detail = list()
+ core_report_detail = list()
+ test_report_detail = list()
+ jdk_report_detail = list()
+ gcc_report_detail = list()
+ tuner_report_detail = list()
+ ut_report_detail = list()
+ scan_report_num = 0
+ core_report_num = 0
+ tuner_report_num = 0
+ ut_report_num = 0
for file in file_names:
- if file.split("/")[-1] in command_line_html.keys():
+ if file.endswith(".log"):
+ with open(os.path.join(current_path, file), encoding='utf-8') as f:
+ content_all = f.read()
+ if "Recover backup file" in content_all:
+ core_report_num += 1
+ core_report_detail.append([core_report_num, "计算精度分析",
+ os.path.join(current_path, file), "使用"])
+                    core_report_info[1]["value"] = source_path
+ core_report_info[2]["value"] = "通过"
+ if file.endswith(".html"):
try:
- with open(os.path.join(current_path, file), encoding="utf-8") as f:
+ with open(os.path.join(current_path, file), encoding='utf-8') as f:
+ content_all = f.read()
+ f.seek(0)
content = f.readlines()
except UnicodeDecodeError:
- with open(os.path.join(current_path, file), encoding="gbk") as f:
+ with open(os.path.join(current_path, file), encoding='gbk') as f:
content = f.readlines()
- if file.split("/")[-1] != "compatibility_report.html":
- flag = 0
- for html_line in content:
- if "Source File Path" in html_line and file != "SoftwareMigrationAssessment.html":
- flag += 1
- continue
- elif "Software Package Path or Name" in html_line:
- flag += 1
- continue
- if flag == 1:
-                    html_line = \
-                        html_line.replace("<td>", "").replace("</td>", "").strip().split("/")[-1]
- break
+                    f.seek(0)
+                    content_all = f.read()
+
+ for acceptance_html in command_line_html.keys():
+ if acceptance_html in content_all:
+ acceptance_html_list.append(acceptance_html)
+ if acceptance_html != "Compatibility_Application":
+ flag = 0
+ for html_line in content:
+ if "Source File Path" in html_line and acceptance_html != "Software Porting Assessment Report":
+ flag += 1
+ continue
+ elif "Software Package Path or Name" in html_line:
+ flag += 1
+ continue
+ if flag == 1:
+                            source_path = \
+                                html_line.replace("<td>", "").replace("</td>", "").strip()
+ break
+
+ else:
+ for html_line in content:
+ if "Compatibility_Application_Start" in html_line:
+ if html_line.find("7528") != -1:
+ str1 = html_line.find("7528")
+ str2 = html_line.find("542f")
+ source_path = html_line[str1 + 3 + 1: str2 - 2]
+ if "git_tb_data" in html_line:
+ html_line = html_line.replace("git_tb_data: [", "[").replace("],", "]").replace(
+ "null", "None")
+                                html_line = ast.literal_eval(html_line)
+ for line_content in html_line:
+ if line_content in test_devkit:
+ content_value = html_line[html_line.index(line_content) + 1]
+ if content_value is None:
+ content_value = ""
+ test_report_detail.append([line_content, content_value])
+ if report_type.get(acceptance_html) == "迁移扫描工具":
+ scan_report_num += 1
+ scan_report_detail.append([scan_report_num, command_line_html.get(acceptance_html),
+ os.path.join(current_path, file), "使用"])
+ if command_line_html.get(acceptance_html) == "软件迁移评估":
+ if '软件包路径' not in str(scan_report_info):
+ scan_report_info.append({'label': '软件包路径', 'value': source_path})
+ if command_line_html.get(acceptance_html) == "源码迁移":
+ if '源码路径' not in str(scan_report_info):
+ scan_report_info.append({'label': '源码路径', 'value': source_path})
+
+ elif report_type.get(acceptance_html) == "亲和扫描工具":
+ core_report_num += 1
+ core_report_detail.append([core_report_num, command_line_html.get(acceptance_html),
+ os.path.join(current_path, file), "使用"])
+                    core_report_info[1]["value"] = source_path
+                    core_report_info[2]["value"] = "通过"
+ else:
+ test_report_info[1]["value"] = source_path
+ for detail in test_report_detail:
+ if detail[0] in ["Compatibility_Application_Start", "Compatibility_Application_Stop",
+ "Compatibility_Hardware_Server", "Compatibility_Software_Name"] and \
+ detail[1] in ("failed", "", "skipped", None):
+ test_report_info[2]["value"] = "不通过"
+
+ elif file.endswith(".txt"):
+ with open(os.path.join(current_path, file), encoding='utf-8') as f:
+ content_all = f.read()
+ f.seek(0)
+ gcc_contents = f.readlines()
+ f.seek(0)
+ jdk_content = f.readline()
+ if "BiSheng" in content_all or "Bisheng" in content_all:
+ jdk_report_info[1]["value"] = "通过"
+ jdk_report_detail = [[1, jdk_content, "使用"]]
+ for gcc_content in gcc_contents:
+ if "gcc version" in gcc_content:
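+                    # e.g. "gcc version 10.3.1 ...": token 3 is "10.3.1", whose third dotted field must be "1"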
+ if gcc_content.split(" ")[2].split(".")[2] == "1":
+ gcc_report_info[1]["value"] = "通过"
+ gcc_report_detail = [[1, gcc_content, "使用"]]
+ if "7.3.0" in gcc_content and "-mtune=tsv110" in content_all:
+ gcc_report_info[1]["value"] = "通过"
+ gcc_report_detail = [[1, gcc_content, "使用"]]
+ elif file.endswith(".data"):
+ tuner_report_num += 1
+            # Define the command to execute
+ command = ['perf', 'script', '-i', os.path.join(current_path, file)]
+ try:
+                # Run the command with subprocess.run and capture the output
+ result = subprocess.run(command, check=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+                # Decode the output
+ res = result.stdout.decode()
+ if "libkperf" in res or "libsym" in res:
+ tuner_report_detail.append([tuner_report_num, "libkperf",
+ os.path.join(current_path, file), "使用"])
+ tuner_report_info[1]["value"] = "通过"
+ else:
+ tuner_report_detail.append([tuner_report_num, "libkperf",
+ os.path.join(current_path, file), "未使用"])
+            except subprocess.CalledProcessError:
+ pass
+ elif file.endswith(".java"):
+ ut_report_num += 1
+ with open(os.path.join(current_path, file), encoding='utf-8') as f:
+ content_all = f.read()
+ if "UTgen" in content_all:
+ ut_report_detail.append([ut_report_num, "UTgen",
+ os.path.join(current_path, file), "使用"])
+ ut_report_info[1]["value"] = "通过"
else:
- for html_line in content:
- if "Compatibility_Application_Start" in html_line:
- str1 = html_line.find("7528")
- str2 = html_line.find("542f")
- html_line = html_line[str1 + 3 + 1: str2 - 2]
- break
- output_content = """{}:
- 报告路径:{}
- 被扫描软件名称:{}""".format(command_line_html.get(file.split("/")[-1]),
- os.path.join(current_path, file), html_line)
- print(output_content)
-            contents += "<br>{}".format(output_content)
-
- if not html_line:
- print("""\033[31m未发现的devkit-pipeline相关工具报告\033[0m""")
-
-        html_contents = '<html><body>Acceptance report{}</body></html>'.format(
-            contents)
- with open('./{}_{}_htmlreports.html'.format(devkit_pipeline_name, devkit_pipeline_id), 'w') as f:
- f.write(html_contents)
-
-
-def decompress_zip(compressed_report_package):
- with zipfile.ZipFile(compressed_report_package) as zip:
+                    ut_report_detail.append([ut_report_num, "UTgen",
+                                             os.path.join(current_path, file), "未使用"])
+
+ scan_report_flag = 0
+ for scan_report in scan_report_info:
+ if scan_report.get("label") in ('软件包路径', '源码路径'):
+ scan_report_flag = 1
+ if scan_report.get("value") and "评估结果" not in str(scan_report_info):
+ scan_report_info.append({'label': '评估结果', 'value': '通过'})
+ if not scan_report_flag:
+ scan_report_info.extend([{'label': '源码路径', 'value': ''}, {'label': '评估结果', 'value': '不通过'}])
+ results = set(command_line_html.keys()) - set(acceptance_html_list)
+ for result in results:
+ if report_type.get(result) == "迁移扫描工具":
+ scan_report_num += 1
+ scan_report_detail.append([scan_report_num, command_line_html.get(result),
+ "不存在", "未使用"])
+ elif report_type.get(result) == "亲和扫描工具":
+ core_report_num += 1
+ core_report_detail.append([core_report_num, command_line_html.get(result),
+ "不存在", "未使用"])
+ if "精度分析" not in str(core_report_detail):
+ core_report_num += 1
+ core_report_detail.append([core_report_num, "精度分析",
+ "不存在", "未使用"])
+ if not tuner_report_detail:
+ tuner_report_detail.append([1, "libkperf", "不存在", "未使用"])
+
+    originals = """
+    <!DOCTYPE html>
+    <html lang="zh">
+    <head>
+        <meta charset="UTF-8">
+        <title>Document</title>
+    </head>
+    <body>
+        <h3>配置信息</h3>
+        {}
+        <h3>验收详细结果</h3>
+        <table border="1">
+            <tr>
+                <th>序号</th>
+                <th>使用工具</th>
+                <th>报告路径</th>
+                <th>使用状态</th>
+            </tr>
+            {}
+        </table>
+    </body>
+    </html>
+    """
+    test_originals = """
+    <!DOCTYPE html>
+    <html lang="zh">
+    <head>
+        <meta charset="UTF-8">
+        <title>Document</title>
+    </head>
+    <body>
+        {}
+        {}
+    </body>
+    </html>
+    """
+    jdk_gcc_originals = """
+    <!DOCTYPE html>
+    <html lang="zh">
+    <head>
+        <meta charset="UTF-8">
+        <title>Document</title>
+    </head>
+    <body>
+        {}
+        {}
+    </body>
+    </html>
+    """
+ scan_report_html = originals.format(scan_report_info, scan_report_detail)
+ core_report_html = originals.format(core_report_info, core_report_detail)
+ tuner_report_html = originals.format(tuner_report_info, tuner_report_detail)
+ test_report_html = test_originals.format(test_report_info, test_report_detail)
+ jdk_report_html = jdk_gcc_originals.format(jdk_report_info, jdk_report_detail)
+ gcc_report_html = jdk_gcc_originals.format(gcc_report_info, gcc_report_detail)
+ ut_report_html = originals.format(ut_report_info, ut_report_detail)
+ with open(os.path.join(current_path, "迁移扫描工具.html"), "w", encoding="utf-8") as f:
+ f.write(scan_report_html)
+ with open(os.path.join(current_path, "亲和扫描工具.html"), "w", encoding="utf-8") as f:
+ f.write(core_report_html)
+ with open(os.path.join(current_path, "DevKit测试平台.html"), "w", encoding="utf-8") as f:
+ f.write(test_report_html)
+ # with open(os.path.join(current_path, "毕昇JDK.html"), "w", encoding="utf-8") as f:
+ # f.write(jdk_report_html)
+ # with open(os.path.join(current_path, "GCC for openEuler.html"), "w", encoding="utf-8") as f:
+ # f.write(gcc_report_html)
+ with open(os.path.join(current_path, "调优工具.html"), "w", encoding="utf-8") as f:
+ f.write(tuner_report_html)
+ with open(os.path.join(current_path, "UTgen.html"), "w", encoding="utf-8") as f:
+ f.write(ut_report_html)
+    print("Report generation completed. View the reports in {}.".format(current_path))
+
+
+def decompress_zip(compressed_report_package, file_list=None):
+    # avoid a shared mutable default list across calls
+    if file_list is None:
+        file_list = []
+    with zipfile.ZipFile(compressed_report_package, 'r') as zip:
zip.extractall("./")
- file_names = zip.namelist()
- return file_names
+ for file in zip.namelist():
+ if file.endswith('.zip'):
+                decompress_zip(file, file_list=file_list)
+ file_names = zip.namelist()
+ file_list.extend(file_names)
+ return file_list
def decompress_gz_bz(compressed_report_package):
+ file_list = []
with tarfile.open(compressed_report_package, "r") as tar:
tar.extractall(path="./")
file_names = tar.getnames()
- return file_names
+ for file in file_names:
+ if file.endswith('.zip'):
+ decompress_zip(file, file_list=file_list)
+ file_list.extend(file_names)
+ return file_list
if __name__ == "__main__":
@@ -103,7 +744,8 @@ if __name__ == "__main__":
elif args.tool == "BoostKit":
pass
else:
- print("请输入正确的参数,如-tool Devkit 或 -tool BoostKit")
+ print("Enter a correct parameter, for example, -tool DevKit or -tool BoostKit.")
+
except Exception as err:
print(err)
- print("请输入正确的参数")
\ No newline at end of file
+ print("Enter a correct parameter.")
diff --git a/tools/acceptance_command_line_bisheng.py b/tools/acceptance_command_line_bisheng.py
new file mode 100644
index 0000000000000000000000000000000000000000..0bad10307258076295f4494dc72406ce54f56dba
--- /dev/null
+++ b/tools/acceptance_command_line_bisheng.py
@@ -0,0 +1,265 @@
+import os
+import zipfile
+import tarfile
+import argparse
+
+# Content used to populate the HTML report
+bisheng_optimization = [[1, "函数库优化", "标量数学库(mathlib)", "未使用"],
+ [2, "函数库优化", "矢量数学库(veclib=MATHLIB)", "未使用"],
+ [3, "函数库优化", "内存优化库(jemalloc)", "未使用"],
+ [4, "函数库优化", "neon指令字符优化库(stringlib)", "未使用"],
+ [5, "函数库优化", "sve指令字符优化库(stringlib)", "未使用"],
+ [6, "Continuous Feature Guided Optimization", "反馈优化(PGO)", "未使用"],
+                        [7, "Continuous Feature Guided Optimization", "二进制优化(BOLT)", "未使用"],
+ [8, "AI4Compiler", "autotuner", "未使用"]]
+# An optimization is considered enabled only when all of its keywords appear on the same line
+bisheng_optimization_keyword = [
+ ["-lmathlib"],
+ ["-lmathlib", "-fveclib=MATHLIB"],
+ ["-ljemalloc"],
+ ["-lstringlib", "_aarch64_simd"],
+ ["-lstringlib", "_aarch64_sve"],
+ ["-fprofile-use"],
+ ["llvm-bolt"],
+ ["-fautotune"],
+]
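+# Illustrative (assumed) example: a build-log line such as
+#   clang -O3 -fprofile-use=default.profdata app.c -o app
+# contains "-fprofile-use", so the PGO row above would be marked "已使用".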
+
+tuner_report_info = [{'label': '门禁工具', 'value': '毕昇编译器验收工具'},
+ {'label': '评估结果', 'value': '未通过'}]
+
+class AcceptanceTool(object):
+ def check_keywords_in_lines(self, content, keywords):
+ lines = content.splitlines()
+ for line in lines:
+ if all(keyword in line for keyword in keywords):
+ return True
+ return False
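+    # e.g. check_keywords_in_lines("clang demo.c -lstringlib _aarch64_sve",
+    #                              ["-lstringlib", "_aarch64_sve"]) returns True
+    # because both keywords appear on the same line.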
+
+ def devkit_acceptance_report(self, compressed_report_package):
+ if not os.path.exists(compressed_report_package):
+            print("Please enter a valid report package path.")
+ return
+ file_ext = os.path.splitext(compressed_report_package)[1].lower()
+ if file_ext not in (".zip", ".gz", ".bz2"):
+            print("Please use a compressed package in one of the following formats: .zip, .gz, .bz2")
+ return
+ decompress = {".zip": decompress_zip, ".gz": decompress_gz_bz, ".bz2": decompress_gz_bz}
+ current_path = os.getcwd()
+ print("Starting decompression...")
+ file_names = decompress.get(file_ext)(compressed_report_package)
+ for file in file_names:
+ try:
+ if not os.path.isfile(os.path.join(current_path, file)):
+ continue
+ with open(os.path.join(current_path, file), 'r', encoding='utf-8') as f:
+ content = f.read()
+ for i in range(len(bisheng_optimization_keyword)):
+ keywords = bisheng_optimization_keyword[i]
+ if self.check_keywords_in_lines(content, keywords):
+ bisheng_optimization[i][3] = "已使用"
+ tuner_report_info[1]["value"] = "通过"
+
+ except (UnicodeDecodeError, PermissionError):
+ continue
+
+    originals = """
+    <!DOCTYPE html>
+    <html lang="zh">
+    <head>
+        <meta charset="UTF-8">
+        <title>Document</title>
+    </head>
+    <body>
+        <h3>配置信息</h3>
+        {}
+        <h3>验收详细结果</h3>
+        <table border="1">
+            <tr>
+                <th>序号</th>
+                <th>优化项</th>
+                <th>优化子项</th>
+                <th>使用状态</th>
+            </tr>
+            {}
+        </table>
+    </body>
+    </html>
+    """
+
+ tuner_report_html = originals.format(tuner_report_info, bisheng_optimization)
+ with open(os.path.join(current_path, "毕昇编译器验收工具.html"), "w", encoding="utf-8") as f:
+ f.write(tuner_report_html)
+    print("Report generation completed.")
+ print("View the report in {}.".format(os.path.join(current_path, "毕昇编译器验收工具.html")))
+
+
+def decompress_zip(compressed_report_package, file_list=None):
+    # avoid a shared mutable default list across calls
+    if file_list is None:
+        file_list = []
+    with zipfile.ZipFile(compressed_report_package, 'r') as zip:
+ zip.extractall("./")
+ for file in zip.namelist():
+ if file.endswith('.zip'):
+                decompress_zip(file, file_list=file_list)
+ file_names = zip.namelist()
+ file_list.extend(file_names)
+ return file_list
+
+
+def decompress_gz_bz(compressed_report_package):
+ file_list = []
+ with tarfile.open(compressed_report_package, "r") as tar:
+ tar.extractall(path="./")
+ file_names = tar.getnames()
+ for file in file_names:
+ if file.endswith('.zip'):
+ decompress_zip(file, file_list=file_list)
+ file_list.extend(file_names)
+ return file_list
+
+
+if __name__ == "__main__":
+ try:
+        parser = argparse.ArgumentParser(description="BiSheng compiler acceptance tool")
+ parser.add_argument('-tool', help='BiSheng')
+ parser.add_argument('-package', help='Compressed package')
+ args = parser.parse_args()
+ acceptance_tool = AcceptanceTool()
+ if args.tool == "BiSheng":
+ acceptance_tool.devkit_acceptance_report(args.package)
+ else:
+ print("Enter a correct parameter, for example, -tool BiSheng.")
+
+ except Exception as err:
+ print(err)
+ print("Enter a correct parameter.")
diff --git a/tools/collect_msg.sh b/tools/collect_msg.sh
new file mode 100644
index 0000000000000000000000000000000000000000..dea57dbb4fafb60ecfa32e78930511a6539b1e38
--- /dev/null
+++ b/tools/collect_msg.sh
@@ -0,0 +1,653 @@
+#!/bin/bash
+#########################################################################
+# File Name: collect_msg.sh
+# Author: ****
+# mail: ****.com
+# Created Time: Wed Mar 27 14:56:03 2024
+#########################################################################
+current_path=$(pwd)
+config_file=$current_path/config.ini
+log_path=$current_path/log
+default_project="Bigdata Database Storage Arm Virt Acclib Virtual HPC"
+
+spark_omni_func=(
+ --deploy-mode client
+ --driver-cores 1
+ --driver-memory 980M
+ --num-executors 3
+ --executor-cores 1
+ --executor-memory 600M
+ --master yarn
+ --conf spark.memory.offHeap.enabled=true
+ --conf spark.memory.offHeap.size=1025M
+ --conf spark.task.cpus=1
+ --conf spark.driver.extraClassPath=/opt/omni-operator/lib/boostkit-omniop-spark-3.1.1-1.4.0-aarch64.jar:/opt/omni-operator/lib/boostkit-omniop-bindings-1.4.0-aarch64.jar:/opt/omni-operator/lib/dependencies/*
+ --conf spark.executor.extraClassPath=/opt/omni-operator/lib/boostkit-omniop-spark-3.1.1-1.4.0-aarch64.jar:/opt/omni-operator/lib/boostkit-omniop-bindings-1.4.0-aarch64.jar:/opt/omni-operator/lib/dependencies/*
+ --driver-java-options -Djava.library.path=/opt/omni-operator/lib
+ --conf spark.sql.codegen.wholeStage=false
+ --conf spark.executorEnv.LD_LIBRARY_PATH=/opt/omni-operator/lib
+ --conf spark.executorEnv.OMNI_HOME=/opt/omni-operator/
+ --conf spark.driverEnv.LD_LIBRARY_PATH=/opt/omni-operator/lib
+ --conf spark.driverEnv.OMNI_HOME=/opt/omni-operator/
+ --conf spark.executor.extraLibraryPath=/opt/omni-operator/lib
+ --conf spark.driverEnv.LD_PRELOAD=/opt/omni-operator/lib/libjemalloc.so.2
+ --conf spark.executorEnv.LD_PRELOAD=/opt/omni-operator/lib/libjemalloc.so.2
+ --conf spark.sql.extensions=com.huawei.boostkit.spark.ColumnarPlugin
+ --jars /opt/omni-operator/lib/boostkit-omniop-spark-3.1.1-1.4.0-aarch64.jar
+ --jars /opt/omni-operator/lib/boostkit-omniop-bindings-1.4.0-aarch64.jar
+ --conf spark.sql.orc.impl=native
+ --conf spark.shuffle.manager=org.apache.spark.shuffle.sort.OmniColumnarShuffleManager
+ --conf spark.omni.sql.columnar.fusion=false
+ --conf spark.omni.sql.columnar.sortSpill.enabled=true
+ --conf spark.omni.sql.columnar.sortSpill.rowThreshold=4000000
+ --conf spark.omni.sql.columnar.sortSpill.dirDiskReserveSize=214748364800
+ --conf spark.locality.wait=8
+ --conf spark.sql.autoBroadcastJoinThreshold=10M
+ --conf spark.sql.broadcastTimeout=500
+ --conf spark.sql.cbo.enabled=false
+ --conf spark.default.parallelism=200
+ --conf spark.sql.shuffle.partitions=200
+ --conf spark.executorEnv.MALLCO_CONF=narenas:2
+)
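+# Passed below to spark-sql as: $spark_path/bin/spark-sql "${spark_omni_func[@]}" --database <db> -e "<sql>"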
+
+
+
+##################### Get the value for a given section and key ############
+acquire_value(){
+ project=$1
+ key=$2
+ grep $project -A 15 $config_file |grep -m 1 $key|awk -F= '{print $2}'|awk '{print $1}'
+}
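+# e.g. "acquire_value Database mysql_username" prints "root" from the [Database] section of config.ini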
+
+
+###################### Get the solutions selected for verification in the config file ###########
+acquire_select_project(){
+ all_name=""
+ for per_project in $default_project
+ do
+ status=$(acquire_value $per_project check)
+ if [[ $status = True ]]; then
+ all_name="$all_name $per_project"
+ fi
+ done
+ echo $all_name
+}
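+# e.g. if only [Bigdata] has check=True in config.ini, this echoes "Bigdata"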
+
+
+check_customer_info(){
+ customer_information=$1
+ if [ -z "$customer_information" ];
+ then
+        echo "Please fill in a unique identifier in the Global section of config.ini, e.g. xxx有限公司"
+ exit 1
+ fi
+
+ if [[ "$customer_information"x =~ "xxx有限公司" ]];
+ then
+        echo "Please replace xxx有限公司 in the Global section of config.ini with your actual company name"
+ exit 1
+ fi
+}
+
+
+#################### Pack the log files ############################
+tar_log_file(){
+ customer_information=$1
+ datatime=$(date '+%Y%m%d%H%M')
+
+ if test -d $log_path;
+ then
+ echo "customer_information: ${customer_information}_isv_msg_${datatime}.tar.gz" >> $log_path/os_msg.log
+ tar zcvf ${customer_information}_isv_msg_${datatime}.tar.gz -C $log_path .
+ else
+        echo "log path $log_path is abnormal, please check"
+ fi
+}
+
+
+################### ARM native feature information collection #################
+collect_arm_native(){
+ kbox_container=$1
+ video_container=$2
+ instruction_container=$3
+    # check whether the tested containers exist
+ containers=($1 $2 $3)
+ for i in ${containers[@]};do
+        docker ps -a | grep -wq $i || { echo "container $i not found, please check!"; exit 1; }
+ done
+    # Kbox base cloud phone
+ rm -f $log_path/arm_native.log
+
+ keyword=(gralloc.kbox.so audio.primary.kbox.so gps.kbox.so sensors.kbox.so libmedia_omxcore.so libstagefrighthw.so vinput hwcomposer.kbox.so)
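+    # these Kbox HAL libraries and processes in the container's lsof output indicate ARM-native features in use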
+ docker exec -it $kbox_container lsof > $log_path/arm_native_raw.log
+ for i in "${keyword[@]}";do
+ grep -F "${i}" $log_path/arm_native_raw.log >> $log_path/arm_native.log
+ done
+ rm -f $log_path/arm_native_raw.log
+
+ docker exec -it $kbox_container cat /proc/sys/fs/binfmt_misc/ubt_a32a64 >> $log_path/arm_native.log
+    # Video streaming
+ docker exec -it $video_container lsof | grep VmiInputFlinger >> $log_path/arm_native.log
+    # Instruction streaming
+ docker exec -it $instruction_container ps -ef | grep -F "VmiAgent instruction" >> $log_path/arm_native.log
+}
+
+
+################# Virtualization feature information collection ##################
+collect_virtual_host(){
+ sudo systemctl status waasagent.service |grep "Active" > $log_path/virtual_sense.log
+ waasctl --version >> $log_path/virtual_sense.log
+
+ ovs_appctl_res=$(ovs-appctl --version 2>&1)
+
+ if [[ $ovs_appctl_res =~ "command not found" ]];
+ then
+ echo "ovs-appctl: command not found" > $log_path/virtual_dpu_flow.log
+ else
+ echo "ovs-appctl version: $ovs_appctl_res" > $log_path/virtual_dpu_flow.log
+ script -a -c 'ovs-appctl hwoff/dump-hwoff-flows' $log_path/virtual_dpu_flow.log
+ fi
+}
+
+
+collect_virtual_dpu(){
+ server_name=$1
+ network=$2
+ flavor=$3
+ volume=$4
+ availability_zone=$5
+
+    # Must be executed on the DPU side
+ dpak_ovs_ctl_res=$(dpak-ovs-ctl -h 2>&1)
+ if [[ $dpak_ovs_ctl_res =~ "command not found" ]];
+ then
+        echo "Make sure this tool is executed on the DPU side"
+ echo "dpak_ovs_ctl: command not found" > $log_path/virtual_dpu.log
+ else
+ echo "dpak_ovs_ctl version: $dpak_ovs_ctl_res" > $log_path/virtual_dpu.log
+ script -a -c 'dpak-ovs-ctl hwoff/dump-hwoff-flows' $log_path/virtual_dpu.log
+ fi
+
+ /usr/libexec/spdk/scripts/hw_dpu_rpc.py get_version >> $log_path/virtual_dpu.log 2>&1
+ /usr/libexec/spdk/scripts/hw_dpu_rpc.py get_controllers >> $log_path/virtual_dpu.log 2>&1
+
+    # Create a VM
+ openstack_res=$(openstack --version 2>&1)
+
+ if [[ $openstack_res =~ "command not found" ]];
+ then
+        echo "Make sure this tool is executed on the DPU side"
+ echo "openstack: command not found" >> $log_path/virtual_dpu.log
+ else
+ echo "openstack version: $openstack_res" >> $log_path/virtual_dpu.log
+ openstack server create $server_name --network $network --flavor $flavor --volume $volume --availability-zone $availability_zone >> $log_path/virtual_dpu.log
+        echo "Waiting for VM creation to complete"
+ sleep 120
+ echo "server_name: $server_name" >> $log_path/virtual_dpu.log
+ openstack server list >> $log_path/virtual_dpu.log
+ fi
+}
+
+
+################ Database feature information collection ##################
+collect_database(){
+ mysql_install_path=$1
+ mysql_username=$2
+ mysql_password=$3
+ database_name=$4
+ plugin_path=$1/lib/plugin
+ $mysql_install_path/bin/mysqld_safe --defaults-file=/etc/my.cnf &
+ sleep 20
+ mysql -u $mysql_username -p$mysql_password -D $database_name -e "select * from INFORMATION_SCHEMA.plugins where PLUGIN_NAME like 'thread_pool%'" > $log_path/database_mysql.log
+ mysql -u $mysql_username -p$mysql_password -D $database_name -e "select * from INFORMATION_SCHEMA.plugins where PLUGIN_NAME like 'kovae%'" >> $log_path/database_mysql.log
+ echo thread_pool: $(ls $plugin_path |grep thread_pool.so) >> $log_path/database_mysql.log
+ echo kovae_path: $(ls $plugin_path |grep ha_kovae.so) >> $log_path/database_mysql.log
+ readelf -a $mysql_install_path/bin/mysqld|grep bolt >> $log_path/database_mysql.log
+ echo no_lock: $(objdump -d $mysql_install_path/bin/mysqld|grep -c row_vers_build_for_semi_consistent_readP5trx_t) >> $log_path/database_mysql.log
+ objdump -d $mysql_install_path/bin/mysqld |grep crc32cb >> $log_path/database_mysql.log
+ pkill -9 mysql
+}
+
+
+#################### Confidential computing feature information collection ################
+collect_virtcca_msg(){
+ cvm_name=$1
+ username=$2
+ passwd=$3
+ xml_path=/tmp/temp.xml
+ virsh list --all|grep -q $cvm_name
+ if [ $? -ne 0 ]; then
+        echo "Error: VM $cvm_name does not exist"
+ return 0
+ fi
+ vm_status=$(virsh domstate "$cvm_name")
+
+    if [ "$vm_status" == "shut off" ]; then
+        echo "VM $cvm_name is shut off, starting it..."
+        virsh start "$cvm_name"
+        echo "VM $cvm_name started"
+    elif [ "$vm_status" == "running" ]; then
+        echo "VM $cvm_name is already running, nothing to do"
+    else
+        echo "Error: unable to determine the state of VM $cvm_name"
+        return 0
+ fi
+
+ virsh dumpxml $cvm_name > $xml_path
+ ret=$(grep -i "type='cvm'" $xml_path)
+ echo "$ret" > $log_path/virtcca_status.log
+ expect << EOF >> $log_path/virtcca_status.log
+ spawn virsh console $cvm_name
+ expect "Escape character is \\^]"
+ send "\r"
+ expect "login:"
+ send "$username\r"
+ expect "Password:"
+ send "$passwd\r"
+ expect "# "
+ send "ls -l /\r"
+ expect "# "
+ send "exit\r"
+ expect eof
+EOF
+}
+
+
+collect_ccos_msg(){
+ /vendor/bin/tee-check > $log_path/virtccos_itrustee.log
+ tlogcat -f &
+ sleep 3s
+ cat /var/log/tee/teeOS_log-0 | grep TA_UUID >> $log_path/virtccos_itrustee.log
+}
+
+
+################# Acceleration library feature information collection ##################
+collect_acceleration_library(){
+ system_lib=$1
+ hmpp_lib=$2
+ math_lib=$3
+ openssl speed -elapsed -engine kae rsa2048 > $log_path/acceleration_library.log 2>&1
+    ldd $system_lib >> $log_path/acceleration_library.log
+    ldd $hmpp_lib >> $log_path/acceleration_library.log
+    ldd $math_lib >> $log_path/acceleration_library.log
+}
+
+
+############### Distributed storage feature information collection ###############
+# $1: name of the EC pool
+collect_storage_acc(){
+ ec_pool=$1
+    # Storage acceleration library
+ ldd /usr/bin/ceph-osd > $log_path/storage_acc.log
+ bcache_dev=$(ls /sys/class/block|grep -m 1 bcache)
+    # if the bcache device is absent, error handling could be added here
+    ls -l /sys/class/block/$bcache_dev/bcache/cache/internal/traffic_policy_start >> $log_path/storage_acc.log
+
+ pool_list=$(rados lspools |grep -wx $ec_pool)
+ if [[ $pool_list =~ $ec_pool ]];
+ then
+ echo "ec_pool created" >> $log_path/storage_acc.log
+ pid_num=$(ps -ef|grep osd|grep -v grep|head -n 1|awk '{print $2}')
+ cat /proc/$pid_num/smaps |grep ksal >> $log_path/storage_acc.log
+ else
+ echo "ec_pool not exist" >> $log_path/storage_acc.log
+ fi
+}
+
+
+############### Big data feature information collection ##################
+collect_bigdata_kal(){
+ algotithm_list=$1
+ algotithm_path=$2
+ dataset_list=$3
+
+ read -r -a algotithm_arry <<< "$algotithm_list"
+ read -r -a dataset_arry <<< "$dataset_list"
+
+ cd $algotithm_path
+ index=0
+ for per_alg in ${algotithm_list[*]}
+ do
+ #bash $algotithm_path/bin/ml/${algotithm_arry[$index]}_run.sh ${dataset_arry[$index]} no no > $log_path/bigdata_kal_${algotithm_arry[$index]}_${dataset_arry[$index]}.log 2>&1
+ #index=`expr $index + 1`
+ # "Usage: ","1st argument: name of dataset: cit_patents, enwiki_2018, uk_2002","2nd argument: optimization algorithm or raw: no/yes","3rd argument: verify result: no/yes"
+ if [ "$per_alg" == "betweenness" ] || [ "$per_alg" == "node2vec" ];
+ then
+ bash $algotithm_path/bin/graph/${algotithm_arry[$index]}_run.sh ${dataset_arry[$index]} no no > $log_path/bigdata_kal_${algotithm_arry[$index]}_${dataset_arry[$index]}.log 2>&1
+ fi
+ # "Usage: ","1st argument: name of dataset: cit_patents,enwiki_2018,arabic_2005,graph500_22,graph500_23,graph500_25","2nd argument: optimization algorithm or raw: no/yes"
+ if [ "$per_alg" == "bfs" ] || [ "$per_alg" == "cc" ] || [ "$per_alg" == "deepwalk" ] || [ "$per_alg" == "diameter" ] || [ "$per_alg" == "ecc" ] || [ "$per_alg" == "fraudar" ] || [ "$per_alg" == "katz" ] || [ "$per_alg" == "kcore" ] || [ "$per_alg" == "ktruss" ] || [ "$per_alg" == "louvain" ] || [ "$per_alg" == "modularity" ] || [ "$per_alg" == "mst" ] || [ "$per_alg" == "scc" ] || [ "$per_alg" == "slpa" ] || [ "$per_alg" == "tpr" ] || [ "$per_alg" == "trussdecomposition" ] || [ "$per_alg" == "wlpa" ] || [ "$per_alg" == "wmssp" ];
+ then
+ bash $algotithm_path/bin/graph/${algotithm_arry[$index]}_run.sh ${dataset_arry[$index]} no > $log_path/bigdata_kal_${algotithm_arry[$index]}_${dataset_arry[$index]}.log 2>&1
+ fi
+ # "Usage:","dataset name: simulate1,simulate2,usaRoad"
+ if [ "$per_alg" == "cd" ] || [ "$per_alg" == "inccc" ] || [ "$per_alg" == "mce" ] || [ "$per_alg" == "wce" ];
+ then
+ bash $algotithm_path/bin/graph/${algotithm_arry[$index]}_run.sh ${dataset_arry[$index]} > $log_path/bigdata_kal_${algotithm_arry[$index]}_${dataset_arry[$index]}.log 2>&1
+ fi
+ # "Usage: ","1st argument: name of dataset: name of dataset: cit_patents,uk_2002","2nd argument: weight or not: e.g. weighted,unweighted","3rd argument: verify result: no/yes"
+ if [ "$per_alg" == "closeness" ];
+ then
+ bash $algotithm_path/bin/graph/${algotithm_arry[$index]}_run.sh ${dataset_arry[$index]} weight no > $log_path/bigdata_kal_${algotithm_arry[$index]}_${dataset_arry[$index]}.log 2>&1
+ fi
+ # "Usage: ","1st argument: name of dataset: name of dataset: cit_patents,uk_2002,arabic_2005,graph500_22,graph500_23,graph500_24,graph500_25"
+ # "2nd argument: name of api: lcc,avgcc,globalcc","3nd argument: weight or not: weighted,unweighted","4th argument: optimization algorithm or raw: no/yes"
+ if [ "$per_alg" == "clusteringcoefficient" ];
+ then
+ bash $algotithm_path/bin/graph/${algotithm_arry[$index]}_run.sh ${dataset_arry[$index]} lcc weighted no > $log_path/bigdata_kal_${algotithm_arry[$index]}_${dataset_arry[$index]}.log 2>&1
+ fi
+ # "Usage: ","1st argument: name of dataset: it_2004,twitter7,uk_2007_05,mycielskian20,gap_kron,com_friendster"
+ # "2nd argument: name of api: degrees,inDegrees,outDegrees","3rd argument: optimization algorithm or raw: no/yes"
+ if [ "$per_alg" == "degree" ];
+ then
+ bash $algotithm_path/bin/graph/${algotithm_arry[$index]}_run.sh ${dataset_arry[$index]} degrees no > $log_path/bigdata_kal_${algotithm_arry[$index]}_${dataset_arry[$index]}.log 2>&1
+ fi
+ # "Usage: ","1st argument: name of dataset: twitter_2010","2nd argument: rate: e.g. 0.001,0.01,0.05"
+ # "3nd argument: batch: e.g. 1,2,3,4,5","4th argument: optimization algorithm or raw: no/yes"
+ if [ "$per_alg" == "incpr" ];
+ then
+ bash $algotithm_path/bin/graph/${algotithm_arry[$index]}_run.sh ${dataset_arry[$index]} 0.001 1 no > $log_path/bigdata_kal_${algotithm_arry[$index]}_${dataset_arry[$index]}.log 2>&1
+ fi
+ # "Usage: ","1st argument: name of dataset: name of dataset: graph500_21,com_orkut"
+ # "2nd argument: name of queryGraph: for Identical: 4dgn/4sqr/6star; for unIdentical: 4dgn/4sqr/4clique/5clique/6clique","3rd argument: match mode:Identical,unIdentical"
+ if [ "$per_alg" == "incsgm" ];
+ then
+ bash $algotithm_path/bin/graph/${algotithm_arry[$index]}_run.sh ${dataset_arry[$index]} 4dgn Identical > $log_path/bigdata_kal_${algotithm_arry[$index]}_${dataset_arry[$index]}.log 2>&1
+ fi
+ # "Usage: ","1st argument: name of dataset: graph500_22,graph500_24,graph500_25","2nd argument: api: run,runConvergence","3rd argument: optimization algorithm or raw: no/yes"
+ if [ "$per_alg" == "lpa" ] || [ "$per_alg" == "pr" ] || [ "$per_alg" == "tc" ];
+ then
+ bash $algotithm_path/bin/graph/${algotithm_arry[$index]}_run.sh ${dataset_arry[$index]} run no > $log_path/bigdata_kal_${algotithm_arry[$index]}_${dataset_arry[$index]}.log 2>&1
+ fi
+ # "Usage: ","1st argument: name of dataset: soc_liveJournal,uk_2002,arabic_2005","2nd argument: source number: 5/50","3rd argument: optimization algorithm or raw: no/yes"
+ if [ "$per_alg" == "mssp" ];
+ then
+ bash $algotithm_path/bin/graph/${algotithm_arry[$index]}_run.sh ${dataset_arry[$index]} 5 no > $log_path/bigdata_kal_${algotithm_arry[$index]}_${dataset_arry[$index]}.log 2>&1
+ fi
+ # "Usage: ","1st argument: name of dataset: epinions, graph500_23_weight, graph500_25_weight","2nd argument: anomaly_type: 0/1","3rd argument: optimization algorithm or raw: no/yes"
+ if [ "$per_alg" == "oddball" ];
+ then
+ bash $algotithm_path/bin/graph/${algotithm_arry[$index]}_run.sh ${dataset_arry[$index]} 0 no > $log_path/bigdata_kal_${algotithm_arry[$index]}_${dataset_arry[$index]}.log 2>&1
+ fi
+ # "Usage: ","1st argument: name of dataset: cit_patents,uk_2002,arabic_2005","2nd argument: name of api: fixMS,fixSS,conSS"
+ # "3rd argument: optimization algorithm or raw: no/yes","4th argument: sourceCnt or null: 1,5,10,50,100"
+ if [ "$per_alg" == "ppr" ];
+ then
+ bash $algotithm_path/bin/graph/${algotithm_arry[$index]}_run.sh ${dataset_arry[$index]} fixMS no 1 > $log_path/bigdata_kal_${algotithm_arry[$index]}_${dataset_arry[$index]}.log 2>&1
+ fi
+ # "Usage: ","1st argument: name of dataset: name of dataset: graph500_19,liveJournal,com_orkut"
+ # "2nd argument: name of queryGraph: for Identical: 4dgn/4sqr/5tree/6star; for unIdentical: 4dgn/4clique/5clique/6clique","3rd argument: match mode:Identical,unIdentical","4th argument: optimization algorithm or raw: no/yes"
+ if [ "$per_alg" == "sgm" ];
+ then
+ bash $algotithm_path/bin/graph/${algotithm_arry[$index]}_run.sh ${dataset_arry[$index]} 4dgn Identical no > $log_path/bigdata_kal_${algotithm_arry[$index]}_${dataset_arry[$index]}.log 2>&1
+ fi
+ # "Usage: ","1st argument: name of dataset: cit_patents,uk_2002,arabic_2005","2nd argument: name of api: run,runUntilConvergence","3nd argument: seeds count: 100,500,1000"
+ if [ "$per_alg" == "tr" ];
+ then
+ bash $algotithm_path/bin/graph/${algotithm_arry[$index]}_run.sh ${dataset_arry[$index]} run 100 > $log_path/bigdata_kal_${algotithm_arry[$index]}_${dataset_arry[$index]}.log 2>&1
+ fi
+ # "Usage: ","1st argument: name of dataset: cage14, GAP_road, GAP_twitter","2nd argument: name of api: static, convergence","3rd argument: optimization algorithm or raw: no, yes"
+ if [ "$per_alg" == "wpr" ];
+ then
+ bash $algotithm_path/bin/graph/${algotithm_arry[$index]}_run.sh ${dataset_arry[$index]} static no > $log_path/bigdata_kal_${algotithm_arry[$index]}_${dataset_arry[$index]}.log 2>&1
+ fi
+ # "Usage: ","1st argument: type of data structure: [dataframe/rdd]","2nd argument: name of dataset: e.g. als/alsbs/alsh"
+ # "3rd argument: name of API: e.g. fit/fit1/fit2/fit3; for rdd: train","4th argument: optimization algorithm or raw: [no/yes]","5th argument: Whether to Compare Results [no/yes]"
+ if [ "$per_alg" == "als" ] || [ "$per_alg" == "kmeans" ] || [ "$per_alg" == "lda" ] || [ "$per_alg" == "pca" ];
+ then
+ bash $algotithm_path/bin/ml/${algotithm_arry[$index]}_run.sh dataframe ${dataset_arry[$index]} fit no no > $log_path/bigdata_kal_${algotithm_arry[$index]}_${dataset_arry[$index]}.log 2>&1
+ fi
+ # "Usage: ","1st argument: name of dataset: e.g. CP10M1K/CP2M5K/CP1M10K","2nd argument: optimization algorithm or raw: [no/yes]","3rd argument: Whether to Compare Results [no/yes]"
+ if [ "$per_alg" == "bo" ] || [ "$per_alg" == "cov" ] || [ "$per_alg" == "crf" ] || [ "$per_alg" == "encoder" ] || [ "$per_alg" == "fpg" ] || [ "$per_alg" == "hdb" ] || [ "$per_alg" == "idf" ] || [ "$per_alg" == "if" ] || [ "$per_alg" == "nmf" ] || [ "$per_alg" == "ps" ] || [ "$per_alg" == "simrank" ] || [ "$per_alg" == "svd" ] || [ "$per_alg" == "te" ];
+ then
+ bash $algotithm_path/bin/ml/${algotithm_arry[$index]}_run.sh ${dataset_arry[$index]} no no > $log_path/bigdata_kal_${algotithm_arry[$index]}_${dataset_arry[$index]}.log 2>&1
+ fi
+ # "Usage: ","1st argument: name of dataset: e.g. bremenSmall/farm/house","2nd argument: optimization algorithm or raw: [no/yes]"
+ if [ "$per_alg" == "dbscan" ] || [ "$per_alg" == "knn" ];
+ then
+ bash $algotithm_path/bin/ml/${algotithm_arry[$index]}_run.sh ${dataset_arry[$index]} no > $log_path/bigdata_kal_${algotithm_arry[$index]}_${dataset_arry[$index]}.log 2>&1
+ fi
+ # "Usage: ","1st argument: type of algorithm: [classification/regression]","2nd argument: type of data structure: [dataframe/rdd]"
+ # "3rd argument: name of dataset: [epsilon/higgs/mnist8m]","4th argument: name of API: [for dataframe: fit/fit1/fit2/fit3; for rdd: trainClassifier/trainRegressor]","5th argument: optimization algorithm or raw: [no/yes]"
+ # "6th argument: Whether to Compare Results [no/yes]"
+ if [ "$per_alg" == "dt" ] || [ "$per_alg" == "gbdt" ] || [ "$per_alg" == "rf" ];
+ then
+ bash $algotithm_path/bin/ml/${algotithm_arry[$index]}_run.sh classification dataframe ${dataset_arry[$index]} fit no no > $log_path/bigdata_kal_${algotithm_arry[$index]}_${dataset_arry[$index]}.log 2>&1
+ fi
+ # "Usage: ","1st argument: name of dataset: higgs/mnist8m","2nd argument: name of API: fit/fit1/fit2/fit3"
+ # "3rd argument: save or verify result: save/verify","4th argument: optimization algorithm or raw: no/yes"
+ if [ "$per_alg" == "dtb" ];
+ then
+ bash $algotithm_path/bin/ml/${algotithm_arry[$index]}_run.sh ${dataset_arry[$index]} fit verify no > $log_path/bigdata_kal_${algotithm_arry[$index]}_${dataset_arry[$index]}.log 2>&1
+ fi
+ # "Usage: ","1st argument: type of algorithm: [classification/regression]"
+ # "2nd argument: name of dataset: [higgs/avazu]","3rd argument: name of API: [fit]"
+ # "4th argument: optimization algorithm or raw: [no/yes]","5th argument: Whether to Compare Results [no/yes]"
+ if [ "$per_alg" == "fm" ];
+ then
+ bash $algotithm_path/bin/ml/${algotithm_arry[$index]}_run.sh classification ${dataset_arry[$index]} fit no no > $log_path/bigdata_kal_${algotithm_arry[$index]}_${dataset_arry[$index]}.log 2>&1
+ fi
+ # "Usage: ","1st argument: type of algorithm: [classification/regression]","2nd argument: name of dataset:mnist8m, higgs "
+ # "3rd argument: optimization algorithm or raw: [no/yes]","4th argument: Whether to Compare Results [no/yes]"
+ if [ "$per_alg" == "lgbm" ];
+ then
+ bash $algotithm_path/bin/ml/${algotithm_arry[$index]}_run.sh classification ${dataset_arry[$index]} no no > $log_path/bigdata_kal_${algotithm_arry[$index]}_${dataset_arry[$index]}.log 2>&1
+ fi
+ # "Usage: ","1st argument: name of dataset: e.g. mnist8m/Twitter/rcv"
+ # "2nd argument: name of API: e.g. fit/fit1/fit2/fit3","3th argument: optimization algorithm or raw: [no/yes]","4th argument: Whether to Compare Results [no/yes]"
+ if [ "$per_alg" == "linR" ] || [ "$per_alg" == "logR" ] || [ "$per_alg" == "spca" ] || [ "$per_alg" == "svm" ];
+ then
+ bash $algotithm_path/bin/ml/${algotithm_arry[$index]}_run.sh ${dataset_arry[$index]} fit no no > $log_path/bigdata_kal_${algotithm_arry[$index]}_${dataset_arry[$index]}.log 2>&1
+ fi
+ # "Usage: ","1st argument: type of data structure: [dataframe/rdd]","2nd argument: name of dataset: e.g. CP10M1K/CP2M5K/CP1M10K"
+ # "3nd argument: optimization algorithm or raw: [no/yes]","4rd argument: Whether to Compare Results [no/yes]"
+ if [ "$per_alg" == "pearson" ] || [ "$per_alg" == "spearman" ];
+ then
+ bash $algotithm_path/bin/ml/${algotithm_arry[$index]}_run.sh dataframe ${dataset_arry[$index]} no no > $log_path/bigdata_kal_${algotithm_arry[$index]}_${dataset_arry[$index]}.log 2>&1
+ fi
+ # "Usage: ","1st argument: name of dataset: cate/node/item/taobao","2nd argument: name of API: fit/fit1/fit2/fit3","3rd argument:optimization algorithm or raw: no/yes"
+ if [ "$per_alg" == "word2vec" ];
+ then
+ bash $algotithm_path/bin/ml/${algotithm_arry[$index]}_run.sh ${dataset_arry[$index]} fit no > $log_path/bigdata_kal_${algotithm_arry[$index]}_${dataset_arry[$index]}.log 2>&1
+ fi
+ # "Usage: ","1rd argument: name of dataset: e.g. higgs/mnist8m","2st argument: type of algorithm: [classification/regression]"
+ # "3th argument: optimization algorithm or raw: [no/yes]","4th argument: Whether to Compare Results [no/yes]"
+ if [ "$per_alg" == "xgbt" ];
+ then
+ bash $algotithm_path/bin/ml/${algotithm_arry[$index]}_run.sh ${dataset_arry[$index]} classification no no > $log_path/bigdata_kal_${algotithm_arry[$index]}_${dataset_arry[$index]}.log 2>&1
+ fi
+        index=$((index + 1))
+ done
+
+}
+
+
+collect_bigdata_operator(){
+    # Log location: $log_path/bigdata_operator.log
+ spark_path=$1
+ database=$2
+ if [ -e $spark_path ];
+ then
+ spark_conf_path=$1/conf
+        if ! grep -q "^log4j.logger.com.huawei.boostkit.spark=INFO" $spark_conf_path/log4j.properties;
+ then
+ echo "log4j.logger.com.huawei.boostkit.spark=INFO" >> $spark_conf_path/log4j.properties
+ fi
+ $spark_path/bin/spark-sql "${spark_omni_func[@]}" --database $database -e "WITH customer_total_return AS ( SELECT sr_customer_sk AS ctr_customer_sk, sr_store_sk AS ctr_store_sk, sum(sr_return_amt) AS ctr_total_return FROM store_returns, date_dim WHERE sr_returned_date_sk = d_date_sk AND d_year = 2000 GROUP BY sr_customer_sk, sr_store_sk) SELECT c_customer_id FROM customer_total_return ctr1, store, customer WHERE ctr1.ctr_total_return > (SELECT avg(ctr_total_return) * 1.2 FROM customer_total_return ctr2 WHERE ctr1.ctr_store_sk = ctr2.ctr_store_sk) AND s_store_sk = ctr1.ctr_store_sk AND s_state = 'TN' AND ctr1.ctr_customer_sk = c_customer_sk ORDER BY c_customer_id LIMIT 100;" 1>$log_path/bigdata_operator.log 2>&1
+ else
+ echo "$spark_path not exist" >$log_path/bigdata_operator.log 2>&1
+ fi
+}
+
+
+collect_bigdata_hbase(){
+
+ hbase com.huawei.boostkit.hindex.mapreduce.GlobalTableIndexer -Dtablename.to.index=OnlySingleIndexTable -Dindexspecs.to.addandbuild='osindex=>C0:[F1]'
+ hbase shell << EOF > $log_path/bigdata_hbase.log 2>&1
+ debug
+ scan 'OnlySingleIndexTable',{FILTER=>"(SingleColumnValueFilter('C0', 'F1',=,'binary:bbb')"}
+ exit
+EOF
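+    # the filtered scan above exercises the hindex secondary index created by GlobalTableIndexer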
+}
+
+
+collect_bigdata_tune_up(){
+ omniadvisor_dir=$1
+ mysql_username=$2
+ mysql_password=$3
+ mysql_database=$4
+ if [ -e $omniadvisor_dir/omniadvisor ];
+ then
+ echo "omniadvisor.log" >> $log_path/bigdata_tune_up.log
+ else
+ echo "omniadvisor.log not exist" >> $log_path/bigdata_tune_up.log
+ fi
+
+ mysql -u $mysql_username -p$mysql_password -D $mysql_database -e "show tables" >> $log_path/bigdata_tune_up.log 2>&1
+
+}
+
+
+################# HPC feature information collection ##################
+# $1: path to the user executable
+
+collect_hpc_acceleration_library(){
+ bin_path=$1
+ rm -rf $log_path/hpc_acceleration_library.log
+ touch $log_path/hpc_acceleration_library.log
+ ldd $bin_path > $log_path/hpc_acceleration_library.log 2>&1
+}
+
+# $1: path to the user executable
+collect_sme_acceleration_library(){
+ bin_path=$1
+    ifsme=$(lscpu | grep Flags | grep sme)
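+    # lscpu lists "sme" under Flags only when the CPU supports the Scalable Matrix Extension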
+ if [ -n "$ifsme" ]; then
+ rm -rf $log_path/hpc_SME_library.log
+ touch $log_path/hpc_SME_library.log
+ ldd $bin_path | grep SME >> $log_path/hpc_SME_library.log 2>&1
+ objdump -d $bin_path >> $log_path/hpc_SME_library.log 2>&1
+ else
+        echo "The architecture does not support SME" >> $log_path/hpc_SME_library.log
+ fi
+
+}
+
+
+################ Environment information collection #######################
+collect_os_msg(){
+ echo os: $(cat /etc/os-release |grep PRETTY_NAME=|awk -F= '{print $2}') > $log_path/os_msg.log
+ echo kernel: $(uname -r) >> $log_path/os_msg.log
+ dmidecode -t Processor|grep -m 1 Version: >> $log_path/os_msg.log
+ dmidecode -t system|grep "Product Name" >> $log_path/os_msg.log
+}
+
+
+main(){
+ if [ -e $config_file ]; then
+ select_project=$(acquire_select_project)
+        echo "Start collecting BoostKit feature information for: $select_project"
+ mkdir -p $log_path
+ rm -fr $log_path/*
+        echo "Log location: $log_path"
+ else
+        echo "config.ini does not exist"
+ exit 1
+ fi
+
+ collect_os_msg
+ for per_project in $select_project
+ do
+ if [ $per_project = "Arm" ];
+ then
+ kbox_container=$(acquire_value Arm kbox_container)
+ video_container=$(acquire_value Arm video_container)
+ instruction_container=$(acquire_value Arm instuction_container)
+ echo "start collect Arm msg..."
+ collect_arm_native $kbox_container $video_container $instruction_container
+ echo "Arm collect msg Done..."
+ elif [ $per_project = "Virt" ];
+ then
+ echo "start collect Virt msg..."
+ cvm_name=$(acquire_value Virtcca cvm_name)
+ cvm_username=$(acquire_value Virtcca cvm_username)
+ cvm_password=$(acquire_value Virtcca cvm_password)
+ collect_ccos_msg
+ collect_virtcca_msg $cvm_name $cvm_username $cvm_password
+ echo "Virt collect msg Done..."
+ elif [ $per_project = "Database" ];
+ then
+ echo "start collect Database msg..."
+ mysql_install_path=$(acquire_value Database mysql_install_path)
+ mysql_username=$(acquire_value Database mysql_username)
+ mysql_password=$(acquire_value Database mysql_password)
+ database_name=$(acquire_value Database database_name)
+
+ collect_database $mysql_install_path $mysql_username $mysql_password $database_name
+ echo "Database collect msg Done..."
+ elif [ $per_project = "Acclib" ];
+ then
+ echo "start collect acceleration_library msg..."
+ system_lib=$(acquire_value Acclib system_lib)
+ hmpp_lib=$(acquire_value Acclib HMPP_lib)
+ math_lib=$(acquire_value Acclib math_lib)
+ collect_acceleration_library $system_lib $hmpp_lib $math_lib
+ echo "acceleration_library collect msg Done..."
+ elif [ $per_project = "Storage" ];
+ then
+ echo "start collect Storage msg..."
+ ec_pool_name=$(acquire_value Storage ec_pool_name)
+            collect_storage_acc $ec_pool_name
+ echo "Storage collect msg Done..."
+ elif [ $per_project = "Bigdata" ];
+ then
+ echo "start collect Bigdata msg..."
+ algorithms_path=$(acquire_value Bigdata algorithms_path)
+ algorithms_list=$(acquire_value Bigdata algorithms_name)
+ dataset_list=$(acquire_value Bigdata dataset_list)
+ spark_path=$(acquire_value Bigdata spark_path)
+ database=$(acquire_value Bigdata database)
+ omniadvisor_dir=$(acquire_value Bigdata omniadvisor_dir)
+ mysql_username=$(acquire_value Bigdata mysql_username)
+ mysql_password=$(acquire_value Bigdata mysql_password)
+ mysql_database_name=$(acquire_value Bigdata mysql_database_name)
+ collect_bigdata_kal "${algorithms_list[@]}" $algorithms_path "${dataset_list[@]}"
+ collect_bigdata_operator $spark_path $database
+ collect_bigdata_hbase
+ collect_bigdata_tune_up $omniadvisor_dir $mysql_username $mysql_password $mysql_database_name
+ echo "Bigdata collect msg Done..."
+ elif [ $per_project = "Virtual" ];
+ then
+ echo "start collect Virtual msg..."
+ collect_virtual_host
+ server_name=$(acquire_value Virtual server_name)
+ network=$(acquire_value Virtual network)
+ flavor=$(acquire_value Virtual flavor)
+ volume=$(acquire_value Virtual volume)
+ availability_zone=$(acquire_value Virtual availability_zone)
+ collect_virtual_dpu $server_name $network $flavor $volume $availability_zone
+ echo "Virtual collect msg Done..."
+ elif [ $per_project = "HPC" ];
+ then
+ echo "start collect HPC msg..."
+ acc_lib=$(acquire_value HPC acc_lib)
+ sme=$(acquire_value HPC sme)
+ collect_hpc_acceleration_library $acc_lib
+ collect_sme_acceleration_library $sme
+ echo "HPC collect msg Done..."
+ fi
+ done
+}
+customer_information=$(acquire_value Global information)
+check_customer_info $customer_information
+main
+tar_log_file $customer_information
+
+
+
+
+
+
+
+
diff --git a/tools/config.ini b/tools/config.ini
new file mode 100644
index 0000000000000000000000000000000000000000..875e6673252c602b8648893ddefac52a5a2fc18c
--- /dev/null
+++ b/tools/config.ini
@@ -0,0 +1,78 @@
+[Global]
+# Fill in your identification information, e.g. xxx有限公司
+information=xxx有限公司
+
+[Virtcca]
+check=False
+cvm_name=cvm1
+cvm_username=root
+cvm_password=huawei
+
+
+[Arm]
+check=False
+# Base Kbox container name
+kbox_container=kbox_1
+# Video streaming container name
+video_container=kbox_2
+# Instruction streaming container name
+instuction_container=kbox_3
+
+
+[Database]
+check=False
+mysql_install_path=/usr/local/mysql
+mysql_username=root
+mysql_password=123456
+database_name=xxxxx
+
+
+[Virtual]
+check=False
+# Required for OpenStack verification
+server_name=vm1
+network=port1_vdpa01
+flavor=4U4G80G
+volume=ceph1_centos2
+availability_zone=nova:dpu01
+
+
+[Acclib]
+check=False
+# Path to a binary that links the system library
+system_lib=/home/system_lib.so
+# Path to a binary that links the HMPP library
+HMPP_lib=/home/HMPP_lib.so
+# Path to a binary that links the math library
+math_lib=/home/math_lib.so
+
+
+[Bigdata]
+check=False
+# Machine learning and graph analytics algorithm acceleration library
+algorithms_path=/home/KAL_TEST_DIR
+algorithms_name=algorithms_name1 algorithms_name2 algorithms_name3
+dataset_list=dataset1 dataset2 dataset3
+
+# Operator acceleration
+spark_path=/home/
+database=tpcds_bin_partitioned_orc_1000
+
+# Parameter tuning
+omniadvisor_dir=/home
+mysql_username=root
+mysql_password=123456
+mysql_database_name=xxxxx
+
+
+[Storage]
+check=False
+# The ksal feature requires an EC pool to be created first
+ec_pool_name=ec_pool
+
+
+[HPC]
+check=False
+acc_lib=/home/lib
+sme=/home/lib
+
diff --git a/tools/download_dependency/build_download.sh b/tools/download_dependency/build_download.sh
deleted file mode 100644
index 8f141cfb039c5b2e7b44367cdc2a74572bae64a1..0000000000000000000000000000000000000000
--- a/tools/download_dependency/build_download.sh
+++ /dev/null
@@ -1,17 +0,0 @@
-#!/bin/bash
-# SourceCode build script
-# Copyright: Copyright (c) Huawei Technologies Co., Ltd. All rights reserved.
-
-set -e
-current_dir=$(cd $(dirname "$0"); pwd)
-project_dir=$(dirname $(dirname "${current_dir}"))
-
-umask 077
-
-build_dir=${project_dir}/build/download_dependency
-rm -rf "${build_dir}"
-mkdir -p "${build_dir}"
-
-cd "${build_dir}"
-
-pyinstaller -F "${current_dir}"/src/download.py -p "${current_dir}"/src --name "download_tool"
diff --git a/tools/download_dependency/src/download.py b/tools/download_dependency/src/download.py
deleted file mode 100644
index 8a3321187a2f51d165c447310262203f1e17e966..0000000000000000000000000000000000000000
--- a/tools/download_dependency/src/download.py
+++ /dev/null
@@ -1,190 +0,0 @@
-import os
-import platform
-import subprocess
-import sys
-import shutil
-import tarfile
-import wget
-import download_config
-from download_utils import download_dependence_handler, download_dependence_file
-from download_command_line import process_command_line, CommandLine
-
-FILE = "file"
-SHA256 = "sha256"
-URL = "url"
-SAVE_PATH = "save_path"
-DEFAULT_PATH = "./devkitdependencies"
-DEPENDENCY_FILE = "devkitdependencies.tar.gz"
-
-# A-FOT files
-BASE_URL = "https://gitee.com/openeuler/A-FOT/raw/master/{}"
-A_FOT = "a-fot"
-A_FOT_INI = "a-fot.ini"
-AUTO_FDO_SH = "auto_fdo.sh"
-AUTO_BOLT_SH = "auto_bolt.sh"
-AUTO_PREFETCH = "auto_prefetch.sh"
-SPLIT_JSON_PY = "split_json.py"
-FILE_LIST = (A_FOT, A_FOT_INI, AUTO_FDO_SH, AUTO_BOLT_SH, AUTO_PREFETCH, SPLIT_JSON_PY)
-
-component_collection_map = {
- component.get("component_name"): {
- "download file":
- {
- URL: f"{component.get(FILE)}",
- SAVE_PATH: f"{os.path.join(DEFAULT_PATH, component.get(FILE).split('/')[-1])}"
- },
- "download sha256":
- {
- URL: f"{component.get(SHA256)}",
- SAVE_PATH: f"{os.path.join(DEFAULT_PATH, component.get(SHA256).split('/')[-1])}"
- }
- } for component in (
- download_config.BiShengCompiler,
- download_config.GCCforOpenEuler,
- download_config.BiShengJDK8,
- download_config.BiShengJDK17,
- )
-}
-
-lkp_collection_map = {
- "LkpTests": {
- "download file": {
- URL: f"{download_config.LkpTests.get(FILE)}",
- SAVE_PATH: f"{os.path.join(DEFAULT_PATH, 'lkp-tests.tar.gz')}",
- },
- "download gem dependency": {
- URL: f"{download_config.LkpTests.get('gem dependency')}",
- SAVE_PATH: f"{os.path.join(DEFAULT_PATH, 'gem_dependencies.zip')}",
- },
- },
- "CompatibilityTesting": {
- "download file": {
- URL: f"{download_config.CompatibilityTesting.get(FILE)}",
- SAVE_PATH: f"{os.path.join(DEFAULT_PATH, 'compatibility_testing.tar.gz')}",
- }
- },
-}
-
-
-def download_dependence():
- if not os.path.exists(DEFAULT_PATH):
- os.mkdir(DEFAULT_PATH)
- elif os.path.isfile(DEFAULT_PATH):
- print(f"[ERROR] The file {DEFAULT_PATH} exists. Please rename or remove this file.")
- return False
- else:
- pass
-
- ret = True
- component_collection_map.update(lkp_collection_map)
- for component_name in component_collection_map:
- shell_dict = component_collection_map.get(component_name)
- ret = ret and download_dependence_handler(shell_dict)
- return ret
-
-
-def download_a_fot():
- saved_path = os.path.join(DEFAULT_PATH, A_FOT)
- try:
- os.mkdir(saved_path)
- except FileExistsError as e:
- pass
-
- try:
- for f in FILE_LIST:
- wget.download(BASE_URL.format(f), os.path.join(saved_path, f))
-
- with tarfile.open(os.path.join(DEFAULT_PATH, "a-fot.tar.gz"), "w:gz") as t:
- t.add(saved_path, arcname="a-fot")
- return True
- except Exception as e:
- print(e)
- return False
- finally:
- shutil.rmtree(saved_path)
-
-
-iso_collection_map = {
- component.get("component_name"): {
- "download file":
- {
- URL: f"{component.get(FILE)}",
- SAVE_PATH: f"{os.path.join('./', component.get(FILE).split('/')[-1])}"
- },
- "download sha256":
- {
- URL: f"{component.get(SHA256)}",
- SAVE_PATH: f"{os.path.join('./', component.get(SHA256).split('/')[-1])}"
- }
- } for component in (
- download_config.OpenEuler_2003_LTS,
- download_config.OpenEuler_2003_LTS_SP1,
- download_config.OpenEuler_2003_LTS_SP2,
- download_config.OpenEuler_2003_LTS_SP3,
- download_config.OpenEuler_2003_LTS_SP4,
- download_config.OpenEuler_2009,
- download_config.OpenEuler_2103,
- download_config.OpenEuler_2109,
- download_config.OpenEuler_2203_LTS,
- download_config.OpenEuler_2203_LTS_SP1,
- download_config.OpenEuler_2203_LTS_SP2,
- download_config.OpenEuler_2203_LTS_SP3,
- download_config.OpenEuler_2209,
- download_config.OpenEuler_2303,
- download_config.OpenEuler_2309,
- )
-}
-
-
-def download_iso():
- if platform.system() == "Windows" and CommandLine.download_iso == "auto":
- print("Please use '-iso' option in Linux machine if iso version is not specified. "
- "OpenEuler Operating System is recommended.")
- sys.exit(1)
- if CommandLine.download_iso == "auto":
- result = subprocess.run("grep PRETTY_NAME /etc/os-release".split(' '),
- capture_output=True, shell=False)
- output = result.stdout.decode().strip()
- print(f"Get os-release output: {output}")
-
- CommandLine.download_iso = (output.split("=")[1]
- .replace("(", "")
- .replace(")", "")
- .replace(".", "")
- .replace("\"", "")
- .replace(" ", "_")
- .replace("-", "_"))
- print(f"Auto detect operating system version: {CommandLine.download_iso}")
-
- shell_dict = iso_collection_map.get(CommandLine.download_iso, "")
- if not shell_dict:
- print("Please check /etc/os-release is changed or not.")
- return False
- return download_dependence_file("download file", shell_dict)
-
-
-if __name__ == '__main__':
- try:
- process_command_line(program="download_dependency", description="devkit-pipeline download_dependency tool",
- class_list=[CommandLine])
- if CommandLine.download_iso:
- if download_iso():
- print("-- Download iso success. --")
- else:
- print("Download iso failed.")
- sys.exit(0)
-
- ret = download_dependence()
- if ret:
- print(f"Now compress dependencies to {DEPENDENCY_FILE}...")
- with tarfile.open(DEPENDENCY_FILE, "w:gz") as tar:
- tar.add(DEFAULT_PATH, arcname=os.path.basename(DEFAULT_PATH))
-
- print(f"-- Compress dependencies to {DEPENDENCY_FILE} success. --")
- shutil.rmtree(DEFAULT_PATH)
- print("-- Delete dependencies directory. --")
- else:
- print("-- Download dependencies failed. Please try execute download tool again. --")
- except (KeyboardInterrupt, Exception) as e:
- print(f"\nDownload dependencies failed. {str(e)} Please try execute download tool again.")
- sys.exit(1)
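For orientation, typical invocations of this entry point look as follows (the module name is an assumption; the tool may also ship as a packaged binary). Note that when '-iso' is given, the script downloads the iso and exits without bundling the other dependencies:

    python download.py                          # download all components, then bundle devkitdependencies.tar.gz
    python download.py -iso auto                # auto-detect the local OS and download only its iso
    python download.py -iso openEuler_2203_LTS  # download a specific iso, then exit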
diff --git a/tools/download_dependency/src/download_command_line.py b/tools/download_dependency/src/download_command_line.py
deleted file mode 100644
index 12cd1f5ea82b5fbcb19f9083987284aa2fa9ec52..0000000000000000000000000000000000000000
--- a/tools/download_dependency/src/download_command_line.py
+++ /dev/null
@@ -1,56 +0,0 @@
-import argparse
-import download_config
-
-
-class CommandLine:
- download_iso = None
-
- @classmethod
- def add_options(cls, parser):
- parser.add_argument("-iso", action="store", dest="download_iso", default="",
- choices=[
- component.get("component_name") for component in (
- download_config.OpenEuler_2003_LTS,
- download_config.OpenEuler_2003_LTS_SP1,
- download_config.OpenEuler_2003_LTS_SP2,
- download_config.OpenEuler_2003_LTS_SP3,
- download_config.OpenEuler_2003_LTS_SP4,
- download_config.OpenEuler_2009,
- download_config.OpenEuler_2103,
- download_config.OpenEuler_2109,
- download_config.OpenEuler_2203_LTS,
- download_config.OpenEuler_2203_LTS_SP1,
- download_config.OpenEuler_2203_LTS_SP2,
- download_config.OpenEuler_2203_LTS_SP3,
- download_config.OpenEuler_2209,
- download_config.OpenEuler_2303,
- download_config.OpenEuler_2309,
- {"component_name": "auto"},
- )
- ],
- metavar="SPECIFY_DOWNLOADING_ISO_VERSION",
-                            help="Specify the iso version to download. "
- "Candidate iso versions: "
- "openEuler_2003_LTS, openEuler_2003_LTS_SP1, openEuler_2003_LTS_SP2, "
- "openEuler_2003_LTS_SP3, openEuler_2003_LTS_SP4, "
- "openEuler_2009, openEuler_2103, openEuler_2109, "
- "openEuler_2203_LTS, openEuler_2203_LTS_SP1, openEuler_2203_LTS_SP2, "
- "openEuler_2203_LTS_SP3, "
- "openEuler_2209, openEuler_2303, openEuler_2309. "
-                                 "Pass 'auto' to auto-detect the operating system version on a Linux machine."
- )
-
- @classmethod
- def process_args(cls, args):
- cls.download_iso = args.download_iso
- return cls.download_iso
-
-
-def process_command_line(program, description, class_list):
- parser = argparse.ArgumentParser(prog=program, description=description, add_help=True)
- for klass in class_list:
- klass.add_options(parser)
-
- args = parser.parse_args()
- for klass in class_list:
- klass.process_args(args)
diff --git a/tools/download_dependency/src/download_config.py b/tools/download_dependency/src/download_config.py
deleted file mode 100644
index 1bc885d55143a5972b62996cd72f89ccea91e371..0000000000000000000000000000000000000000
--- a/tools/download_dependency/src/download_config.py
+++ /dev/null
@@ -1,112 +0,0 @@
-BiShengCompiler = {
- "component_name": "BiShengCompiler",
- "file": "https://mirrors.huaweicloud.com/kunpeng/archive/compiler/bisheng_compiler/BiShengCompiler-3.2.0-aarch64-linux.tar.gz",
- "sha256": "https://mirrors.huaweicloud.com/kunpeng/archive/compiler/bisheng_compiler/BiShengCompiler-3.2.0-aarch64-linux.tar.gz.sha256",
-}
-GCCforOpenEuler = {
- "component_name": "GCCforOpenEuler",
- "file": "https://mirrors.huaweicloud.com/kunpeng/archive/compiler/kunpeng_gcc/gcc-10.3.1-2023.12-aarch64-linux.tar.gz",
- "sha256": "https://mirrors.huaweicloud.com/kunpeng/archive/compiler/kunpeng_gcc/gcc-10.3.1-2023.12-aarch64-linux.tar.gz.sha256",
-}
-BiShengJDK8 = {
- "component_name": "BiShengJDK8",
- "file": "https://mirrors.huaweicloud.com/kunpeng/archive/compiler/bisheng_jdk/bisheng-jdk-8u402-linux-aarch64.tar.gz",
- "sha256": "https://mirrors.huaweicloud.com/kunpeng/archive/compiler/bisheng_jdk/bisheng-jdk-8u402-linux-aarch64.tar.gz.sha256",
-}
-BiShengJDK17 = {
- "component_name": "BiShengJDK17",
- "file": "https://mirrors.huaweicloud.com/kunpeng/archive/compiler/bisheng_jdk/bisheng-jdk-17.0.10-linux-aarch64.tar.gz",
- "sha256": "https://mirrors.huaweicloud.com/kunpeng/archive/compiler/bisheng_jdk/bisheng-jdk-17.0.10-linux-aarch64.tar.gz.sha256",
-}
-
-
-LkpTests = {
- "component_name": "LkpTests",
- "file": "https://gitee.com/openeuler/devkit-pipeline/releases/download/v0.2/lkp-tests.tar.gz",
- "gem dependency": "https://gitee.com/openeuler/devkit-pipeline/releases/download/v0.2/gem_dependencies.zip",
-}
-
-CompatibilityTesting = {
- "component_name": "CompatibilityTesting",
- "file": "https://gitee.com/openeuler/devkit-pipeline/releases/download/v0.2/compatibility_testing.tar.gz",
-}
-
-
-OpenEuler_2003_LTS = {
- "component_name": "openEuler_2003_LTS",
- "file": "https://mirrors.huaweicloud.com/openeuler/openEuler-20.03-LTS/ISO/aarch64/openEuler-20.03-LTS-everything-aarch64-dvd.iso",
- "sha256": "https://mirrors.huaweicloud.com/openeuler/openEuler-20.03-LTS/ISO/aarch64/openEuler-20.03-LTS-everything-aarch64-dvd.iso.sha256sum",
-}
-OpenEuler_2003_LTS_SP1 = {
- "component_name": "openEuler_2003_LTS_SP1",
- "file": "https://mirrors.huaweicloud.com/openeuler/openEuler-20.03-LTS-SP1/ISO/aarch64/openEuler-20.03-LTS-SP1-everything-aarch64-dvd.iso",
- "sha256": "https://mirrors.huaweicloud.com/openeuler/openEuler-20.03-LTS-SP1/ISO/aarch64/openEuler-20.03-LTS-SP1-everything-aarch64-dvd.iso.sha256sum",
-}
-OpenEuler_2003_LTS_SP2 = {
- "component_name": "openEuler_2003_LTS_SP2",
- "file": "https://mirrors.huaweicloud.com/openeuler/openEuler-20.03-LTS-SP2/ISO/aarch64/openEuler-20.03-LTS-SP2-everything-aarch64-dvd.iso",
- "sha256": "https://mirrors.huaweicloud.com/openeuler/openEuler-20.03-LTS-SP2/ISO/aarch64/openEuler-20.03-LTS-SP2-everything-aarch64-dvd.iso.sha256sum",
-}
-OpenEuler_2003_LTS_SP3 = {
- "component_name": "openEuler_2003_LTS_SP3",
- "file": "https://mirrors.huaweicloud.com/openeuler/openEuler-20.03-LTS-SP3/ISO/aarch64/openEuler-20.03-LTS-SP3-everything-aarch64-dvd.iso",
- "sha256": "https://mirrors.huaweicloud.com/openeuler/openEuler-20.03-LTS-SP3/ISO/aarch64/openEuler-20.03-LTS-SP3-everything-aarch64-dvd.iso.sha256sum",
-}
-OpenEuler_2003_LTS_SP4 = {
- "component_name": "openEuler_2003_LTS_SP4",
- "file": "https://mirrors.huaweicloud.com/openeuler/openEuler-20.03-LTS-SP4/ISO/aarch64/openEuler-20.03-LTS-SP4-everything-aarch64-dvd.iso",
- "sha256": "https://mirrors.huaweicloud.com/openeuler/openEuler-20.03-LTS-SP4/ISO/aarch64/openEuler-20.03-LTS-SP4-everything-aarch64-dvd.iso.sha256sum",
-}
-
-OpenEuler_2009 = {
- "component_name": "openEuler_2009",
- "file": "https://mirrors.huaweicloud.com/openeuler/openEuler-20.09/ISO/aarch64/openEuler-20.09-everything-aarch64-dvd.iso",
- "sha256": "https://mirrors.huaweicloud.com/openeuler/openEuler-20.09/ISO/aarch64/openEuler-20.09-everything-aarch64-dvd.iso.sha256sum",
-}
-OpenEuler_2103 = {
- "component_name": "openEuler_2103",
- "file": "https://mirrors.huaweicloud.com/openeuler/openEuler-21.03/ISO/aarch64/openEuler-21.03-everything-aarch64-dvd.iso",
- "sha256": "https://mirrors.huaweicloud.com/openeuler/openEuler-21.03/ISO/aarch64/openEuler-21.03-everything-aarch64-dvd.iso.sha256sum",
-}
-OpenEuler_2109 = {
- "component_name": "openEuler_2109",
- "file": "https://mirrors.huaweicloud.com/openeuler/openEuler-21.09/ISO/aarch64/openEuler-21.09-everything-aarch64-dvd.iso",
- "sha256": "https://mirrors.huaweicloud.com/openeuler/openEuler-21.09/ISO/aarch64/openEuler-21.09-everything-aarch64-dvd.iso.sha256sum",
-}
-
-OpenEuler_2203_LTS = {
- "component_name": "openEuler_2203_LTS",
- "file": "https://mirrors.huaweicloud.com/openeuler/openEuler-22.03-LTS/ISO/aarch64/openEuler-22.03-LTS-everything-aarch64-dvd.iso",
- "sha256": "https://mirrors.huaweicloud.com/openeuler/openEuler-22.03-LTS/ISO/aarch64/openEuler-22.03-LTS-everything-aarch64-dvd.iso.sha256sum",
-}
-OpenEuler_2203_LTS_SP1 = {
- "component_name": "openEuler_2203_LTS_SP1",
- "file": "https://mirrors.huaweicloud.com/openeuler/openEuler-22.03-LTS-SP1/ISO/aarch64/openEuler-22.03-LTS-SP1-everything-aarch64-dvd.iso",
- "sha256": "https://mirrors.huaweicloud.com/openeuler/openEuler-22.03-LTS-SP1/ISO/aarch64/openEuler-22.03-LTS-SP1-everything-aarch64-dvd.iso.sha256sum",
-}
-OpenEuler_2203_LTS_SP2 = {
- "component_name": "openEuler_2203_LTS_SP2",
- "file": "https://mirrors.huaweicloud.com/openeuler/openEuler-22.03-LTS-SP2/ISO/aarch64/openEuler-22.03-LTS-SP2-everything-aarch64-dvd.iso",
- "sha256": "https://mirrors.huaweicloud.com/openeuler/openEuler-22.03-LTS-SP2/ISO/aarch64/openEuler-22.03-LTS-SP2-everything-aarch64-dvd.iso.sha256sum",
-}
-OpenEuler_2203_LTS_SP3 = {
- "component_name": "openEuler_2203_LTS_SP3",
- "file": "https://mirrors.huaweicloud.com/openeuler/openEuler-22.03-LTS-SP3/ISO/aarch64/openEuler-22.03-LTS-SP3-everything-aarch64-dvd.iso",
- "sha256": "https://mirrors.huaweicloud.com/openeuler/openEuler-22.03-LTS-SP3/ISO/aarch64/openEuler-22.03-LTS-SP3-everything-aarch64-dvd.iso.sha256sum",
-}
-
-OpenEuler_2209 = {
- "component_name": "openEuler_2209",
- "file": "https://mirrors.huaweicloud.com/openeuler/openEuler-22.09/ISO/aarch64/openEuler-22.09-everything-aarch64-dvd.iso",
- "sha256": "https://mirrors.huaweicloud.com/openeuler/openEuler-22.09/ISO/aarch64/openEuler-22.09-everything-aarch64-dvd.iso.sha256sum",
-}
-OpenEuler_2303 = {
- "component_name": "openEuler_2303",
- "file": "https://mirrors.huaweicloud.com/openeuler/openEuler-23.03/ISO/aarch64/openEuler-23.03-everything-aarch64-dvd.iso",
- "sha256": "https://mirrors.huaweicloud.com/openeuler/openEuler-23.03/ISO/aarch64/openEuler-23.03-everything-aarch64-dvd.iso.sha256sum",
-}
-OpenEuler_2309 = {
- "component_name": "openEuler_2309",
- "file": "https://mirrors.huaweicloud.com/openeuler/openEuler-23.09/ISO/aarch64/openEuler-23.09-everything-aarch64-dvd.iso",
- "sha256": "https://mirrors.huaweicloud.com/openeuler/openEuler-23.09/ISO/aarch64/openEuler-23.09-everything-aarch64-dvd.iso.sha256sum",
-}
diff --git a/tools/download_dependency/src/download_utils.py b/tools/download_dependency/src/download_utils.py
deleted file mode 100644
index 6f422176b0b2f382f1f478f357521a2a4668f69c..0000000000000000000000000000000000000000
--- a/tools/download_dependency/src/download_utils.py
+++ /dev/null
@@ -1,37 +0,0 @@
-import os
-import sys
-import urllib.error
-import wget
-
-
-def download_dependence_handler(shell_dict):
-    ret = True
-    for shell_cmd in shell_dict:
-        # one failed file must fail the whole component, not just the last download
-        ret = download_dependence_file(shell_cmd, shell_dict) and ret
- return ret
-
-
-def download_dependence_file(shell_cmd, shell_dict):
- ret = True
- url_and_save_path = shell_dict.get(shell_cmd)
- try:
- print(f"Downloading from {url_and_save_path.get('url')}")
- download_result = wget.download(
- url_and_save_path.get('url'), url_and_save_path.get('save_path')
- )
- print()
- except (TimeoutError, urllib.error.URLError, OSError) as e:
- print(f"[ERROR] download error occurs: {str(e)} "
- f"\nPlease visit following url and download dependencies to default directory."
- f"\n\t{url_and_save_path.get('url')}"
- )
- raise OSError(f"download error occurs: {str(e)}")
-
- if not os.path.isfile(download_result):
- print(f"[ERROR] Download dependencies failed. "
- f"Please visit following url and download dependencies to default directory."
- f"\n\t{url_and_save_path.get('url')}"
- )
- ret = False
- return ret
-
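For reference, the shell_dict consumed here has the shape produced by the collection maps in download.py: each entry maps a step name to a url/save-path pair. A minimal sketch (URL and path illustrative only):

    shell_dict = {
        "download file": {
            "url": "https://example.com/pkg.tar.gz",          # illustrative URL
            "save_path": "./devkitdependencies/pkg.tar.gz",   # illustrative save path
        },
    }
    ok = download_dependence_handler(shell_dict)  # True only if every listed file lands on disk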
diff --git a/tools/install_dependency/build_install.sh b/tools/install_dependency/build_install.sh
deleted file mode 100644
index e9fcc7beee372073ae3917dff0da9f8bc0047918..0000000000000000000000000000000000000000
--- a/tools/install_dependency/build_install.sh
+++ /dev/null
@@ -1,19 +0,0 @@
-#!/bin/bash
-# SourceCode build script
-# Copyright: Copyright (c) Huawei Technologies Co., Ltd. All rights reserved.
-
-set -e
-current_dir=$(cd $(dirname "$0"); pwd)
-project_dir=$(dirname $(dirname "${current_dir}"))
-
-umask 077
-
-build_dir=${project_dir}/build/install_dependency
-rm -rf "${build_dir}"
-mkdir -p "${build_dir}"
-
-cd "${build_dir}"
-
-pyinstaller -F "${current_dir}"/src/install.py -p "${current_dir}/src:${project_dir}/tools/download_dependency/src" --add-data "../../build/component:component" --name "deploy_tool"
-
-cp "${current_dir}"/config/machine.yaml "${build_dir}"/dist/machine.yaml
diff --git a/tools/install_dependency/config/machine.yaml b/tools/install_dependency/config/machine.yaml
deleted file mode 100644
index 65669118d0c0b40bdb53c8185d1e2b367d1a7a49..0000000000000000000000000000000000000000
--- a/tools/install_dependency/config/machine.yaml
+++ /dev/null
@@ -1,12 +0,0 @@
-user: root
-pkey: /root/.ssh/id_rsa
-scanner:
- - 192.168.0.1
- - 192.168.0.2
- - 192.168.0.3
-builder:
- - 192.168.0.1
-executor:
- - 192.168.0.1
-devkit:
- - 192.168.0.4
\ No newline at end of file
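For reference, install.py loads this file with yaml.safe_load, and BaseCheck then enforces that user and pkey are present and that each role key holds a list of IPv4 addresses. A minimal loading sketch, assuming PyYAML:

    import yaml

    with open("machine.yaml", "r") as fh:
        config = yaml.safe_load(fh)

    print(config["user"])     # "root"
    print(config["builder"])  # ["192.168.0.1"]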
diff --git a/tools/install_dependency/src/command_line.py b/tools/install_dependency/src/command_line.py
deleted file mode 100644
index 32a4363bc52cf6feaf4b1f4f2e65bcd256893b58..0000000000000000000000000000000000000000
--- a/tools/install_dependency/src/command_line.py
+++ /dev/null
@@ -1,34 +0,0 @@
-import argparse
-
-DEFAULT_YAML_PATH = "./machine.yaml"
-
-
-class CommandLine:
- yaml_path = DEFAULT_YAML_PATH
- iso_path = None
- debug = False
-
- @classmethod
- def add_options(cls, parser):
- parser.add_argument("-f", "--config", action="store", dest="yaml_path", default=DEFAULT_YAML_PATH,
- help="Assign yaml config file path. Default path is 'machine.yaml' in current directory.")
- parser.add_argument("-iso", action="store", dest="iso_path", default=None,
- help="Assign prepared iso file path and deploy iso. Only deploy in executor and devkit machine.")
- parser.add_argument("--debug", action="store_true", dest="debug", default=False, help="Open debug log.")
-
- @classmethod
- def process_args(cls, args):
- cls.yaml_path = args.yaml_path if args.yaml_path and args.yaml_path != "./" else DEFAULT_YAML_PATH
- cls.iso_path = args.iso_path
- cls.debug = args.debug
- return cls.yaml_path
-
-
-def process_command_line(program, description, class_list):
- parser = argparse.ArgumentParser(prog=program, description=description, add_help=True)
- for klass in class_list:
- klass.add_options(parser)
-
- args = parser.parse_args()
- for klass in class_list:
- klass.process_args(args)
diff --git a/tools/install_dependency/src/constant.py b/tools/install_dependency/src/constant.py
deleted file mode 100644
index 5bc23c505d398536376cbd64516c5c432c6f8a7b..0000000000000000000000000000000000000000
--- a/tools/install_dependency/src/constant.py
+++ /dev/null
@@ -1,12 +0,0 @@
-USER = "user"
-PKEY = "pkey"
-PASSWORD = "password"
-SCANNER = "scanner"
-BUILDER = "builder"
-EXECUTOR = "executor"
-DEVKIT = "devkit"
-MACHINE = "machine"
-DEPENDENCY_FILE = "devkitdependencies.tar.gz"
-DEPENDENCY_DIR = "devkitdependencies"
-
-INSTRUCTION = "instruction"
diff --git a/tools/install_dependency/src/exception/__init__.py b/tools/install_dependency/src/exception/__init__.py
deleted file mode 100644
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..0000000000000000000000000000000000000000
diff --git a/tools/install_dependency/src/exception/connect_exception.py b/tools/install_dependency/src/exception/connect_exception.py
deleted file mode 100644
index 96fd3af99fcbba32f90ebfec4ff02783901c9f80..0000000000000000000000000000000000000000
--- a/tools/install_dependency/src/exception/connect_exception.py
+++ /dev/null
@@ -1,20 +0,0 @@
-class ConnectException(Exception):
- def __init__(self):
- super(ConnectException, self).__init__()
- self.status = ""
- self.value = ""
-
-
-class ConnectRemoteException(ConnectException):
- def __init__(self):
- super(ConnectRemoteException, self).__init__()
-
-
-class CreatePkeyFailedException(ConnectException):
- def __init__(self):
- super(CreatePkeyFailedException, self).__init__()
-
-
-class NotMatchedMachineTypeException(ConnectException):
- def __init__(self):
- super(NotMatchedMachineTypeException, self).__init__()
diff --git a/tools/install_dependency/src/handler/__init__.py b/tools/install_dependency/src/handler/__init__.py
deleted file mode 100644
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..0000000000000000000000000000000000000000
diff --git a/tools/install_dependency/src/handler/base_yaml_check.py b/tools/install_dependency/src/handler/base_yaml_check.py
deleted file mode 100644
index 748a8b36c9e48a829e25f6b730ad036a1076b6a5..0000000000000000000000000000000000000000
--- a/tools/install_dependency/src/handler/base_yaml_check.py
+++ /dev/null
@@ -1,64 +0,0 @@
-import re
-import logging
-import constant
-from handler.handler_and_node import Handler
-from machine.klass_dict import KLASS_DICT
-
-LOGGER = logging.getLogger("install_dependency")
-MIN_SET = (constant.USER, constant.PKEY, constant.INSTRUCTION)
-MAX_SET = (constant.USER, constant.PKEY, constant.PASSWORD,
- constant.SCANNER, constant.BUILDER, constant.EXECUTOR, constant.DEVKIT, constant.INSTRUCTION)
-
-
-class BaseCheck(Handler):
- IPV4_REG = r"^(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$"
-
- def handle(self, data) -> bool:
- LOGGER.debug("BaseCheck start!")
- key_set = set(data.keys())
- if not key_set.issuperset(MIN_SET) or not key_set.issubset(MAX_SET):
- LOGGER.error("Yaml file content not correct. Wrong yaml mappings.")
- return False
-
- if not BaseCheck.check_user(data):
- return False
- if not BaseCheck.check_pkey(data):
- return False
- if not BaseCheck.check_machine_ip(data):
- return False
-
- LOGGER.debug(f"After Base Check, data: {data}")
- return True
-
- @staticmethod
- def check_user(data):
- user_name = data.get(constant.USER, "")
- if not user_name:
- LOGGER.error("Yaml file content not correct. Empty user name.")
- return False
- return True
-
- @staticmethod
- def check_pkey(data):
- pkey_path = data.get(constant.PKEY, "")
- if not pkey_path:
- LOGGER.error("Yaml file content not correct. Empty pkey.")
- return False
- return True
-
- @staticmethod
- def check_machine_ip(data):
- for machine_type in (set(KLASS_DICT.keys()) & set(data.keys())):
- if not data.get(machine_type) or not isinstance(data.get(machine_type), list):
- LOGGER.error(f"Yaml file content not correct. Yaml file {machine_type} value not sequence.")
- return False
- for ip in data.get(machine_type):
- if not BaseCheck.validate_ip(ip):
-                    LOGGER.error(f"Yaml file content is not correct. The given ip {ip} is not valid.")
- return False
- return True
-
- @staticmethod
- def validate_ip(ip_address: str):
- return re.match(BaseCheck.IPV4_REG, ip_address)
-
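validate_ip returns the re.Match object (or None), so callers use it as a plain truth value; for instance:

    assert BaseCheck.validate_ip("192.168.0.1")
    assert not BaseCheck.validate_ip("256.0.0.1")  # octets above 255 are rejected by the regex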
diff --git a/tools/install_dependency/src/handler/connect_check.py b/tools/install_dependency/src/handler/connect_check.py
deleted file mode 100644
index fbe8e877e839fd92c71c149b8a8b224aa9507fc0..0000000000000000000000000000000000000000
--- a/tools/install_dependency/src/handler/connect_check.py
+++ /dev/null
@@ -1,67 +0,0 @@
-import logging
-import socket
-
-import constant
-from handler.handler_and_node import Handler
-from machine.local_machine import LocalMachine
-from machine.klass_dict import KLASS_DICT
-from exception.connect_exception import ConnectException
-
-LOGGER = logging.getLogger("install_dependency")
-
-ROLE_COMPONENT = {
- "scanner": ["BiShengJDK17"],
- "builder": ["GCCforOpenEuler", "BiShengCompiler", "BiShengJDK17", "BiShengJDK8"],
- "executor": ["BiShengJDK17", "LkpTests"]
-}
-
-
-class ConnectCheck(Handler):
-
- def handle(self, data) -> bool:
- LOGGER.debug("ConnectCheck start!")
- local_ip = ConnectCheck.get_local_ip()
-
-        ret = True
-        for role in (set(KLASS_DICT.keys()) & set(data.keys())):
-            # check every role so that all connection problems surface in one run
-            ret = ConnectCheck.machine_role_check(data, role, local_ip) and ret
-        return ret
-
-    @staticmethod
-    def machine_role_check(data, role, local_ip):
-        ip_list = data.get(role)
-        klass = KLASS_DICT.get(role)
-        # setdefault: recreating the dict per role would discard machines added by earlier roles
-        data.setdefault(constant.MACHINE, dict())
-        for ip in ip_list:
- if ip == local_ip or ip == "127.0.0.1":
- ip = "127.0.0.1"
- machine_instance = data[constant.MACHINE].get(ip, LocalMachine(ip))
-                # 'devkit' has no component list of its own, hence the .get fallback
-                machine_instance.add_component(ROLE_COMPONENT.get(role, []))
- data[constant.MACHINE][ip] = machine_instance
- continue
- try:
- machine_instance = data[constant.MACHINE].get(ip, klass(ip, data[constant.USER], data[constant.PKEY],
- data.get(constant.PASSWORD, None)))
-                machine_instance.add_component(ROLE_COMPONENT.get(role, []))
- data[constant.MACHINE][ip] = machine_instance
- except ConnectException:
- LOGGER.error(f"-- [error] Connect {ip} failed. Please check.")
- del data[constant.MACHINE]
- return False
- except Exception as e:
- LOGGER.error(f"-- [error] Connect {ip} failed. Because of {str(e)}")
- del data[constant.MACHINE]
- return False
- if data.get(constant.INSTRUCTION) == "deploy_iso" and role in ("devkit", "executor"):
- machine_instance.set_mirror()
- return True
-
-    @staticmethod
-    def get_local_ip():
-        sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
-        ip = ""
-        try:
-            sock.connect(("8.8.8.8", 80))
-            ip = sock.getsockname()[0]
-        except OSError:
-            # no route to the probe address (e.g. offline); fall back to loopback below
-            pass
-        finally:
-            sock.close()
-        return ip if ip else "127.0.0.1"
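The UDP trick above generates no network traffic: connect() on a SOCK_DGRAM socket merely asks the kernel to choose the outgoing interface, whose address getsockname() then reports. A standalone sketch of the same idea:

    import socket

    def primary_ip(probe=("8.8.8.8", 80)):
        # No packet is sent: connecting a UDP socket only selects a route.
        sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        try:
            sock.connect(probe)
            return sock.getsockname()[0]
        except OSError:
            return "127.0.0.1"  # offline / no route -> fall back to loopback
        finally:
            sock.close()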
diff --git a/tools/install_dependency/src/handler/gather_package.py b/tools/install_dependency/src/handler/gather_package.py
deleted file mode 100644
index edaed4e79e8cb48b8657e353bbb4a19cc0dcc6dd..0000000000000000000000000000000000000000
--- a/tools/install_dependency/src/handler/gather_package.py
+++ /dev/null
@@ -1,72 +0,0 @@
-import logging
-import os
-import subprocess
-import constant
-from download import download_dependence, component_collection_map, lkp_collection_map
-from handler.handler_and_node import Handler
-
-LOGGER = logging.getLogger("install_dependency")
-
-
-class GatherPackage(Handler):
-
- def handle(self, data) -> bool:
- instruction_to_func_dict = {
- "deploy_iso": GatherPackage.deploy_iso_handle,
- "default": GatherPackage.default_handle,
- }
- return instruction_to_func_dict.get(data.get(constant.INSTRUCTION, "default"))()
-
- @staticmethod
- def deploy_iso_handle():
- LOGGER.info("Iso file already checked.")
- return GatherPackage.default_handle()
-
- @staticmethod
- def default_handle():
- LOGGER.debug("GatherPackage start!")
- if GatherPackage.check_default_path_available():
- LOGGER.info("Dependencies ready.")
- return True
-
- if os.path.isfile(constant.DEPENDENCY_DIR):
- LOGGER.error(f"The file {constant.DEPENDENCY_DIR} exists. Please rename or remove this file.")
- return False
-
- try:
- ret = download_dependence()
- except Exception as e:
- LOGGER.error(f"Download dependencies failed. {str(e)}. Please execute download tool.")
- return False
- if not ret:
- LOGGER.error("Download dependencies failed. Please execute download tool.")
- return False
- LOGGER.info("Download dependencies success.")
- return True
-
- @staticmethod
- def check_default_path_available():
- if os.path.exists(constant.DEPENDENCY_FILE):
- try:
-                print(f"Now extracting files from {constant.DEPENDENCY_FILE}:")
- subprocess.run(f"tar -zxvf {constant.DEPENDENCY_FILE}".split(' '),
- capture_output=False, shell=False, stderr=subprocess.STDOUT)
- except (FileExistsError,) as e:
-                LOGGER.warning(f"{constant.DEPENDENCY_FILE} may already be extracted.")
- except Exception as e:
- LOGGER.error(f"Extract {constant.DEPENDENCY_FILE} failed. {str(e)}")
- return False
-
- if not os.path.isdir(constant.DEPENDENCY_DIR):
-            LOGGER.warning(f"The directory {constant.DEPENDENCY_DIR} does not exist.")
- return False
- component_collection_map.update(lkp_collection_map)
- for component_name in component_collection_map:
- shell_dict = component_collection_map.get(component_name)
- for shell_cmd in shell_dict:
- url_and_save_path = shell_dict.get(shell_cmd)
- component = url_and_save_path.get("save_path")
- if not os.path.isfile(component):
-                    LOGGER.warning(f"The file {component} does not exist.")
- return False
- return True
diff --git a/tools/install_dependency/src/handler/handler_and_node.py b/tools/install_dependency/src/handler/handler_and_node.py
deleted file mode 100644
index 5d5f3dc9fb2da1536bc7198d13b55b5fbafeae0b..0000000000000000000000000000000000000000
--- a/tools/install_dependency/src/handler/handler_and_node.py
+++ /dev/null
@@ -1,34 +0,0 @@
-from abc import abstractmethod
-
-
-class Handler:
- """处理器基类"""
-
- def __init__(self):
- pass
-
- @abstractmethod
- def handle(self, data) -> bool:
- pass
-
-
-class Node:
- """链表节点"""
-
- def __init__(self, handler=None):
- self.handler: Handler = handler
- self.next_node: Node = None
-
- def get_next_node(self):
- return self.next_node
-
- def set_next_node(self, node):
- self.next_node = node
-
- def execute(self, data):
- ret: bool = self.handler.handle(data)
- if not ret:
- return False
- if self.next_node:
- return self.next_node.execute(data)
- return True
diff --git a/tools/install_dependency/src/handler/install_package.py b/tools/install_dependency/src/handler/install_package.py
deleted file mode 100644
index ab81daa10a043a219b85ff65c8e11d7514aa167a..0000000000000000000000000000000000000000
--- a/tools/install_dependency/src/handler/install_package.py
+++ /dev/null
@@ -1,28 +0,0 @@
-import logging
-import multiprocessing
-
-import constant
-from handler.handler_and_node import Handler
-
-LOGGER = logging.getLogger("install_dependency")
-
-
-class InstallPackage(Handler):
-
- def handle(self, data):
- LOGGER.debug("Install Package start!")
- jobs = []
- for _, machine in data[constant.MACHINE].items():
- process = multiprocessing.Process(target=process_work, args=(machine,))
- jobs.append(process)
- process.start()
- for job in jobs:
- job.join()
- return True
-
-
-def process_work(machine):
- try:
- machine.install_components()
- except (OSError, IOError) as e:
- LOGGER.error(f"Remote machine {machine.ip} occur Error: {str(e)}")
diff --git a/tools/install_dependency/src/handler/pipeline.py b/tools/install_dependency/src/handler/pipeline.py
deleted file mode 100644
index dbeaca6ed6594f8c5bf4532343dcae2dd1be7962..0000000000000000000000000000000000000000
--- a/tools/install_dependency/src/handler/pipeline.py
+++ /dev/null
@@ -1,24 +0,0 @@
-import logging
-
-from handler.handler_and_node import Node
-
-LOGGER = logging.getLogger("install_dependency")
-
-
-class PipeLine:
- """维护一个链表"""
-
- def __init__(self, data):
- self.head: Node = Node()
- self.tail: Node = self.head
- self.data = data
-
- def start(self):
- if self.head.get_next_node() and self.head.get_next_node().execute(self.data):
- print("-- Program finished. --")
-
- def add_tail(self, *handlers):
- for handler in handlers:
- node = Node(handler)
- self.tail.set_next_node(node)
- self.tail = node
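Taken together, Handler, Node and PipeLine form a small chain of responsibility: each node runs its handler, and execution advances only while handlers return True. A toy sketch with hypothetical handlers (assumes Handler and PipeLine from the modules above):

    class CheckStep(Handler):
        def handle(self, data) -> bool:
            return bool(data.get("ok"))  # returning False stops the chain here

    class PrintStep(Handler):
        def handle(self, data) -> bool:
            print(f"processing {data}")
            return True

    pipe = PipeLine({"ok": True})
    pipe.add_tail(CheckStep(), PrintStep())  # nodes run in insertion order
    pipe.start()  # prints the data, then '-- Program finished. --'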
diff --git a/tools/install_dependency/src/install.py b/tools/install_dependency/src/install.py
deleted file mode 100644
index 62b77b5589ffe2e6276bb3c0bfceee0f8912ea16..0000000000000000000000000000000000000000
--- a/tools/install_dependency/src/install.py
+++ /dev/null
@@ -1,72 +0,0 @@
-import os
-import subprocess
-import sys
-import logging
-import yaml
-
-import constant
-from log import config_logging
-from command_line import process_command_line, CommandLine
-
-from handler.pipeline import PipeLine
-from handler.base_yaml_check import BaseCheck
-from handler.connect_check import ConnectCheck
-from handler.gather_package import GatherPackage
-from handler.install_package import InstallPackage
-
-LOGGER = logging.getLogger("install_dependency")
-PIPELINE = [BaseCheck(), ConnectCheck(), GatherPackage(), InstallPackage()]
-ISO_VERIFY_FLAG_STRING = "ISO 9660 CD-ROM filesystem data"
-
-
-def read_yaml_file(yaml_path):
- try:
- with open(yaml_path, "r") as file:
- config_dict = yaml.safe_load(file)
- except (FileNotFoundError, IsADirectoryError) as e:
- LOGGER.error(f"Yaml file is not in specified path. Error: {str(e)}")
- sys.exit(1)
- except (yaml.parser.ParserError,
- yaml.scanner.ScannerError,
- yaml.composer.ComposerError,
- yaml.constructor.ConstructorError) as e:
- LOGGER.error(f"Incorrect yaml file. Error: {str(e)}")
- sys.exit(1)
- return config_dict
-
-
-def check_iso_available(iso_path):
- if not os.path.isfile(iso_path):
- LOGGER.error(f"ISO file is not in specified path. Error: {iso_path} file not found.")
- sys.exit(1)
- try:
- result = subprocess.run(f"file -b {iso_path}".split(' '),
- capture_output=True, shell=False)
- output = result.stdout.decode().strip()
-        if output.find(ISO_VERIFY_FLAG_STRING) == -1:
-            LOGGER.error("Verify iso result: not available. Please re-download the iso file.")
-            sys.exit(1)
-    except Exception as e:
- LOGGER.error(f"Verify iso file integrity occur error: {str(e)}")
- sys.exit(1)
-
-
-if __name__ == '__main__':
- try:
- process_command_line(program="install_dependency", description="devkit-pipeline install_dependency tool",
- class_list=[CommandLine])
- config_logging(CommandLine.debug)
- config_dict = read_yaml_file(CommandLine.yaml_path)
-
- if CommandLine.iso_path:
- config_dict[constant.INSTRUCTION] = "deploy_iso"
- check_iso_available(CommandLine.iso_path)
- else:
- config_dict[constant.INSTRUCTION] = "default"
- LOGGER.debug(f"-- config_dict: {config_dict}")
-
- pipe = PipeLine(config_dict)
- pipe.add_tail(*PIPELINE)
- pipe.start()
- except (KeyboardInterrupt, Exception) as e:
- print(f"[warning] Program Exited. {str(e)}")
diff --git a/tools/install_dependency/src/lkp_collect_map.py b/tools/install_dependency/src/lkp_collect_map.py
deleted file mode 100644
index 2303e2c1f6b2d3e14cbb0f2f8bbace1bdac5fb92..0000000000000000000000000000000000000000
--- a/tools/install_dependency/src/lkp_collect_map.py
+++ /dev/null
@@ -1,26 +0,0 @@
-import os.path
-
-from utils import base_path
-
-CURRENT_DEFAULT_PATH = "./devkitdependencies"
-
-lkp_collection_map = {
- "LkpTests": {
- "download file": {
- "save_path": f"{os.path.join(CURRENT_DEFAULT_PATH, 'lkp-tests.tar.gz')}",
- },
- "download gem dependency": {
- "save_path": f"{os.path.join(CURRENT_DEFAULT_PATH, 'gem_dependencies.zip')}",
- },
- },
- "CompatibilityTesting": {
- "download file": {
- "save_path": f"{os.path.join(CURRENT_DEFAULT_PATH, 'compatibility_testing.tar.gz')}",
- }
- },
- "DevkitDistribute": {
- "download file": {
- "save_path": f"{os.path.join(base_path('component'), 'DevkitDistribute', 'devkit_distribute.tar.gz')}",
- }
- }
-}
diff --git a/tools/install_dependency/src/log.py b/tools/install_dependency/src/log.py
deleted file mode 100644
index 1f1fe3dca78e56e6dca38a082f1eb63b3a850e05..0000000000000000000000000000000000000000
--- a/tools/install_dependency/src/log.py
+++ /dev/null
@@ -1,18 +0,0 @@
-import logging
-import sys
-
-
-def config_logging(debug=False):
- logger = logging.getLogger("install_dependency")
- logger.setLevel(logging.DEBUG)
-
- formatter = logging.Formatter(
- "[%(asctime)s] [%(levelname)s] [processID:%(process)d]"
- " [%(threadName)s] [%(module)s:%(funcName)s:%(lineno)d]"
- " %(message)s")
-
- handler = logging.StreamHandler(sys.stdout)
- handler.setLevel(logging.DEBUG if debug else logging.INFO)
- handler.setFormatter(formatter)
-
- logger.addHandler(handler)
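Note that config_logging leaves the logger itself at DEBUG and filters at the handler, so the debug flag only changes what reaches stdout. A usage sketch with an illustrative output line:

    import logging
    from log import config_logging  # the module defined above

    config_logging(debug=True)
    logging.getLogger("install_dependency").debug("hello")
    # e.g. [2024-01-01 12:00:00,000] [DEBUG] [processID:4242] [MainThread] [demo:<module>:5] hello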
diff --git a/tools/install_dependency/src/machine/__init__.py b/tools/install_dependency/src/machine/__init__.py
deleted file mode 100644
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..0000000000000000000000000000000000000000
diff --git a/tools/install_dependency/src/machine/builder_machine.py b/tools/install_dependency/src/machine/builder_machine.py
deleted file mode 100644
index 8ee4479e37a5800eaf9f829d1115739e3fb82120..0000000000000000000000000000000000000000
--- a/tools/install_dependency/src/machine/builder_machine.py
+++ /dev/null
@@ -1,8 +0,0 @@
-import constant
-from machine.machine import Machine
-
-
-class BuilderMachine(Machine):
- def __init__(self, ip, user, pkey, password=None):
- super(BuilderMachine, self).__init__(ip, user, pkey, password)
- self.role = constant.BUILDER
diff --git a/tools/install_dependency/src/machine/devkit_machine.py b/tools/install_dependency/src/machine/devkit_machine.py
deleted file mode 100644
index 35147f46c3d95cb5da27fd0c67a577e04b794f53..0000000000000000000000000000000000000000
--- a/tools/install_dependency/src/machine/devkit_machine.py
+++ /dev/null
@@ -1,8 +0,0 @@
-import constant
-from machine.machine import Machine
-
-
-class DevkitMachine(Machine):
- def __init__(self, ip, user, pkey, password=None):
- super(DevkitMachine, self).__init__(ip, user, pkey, password)
- self.role = constant.DEVKIT
diff --git a/tools/install_dependency/src/machine/executor_machine.py b/tools/install_dependency/src/machine/executor_machine.py
deleted file mode 100644
index 923c7a133a7a743828700e73da9e49ee1a969874..0000000000000000000000000000000000000000
--- a/tools/install_dependency/src/machine/executor_machine.py
+++ /dev/null
@@ -1,8 +0,0 @@
-import constant
-from machine.machine import Machine
-
-
-class ExecutorMachine(Machine):
- def __init__(self, ip, user, pkey, password=None):
- super(ExecutorMachine, self).__init__(ip, user, pkey, password)
- self.role = constant.EXECUTOR
diff --git a/tools/install_dependency/src/machine/klass_dict.py b/tools/install_dependency/src/machine/klass_dict.py
deleted file mode 100644
index 835b9994cdc1a0325d7d137f1f384de095dcc2b8..0000000000000000000000000000000000000000
--- a/tools/install_dependency/src/machine/klass_dict.py
+++ /dev/null
@@ -1,12 +0,0 @@
-import constant
-from machine.scanner_machine import ScannerMachine
-from machine.builder_machine import BuilderMachine
-from machine.executor_machine import ExecutorMachine
-from machine.devkit_machine import DevkitMachine
-
-KLASS_DICT = {
- constant.EXECUTOR: ExecutorMachine,
- constant.DEVKIT: DevkitMachine,
- constant.SCANNER: ScannerMachine,
- constant.BUILDER: BuilderMachine,
-}
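KLASS_DICT lets ConnectCheck pick the Machine subclass for a role string instead of branching on it; a sketch of the dispatch (arguments illustrative):

    role = "builder"
    klass = KLASS_DICT.get(role)  # -> BuilderMachine
    machine = klass("192.168.0.1", "root", "/root/.ssh/id_rsa")  # may raise a ConnectException subclass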
diff --git a/tools/install_dependency/src/machine/local_machine.py b/tools/install_dependency/src/machine/local_machine.py
deleted file mode 100644
index 8e46f6f8c72a0d189d85a6ca8e2442b260da97d2..0000000000000000000000000000000000000000
--- a/tools/install_dependency/src/machine/local_machine.py
+++ /dev/null
@@ -1,281 +0,0 @@
-import os
-import logging
-import subprocess
-
-import constant
-from command_line import CommandLine
-from exception.connect_exception import NotMatchedMachineTypeException
-from download import component_collection_map
-from lkp_collect_map import lkp_collection_map
-from utils import (base_path, MKDIR_TMP_DEVKITDEPENDENCIES_CMD, YUM_INSTALL_LKP_DEPENDENCIES_CMD,
- CHECK_MIRROR_INSTALL_STATUS, PROMPT_MAP)
-
-LOGGER = logging.getLogger("install_dependency")
-SHELL_FILE_LIST = ["install.sh", "check_install_result.sh"]
-
-
-class LocalMachine:
- def __init__(self, ip):
- self.ip = ip
- self.check_is_aarch64()
- self.component_list = []
- self.mirrors = False
-
- def set_mirror(self):
- self.mirrors = True
-
- def add_component(self, component):
- self.component_list.extend(component)
- self.component_list = list(set(self.component_list))
-
- def install_components(self):
- if self.mirrors:
- self.install_component("OpenEulerMirrorISO")
- for component in self.component_list:
- self.install_component(component)
- if self.mirrors:
- self.install_component("UnOpenEulerMirrorISO")
-
- def check_is_aarch64(self):
- machine_type = os.uname().machine.lower()
- LOGGER.info(f"{self.ip} machine type: {machine_type}")
- if machine_type != "aarch64":
- LOGGER.error(f"Machine type of {self.ip} is {machine_type}, not aarch64. Please replace this machine.")
- raise NotMatchedMachineTypeException()
-
- def install_component(self, component_name):
- try:
- self.install_component_handler(component_name)
- except (FileNotFoundError, PermissionError, NotADirectoryError, OSError, IOError) as e:
- LOGGER.error(f"Local machine {self.ip} occur Error: {str(e)}")
-
- def install_component_handler(self, component_name):
- component_name_to_func_dict = {
- "GCCforOpenEuler": self.default_install_component_handle,
- "BiShengCompiler": self.default_install_component_handle,
- "BiShengJDK17": self.default_install_component_handle,
- "BiShengJDK8": self.default_install_component_handle,
- "LkpTests": self.lkptest_install_component_handle,
- "NonInvasiveSwitching": self.nis_install_component_handle,
- "OpenEulerMirrorISO": self.deploy_iso_handle,
- "UnOpenEulerMirrorISO": self.undeploy_iso_handle,
- }
- return component_name_to_func_dict.get(component_name)(component_name)
-
- def nis_install_component_handle(self, component_name):
- remote_file_list = []
-        # Copy and execute the install script and the install-result check script
- install_result = ""
- for shell_file in SHELL_FILE_LIST:
- sh_file_local_path = os.path.join(base_path("component"), component_name, shell_file)
- sh_file_remote_path = os.path.join("/tmp/", constant.DEPENDENCY_DIR, component_name + shell_file)
- sh_cmd = f"bash {sh_file_remote_path}"
- execute_output = (
- self.transport_shell_file_and_execute(
- sh_file_local_path=sh_file_local_path,
- sh_file_remote_path=sh_file_remote_path,
- sh_cmd=sh_cmd
- ))
- remote_file_list.append(sh_file_remote_path)
- if shell_file == SHELL_FILE_LIST[1]:
- install_result = execute_output
-
- if install_result == "true":
- LOGGER.info(f"Local machine {self.ip} install {component_name} success.")
- else:
- LOGGER.error(f"Local machine {self.ip} install {component_name} failed.")
-        # Clean up temporary files under /tmp
- self.clear_tmp_file_at_local_machine(remote_file_list)
-
- def lkptest_install_component_handle(self, component_name):
- self._local_exec_command(MKDIR_TMP_DEVKITDEPENDENCIES_CMD)
- self._local_exec_command(YUM_INSTALL_LKP_DEPENDENCIES_CMD)
-
-        # Copy the tar.gz files
- LOGGER.info(f"Install component in local machine {self.ip}: {component_name}")
- remote_file_list = []
- shell_dict = lkp_collection_map.get(component_name)
- for shell_cmd in shell_dict:
- url_and_save_path = shell_dict.get(shell_cmd)
- local_file = url_and_save_path.get("save_path")
- remote_file = os.path.abspath(os.path.join('/tmp', constant.DEPENDENCY_DIR, local_file.split('/')[-1]))
- LOGGER.debug(f"Copy local_file: {local_file} to local machine {self.ip} remote_file: {remote_file}")
- remote_file_list.append(remote_file)
- self._local_exec_command(f"/bin/cp -f {local_file} {remote_file}")
-        # Copy and execute the install script and the install-result check script
- install_result = ""
- for shell_file in SHELL_FILE_LIST:
- sh_file_local_path = os.path.join(base_path("component"), component_name, shell_file)
- sh_file_remote_path = os.path.join("/tmp/", constant.DEPENDENCY_DIR, component_name + shell_file)
- sh_cmd = f"bash {sh_file_remote_path} {remote_file_list[0]} {remote_file_list[1]}"
- execute_output = (
- self.transport_shell_file_and_execute(
- sh_file_local_path=sh_file_local_path,
- sh_file_remote_path=sh_file_remote_path,
- sh_cmd=sh_cmd
- ))
- remote_file_list.append(sh_file_remote_path)
- if shell_file == SHELL_FILE_LIST[1]:
- install_result = execute_output
-
- if install_result == "true":
- LOGGER.info(f"Local machine {self.ip} install {component_name} success.")
- else:
- LOGGER.error(f"Local machine {self.ip} install {component_name} failed.")
-        # Clean up temporary files under /tmp
- self.clear_tmp_file_at_local_machine(remote_file_list)
- self.__install_component_on_lkptest("CompatibilityTesting")
- self.__install_component_on_lkptest("DevkitDistribute")
-
- def __install_component_on_lkptest(self, component_name):
-        # Copy the tar.gz files
- LOGGER.info(f"Install component in local machine {self.ip}: {component_name}")
- remote_file_list = []
- shell_dict = lkp_collection_map.get(component_name)
- for shell_cmd in shell_dict:
- url_and_save_path = shell_dict.get(shell_cmd)
- local_file = url_and_save_path.get("save_path")
- remote_file = os.path.abspath(os.path.join('/tmp', constant.DEPENDENCY_DIR, local_file.split('/')[-1]))
- LOGGER.debug(f"Copy local_file: {local_file} to local machine {self.ip} remote_file: {remote_file}")
- remote_file_list.append(remote_file)
- self._local_exec_command(f"/bin/cp -f {local_file} {remote_file}")
-        # Copy and execute the install script and the install-result check script
- install_result = ""
- for shell_file in SHELL_FILE_LIST:
- sh_file_local_path = os.path.join(base_path("component"), component_name, shell_file)
- sh_file_remote_path = os.path.join("/tmp/", constant.DEPENDENCY_DIR, component_name + shell_file)
- sh_cmd = f"bash {sh_file_remote_path} {remote_file_list[0]}"
- execute_output = (
- self.transport_shell_file_and_execute(
- sh_file_local_path=sh_file_local_path,
- sh_file_remote_path=sh_file_remote_path,
- sh_cmd=sh_cmd
- ))
- remote_file_list.append(sh_file_remote_path)
- if shell_file == SHELL_FILE_LIST[1]:
- install_result = execute_output
-
- if install_result == "true":
- LOGGER.info(f"Local machine {self.ip} install {component_name} success.")
- else:
- LOGGER.error(f"Local machine {self.ip} install {component_name} failed.")
-        # Clean up temporary files under /tmp
- self.clear_tmp_file_at_local_machine(remote_file_list)
-
- def deploy_iso_handle(self, component_name):
-        # Locate the local image file
- LOGGER.info(f"Deploy component in local machine {self.ip}: {component_name}")
- local_path = os.path.abspath(CommandLine.iso_path)
-
-        # Execute the install script and the install-result check script
- install_result = ""
- for shell_file in SHELL_FILE_LIST:
- sh_file_local_path = os.path.join(base_path("component"), component_name, shell_file)
- sh_cmd = f"bash {sh_file_local_path} {local_path}"
- if not os.path.exists(sh_file_local_path):
-                LOGGER.error(f"{sh_file_local_path} does not exist.")
-                raise FileNotFoundError(f"local file {sh_file_local_path} does not exist.")
-
- result = subprocess.run(f"{sh_cmd}".split(' '),
- capture_output=True, shell=False)
- output = result.stdout.decode().strip()
- LOGGER.info(f"Local machine {self.ip} exec '{sh_cmd}' output: {output}")
- if shell_file == SHELL_FILE_LIST[1]:
- install_result = output
-
-        if install_result == "true":
-            LOGGER.info(f"Local machine {self.ip} deploy {component_name} success.")
-        else:
-            LOGGER.error(f"Local machine {self.ip} deploy {component_name} failed.")
-
- def default_install_component_handle(self, component_name):
- self._local_exec_command(MKDIR_TMP_DEVKITDEPENDENCIES_CMD)
-
-        # Copy the component package and its checksum file
- LOGGER.info(f"Install component in local machine {self.ip}: {component_name}")
- remote_file_list = []
- shell_dict = component_collection_map.get(component_name)
- for shell_cmd in shell_dict:
- url_and_save_path = shell_dict.get(shell_cmd)
- local_file = url_and_save_path.get("save_path")
- remote_file = os.path.abspath(os.path.join('/tmp', local_file))
- LOGGER.debug(f"Copy local_file: {local_file} to local machine {self.ip} remote_file: {remote_file}")
- remote_file_list.append(remote_file)
- self._local_exec_command(f"/bin/cp -f {local_file} {remote_file}")
-        # Copy and execute the install script and the install-result check script
- install_result = ""
- for shell_file in SHELL_FILE_LIST:
- sh_file_local_path = os.path.join(base_path("component"), component_name, shell_file)
- sh_file_remote_path = os.path.join("/tmp/", constant.DEPENDENCY_DIR, component_name + shell_file)
- sh_cmd = f"bash {sh_file_remote_path}"
- execute_output = (
- self.transport_shell_file_and_execute(
- sh_file_local_path=sh_file_local_path,
- sh_file_remote_path=sh_file_remote_path,
- sh_cmd=sh_cmd
- ))
- remote_file_list.append(sh_file_remote_path)
- if shell_file == SHELL_FILE_LIST[1]:
- install_result = execute_output
-
- if install_result == "true":
- LOGGER.info(f"Local machine {self.ip} install {component_name} success.")
- else:
- LOGGER.error(f"Local machine {self.ip} install {component_name} failed.")
-        # Clean up temporary files under /tmp
- self.clear_tmp_file_at_local_machine(remote_file_list)
-
- def _local_exec_command(self, cmd):
- result = subprocess.run(cmd.split(' '),
- capture_output=False, shell=False, stderr=subprocess.STDOUT)
- if result.returncode == 0:
- LOGGER.debug(f"Local machine {self.ip} exec '{cmd}' success.")
- else:
- LOGGER.error(f"Local machine {self.ip} exec '{cmd}' failed.")
- raise OSError(PROMPT_MAP.get(cmd, f"Local machine {self.ip} exec '{cmd}' failed."))
-
- def transport_shell_file_and_execute(self, sh_file_local_path, sh_file_remote_path, sh_cmd):
- if not os.path.exists(sh_file_local_path):
-            LOGGER.error(f"{sh_file_local_path} does not exist.")
-            raise FileNotFoundError(f"local file {sh_file_local_path} does not exist.")
-
- LOGGER.debug(f"Copy local_file: {sh_file_local_path} to local machine {self.ip} "
- f"remote_file: {sh_file_remote_path}")
- subprocess.run(f"/bin/cp -f {sh_file_local_path} {sh_file_remote_path}".split(' '),
- capture_output=False, shell=False, stderr=subprocess.STDOUT)
-
- result = subprocess.run(f"{sh_cmd}".split(' '),
- capture_output=True, shell=False)
- output = result.stdout.decode().strip()
- LOGGER.info(f"Local machine {self.ip} exec '{sh_cmd}' output: {output}")
- return output
-
- def clear_tmp_file_at_local_machine(self, remote_file_list):
- LOGGER.debug(f"Clear tmp file at local machine {self.ip}")
- for remote_file in remote_file_list:
- LOGGER.debug(f"Delete tmp file at local machine {self.ip}: {remote_file}")
- subprocess.run(f"rm -f {remote_file}".split(' '),
- capture_output=False, shell=False, stderr=subprocess.STDOUT)
-
- def do_nothing(self, component_name, sftp_client, ssh_client):
- return
-
- def undeploy_iso_handle(self, component_name):
-        # Verify that the local mirror was installed successfully
- self._local_exec_command(CHECK_MIRROR_INSTALL_STATUS)
-
- component_name = component_name.replace("Un", "")
- LOGGER.info(f"Umount component in local machine {self.ip}: {component_name}")
-
-        # Execute the uninstall script
- for shell_file in ["uninstall.sh"]:
- sh_file_local_path = os.path.join(base_path("component"), component_name, shell_file)
- sh_cmd = f"bash {sh_file_local_path}"
- if not os.path.exists(sh_file_local_path):
-                LOGGER.error(f"{sh_file_local_path} does not exist.")
-                raise FileNotFoundError(f"local file {sh_file_local_path} does not exist.")
-
- result = subprocess.run(f"{sh_cmd}".split(' '),
- capture_output=True, shell=False)
- output = result.stdout.decode().strip()
- LOGGER.info(f"Local machine {self.ip} exec '{sh_cmd}' output: {output}")
diff --git a/tools/install_dependency/src/machine/machine.py b/tools/install_dependency/src/machine/machine.py
deleted file mode 100644
index 31300fc4518c311d0dac04f9d4e7081fe869e30e..0000000000000000000000000000000000000000
--- a/tools/install_dependency/src/machine/machine.py
+++ /dev/null
@@ -1,385 +0,0 @@
-import logging
-import os
-import socket
-import typing
-
-import paramiko
-import timeout_decorator
-
-import constant
-from command_line import CommandLine
-from exception.connect_exception import (CreatePkeyFailedException, ConnectRemoteException,
- NotMatchedMachineTypeException)
-from download import component_collection_map
-from lkp_collect_map import lkp_collection_map
-from utils import (base_path, validate_path, MKDIR_TMP_DEVKITDEPENDENCIES_CMD, YUM_INSTALL_LKP_DEPENDENCIES_CMD,
- CHECK_HOME_SPACE_SUFFICIENT_FOR_MIRROR, CHECK_TMP_SPACE_SUFFICIENT_FOR_PACKAGE,
- CHECK_MIRROR_INSTALL_STATUS, PROMPT_MAP)
-
-LOGGER = logging.getLogger("install_dependency")
-SHELL_FILE_LIST = ["install.sh", "check_install_result.sh"]
-
-
-class Machine:
- def __init__(self, ip, user, pkey, password=None):
- self.ip = ip
- self.user = user
- self.pkey = pkey
- self.password = password
- self.check_is_aarch64()
- self.component_list = []
- self.mirrors = False
-
- def add_component(self, component):
- self.component_list.extend(component)
- self.component_list = list(set(self.component_list))
-
- def set_mirror(self):
- self.mirrors = True
-
- def check_is_aarch64(self):
- machine_type = self.get_machine_type()
- LOGGER.info(f"{self.ip} machine type: {machine_type}")
- if machine_type != "aarch64":
- LOGGER.error(f"Machine type of {self.ip} is {machine_type}, not aarch64. Please replace this machine.")
- raise NotMatchedMachineTypeException()
-
- def get_machine_type(self):
- try:
- ssh_client = self.ssh_client()
- stdin, stdout, stderr = ssh_client.exec_command("uname -m", timeout=10)
- except (paramiko.ssh_exception.SSHException, socket.timeout) as e:
- LOGGER.error(f"Connect remote {self.ip} failed. {str(e)}")
- raise ConnectRemoteException()
- stdout_output = stdout.read().decode().strip()
- ssh_client.close()
- return stdout_output
-
- def ssh_client(self):
- ssh = paramiko.SSHClient()
- ssh._transport = self.transport_connect(self.user, self.pkey, self.password)
- return ssh
-
- def sftp_client(self):
- sftp = paramiko.SFTPClient.from_transport(self.transport_connect(self.user, self.pkey, self.password))
- return sftp
-
- def transport_connect(self, user, pkey_path, password=None):
- if not validate_path(pkey_path) or not os.path.isfile(pkey_path):
- LOGGER.error("Yaml file content not correct. Given pkey not exists.")
- raise ConnectRemoteException()
- try:
-            # Load the local RSA private key file. If the key pair was created with a passphrase,
-            # pass it as 'password'; otherwise the password argument can be omitted.
- pkey = paramiko.RSAKey.from_private_key_file(pkey_path, password=password)
- except (IOError,) as e:
- LOGGER.error(f"Pkey file not exists. {str(e)}")
- raise CreatePkeyFailedException()
- except (paramiko.ssh_exception.PasswordRequiredException, paramiko.ssh_exception.AuthenticationException) as e:
- LOGGER.warning(f"Pkey password is required. {str(e)}")
-            password = input(f"Enter the password for {pkey_path}: ")
- self.password = password
- return self.transport_connect(user, pkey_path, password)
- except (paramiko.ssh_exception.SSHException,) as e:
- LOGGER.error(f"Connect remote {self.ip} failed because of wrong pkey. {str(e)}")
- raise CreatePkeyFailedException()
-
- try:
- transport = self.transport_connect_with_timeout(user, pkey)
- except (paramiko.ssh_exception.AuthenticationException,
- paramiko.ssh_exception.SSHException,
- timeout_decorator.TimeoutError,
- socket.gaierror,
- socket.timeout,
- socket.error) as e:
- LOGGER.error(f"Connect remote {self.ip} failed. {str(e)}")
- raise ConnectRemoteException()
- return transport
-
- @timeout_decorator.timeout(10)
- def transport_connect_with_timeout(self, user, pkey):
- transport = paramiko.Transport((self.ip, 22))
- transport.connect(username=user, pkey=pkey)
- return transport
-
- def install_component(self, component_name):
- ssh_client = self.ssh_client()
- sftp_client = self.sftp_client()
- try:
- self.install_component_handler(component_name, sftp_client, ssh_client)
- except timeout_decorator.TimeoutError as e:
- LOGGER.error(f"Remote machine {self.ip} occur Error: Exec cmd {str(e)}")
- except (FileNotFoundError, PermissionError, NotADirectoryError, OSError, IOError) as e:
- LOGGER.error(f"Remote machine {self.ip} occur Error: {str(e)}")
- finally:
- ssh_client.close()
- sftp_client.close()
-
- def install_components(self):
- if self.mirrors:
- self.install_component("OpenEulerMirrorISO")
- for component in self.component_list:
- self.install_component(component)
- if self.mirrors:
- self.install_component("UnOpenEulerMirrorISO")
-
- def install_component_handler(self, component_name, sftp_client, ssh_client):
- component_name_to_func_dict: typing.Dict[
- str, typing.Callable[[str, paramiko.SFTPClient, paramiko.SSHClient], typing.Any]] = {
- "GCCforOpenEuler": self.default_install_component_handle,
- "BiShengCompiler": self.default_install_component_handle,
- "BiShengJDK17": self.default_install_component_handle,
- "BiShengJDK8": self.default_install_component_handle,
- "LkpTests": self.lkptest_install_component_handle,
- "NonInvasiveSwitching": self.nis_install_component_handle,
- "OpenEulerMirrorISO": self.deploy_iso_handle,
- "UnOpenEulerMirrorISO": self.undeploy_iso_handle,
- }
- return component_name_to_func_dict.get(component_name)(component_name, sftp_client, ssh_client)
-
- def nis_install_component_handle(self, component_name, sftp_client, ssh_client):
- remote_file_list = []
-        # Upload and execute the install script and the install-result check script
- install_result = ""
- for shell_file in SHELL_FILE_LIST:
- sh_file_local_path = os.path.join(base_path("component"), component_name, shell_file)
- sh_file_remote_path = os.path.join("/tmp/", constant.DEPENDENCY_DIR, component_name + shell_file)
- sh_cmd = f"bash {sh_file_remote_path}"
- execute_output = (
- self.transport_shell_file_and_execute(
- ssh_client, sftp_client,
- sh_file_local_path=sh_file_local_path,
- sh_file_remote_path=sh_file_remote_path,
- sh_cmd=sh_cmd
- ))
- remote_file_list.append(sh_file_remote_path)
- if shell_file == SHELL_FILE_LIST[1]:
- install_result = execute_output
-
- if install_result == "true":
- LOGGER.info(f"Remote machine {self.ip} install {component_name} success.")
- else:
- LOGGER.error(f"Remote machine {self.ip} install {component_name} failed.")
-        # Clean up temporary files under /tmp
-        self.clear_tmp_file_at_remote_machine(ssh_client, remote_file_list)
-
- def lkptest_install_component_handle(self, component_name, sftp_client, ssh_client):
- self._remote_exec_command(MKDIR_TMP_DEVKITDEPENDENCIES_CMD, ssh_client)
- self._remote_exec_command(YUM_INSTALL_LKP_DEPENDENCIES_CMD, ssh_client)
-
-        # Upload the lkp-tests.tar.gz file
- LOGGER.info(f"Install component in remote machine {self.ip}: {component_name}")
- remote_file_list = []
- shell_dict = lkp_collection_map.get(component_name)
- for shell_cmd in shell_dict:
- url_and_save_path = shell_dict.get(shell_cmd)
- local_file = url_and_save_path.get("save_path")
- remote_file = os.path.abspath(os.path.join('/tmp', constant.DEPENDENCY_DIR, local_file.split('/')[-1]))
- LOGGER.debug(f"Transport local_file: {local_file} to remote machine {self.ip} "
- f"remote_file: {remote_file}")
-
- remote_file_list.append(remote_file)
- sftp_client.put(localpath=f"{local_file}", remotepath=f"{remote_file}")
-        # Upload and execute the install script and the install-result check script
- install_result = ""
- for shell_file in SHELL_FILE_LIST:
- sh_file_local_path = os.path.join(base_path("component"), component_name, shell_file)
- sh_file_remote_path = os.path.join("/tmp/", constant.DEPENDENCY_DIR, component_name + shell_file)
- sh_cmd = f"bash {sh_file_remote_path} {remote_file_list[0]} {remote_file_list[1]}"
- execute_output = (
- self.transport_shell_file_and_execute(
- ssh_client, sftp_client,
- sh_file_local_path=sh_file_local_path,
- sh_file_remote_path=sh_file_remote_path,
- sh_cmd=sh_cmd
- ))
- remote_file_list.append(sh_file_remote_path)
- if shell_file == SHELL_FILE_LIST[1]:
- install_result = execute_output
-
- if install_result == "true":
- LOGGER.info(f"Remote machine {self.ip} install {component_name} success.")
- else:
- LOGGER.error(f"Remote machine {self.ip} install {component_name} failed.")
-        # Clean up temporary files under /tmp
- self.clear_tmp_file_at_remote_machine(ssh_client, remote_file_list)
- self.__install_component_on_lkptest("CompatibilityTesting", sftp_client, ssh_client)
- self.__install_component_on_lkptest("DevkitDistribute", sftp_client, ssh_client)
-
- def __install_component_on_lkptest(self, component_name, sftp_client, ssh_client):
-        # Upload the tar.gz files
- LOGGER.info(f"Install component in remote machine {self.ip}: {component_name}")
- remote_file_list = []
- shell_dict = lkp_collection_map.get(component_name)
- for shell_cmd in shell_dict:
- url_and_save_path = shell_dict.get(shell_cmd)
- local_file = url_and_save_path.get("save_path")
- remote_file = os.path.abspath(os.path.join('/tmp', constant.DEPENDENCY_DIR, local_file.split('/')[-1]))
- LOGGER.debug(f"Transport local_file: {local_file} to remote machine {self.ip} "
- f"remote_file: {remote_file}")
- remote_file_list.append(remote_file)
- sftp_client.put(localpath=f"{local_file}", remotepath=f"{remote_file}")
-        # Upload and execute the install script and the install-result check script
- install_result = ""
- for shell_file in SHELL_FILE_LIST:
- sh_file_local_path = os.path.join(base_path("component"), component_name, shell_file)
- sh_file_remote_path = os.path.join("/tmp/", constant.DEPENDENCY_DIR, component_name + shell_file)
- sh_cmd = f"bash {sh_file_remote_path} {remote_file_list[0]}"
- execute_output = (
- self.transport_shell_file_and_execute(
- ssh_client, sftp_client,
- sh_file_local_path=sh_file_local_path,
- sh_file_remote_path=sh_file_remote_path,
- sh_cmd=sh_cmd
- ))
- remote_file_list.append(sh_file_remote_path)
- if shell_file == SHELL_FILE_LIST[1]:
- install_result = execute_output
-
- if install_result == "true":
- LOGGER.info(f"Remote machine {self.ip} install {component_name} success.")
- else:
- LOGGER.error(f"Remote machine {self.ip} install {component_name} failed.")
-        # Clean up temporary files under /tmp
- self.clear_tmp_file_at_remote_machine(ssh_client, remote_file_list)
-
- def deploy_iso_handle(self, component_name, sftp_client, ssh_client):
- self._remote_exec_command(CHECK_HOME_SPACE_SUFFICIENT_FOR_MIRROR, ssh_client)
-
-        # Upload the image file
- LOGGER.info(f"Deploy component in remote machine {self.ip}: {component_name}")
- local_path = os.path.abspath(CommandLine.iso_path)
- remote_path = os.path.join("/home", local_path.split('/')[-1])
- LOGGER.debug(f"Transport local_file: {local_path} to remote machine {self.ip} "
- f"remote_file: {remote_path}")
- sftp_client.put(localpath=local_path, remotepath=remote_path)
-
- # 上传并执行 安装脚本, 校验安装结果脚本
- install_result = ""
- remote_file_list = []
- for shell_file in SHELL_FILE_LIST:
- sh_file_local_path = os.path.join(base_path("component"), component_name, shell_file)
- sh_file_remote_path = os.path.join("/tmp/", component_name + shell_file)
- sh_cmd = f"bash {sh_file_remote_path} {remote_path}"
- execute_output = (
- self.transport_shell_file_and_execute(
- ssh_client, sftp_client,
- sh_file_local_path=sh_file_local_path,
- sh_file_remote_path=sh_file_remote_path,
- sh_cmd=sh_cmd
- ))
- remote_file_list.append(sh_file_remote_path)
- if shell_file == SHELL_FILE_LIST[1]:
- install_result = execute_output
-
- if install_result == "true":
- LOGGER.info(f"Remote machine {self.ip} deploy {component_name} success.")
- else:
- LOGGER.info(f"Remote machine {self.ip} deploy {component_name} failed.")
- # 清理tmp临时文件
- self.clear_tmp_file_at_remote_machine(ssh_client, remote_file_list)
-
- def default_install_component_handle(self, component_name, sftp_client, ssh_client):
- self._remote_exec_command(MKDIR_TMP_DEVKITDEPENDENCIES_CMD, ssh_client)
- self._remote_exec_command(CHECK_TMP_SPACE_SUFFICIENT_FOR_PACKAGE, ssh_client)
-
- # 上传 组件压缩包和校验文件
- LOGGER.info(f"Install component in remote machine {self.ip}: {component_name}")
- remote_file_list = []
- shell_dict = component_collection_map.get(component_name)
- for shell_cmd in shell_dict:
- url_and_save_path = shell_dict.get(shell_cmd)
- local_file = url_and_save_path.get("save_path")
- remote_file = os.path.abspath(os.path.join('/tmp', local_file))
- LOGGER.debug(f"Transport local_file: {local_file} to remote machine {self.ip} "
- f"remote_file: {remote_file}")
- remote_file_list.append(remote_file)
- sftp_client.put(localpath=f"{local_file}", remotepath=f"{remote_file}")
-
- # 上传并执行 安装脚本, 校验安装结果脚本
- install_result = ""
- for shell_file in SHELL_FILE_LIST:
- sh_file_local_path = os.path.join(base_path("component"), component_name, shell_file)
- sh_file_remote_path = os.path.join("/tmp/", constant.DEPENDENCY_DIR, component_name + shell_file)
- sh_cmd = f"bash {sh_file_remote_path}"
- execute_output = (
- self.transport_shell_file_and_execute(
- ssh_client, sftp_client,
- sh_file_local_path=sh_file_local_path,
- sh_file_remote_path=sh_file_remote_path,
- sh_cmd=sh_cmd
- ))
- remote_file_list.append(sh_file_remote_path)
- if shell_file == SHELL_FILE_LIST[1]:
- install_result = execute_output
-
- if install_result == "true":
- LOGGER.info(f"Remote machine {self.ip} install {component_name} success.")
- else:
- LOGGER.info(f"Remote machine {self.ip} install {component_name} failed.")
- # 清理tmp临时文件
- self.clear_tmp_file_at_remote_machine(ssh_client, remote_file_list)
-
- @timeout_decorator.timeout(100)
- def _remote_exec_command(self, cmd, ssh_client):
- try:
- stdin, stdout, stderr = ssh_client.exec_command(cmd, timeout=90)
- except (paramiko.ssh_exception.SSHException, socket.timeout) as e:
- LOGGER.error(f"Remote machine {self.ip} exec '{cmd}' failed Please run this command in this machine.")
- raise OSError(PROMPT_MAP.get(cmd, f"Remote machine {self.ip} exec '{cmd}' failed."))
- exit_status = stdout.channel.recv_exit_status()
- if exit_status == 0:
- LOGGER.debug(f"Remote machine {self.ip} exec '{cmd}' success.")
- else:
- LOGGER.error(f"Remote machine {self.ip} exec '{cmd}' failed. Please run this command in this machine.")
- raise OSError(PROMPT_MAP.get(cmd, f"Remote machine {self.ip} exec '{cmd}' failed."))
-
- def transport_shell_file_and_execute(self, ssh_client, sftp_client, sh_file_local_path, sh_file_remote_path,
- sh_cmd):
- if not os.path.exists(sh_file_local_path):
- LOGGER.error(f"{sh_file_local_path} not exists.")
- raise FileNotFoundError(f"local file {sh_file_local_path} not exists.")
-
- LOGGER.debug(f"Transport local_file: {sh_file_local_path} to remote machine {self.ip} "
- f"remote_file: {sh_file_remote_path}")
- sftp_client.put(localpath=sh_file_local_path, remotepath=sh_file_remote_path)
-
- stdin, stdout, stderr = ssh_client.exec_command(sh_cmd)
- output = stdout.read().decode().strip()
- LOGGER.info(f"Remote machine {self.ip} '{sh_cmd}' output: {output}")
- return output
-
- def clear_tmp_file_at_remote_machine(self, ssh_client, remote_file_list):
- LOGGER.debug(f"Clear tmp file at remote machine {self.ip}")
- for remote_file in remote_file_list:
- LOGGER.debug(f"Delete tmp file at remote machine {self.ip}: {remote_file}")
- ssh_client.exec_command(f"rm -f {remote_file}")
-
- def do_nothing(self, component_name, sftp_client, ssh_client):
- return
-
- def undeploy_iso_handle(self, component_name, sftp_client, ssh_client):
- # 需要检查本地镜像是否安装成功
- self._remote_exec_command(CHECK_MIRROR_INSTALL_STATUS, ssh_client)
-
- component_name = component_name.replace("Un", "")
- LOGGER.info(f"Umount component in remote machine {self.ip}: {component_name}")
- local_path = os.path.abspath(CommandLine.iso_path)
- remote_path = os.path.join("/home", local_path.split('/')[-1])
-
- # 上传并执行 卸载脚本
- remote_file_list = []
- for shell_file in ["uninstall.sh"]:
- sh_file_local_path = os.path.join(base_path("component"), component_name, shell_file)
- sh_file_remote_path = os.path.join("/tmp/", component_name + shell_file)
- sh_cmd = f"bash {sh_file_remote_path}"
- execute_output = (
- self.transport_shell_file_and_execute(
- ssh_client, sftp_client,
- sh_file_local_path=sh_file_local_path,
- sh_file_remote_path=sh_file_remote_path,
- sh_cmd=sh_cmd
- ))
- remote_file_list.append(sh_file_remote_path)
- remote_file_list.append(remote_path)
- # 清理tmp临时文件
- self.clear_tmp_file_at_remote_machine(ssh_client, remote_file_list)
diff --git a/tools/install_dependency/src/machine/scanner_machine.py b/tools/install_dependency/src/machine/scanner_machine.py
deleted file mode 100644
index 9377b527f8b9a0507156b71be16f3ec62d97c2e0..0000000000000000000000000000000000000000
--- a/tools/install_dependency/src/machine/scanner_machine.py
+++ /dev/null
@@ -1,8 +0,0 @@
-import constant
-from machine.machine import Machine
-
-
-class ScannerMachine(Machine):
- def __init__(self, ip, user, pkey, password=None):
- super(ScannerMachine, self).__init__(ip, user, pkey, password)
- self.role = constant.SCANNER
diff --git a/tools/install_dependency/src/utils.py b/tools/install_dependency/src/utils.py
deleted file mode 100644
index d249129dd2352a28d71133d6292f9d52d39126ae..0000000000000000000000000000000000000000
--- a/tools/install_dependency/src/utils.py
+++ /dev/null
@@ -1,36 +0,0 @@
-import os
-import sys
-
-MKDIR_TMP_DEVKITDEPENDENCIES_CMD = "mkdir -p /tmp/devkitdependencies"
-YUM_INSTALL_LKP_DEPENDENCIES_CMD = "yum install -y git wget rubygems"
-CHECK_HOME_SPACE_SUFFICIENT_FOR_MIRROR = "[[ $(df -m /home | awk 'NR==2' | awk '{print $4}') -gt 17*1024 ]]"
-CHECK_TMP_SPACE_SUFFICIENT_FOR_PACKAGE = "[[ $(df -m /tmp | awk 'NR==2' | awk '{print $4}') -gt 1024 ]]"
-CHECK_MIRROR_INSTALL_STATUS = "test -d /etc/yum.repos.d/yum.repos.backup"
-
-PROMPT_MAP = {
- MKDIR_TMP_DEVKITDEPENDENCIES_CMD: "Directory /tmp/devkitdependencies not exists.",
- YUM_INSTALL_LKP_DEPENDENCIES_CMD: "Yum install dependencies failed.",
- CHECK_HOME_SPACE_SUFFICIENT_FOR_MIRROR: "Machine /home space not sufficient for mirror.",
- CHECK_TMP_SPACE_SUFFICIENT_FOR_PACKAGE: "Machine /tmp space not sufficient for package.",
- CHECK_MIRROR_INSTALL_STATUS: "Mirror mount status not correct."
-}
-
-
-def validate_path(path: str) -> bool:
- return path.startswith('/') and path.find('../') == -1 and path.find('./') == -1
-
-
-def base_path(path):
- if getattr(sys, 'frozen', False):
- base_dir = sys._MEIPASS
- else:
- base_dir = os.path.dirname(__file__)
- return os.path.join(base_dir, path)
-
-
-def available_role(role_list, data):
- list_after_verification = []
- for role in role_list:
- if role in data:
- list_after_verification.append(role)
- return list_after_verification
diff --git a/tools/isv_tool.py b/tools/isv_tool.py
new file mode 100644
index 0000000000000000000000000000000000000000..923c22f2e5492b287af53620292ea859329fe781
--- /dev/null
+++ b/tools/isv_tool.py
@@ -0,0 +1,605 @@
+import re
+import os
+from datetime import timezone, timedelta, datetime
+import argparse
+import zipfile
+import tarfile
+from prettytable import PrettyTable
+import logging
+from bs4 import BeautifulSoup
+
+LOGGER = logging.getLogger()
+LOGGER.setLevel(level=logging.INFO)
+FILE_HANDLER = logging.FileHandler("./isv.log", "a", encoding="utf-8")
+FILE_HANDLER.setFormatter(logging.Formatter("[%(funcName)s - %(lineno)d] %(message)s"))
+LOGGER.addHandler(FILE_HANDLER)
+
+CURRENT_DIR = os.path.dirname(os.path.abspath(__file__))
+
+
+class IsvTool:
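+    """Analyze BoostKit feature logs from a customer package and generate an HTML acceptance report."""
+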
+ def __init__(self):
+        self.log = LOGGER
+
+        # Map each solution to its analysis method
+ self.method_dict = {
+ "bigdata": self.check_bigdata,
+ "storage": self.check_storage,
+ "database": self.check_database,
+ "arm_native": self.check_arm_native,
+ "virtual": self.check_virtual,
+ "virt": self.check_virt,
+ "acceleration_library": self.check_acceleration_library,
+ "hpc": self.check_hpc
+
+ }
+        # Chinese display name for each solution
+ self.project_msg = {
+ "bigdata": "大数据",
+ "storage": "分布式存储",
+ "database": "数据库",
+ "arm_native": "Arm原生",
+ "virtual": "虚拟化",
+ "virt": "机密计算",
+ "acceleration_library": "加速库",
+ "hpc": "HPC"
+ }
+
+        # Mandatory features for each solution
+ self.must_feature = {
+ "bigdata": ["OmniRuntime Hbase二级索引", "OmniRuntime 参数调优", "OmniRuntime 算子加速"],
+ "storage": ["存储加速算法库"],
+ "database": ["GCC编译反馈优化", "MySQL无锁优化"],
+ "arm_native": ["视频流引擎"],
+ "virtual": ["负载动态感知"],
+ "virt": ["virtCCA", "iTrustee"],
+ "acceleration_library": ["KAE加解密", "鲲鹏基础加速库 系统库", "鲲鹏基础加速库 HMPP", "鲲鹏基础加速库 数学库"],
+ "hpc": ["加速库改造"]
+ }
+ self.must_str = str(list(self.must_feature.values()))
+ self.default_html = "./report.html"
+
+ def check_virtcca_cvm(self, log_path):
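+        # Heuristic: the domain XML must declare launchSecurity type='cvm' (cVM defined) and the log must
+        # contain "etc", which this check takes as evidence that the guest actually booted.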
+ create_cvm_status = False
+ start_cvm_status = False
+ with open(log_path) as f:
+ for per_line in f.readlines():
+ if re.compile("launchSecurity type=\'cvm\'>").findall(per_line):
+ create_cvm_status = True
+
+ if re.compile("etc").findall(per_line):
+ start_cvm_status = True
+
+ self.log.info(f"create_cvm_status: {create_cvm_status} start_cvm_status: {start_cvm_status}")
+        return start_cvm_status and create_cvm_status
+
+ def check_ccos(self, log_path):
+        # Either marker counts as evidence that iTrustee/CCOS is enabled.
+        with open(log_path) as f:
+            content = f.read()
+        check_res = re.compile(r"check (s\w+)").findall(content)
+        ta_uuid_res = re.compile(r"TA_UUID").findall(content)
+        return bool(check_res) or bool(ta_uuid_res)
+
+ def check_virt(self, *log_path_list):
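+        # Logs are routed by filename prefix: virtcca* -> virtCCA cVM check, virtccos* -> iTrustee/CCOS check.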
+ result = {"virtCCA": False, "iTrustee": False}
+ for per_log in log_path_list:
+ per_log_strip = per_log.replace("./", "")
+ if per_log_strip.startswith("virtcca"):
+ result["virtCCA"] = self.check_virtcca_cvm(per_log)
+ elif per_log_strip.startswith("virtccos"):
+ result["iTrustee"] = self.check_ccos(per_log)
+
+ return result
+
+ def check_arm_native(self, log_path):
+ kbox, video, instruction = False, False, False
+ kbox_key_word = (
+ 'gralloc.kbox.so', 'audio.primary.kbox.so', 'gps.kbox.so', 'sensors.kbox.so', 'libmedia_omxcore.so',
+ 'libstagefrighthw', 'vinput', 'hwcomposer.kbox.so')
+ with open(log_path) as f:
+ content = f.read()
+ for per_key_word in kbox_key_word:
+ kbox_res = re.compile(per_key_word).findall(content)
+ if kbox_res:
+ kbox = True
+ break
+
+ video_res = re.compile("VmiInputFlinger").findall(content)
+ if video_res:
+ video = True
+
+ instruction_res = re.compile("VmiAgent instruction").findall(content)
+ if instruction_res:
+ instruction = True
+
+ return {"Kbox基础云手机": kbox, "视频流引擎": video, "指令流引擎": instruction}
+
+ def check_virtual_flow(self, log_path):
+ # ovs 卸载检测
+ status = False
+ with open(log_path) as f:
+ content = f.read()
+ if not content:
+ return status
+
+ if not re.compile("ovs-appctl: command not found").findall(content) and not re.compile(
+ "No flow available").findall(content):
+ status = True
+
+ return status
+
+ def check_virtual_dpu(self, log_path):
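+        # DPU offload passes only if all three sub-checks hold: OVS flows offloaded, a non-empty storage
+        # session list, and the named OpenStack server in ACTIVE state.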
+ net_uninstall, storage_uninstall, openstack_uninstall = False, False, False
+ with open(log_path) as f:
+ content = f.read()
+ if not content:
+ return False
+
+ if not re.compile("dpak_ovs_ctl: command not found").findall(content) and not re.compile(
+ "No flow available").findall(content):
+ net_uninstall = True
+ content_new = content.replace("\n", "")
+            storage_res = re.compile(r'"session": \[(.*)\]\s*\}').findall(content_new)
+            self.log.info(f"storage_res: {storage_res}")
+ if storage_res:
+ if storage_res[0].strip() == "":
+ storage_uninstall = False
+ else:
+ storage_uninstall = True
+
+ server_name = re.compile(r"server_name: (.*)").findall(content)
+ self.log.info(f"server_name: %s" % server_name)
+ if not server_name or server_name[0] == "":
+ openstack_uninstall = False
+ elif re.compile(r"%s\s+|\s+ACTIVE" % server_name[0]).findall(content):
+ openstack_uninstall = True
+
+        return net_uninstall and storage_uninstall and openstack_uninstall
+
+ def check_virtual_load_awareness(self, log_path):
+        # Dynamic load awareness (WaaS) check
+ running_status = False
+ version_status = False
+ with open(log_path) as f:
+ content = f.read()
+ if re.compile("active \(running\)").findall(content):
+ running_status = True
+ if re.compile("waasctl:\s\d+\S\d+\S\d+").findall(content) and \
+ re.compile("WaaS Agent:\s\d+\S\d+\S\d+").findall(content):
+ version_status = True
+
+        return running_status and version_status
+
+ def check_virtual(self, *log_path_list):
+ result = {"负载动态感知": False, "OVS流表网卡加速": False, "虚拟化DPU卸载": False}
+ for per_log in log_path_list:
+ per_log_strip = per_log.replace("./", "")
+ if per_log_strip.startswith("virtual_sense"):
+ result.setdefault("负载动态感知", "")
+ result["负载动态感知"] = self.check_virtual_load_awareness(per_log)
+ elif per_log_strip.startswith("virtual_dpu_flow"):
+ result["OVS流表网卡加速"] = self.check_virtual_flow(per_log)
+ elif per_log_strip.startswith("virtual_dpu"):
+ result["虚拟化DPU卸载"] = self.check_virtual_dpu(per_log)
+
+ return result
+
+ def check_database(self, log_path):
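+        # One database log is scanned for five independent markers; each marker toggles its own feature flag.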
+ thread_pool, kovae, gcc_compile, no_lock, crc32 = False, False, False, False, False
+ with open(log_path) as f:
+ content = f.read()
+ if re.compile("THREAD_POOL_GROUPS").findall(content) and re.compile(
+ "thread_pool: thread_pool.so").findall(
+ content):
+ thread_pool = True
+
+ if re.compile("kovae_path: ha_kovae.so").findall(content) and re.compile("KOVAE_MEMORY_ACTIVE").findall(
+ content):
+ kovae = True
+
+ if re.compile("bolt").findall(content):
+ gcc_compile = True
+
+            no_lock_list = re.compile(r"no_lock: (\d+)").findall(content)
+ if not no_lock_list:
+ no_lock = False
+ elif int(no_lock_list[0]) > 0:
+ no_lock = True
+
+ if re.compile("crc32cb").findall(content):
+ crc32 = True
+
+ return {"MySQL支持线程池功能": thread_pool, "MySQL向量化分析引擎": kovae, "GCC编译反馈优化": gcc_compile, "MySQL无锁优化": no_lock,
+ "CRC32指令优化": crc32}
+
+ def check_acceleration_library(self, log_path):
+        # Acceleration library checks
+ kae, system_lib, hmpp_lib, math_lib = False, False, False, False
+ with open(log_path) as f:
+ content = f.read()
+ if not re.compile("invalid engine").findall(content) and re.compile("engine \"kae\" set").findall(content):
+ kae = True
+
+ for per_system_flag in ["libavx2ki", "libavx2neon", "libkqmalloc", "libhtl"]:
+ if re.compile(f"{per_system_flag}.so").findall(content):
+ system_lib = True
+ self.log.info(f"check system_lib: {per_system_flag}.so result: {system_lib}")
+ break
+ self.log.info(f"check system_lib: {per_system_flag}.so result: {system_lib}")
+
+ for per_hmpp_flag in ["libHMPP_signal", "libHMPP_image", "libHMPP_audio"]:
+ if re.compile(f"{per_hmpp_flag}.so").findall(content):
+ hmpp_lib = True
+ self.log.info(f"check hmpp_lib: {per_hmpp_flag}.so result: {hmpp_lib}")
+ break
+ self.log.info(f"check hmpp_lib: {per_hmpp_flag}.so result: {hmpp_lib}")
+
+ for per_math_flag in ["libkblas", "libkvml", "libkspblas", "libkfftf", "libkfft", "libkm", "libkm_19",
+ "libksvml", "libkvsl", "libksolver", "libklapack_full", "libkscasolver", "libkes"]:
+ if re.compile(f"{per_math_flag}.so").findall(content):
+ math_lib = True
+ self.log.info(f"check math_lib: {per_math_flag}.so result: {math_lib}")
+ break
+ self.log.info(f"check math_lib: {per_math_flag}.so result: {math_lib}")
+
+ return {"KAE加解密": kae, "鲲鹏基础加速库 系统库": system_lib, "鲲鹏基础加速库 HMPP": hmpp_lib, "鲲鹏基础加速库 数学库": math_lib}
+
+ def bigdata_kal(self, log_path):
+        # Takes one log named with the bigdata_kal prefix and checks whether that algorithm took effect;
+        # the caller aggregates the per-algorithm results.
+ kal_status = False
+ split_log_path = log_path.split("_")
+        alg_name, dataset_name = split_log_path[-2], split_log_path[-1]
+ with open(log_path) as f:
+ content = f.read()
+ if re.compile("KunpengAlgorithmLibrary").findall(content):
+ kal_status = True
+
+ self.log.info(f"Algorithm: {alg_name} dataset {datatset_name} check_result:{kal_status}")
+ return kal_status
+
+ def bigdata_operator(self, log_path):
+ operator_status = False
+ with open(log_path) as f:
+ content = f.read()
+ if re.compile("Using BoostKit Spark Native Sql Engine Extension to Speed Up Your Queries").findall(content):
+ operator_status = True
+
+ return operator_status
+
+ def bigdata_hbase(self, log_path):
+ hbase_status = False
+ with open(log_path) as f:
+ content = f.read()
+ if re.compile("Best index").findall(content):
+ hbase_status = True
+
+ return hbase_status
+
+ def bigdata_tune_up(self, log_path):
+ tuneup_status = False
+ with open(log_path) as f:
+ content = f.read()
+ if re.compile("omniadvisor.log").findall(content) and not re.compile("omniadvisor.log not exist").findall(
+ content):
+ if re.compile("best_config").findall(content) and re.compile("history_config").findall(content):
+ tuneup_status = True
+
+ return tuneup_status
+
+ def check_bigdata(self, *log_path_list):
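+        # bigdata_kal logs arrive one per algorithm and are aggregated below; other prefixes map one log to one feature.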
+ result = {"机器学习和图分析算法加速库": list(), "OmniRuntime Hbase二级索引": False, "OmniRuntime 参数调优": False,
+ "OmniRuntime 算子加速": False}
+ for per_log in log_path_list:
+ per_log_strip = per_log.replace("./", "")
+ if per_log_strip.startswith("bigdata_kal"):
+ result["机器学习和图分析算法加速库"].append(self.bigdata_kal(per_log))
+ elif per_log_strip.startswith("bigdata_hbase"):
+ result["OmniRuntime Hbase二级索引"] = self.bigdata_hbase(per_log)
+ elif per_log_strip.startswith("bigdata_tune_up"):
+ result["OmniRuntime 参数调优"] = self.bigdata_tune_up(per_log)
+ elif per_log_strip.startswith("bigdata_operator"):
+ result["OmniRuntime 算子加速"] = self.bigdata_operator(per_log)
+
+ bigdata_kal_content = result.get("机器学习和图分析算法加速库")
+ self.log.info("bigdata_kal: {}".format(bigdata_kal_content))
+
+        # All collected KAL algorithm logs must pass, and at least four are required, for KAL to count as enabled.
+        result["机器学习和图分析算法加速库"] = len(bigdata_kal_content) >= 4 and False not in bigdata_kal_content
+
+ return result
+
+ def check_storage(self, log_path):
+        # Storage analysis
+ ec_turbo, cache, ksal = False, False, False
+ with open(log_path) as f:
+ content = f.read()
+ if re.compile(r"libkps_ec.so").findall(content) and re.compile(r"libkps_bluestore.so").findall(content):
+ ec_turbo = True
+ if re.compile(r"traffic_policy_start").findall(content):
+ cache = True
+ if re.compile(r"libksal.so").findall(content) and re.compile(r"libec_ksal.so").findall(content):
+ ksal = True
+
+ result = {"存储Ceph加速 EC Turbo": ec_turbo, "存储Ceph加速 智能写Cache": cache, "存储加速算法库": ksal}
+ self.log.info(f"storage analysis result: {result}")
+ return result
+
+ def check_hpc_acc_lib(self, log_path):
+ acc_lib = False
+ num = 0
+ with open(log_path) as f:
+ content = f.read()
+ for key_word in ["hyperio", "libkm_l", "hmpi", "tydss"]:
+ if re.compile(f"{key_word}").findall(content):
+ num += 1
+
+ if num >= 3:
+ acc_lib = True
+
+ return acc_lib
+
+ def check_hpc_sme_lib(self, log_path):
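+        # Passes if the HPC log contains any SME instruction mnemonic listed in SME_ISTRUCTION.txt.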
+ sme_status = False
+ with open(log_path) as f:
+ content = f.read()
+ with open("./SME_ISTRUCTION.txt", encoding="utf-8") as hpc_fuc:
+ for per_line in hpc_fuc.readlines():
+ if re.compile(per_line).findall(content):
+ sme_status = True
+
+ return sme_status
+
+ def check_hpc(self, *log_path_list):
+ result = {"加速库改造": False, "SME改造": False}
+ for per_log in log_path_list:
+ per_log_strip = per_log.replace("./", "")
+ if per_log_strip.startswith("hpc_acceleration_library"):
+ result["加速库改造"] = self.check_hpc_acc_lib(per_log)
+ elif per_log_strip.startswith("hpc_SME_library"):
+ result["SME改造"] = self.check_hpc_sme_lib(per_log)
+
+ return result
+
+ def induct_log(self, *log_list):
+        # Group the extracted log files by solution
+ induct_result = dict()
+ for per_log in log_list:
+ if per_log.startswith("./bigdata"):
+ induct_result.setdefault("bigdata", list())
+ induct_result["bigdata"].append(per_log)
+ elif per_log.startswith("./storage"):
+ induct_result.setdefault("storage", list())
+ induct_result["storage"].append(per_log)
+ elif per_log.startswith("./database"):
+ induct_result.setdefault("database", list())
+ induct_result["database"].append(per_log)
+ elif per_log.startswith("./arm_native"):
+ induct_result.setdefault("arm_native", list())
+ induct_result["arm_native"].append(per_log)
+ elif per_log.startswith("./acceleration_library"):
+ induct_result.setdefault("acceleration_library", list())
+ induct_result["acceleration_library"].append(per_log)
+ elif per_log.startswith("./virtual"):
+ induct_result.setdefault("virtual", list())
+ induct_result["virtual"].append(per_log)
+ elif per_log.startswith("./virt"):
+ induct_result.setdefault("virt", list())
+ induct_result["virt"].append(per_log)
+ elif per_log.startswith("./hpc"):
+ induct_result.setdefault("hpc", list())
+ induct_result["hpc"].append(per_log)
+ elif per_log.startswith("./os_msg"):
+ induct_result.setdefault("os_msg", None)
+ induct_result["os_msg"] = per_log
+
+ self.log.info(f"induct_dict: {induct_result}")
+ return induct_result
+
+ def analysis(self, **log_path_dict: dict):
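+        # Multi-log solutions (bigdata, virtual, virt, hpc) get their whole log list; the others are
+        # checked against only the last log collected for them.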
+ result = dict()
+ if log_path_dict.get("os_msg"):
+ log_path_dict.pop("os_msg")
+
+ for per_project, feature_log in log_path_dict.items():
+ method = self.method_dict.get(per_project)
+ if method:
+ if per_project in ["bigdata", "virtual", "virt", "hpc"]:
+ result.setdefault(per_project, method(*feature_log))
+ else:
+ result.setdefault(per_project, method(feature_log[-1]))
+ else:
+ self.log.info(f"{per_project} nocorrect")
+
+ self.log.info(result)
+ return result
+
+ @staticmethod
+ def decompress_zip(compressed_report_package):
+        with zipfile.ZipFile(compressed_report_package) as zip_file:
+            zip_file.extractall("./")
+            file_names = zip_file.namelist()
+        return file_names
+
+ @staticmethod
+ def decompress_gz_bz(compressed_report_package):
+ with tarfile.open(compressed_report_package, "r") as tar:
+ tar.extractall(path="./")
+ file_names = tar.getnames()
+ return file_names
+
+ def generator_html(self, new_html_path, feature_num, config_dict, feature_msg):
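+        # Inject the report data as JS variables (infoList, num, tableList) ahead of the template's first
+        # <script> body so its rendering code can pick them up, then write the combined page to new_html_path.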
+        with open(self.default_html, encoding="UTF-8") as f:
+            soup = BeautifulSoup(f.read(), "html.parser")
+        script_tags = soup.find_all("script")
+        origin_script = script_tags[0].string
+        insert_str = """
+        var infoList = {};
+
+        // Enabled features
+        var num = {};
+
+        // Detailed acceptance results
+        var tableList = {};
+        {}""".format(
+            str(config_dict),
+            str(feature_num),
+            str(feature_msg),
+            origin_script
+        )
+        script_tags[0].string = insert_str
+        with open(new_html_path, "w", encoding="UTF-8") as f:
+            f.write(str(soup))
+
+ def acquire_os_msg(self, log_path):
+        if not log_path:
+            self.log.error("OS information was not collected.")
+ insert_list = [
+ {"label": "报告生成时间",
+ "value": datetime.now(tz=timezone(timedelta(hours=8))).strftime("%Y/%m/%d %H:%M:%S")},
+ {"label": "操作系统版本", "value": "未获取"},
+ {"label": "内核版本", "value": "未获取"},
+ {"label": "鲲鹏服务器的机型", "value": "未获取"},
+ {"label": "工具路径", "value": CURRENT_DIR},
+ {"label": "工具版本", "value": "1.0"},
+ ]
+ return insert_list
+
+ os_dict = {
+ "os": "None",
+ "version": "None",
+ "kernel": "None",
+ "product_name": "None",
+ }
+ with open(log_path, encoding="utf-8") as f:
+ for per_line in f.readlines():
+ per_line = per_line.strip()
+ if per_line.startswith("os:"):
+ os_re = re.compile(r"os: (.*)").findall(per_line.strip())
+ os_dict["os"] = os_re[0].strip("\"") if os_re else "None"
+ elif per_line.startswith("version:"):
+ version_re = re.compile(r"Version:(.*)").findall(per_line)
+ os_dict["version"] = version_re[0] if version_re else "None"
+ elif per_line.startswith("kernel:"):
+ kernel = re.compile(r"kernel: (.*)").findall(per_line)
+ os_dict["kernel"] = kernel[0] if kernel else "None"
+ elif per_line.startswith("Product Name:"):
+ product_name_re = re.compile(r"Product Name:(.*)").findall(per_line)
+ os_dict["product_name"] = product_name_re[0] if product_name_re else "None"
+ elif per_line.startswith("customer_information:"):
+ customer_information_re = re.compile(r"customer_information:(.*)").findall(per_line)
+ os_dict["customer_information"] = customer_information_re[0] if customer_information_re else "None"
+
+ self.log.info(f"os_dict: {os_dict}")
+ insert_list = [
+ {"label": "报告生成时间", "value": datetime.now(tz=timezone(timedelta(hours=8))).strftime("%Y/%m/%d %H:%M:%S")},
+ {"label": "操作系统版本", "value": os_dict.get("os", "未获取")},
+ {"label": "内核版本", "value": os_dict.get("kernel", "未获取")},
+ {"label": "鲲鹏服务器的机型", "value": os_dict.get("product_name", "未获取")},
+ {"label": "信息收集压缩包名称", "value": os_dict.get("customer_information", "客户未填写")},
+ {"label": "工具路径", "value": CURRENT_DIR},
+ {"label": "工具版本", "value": "1.0"},
+ ]
+ self.log.info(f"insert_list: \n{insert_list}")
+ return insert_list
+
+ def convert_analysis_res(self, **analysis_result):
+        # Convert analysis results into rows for the HTML report
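+        # Returns (table rows, enabled-solution count, summary rows for the report header).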
+ index = 1
+ effect_num = 0
+ result_list = list()
+ self.log.info(f"must_str: {self.must_str}")
+ success_str, fail_str, enable_str = "", "", ""
+ for project, features in analysis_result.items():
+            # Features that passed
+            temp_key = [key for key, value in features.items() if value]
+            # Mandatory features for this solution
+            project_must_feature = self.must_feature.get(project)
+            # Intersection of the passed and mandatory features
+            temp_set = set(temp_key) & set(project_must_feature)
+ if False not in features.values():
+ effect_num += 1
+ success_str += f"{self.project_msg.get(project)} "
+ else:
+                # Decide from the mandatory features whether the solution counts as enabled
+ enable_list = list()
+ for per_feature in project_must_feature:
+                    if per_feature not in temp_set:
+ fail_str += f"{self.project_msg.get(project)} "
+ break
+ else:
+ enable_list.append(True)
+
+ self.log.info(f"{project} 必选特性生效情况: {temp_set} 所有特性情况 {features}")
+ if enable_list and False not in enable_list and len(enable_list) == len(project_must_feature):
+ effect_num += 1
+ success_str += f"{self.project_msg.get(project)} "
+
+ for per_feature, status in features.items():
+ status = "通过" if status else "失败"
+                # must_str is the stringified list of all mandatory features; a substring test marks mandatory ones
+                must_status = "必选" if per_feature in self.must_str else "非必选"
+ result_list.append([index, self.project_msg.get(project), per_feature, must_status, status])
+ index += 1
+
+        # Convert the enablement summary into rows for the HTML header section
+ if success_str == "" and fail_str != "":
+ enable_list = [{"label": "使能评估失败", "value": "{} 【失败】".format(fail_str)}]
+ elif success_str != "" and fail_str == "":
+ enable_list = [{"label": "使能评估成功", "value": "{} 【通过】".format(success_str)}]
+ elif success_str == "" and fail_str == "":
+ enable_list = [{"label": "使能评估结果", "value": "未使能任何解决方案"}]
+ else:
+ enable_list = [{"label": "使能评估成功", "value": "{} 【通过】".format(success_str)},
+ {"label": "使能评估失败", "value": "{} 【失败】".format(fail_str)},
+ ]
+
+ return result_list, effect_num, enable_list
+
+
+if __name__ == "__main__":
+ parser = argparse.ArgumentParser()
+ parser.add_argument('-tool', help='DevKit or BoostKit')
+ parser.add_argument('-package', help='Compressed package')
+ args = parser.parse_args()
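+    # Example (package name is illustrative): python3 isv_tool.py -tool BoostKit -package customer_logs.tar.gz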
+ if args.tool == "DevKit":
+ pass
+ # acceptance_tool = AcceptanceTool()
+ # acceptance_tool.devkit_acceptance_report(args.package)
+ elif args.tool == "BoostKit":
+ boostkit = IsvTool()
+ file_list = IsvTool.decompress_gz_bz(args.package)
+ induct_dict = boostkit.induct_log(*file_list)
+ analysis_result = boostkit.analysis(**induct_dict)
+ config_list = boostkit.acquire_os_msg(induct_dict.get("os_msg"))
+ convert_result, effect_num, enable_list = boostkit.convert_analysis_res(**analysis_result)
+ config_list += enable_list
+ boostkit.generator_html("./new_report.html", effect_num, config_list, convert_result)
+ else:
+ print("请输入正确的参数,如-tool Devkit 或 -tool BoostKit")
+ # except Exception as err:
+ # print(err)
+ # print("请输入正确的参数")
+
+
+
diff --git a/tools/report.html b/tools/report.html
new file mode 100644
index 0000000000000000000000000000000000000000..dceb5c35e6faaa3b8564b6dfc55ea854589dc3e1
--- /dev/null
+++ b/tools/report.html
@@ -0,0 +1,150 @@
+<!DOCTYPE html>
+<html>
+<head>
+    <meta charset="UTF-8">
+    <title>Document</title>
+</head>
+<body>
+    <h2>配置信息</h2>
+    <h2>验收详细结果</h2>
+    <table>
+        <tr>
+            <th>序号</th>
+            <th>解决方案</th>
+            <th>特性</th>
+            <th>是否必选</th>
+            <th>分析结果</th>
+        </tr>
+    </table>
+    <!-- styling and rendering script stripped in extraction; only the recoverable structure is kept -->
+    <script></script>
+</body>
+</html>