hisi_opensource/community (forked from OpenHarmony/community)
ci_build.py
hisi_opensource committed on 2024-06-18 20:10 +08:00: update ci_build.py.
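
"""CI build helper for the community repository.

Runs the given build command while tailing its log, then parses the per-sample
build logs under ./archives, merges them with the build_info.json configuration
files found in the repository, and writes a build-results_<id>_<number>.json summary.
"""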
import json
import os
import re
import stat
import subprocess
import sys
import time

from environs import Env

DEFAULT_BUILD_TIMEOUT = 30 * 60

# ===== Settings related to the community code repository =====
# Name of the file that holds the community build configuration
BUILD_INFO_FILENAME = 'build_info.json'

# ===== Settings related to Huawei Cloud OBS =====
OBS_HTTP_DOWNLOAD_HOST = "https://hispark-obs-fbde.obs.cn-north-4.myhuaweicloud.com"


def debug_main():
    # The build framework is not supported yet; use this mock routine in its place
    cmds = [
        "ls -lath",
        "cat ./ci-build.log",
        "wget https://hispark-obs-fbde.obs.cn-north-4.myhuaweicloud.com/debug/standard-bs25-app-evb_bs25_sample1.fwpkg -P ./archives",
        "wget https://hispark-obs-fbde.obs.cn-north-4.myhuaweicloud.com/debug/build-standard-bs25-app-evb_bs25_sample1.log -P ./archives",
        "ls -lath ./archives"
    ]
    print(f"print cwd: {os.getcwd()}")
    print("mkdir ./archives: ")
    os.mkdir("./archives")
    for cmd in cmds:
        print(f"cmd: {cmd}")
        os.system(cmd)
        time.sleep(0.5)


def get_build_logs(cwd='.'):
    print("start get_build_logs")
    build_logs = []
    if not os.path.exists(cwd):
        print(f"path {cwd} does not exist")
        return build_logs
    for root, dirs, filenames in os.walk(cwd):
        print(f"root: {root}, files: {filenames}")
        if root != cwd:
            continue
        for filename in filenames:
            if filename.startswith("build-") and filename.endswith("log"):
                build_logs.append(os.path.join(root, filename))
    return build_logs
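
# Illustrative note (hypothetical filenames): only files directly under cwd whose names start
# with "build-" and end with "log" are collected, so for an ./archives directory holding
# "build-app.log" and "flash-app.log", get_build_logs('./archives') returns
# ['./archives/build-app.log'].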


def run_build_script(build_cmd_list):
    log_path = os.path.join('.', 'ci-build.log')
    writer = os.fdopen(os.open(
        log_path,
        os.O_WRONLY | os.O_CREAT | os.O_TRUNC,
        stat.S_IWUSR | stat.S_IRUSR,
    ), 'wb')
    reader = os.fdopen(os.open(
        log_path,
        os.O_RDONLY,
        stat.S_IWUSR | stat.S_IRUSR,
    ), 'rb')
    # Run the build script, echoing its log to the console while it is also written to the file
    print(f"cwd: {os.getcwd()}")
    print(' '.join(build_cmd_list))
    start = time.time()
    try:
        proc = subprocess.Popen(
            build_cmd_list,
            text=False,
            stdout=writer,
            stderr=writer,
        )
        while True:
            timeout = (time.time() - start) > DEFAULT_BUILD_TIMEOUT
            line = reader.readline()
            if line == b'':
                if proc.poll() is not None:
                    break
                time.sleep(2)
                if not timeout:
                    continue
                else:
                    proc.kill()
                    raise Exception("build exit cause: timeout")
            try:
                outs = line.decode('utf-8', errors='strict').rstrip()
            except UnicodeDecodeError:
                outs = line.decode('gbk', errors='replace').rstrip()
            if not outs:
                if not timeout:
                    continue
                else:
                    proc.kill()
                    raise Exception("build timeout")
            print(outs)
    except subprocess.CalledProcessError as exc:
        errors = []
        if exc.output:
            err_msg = ""
            try:
                err_msg = exc.output.strip().decode("utf-8", errors="strict")
            except Exception:
                err_msg = exc.output.strip().decode("gbk", errors="ignore")
            errors.append(err_msg)
        else:
            errors.append(str(exc))
        print("\n".join(errors))
        raise Exception(errors[-1])
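
# Minimal usage sketch (the command shown is a hypothetical placeholder):
#   run_build_script(["bash", "build.sh", "all"])
# tails ./ci-build.log while the subprocess writes to it and kills the build once
# DEFAULT_BUILD_TIMEOUT (30 minutes) has elapsed.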


def parse_build_log(path):
    res = {
        "target": '',
        "result": '',
        "times": '',
        "log_name": path.split('/')[-1]
    }
    name = path.split('/')[-1].replace('build-', '').replace('.log', '')
    if not os.path.exists(path):
        print(f"[parse_build_log] build log {path} does not exist")
        return name, res
    patterns = {
        "target": r"######### Build target:(\S+) (\S+)",
        "time": r"(\S+) takes (\d+)(\.\d+) s",
        "result": r"Finished: (SUCCESS|FAILURE)"
    }
    plength = len(patterns)
    events = list(patterns.keys())
    msgs = list(patterns.values())
    fp = open(path, 'r+', encoding='utf-8', errors='ignore')
    line = fp.readline()
    while line:
        match = None
        event = None
        for i in range(plength):
            match = re.search(msgs[i], line)
            if match:
                event = events[i]
                break
        if event == 'target':
            target, build_res = match.groups()
            res['target'] = target
            print(f'target img {target}, build {build_res}')
        if event == 'time':
            _, second, millisecond = match.groups()
            print(f'build second {second}, millisecond {millisecond}')
            res['times'] = float(f'{second}{millisecond}')
        if event == 'result':
            res['result'] = match.groups()[0]
            print(f'build and packet {res["result"]}')
        line = fp.readline()
    fp.close()
    return name, res
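
# Illustrative example (log lines and values are hypothetical, shaped after the regexes above):
# for a log at "./archives/build-app.log" containing
#   "######### Build target:app.fwpkg success"
#   "app takes 35.6 s"
#   "Finished: SUCCESS"
# parse_build_log returns ("app", {"target": "app.fwpkg", "result": "SUCCESS",
# "times": 35.6, "log_name": "build-app.log"}).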


def parse_build_results():
    samples_build_results = {}
    # Collect the build log files under the archive directory
    archive_dir = './archives'
    log_paths = get_build_logs(archive_dir)
    print(f"[parse_build_results] log_paths: {log_paths}")
    for path in log_paths:
        name, result = parse_build_log(path)
        print(f"[parse_build_results] name: {name}, result: {result}")
        print(f"[parse_build_results] cur samples_build_results: {samples_build_results}")
        if name and name not in samples_build_results.keys():
            samples_build_results[name] = result
            # todo: confirm that every project's image is a fwpkg file; if not, the full
            # image name needs to be looked up in archive_dir
            samples_build_results[name]['image_name'] = f"{name}.fwpkg"
        else:
            print(f"something is wrong with sample name: {name}")
    return samples_build_results
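
# For a log named "build-app.log" (hypothetical), the returned dict maps "app" to the parsed
# result plus an assumed image name "app.fwpkg"; the fwpkg assumption mirrors the todo above.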


def get_all_build_info():
    # Parse the build configuration files
    print("start get_all_build_info")
    res = {}
    info_paths = []
    for root, dirs, names in os.walk('./'):
        for filename in names:
            if filename == BUILD_INFO_FILENAME:
                info_paths.append(os.path.join(root, filename))
    print(f"[get_all_build_info] info_paths: {info_paths}")
    for p in info_paths:
        try:
            with open(p, 'r') as info_fp:
                info = json.load(info_fp)
            for i in info:
                tmp = [i.get('buildTarget', ''),
                       i.get('relativePath', '').replace('/', '-'),
                       i.get('chip', '')]
                if i.get('buildDef', ''):
                    tmp.append(i.get('buildDef', '').replace(',', '-'))
                res["_".join(tmp)] = i
        except Exception as err:
            print(f"loading build info {p} failed, cause: {err}")
    return res
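
# Hypothetical example (field names come from the keys read above, values are invented):
# a build_info.json entry such as
#   [{"buildTarget": "app", "relativePath": "samples/wifi", "chip": "bs25", "buildDef": "A,B"}]
# is stored under the key "app_samples-wifi_bs25_A-B".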


def get_build_results_filename(parent_pipeline_id, parent_pipeline_number):
    return f"build-results_{parent_pipeline_id}_{parent_pipeline_number}.json"


def get_build_log_download_url(repo_name, parent_pipeline_id, parent_pipeline_number, sample_name):
    return f"{OBS_HTTP_DOWNLOAD_HOST}/logs/build/{repo_name}/{parent_pipeline_id}/{parent_pipeline_number}/{sample_name}"


def get_build_img_download_url(repo_name, parent_pipeline_id, parent_pipeline_number, sample_name):
    return f"{OBS_HTTP_DOWNLOAD_HOST}/data/img/smoke/{repo_name}/{parent_pipeline_id}/{parent_pipeline_number}/{sample_name}"


def merge_result_and_build_info(build_results: dict, build_info: dict):
    print("start merge_result_and_build_info")
    all_build_names = build_info.keys()
    for name in build_results.keys():
        if name in all_build_names:
            print(f"[merge_result_and_build_info][{name}] build_results: {build_results[name]}")
            print(f"[merge_result_and_build_info][{name}] build_info: {build_info[name]}")
            build_results[name].update(build_info[name])
        else:
            raise Exception(f"sample {name} config not in build info")
    return build_results
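
# Sketch with hypothetical data: merging build_results {"app": {"result": "SUCCESS"}} with
# build_info {"app": {"chip": "bs25"}} gives {"app": {"result": "SUCCESS", "chip": "bs25"}};
# a results key that is missing from build_info raises an exception instead.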


def main(build_command, parent_pipeline_id='111', parent_pipeline_number='222'):
    env = Env()
    env.read_env()
    # build_cmd = env('build_command')
    # parent_pipeline_id = env('parent_pipeline_id')
    # parent_pipeline_number = env('parent_pipeline_number')
    build_cmd = build_command
    status = ''
    try:
        run_build_script(build_cmd.split(' '))
        # debug_main()
        status = 'SUCCESS'
    except Exception as err:
        status = 'FAILED'
        print(err)
    finally:
        try:
            # Parse each sample's build log to get its result and elapsed time
            build_results = parse_build_results()
            print(f"[main] build_results: {build_results}")
            # Parse the build configuration
            build_info = get_all_build_info()
            print(f"[main] build_info: {build_info}")
            results = merge_result_and_build_info(build_results, build_info)
            print(f"[main] merge results: {results}")
            results_path = os.path.join("./", get_build_results_filename(parent_pipeline_id, parent_pipeline_number))
            with open(results_path, 'w+') as fp:
                fp.write(json.dumps(results))
        except Exception as err:
            print(f"results parse failed, cause {err}")
    # todo: upload archives and result detail
    if status == 'SUCCESS':
        return 0
    else:
        return -1


if __name__ == '__main__':
    sys.exit(main(sys.argv[1]))
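
# Example invocation (the build command itself is project-specific; the one shown is a
# hypothetical placeholder):
#   python ci_build.py "./build.py standard-bs25-app-evb_bs25_sample1"
# The script exits with 0 when the build succeeds and with a non-zero status otherwise; the
# build-results JSON is written whenever log parsing succeeds, even for a failed build.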