diff --git a/test/scripts/auto_xts_test/get_resource/config.yaml b/test/scripts/auto_xts_test/get_resource/config.yaml deleted file mode 100644 index 392a034452314cabfc3bd78b87720e5f69cf8bae..0000000000000000000000000000000000000000 --- a/test/scripts/auto_xts_test/get_resource/config.yaml +++ /dev/null @@ -1,60 +0,0 @@ -# Copyright (c) 2023 Huawei Device Co., Ltd. -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -url_dailybuilds : "http://ci.openharmony.cn/api/ci-backend/ci-portal/v1/dailybuilds" -headers : - 'Accept': 'application/json, text/plain, */*' - 'Accept-Encoding': 'gzip, deflate' - 'Accept-Language': 'zh-CN,zh;q=0.8' - 'Access-Control-Allow-Credentials': 'true' - 'Access-Control-Allow-Methods': 'POST, GET, PUT, OPTIONS, DELETE, PATCH' - 'Access-Control-Allow-Origin': '*' - 'Connection': 'keep-alive' - 'Content-Length': '216' - 'Content-Type': 'application/json;charset=UTF-8' - 'Cookie': '_frid=d54846f4e88e415587e14aed0e4a9d63;\ - __51vcke__JhI7USZ6OfAHQZUm=0af50c49-e1b6-5ca4-9356-a986a785be93;\ - __51vuft__JhI7USZ6OfAHQZUm=1684307559015;\ - _fr_ssid=c60810a1808f447b9f696d9534294dcb;\ - __51uvsct__JhI7USZ6OfAHQZUm=5;\ - __vtins__JhI7USZ6OfAHQZUm=%7B%22sid%22%3A%20%22972e7520-a952-52ff-b0f4-0c3ca53da01b%22%2C%20%22vd%22%3A%205%2C%20%22stt%22%3A%201947502%2C%20%22dr%22%3A%20409887%2C%20%22expires%22%3A%201684921552594%2C%20%22ct%22%3A%201684919752594%7D;\ - _fr_pvid=3a57d4c932eb4e10814323c8d3758b0d' - 'hide': 'false' - 'Host': 'ci.openharmony.cn' - 'Origin': 'http://ci.openharmony.cn' - 'Referer': 'http://ci.openharmony.cn/dailys/dailybuilds' - 'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64) \ - AppleWebKit/537.36 (KHTML, like Gecko) Chrome/113.0.0.0\ - Safari/537.36 Edg/113.0.1774.50' -data: - 'branch': "master" - 'buildFailReason': "" - 'buildStatus': "success" - 'component': "dayu200" - 'deviceLevel': "" - 'endTime': "" - 'hardwareBoard': "" - 'pageNum': 1 - 'pageSize': 8 - 'projectName': "openharmony" - 'startTime': "" - 'testResult': "" -url_dayu200: - - "http://download.ci.openharmony.cn/version/Daily_Version/dayu200/" - - "/version-Daily_Version-dayu200-" - - "-dayu200.tar.gz" -url_tools : 'http://123.60.114.105:9999/RKDevTool.zip' -path_xts_pack : 'D:\\AutoXTSTest\\dayu200_xts.tar.gz' -path_xts_dir : 'D:\\AutoXTSTest\\dayu200_xts' -path_configfile : 'D:\\AutoXTSTest\\dayu200_xts\\suites\\acts\\config\\user_config.xml' -path_xts_report : 'D:\\AutoXTSTest\\dayu200_xts\\suites\\acts\\reports' \ No newline at end of file diff --git a/test/scripts/auto_xts_test/get_resource/get_tool.py b/test/scripts/auto_xts_test/get_resource/get_tool.py deleted file mode 100755 index 02c781bde7fdcd3ba549aa0a96130050fb548621..0000000000000000000000000000000000000000 --- a/test/scripts/auto_xts_test/get_resource/get_tool.py +++ /dev/null @@ -1,52 +0,0 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- -# -# Copyright (c) 2023 Huawei Device Co., Ltd. -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -import json -import os -import stat -import zipfile - -import requests -from tqdm import tqdm -import yaml - - -def get_tool(url): - print(f"Getting RKDevTool from {url}") - r = requests.get(url, stream=True) - total = int(r.headers.get('content-length'), 0) - flags = os.O_WRONLY | os.O_CREAT - modes = stat.S_IWUSR | stat.S_IRUSR - - with os.fdopen(os.open(r".\RKDevTool.zip", flags, modes), "wb") as f, tqdm( - desc="RKDevTool.zip", - total=total, - unit='iB', - unit_scale=True, - unit_divisor=1024, - ) as bar: - for byte in r.iter_content(chunk_size=1024): - size = f.write(byte) - bar.update(size) - with zipfile.ZipFile(".\\RKDevTool.zip", 'r') as zfile: - zfile.extractall(path=".\\RKDevTool") - - -if __name__ == "__main__": - with open(r".\get_resource\config.yaml", 'r') as config_file: - data = yaml.safe_load(config_file.read()) - get_tool(data['url_tools']) \ No newline at end of file diff --git a/test/scripts/auto_xts_test/get_resource/spider.py b/test/scripts/auto_xts_test/get_resource/spider.py deleted file mode 100755 index 2c06247719915f64b2ff8b62600107235333a452..0000000000000000000000000000000000000000 --- a/test/scripts/auto_xts_test/get_resource/spider.py +++ /dev/null @@ -1,78 +0,0 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- -# -# Copyright (c) 2023 Huawei Device Co., Ltd. -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- - -import json -import logging -import os -import stat -import tarfile -import xml.etree.ElementTree as ET - -import requests -from tqdm import tqdm -import yaml - - -def get_images_and_testcases(url, download_path, extract_path): - print(f"Get new image from {url},please wait!") - r = requests.get(url, stream=True) - total = int(r.headers.get('content-length'), 0) - flags = os.O_WRONLY | os.O_CREAT - modes = stat.S_IWUSR | stat.S_IRUSR - - with os.fdopen(os.open(download_path, flags, modes), "wb") as f, tqdm( - desc="dayu200_xts.tar.gz", - total=total, - unit='iB', - unit_scale=True, - unit_divisor=1024, - ) as bar: - for byte in r.iter_content(chunk_size=1024): - size = f.write(byte) - bar.update(size) - - print("extracrting file") - with tarfile.open(download_path, "r") as tar: - for member in tqdm(desc='dayu200_xts', iterable=tar.getmembers(), total=len(tar.getmembers())): - tar.extract(path=extract_path, member=member) - logging.basicConfig(filename="log.log", level='INFO') - logging.info(f'Downloading Success, url:{url}') - - -def get_url(url, headers, json_data, url_2): - response = requests.post(url, json=json_data, headers=headers) - json_obj = json.loads(response.text) - start_time = json_obj['result']['dailyBuildVos'][0]['buildStartTime'] - start_time = start_time[:8] + "_" + start_time[8:] - return url_2[0] + start_time + url_2[1] + start_time + url_2[2] - - -def change_port(xml_path, xml_dw="./environment/device/port"): - doc = ET.parse(xml_path) - root = doc.getroot() - port = root.find(xml_dw) - port.text = "8710" - doc.write(xml_path) - - -if __name__ == '__main__': - with open(r".\get_resource\config.yaml", 'r') as config_file: - data = yaml.safe_load(config_file.read()) - dest_url = get_url(data['url_dailybuilds'], data['headers'], data['data'], data['url_dayu200']) - get_images_and_testcases(dest_url, data['path_xts_pack'], data['path_xts_dir']) - change_port(data['path_configfile']) - \ No newline at end of file diff --git a/test/scripts/auto_xts_test/run.bat b/test/scripts/auto_xts_test/run.bat index f372bb6b7858a27b502f85e1e6d1264770ac8013..9d7591c4dff5665c4d27090ac42c442c06f4fa19 100755 --- a/test/scripts/auto_xts_test/run.bat +++ b/test/scripts/auto_xts_test/run.bat @@ -18,36 +18,14 @@ REM change to work directory cd /d %~dp0 REM log -echo "------------------------------------------------" >> log.log - -REM get tool -if not exist .\RKDevTool ( -python .\get_resource\get_tool.py -.\RKDevTool\DriverAssitant_v5.1.1\DriverAssitant_v5.1.1\DriverInstall.exe -del /q .\RKDevTool.zip -) -if not exist .\RKDevTool\RKDevTool.exe (goto ToolError) - -REM get image & XTS testcases -set var=D:\AutoXTSTest -if not exist %var% (md %var%) -rd /s /q %var%\dayu200_xts -python .\get_resource\spider.py -del /q %var%\dayu200_xts.tar.gz -if not exist %var%\dayu200_xts\suites (goto ResourceError) - -REM load image to rk3568 -hdc shell reboot bootloader -cd RKDevTool -python ..\autoburn.py -cd .. 
-for /f "tokens=*" %%i in ('hdc list targets') do (set target=%%i) -if "%var%"=="[Empty]" (goto BurnError) +echo "------------------------------------------------" >> log.txt REM run XTStest +set var=D:\AutoXTSTest timeout /t 15 hdc shell "power-shell setmode 602" hdc shell "hilog -Q pidoff" +cd /d %~dp0 for /f "tokens=1,2 delims==" %%i in (running_modules.txt) do ( if "%%i"=="modules" set value=%%j ) @@ -55,25 +33,10 @@ call %var%\dayu200_xts\suites\acts\run.bat run -l %value% REM get result cd /d %~dp0 -echo "Successfully excute script" >> log.log +echo "Successfully excute script" >> log.txt if exist result (rd /s /q result) md result -python get_result.py +python result.py ENDLOCAL exit -REM error process -: ToolError -echo "Error happens while getting tool" >> log.log -ENDLOCAL -exit - -: ResourceError -echo "Error happens while getting dailybuilds resource" >> log.log -ENDLOCAL -exit - -: BurnError -echo "Error happens while burnning images" >> log.log -ENDLOCAL -exit \ No newline at end of file diff --git a/test/scripts/download/__pycache__/utils.cpython-310.pyc b/test/scripts/download/__pycache__/utils.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..9e335e6f39dac23298a147b344098cd87b0412c6 Binary files /dev/null and b/test/scripts/download/__pycache__/utils.cpython-310.pyc differ diff --git a/test/scripts/download/config.yaml b/test/scripts/download/config.yaml new file mode 100644 index 0000000000000000000000000000000000000000..7ff05211e9ec6ff23c8355ac3cd31db69201906a --- /dev/null +++ b/test/scripts/download/config.yaml @@ -0,0 +1,33 @@ +# Copyright (c) 2023 Huawei Device Co., Ltd. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +# test env config +deveco_path: D:\complier\DevEco Studio +deveco_sdk_path: D:\enviorment\SDK\openHarmony_SDK +node_js_path: D:\enviorment\nodejs-huawei +RKdevtool_download_path : 'http://123.60.114.105:9999/RKDevTool.zip' +RKdevtool_output_path: './auto_xts_test/RKDevTool' +pictures_download_path: 'http://123.60.114.105:9999/pictures_reference.zip' +pictures_output_path: './sdk_test/pictures_reference' +download_path: D:\download +# image download list +download_list: + - sdk: + name: sdk + path: D:\enviorment\SDK\openHarmony_SDK # The directory where the compressed files are stored + output_path: D:\enviorment\SDK\openHarmony_SDK\temp # File replacement path + - dayu200: + name: dayu200 + path: D:\AutoXTSTest + output_path: D:\AutoXTSTest\dayu200_xts + diff --git a/test/scripts/download/download.py b/test/scripts/download/download.py new file mode 100644 index 0000000000000000000000000000000000000000..6cf0e610ddafeac174acc3ee80ef407e68cabdb2 --- /dev/null +++ b/test/scripts/download/download.py @@ -0,0 +1,266 @@ +import argparse +import os +import shutil +import sys +import tarfile +import zipfile +import subprocess + +import yaml + + +import utils + + +configs = {} +arguments = {} + + +class downloadTask: + def __init__(self): + self.name = '' + self.path = '' + self.output_path = '' + + +def parse_configs(): + config_file_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'config.yaml') + with open(config_file_path, 'r', encoding='utf-8') as config_file: + global configs + configs = yaml.safe_load(config_file) + + +def clean_log(): + output_log_dic = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'result') + if os.path.exists(output_log_dic): + shutil.rmtree(output_log_dic) + + +def create_download_task(): + task_list = [] + download_list = configs.get('download_list') + for download_task in download_list: + task = downloadTask() + task.name = download_task['name'] + task.path = download_task['path'] + task.output_path = download_task['output_path'] + task_list.append(task) + + return task_list + + +def check_deveco_dev(): + if utils.is_linux(): + return False + + java_path = os.path.join(configs.get('deveco_path'), 'jbr') + if not os.path.exists(java_path): + print("Java not found!") + return False + + if not os.path.exists(configs.get('node_js_path')): + print("Node js not found!") + return False + + return True + + +def check_rkDevTool(): + rKDevTool_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), '../auto_xts_test/RKDevTool') + if not os.path.exists(rKDevTool_path): + url = configs.get('RKdevtool_download_path') + output_path = configs.get('RKdevtool_output_path') + utils.get_tool('RKDevTool.zip', url, output_path) + + rKDevTool_exe_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), + '../auto_xts_test/RKDevTool/RKDevTool.exe') + if not rKDevTool_exe_path: + return False + + return True + + +def check_pictures_reference(): + pictures_reference_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), + '../sdk_test/pictures_reference') + if not os.path.exists(pictures_reference_path): + url = configs.get('pictures_download_path') + output_path = configs.get('pictures_output_path') + utils.get_tool('pictures_references.zip', url, output_path) + if not os.path.exists(pictures_reference_path): + return False + + return True + + +def prepare_test_dev(): + clean_log() + prepared = check_deveco_dev() + prepared = prepared and check_rkDevTool() and check_pictures_reference() + return prepared + + +def 
download_simple(download_url): + download_name = utils.parse_file_name(download_url) + download_path = configs.get('download_path') + download_temp_file = os.path.join(download_path, download_name) + if not os.path.exists(download_path): + os.mkdir(download_path) + utils.download(download_url, download_temp_file, download_name) + + # 判断文件名后缀, 如果是gz则对文件进行解压 + file_extension = os.path.splitext(download_name)[-1] + if file_extension == '.gz': + with tarfile.open(download_temp_file, 'r:gz') as tar: + print(f'Unpacking {download_temp_file}') + tar.extractall(download_path) + print(f'Decompression {download_temp_file} completed') + + +def download_zip_file(task_name, download_url, path): + # 读取文件路径 + print(path) + temp_floder = path + '_temp' + download_name = utils.get_remote_download_name(task_name) + + download_temp_file = os.path.join(temp_floder, download_name) + if os.path.exists(temp_floder): + shutil.rmtree(temp_floder) + os.mkdir(temp_floder) + # 开始下载任务 + print(f'download {task_name} from {download_url}, please wait!!!') + utils.download(download_url, download_temp_file, download_name) + + # 校验是否正确获取到文件 + if not utils.check_gzip_file(download_temp_file): + print('The downloaded file is not a valid gzip file.') + return '', '' + + # 解压文件 + with tarfile.open(download_temp_file, 'r:gz') as tar: + print(f'Unpacking {download_temp_file}') + tar.extractall(temp_floder) + print(f'Decompression {download_temp_file} completed') + + if task_name == 'sdk': + sdk_zip_path_list = [temp_floder, 'ohos-sdk', 'windows'] + if utils.is_mac(): + sdk_zip_path_list = [temp_floder, 'sdk', + 'packages', 'ohos-sdk', 'darwin'] + sdk_floder = os.path.join(temp_floder, 'SDK_TEMP') + sdk_zip_path = os.path.join(*sdk_zip_path_list) + for item in os.listdir(sdk_zip_path): + if item != '.DS_Store': + print(f'Unpacking {item}') + with zipfile.ZipFile(os.path.join(sdk_zip_path, item)) as zip_file: + zip_file.extractall(os.path.join(sdk_floder)) + print(f'Decompression {item} completed') + + +def updata_to_output_path(task_name, path, output_path): + if task_name == 'sdk': + deveco_sdk_path = configs.get('deveco_sdk_path') + temp_floder = deveco_sdk_path + '_temp' + sdk_floder = os.path.join(temp_floder, 'SDK_TEMP') + api_version = utils.get_api_version(os.path.join( + *[sdk_floder, 'ets', 'oh-uni-package.json'])) + update_sdk_to_deveco(sdk_floder, api_version) + else: + if os.path.exists(output_path): + shutil.rmtree(output_path) + if os.path.exists(path): + shutil.move(path, output_path) + + +def update_sdk_to_deveco(sdk_path, api_version): + deveco_sdk_path = configs.get('deveco_sdk_path') + deveco_sdk_version_path = os.path.join(deveco_sdk_path, api_version) + for sdk_item in os.listdir(deveco_sdk_path): + if sdk_item.startswith(f'{api_version}-'): + shutil.rmtree(os.path.join(deveco_sdk_path, sdk_item)) + if os.path.exists(deveco_sdk_version_path): + shutil.move(deveco_sdk_version_path, + deveco_sdk_version_path + '-' + utils.get_time_string()) + for item in os.listdir(sdk_path): + if item != '.DS_Store': + if utils.is_mac(): + if item == 'toolchains': + utils.add_executable_permission( + os.path.join(sdk_path, item, 'restool')) + utils.add_executable_permission( + os.path.join(sdk_path, item, 'ark_disasm')) + elif item == 'ets': + utils.add_executable_permission(os.path.join(sdk_path, item, 'build-tools', + 'ets-loader', 'bin', 'ark', 'build-mac', 'bin', + 'es2abc')) + utils.add_executable_permission(os.path.join(sdk_path, item, 'build-tools', + 'ets-loader', 'bin', 'ark', 'build-mac', 'legacy_api8', + 'bin', 
'js2abc')) + elif item == 'js': + utils.add_executable_permission(os.path.join(sdk_path, item, 'build-tools', + 'ace-loader', 'bin', 'ark', 'build-mac', 'bin', + 'es2abc')) + utils.add_executable_permission(os.path.join(sdk_path, item, 'build-tools', + 'ace-loader', 'bin', 'ark', 'build-mac', 'legacy_api8', + 'bin', 'js2abc')) + shutil.move(os.path.join(sdk_path, item), + os.path.join(deveco_sdk_version_path, item)) + # 关闭arkts语法规则校验 + utils.close_arkts_code_linter() + + +def burn_system_image(): + RKDevTool_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), '../auto_xts_test/RKDevTool') + os.chdir(RKDevTool_path) + cmd = 'hdc shell reboot bootloader' + subprocess.run(cmd, shell=False) + utils.auto_burn() + print('burn_system_image_success') + + +def parse_args(): + parser = argparse.ArgumentParser() + parser.add_argument('--downloadUrl', type=str, dest='download_url', default=None, + nargs='+', + help='specify what you want to download') + return parser.parse_args() + + +if __name__ == '__main__': + # 1. 读取配置 && 参数解析 + parse_configs() + parse_args() + arguments = parse_args() + # 如果输入了参数(下载url)就会自动去下载文件到配置中指定的文件夹,并退出。 + if arguments.download_url is not None: + for download_url in arguments.download_url: + print(f'download url: {download_url}') + download_simple(download_url) + # 下载并解压完后退出程序 + sys.exit(1) + + # 2. 检查环境(deveco && 烧录工具) + if not prepare_test_dev(): + # logging.error('The test environment is incomplete, please check') + print("The test environment is incomplete, please check") + sys.exit(1) + + # 3. 下载镜像 + if os.path.exists('download_url.txt'): + os.remove('download_url.txt') + download_task_list = create_download_task() + with open('download_url.txt', 'a') as file: + for task in download_task_list: + print(task.name) + # a. 获取下载的url地址 + download_url = utils.get_download_url(task.name) + # b. 下载压缩包到指定目录 && 解压 + download_zip_file(task.name, download_url, task.path) + # c. 覆盖到对应文件夹 + temp_file = task.path + '_temp' + updata_to_output_path(task.name, temp_file, task.output_path) + file.write(f'{task.name}, {download_url}\n') + file.write('download all tasks successfully!!!') + # 4. 
烧录镜像到板子中 + burn_system_image() + print('complete all tasks successfully') diff --git a/test/scripts/download/utils.py b/test/scripts/download/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..ce10babca9b080b6caa33732bc5322b64fc33997 --- /dev/null +++ b/test/scripts/download/utils.py @@ -0,0 +1,204 @@ +import gzip +import os +import sys +import logging +import datetime +import zipfile +from urllib.parse import urlparse, unquote +import time +import json +import stat + +import httpx +import requests +import tqdm +from pywinauto.application import Application + + +def is_windows(): + return sys.platform == 'win32' or sys.platform == 'cygwin' + + +def is_mac(): + return sys.platform == 'darwin' + + +def is_linux(): + return sys.platform == 'linux' + + +def get_time_string(): + return time.strftime('%Y%m%d-%H%M%S') + + +def get_encoding(): + if is_windows(): + return 'utf-8' + else: + return sys.getfilesystemencoding() + + +def get_tool(tar_name, url, output_path): + print(f"Getting {tar_name} from {url}") + r = requests.get(url, stream=True) + total = int(r.headers.get('content-length'), 0) + flags = os.O_WRONLY | os.O_CREAT + modes = stat.S_IWUSR | stat.S_IRUSR + with os.fdopen(os.open(f"{output_path}.zip", flags, modes), "wb") as f, tqdm( + desc=f"{tar_name}", + total=total, + unit='iB', + unit_scale=True, + unit_divisor=1024, + ) as bar: + for byte in r.iter_content(chunk_size=1024): + size = f.write(byte) + bar.update(size) + with zipfile.ZipFile(f"{output_path}.zip", 'r') as zfile: + zfile.extractall(path=f"{output_path}") + + +def get_download_url(task_name): + now_time = datetime.datetime.now().strftime('%Y%m%d%H%M%S') + last_hour = (datetime.datetime.now() + + datetime.timedelta(hours=-24)).strftime('%Y%m%d%H%M%S') + url = 'http://ci.openharmony.cn/api/daily_build/build/tasks' + downnload_job = { + 'pageNum': 1, + 'pageSize': 1000, + 'startTime': '', + 'endTime': '', + 'projectName': 'openharmony', + 'branch': 'master', + 'component': '', + 'deviceLevel': '', + 'hardwareBoard': '', + 'buildStatus': '', + 'buildFailReason': '', + 'testResult': '', + } + downnload_job['startTime'] = str(last_hour) + downnload_job['endTime'] = str(now_time) + post_result = requests.post(url, json=downnload_job) + post_data = json.loads(post_result.text) + sdk_url_suffix = '' + for ohos_sdk_list in post_data['data']['dailyBuildVos']: + try: + if get_remote_download_name(task_name) in ohos_sdk_list['obsPath']: + sdk_url_suffix = ohos_sdk_list['obsPath'] + break + except BaseException as err: + logging.error(err) + download_url = 'http://download.ci.openharmony.cn/' + sdk_url_suffix + return download_url + + +def download(download_url, temp_file, temp_file_name): + with httpx.stream('GET', download_url) as response: + with open(temp_file, "wb") as temp: + total_length = int(response.headers.get("content-length")) + with tqdm.tqdm(total=total_length, unit="B", unit_scale=True) as pbar: + pbar.set_description(temp_file_name) + chunk_sum = 0 + count = 0 + for chunk in response.iter_bytes(): + temp.write(chunk) + chunk_sum += len(chunk) + percentage = chunk_sum / total_length * 100 + while str(percentage).startswith(str(count)): + if str(percentage).startswith('100'): + logging.info(f'{temp_file_name} Download Complete {percentage: .1f}%') + break + else: + logging.info(f'{temp_file_name} Downloading... 
{percentage: .1f}%') + count += 1 + pbar.update(len(chunk)) + + +def end_burn(dlg): + timeout = 300 + while True: + if timeout < 0: + return + mode = dlg.window(control_type="Tab").window_text() + if mode == 'Found One MASKROM Device': + dlg.Button16.click() + print("image burnning finished") + return + else: + print("please wait for a while...") + time.sleep(5) + timeout -= 5 + + +def auto_burn(): + app = Application(backend='uia').start('RKDevTool.exe') + dlg = app.top_window() + + while True: + mode = dlg.window(control_type="Tab").window_text() + if mode == 'Found One LOADER Device': + print('start burning') + dlg.window(title="Run").click() + time.sleep(100) + end_burn(dlg) + return + else: + time.sleep(1) + + +def check_gzip_file(file_path): + try: + with gzip.open(file_path, 'rb') as gzfile: + gzfile.read(1) + except Exception as e: + logging.exception(e) + return False + return True + + +def get_remote_download_name(task_name): + if is_windows(): + if task_name == 'sdk': + return 'ohos-sdk-full.tar.gz' + if task_name == 'dayu200': + return 'dayu200.tar.gz' + elif is_mac(): + if task_name == 'sdk': + return 'L2-MAC-SDK-FULL.tar.gz' + else: + print('Unsuport platform to get sdk from daily build') + return '' + + +def get_api_version(json_path): + with open(json_path, 'r') as uni: + uni_cont = uni.read() + uni_data = json.loads(uni_cont) + api_version = uni_data['apiVersion'] + return api_version + + +def add_executable_permission(file_path): + current_mode = os.stat(file_path).st_mode + new_mode = current_mode | 0o111 + os.chmod(file_path, new_mode) + + +def parse_file_name(url): + parsed_url = urlparse(url) + path = unquote(parsed_url.path) + file_name = os.path.basename(path) + return file_name + + +def close_arkts_code_linter(): + ets_checker_path = 'D:\\enviorment\\SDK\\openHarmony_SDK\\11\\ets\\build-tools\\ets-loader\\lib\\ets_checker.js' + with open(ets_checker_path, 'r+') as modified_file: + content = modified_file.read() + content = content.replace( + 'return _main.partialUpdateConfig.executeArkTSLinter?_main.partialUpdateConfig.standardArkTSLinter&&isStandardMode()?_do_arkTS_linter.ArkTSLinterMode.STANDARD_MODE:_do_arkTS_linter.ArkTSLinterMode.COMPATIBLE_MODE:_do_arkTS_linter.ArkTSLinterMode.NOT_USE', + 'return _do_arkTS_linter.ArkTSLinterMode.COMPATIBLE_MODE') + modified_file.seek(0) + modified_file.write(content) + modified_file.truncate() diff --git a/test/scripts/get_commit_log/__pycache__/result.cpython-310.pyc b/test/scripts/get_commit_log/__pycache__/result.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d43ad9b5d72d499af3f6f8f85875676a4e4b570e Binary files /dev/null and b/test/scripts/get_commit_log/__pycache__/result.cpython-310.pyc differ diff --git a/test/scripts/get_commit_log/config.yaml b/test/scripts/get_commit_log/config.yaml new file mode 100644 index 0000000000000000000000000000000000000000..9da7db50667787ca547e4b544ef45084fd9e8b7a --- /dev/null +++ b/test/scripts/get_commit_log/config.yaml @@ -0,0 +1,7 @@ +crawl_max_page: 10 +# repo list +repo_list: + - 'arkcompiler_ets_frontend' + - 'developtools_ace_ets2bundle' + - 'third_party_typescript' + - 'arkcompiler_runtime_core' \ No newline at end of file diff --git a/test/scripts/get_commit_log/get_commit_log.py b/test/scripts/get_commit_log/get_commit_log.py new file mode 100644 index 0000000000000000000000000000000000000000..b7314e473cbc7bfbc64bfc418d21afac5db1e33c --- /dev/null +++ b/test/scripts/get_commit_log/get_commit_log.py @@ -0,0 +1,105 @@ +import argparse +import 
os + +import yaml +import requests +from lxml import etree +from datetime import datetime, timedelta, time + +from result import get_result + + +configs = {} + + +def parse_config(): + config_file_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'config.yaml') + with open(config_file_path, 'r', encoding='utf-8') as config_file: + global configs + configs = yaml.safe_load(config_file) + + +def get_url(name, page): + url_prefix = 'https://gitee.com/openharmony/' + url_suffix = f'/pulls?assignee_id=&author_id=&label_ids=&label_text=&milestone_id=&page={page}&priority=&project_type=&scope=&search=&single_label_id=&single_label_text=&sort=closed_at+desc&status=merged&target_project=&tester_id=' + url = url_prefix + name + url_suffix + + return url + + +def get_html(url): + headers = { + 'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.110 Safari/537.3' + } + try: + response = requests.get(url, headers=headers, verify=False) + if response.status_code == 200: + return response.text + except Exception as e: + print(e) + return None + + +def crawl_committer(repo_list, start_time, end_time): + crawl_max_page = configs.get('crawl_max_page') + data_file = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'data.txt') + if os.path.exists(data_file): + os.remove(data_file) + for repo_name in repo_list: + for i in range(1, crawl_max_page + 1): + url = get_url(repo_name, str(i)) + print(url) + html = get_html(url) + tree = etree.HTML(html) + commit_list = tree.xpath('/html/body/div[2]/div[2]/div[2]/div[2]/div') + for commit_task in commit_list: + title = commit_task.xpath('.//div[1]/a/text()')[0] + committer = commit_task.xpath('.//div[3]/span[2]/a/span/text()')[0] + commit_time_str = commit_task.xpath('.//div[3]/span[4]/span/text()')[0].strip() + pr_link = commit_task.xpath('.//div[1]/a/@href')[0] + + time = datetime.strptime(commit_time_str, '%Y-%m-%d %H:%M') + if start_time <= time <= end_time: + print("在预期的时间段内") + print(title) + print(committer) + print(commit_time_str) + print(pr_link) + print('---------------------------------') + with open(data_file, 'a', encoding='utf-8') as file: + file.write(f"{repo_name}, {title}, {committer}, {commit_time_str}, {pr_link}\n") + + +def parse_args(): + parser = argparse.ArgumentParser() + parser.add_argument('--startTime', type=str, dest='start_time', default=None, + help='specify crawl start time') + parser.add_argument('--repoName', type=str, dest='repo_name', default=None, + nargs='+', + help='specify which repo you want to crawl') + return parser.parse_args() + + +if __name__ == '__main__': + parse_config() + end_time = datetime.now() + yesterday = end_time - timedelta(days=1) + start_time = datetime(yesterday.year, yesterday.month, yesterday.day, 0, 0, 0) + repo_list = configs.get('repo_list') + + arguments = parse_args() + if arguments.start_time is not None: + time_str = datetime.strptime(arguments.start_time, '%Y-%m-%d') + start_time = datetime.combine(time_str, time.min) + end_time = start_time + timedelta(days=1) + print('爬取开始时间', start_time) + print('爬取结束时间', end_time) + if arguments.repo_name is not None: + repo_list = arguments.repo_name + print(repo_list) + + crawl_committer(repo_list, start_time, end_time) + get_result() + + + diff --git a/test/scripts/get_commit_log/result.py b/test/scripts/get_commit_log/result.py new file mode 100644 index 0000000000000000000000000000000000000000..df910449ceb346a8896b46aa1ff50d13b1b4cb26 --- /dev/null +++ 
b/test/scripts/get_commit_log/result.py @@ -0,0 +1,36 @@ +import os + +from jinja2 import Environment, FileSystemLoader + + +def get_result(): + data_file = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'data.txt') + with open(data_file, 'r', encoding='utf-8') as file: + lines = file.readlines() + + data = [] + for line in lines: + line = line.strip() + values = line.split(',') + + item = { + 'project': values[0], + 'description': values[1], + 'author': values[2], + 'time': values[3], + 'link': f'https://gitee.com//{values[4]}'.replace(' ', '') + } + data.append(item) + + template_dir = os.path.dirname(__file__) + env = Environment(loader=FileSystemLoader(template_dir)) + template = env.get_template('template.html') + output = template.render(data=data) + + commit_log_file = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'commit_log.html') + with open(commit_log_file, 'w', encoding='utf-8') as file: + file.write(output) + + +if __name__ == '__main__': + get_result() diff --git a/test/scripts/get_commit_log/template.html b/test/scripts/get_commit_log/template.html new file mode 100644 index 0000000000000000000000000000000000000000..a3e29f842647d02a72708b4d3b0e5a4d0b48fade --- /dev/null +++ b/test/scripts/get_commit_log/template.html @@ -0,0 +1,97 @@ + + + + + 提交记录 + + + +
+ [template.html: ~97 lines of HTML/CSS rendering a "提交记录" (commit log) page — a styled table captioned 提交记录 with header cells 项目 (project) / 描述 (description) / 作者 (author) / 时间 (time) / 链接 (link), and a {% for item in data %} loop emitting {{ item.project }}, {{ item.description }}, {{ item.author }}, {{ item.time }} and a "点击查看" (view) link per row; raw markup not reproduced here]
+ + \ No newline at end of file diff --git a/test/scripts/sdk_test/entry.py b/test/scripts/sdk_test/entry.py index fea78fdc33a431f70171cb9f939b92a2829aa4e8..99998370453417df62bd1b8bb984b5d8ce346e43 100644 --- a/test/scripts/sdk_test/entry.py +++ b/test/scripts/sdk_test/entry.py @@ -25,13 +25,12 @@ import utils def run(): - sdk_url = utils.get_sdk_url() - cmd = ['python', 'run.py'] - cmd.extend(['--sdkPath', sdk_url]) cmd.extend(['--hapMode', 'all']) cmd.extend(['--compileMode', 'all']) cmd.extend(['--logLevel', 'debug']) + # 去除下载镜像,抽象到download一起;新增镜像烧录与运行时截图验证(需要连接开发板) + cmd.extend(['--runHaps', 'False']) cmd.extend(['--logFile', 'log' + '_' + utils.get_time_string() + '.txt']) current_dir = os.path.dirname(os.path.abspath(__file__)) diff --git a/test/scripts/sdk_test/execution.py b/test/scripts/sdk_test/execution.py index bcdeb614e66b7eb8a41eb7addb8d2cecb95764bc..d29108379041e3269a1f379b5d8680abbf024001 100644 --- a/test/scripts/sdk_test/execution.py +++ b/test/scripts/sdk_test/execution.py @@ -24,13 +24,14 @@ import re import shutil import signal import subprocess +import time import zipfile - import json5 - import options import utils +from utils import get_running_screenshot, verify_runtime, out_file_backup + class IncrementalTest: @staticmethod @@ -47,8 +48,11 @@ class IncrementalTest: logging.debug(f"new module hap file: {new_module_name_output_file}") + picture_suffix = 'debug' + if not is_debug: + picture_suffix = 'release' passed = validate(inc_task, task, is_debug, stdout, - stderr, new_module_name_output_file) + stderr, f'incremental_compile_change_module_name_{picture_suffix}', new_module_name_output_file) logging.debug(f"validate new module hap file, passed {passed}") if not passed: return @@ -65,6 +69,7 @@ class IncrementalTest: modules_abc_path = os.path.join(abc_path, 'modules.abc') modules_pa = disasm_abc(task, modules_abc_path) if not modules_pa or not os.path.exists(modules_pa): + out_file_backup(task, 'changeModuleName') inc_info.result = options.TaskResult.failed inc_info.error_message = f'ark_disasm failed, module name change not verified' return @@ -172,7 +177,10 @@ class IncrementalTest: logging.info(f"==========> Running {test_name} for task: {task.name}") [stdout, stderr] = compile_project(task, is_debug) - passed = validate(inc_task, task, is_debug, stdout, stderr) + picture_suffix = 'debug' + if not is_debug: + picture_suffix = 'release' + passed = validate(inc_task, task, is_debug, stdout, stderr, f'incremental_compile_no_change_{picture_suffix}') if passed: IncrementalTest.validate_compile_incremental_file( task, inc_task, is_debug, []) @@ -193,7 +201,10 @@ class IncrementalTest: 'patch_lines_2').get('tail')) [stdout, stderr] = compile_project(task, is_debug) - passed = validate(inc_task, task, is_debug, stdout, stderr) + picture_suffix = 'debug' + if not is_debug: + picture_suffix = 'release' + passed = validate(inc_task, task, is_debug, stdout, stderr, f'incremental_compile_add_oneline_{picture_suffix}') if passed: modified_files = [os.path.join(*modify_file_item)] IncrementalTest.validate_compile_incremental_file( @@ -238,7 +249,10 @@ class IncrementalTest: file.write(patch_lines.get('tail')) [stdout, stderr] = compile_project(task, is_debug) - passed = validate(inc_task, task, is_debug, stdout, stderr) + picture_suffix = 'debug' + if not is_debug: + picture_suffix = 'release' + passed = validate(inc_task, task, is_debug, stdout, stderr, f'incremental_compile_add_file_{picture_suffix}') if passed: modified_files = [os.path.join(*modify_file_item)] 
IncrementalTest.validate_compile_incremental_file( @@ -256,7 +270,10 @@ class IncrementalTest: # this test is after 'add_file', and in test 'add_file' already done remove file, # so here just call compile [stdout, stderr] = compile_project(task, is_debug) - passed = validate(inc_task, task, is_debug, stdout, stderr) + picture_suffix = 'debug' + if not is_debug: + picture_suffix = 'release' + passed = validate(inc_task, task, is_debug, stdout, stderr, f'incremental_compile_delete_file_{picture_suffix}') if passed: modify_file_item = task.inc_modify_file modified_files = [os.path.join(*modify_file_item)] @@ -271,7 +288,10 @@ class IncrementalTest: logging.info(f"==========> Running {test_name} for task: {task.name}") hap_mode = not is_debug [stdout, stderr] = compile_project(task, hap_mode) - validate(inc_task, task, hap_mode, stdout, stderr) + picture_suffix = 'debug' + if not is_debug: + picture_suffix = 'release' + validate(inc_task, task, hap_mode, stdout, stderr, f'incremental_compile_reverse_hap_mode_{picture_suffix}') @staticmethod def compile_incremental_modify_module_name(task, is_debug): @@ -312,6 +332,8 @@ class IncrementalTest: [stdout, stderr] = compile_project(task, is_debug) IncrementalTest.validate_module_name_change( task, inc_task, is_debug, stdout, stderr, new_module_name) + passed = IncrementalTest.validate_module_name_change( + task, inc_task, is_debug, stdout, stderr, new_module_name) except Exception as e: logging.exception(e) finally: @@ -520,6 +542,7 @@ class OtherTest: '-p', 'module=entry@ohosTest', 'assembleHap'] [stdout, stderr] = compile_project(task, True, cmd) [is_success, time_string] = is_compile_success(stdout) + if not is_success: test_info.result = options.TaskResult.failed test_info.error_message = stderr @@ -575,6 +598,7 @@ def disasm_abc(task, abc_file): def is_abc_debug_info_correct(task, abc_file, is_debug): pa_file = disasm_abc(task, abc_file) if not os.path.exists(pa_file): + out_file_backup(task, 'checkEsModuleOutPuts') logging.error(f"pa file not exist: {pa_file}") return False @@ -751,11 +775,11 @@ def validate_compile_output(info, task, is_debug, output_file=''): return passed -def run_compile_output(info, task_path): - # TODO: - # 1)install hap - # 2)run hap and verify - return False +# def run_compile_output(task, info, task_path): +def run_compile_output(task, picture_name): + get_running_screenshot(task, picture_name) + time.sleep(2) + return verify_runtime(task, picture_name) def is_compile_success(compile_stdout): @@ -763,11 +787,10 @@ def is_compile_success(compile_stdout): match_result = re.search(pattern, compile_stdout) if not match_result: return [False, ''] - return [True, match_result.group(0)] -def validate(compilation_info, task, is_debug, stdout, stderr, output_file=''): +def validate(compilation_info, task, is_debug, stdout, stderr, picture_name, output_file=''): info = {} if is_debug: info = compilation_info.debug_info @@ -784,7 +807,7 @@ def validate(compilation_info, task, is_debug, stdout, stderr, output_file=''): passed = validate_compile_output(info, task, is_debug, output_file) if options.arguments.run_haps: - passed &= run_compile_output(info) + passed &= run_compile_output(task, picture_name) if passed: collect_compile_time(info, time_string) @@ -851,8 +874,11 @@ def compile_incremental(task, is_debug): return if options.arguments.compile_mode == 'incremental': + picture_suffix = 'debug' + if not is_debug: + picture_suffix = 'release' passed = validate(task.full_compilation_info, - task, is_debug, stdout, stderr) + 
task, is_debug, stdout, stderr, f'incremental_compile_first{picture_suffix}') if not passed: logging.error( "Incremental compile failed due to first compile failed!") @@ -939,14 +965,14 @@ def execute_full_compile(task): if options.arguments.hap_mode in ['all', 'release']: [stdout, stderr] = compile_project(task, False) passed = validate(task.full_compilation_info, - task, False, stdout, stderr) + task, False, stdout, stderr, 'full_compile_release') if passed: backup_compile_output(task, False) clean_compile(task) if options.arguments.hap_mode in ['all', 'debug']: [stdout, stderr] = compile_project(task, True) passed = validate(task.full_compilation_info, - task, True, stdout, stderr) + task, True, stdout, stderr, 'full_compile_debug') if passed: backup_compile_output(task, True) clean_compile(task) diff --git a/test/scripts/sdk_test/options.py b/test/scripts/sdk_test/options.py index 0f5e58b17399b65a2446ea83e70d46186452d295..a6ffade15f274d9b735cb3793b837b227d389915 100644 --- a/test/scripts/sdk_test/options.py +++ b/test/scripts/sdk_test/options.py @@ -24,7 +24,6 @@ import os from enum import Enum import yaml - import json5 import utils @@ -73,12 +72,14 @@ class TestTask: def __init__(self): self.name = '' self.path = '' + self.bundle_name = '' + self.ability_name = '' self.type = '' self.build_path = [] self.output_hap_path = '' + self.output_hap_path_signed = '' self.output_app_path = '' self.inc_modify_file = [] - self.full_compilation_info = FullCompilationInfo() self.incre_compilation_info = {} self.other_tests = {} @@ -132,7 +133,7 @@ def get_ark_disasm_path(task_path): profile_file = os.path.join(task_path, 'build-profile.json5') with open(profile_file, 'r') as file: profile_data = json5.load(file) - return os.path.join(sdk_path, str(profile_data['app']['products'][0]['compileSdkVersion']), + return os.path.join(sdk_path, str(profile_data['app']['compileSdkVersion']), 'toolchains', ark_disasm) @@ -156,10 +157,13 @@ def create_test_tasks(): task = TestTask() task.name = hap['name'] task.path = hap['path'] + task.bundle_name = hap['bundle_name'] + task.ability_name = hap['ability_name'] task.type = hap['type'] task.build_path = hap['build_path'] task.cache_path = hap['cache_path'] task.output_hap_path = hap['output_hap_path'] + task.output_hap_path_signed = hap['output_hap_path_signed'] task.output_app_path = hap['output_app_path'] task.inc_modify_file = hap['inc_modify_file'] task.backup_info.cache_path = os.path.join(task.path, 'test_suite_cache') diff --git a/test/scripts/sdk_test/preparation.py b/test/scripts/sdk_test/preparation.py index 32cdc805e25f19b21e0473e84473bbe87ec5dc56..321def01ff088c4ae39ee4bcf04912bf3c000807 100644 --- a/test/scripts/sdk_test/preparation.py +++ b/test/scripts/sdk_test/preparation.py @@ -18,17 +18,12 @@ limitations under the License. 
Description: prepare environment for test """ -import logging + import os import shutil -import tarfile -import zipfile - -import validators import options -from utils import is_linux, is_mac, get_time_string, add_executable_permission -from utils import get_api_version, check_gzip_file, download, get_remote_sdk_name +from utils import is_mac def setup_env(): @@ -46,125 +41,21 @@ def setup_env(): os.environ['JAVA_HOME'] = java_home -def check_deveco_env(): - if is_linux(): - return False - - java_path = os.path.join(options.configs.get('deveco_path'), 'jbr') - if not os.path.exists(java_path): - logging.error("Java not found!") - return False - - if not os.path.exists(options.configs.get('node_js_path')): - logging.error("Node js not found!") - return False - - return True - - -def get_sdk_from_remote(sdk_url): - deveco_sdk_path = options.configs.get('deveco_sdk_path') - temp_floder = deveco_sdk_path + '_temp' - sdk_name = get_remote_sdk_name() - sdk_zip_path_list = [temp_floder, 'ohos-sdk', 'windows'] - if is_mac(): - sdk_zip_path_list = [temp_floder, 'sdk', - 'packages', 'ohos-sdk', 'darwin'] - sdk_temp_file = os.path.join(temp_floder, sdk_name) - - if os.path.exists(temp_floder): - shutil.rmtree(temp_floder) - os.mkdir(temp_floder) - download(sdk_url, sdk_temp_file, sdk_name) - if not check_gzip_file(sdk_temp_file): - logging.error('The downloaded file is not a valid gzip file.') - return '', '' - with tarfile.open(sdk_temp_file, 'r:gz') as tar: - tar.extractall(temp_floder) - - sdk_floder = os.path.join(temp_floder, 'SDK_TEMP') - sdk_zip_path = os.path.join(*sdk_zip_path_list) - for item in os.listdir(sdk_zip_path): - if item != '.DS_Store': - logging.info(f'Unpacking {item}') - with zipfile.ZipFile(os.path.join(sdk_zip_path, item)) as zip_file: - zip_file.extractall(os.path.join(sdk_floder)) - logging.info(f'Decompression {item} completed') - - api_version = get_api_version(os.path.join( - *[sdk_floder, 'ets', 'oh-uni-package.json'])) - return sdk_floder, api_version - - -def update_sdk_to_deveco(sdk_path, api_version): - deveco_sdk_path = options.configs.get('deveco_sdk_path') - deveco_sdk_version_path = os.path.join(deveco_sdk_path, api_version) - for sdk_item in os.listdir(deveco_sdk_path): - if sdk_item.startswith(f'{api_version}-'): - shutil.rmtree(os.path.join(deveco_sdk_path, sdk_item)) - if os.path.exists(deveco_sdk_version_path): - shutil.move(deveco_sdk_version_path, - deveco_sdk_version_path + '-' + get_time_string()) - for item in os.listdir(sdk_path): - if item != '.DS_Store': - if is_mac(): - if item == 'toolchains': - add_executable_permission( - os.path.join(sdk_path, item, 'restool')) - add_executable_permission( - os.path.join(sdk_path, item, 'ark_disasm')) - elif item == 'ets': - add_executable_permission(os.path.join(sdk_path, item, 'build-tools', - 'ets-loader', 'bin', 'ark', 'build-mac', 'bin', 'es2abc')) - add_executable_permission(os.path.join(sdk_path, item, 'build-tools', - 'ets-loader', 'bin', 'ark', 'build-mac', 'legacy_api8', 'bin', 'js2abc')) - elif item == 'js': - add_executable_permission(os.path.join(sdk_path, item, 'build-tools', - 'ace-loader', 'bin', 'ark', 'build-mac', 'bin', 'es2abc')) - add_executable_permission(os.path.join(sdk_path, item, 'build-tools', - 'ace-loader', 'bin', 'ark', 'build-mac', 'legacy_api8', 'bin', 'js2abc')) - shutil.move(os.path.join(sdk_path, item), - os.path.join(deveco_sdk_version_path, item)) - - -def prepare_sdk(): - sdk_arg = options.arguments.sdk_path - if sdk_arg == '': - return True # use the sdk specified in 
config.yaml - - api_version = '9' - sdk_path = sdk_arg - if validators.url(sdk_arg): - sdk_path, api_version = get_sdk_from_remote(sdk_arg) - - if not sdk_path or not os.path.exists(sdk_path): - return False - - update_sdk_to_deveco(sdk_path, api_version) - return True - - -def prepare_image(): - if options.arguments.run_haps: - return True - - # TODO: 1)download image, 2)flash image - - return True - - def clean_log(): output_log_file = options.configs.get('log_file') daily_report_file = options.configs.get('output_html_file') + picture_dic = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'pictures') if os.path.exists(output_log_file): os.remove(output_log_file) if os.path.exists(daily_report_file): os.remove(daily_report_file) + if os.path.exists(picture_dic): + shutil.rmtree(picture_dic) def prepare_test_env(): clean_log() - prepared = check_deveco_env() setup_env() - prepared = prepared and prepare_sdk() and prepare_image() - return prepared + + + diff --git a/test/scripts/sdk_test/result.py b/test/scripts/sdk_test/result.py index be8167ee8f7697e3e66d863e84ba03c50d049ab9..2b41adc7d9200bc1408cce0301afcfd6a68ae65c 100644 --- a/test/scripts/sdk_test/result.py +++ b/test/scripts/sdk_test/result.py @@ -22,7 +22,6 @@ import copy import logging import os import time - import pandas import options diff --git a/test/scripts/sdk_test/run.py b/test/scripts/sdk_test/run.py index 35077749eded766dde735b1255dc2b9d83721ba3..54c4ad62b6a72f37f90c4c109d9b4b222e29ec00 100644 --- a/test/scripts/sdk_test/run.py +++ b/test/scripts/sdk_test/run.py @@ -25,23 +25,21 @@ import time from execution import execute from options import process_options -from preparation import prepare_test_env from result import process_test_result - +from preparation import prepare_test_env def run(): old_env = os.environ.copy() try: start_time = time.time() test_tasks = process_options() + + prepare_test_env() + if not test_tasks: logging.error("No test task found, test suite exit!") sys.exit(1) - if not prepare_test_env(): - logging.error("Prepare test environment failed, test suite exit!") - sys.exit(1) - execute(test_tasks) process_test_result(test_tasks, start_time) except Exception as e: diff --git a/test/scripts/sdk_test/utils.py b/test/scripts/sdk_test/utils.py index fdf4298aa01049f47e3e7cd8277362d6f33c51e1..5835d5637abb4bc731881c4ef804561db8517040 100644 --- a/test/scripts/sdk_test/utils.py +++ b/test/scripts/sdk_test/utils.py @@ -18,7 +18,6 @@ limitations under the License. 
Description: utils for test suite """ -import datetime import json import logging import os @@ -28,9 +27,7 @@ import subprocess import sys import gzip -import httpx -import requests -import tqdm +from PIL import Image def get_log_level(arg_log_level): @@ -84,41 +81,6 @@ def is_esmodule(hap_type): return 'stage' in hap_type -def get_sdk_url(): - now_time = datetime.datetime.now().strftime('%Y%m%d%H%M%S') - last_hour = (datetime.datetime.now() + - datetime.timedelta(hours=-24)).strftime('%Y%m%d%H%M%S') - url = 'http://ci.openharmony.cn/api/ci-backend/ci-portal/v1/dailybuilds' - downnload_job = { - 'pageNum': 1, - 'pageSize': 1000, - 'startTime': '', - 'endTime': '', - 'projectName': 'openharmony', - 'branch': 'master', - 'component': '', - 'deviceLevel': '', - 'hardwareBoard': '', - 'buildStatus': '', - 'buildFailReason': '', - 'testResult': '', - } - downnload_job['startTime'] = str(last_hour) - downnload_job['endTime'] = str(now_time) - post_result = requests.post(url, data=downnload_job) - post_data = json.loads(post_result.text) - sdk_url_suffix = '' - for ohos_sdk_list in post_data['result']['dailyBuildVos']: - try: - if get_remote_sdk_name() in ohos_sdk_list['obsPath']: - sdk_url_suffix = ohos_sdk_list['obsPath'] - break - except BaseException as err: - logging.error(err) - sdk_url = 'http://download.ci.openharmony.cn/' + sdk_url_suffix - return sdk_url - - def get_api_version(json_path): with open(json_path, 'r') as uni: uni_cont = uni.read() @@ -143,28 +105,6 @@ def is_file_timestamps_same(file_a, file_b): return file_a_mtime == file_b_mtime -def download(url, temp_file, temp_file_name): - with httpx.stream('GET', url) as response: - with open(temp_file, "wb") as temp: - total_length = int(response.headers.get("content-length")) - with tqdm.tqdm(total=total_length, unit="B", unit_scale=True) as pbar: - pbar.set_description(temp_file_name) - chunk_sum = 0 - count = 0 - for chunk in response.iter_bytes(): - temp.write(chunk) - chunk_sum += len(chunk) - percentage = chunk_sum / total_length * 100 - while str(percentage).startswith(str(count)): - if str(percentage).startswith('100'): - logging.info(f'SDK Download Complete {percentage: .1f}%') - break - else: - logging.info(f'SDK Downloading... 
{percentage: .1f}%') - count += 1 - pbar.update(len(chunk)) - - def add_executable_permission(file_path): current_mode = os.stat(file_path).st_mode new_mode = current_mode | 0o111 @@ -179,3 +119,77 @@ def get_remote_sdk_name(): else: logging.error('Unsuport platform to get sdk from daily build') return '' + + +def get_running_screenshot(task, image_name): + subprocess.run('hdc shell power-shell wakeup;power-shell setmode 602') + subprocess.run('hdc shell uinput -T -m 420 1000 420 400;uinput -T -m 420 400 420 1000') + + build_path = os.path.join(task.path, *task.build_path) + out_path = os.path.join(build_path, *task.output_hap_path_signed) + + subprocess.run(f'hdc install {out_path}') + subprocess.run(f'hdc shell aa start -a {task.ability_name} -b {task.bundle_name}') + time.sleep(3) + + screen_path = f'/data/local/tmp/{image_name}.jpeg' + subprocess.run(f'hdc shell snapshot_display -f {screen_path}') + time.sleep(3) + + subprocess.run(f'hdc file recv {screen_path} {image_name}.jpeg') + subprocess.run(f'hdc shell aa force-stop {task.bundle_name}') + subprocess.run(f'hdc shell bm uninstall -n {task.bundle_name}') + + pic_save_dic = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'pictures') + if not os.path.exists(pic_save_dic): + os.mkdir(pic_save_dic) + + pic_save_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), f'pictures\{task.name}') + if not os.path.exists(pic_save_path): + os.mkdir(pic_save_path) + + shutil.move(f'{image_name}.jpeg', pic_save_path) + + +def compare_screenshot(image1_path, image2_path, threshold=0.95): + image1 = Image.open(image1_path).convert('RGB') + + image2 = Image.open(image2_path).convert('RGB') + + image1.thumbnail((256, 256)) + image2.thumbnail((256, 256)) + + pixel1 = image1.load() + pixel2 = image2.load() + width, height = image1.size + + similar_pixels = 0 + total_pixels = width * height + + for x in range(width): + for y in range(height): + if pixel1[x, y] == pixel2[x, y]: + similar_pixels += 1 + + similarity = similar_pixels / total_pixels + + if similarity >= threshold: + return True + else: + return False + + +def verify_runtime(task, picture_name): + pic_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), f'pictures/{task.name}/{picture_name}.jpeg') + pic_path_reference = os.path.join(os.path.dirname(os.path.abspath(__file__)), f'pictures_reference/{task.name}/{picture_name}.jpeg') + passed = compare_screenshot(pic_path, pic_path_reference, threshold=0.95) + if not passed: + logging.error(f'{task.name} get error when runing') + return False + return True + + +def out_file_backup(task, test_name): + output_file_path = os.path.join(task.path, *task.build_path, 'outputs') + output_bak_path = f'{task.name}/{test_name}/{get_time_string()}' + shutil.copytree(output_file_path, output_bak_path) diff --git a/test/scripts/email_config.yaml b/test/scripts/send_email/email_config.yaml similarity index 97% rename from test/scripts/email_config.yaml rename to test/scripts/send_email/email_config.yaml index 329f3e79f2ef65b3a3f3b7f266e881620d976d14..55ab2eace2daa79e99c293cf2c16a8d7642965a0 100644 --- a/test/scripts/email_config.yaml +++ b/test/scripts/send_email/email_config.yaml @@ -1,35 +1,35 @@ -# Copyright (c) 2023 Huawei Device Co., Ltd. -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -user_name : "" -sender_email_address : "" -auth_code : "" -receiver_list : - - "" -smtp_server: "" -smtp_port: "25" - -xts_report_file : "./auto_xts_test/result/summary_report.html" -sdk_report_file : "./sdk_test/sdk_test_report.html" -perf_report_file : "./performance_test/mail_data/email_msg.html" -attatchment_files : - - "./auto_xts_test/result/details_report.html" - - "./auto_xts_test/result/failures_report.html" - - "./performance_test/mail_data/performance_logs.zip" -image_files: - "./performance_test/mail_data/debug_full_time.jpg": performance00 - "./performance_test/mail_data/debug_incremental_time.jpg": performance01 - "./performance_test/mail_data/debug_size.jpg": performance02 - "./performance_test/mail_data/release_full_time.jpg": performance10 - "./performance_test/mail_data/release_incremental_time.jpg": performance11 - "./performance_test/mail_data/release_size.jpg": performance12 +# Copyright (c) 2023 Huawei Device Co., Ltd. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +user_name : "" +sender_email_address : "" +auth_code : "" +receiver_list : + - "" +smtp_server: "" +smtp_port: "25" + +xts_report_file : "./auto_xts_test/result/summary_report.html" +sdk_report_file : "./sdk_test/sdk_test_report.html" +perf_report_file : "./performance_test/mail_data/email_msg.html" +attatchment_files : + - "./auto_xts_test/result/details_report.html" + - "./auto_xts_test/result/failures_report.html" + - "./performance_test/mail_data/performance_logs.zip" +image_files: + "./performance_test/mail_data/debug_full_time.jpg": performance00 + "./performance_test/mail_data/debug_incremental_time.jpg": performance01 + "./performance_test/mail_data/debug_size.jpg": performance02 + "./performance_test/mail_data/release_full_time.jpg": performance10 + "./performance_test/mail_data/release_incremental_time.jpg": performance11 + "./performance_test/mail_data/release_size.jpg": performance12 diff --git a/test/scripts/send_email.py b/test/scripts/send_email/send_email.py old mode 100755 new mode 100644 similarity index 95% rename from test/scripts/send_email.py rename to test/scripts/send_email/send_email.py index 07f7030f065291aca271c3e2356f3264c14c48bd..ebff9f747db367aa9aa85194a167b748c60cd98d --- a/test/scripts/send_email.py +++ b/test/scripts/send_email/send_email.py @@ -1,95 +1,95 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- -# -# Copyright (c) 2023 Huawei Device Co., Ltd. -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -import os -import smtplib - -from email.mime.image import MIMEImage -from email.mime.multipart import MIMEMultipart -from email.mime.text import MIMEText -import yaml - - -def add_content(content, file_name, test_part): - if file_name == "": - content += f'
{test_part} not complete yet' - return content - if not os.path.exists(file_name): - content += f'{test_part} run failed
' - return content - with open(file_name, 'r', encoding='utf-8') as f: - content += f.read() - return content - - -def add_attachment(msg, file_list): - for file in file_list: - if os.path.exists(file): - with open(file, 'rb') as f: - attachment = MIMEText(f.read(), 'base64', 'utf-8') - attachment['Content-Disposition'] = f'attachment; filename="{os.path.basename(file)}"' - msg.attach(attachment) - - -def add_image(msg, img_dic): - for path in img_dic: - if os.path.exists(path): - with open(path, 'rb') as f: - img = MIMEImage(f.read()) - img.add_header('Content-ID', img_dic[path]) - msg.attach(img) - - -def send_email(): - with open(r".\email_config.yaml", 'r') as f: - data = yaml.safe_load(f.read()) - - user_name = data["user_name"] - sender = data["sender_email_address"] - auth_code = data["auth_code"] - receiver = data["receiver_list"] - smtp_server = data["smtp_server"] - smtp_port = data["smtp_port"] - xts_test = data["xts_report_file"] - sdk_test = data["sdk_report_file"] - perf_test = data["perf_report_file"] - attachment_files = data["attatchment_files"] - image_files = data["image_files"] - - msg = MIMEMultipart() - msg['From'] = sender - msg['To'] = ", ".join(receiver) - msg['Subject'] = "Arkcompiler Test" - - html = "" - dividing_line = '
' - html = add_content(html, xts_test, "xts_test") - html += dividing_line - html = add_content(html, sdk_test, "sdk_test") - html += dividing_line - html = add_content(html, perf_test, "perf_test") - msg.attach(MIMEText(html, 'html', 'utf-8')) - add_attachment(msg, attachment_files) - add_image(msg, image_files) - smtp = smtplib.SMTP(smtp_server, smtp_port) - smtp.login(user_name, auth_code) - smtp.sendmail(sender, receiver, msg.as_string()) - smtp.quit() - - -if __name__ == "__main__": +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- +# +# Copyright (c) 2023 Huawei Device Co., Ltd. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +import os +import smtplib + +from email.mime.image import MIMEImage +from email.mime.multipart import MIMEMultipart +from email.mime.text import MIMEText +import yaml + + +def add_content(content, file_name, test_part): + if file_name == "": + content += f'
{test_part} not complete yet' + return content + if not os.path.exists(file_name): + content += f'{test_part} run failed
' + return content + with open(file_name, 'r', encoding='utf-8') as f: + content += f.read() + return content + + +def add_attachment(msg, file_list): + for file in file_list: + if os.path.exists(file): + with open(file, 'rb') as f: + attachment = MIMEText(f.read(), 'base64', 'utf-8') + attachment['Content-Disposition'] = f'attachment; filename="{os.path.basename(file)}"' + msg.attach(attachment) + + +def add_image(msg, img_dic): + for path in img_dic: + if os.path.exists(path): + with open(path, 'rb') as f: + img = MIMEImage(f.read()) + img.add_header('Content-ID', img_dic[path]) + msg.attach(img) + + +def send_email(): + with open(r"email_config.yaml", 'r') as f: + data = yaml.safe_load(f.read()) + + user_name = data["user_name"] + sender = data["sender_email_address"] + auth_code = data["auth_code"] + receiver = data["receiver_list"] + smtp_server = data["smtp_server"] + smtp_port = data["smtp_port"] + xts_test = data["xts_report_file"] + sdk_test = data["sdk_report_file"] + perf_test = data["perf_report_file"] + attachment_files = data["attatchment_files"] + image_files = data["image_files"] + + msg = MIMEMultipart() + msg['From'] = sender + msg['To'] = ", ".join(receiver) + msg['Subject'] = "Arkcompiler Test" + + html = "" + dividing_line = '
' + html = add_content(html, xts_test, "xts_test") + html += dividing_line + html = add_content(html, sdk_test, "sdk_test") + html += dividing_line + html = add_content(html, perf_test, "perf_test") + msg.attach(MIMEText(html, 'html', 'utf-8')) + add_attachment(msg, attachment_files) + add_image(msg, image_files) + smtp = smtplib.SMTP(smtp_server, smtp_port) + smtp.login(user_name, auth_code) + smtp.sendmail(sender, receiver, msg.as_string()) + smtp.quit() + + +if __name__ == "__main__": send_email() \ No newline at end of file diff --git a/test/scripts/timer.py b/test/scripts/timer.py index 99b2630bf42789975027e5486b0ad15c88400648..4c0efb7cd84cf491fa02f266e4eb41a55855e69d 100755 --- a/test/scripts/timer.py +++ b/test/scripts/timer.py @@ -14,7 +14,7 @@ # See the License for the specific language governing permissions and # limitations under the License. - +import argparse import os import subprocess import time @@ -22,25 +22,86 @@ import time import schedule +arguments = {} + + +def parse_args(): + parser = argparse.ArgumentParser() + parser.add_argument('--runTime', type=str, dest='run_time', default=None, + help='specify when to start the test') + parser.add_argument('--skipDownload', type=str, dest='skip_download', default=False, + help='specify whether to skip the download or not') + parser.add_argument('--downloadUrl', type=str, dest='download_url', default=None, + nargs='+', + help='specify what you want to download') + parser.add_argument('--startTime', type=str, dest='start_time', default=None, + help='specify crawl start time') + parser.add_argument('--repoName', type=str, dest='repo_name', default=None, + nargs='+', + help='specify which repo you want to crawl') + + global arguments + arguments = parser.parse_args() + + +def download_is_successful(): + if not os.path.exists('./download/download_url.txt'): + return False + with open('./download/download_url.txt', 'r') as file: + content = file.read() + if 'successfully' not in content: + return False + + return True + + +def start_download_task(): + if not arguments.skip_download: + download_command = ['python', './download/download.py'] + if arguments.download_url is not None: + download_command.extend(['--downloadUrl']) + for download_url in arguments.download_url: + download_command.extend([download_url]) + job(download_command) + + +def start_crawl_task(): + crawl_command = ['python', './get_commit_log/get_commit_log.py'] + if arguments.start_time is not None: + crawl_command.extend(['--startTime', arguments.start_time]) + if arguments.repo_name is not None: + crawl_command.extend(['--repoName']) + for repo_name in arguments.repo_name: + crawl_command.extend([repo_name]) + job(crawl_command) + + def job(cmd): subprocess.run(cmd, shell=False) def run(): - job(os.path.join(".", "auto_xts_test", "run.bat")) - job(f'python {os.path.join(".", "sdk_test", "entry.py")}') - if not os.path.exists("sdk_url.txt"): - print('SDK download failed') + start_download_task() + + if not download_is_successful(): return + start_crawl_task() + + os.chdir(os.path.dirname(os.path.realpath(__file__))) + job(os.path.join(".", "auto_xts_test", "run.bat")) + job(f'python {os.path.join(".", "sdk_test", "entry.py")}') job(f'python {os.path.join(".", "performance_test", "performance_entry.py")}') job(f'python {os.path.join(".", "send_email.py")}') -if __name__ == "__main__": - os.chdir(os.path.dirname(os.path.realpath(__file__))) - schedule.every().day.at("02:10").do(run) +if __name__ == '__main__': + parse_args() + if arguments.run_time is not None: + 
run_time = arguments.run_time + os.chdir(os.path.dirname(os.path.realpath(__file__))) + schedule.every().day.at(run_time).do(run) + run() + while True: + schedule.run_pending() - time.sleep(1) + time.sleep(1) \ No newline at end of file
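For context, the screenshot helpers added above are meant to be used in sequence: get_running_screenshot installs the signed hap, launches the ability over hdc, and pulls a device screenshot into pictures/<task name>/; verify_runtime then pixel-matches that capture against the same-named file under pictures_reference/. A minimal sketch of that flow follows; the DemoTask values are hypothetical placeholders, not part of this patch, and the helpers are assumed to run inside (or be imported from) the module patched above.

    # Sketch only: a stand-in object exposing the attributes the helpers read.
    class DemoTask:
        name = 'demo_app'                                     # hypothetical project name
        path = r'D:\AutoTest\demo_app'                        # hypothetical project root
        build_path = ['entry', 'build']                       # joined onto path
        output_hap_path_signed = ['outputs', 'entry-signed.hap']  # hypothetical signed hap
        ability_name = 'EntryAbility'                         # hypothetical ability
        bundle_name = 'com.example.demo'                      # hypothetical bundle

    task = DemoTask()
    get_running_screenshot(task, 'home_page')    # capture pictures/demo_app/home_page.jpeg via hdc
    if verify_runtime(task, 'home_page'):        # compare against pictures_reference/demo_app/home_page.jpeg
        out_file_backup(task, 'runtime_check')   # archive the build outputs for this run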
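Similarly, the reworked timer.py now takes its schedule and crawl settings from the command line (see the argparse definitions above). An example invocation, with placeholder values rather than shipped defaults:

    # Run daily at 02:10, skip the download step, and crawl commit logs
    # for two hypothetical repos starting from 2023-08-01.
    python timer.py --runTime 02:10 --skipDownload true --startTime 2023-08-01 --repoName repo_one repo_two

Note that --skipDownload is declared with type=str, so any non-empty value (including "false") is truthy and makes the `if not arguments.skip_download` check skip the download.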