From 7e3a55e94473f9e45dd3a0a94dc6e4aee6a8c56e Mon Sep 17 00:00:00 2001
From: chenqy930
Date: Mon, 12 Jun 2023 14:38:22 +0800
Subject: [PATCH 1/5] Add sdk test suites

Signed-off-by: chenqy930
Change-Id: Idf33c25e1bc639a75832db292fec95363310da34
---
 test_suite/config.yaml    | 144 +++++++++++
 test_suite/execution.py   | 526 ++++++++++++++++++++++++++++++++++++++
 test_suite/options.py     | 168 ++++++++++++
 test_suite/preparation.py |  77 ++++++
 test_suite/result.py      | 148 +++++++++++
 test_suite/run.py         |  51 ++++
 test_suite/utils.py       |  75 ++++++
 7 files changed, 1189 insertions(+)
 create mode 100644 test_suite/config.yaml
 create mode 100644 test_suite/execution.py
 create mode 100644 test_suite/options.py
 create mode 100644 test_suite/preparation.py
 create mode 100644 test_suite/result.py
 create mode 100644 test_suite/run.py
 create mode 100644 test_suite/utils.py

diff --git a/test_suite/config.yaml b/test_suite/config.yaml
new file mode 100644
index 0000000000..33773ca493
--- /dev/null
+++ b/test_suite/config.yaml
@@ -0,0 +1,144 @@
+# Copyright (c) 2023 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Description: configs for test suite
+
+# environment settings
+deveco_path: D:\deveco\bin\devecostudio64.exe
+deveco_sdk_path: D:\deveco-sdk\deveco-sdk-0602
+
+# test haps list
+# available values of attribute 'type' are: stage, stage_widget, fa, js, compatible8
+haps:
+  # complex haps
+  - calendar:
+    name: Calendar
+    path: D:\haps\calendar
+    type: stage
+    output_path:
+    output_hap_name:
+    output_app_name:
+    description:
+  - fangtaobao:
+    name: FangTaoBao
+    path: D:\haps\fangtaobao
+    type: stage
+    output_path:
+    output_hap_name:
+    output_app_name:
+    description:
+  - fangdouyin:
+    name: FangDouYin
+    path: D:\haps\fangdouyin
+    type: stage
+    output_path:
+    output_hap_name:
+    output_app_name:
+    description:
+  - fangweixin:
+    name: FangWeiXin
+    path: D:\haps\fangweixin
+    type: stage
+    output_path:
+    output_hap_name:
+    output_app_name:
+    description:
+  # arkui haps
+  - arkuistagedemo:
+    name: ArkuiStageDemo
+    path: D:\haps\arkuiStageDemo
+    type: stage
+    output_path:
+    output_hap_name:
+    output_app_name:
+    description:
+  - arkuifademo:
+    name: ArkuiFaDemo
+    path: D:\haps\arkuiFaDemo
+    type: fa
+    output_path:
+    output_hap_name:
+    output_app_name:
+    description:
+  # module haps
+  - moduledemo:
+    name: ModuleDemo
+    path: D:\haps\moduleDemo
+    type: stage
+    output_path:
+    output_hap_name:
+    output_app_name:
+    description:
+  # widget haps
+  - widgetdemo:
+    name: WidgetDemo
+    path: D:\haps\WidgetDemo
+    type: stage_widget
+    output_path:
+    output_hap_name:
+    output_app_name:
+    description:
+  # IDE demo haps
+  - idedemo_00:
+    name: IdeStageDemoEmptyAbility
+    path: D:\sdk-test\DemoApplication_EmptyAbility
+    type: stage
+    build_path: [entry, build, default]
+    output_hap_path: [outputs, default, entry-default-unsigned.hap]
+    output_app_path: [outputs, default, app, entry-default.hap]
+    inc_modify_file: [entry, src, main, ets, pages, index.ets]
+    baseline_full_compile_time: 0
+    baseline_inc_compile_time: 0
+    
baseline_abc_size: 999999999 + description: + - idedemo_01: + name: IdeFaDemoEmptyAbility + path: D:\sdk-test\DemoApplication_EmptyAbility_fa + type: fa + build_path: [entry, build, default] + output_hap_path: [outputs, default, entry-default-unsigned.hap] + output_app_path: [outputs, default, app, entry-default.hap] + inc_modify_file: [entry, src, main, ets, MainAbility, pages, index.ets] + baseline_full_compile_time: 0 + baseline_inc_compile_time: 0 + baseline_abc_size: 999999999 + description: + - idedemo_02: + name: IdeCompatible8DemoEmptyAbility + path: D:\haps\IdeCompatible8DemoEmptyAbility + type: compatible8 + output_path: + output_hap_name: + output_app_name: + description: + - idedemo_03: + name: IdeJsDemoEmptyAbility + path: D:\haps\IdeJsDemoEmptyAbility + type: js + output_path: + output_hap_name: + output_app_name: + description: + # error demo haps(to be added) + +# modifications for incremental compilation +patch_content: + patch_new_file: + # This new file will be added to the same directory as 'inc_modify_file' specified in haps + name: test.ets + content: "export function a() {return 'a'}" + patch_lines_1: + head: "import {a} from './test'\n" + tail: "console.log(a.toString());\n" + patch_lines_2: + tail: "console.log('This is a new line');\n" diff --git a/test_suite/execution.py b/test_suite/execution.py new file mode 100644 index 0000000000..fe21876075 --- /dev/null +++ b/test_suite/execution.py @@ -0,0 +1,526 @@ +#!/usr/bin/env python3 +# coding: utf-8 + +""" +Copyright (c) 2023 Huawei Device Co., Ltd. +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+
+Description: execute test tasks
+"""
+
+import logging
+import os
+import re
+import subprocess
+import shutil
+import zipfile
+
+import options
+import utils
+
+
+def validate_output_for_jsbundle(info, uncompressed_output_path, is_debug):
+    abc_files = []
+    for root, dirs, files in os.walk(uncompressed_output_path):
+        for file in files:
+            if file.endswith('.abc'):
+                abc_files.append(os.path.join(root, file))
+
+    total_size = 0
+    for file in abc_files:
+        # entries in abc_files are already absolute paths
+        total_size += os.path.getsize(file)
+    if total_size == 0:
+        info.result = options.TaskResult.failed
+        info.error_message = "abc not found or abc size is 0"
+        return False
+    else:
+        info.abc_size = total_size
+
+    if is_debug:
+        for file in abc_files:
+            sourcemap_file = file.replace('.abc', '.js.map')
+            if not os.path.exists(sourcemap_file):
+                info.result = options.TaskResult.failed
+                info.error_message = "sourcemap not found"
+                return False
+
+    return True
+
+
+def validate_output_for_esmodule(info, task_type, uncompressed_output_path, is_debug):
+    abc_sourcemap_path = os.path.join(uncompressed_output_path, 'ets')
+
+    modules_abc_path = os.path.join(abc_sourcemap_path, 'modules.abc')
+    if not os.path.exists(modules_abc_path):
+        info.result = options.TaskResult.failed
+        info.error_message = "modules.abc not found"
+        return False
+
+    modules_abc_size = os.path.getsize(modules_abc_path)
+    if modules_abc_size <= 0:
+        info.result = options.TaskResult.failed
+        info.error_message = "modules.abc size is 0"
+        return False
+    info.abc_size = modules_abc_size
+
+    if task_type == 'stage_widget':
+        widget_abc_path = os.path.join(abc_sourcemap_path, 'widgets.abc')
+        if not os.path.exists(widget_abc_path):
+            info.result = options.TaskResult.failed
+            info.error_message = "widgets.abc not found"
+            return False
+
+        widgets_abc_size = os.path.getsize(widget_abc_path)
+        if widgets_abc_size <= 0:
+            info.result = options.TaskResult.failed
+            info.error_message = "widgets.abc size is 0"
+            return False
+        else:
+            info.abc_size += widgets_abc_size
+
+    if is_debug:
+        sourcemap_path = os.path.join(abc_sourcemap_path, 'sourceMaps.map')
+        if not os.path.exists(sourcemap_path):
+            info.result = options.TaskResult.failed
+            info.error_message = "sourcemap not found"
+            return False
+
+    return True
+
+
+def collect_compile_time(info, time_string):
+    time_second = 0
+    time_millisecond = 0
+
+    time_items = time_string.split()
+    for i in range(0, len(time_items)):
+        if time_items[i] == 's':
+            time_second = float(time_items[i - 1])
+        if time_items[i] == 'ms':
+            time_millisecond = round(float(time_items[i - 1]) / 1000, 3)
+
+    info.time = round(time_second + time_millisecond, 3)
+
+
+def get_compile_output_file_path(task, is_debug):
+    output_file = ''
+
+    if is_debug:
+        output_file = os.path.join(task.path, *(task.build_path), *(task.output_hap_path))
+    else:
+        output_file = os.path.join(task.path, *(task.build_path), *(task.output_app_path))
+
+    return output_file
+
+
+def validate_compile_output(info, task, is_debug):
+    passed = False
+
+    output_file = get_compile_output_file_path(task, is_debug)
+    uncompressed_output_file = output_file + '.uncompressed'
+
+    if not os.path.exists(output_file):
+        logging.error("output file for task %s does not exist: %s", task.name, output_file)
+        passed = False
+
+        info.result = options.TaskResult.failed
+        info.error_message = "Hap not found"
+        return passed
+    try:
+        with zipfile.ZipFile(output_file, 'r') as zip_ref:
+            zip_ref.extractall(uncompressed_output_file)
+    except Exception as e:
+        logging.error("unzip exception: %s", e)
+        logging.error("uncompressed output file for task %s failed. output file: %s", task.name, output_file)
+        passed = False
+
+        info.result = options.TaskResult.failed
+        info.error_message = "Hap uncompressed failed, cannot examine build products"
+        return passed
+
+    if utils.is_esmodule(task.type):
+        passed = validate_output_for_esmodule(info, task.type, uncompressed_output_file, is_debug)
+    else:
+        passed = validate_output_for_jsbundle(info, uncompressed_output_file, is_debug)
+
+    shutil.rmtree(uncompressed_output_file)
+
+    return passed
+
+
+def run_compile_output(info, task):
+    ## TODO:
+    # 1)install hap
+    # 2)run hap and verify
+    # not implemented yet, treat as passed for now
+    return True
+
+
+def is_compile_success(compile_stdout):
+    pattern = r"BUILD SUCCESSFUL in (\d+ s )?(\d+ ms)?"
+    match_result = re.search(pattern, compile_stdout)
+    if not match_result:
+        return [False, '']
+
+    return [True, match_result.group(0)]
+
+
+def validate(compilation_info, task, is_debug, stdout, stderr):
+    info = {}
+    if is_debug:
+        info = compilation_info.debug_info
+    else:
+        info = compilation_info.release_info
+
+    # ret_code will be 1 if there's stderr; use "BUILD SUCCESSFUL" in stdout as the success flag instead
+    [is_success, time_string] = is_compile_success(stdout)
+    if not is_success:
+        info.result = options.TaskResult.failed
+        info.error_message = stderr
+        return False
+
+    passed = validate_compile_output(info, task, is_debug)
+
+    if not options.arguments.pack_only:
+        passed = run_compile_output(info, task)
+
+    if passed:
+        collect_compile_time(info, time_string)
+        info.result = options.TaskResult.passed
+
+    return passed
+
+
+def compile(task, is_debug):
+    cmd = ['hvigorw']
+    if is_debug:
+        cmd.append('assembleHap')
+    else:
+        cmd.append('assembleApp')
+
+    logging.debug('cmd: %s', cmd)
+    logging.debug("cmd execution path %s", task.path)
+    process = subprocess.Popen(cmd, shell = True, cwd = task.path,
+                               stdout = subprocess.PIPE,
+                               stderr = subprocess.PIPE)
+    stdout, stderr = process.communicate(timeout=options.arguments.compile_timeout)
+    stdout_utf8 = stdout.decode("utf-8", errors="ignore")
+    stderr_utf8 = stderr.decode("utf-8", errors="ignore")
+    logging.debug("cmd stdout: {}".format(stdout_utf8))
+    logging.debug("cmd stderr: {}".format(stderr_utf8))
+
+    return [stdout_utf8, stderr_utf8]
+
+
+def clean_compile(task):
+    cmd = 'hvigorw clean'
+    logging.debug('cmd: %s', cmd)
+    logging.debug("cmd execution path %s", task.path)
+    process = subprocess.Popen(cmd, shell = True, cwd = task.path,
+                               stdout = subprocess.PIPE,
+                               stderr = subprocess.PIPE)
+    out, err = process.communicate(timeout=options.arguments.compile_timeout)
+
+
+def validate_compile_incremental_time(task, inc_task, is_debug):
+    if is_debug:
+        full_info = task.full_compilation_info.debug_info
+        inc_info = inc_task.debug_info
+    else:
+        full_info = task.full_compilation_info.release_info
+        inc_info = inc_task.release_info
+
+    if full_info.time < inc_info.time:
+        inc_info.result = options.TaskResult.failed
+        inc_info.error_message = 'Incremental compile took more time than full compile.'
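As an aside, both the success check and the timing collection above hinge on hvigor's one-line build summary. A minimal standalone sketch of that parsing, runnable on its own (the sample output string is made up for illustration; real hvigor logs may vary):

import re

def parse_compile_time(stdout):
    # same pattern as is_compile_success; group(0) is the whole summary line
    match = re.search(r"BUILD SUCCESSFUL in (\d+ s )?(\d+ ms)?", stdout)
    if not match:
        return None
    # same token walk as collect_compile_time: a number followed by 's' or 'ms'
    seconds = 0.0
    items = match.group(0).split()
    for i, token in enumerate(items):
        if token == 's':
            seconds += float(items[i - 1])
        if token == 'ms':
            seconds += float(items[i - 1]) / 1000
    return round(seconds, 3)

print(parse_compile_time("BUILD SUCCESSFUL in 5 s 321 ms"))  # -> 5.321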
+ + +def prepare_incremental_task(task, task_name): + if task_name in task.incre_compilation_info: + inc_task = task.incre_compilation_info[task_name] + else: + inc_task = options.IncCompilationInfo() + inc_task.name = task_name + task.incre_compilation_info[task_name] = inc_task + return inc_task + + +def compile_incremental_no_modify(task, is_debug): + task_name = 'no_change' + inc_task = prepare_incremental_task(task, task_name) + + [stdout, stderr] = compile(task, is_debug) + passed = validate(inc_task, task, is_debug, stdout, stderr) + validate_compile_incremental_time(task, inc_task, is_debug) + + +def compile_incremental_add_oneline(task, is_debug): + task_name = 'add_oneline' + inc_task = prepare_incremental_task(task, task_name) + + modify_file_item = task.inc_modify_file + modify_file = os.path.join(task.path, *modify_file_item) + modify_file_backup = modify_file + ".bak" + shutil.copyfile(modify_file, modify_file_backup) + + with open(modify_file, 'a', encoding='utf-8') as file: + file.write(options.configs['patch_content']['patch_lines_2']['tail']) + + [stdout, stderr] = compile(task, is_debug) + passed = validate(inc_task, task, is_debug, stdout, stderr) + validate_compile_incremental_time(task, inc_task, is_debug) + + shutil.move(modify_file_backup, modify_file) + + +def compile_incremental_add_file(task, is_debug): + task_name = 'add_file' + inc_task = prepare_incremental_task(task, task_name) + + modify_file_item = task.inc_modify_file + modify_file = os.path.join(task.path, *modify_file_item) + modify_file_backup = modify_file + ".bak" + shutil.copyfile(modify_file, modify_file_backup) + + modify_dir = os.path.dirname(modify_file) + new_file_name = options.configs['patch_content']['patch_new_file']['name'] + new_file_content = options.configs['patch_content']['patch_new_file']['content'] + new_file = os.path.join(modify_dir, new_file_name) + + with open(new_file, 'w', encoding='utf-8') as file: + file.writelines(new_file_content) + + with open(modify_file, 'r+', encoding='utf-8') as file: + old_content = file.read() + file.seek(0) + file.write(options.configs['patch_content']['patch_lines_1']['head']) + file.write(old_content) + file.write(options.configs['patch_content']['patch_lines_1']['tail']) + + [stdout, stderr] = compile(task, is_debug) + validate(inc_task, task, is_debug, stdout, stderr) + validate_compile_incremental_time(task, inc_task, is_debug) + + shutil.move(modify_file_backup, modify_file) + os.remove(new_file) + + +def compile_incremental_delete_file(task, is_debug): + task_name = 'delete_file' + inc_task = prepare_incremental_task(task, task_name) + + # this test is after 'add_file', and in test 'add_file' already done remove file, + # so here just call compile + [stdout, stderr] = compile(task, is_debug) + validate(inc_task, task, is_debug, stdout, stderr) + validate_compile_incremental_time(task, inc_task, is_debug) + + +def compile_incremental_reverse_hap_mode(task, is_debug): + task_name = 'reverse_hap_mode' + inc_task = prepare_incremental_task(task, task_name) + + hap_mode = not is_debug + [stdout, stderr] = compile(task, hap_mode) + validate(inc_task, task, hap_mode, stdout, stderr) + + +def compile_incremental_modify_bundle_name(task, is_debug): + # TODO: this needs to modify bundle name and disasm abc for compare + return + + +def compile_incremental(task, is_debug): + [stdout, stderr] = compile(task, is_debug) + + [is_success, time_string] = is_compile_success(stdout) + if not is_success: + logging.error("Incremental compile failed due to first 
compile failed!") + return + + if options.arguments.compile_mode == 'incremental': + passed = validate(task.full_compilation_info, task, is_debug, stdout, stderr) + if not passed: + logging.error("Incremental compile failed due to first compile failed!") + return + + backup_compile_output(task, is_debug) + + compile_incremental_no_modify(task, is_debug) + compile_incremental_add_oneline(task, is_debug) + compile_incremental_add_file(task, is_debug) + compile_incremental_delete_file(task, is_debug) + compile_incremental_reverse_hap_mode(task, is_debug) + # TODO: compile_incremental_modify_bundle_name(task, is_debug) + + +def backup_compile_output(task, is_debug): + backup_path = task.backup_info.cache_path + if not os.path.exists(backup_path): + os.mkdir(backup_path) + + if is_debug: + if len(task.backup_info.output_debug) == 2: + return + + backup_output_path = os.path.join(backup_path, 'output', 'debug') + if not os.path.exists(backup_output_path): + os.makedirs(backup_output_path) + output_file = get_compile_output_file_path(task, True) + + else: + if len(task.backup_info.output_release) == 2: + return + + backup_output_path = os.path.join(backup_path, 'output', 'release') + if not os.path.exists(backup_output_path): + os.makedirs(backup_output_path) + output_file = get_compile_output_file_path(task, False) + + shutil.copy(output_file, backup_output_path) + backup_output = os.path.join(backup_output_path, os.path.basename(output_file)) + backup_time_output = backup_output + '-' + utils.get_time_string() + shutil.move(backup_output, backup_time_output) + + if is_debug: + task.backup_info.output_debug.append(backup_time_output) + else: + task.backup_info.output_release.append(backup_time_output) + + +def backup_compile_cache(task, is_debug): + backup_path = task.backup_info.cache_path + if not os.path.exists(backup_path): + os.mkdir(backup_path) + + backup_cache_path = os.path.join(backup_path, 'cache') + if not os.path.exists(backup_cache_path): + os.mkdir(backup_cache_path) + cache_files = os.path.join(task.path, *(task.build_path), 'cache') + + if is_debug: + if len(task.backup_info.cache_debug) == 1: + return + + backup_cache_file = os.path.join(backup_cache_path, 'debug') + shutil.copytree(cache_files, backup_cache_file) + task.backup_info.cache_debug = backup_cache_file + else: + if len(task.backup_info.cache_release) == 1: + return + + backup_cache_file = os.path.join(backup_cache_path, 'release') + shutil.copytree(cache_files, backup_cache_file) + task.backup_info.cache_release = backup_cache_file + + +def backup_compile_output_and_cache(task, is_debug): + backup_compile_output(task, is_debug) + backup_compile_cache(task, is_debug) + + +def execute_full_compile(task): + clean_compile(task) + passed = False + if options.arguments.hap_mode in ['all', 'release']: + [stdout, stderr] = compile(task, False) + passed = validate(task.full_compilation_info, task, False, stdout, stderr) + if passed: + backup_compile_output_and_cache(task, False) + clean_compile(task) + if options.arguments.hap_mode in ['all', 'debug']: + [stdout, stderr] = compile(task, True) + passed = validate(task.full_compilation_info, task, True, stdout, stderr) + if passed: + backup_compile_output_and_cache(task, True) + clean_compile(task) + + return passed + + +def execute_incremental_compile(task): + if options.arguments.hap_mode in ['all', 'release']: + compile_incremental(task, False) + if options.arguments.hap_mode in ['all', 'debug']: + compile_incremental(task, True) + clean_compile(task) + + +def 
execute_break_compile(task):
+    # TODO
+    return ''
+
+
+def verify_binary_consistency(task):
+    debug_consistency = True
+    release_consistency = True
+
+    if options.arguments.hap_mode in ['all', 'release']:
+        # will have at least 1 output from full compile
+        if len(task.backup_info.output_release) == 1:
+            compile(task, False)
+            backup_compile_output(task, False)
+
+        if len(task.backup_info.output_release) == 2:
+            release_consistency = utils.is_same_file(
+                task.backup_info.output_release[0], task.backup_info.output_release[1])
+        else:
+            release_consistency = False
+
+    if options.arguments.hap_mode in ['all', 'debug']:
+        logging.debug("----> len cache: %s", len(task.backup_info.output_debug))
+        if len(task.backup_info.output_debug) == 1:
+            logging.debug("----> rebuild")
+            compile(task, True)
+            backup_compile_output(task, True)
+
+        if len(task.backup_info.output_debug) == 2:
+            logging.debug('-----> compare')
+            debug_consistency = utils.is_same_file(
+                task.backup_info.output_debug[0], task.backup_info.output_debug[1])
+        else:
+            debug_consistency = False
+
+    if debug_consistency and release_consistency:
+        task.abc_consistency = options.TaskResult.passed
+    else:
+        task.abc_consistency = options.TaskResult.failed
+
+
+def clean_backup(task):
+    if os.path.exists(task.backup_info.cache_path):
+        shutil.rmtree(task.backup_info.cache_path)
+
+
+def execute(test_tasks):
+    for task in test_tasks:
+        try:
+            # TODO: add sdk path checking (sdk path in hap is same as config.yaml)
+            logging.info("======> running task: %s", task.name)
+            if options.arguments.compile_mode in ['all', 'full']:
+                logging.info("==========> running task: %s in full compilation", task.name)
+                if not execute_full_compile(task):
+                    logging.error("Full compile failed, skip other tests!")
+                    continue
+
+            if options.arguments.compile_mode in ['all', 'incremental']:
+                logging.info("==========> running task: %s in incremental compilation", task.name)
+                execute_incremental_compile(task)
+
+            execute_break_compile(task)
+            verify_binary_consistency(task)
+            logging.info("======> running task: %s finished", task.name)
+        except Exception as e:
+            logging.exception(e)
+        finally:
+            clean_backup(task)
diff --git a/test_suite/options.py b/test_suite/options.py
new file mode 100644
index 0000000000..714c77e984
--- /dev/null
+++ b/test_suite/options.py
@@ -0,0 +1,168 @@
+#!/usr/bin/env python3
+# coding: utf-8
+
+"""
+Copyright (c) 2021 Huawei Device Co., Ltd.
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+Description: process options and configs for test suite
+"""
+
+import argparse
+import logging
+import os
+import yaml
+
+from enum import Enum
+from utils import init_logger
+
+YAML_PATH = './config.yaml'
+
+class TaskResult(Enum):
+    undefined = 0
+    passed = 1
+    failed = 2
+
+
+class CompilationInfo:
+    def __init__(self):
+        self.result = TaskResult.undefined
+        self.error_message = ''
+        self.time = 0
+        self.abc_size = 0
+
+
+class FullCompilationInfo:
+    def __init__(self):
+        self.debug_info = CompilationInfo()
+        self.release_info = CompilationInfo()
+
+
+class IncCompilationInfo:
+    def __init__(self):
+        self.debug_info = CompilationInfo()
+        self.release_info = CompilationInfo()
+        self.name = ''
+
+
+class BackupInfo:
+    def __init__(self):
+        self.cache_path = ''
+        self.cache_debug = ''
+        self.cache_release = ''
+        self.output_debug = []
+        self.output_release = []
+
+
+class TestTask:
+    def __init__(self):
+        self.name = ''
+        self.path = ''
+        self.type = ''
+        self.build_path = []
+        self.output_hap_path = ''
+        self.output_app_path = ''
+        self.inc_modify_file = []
+
+        self.full_compilation_info = FullCompilationInfo()
+        self.incre_compilation_info = {}
+        self.break_compilation_info = CompilationInfo()
+        self.abc_consistency = TaskResult.undefined
+
+        self.backup_info = BackupInfo()
+
+
+def parse_args():
+    parser = argparse.ArgumentParser()
+    parser.add_argument('--sdkPath', type=str, dest='sdk_path', default='',
+                        help='specify sdk path if need to update sdk')
+    parser.add_argument('--buildMode', type=str, dest='build_mode', default='all',
+                        choices=['all', 'assemble', 'preview', 'hotreload', 'hotfix'],
+                        help='specify build mode')
+    parser.add_argument('--hapMode', type=str, dest='hap_mode', default='all',
+                        choices=['all', 'debug', 'release'],
+                        help='specify hap mode')
+    parser.add_argument('--compileMode', type=str, dest='compile_mode', default='all',
+                        choices=['all', 'full', 'incremental'],
+                        help='specify compile mode')
+    parser.add_argument('--testCase', type=str, dest='test_case', default='all',
+                        choices=['all', 'fa', 'stage', 'compatible8', 'js'],
+                        help='specify test cases')
+    parser.add_argument('--testHap', type=str, dest='test_hap', default='all',
+                        help="specify test haps, option can be 'all' or a list of haps separated by ','")
+    parser.add_argument('--imagePath', type=str, dest='image_path', default='all',
+                        help='specify image path if need to update rk/phone images')
+    parser.add_argument('--packOnly', type=bool, dest='pack_only', default=True,
+                        help='specify how to verify. if packOnly is true, will not verify results by running haps')
+    parser.add_argument('--outputBinarySize', type=bool, dest='output_binary_size', default=True,
+                        help='specify whether to output binary size to the result')
+    parser.add_argument('--outputCompileTime', type=bool, dest='output_compile_time', default=True,
+                        help='specify whether to output compilation time to the result')
+    parser.add_argument('--emailResult', type=bool, dest='email_result', default=True,
+                        help='specify whether to send result by email')
+    parser.add_argument('--resultFile', type=str, dest='result_file', default='',
+                        help='specify the file to output results to')
+    parser.add_argument('--logLevel', type=str, dest='log_level', default='error',
+                        choices=['debug', 'info', 'warn', 'error'],
+                        help='specify log level of test suite')
+    parser.add_argument('--logFile', type=str, dest='log_file', default='',
+                        help='specify the file log outputs to, empty string will output to console')
+    parser.add_argument('--compileTimeout', type=int, dest='compile_timeout', default=600,
+                        help='specify deveco compilation timeout')
+    global arguments
+    arguments = parser.parse_args()
+
+
+def parse_configs():
+    with open(YAML_PATH, 'r') as config_file:
+        global configs
+        configs = yaml.safe_load(config_file)
+
+
+def create_test_tasks():
+    task_list = []
+    haps_list = configs['haps']
+    test_cases = 'all' if arguments.test_case == 'all' else []
+    test_haps = 'all' if arguments.test_hap == 'all' else []
+    if test_cases != 'all':
+        test_cases = arguments.test_case.split(',')
+    if test_haps != 'all':
+        test_haps = arguments.test_hap.split(',')
+
+    for hap in haps_list:
+        if test_cases == 'all' or test_haps == 'all' \
+            or (test_cases and (hap['type'] in test_cases)) \
+            or (test_haps and (hap['name'] in test_haps)):
+            if not os.path.exists(hap['path']):
+                logging.warning("Path of hap %s doesn't exist: %s" % (hap['name'], hap['path']))
+                continue
+            task = TestTask()
+            task.name = hap['name']
+            task.path = hap['path']
+            task.type = hap['type']
+            task.build_path = hap['build_path']
+            task.output_hap_path = hap['output_hap_path']
+            task.output_app_path = hap['output_app_path']
+            task.inc_modify_file = hap['inc_modify_file']
+            task.backup_info.cache_path = os.path.join(task.path, 'test_suite_cache')
+
+            task_list.append(task)
+
+    return task_list
+
+
+def process_options():
+    parse_args()
+    init_logger(arguments.log_level, arguments.log_file)
+    parse_configs()
+    return create_test_tasks()
\ No newline at end of file
diff --git a/test_suite/preparation.py b/test_suite/preparation.py
new file mode 100644
index 0000000000..f0b28a957f
--- /dev/null
+++ b/test_suite/preparation.py
@@ -0,0 +1,77 @@
+#!/usr/bin/env python3
+# coding: utf-8
+
+"""
+Copyright (c) 2023 Huawei Device Co., Ltd.
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+ +Description: prepare environment for test +""" + +import logging +import os +import shutil +import validators + +import options +from utils import is_linux, is_mac, is_windows, get_time_string + +def check_deveco_installation(): + if is_linux(): + return True ## caution! TODO: just for test, should be False + if is_mac() or (is_windows() and not options.arguments.pack_only): + if not os.path.exists(options.configs['deveco_path']): + logging.error("DevEco not found!") + return False + return True + + +def GetSdkFromRemote(): + ## TODO: 1)download sdk, 2)unzip sdk, 3)run npm install in ets and js dir + return '' + + +def update_sdk_to_deveco(sdk_path): + deveco_sdk_path = options.configs['deveco_sdk_path'] + shutil.move(deveco_sdk_path, deveco_sdk_path + '-' + get_time_string()) + for item in os.listdir(sdk_path): + shutil.move(os.path.join(sdk_path, item), os.path.join(deveco_sdk_path, item)) + + +def prepare_sdk(): + sdk_arg = options.arguments.sdk_path + if sdk_arg == '': + return True # use the sdk specified in config.yaml + + sdk_path = sdk_arg + if validators.url(sdk_arg): + sdk_path = GetSdkFromRemote() + + if not os.path.exists(sdk_path): + return False + + update_sdk_to_deveco(sdk_path) + return True + + +def prepare_image(): + if options.arguments.pack_only: + return True + + ## TODO: 1)download image, 2)flash image + + return True + + +def prepare_test_env(): + return check_deveco_installation() and prepare_sdk() and prepare_image() diff --git a/test_suite/result.py b/test_suite/result.py new file mode 100644 index 0000000000..1ba6ee7c7d --- /dev/null +++ b/test_suite/result.py @@ -0,0 +1,148 @@ +#!/usr/bin/env python3 +# coding: utf-8 + +""" +Copyright (c) 2023 Huawei Device Co., Ltd. +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. + +Description: output test results +""" + +import logging +import time + +import options + +class TestResult: + def __init__(self): + self.passed = [] + self.failed = [] + self.time = 0.0 + + +def print_test_result(test_result, test_tasks): + logging.info("========================================") + logging.info("Test finished. 
The result is as follows:")
+    logging.info("=====> Summary")
+    logging.info("Total test number: %s, took time: %.3f s", len(test_tasks), test_result.time)
+    logging.info("Passed test number: %s", len(test_result.passed))
+    logging.info("Failed test number: %s", len(test_result.failed))
+
+    logging.info("=====> Detail Information")
+    logging.info("-----")
+    idx = 1
+    for task in test_tasks:
+        logging.info("task index: %d", idx)
+        idx = idx + 1
+        logging.info("task name: %s", task.name)
+        logging.info("task type: %s", task.type)
+        # print full compile result
+        logging.info("--full compilation result:")
+        logging.info("debug: %s, abc_size(byte) %s, time(s) %s, error message: %s",
+                     task.full_compilation_info.debug_info.result,
+                     task.full_compilation_info.debug_info.abc_size,
+                     task.full_compilation_info.debug_info.time,
+                     task.full_compilation_info.debug_info.error_message)
+        logging.info("release: %s, abc_size(byte) %s, time(s) %s, error message: %s",
+                     task.full_compilation_info.release_info.result,
+                     task.full_compilation_info.release_info.abc_size,
+                     task.full_compilation_info.release_info.time,
+                     task.full_compilation_info.release_info.error_message)
+
+        # print incremental compile result
+        logging.info("--incremental compilation result:")
+        for inc_task in task.incre_compilation_info.values():
+            logging.info("incre task: %s", inc_task.name)
+            logging.info("debug: %s, abc_size(byte) %s, time(s) %s, error message: %s",
+                         inc_task.debug_info.result,
+                         inc_task.debug_info.abc_size,
+                         inc_task.debug_info.time,
+                         inc_task.debug_info.error_message)
+            logging.info("release: %s, abc_size(byte) %s, time(s) %s, error message: %s",
+                         inc_task.release_info.result,
+                         inc_task.release_info.abc_size,
+                         inc_task.release_info.time,
+                         inc_task.release_info.error_message)
+        logging.info("--abc consistency: %s", task.abc_consistency)
+        logging.info("-----")
+    logging.info("========================================")
+
+
+def is_full_compilation_passed(task_info):
+    if options.arguments.compile_mode not in ['all', 'full']:
+        return True
+
+    passed_debug = True
+    passed_release = True
+
+    if options.arguments.hap_mode in ['all', 'release']:
+        passed_release = task_info.release_info.result == options.TaskResult.passed
+    if options.arguments.hap_mode in ['all', 'debug']:
+        passed_debug = task_info.debug_info.result == options.TaskResult.passed
+
+    return passed_debug and passed_release
+
+
+def is_incremental_compilation_passed(task_info):
+    if options.arguments.compile_mode not in ['all', 'incremental']:
+        return True
+
+    if len(task_info) == 0:
+        return False
+
+    passed_debug = True
+    passed_release = True
+    for inc_task in task_info.values():
+        if options.arguments.hap_mode in ['all', 'release']:
+            passed_release = passed_release and inc_task.release_info.result == options.TaskResult.passed
+        if options.arguments.hap_mode in ['all', 'debug']:
+            passed_debug = passed_debug and inc_task.debug_info.result == options.TaskResult.passed
+
+    return passed_debug and passed_release
+
+
+def is_task_passed(task):
+    return (is_full_compilation_passed(task.full_compilation_info) and
+            is_incremental_compilation_passed(task.incre_compilation_info) and
+            task.abc_consistency == options.TaskResult.passed)
+    # TODO: add break compile result
+
+
+def collect_result(test_result, test_tasks, start_time):
+    for task in test_tasks:
+        if not is_task_passed(task):
+            test_result.failed.append(task)
+        else:
+            test_result.passed.append(task)
+
+    end_time = time.time()
+    test_result.time = round(end_time - start_time, 3)
+
+
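email_result below is still a stub. For reference, a minimal sketch of what it could eventually do with the standard library's smtplib; the SMTP host and the mail addresses here are placeholders, not values taken from this patch:

import smtplib
from email.mime.text import MIMEText

def send_result_mail(test_result, smtp_host='smtp.example.com'):
    # summarize the TestResult collected above into a short plain-text body
    body = 'passed: {}, failed: {}, took {} s'.format(
        len(test_result.passed), len(test_result.failed), test_result.time)
    message = MIMEText(body)
    message['Subject'] = 'SDK test suite result'
    message['From'] = 'test-suite@example.com'
    message['To'] = 'dev-team@example.com'
    with smtplib.SMTP(smtp_host) as server:
        server.send_message(message)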
+def email_result(test_result): + # TODO + return + + +def process_test_result(test_tasks, start_time): + test_result = TestResult() + + collect_result(test_result, test_tasks, start_time) + print_test_result(test_result, test_tasks) + + # TODO: add baseline comparison + # TODO: add write result to a file + + if options.arguments.email_result: + email_result(test_result) diff --git a/test_suite/run.py b/test_suite/run.py new file mode 100644 index 0000000000..1ce1fe3080 --- /dev/null +++ b/test_suite/run.py @@ -0,0 +1,51 @@ +#!/usr/bin/env python3 +# coding: utf-8 + +""" +Copyright (c) 2023 Huawei Device Co., Ltd. +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. + +Description: entrance to run sdk test suite +""" + +import logging +import sys +import time + +from execution import execute +from options import process_options +from preparation import prepare_test_env +from result import process_test_result + +def run(): + try: + start_time = time.time() + test_tasks = process_options() + if not test_tasks: + logging.error("No test task found, test suite exit!") + # TODO: make this open when finished: + # sys.exit(1) + + if not prepare_test_env(): + logging.error("Prepare test environment failed, test suite exit!") + # TODO: make this open when finished: + # sys.exit(1) + + execute(test_tasks) + process_test_result(test_tasks, start_time) + except Exception as e: + logging.exception(e) + + +if __name__ == '__main__': + run() \ No newline at end of file diff --git a/test_suite/utils.py b/test_suite/utils.py new file mode 100644 index 0000000000..b2207ffe73 --- /dev/null +++ b/test_suite/utils.py @@ -0,0 +1,75 @@ +#!/usr/bin/env python3 +# coding: utf-8 + +""" +Copyright (c) 2023 Huawei Device Co., Ltd. +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. + +Description: utils for test suite +""" + +import logging +import time +import sys +import subprocess + +log_level_dict = { + 'debug': logging.DEBUG, + 'info': logging.INFO, + 'warn': logging.WARN, + 'error': logging.ERROR +} + +def init_logger(log_level, log_file): + logging.basicConfig(filename=log_file, + level=log_level_dict[log_level], + encoding='utf-8', + format='[%(asctime)s %(filename)s:%(lineno)d]: [%(levelname)s] %(message)s') + + +def is_windows(): + return sys.platform == 'win32' or sys.platform == 'cygwin' + + +def is_mac(): + return sys.platform == 'darwin' + + +def is_linux(): + return sys.platform == 'linux' + + +def get_time_string(): + return time.strftime('%Y%m%d-%H%M%S') + + +def is_esmodule(hap_type): + # if hap_type is stage, it's esmodule. 
# if hap_type is js, fa, compatible 8, it's js_bundle
+    return hap_type in ['stage', 'stage_widget']
+
+
+def is_same_file(file_a, file_b):
+    cmd = []
+    if is_windows():
+        cmd.append('fc')
+    else:
+        cmd.append('diff')
+
+    cmd.extend([file_a, file_b])
+    logging.debug("is_same_file cmd: %s", cmd)
+    process = subprocess.Popen(cmd)
+    process.communicate()
+    ret_code = process.returncode
+
+    return ret_code == 0
\ No newline at end of file
-- 
Gitee

From 558013d7c849bf037437807a27f3cdb5809e8a21 Mon Sep 17 00:00:00 2001
From: wuhailong
Date: Mon, 12 Jun 2023 15:20:29 +0800
Subject: [PATCH 2/5] Add sdk download and decompression

Signed-off-by: wuhailong
---
 test_suite/preparation.py | 56 +++++++++++++++++++++++++------
 test_suite/utils.py       | 74 +++++++++++++++++++++++++++++++++++++--
 2 files changed, 116 insertions(+), 14 deletions(-)

diff --git a/test_suite/preparation.py b/test_suite/preparation.py
index f0b28a957f..8722361c15 100644
--- a/test_suite/preparation.py
+++ b/test_suite/preparation.py
@@ -18,13 +18,18 @@ limitations under the License.
 Description: prepare environment for test
 """
 
+import httpx
 import logging
 import os
 import shutil
+import sys
+import tarfile
+import tqdm
 import validators
+import zipfile
 
 import options
-from utils import is_linux, is_mac, is_windows, get_time_string
+from utils import is_linux, is_mac, is_windows, get_time_string, get_api_version, npm_install, check_gzip_file
 
 def check_deveco_installation():
     if is_linux():
@@ -36,16 +41,44 @@ def check_deveco_installation():
     return True
 
 
-def GetSdkFromRemote():
-    ## TODO: 1)download sdk, 2)unzip sdk, 3)run npm install in ets and js dir
-    return ''
-
-
-def update_sdk_to_deveco(sdk_path):
+def GetSdkFromRemote(sdk_url):
+    deveco_sdk_path = options.configs['deveco_sdk_path']
+    temp_folder = deveco_sdk_path + '_temp'
+    sdk_folder = os.path.join(temp_folder, 'SDK')
+    sdk_temp_file = os.path.join(temp_folder, 'ohos-sdk-full.tar.gz')
+
+    if os.path.exists(temp_folder):
+        shutil.rmtree(temp_folder)
+    os.mkdir(temp_folder)
+    with httpx.stream('GET', sdk_url) as response:
+        with open(sdk_temp_file, "wb") as sdktemp:
+            total_length = int(response.headers.get("content-length"))
+            with tqdm.tqdm(total=total_length, unit="B", unit_scale=True) as pbar:
+                pbar.set_description('ohos-sdk-full.tar.gz')
+                for chunk in response.iter_bytes():
+                    sdktemp.write(chunk)
+                    pbar.update(len(chunk))
+    if not check_gzip_file(sdk_temp_file):
+        logging.error('The downloaded file is not a valid gzip file.')
+        sys.exit(1)
+    with tarfile.open(sdk_temp_file, 'r:gz') as tar:
+        tar.extractall(temp_folder)
+    for item in os.listdir(os.path.join(*[temp_folder, 'ohos-sdk', 'windows'])):
+        with zipfile.ZipFile(os.path.join(*[temp_folder, 'ohos-sdk', 'windows', item])) as zip:
+            zip.extractall(sdk_folder)
+    npm_install(os.path.join(*[sdk_folder, 'ets', 'build-tools', 'ets-loader']))
+    npm_install(os.path.join(*[sdk_folder, 'js', 'build-tools', 'ace-loader']))
+    api_version = get_api_version(os.path.join(*[sdk_folder, 'ets', 'oh-uni-package.json']))
+    return sdk_folder, api_version
+
+def update_sdk_to_deveco(sdk_path, api_version):
+    if not api_version:
+        api_version = '9'
     deveco_sdk_path = options.configs['deveco_sdk_path']
-    shutil.move(deveco_sdk_path, deveco_sdk_path + '-' + get_time_string())
+    deveco_sdk_version_path = os.path.join(deveco_sdk_path, api_version)
+    shutil.move(deveco_sdk_version_path, deveco_sdk_version_path + '-' + get_time_string())
     for item in os.listdir(sdk_path):
-        
shutil.move(os.path.join(sdk_path, item), os.path.join(deveco_sdk_path, item))
+        shutil.move(os.path.join(sdk_path, item), os.path.join(deveco_sdk_version_path, item))
 
 
 def prepare_sdk():
@@ -53,14 +86,15 @@ def prepare_sdk():
     if sdk_arg == '':
         return True # use the sdk specified in config.yaml
 
+    api_version = ''
     sdk_path = sdk_arg
     if validators.url(sdk_arg):
-        sdk_path = GetSdkFromRemote()
+        sdk_path, api_version = GetSdkFromRemote(sdk_arg)
 
     if not os.path.exists(sdk_path):
         return False
 
-    update_sdk_to_deveco(sdk_path)
+    update_sdk_to_deveco(sdk_path, api_version)
     return True
 
diff --git a/test_suite/utils.py b/test_suite/utils.py
index b2207ffe73..9ef6739f0b 100644
--- a/test_suite/utils.py
+++ b/test_suite/utils.py
@@ -18,10 +18,16 @@ limitations under the License.
 Description: utils for test suite
 """
 
+import datetime
+import gzip
+import json
 import logging
-import time
-import sys
+import os
+import requests
+import shutil
 import subprocess
+import sys
+import time
 
 log_level_dict = {
     'debug': logging.DEBUG,
@@ -72,4 +78,66 @@ def is_same_file(file_a, file_b):
     process.communicate()
     ret_code = process.returncode
 
-    return ret_code == 0
\ No newline at end of file
+    return ret_code == 0
+
+
+def get_sdk_url():
+    now_time = datetime.datetime.now().strftime('%Y%m%d%H%M%S')
+    last_hour = (datetime.datetime.now() + datetime.timedelta(hours=-24)).strftime('%Y%m%d%H%M%S')
+    url = 'http://ci.openharmony.cn/api/ci-backend/ci-portal/v1/dailybuilds'
+    download_job = {
+        'pageNum': 1,
+        'pageSize': 1000,
+        'startTime': '',
+        'endTime': '',
+        'projectName': 'openharmony',
+        'branch': 'master',
+        'component': '',
+        'deviceLevel': '',
+        'hardwareBoard': '',
+        'buildStatus': '',
+        'buildFailReason': '',
+        'testResult': '',
+    }
+    download_job['startTime'] = str(last_hour)
+    download_job['endTime'] = str(now_time)
+    post_result = requests.post(url, data = download_job)
+    post_data = json.loads(post_result.text)
+    sdk_url_suffix = ''
+    for ohos_sdk_list in post_data['result']['dailyBuildVos']:
+        try:
+            if 'ohos-sdk-full.tar.gz' in ohos_sdk_list['obsPath']:
+                sdk_url_suffix = ohos_sdk_list['obsPath']
+                break
+        except BaseException as err:
+            logging.error(err)
+    sdk_url = 'http://download.ci.openharmony.cn/' + sdk_url_suffix
+    return sdk_url
+
+
+def npm_install(loader_path):
+    npm_path = shutil.which('npm')
+    os.chdir(loader_path)
+    try:
+        subprocess.run(f'{npm_path} install', check=True)
+    except subprocess.CalledProcessError:
+        logging.error('npm install failed. Please check the local configuration environment.')
+        sys.exit(1)
+    os.chdir(os.path.dirname(__file__))
+
+
+def get_api_version(json_path):
+    with open(json_path, 'r') as uni:
+        uni_cont = uni.read()
+        uni_data = json.loads(uni_cont)
+        api_version = uni_data['apiVersion']
+    return api_version
+
+
+def check_gzip_file(file_path):
+    try:
+        with gzip.open(file_path, 'rb') as gzfile:
+            gzfile.read(1)
+    except (gzip.BadGzipFile, OSError):
+        return False
+    return True
-- 
Gitee

From 14d05f8c20309d3d83e0c3db421fa17c3fbe9244 Mon Sep 17 00:00:00 2001
From: chenqy930
Date: Mon, 12 Jun 2023 19:10:54 +0800
Subject: [PATCH 3/5] Add incremental compile test cases, and other test cases

Signed-off-by: chenqy930
---
 test_suite/config.yaml    |  58 ++++---
 test_suite/execution.py   | 314 ++++++++++++++++++++++++++++++--------
 test_suite/options.py     |   6 +-
 test_suite/preparation.py |  43 +++++-
 test_suite/result.py      |  22 ++-
 test_suite/run.py         |  11 +-
 test_suite/utils.py       |  38 ++---
 7 files changed, 367 insertions(+), 125 deletions(-)

diff --git a/test_suite/config.yaml b/test_suite/config.yaml
index 33773ca493..2231beef38 100644
--- a/test_suite/config.yaml
+++ b/test_suite/config.yaml
@@ -14,17 +14,32 @@
 # Description: configs for test suite
 
 # environment settings
-deveco_path: D:\deveco\bin\devecostudio64.exe
+deveco_path: D:\Software\Deveco-0602\DevEco Studio
 deveco_sdk_path: D:\deveco-sdk\deveco-sdk-0602
+node_js_path: D:\Software\nodejs
 
-# test haps list
-# available values of attribute 'type' are: stage, stage_widget, fa, js, compatible8
+# descriptions about test haps list
+# each hap has the following attributes:
+# -name: name of the hap
+# -path: path of the hap project
+# -type: type of the hap. Available values are: [stage, fa, js, compatible8]
+#        besides, you can also append attributes in [widget, ohosTest, error, exceed_length_error]
+#        -widget: indicates this hap has a widget, which has widgets.abc in stage mode
+#        -error: indicates this hap needs to test compile errors as well
+#        -exceed_length_error: indicates this hap needs to test compiling with an exceeded path length as well
+#        -ohosTest: indicates this hap needs to compile ohosTest as well
+# -build_path: path to build path, in form of list
+# -cache_path: path to cache path, in form of list
+# -output_hap_path: path of output hap(debug mode), in form of list
+# -output_app_path: path of output app(release mode), in form of list
+# -inc_modify_file: path of modified file in incremental build, in form of list
+# -description: description about the hap, this attribute is not used yet.
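+#
+# a purely illustrative (hypothetical) entry combining the attributes above;
+# the paths and values simply reuse the conventions of the real entries below:
+#   - demo_hap:
+#     name: DemoHap
+#     path: D:\haps\demoHap
+#     type: [stage, widget, ohosTest]
+#     build_path: [entry, build, default]
+#     cache_path: [cache, default, default@CompileArkTS, esmodule]
+#     output_hap_path: [outputs, default, entry-default-unsigned.hap]
+#     output_app_path: [outputs, default, app, entry-default.hap]
+#     inc_modify_file: [entry, src, main, ets, pages, Index.ets]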
haps: # complex haps - calendar: name: Calendar path: D:\haps\calendar - type: stage + type: [stage] output_path: output_hap_name: output_app_name: @@ -32,7 +47,7 @@ haps: - fangtaobao: name: FangTaoBao path: D:\haps\fangtaobao - type: stage + type: [stage] output_path: output_hap_name: output_app_name: @@ -40,7 +55,7 @@ haps: - fangdouyin: name: FangDouYin path: D:\haps\fangdouyin - type: stage + type: [stage] output_path: output_hap_name: output_app_name: @@ -48,7 +63,7 @@ haps: - fangweixin: name: FangWeiXin path: D:\haps\fangweixin - type: stage + type: [stage] output_path: output_hap_name: output_app_name: @@ -57,7 +72,7 @@ haps: - arkuistagedemo: name: ArkuiStageDemo path: D:\haps\arkuiStageDemo - type: stage + type: [stage] output_path: output_hap_name: output_app_name: @@ -65,7 +80,7 @@ haps: - arkuifademo: name: ArkuiFaDemo path: D:\haps\arkuiFaDemo - type: fa + type: [fa] output_path: output_hap_name: output_app_name: @@ -74,7 +89,7 @@ haps: - moduledemo: name: ModuleDemo path: D:\haps\moduleDemo - type: stage + type: [stage] output_path: output_hap_name: output_app_name: @@ -83,7 +98,7 @@ haps: - widgetdemo: name: WidgetDemo path: D:\haps\WidgetDemo - type: stage_widget + type: [stage, widget] output_path: output_hap_name: output_app_name: @@ -92,31 +107,26 @@ haps: - idedemo_00: name: IdeStageDemoEmptyAbility path: D:\sdk-test\DemoApplication_EmptyAbility - type: stage + type: [stage, ohosTest, exceed_length_error, error] build_path: [entry, build, default] + cache_path: [cache, default, default@CompileArkTS, esmodule] output_hap_path: [outputs, default, entry-default-unsigned.hap] output_app_path: [outputs, default, app, entry-default.hap] - inc_modify_file: [entry, src, main, ets, pages, index.ets] - baseline_full_compile_time: 0 - baseline_inc_compile_time: 0 - baseline_abc_size: 999999999 + inc_modify_file: [entry, src, main, ets, pages, Index.ets] description: - idedemo_01: name: IdeFaDemoEmptyAbility path: D:\sdk-test\DemoApplication_EmptyAbility_fa - type: fa + type: [fa] build_path: [entry, build, default] output_hap_path: [outputs, default, entry-default-unsigned.hap] output_app_path: [outputs, default, app, entry-default.hap] inc_modify_file: [entry, src, main, ets, MainAbility, pages, index.ets] - baseline_full_compile_time: 0 - baseline_inc_compile_time: 0 - baseline_abc_size: 999999999 description: - idedemo_02: name: IdeCompatible8DemoEmptyAbility path: D:\haps\IdeCompatible8DemoEmptyAbility - type: compatible8 + type: [compatible8] output_path: output_hap_name: output_app_name: @@ -124,12 +134,11 @@ haps: - idedemo_03: name: IdeJsDemoEmptyAbility path: D:\haps\IdeJsDemoEmptyAbility - type: js + type: [js] output_path: output_hap_name: output_app_name: description: - # error demo haps(to be added) # modifications for incremental compilation patch_content: @@ -142,3 +151,6 @@ patch_content: tail: "console.log(a.toString());\n" patch_lines_2: tail: "console.log('This is a new line');\n" + patch_lines_error: + tail: "let a_duplicated_value_for_test_suite = 1; function a_duplicated_value_for_test_suite() {};" + expected_error: "Duplicate identifier 'a_duplicated_value_for_test_suite'" \ No newline at end of file diff --git a/test_suite/execution.py b/test_suite/execution.py index fe21876075..fe3773d1ee 100644 --- a/test_suite/execution.py +++ b/test_suite/execution.py @@ -19,10 +19,12 @@ Description: execute test tasks """ import logging +import json5 import os import re -import subprocess import shutil +import signal +import subprocess import zipfile import options 
@@ -73,7 +75,7 @@ def validate_output_for_esmodule(info, task_type, uncompressed_output_path, is_d return False info.abc_size = modules_abc_size - if task_type == 'stage_widget': + if 'widget' in task_type: widget_abc_path = os.path.join(abc_sourcemap_path, 'widgets.abc') if not os.path.exists(widget_abc_path): info.result = options.TaskResult.failed @@ -201,12 +203,17 @@ def validate(compilation_info, task, is_debug, stdout, stderr): return passed -def compile(task, is_debug): +def get_hvigor_compile_cmd(is_debug): cmd = ['hvigorw'] if is_debug: cmd.append('assembleHap') else: cmd.append('assembleApp') + return cmd + + +def compile(task, is_debug): + cmd = get_hvigor_compile_cmd(is_debug) logging.debug('cmd: %s', cmd) logging.debug("cmd execution path %s", task.path) @@ -232,42 +239,74 @@ def clean_compile(task): out, err = process.communicate(timeout=options.arguments.compile_timeout) -def validate_compile_incremental_time(task, inc_task, is_debug): +def validate_compile_incremental_file(task, inc_task, is_debug, modified_files): + cache_extention = '.protoBin' + modified_cache_files = [] + + # modified_files is a list of file with relative path to .../debug/release + for file in modified_files: + name, ext = os.path.splitext(file) + modified_cache_files.append(name + cache_extention) + if is_debug: - full_info = task.full_compilation_info.debug_info + cache_path = os.path.join(task.path, *(task.build_path), *(task.cache_path), 'debug') + backup_path = task.backup_info.cache_debug inc_info = inc_task.debug_info else: - full_info = task.full_compilation_info.release_info + cache_path = os.path.join(task.path, *(task.build_path), *(task.cache_path), 'release') + backup_path = task.backup_info.cache_release inc_info = inc_task.release_info - if full_info.time < inc_info.time: - inc_info.result = options.TaskResult.failed - inc_info.error_message = 'Incremental compile took more time than full compile.' + for root, dirs, files in os.walk(cache_path): + for file in files: + name, extension = os.path.splitext(file) + if extension == cache_extention: + file_absolute_path = os.path.join(root, file) + file_relative_path = os.path.relpath(file_absolute_path, cache_path) + backup_file = os.path.join(backup_path, file_relative_path) + + if not os.path.exists(backup_file): + logging.debug("backup file not exits: %s", backup_file) + continue + + logging.debug("time stamp same: %s", utils.is_file_timestamps_same(file_absolute_path, backup_file)) + logging.debug("file_relative_path %s", file_relative_path) + logging.debug("file not in list: %s", file_relative_path not in modified_cache_files) + logging.debug("file list: %s", modified_cache_files) + + if not utils.is_file_timestamps_same(file_absolute_path, backup_file) and \ + file_relative_path not in modified_cache_files: + inc_info.result = options.TaskResult.failed + inc_info.error_message = 'Incremental compile found unexpected file timestamp changed. 
Changed file: ' + file_relative_path + return -def prepare_incremental_task(task, task_name): - if task_name in task.incre_compilation_info: - inc_task = task.incre_compilation_info[task_name] +def prepare_incremental_task(task, test_name): + if test_name in task.incre_compilation_info: + inc_task = task.incre_compilation_info[test_name] else: inc_task = options.IncCompilationInfo() - inc_task.name = task_name - task.incre_compilation_info[task_name] = inc_task + inc_task.name = test_name + task.incre_compilation_info[test_name] = inc_task return inc_task def compile_incremental_no_modify(task, is_debug): - task_name = 'no_change' - inc_task = prepare_incremental_task(task, task_name) + test_name = 'no_change' + inc_task = prepare_incremental_task(task, test_name) + logging.info("==========> Running %s for task: %s", test_name, task.name) [stdout, stderr] = compile(task, is_debug) passed = validate(inc_task, task, is_debug, stdout, stderr) - validate_compile_incremental_time(task, inc_task, is_debug) + if passed: + validate_compile_incremental_file(task, inc_task, is_debug, []) def compile_incremental_add_oneline(task, is_debug): - task_name = 'add_oneline' - inc_task = prepare_incremental_task(task, task_name) + test_name = 'add_oneline' + inc_task = prepare_incremental_task(task, test_name) + logging.info("==========> Running %s for task: %s", test_name, task.name) modify_file_item = task.inc_modify_file modify_file = os.path.join(task.path, *modify_file_item) modify_file_backup = modify_file + ".bak" @@ -278,15 +317,18 @@ def compile_incremental_add_oneline(task, is_debug): [stdout, stderr] = compile(task, is_debug) passed = validate(inc_task, task, is_debug, stdout, stderr) - validate_compile_incremental_time(task, inc_task, is_debug) + if passed: + modified_files = [os.path.join(*modify_file_item)] + validate_compile_incremental_file(task, inc_task, is_debug, modified_files) shutil.move(modify_file_backup, modify_file) def compile_incremental_add_file(task, is_debug): - task_name = 'add_file' - inc_task = prepare_incremental_task(task, task_name) + test_name = 'add_file' + inc_task = prepare_incremental_task(task, test_name) + logging.info("==========> Running %s for task: %s", test_name, task.name) modify_file_item = task.inc_modify_file modify_file = os.path.join(task.path, *modify_file_item) modify_file_backup = modify_file + ".bak" @@ -308,28 +350,35 @@ def compile_incremental_add_file(task, is_debug): file.write(options.configs['patch_content']['patch_lines_1']['tail']) [stdout, stderr] = compile(task, is_debug) - validate(inc_task, task, is_debug, stdout, stderr) - validate_compile_incremental_time(task, inc_task, is_debug) + passed = validate(inc_task, task, is_debug, stdout, stderr) + if passed: + modified_files = [os.path.join(*modify_file_item)] + validate_compile_incremental_file(task, inc_task, is_debug, modified_files) shutil.move(modify_file_backup, modify_file) os.remove(new_file) def compile_incremental_delete_file(task, is_debug): - task_name = 'delete_file' - inc_task = prepare_incremental_task(task, task_name) + test_name = 'delete_file' + inc_task = prepare_incremental_task(task, test_name) + logging.info("==========> Running %s for task: %s", test_name, task.name) # this test is after 'add_file', and in test 'add_file' already done remove file, # so here just call compile [stdout, stderr] = compile(task, is_debug) - validate(inc_task, task, is_debug, stdout, stderr) - validate_compile_incremental_time(task, inc_task, is_debug) + passed = validate(inc_task, task, 
is_debug, stdout, stderr) + if passed: + modify_file_item = task.inc_modify_file + modified_files = [os.path.join(*modify_file_item)] + validate_compile_incremental_file(task, inc_task, is_debug, modified_files) def compile_incremental_reverse_hap_mode(task, is_debug): - task_name = 'reverse_hap_mode' - inc_task = prepare_incremental_task(task, task_name) + test_name = 'reverse_hap_mode' + inc_task = prepare_incremental_task(task, test_name) + logging.info("==========> Running %s for task: %s", test_name, task.name) hap_mode = not is_debug [stdout, stderr] = compile(task, hap_mode) validate(inc_task, task, hap_mode, stdout, stderr) @@ -341,6 +390,7 @@ def compile_incremental_modify_bundle_name(task, is_debug): def compile_incremental(task, is_debug): + logging.info("==========> Running task: %s in incremental compilation", task.name) [stdout, stderr] = compile(task, is_debug) [is_success, time_string] = is_compile_success(stdout) @@ -355,6 +405,7 @@ def compile_incremental(task, is_debug): return backup_compile_output(task, is_debug) + backup_compile_cache(task, is_debug) compile_incremental_no_modify(task, is_debug) compile_incremental_add_oneline(task, is_debug) @@ -376,7 +427,6 @@ def backup_compile_output(task, is_debug): backup_output_path = os.path.join(backup_path, 'output', 'debug') if not os.path.exists(backup_output_path): os.makedirs(backup_output_path) - output_file = get_compile_output_file_path(task, True) else: if len(task.backup_info.output_release) == 2: @@ -385,8 +435,8 @@ def backup_compile_output(task, is_debug): backup_output_path = os.path.join(backup_path, 'output', 'release') if not os.path.exists(backup_output_path): os.makedirs(backup_output_path) - output_file = get_compile_output_file_path(task, False) + output_file = get_compile_output_file_path(task, is_debug) shutil.copy(output_file, backup_output_path) backup_output = os.path.join(backup_output_path, os.path.basename(output_file)) backup_time_output = backup_output + '-' + utils.get_time_string() @@ -406,49 +456,75 @@ def backup_compile_cache(task, is_debug): backup_cache_path = os.path.join(backup_path, 'cache') if not os.path.exists(backup_cache_path): os.mkdir(backup_cache_path) - cache_files = os.path.join(task.path, *(task.build_path), 'cache') + cache_files = os.path.join(task.path, *(task.build_path), *(task.cache_path)) if is_debug: - if len(task.backup_info.cache_debug) == 1: + if task.backup_info.cache_debug != '': return + cache_files = os.path.join(cache_files, 'debug') backup_cache_file = os.path.join(backup_cache_path, 'debug') shutil.copytree(cache_files, backup_cache_file) task.backup_info.cache_debug = backup_cache_file else: - if len(task.backup_info.cache_release) == 1: + if task.backup_info.cache_release != '': return + cache_files = os.path.join(cache_files, 'release') backup_cache_file = os.path.join(backup_cache_path, 'release') shutil.copytree(cache_files, backup_cache_file) task.backup_info.cache_release = backup_cache_file -def backup_compile_output_and_cache(task, is_debug): - backup_compile_output(task, is_debug) - backup_compile_cache(task, is_debug) +def is_abc_same_in_haps(hap_1, hap_2): + hap_1_abc_files = [] + hap_2_abc_files = [] + with zipfile.ZipFile(hap_1) as zf1, zipfile.ZipFile(hap_2) as zf2: + for file in zf1.namelist(): + if file.endswith('.abc'): + hap_1_abc_files.append(file) + for file in zf2.namelist(): + if file.endswith('.abc'): + hap_2_abc_files.append(file) + + hap_1_abc_files.sort() + hap_2_abc_files.sort() + + if len(hap_1_abc_files) != len(hap_2_abc_files): 
+ return False + + for idx in range(len(hap_1_abc_files)): + with zf1.open(hap_1_abc_files[idx]) as f1, zf2.open(hap_2_abc_files[idx]) as f2: + data1 = f1.read() + data2 = f2.read() + if data1 != data2: + return False + + return True def execute_full_compile(task): + logging.info("==========> Running task: %s in full compilation", task.name) clean_compile(task) passed = False if options.arguments.hap_mode in ['all', 'release']: [stdout, stderr] = compile(task, False) passed = validate(task.full_compilation_info, task, False, stdout, stderr) if passed: - backup_compile_output_and_cache(task, False) + backup_compile_output(task, False) clean_compile(task) if options.arguments.hap_mode in ['all', 'debug']: [stdout, stderr] = compile(task, True) passed = validate(task.full_compilation_info, task, True, stdout, stderr) if passed: - backup_compile_output_and_cache(task, True) + backup_compile_output(task, True) clean_compile(task) return passed def execute_incremental_compile(task): + logging.info("==========> Running task: %s in incremental compilation", task.name) if options.arguments.hap_mode in ['all', 'release']: compile_incremental(task, False) if options.arguments.hap_mode in ['all', 'debug']: @@ -456,15 +532,13 @@ def execute_incremental_compile(task): clean_compile(task) -def execute_break_compile(task): - # TODO - return '' - - def verify_binary_consistency(task): + test_name = 'binary_consistency' + test_info = options.CompilationInfo() debug_consistency = True release_consistency = True + logging.info("==========> Running %s for task: %s", test_name, task.name) if options.arguments.hap_mode in ['all', 'release']: # will have at lease 1 output from full compile if len(task.backup_info.output_release) == 1: @@ -472,55 +546,173 @@ def verify_binary_consistency(task): backup_compile_output(task, False) if len(task.backup_info.output_release) == 2: - release_consistency = utils.is_same_file( - task.backup_info.output_release[0], task.backup_info.output_release[1]) + release_consistency = is_abc_same_in_haps(task.backup_info.output_release[0], + task.backup_info.output_release[1]) else: release_consistency = False + logging.debug("release consistency: %s", release_consistency) if options.arguments.hap_mode in ['all', 'debug']: - logging.debug("----> len cache: %s", len(task.backup_info.output_debug)) if len(task.backup_info.output_debug) == 1: - logging.debug("----> rebuild") compile(task, True) backup_compile_output(task, True) if len(task.backup_info.output_debug) == 2: - logging.debug('-----> compare') - debug_consistency = utils.is_same_file( - task.backup_info.output_debug[0], task.backup_info.output_debug[1]) + debug_consistency = is_abc_same_in_haps(task.backup_info.output_debug[0], + task.backup_info.output_debug[1]) else: debug_consistency = False + logging.debug("debug consistency: %s", debug_consistency) if debug_consistency and release_consistency: - task.abc_consistency = options.TaskResult.passed + test_info.result = options.TaskResult.passed else: - task.abc_consistency = options.TaskResult.failed + test_info.result = options.TaskResult.failed + + task.other_tests[test_name] = test_info + + +def execute_break_compile(task, is_debug): + test_name = 'break_continue_compile' + test_info = options.CompilationInfo() + + logging.info("==========> Running %s for task: %s", test_name, task.name) + clean_compile(task) + cmd = get_hvigor_compile_cmd(is_debug) + logging.debug('cmd: %s', cmd) + logging.debug("cmd execution path %s", task.path) + process = subprocess.Popen(cmd, shell = 
True, cwd = task.path, + stdout = subprocess.PIPE, + stderr = subprocess.PIPE) + + # TODO: this is signal seems to sent after the build process finished. Check + # this in a longer build time app later + for line in iter(process.stdout.readline, b''): + if b'CompileArkTS' in line: + logging.debug("terminate signal sent") + process.send_signal(signal.SIGTERM) + break + + [stdout, stderr] = process.communicate() + + logging.debug("first compile: stdcout: {}".format(stdout.decode('utf-8', errors="ignore"))) + logging.debug("first compile: stdcerr: {}".format(stderr.decode('utf-8', errors="ignore"))) + + logging.debug("another compile") + [stdout, stderr] = compile(task, is_debug) + + [is_success, time_string] = is_compile_success(stdout) + if not is_success: + test_info.result = options.TaskResult.failed + test_info.error_message = stderr + else: + passed = validate_compile_output(test_info, task, is_debug) + if passed: + test_info.result = options.TaskResult.passed + + task.other_tests[test_name] = test_info + + +def compile_full_with_error(task, is_debug): + test_name = 'compile_with_error' + test_info = options.CompilationInfo() + + logging.info("==========> Running %s for task: %s", test_name, task.name) + modify_file_item = task.inc_modify_file + modify_file = os.path.join(task.path, *modify_file_item) + modify_file_backup = modify_file + ".bak" + shutil.copyfile(modify_file, modify_file_backup) + + with open(modify_file, 'a', encoding='utf-8') as file: + file.write(options.configs['patch_content']['patch_lines_error']['tail']) + + [stdout, stderr] = compile(task, is_debug) + expected_error_message = options.configs['patch_content']['patch_lines_error']['expected_error'] + + if expected_error_message in stderr: + test_info.result = options.TaskResult.passed + else: + test_info.result = options.TaskResult.failed + test_info.error_message = "expected error message: {}, but got {}".format(expected_error_message, stderr) + + task.other_tests[test_name] = test_info + + shutil.move(modify_file_backup, modify_file) + + +def compile_with_exceed_length(task, is_debug): + test_name = 'compile_with_exceed_length' + test_info = options.CompilationInfo() + + logging.info("==========> Running %s for task: %s", test_name, task.name) + # get build-profile.json5 + entry_item = task.build_path[:-2] # to entry path + profile_file = os.path.join(task.path, *entry_item, 'build-profile.json5') + profile_file_backup = profile_file + ".bak" + shutil.copyfile(profile_file, profile_file_backup) + + with open(profile_file, 'r') as file: + profile_data = json5.load(file) + + long_str = 'default123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890' + profile_data['targets'][0]['name'] = long_str + + with open(profile_file, 'w') as file: + json5.dump(profile_data, file) + + [stdout, stderr] = compile(task, is_debug) + expected_error_message = 'The length of path exceeds the maximum length: 259' + + if expected_error_message in stderr: + test_info.result = options.TaskResult.passed + else: + test_info.result = options.TaskResult.failed + test_info.error_message = "expected error message: {}, but got {}".format(expected_error_message, stderr) + + task.other_tests[test_name] = test_info + + shutil.move(profile_file_backup, profile_file) + + +def compile_ohos_test(task): + return def clean_backup(task): if os.path.exists(task.backup_info.cache_path): shutil.rmtree(task.backup_info.cache_path) + return def execute(test_tasks): for task in test_tasks: try: 
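# A hedged sketch of the json5 round-trip that compile_with_exceed_length
# uses: back up build-profile.json5, rewrite the first target name, and keep
# the backup as a restore point. The profile layout ('targets'[0]['name'])
# follows the test's assumptions about the project template.
import shutil

import json5  # third-party package, also used by the test suite itself

def set_first_target_name(profile_file, new_name):
    shutil.copyfile(profile_file, profile_file + '.bak')  # restore point
    with open(profile_file, 'r') as file:
        profile_data = json5.load(file)
    profile_data['targets'][0]['name'] = new_name
    with open(profile_file, 'w') as file:
        json5.dump(profile_data, file)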
# TODO: add sdk path checking(sdk path in hap is same as config.yaml) - logging.info("======> running task: %s", task.name) + logging.info("======> Running task: %s", task.name) if options.arguments.compile_mode in ['all', 'full']: - logging.info("==========> running task: %s in full compilation", task.name) if not execute_full_compile(task): - logging.error("Full compile failed, skip other tests!") + logging.info("Full compile failed, skip other tests!") continue if options.arguments.compile_mode in ['all', 'incremental']: - logging.info("==========> running task: %s in incremental compilation", task.name) execute_incremental_compile(task) - execute_break_compile(task) verify_binary_consistency(task) - logging.info("======> running task: %s finised", task.name) + + # for these tests, use one hapMode maybe enough + is_debug = True if options.arguments.hap_mode == 'debug' else False + execute_break_compile(task, is_debug) + if 'error' in task.type: + compile_full_with_error(task, is_debug) + + if 'exceed_length_error' in task.type: + compile_with_exceed_length(task, is_debug) + + if 'ohosTest' in task.type: + compile_ohos_test(task) + + logging.info("======> Running task: %s finised", task.name) except Exception as e: logging.exception(e) finally: - clean_backup(task) + clean_backup(task) \ No newline at end of file diff --git a/test_suite/options.py b/test_suite/options.py index 714c77e984..ae1c544184 100644 --- a/test_suite/options.py +++ b/test_suite/options.py @@ -76,8 +76,7 @@ class TestTask: self.full_compilation_info = FullCompilationInfo() self.incre_compilation_info = {} - self.break_compilation_info = CompilationInfo() - self.abc_consistency = TaskResult.undefind + self.other_tests = {} self.backup_info = BackupInfo() @@ -141,7 +140,7 @@ def create_test_tasks(): for hap in haps_list: if test_cases == 'all' or test_haps == 'all' \ - or (test_cases and (hap['type'] in test_cases)) \ + or (test_cases and (hap['type'][0] in test_cases)) \ or (test_haps and (hap['name'] in test_haps)): if not os.path.exists(hap['path']): logging.warn("Path of hap %s dosen't exist: %s" % (hap['name'], hap['path'])) @@ -151,6 +150,7 @@ def create_test_tasks(): task.path = hap['path'] task.type = hap['type'] task.build_path = hap['build_path'] + task.cache_path = hap['cache_path'] task.output_hap_path = hap['output_hap_path'] task.output_app_path = hap['output_app_path'] task.inc_modify_file = hap['inc_modify_file'] diff --git a/test_suite/preparation.py b/test_suite/preparation.py index 8722361c15..ef06e8310c 100644 --- a/test_suite/preparation.py +++ b/test_suite/preparation.py @@ -31,13 +31,40 @@ import zipfile import options from utils import is_linux, is_mac, is_windows, get_time_string, get_api_version, npm_install, check_gzip_file -def check_deveco_installation(): +def setup_env(): + old_env = os.environ.copy() + old_env_path = old_env['PATH'] + + java_home = os.path.join(options.configs['deveco_path'], 'jbr') + node_js_path = options.configs['node_js_path'] + java_path = os.path.join(java_home, 'bin') + + os.environ['PATH'] = os.pathsep.join([java_path, node_js_path]) + os.pathsep + old_env_path + os.environ['JAVA_HOME'] = java_home + + logging.debug('old env %s', old_env) + logging.debug('new env %s', os.environ.copy()) + + +def check_deveco_env(): if is_linux(): - return True ## caution! 
TODO: just for test, should be False + return False + if is_mac() or (is_windows() and not options.arguments.pack_only): - if not os.path.exists(options.configs['deveco_path']): + deveco_path = os.path.join(options.configs['deveco_path'], 'bin', 'devecostudio64.exe') + if not os.path.exists(deveco_exe): logging.error("DevEco not found!") return False + + java_path = os.path.join(options.configs['deveco_path'], 'jbr') + if not os.path.exists(java_path): + logging.error("Java not found!") + return False + + if not os.path.exists(options.configs['node_js_path']): + logging.error("Node js not found!") + return False + return True @@ -46,7 +73,7 @@ def GetSdkFromRemote(sdk_url): temp_floder = deveco_sdk_path + '_temp' sdk_floder = os.path.join(temp_floder, 'SDK') sdk_temp_file = os.path.join(temp_floder, 'ohos-sdk-full.tar.gz') - + if os.path.exists(temp_floder): shutil.rmtree(temp_floder) os.mkdir(temp_floder) @@ -76,7 +103,8 @@ def update_sdk_to_deveco(sdk_path, api_version): api_version = '9' deveco_sdk_path = options.configs['deveco_sdk_path'] deveco_sdk_version_path = os.path.join(deveco_sdk_path, api_version) - shutil.move(deveco_sdk_version_path, deveco_sdk_version_path + '-' + get_time_string()) + if os.path.exists(deveco_sdk_version_path): + shutil.move(deveco_sdk_version_path, deveco_sdk_version_path + '-' + get_time_string()) for item in os.listdir(sdk_path): shutil.move(os.path.join(sdk_path, item), os.path.join(deveco_sdk_version_path, item)) @@ -108,4 +136,7 @@ def prepare_image(): def prepare_test_env(): - return check_deveco_installation() and prepare_sdk() and prepare_image() + prepared = check_deveco_env() + setup_env() + prepared = prepared and prepare_sdk() and prepare_image() + return prepared diff --git a/test_suite/result.py b/test_suite/result.py index 1ba6ee7c7d..583535f676 100644 --- a/test_suite/result.py +++ b/test_suite/result.py @@ -62,7 +62,7 @@ def print_test_result(test_result, test_tasks): # print incremental compile result logging.info("--incremental compilation result:") for inc_task in task.incre_compilation_info.values(): - logging.info("incre task: %s", inc_task.name) + logging.info("incre test: %s", inc_task.name) logging.info("debug: %s, abc_size(byte) %s, time(s) %s, error message: %s", inc_task.debug_info.result, inc_task.debug_info.abc_size, @@ -73,7 +73,14 @@ def print_test_result(test_result, test_tasks): inc_task.release_info.abc_size, inc_task.release_info.time, inc_task.release_info.error_message) - logging.info("--abc consistency: %s", task.abc_consistency) + + # print other tests result + for name, task_info in task.other_tests.items(): + logging.info("--test name: %s", name) + logging.info("result: %s, error message: %s", + task_info.result, + task_info.error_message) + logging.info("-----") logging.info("========================================") @@ -112,11 +119,14 @@ def is_incremental_compilation_passed(task_info): def is_task_passed(task): - return (is_full_compilation_passed(task.full_compilation_info) and - is_incremental_compilation_passed(task.incre_compilation_info) and - task.abc_consistency == options.TaskResult.passed) - # TODO: add break compile result + passed = True + + passed = passed and is_full_compilation_passed(task.full_compilation_info) + passed = passed and is_incremental_compilation_passed(task.incre_compilation_info) + for test in task.other_tests.values(): + passed = passed and (test.result == options.TaskResult.passed) + return passed def collect_result(test_result, test_tasks, start_time): diff --git 
a/test_suite/run.py b/test_suite/run.py index 1ce1fe3080..eb92911f89 100644 --- a/test_suite/run.py +++ b/test_suite/run.py @@ -19,6 +19,7 @@ Description: entrance to run sdk test suite """ import logging +import os import sys import time @@ -28,23 +29,25 @@ from preparation import prepare_test_env from result import process_test_result def run(): + old_env = os.environ.copy() try: start_time = time.time() test_tasks = process_options() if not test_tasks: logging.error("No test task found, test suite exit!") - # TODO: make this open when finished: - # sys.exit(1) + sys.exit(1) if not prepare_test_env(): logging.error("Prepare test environment failed, test suite exit!") - # TODO: make this open when finished: - # sys.exit(1) + sys.exit(1) execute(test_tasks) process_test_result(test_tasks, start_time) except Exception as e: logging.exception(e) + finally: + os.environ.clear() + os.environ.update(old_env) if __name__ == '__main__': diff --git a/test_suite/utils.py b/test_suite/utils.py index 9ef6739f0b..a874c4c05d 100644 --- a/test_suite/utils.py +++ b/test_suite/utils.py @@ -25,9 +25,10 @@ import logging import os import requests import shutil +import time import subprocess import sys -import time + log_level_dict = { 'debug': logging.DEBUG, @@ -62,27 +63,11 @@ def get_time_string(): def is_esmodule(hap_type): # if hap_type is stage, it's esmodule. # if hap_type is js, fa, compatible 8, it's js_bundle - return hap_type in ['stage', 'stage_widget'] - - -def is_same_file(file_a, file_b): - cmd = [] - if is_windows(): - cmd.append('fc') - elif is_mac(): - cmd.append('diff') - - cmd.extend([file_a, file_b]) - logging.debug("is_same_file cmd: %s", cmd) - process = subprocess.Popen(cmd) - process.communicate() - ret_code = process.returncode - - return True if ret_code == 0 else False + return 'stage' in hap_type def get_sdk_url(): - now_time = datetime.datetime.now().strftime('%Y%m%d%H%M%S') + now_time = datetime.datetime.now().strftime('%Y%m%d%H%M%S') last_hour = (datetime.datetime.now() + datetime.timedelta(hours=-24)).strftime('%Y%m%d%H%M%S') url = 'http://ci.openharmony.cn/api/ci-backend/ci-portal/v1/dailybuilds' downnload_job = { @@ -119,9 +104,12 @@ def npm_install(loader_path): npm_path = shutil.which('npm') os.chdir(loader_path) try: - subprocess.run(f'{npm_path} install', check=True) - except subprocess.CalledProcessError: - logging.error(f'npm install failed . Please check the local configuration environment.') + result = subprocess.run(f'{npm_path} install --force', check=True, capture_output=True, text=True) + if result.stderr: + logging.error(result.stderr) + except subprocess.CalledProcessError as e: + logging.exception(e) + logging.error(f'npm install failed. 
Please check the local configuration environment.') sys.exit(1) os.chdir(os.path.dirname(__file__)) @@ -141,3 +129,9 @@ def check_gzip_file(file_path): except (gzip.BadGzipFile, OSError): return False return True + + +def is_file_timestamps_same(file_a, file_b): + file_a_mtime = os.stat(file_a).st_mtime + file_b_mtime = os.stat(file_b).st_mtime + return file_a_mtime == file_b_mtime -- Gitee From 9bd3c85dc55014c1d3638b0ba2790ebe946c19ab Mon Sep 17 00:00:00 2001 From: wuhailong Date: Tue, 13 Jun 2023 20:13:35 +0800 Subject: [PATCH 4/5] Add sending mail function Signed-off-by: wuhailong --- test_suite/preparation.py | 17 +--- test_suite/result.py | 174 ++++++++++++++++++++++++++++++++++++-- test_suite/utils.py | 13 +++ 3 files changed, 186 insertions(+), 18 deletions(-) diff --git a/test_suite/preparation.py b/test_suite/preparation.py index ef06e8310c..ec382fc7bd 100644 --- a/test_suite/preparation.py +++ b/test_suite/preparation.py @@ -18,18 +18,16 @@ limitations under the License. Description: prepare environment for test """ -import httpx import logging import os import shutil import sys import tarfile -import tqdm import validators import zipfile import options -from utils import is_linux, is_mac, is_windows, get_time_string, get_api_version, npm_install, check_gzip_file +from utils import is_linux, is_mac, is_windows, get_time_string, get_api_version, npm_install, check_gzip_file, download def setup_env(): old_env = os.environ.copy() @@ -52,7 +50,7 @@ def check_deveco_env(): if is_mac() or (is_windows() and not options.arguments.pack_only): deveco_path = os.path.join(options.configs['deveco_path'], 'bin', 'devecostudio64.exe') - if not os.path.exists(deveco_exe): + if not os.path.exists(deveco_path): logging.error("DevEco not found!") return False @@ -77,21 +75,14 @@ def GetSdkFromRemote(sdk_url): if os.path.exists(temp_floder): shutil.rmtree(temp_floder) os.mkdir(temp_floder) - with httpx.stream('GET', sdk_url) as response: - with open(sdk_temp_file, "wb") as sdktemp: - total_length = int(response.headers.get("content-length")) - with tqdm.tqdm(total=total_length, unit="B", unit_scale=True) as pbar: - pbar.set_description('ohos-sdk-full.tar.gz') - for chunk in response.iter_bytes(): - sdktemp.write(chunk) - pbar.update(len(chunk)) + download(sdk_url, sdk_temp_file, 'ohos-sdk-full.tar.gz') if not check_gzip_file(sdk_temp_file): logging.error('The downloaded file is not a valid gzip file.') sys.exit(1) with tarfile.open(sdk_temp_file, 'r:gz') as tar: tar.extractall(temp_floder) for item in os.listdir(os.path.join(*[temp_floder, 'ohos-sdk', 'windows'])): - with zipfile.ZipFile(os.path.join(os.path.join(*[temp_floder, 'ohos-sdk', 'windows', item]))) as zip: + with zipfile.ZipFile(os.path.join(*[temp_floder, 'ohos-sdk', 'windows', item])) as zip: zip.extractall(os.path.join(sdk_floder)) npm_install(os.path.join(*[sdk_floder, 'ets', 'build-tools', 'ets-loader'])) npm_install(os.path.join(*[sdk_floder, 'js', 'build-tools', 'ace-loader'])) diff --git a/test_suite/result.py b/test_suite/result.py index 583535f676..c95ed374a6 100644 --- a/test_suite/result.py +++ b/test_suite/result.py @@ -17,9 +17,14 @@ limitations under the License. 
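# A list-form variant of the npm call above, shown only as a sketch: passing
# cwd= avoids the os.chdir dance, and capture_output with check=True surfaces
# the failure output before exiting.
import logging
import shutil
import subprocess
import sys

def npm_install_in(directory):
    npm_path = shutil.which('npm')
    try:
        result = subprocess.run([npm_path, 'install', '--force'],
                                cwd=directory, check=True,
                                capture_output=True, text=True)
        if result.stderr:
            logging.error(result.stderr)
    except subprocess.CalledProcessError as error:
        logging.exception(error)
        sys.exit(1)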
Description: output test results
 """
-
 import logging
+import pandas
+import smtplib
 import time
+from email.header import Header
+from email.mime.application import MIMEApplication
+from email.mime.multipart import MIMEMultipart
+from email.mime.text import MIMEText
 
 import options
 
@@ -140,9 +145,168 @@ def collect_result(test_result, test_tasks, start_time):
     test_result.time = round(end_time - start_time, 3)
 
 
-def email_result(test_result):
-    # TODO
-    return
+def email_result(test_result, test_tasks):
+    sender = ''
+    password = ''
+    receiver = []
+    subject = 'SDK Test Daily Report'
+
+    msg = MIMEMultipart()
+    msg['From'] = 'wuhailong'
+    msg['To'] = ", ".join(receiver)
+    msg['Subject'] = Header(subject, 'utf-8')
+
+    summary_data = {
+        'Total test number': [len(test_tasks)],
+        'Took time (s)': [test_result.time],
+        'Passed test number': [len(test_result.passed)],
+        'Failed test number': [len(test_result.failed)]
+    }
+
+    detail_data = []
+    idx = 1
+    for task in test_tasks:
+        task_data = {
+            'Task index': idx,
+            'Task name': task.name,
+            'Task type': task.type
+        }
+
+        full_compilation_debug = task.full_compilation_info.debug_info
+        full_compilation_release = task.full_compilation_info.release_info
+        task_data['Full Compilation - Debug'] = {
+            'Result': full_compilation_debug.result,
+            'ABC Size': full_compilation_debug.abc_size,
+            'Error Message': full_compilation_debug.error_message
+        }
+        task_data['Full Compilation - Release'] = {
+            'Result': full_compilation_release.result,
+            'ABC Size': full_compilation_release.abc_size,
+            'Error Message': full_compilation_release.error_message
+        }
+
+        incremental_compilation = task.incre_compilation_info
+        for inc_task_name, inc_task_info in incremental_compilation.items():
+            inc_task_debug = inc_task_info.debug_info
+            inc_task_release = inc_task_info.release_info
+            task_data[f'Incremental Compilation - {inc_task_name} - Debug'] = {
+                'Result': inc_task_debug.result,
+                'ABC Size': inc_task_debug.abc_size,
+                'Error Message': inc_task_debug.error_message
+            }
+            task_data[f'Incremental Compilation - {inc_task_name} - Release'] = {
+                'Result': inc_task_release.result,
+                'ABC Size': inc_task_release.abc_size,
+                'Error Message': inc_task_release.error_message
+            }
+
+        for other_test_name, other_test_info in task.other_tests.items():
+            task_data[f'Other Test - {other_test_name}'] = {
+                'Result': other_test_info.result,
+                'Error Message': other_test_info.error_message
+            }
+
+        detail_data.append(task_data)
+        idx += 1
+
+    summary_df = pandas.DataFrame(summary_data)
+    detail_df = pandas.DataFrame(detail_data)
+
+    detail_table = '<table border="1">'
+    detail_table += '<tr>'
+    for column in detail_df.columns:
+        detail_table += f'<th>{column}</th>'
+    detail_table += '</tr>'
+    for _, row in detail_df.iterrows():
+        detail_table += '<tr>'
+        for column, value in row.items():
+            if isinstance(value, dict):
+                detail_table += '<td><table border="1">'
+                for sub_column, sub_value in value.items():
+                    detail_table += f'<tr><td>{sub_column}</td><td>{sub_value}</td></tr>'
+                detail_table += '</table></td>'
+            elif isinstance(value, list):
+                detail_table += '<td><table border="1">'
+                for sub_value in value:
+                    detail_table += f'<tr><td>{sub_value}</td></tr>'
+                detail_table += '</table></td>'
+            else:
+                detail_table += f'<td>{value}</td>'
+        detail_table += '</tr>'
+    detail_table += '</table>'
+
+    summary_table = MIMEText(summary_df.to_html(index=False), 'html')
+    msg.attach(summary_table)
+
+    html_content = f'''
+    <html>
+    <body>
+        <h2>Summary</h2>
+        <div>
+            {summary_table}
+        </div>
+        <h2>Detail Information</h2>
+        <div>
+            {detail_table}
+        </div>
+    </body>
+    </html>
+    '''
+
+    today_date = time.strftime("%Y%m%d")
+    daily_report_file = f'SDK-test-report-{today_date}.html'
+    with open(daily_report_file, 'w') as report:
+        report.write(html_content)
+
+    with open(daily_report_file, 'rb') as mesg:
+        attach_txt = MIMEApplication(mesg.read())
+        attach_txt.add_header('Content-Disposition', 'attachment', filename=daily_report_file)
+        msg.attach(attach_txt)
+
+    logging.info('Sending email')
+    smtp_server = 'smtp.163.com'
+    smtp = smtplib.SMTP(smtp_server, 25)
+    smtp.login(sender, password)
+    smtp.sendmail(sender, receiver, msg.as_string())
+    smtp.quit()
+    logging.info('Sent email successfully!')
 
 
 def process_test_result(test_tasks, start_time):
@@ -155,4 +319,4 @@ def process_test_result(test_tasks, start_time):
 
     # TODO: add write result to a file
     if options.arguments.email_result:
-        email_result(test_result)
+        email_result(test_result, test_tasks)
diff --git a/test_suite/utils.py b/test_suite/utils.py
index a874c4c05d..bfe777cd93 100644
--- a/test_suite/utils.py
+++ b/test_suite/utils.py
@@ -20,12 +20,14 @@ Description: utils for test suite
 
 import datetime
 import gzip
+import httpx
 import json
 import logging
 import os
 import requests
 import shutil
 import time
+import tqdm
 import subprocess
 import sys
 
@@ -135,3 +137,14 @@ def is_file_timestamps_same(file_a, file_b):
     file_a_mtime = os.stat(file_a).st_mtime
     file_b_mtime = os.stat(file_b).st_mtime
     return file_a_mtime == file_b_mtime
+
+
+def download(url, temp_file, temp_file_name):
+    with httpx.stream('GET', url) as response:
+        with open(temp_file, "wb") as temp:
+            total_length = int(response.headers.get("content-length"))
+            with tqdm.tqdm(total=total_length, unit="B", unit_scale=True) as pbar:
+                pbar.set_description(temp_file_name)
+                for chunk in response.iter_bytes():
+                    temp.write(chunk)
+                    pbar.update(len(chunk))
--
Gitee


From 262032afd35748f0ca31110a8c2f06a5948c9d0a Mon Sep 17 00:00:00 2001
From: chenqy930
Date: Fri, 16 Jun 2023 11:42:28 +0800
Subject: [PATCH 5/5] Adjust report format, fix bugs, add readme etc

Signed-off-by: chenqy930
---
 test/scripts/email_config.yaml               |  14 +-
 .../scripts/sdk_test}/config.yaml            | 118 +--
 test/scripts/sdk_test/entry.py               |  54 +
 test/scripts/sdk_test/execution.py           | 954 ++++++++++++++++++
 .../scripts/sdk_test}/options.py             |  35 +-
 .../scripts/sdk_test}/preparation.py         |  50 +-
 test/scripts/sdk_test/readme.md              |  26 +
 test/scripts/sdk_test/readme_zh.md           |  25 +
 .../scripts/sdk_test}/result.py              | 324 +++---
 {test_suite => test/scripts/sdk_test}/run.py |   1 +
 .../scripts/sdk_test}/utils.py               |  39 +-
 test/scripts/send_email.py                   |  16 +-
 test/scripts/timer.py                        |   2 +-
 test_suite/execution.py                      | 718 -------------
 14 files changed, 1373 insertions(+), 1003 deletions(-)
 rename {test_suite => test/scripts/sdk_test}/config.yaml (59%)
 create mode 100644 test/scripts/sdk_test/entry.py
 create mode 100644 test/scripts/sdk_test/execution.py
 rename {test_suite => test/scripts/sdk_test}/options.py (81%)
 rename {test_suite => test/scripts/sdk_test}/preparation.py (73%)
 create mode 100644 test/scripts/sdk_test/readme.md
 create mode 100644 test/scripts/sdk_test/readme_zh.md
 rename {test_suite => test/scripts/sdk_test}/result.py (43%)
 rename {test_suite => test/scripts/sdk_test}/run.py (99%)
 rename {test_suite => test/scripts/sdk_test}/utils.py (86%)
 delete mode 100644 test_suite/execution.py

diff --git a/test/scripts/email_config.yaml b/test/scripts/email_config.yaml
index 63183d6d16..77a779d374 100644
--- a/test/scripts/email_config.yaml
+++ b/test/scripts/email_config.yaml
@@ -11,15 +11,17 @@
 # See
the License for the specific language governing permissions and # limitations under the License. -sender_email_address : -auth_code : -receiver_list : -smtp_server: -smtp_port: +sender_email_address : +auth_code : +receiver_list : +smtp_server : +smtp_port : xts_report_file : ".\\auto_xts_test\\result\\summary_report.html" -sdk_report_file : "" +sdk_report_file : ".\\sdk_test\\sdk_test_report.html" perf_report_file : "" attatchment_files : - ".\\auto_xts_test\\result\\details_report.html" - ".\\auto_xts_test\\result\\failures_report.html" + - ".\\sdk_test\\sdk_test_report.html" + - ".\\sdk_test\\sdk_test_log.txt" diff --git a/test_suite/config.yaml b/test/scripts/sdk_test/config.yaml similarity index 59% rename from test_suite/config.yaml rename to test/scripts/sdk_test/config.yaml index 2231beef38..ffd306bf04 100644 --- a/test_suite/config.yaml +++ b/test/scripts/sdk_test/config.yaml @@ -16,7 +16,11 @@ # environment settings deveco_path: D:\Software\Deveco-0602\DevEco Studio deveco_sdk_path: D:\deveco-sdk\deveco-sdk-0602 -node_js_path: D:\Software\nodejs +node_js_path: D:\Software\nodejs # The nodejs which is used in Deveco + +# output settings +output_html_file: ./sdk_test_report.html +log_file: ./sdk_test_log.txt # descriptions about test haps list # each hap have the following attributes: @@ -25,8 +29,8 @@ node_js_path: D:\Software\nodejs # -type: type of the hap. Available values are: [stage, fa, js, compatible8] # besides, you can also append attributes in [widget, ohosTest, error, exceed_length_error] # -widget: indicates this hap has widget, which has widgets.abc in stage mode -# -error: indicates this hap need to test compile error as well -# -exceed_length_error: indicates this hap need to test compile with exceed length as well +# -error: indicates this hap need to test whether compile error is correctly generated as well +# -exceed_length_error: indicates this hap need to test whether compile error is correctly generated when path exceed the limit # -ohosTest: indicates this hap needed to compile ohosTest as well # -build_path: path to build path, in form of list # -cache_path: path to cache path, in form of list @@ -40,68 +44,22 @@ haps: name: Calendar path: D:\haps\calendar type: [stage] - output_path: - output_hap_name: - output_app_name: - description: - - fangtaobao: - name: FangTaoBao - path: D:\haps\fangtaobao - type: [stage] - output_path: - output_hap_name: - output_app_name: - description: - - fangdouyin: - name: FangDouYin - path: D:\haps\fangdouyin - type: [stage] - output_path: - output_hap_name: - output_app_name: - description: - - fangweixin: - name: FangWeiXin - path: D:\haps\fangweixin - type: [stage] - output_path: - output_hap_name: - output_app_name: - description: - # arkui haps - - arkuistagedemo: - name: ArkuiStageDemo - path: D:\haps\arkuiStageDemo - type: [stage] - output_path: - output_hap_name: - output_app_name: - description: - - arkuifademo: - name: ArkuiFaDemo - path: D:\haps\arkuiFaDemo - type: [fa] - output_path: - output_hap_name: - output_app_name: - description: - # module haps - - moduledemo: - name: ModuleDemo - path: D:\haps\moduleDemo - type: [stage] - output_path: - output_hap_name: - output_app_name: + build_path: + cache_path: + output_hap_path: + output_app_path: + inc_modify_file: description: # widget haps - widgetdemo: name: WidgetDemo path: D:\haps\WidgetDemo type: [stage, widget] - output_path: - output_hap_name: - output_app_name: + build_path: + cache_path: + output_hap_path: + output_app_path: + inc_modify_file: description: 
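# An illustrative way to consume the hap list described by the comments
# above; the suite itself reads the file through its options module, and
# PyYAML here is an assumption, not a suite dependency.
import yaml

with open('config.yaml', 'r', encoding='utf-8') as config_file:
    configs = yaml.safe_load(config_file)

# Each list item is a single-key mapping such as {'calendar': {...}}, so the
# hap attributes sit one level down.
stage_haps = [attrs for item in configs['haps']
              for attrs in item.values()
              if 'stage' in attrs['type']]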
# IDE demo haps - idedemo_00: @@ -117,40 +75,48 @@ haps: - idedemo_01: name: IdeFaDemoEmptyAbility path: D:\sdk-test\DemoApplication_EmptyAbility_fa - type: [fa] + type: [fa, ohosTest, exceed_length_error, error] build_path: [entry, build, default] + cache_path: [cache, default, default@LegacyCompileArkTS, jsbundle] output_hap_path: [outputs, default, entry-default-unsigned.hap] output_app_path: [outputs, default, app, entry-default.hap] inc_modify_file: [entry, src, main, ets, MainAbility, pages, index.ets] description: - idedemo_02: name: IdeCompatible8DemoEmptyAbility - path: D:\haps\IdeCompatible8DemoEmptyAbility - type: [compatible8] - output_path: - output_hap_name: - output_app_name: + path: D:\sdk-test\DemoApplication_EmptyAbility_compatible8 + type: [compatible8, ohosTest, exceed_length_error, error] + build_path: [entry, build, default] + cache_path: [cache, default, default@LegacyCompileArkTS, jsbundle] + output_hap_path: [outputs, default, entry-default-unsigned.hap] + output_app_path: [outputs, default, app, entry-default.hap] + inc_modify_file: [entry, src, main, ets, MainAbility, pages, index.ets] description: - idedemo_03: name: IdeJsDemoEmptyAbility - path: D:\haps\IdeJsDemoEmptyAbility - type: [js] - output_path: - output_hap_name: - output_app_name: + path: D:\sdk-test\DemoApplication_EmptyAbility_js + type: [js, ohosTest, exceed_length_error, error] + build_path: [entry, build, default] + cache_path: [cache, default, default@LegacyCompileJS, jsbundle] + output_hap_path: [outputs, default, entry-default-unsigned.hap] + output_app_path: [outputs, default, app, entry-default.hap] + inc_modify_file: [entry, src, main, js, MainAbility, pages, index, index.js] description: -# modifications for incremental compilation +# modifications for incremental compilation and other tests patch_content: - patch_new_file: + patch_new_file_ets: # This new file will be added to the same directory as 'inc_modify_file' specified in haps name: test.ets content: "export function a() {return 'a'}" + patch_new_file_js: + name: test.js + content: "export function a() {return 'a'}" patch_lines_1: head: "import {a} from './test'\n" - tail: "console.log(a.toString());\n" + tail: "\n console.log(a.toString());\n" patch_lines_2: - tail: "console.log('This is a new line');\n" + tail: "\n console.log('This is a new line');\n" patch_lines_error: - tail: "let a_duplicated_value_for_test_suite = 1; function a_duplicated_value_for_test_suite() {};" - expected_error: "Duplicate identifier 'a_duplicated_value_for_test_suite'" \ No newline at end of file + tail: "\n let a_duplicated_value_for_test_suite = 1; function a_duplicated_value_for_test_suite() {};" + expected_error: [Duplicate identifier 'a_duplicated_value_for_test_suite', Identifier 'a_duplicated_value_for_test_suite' has already been declared] \ No newline at end of file diff --git a/test/scripts/sdk_test/entry.py b/test/scripts/sdk_test/entry.py new file mode 100644 index 0000000000..7f73a9afcd --- /dev/null +++ b/test/scripts/sdk_test/entry.py @@ -0,0 +1,54 @@ +#!/usr/bin/env python3 +# coding: utf-8 + +""" +Copyright (c) 2023 Huawei Device Co., Ltd. +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
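# A sketch of how the expected_error list configured above is meant to be
# consumed: the compile-with-error test passes when any of the configured
# diagnostics appears in the compiler's stderr.
def matches_expected_error(stderr, expected_errors):
    return any(expected in stderr for expected in expected_errors)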
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. + +Description: entry to run sdk test daily +""" + +import os +import subprocess +import time + +import utils + + +def run(): + test_start_time = time.strftime('%Y%m%d-%H%M%S') + sdk_url = utils.get_sdk_url() + + cmd = ['python3', 'run.py'] + cmd.extend(['--sdkPath', sdk_url]) + cmd.extend(['--hapMode', 'all']) + cmd.extend(['--compileMode', 'all']) + cmd.extend(['--logLevel', 'debug']) + cmd.extend(['--logFile', 'log' + '_' + test_start_time + '.txt']) + + current_dir = os.path.dirname(os.path.abspath(__file__)) + print(current_dir) + print(cmd) + process = subprocess.Popen(cmd, cwd=current_dir, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE) + + stdout, stderr = process.communicate(timeout=60 * 60 * 5) + stdout_utf8 = stdout.decode("utf-8", errors="ignore") + stderr_utf8 = stderr.decode("utf-8", errors="ignore") + print(f"cmd stdout: {stdout_utf8}") + print(f"cmd stderr: {stderr_utf8}") + + +if __name__ == '__main__': + run() diff --git a/test/scripts/sdk_test/execution.py b/test/scripts/sdk_test/execution.py new file mode 100644 index 0000000000..39b0f1422a --- /dev/null +++ b/test/scripts/sdk_test/execution.py @@ -0,0 +1,954 @@ +#!/usr/bin/env python3 +# coding: utf-8 + +""" +Copyright (c) 2023 Huawei Device Co., Ltd. +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
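# entry.py above waits on communicate(timeout=60 * 60 * 5); when that timeout
# expires, subprocess raises TimeoutExpired. A sketch of a guard around the
# same pattern; the kill-then-drain step is an assumption, not part of
# entry.py itself.
import subprocess

def run_with_timeout(cmd, cwd, timeout_seconds):
    process = subprocess.Popen(cmd, cwd=cwd,
                               stdout=subprocess.PIPE,
                               stderr=subprocess.PIPE)
    try:
        return process.communicate(timeout=timeout_seconds)
    except subprocess.TimeoutExpired:
        process.kill()                # stop the stuck build
        return process.communicate()  # drain the remaining output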
+ +Description: execute test tasks +""" + +import logging +import os +import re +import shutil +import signal +import subprocess +import zipfile + +import json5 + +import options +import utils + + +class IncrementalTest: + @staticmethod + def validate_module_name_change(task, inc_task, is_debug, stdout, stderr, new_module_name): + output_file = get_compile_output_file_path(task, is_debug) + output_dir = os.path.dirname(output_file) + output_file_name = os.path.basename(output_file) + output_file_name_items = output_file_name.split('-') # hap name format: entry-default.hap + output_file_name_items[0] = new_module_name + output_file_name = '-'.join(output_file_name_items) + new_module_name_output_file = os.path.join(output_dir, output_file_name) + + logging.debug(f"new module hap file: {new_module_name_output_file}") + + passed = validate(inc_task, task, is_debug, stdout, stderr, new_module_name_output_file) + logging.debug(f"validate new module hap file, passed {passed}") + if not passed: + return + + if is_debug: + inc_info = inc_task.debug_info + else: + inc_info = inc_task.release_info + uncompressed_output_file = new_module_name_output_file + '.uncompressed' + with zipfile.ZipFile(new_module_name_output_file, 'r') as zip_ref: + zip_ref.extractall(uncompressed_output_file) + + abc_path = os.path.join(uncompressed_output_file, 'ets') + modules_abc_path = os.path.join(abc_path, 'modules.abc') + modules_pa = disasm_abc(modules_abc_path) + if not modules_pa or not os.path.exists(modules_pa): + inc_info.result = options.TaskResult.failed + inc_info.error_message = f'ark_disasm failed, module name change not verified' + return + + func_str = '' + with open(modules_pa, 'r', encoding='utf-8') as pa: + line = pa.readline() + while line: + if '.function' in line.strip(): + func_str = line.strip() + break + line = pa.readline() + + func_define_items = func_str.split('.') + if not new_module_name in func_define_items: + inc_info.result = options.TaskResult.failed + inc_info.error_message = f'expected entry name {new_module_name} in function name, \ + actual function name: {func_str}' + + shutil.rmtree(uncompressed_output_file) + + @staticmethod + def is_file_in_modified_files(task_type, backup_file_relative_path, modified_cache_files): + if 'stage' in task_type: + return backup_file_relative_path in modified_cache_files + else: + non_temporary_path = backup_file_relative_path.split("temporary")[1].lstrip(os.path.sep) + logging.debug(f"non_temporary_path: {non_temporary_path}") + for file in modified_cache_files: + logging.debug(f"modified_cache_files file: {file}") + if non_temporary_path in file: + return True + return False + + @staticmethod + def validate_compile_incremental_file(task, inc_task, is_debug, modified_files): + cache_extension = '' + if 'stage' in task.type: + cache_extention = '.protoBin' + elif 'fa' in task.type or 'compatible8' in task.type: + cache_extention = '.temp.abc' + elif 'js' in task.type: + cache_extention = '.abc' + + modified_cache_files = [] + # modified_files is a list of file with relative path to .../debug/release + for file in modified_files: + name, ext = os.path.splitext(file) + modified_cache_files.append(name + cache_extention) + + logging.debug(f"modified_cache_files: {modified_cache_files}") + + if is_debug: + cache_path = os.path.join(task.path, *(task.build_path), *(task.cache_path), 'debug') + backup_path = task.backup_info.cache_debug + inc_info = inc_task.debug_info + else: + cache_path = os.path.join(task.path, *(task.build_path), *(task.cache_path), 
'release') + backup_path = task.backup_info.cache_release + inc_info = inc_task.release_info + + for root, dirs, files in os.walk(cache_path): + for file in files: + if not file.endswith(cache_extention): + continue + file_absolute_path = os.path.join(root, file) + file_relative_path = os.path.relpath(file_absolute_path, cache_path) + backup_file = os.path.join(backup_path, file_relative_path) + + if not os.path.exists(backup_file): + logging.debug(f"backup file not exits: {backup_file}") + continue + + if utils.is_file_timestamps_same(file_absolute_path, backup_file): + continue + + logging.debug(f"found file ${file_relative_path} changed") + is_file_in_list = IncrementalTest.is_file_in_modified_files( + task.type, file_relative_path, modified_cache_files) + logging.debug(f"is file in list: {is_file_in_list}") + if not is_file_in_list: + inc_info.result = options.TaskResult.failed + inc_info.error_message = f'Incremental compile found unexpected file timestamp changed. \ + Changed file: {file_relative_path}' + return + + @staticmethod + def prepare_incremental_task(task, test_name): + if test_name in task.incre_compilation_info: + inc_task = task.incre_compilation_info[test_name] + else: + inc_task = options.IncCompilationInfo() + inc_task.name = test_name + task.incre_compilation_info[test_name] = inc_task + return inc_task + + @staticmethod + def compile_incremental_no_modify(task, is_debug): + test_name = 'no_change' + inc_task = IncrementalTest.prepare_incremental_task(task, test_name) + + logging.info(f"==========> Running {test_name} for task: {task.name}") + [stdout, stderr] = compile_project(task, is_debug) + passed = validate(inc_task, task, is_debug, stdout, stderr) + if passed: + IncrementalTest.validate_compile_incremental_file(task, inc_task, is_debug, []) + + @staticmethod + def compile_incremental_add_oneline(task, is_debug): + test_name = 'add_oneline' + inc_task = IncrementalTest.prepare_incremental_task(task, test_name) + + logging.info(f"==========> Running {test_name} for task: {task.name}") + modify_file_item = task.inc_modify_file + modify_file = os.path.join(task.path, *modify_file_item) + modify_file_backup = modify_file + ".bak" + shutil.copyfile(modify_file, modify_file_backup) + + with open(modify_file, 'a', encoding='utf-8') as file: + file.write(options.configs.get('patch_content').get('patch_lines_2').get('tail')) + + [stdout, stderr] = compile_project(task, is_debug) + passed = validate(inc_task, task, is_debug, stdout, stderr) + if passed: + modified_files = [os.path.join(*modify_file_item)] + IncrementalTest.validate_compile_incremental_file(task, inc_task, is_debug, modified_files) + + shutil.move(modify_file_backup, modify_file) + + @staticmethod + def compile_incremental_add_file(task, is_debug): + test_name = 'add_file' + inc_task = IncrementalTest.prepare_incremental_task(task, test_name) + + logging.info(f"==========> Running {test_name} for task: {task.name}") + modify_file_item = task.inc_modify_file + modify_file = os.path.join(task.path, *modify_file_item) + modify_file_backup = modify_file + ".bak" + shutil.copyfile(modify_file, modify_file_backup) + + modify_dir = os.path.dirname(modify_file) + if 'js' in task.type: + patch_content = options.configs.get('patch_content').get('patch_new_file_js') + new_file_name = patch_content.get('name') + new_file_content = patch_content.get('content') + else: + patch_content = options.configs.get('patch_content').get('patch_new_file_ets') + new_file_name = patch_content.get('name') + new_file_content = 
patch_content.get('content') + new_file = os.path.join(modify_dir, new_file_name) + + with open(new_file, 'w', encoding='utf-8') as file: + file.writelines(new_file_content) + + with open(modify_file, 'r+', encoding='utf-8') as file: + old_content = file.read() + file.seek(0) + patch_lines = options.configs.get('patch_content').get('patch_lines_1') + file.write(patch_lines.get('head')) + file.write(old_content) + file.write(patch_lines.get('tail')) + + [stdout, stderr] = compile_project(task, is_debug) + passed = validate(inc_task, task, is_debug, stdout, stderr) + if passed: + modified_files = [os.path.join(*modify_file_item)] + IncrementalTest.validate_compile_incremental_file(task, inc_task, is_debug, modified_files) + + shutil.move(modify_file_backup, modify_file) + os.remove(new_file) + + @staticmethod + def compile_incremental_delete_file(task, is_debug): + test_name = 'delete_file' + inc_task = IncrementalTest.prepare_incremental_task(task, test_name) + + logging.info(f"==========> Running {test_name} for task: {task.name}") + # this test is after 'add_file', and in test 'add_file' already done remove file, + # so here just call compile + [stdout, stderr] = compile_project(task, is_debug) + passed = validate(inc_task, task, is_debug, stdout, stderr) + if passed: + modify_file_item = task.inc_modify_file + modified_files = [os.path.join(*modify_file_item)] + IncrementalTest.validate_compile_incremental_file(task, inc_task, is_debug, modified_files) + + @staticmethod + def compile_incremental_reverse_hap_mode(task, is_debug): + test_name = 'reverse_hap_mode' + inc_task = IncrementalTest.prepare_incremental_task(task, test_name) + + logging.info(f"==========> Running {test_name} for task: {task.name}") + hap_mode = not is_debug + [stdout, stderr] = compile_project(task, hap_mode) + validate(inc_task, task, hap_mode, stdout, stderr) + + @staticmethod + def compile_incremental_modify_module_name(task, is_debug): + if 'stage' not in task.type: + return + + test_name = 'change_module_name' + inc_task = IncrementalTest.prepare_incremental_task(task, test_name) + + logging.info(f"==========> Running {test_name} for task: {task.name}") + # modify build-profile.json5 + profile_file = os.path.join(task.path, 'build-profile.json5') + profile_file_backup = profile_file + ".bak" + shutil.copyfile(profile_file, profile_file_backup) + + with open(profile_file, 'r') as file: + profile_data = json5.load(file) + new_module_name = "new_entry" + logging.debug(f"profile_data is: {profile_data}") + profile_data['modules'][0]['name'] = new_module_name + with open(profile_file, 'w') as file: + json5.dump(profile_data, file) + + # modify module.json5 for stage mode + entry_item = task.build_path[:-2] # to entry path + config_file_dir = os.path.join(task.path, *entry_item, 'src', 'main') + config_file = os.path.join(config_file_dir, 'module.json5') + config_file_backup = config_file + ".bak" + shutil.copyfile(config_file, config_file_backup) + + with open(config_file, 'r') as file: + config_data = json5.load(file) + config_data['module']['name'] = new_module_name + with open(config_file, 'w') as file: + json5.dump(config_data, file) + + try: + [stdout, stderr] = compile_project(task, is_debug) + IncrementalTest.validate_module_name_change(task, inc_task, is_debug, stdout, stderr, new_module_name) + except Exception as e: + logging.exception(e) + finally: + shutil.move(profile_file_backup, profile_file) + shutil.move(config_file_backup, config_file) + + +class OtherTest: + @staticmethod + def 
is_abc_same_in_haps(hap_1, hap_2): + hap_1_abc_files = [] + hap_2_abc_files = [] + with zipfile.ZipFile(hap_1) as zf1, zipfile.ZipFile(hap_2) as zf2: + for file in zf1.namelist(): + if file.endswith('.abc'): + hap_1_abc_files.append(file) + for file in zf2.namelist(): + if file.endswith('.abc'): + hap_2_abc_files.append(file) + + hap_1_abc_files.sort() + hap_2_abc_files.sort() + + if len(hap_1_abc_files) != len(hap_2_abc_files): + return False + + for idx, abc_file in enumerate(hap_1_abc_files): + with zf1.open(abc_file) as f1, zf2.open(hap_2_abc_files[idx]) as f2: + data1 = f1.read() + data2 = f2.read() + if data1 != data2: + return False + + return True + + @staticmethod + def verify_binary_consistency(task): + test_name = 'binary_consistency' + test_info = options.CompilationInfo() + task.other_tests[test_name] = test_info + debug_consistency = True + release_consistency = True + + logging.info(f"==========> Running {test_name} for task: {task.name}") + if options.arguments.hap_mode in ['all', 'release']: + # will have at lease 1 output from full compile + if len(task.backup_info.output_release) == 1: + compile_project(task, False) + backup_compile_output(task, False) + + if len(task.backup_info.output_release) == 2: + release_consistency = OtherTest.is_abc_same_in_haps(task.backup_info.output_release[0], + task.backup_info.output_release[1]) + else: + release_consistency = False + logging.debug(f"release consistency: {release_consistency}") + + if options.arguments.hap_mode in ['all', 'debug']: + if len(task.backup_info.output_debug) == 1: + compile_project(task, True) + backup_compile_output(task, True) + + if len(task.backup_info.output_debug) == 2: + debug_consistency = OtherTest.is_abc_same_in_haps(task.backup_info.output_debug[0], + task.backup_info.output_debug[1]) + else: + debug_consistency = False + logging.debug(f"debug consistency: {debug_consistency}") + + if debug_consistency and release_consistency: + test_info.result = options.TaskResult.passed + else: + test_info.result = options.TaskResult.failed + + @staticmethod + def execute_break_compile(task, is_debug): + test_name = 'break_continue_compile' + test_info = options.CompilationInfo() + task.other_tests[test_name] = test_info + + logging.info(f"==========> Running {test_name} for task: {task.name}") + clean_compile(task) + cmd = get_hvigor_compile_cmd(task.path, is_debug) + logging.debug(f'cmd: {cmd}') + logging.debug(f"cmd execution path {task.path}") + process = subprocess.Popen(cmd, shell=False, cwd=task.path, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE) + + for line in iter(process.stdout.readline, b''): + if b'CompileArkTS' in line: + logging.debug("terminate signal sent") + process.send_signal(signal.SIGTERM) + break + + [stdout, stderr] = process.communicate(timeout=options.arguments.compile_timeout) + + logging.debug("first compile: stdcout: %s", stdout.decode('utf-8', errors="ignore")) + logging.warning("first compile: stdcerr: %s", stderr.decode('utf-8', errors="ignore")) + + logging.debug("another compile") + [stdout, stderr] = compile_project(task, is_debug) + + [is_success, time_string] = is_compile_success(stdout) + if not is_success: + test_info.result = options.TaskResult.failed + test_info.error_message = stderr + else: + passed = validate_compile_output(test_info, task, is_debug) + if passed: + test_info.result = options.TaskResult.passed + + @staticmethod + def compile_full_with_error(task, is_debug): + test_name = 'compile_with_error' + test_info = options.CompilationInfo() + 
task.other_tests[test_name] = test_info + + logging.info(f"==========> Running {test_name} for task: {task.name}") + modify_file_item = task.inc_modify_file + modify_file = os.path.join(task.path, *modify_file_item) + modify_file_backup = modify_file + ".bak" + shutil.copyfile(modify_file, modify_file_backup) + + patch_lines_error = options.configs.get('patch_content').get('patch_lines_error') + with open(modify_file, 'a', encoding='utf-8') as file: + file.write(patch_lines_error.get('tail')) + + [stdout, stderr] = compile_project(task, is_debug) + expected_errors = patch_lines_error.get('expected_error') + + passed = False + for expected_error in expected_errors: + if expected_error in stderr: + passed = True + break + + if passed: + test_info.result = options.TaskResult.passed + else: + test_info.result = options.TaskResult.failed + test_info.error_message = f"expected error message: {expected_errors}, but got {stderr}" + + shutil.move(modify_file_backup, modify_file) + + @staticmethod + def compile_with_exceed_length(task, is_debug): + test_name = 'compile_with_exceed_length' + test_info = options.CompilationInfo() + task.other_tests[test_name] = test_info + + logging.info(f"==========> Running {test_name} for task: {task.name}") + # get build-profile.json5 + entry_item = task.build_path[:-2] # to entry path + profile_file = os.path.join(task.path, *entry_item, 'build-profile.json5') + profile_file_backup = profile_file + ".bak" + shutil.copyfile(profile_file, profile_file_backup) + + with open(profile_file, 'r') as file: + profile_data = json5.load(file) + + long_str = 'default1234567890123456789012345678901234567890123456789012345678901234567890123456789' + \ + '012345678901234567890123456789' + logging.debug("long_str: %s", long_str) + profile_data['targets'][0]['name'] = long_str + + with open(profile_file, 'w') as file: + json5.dump(profile_data, file) + + [stdout, stderr] = compile_project(task, is_debug) + expected_error_message = 'The length of path exceeds the maximum length: 259' + + if expected_error_message in stderr: + test_info.result = options.TaskResult.passed + else: + test_info.result = options.TaskResult.failed + test_info.error_message = f"expected error message: {expected_error_message}, but got {stderr}" + + shutil.move(profile_file_backup, profile_file) + + @staticmethod + def compile_ohos_test(task): + test_name = 'ohos_test' + test_info = options.CompilationInfo() + task.other_tests[test_name] = test_info + + logging.info(f"==========> Running {test_name} for task: {task.name}") + # ohosTest has only debug mode + cmd = [get_hvigor_path(task.path), '--mode', 'module', '-p', 'module=entry@ohosTest', 'assembleHap'] + [stdout, stderr] = compile_project(task, True, cmd) + [is_success, time_string] = is_compile_success(stdout) + if not is_success: + test_info.result = options.TaskResult.failed + test_info.error_message = stderr + else: + output_file = get_compile_output_file_path(task, True) + output_dir = os.path.dirname(output_file) + output_file_name = os.path.basename(output_file) + + ohos_test_str = 'ohosTest' + output_file_name_items = output_file_name.split('-') # hap name format: entry-default-signed.hap + output_file_name_items[-2] = ohos_test_str # ohosTest hap format: entry-ohosTest-signed.hap + output_file_name = '-'.join(output_file_name_items) + + output_dir_items = output_dir.split(os.path.sep) + output_dir_items[-1] = ohos_test_str + if utils.is_windows(): + # for windows, need to add an empty string to mark between disk identifier and path + 
+ output_dir_items.insert(1, os.path.sep)
+ ohos_test_output_file = os.path.join(*output_dir_items, output_file_name)
+
+ passed = validate_compile_output(test_info, task, True, ohos_test_output_file)
+ if passed:
+ test_info.result = options.TaskResult.passed
+
+
+def disasm_abc(abc_file):
+ sdk_path = options.configs.get('deveco_sdk_path')
+ ark_disasm_path = ''
+ if utils.is_windows():
+ ark_disasm = 'ark_disasm.exe'
+ else:
+ ark_disasm = 'ark_disasm'
+ # try to find ark_disasm in api 10 first, then api 9
+ ark_disasm_10_path = os.path.join(sdk_path, '10', 'toolchains', ark_disasm)
+ ark_disasm_9_path = os.path.join(sdk_path, '9', 'toolchains', ark_disasm)
+ if os.path.exists(ark_disasm_10_path):
+ ark_disasm_path = ark_disasm_10_path
+ elif os.path.exists(ark_disasm_9_path):
+ ark_disasm_path = ark_disasm_9_path
+ else:
+ logging.error("ark_disasm executable not found")
+ return ''
+
+ pa_file = abc_file + '.pa'
+ cmd = [ark_disasm_path, '--verbose', abc_file, pa_file]
+ logging.debug(f'cmd: {cmd}')
+ process = subprocess.Popen(cmd, shell=False, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ [stdout, stderr] = process.communicate(timeout=options.arguments.compile_timeout)
+
+ logging.debug("disasm stdout: %s", stdout.decode('utf-8', errors="ignore"))
+ logging.warning("disasm stderr: %s", stderr.decode('utf-8', errors="ignore"))
+
+ return pa_file
+
+
+def is_abc_debug_info_correct(abc_file, is_debug):
+ pa_file = disasm_abc(abc_file)
+ if not os.path.exists(pa_file):
+ logging.error(f"pa file does not exist: {pa_file}")
+ return False
+
+ debug_info_block_str = 'LOCAL_VARIABLE_TABLE'
+ has_debug_info_block = False
+ with open(pa_file, 'r', encoding='utf-8') as pa:
+ line = pa.readline()
+ while line:
+ if debug_info_block_str in line.strip():
+ has_debug_info_block = True
+ break
+ line = pa.readline()
+
+ if is_debug:
+ return has_debug_info_block
+ else:
+ return not has_debug_info_block
+
+
+def validate_output_for_jsbundle(info, task, uncompressed_output_path, is_debug):
+ abc_files = []
+ for root, dirs, files in os.walk(uncompressed_output_path):
+ for file in files:
+ if file.endswith('.abc'):
+ abc_files.append(os.path.join(root, file))
+
+ total_size = 0
+ for file in abc_files:
+ total_size += os.path.getsize(os.path.join(uncompressed_output_path, file))
+ # skip this check for compatible8 outputs, as disasm may fail on them
+ if 'compatible8' not in task.type and not is_abc_debug_info_correct(file, is_debug):
+ info.result = options.TaskResult.failed
+ info.error_message = f"{file} debug info not correct"
+ return False
+
+ if total_size == 0:
+ info.result = options.TaskResult.failed
+ info.error_message = "abc not found or abc size is 0"
+ return False
+ else:
+ info.abc_size = total_size
+
+ if is_debug:
+ for file in abc_files:
+ sourcemap_file = file.replace('.abc', '.js.map')
+ if not os.path.exists(os.path.join(uncompressed_output_path, sourcemap_file)):
+ info.result = options.TaskResult.failed
+ info.error_message = "sourcemap not found"
+ return False
+
+ return True
+
+
+def validate_output_for_esmodule(info, task, uncompressed_output_path, is_debug):
+ abc_generated_path = os.path.join(uncompressed_output_path, 'ets')
+
+ modules_abc_path = os.path.join(abc_generated_path, 'modules.abc')
+ if not os.path.exists(modules_abc_path):
+ info.result = options.TaskResult.failed
+ info.error_message = "modules.abc not found"
+ return False
+
+ modules_abc_size = os.path.getsize(modules_abc_path)
+ if modules_abc_size <= 0:
+ info.result = options.TaskResult.failed
+ info.error_message = "modules.abc size is 0"
+ return False
+ if not is_abc_debug_info_correct(modules_abc_path, is_debug):
+ info.result = options.TaskResult.failed
+ info.error_message = "modules.abc debug info not correct"
+ return False
+ info.abc_size = modules_abc_size
+
+ if 'widget' in task.type:
+ widget_abc_path = os.path.join(abc_generated_path, 'widgets.abc')
+ if not os.path.exists(widget_abc_path):
+ info.result = options.TaskResult.failed
+ info.error_message = "widgets.abc not found"
+ return False
+
+ widgets_abc_size = os.path.getsize(widget_abc_path)
+ if widgets_abc_size <= 0:
+ info.result = options.TaskResult.failed
+ info.error_message = "widgets.abc size is 0"
+ return False
+ if not is_abc_debug_info_correct(widget_abc_path, is_debug):
+ info.result = options.TaskResult.failed
+ info.error_message = "widgets.abc debug info not correct"
+ return False
+ info.abc_size += widgets_abc_size
+
+ if is_debug:
+ sourcemap_path = abc_generated_path
+ else:
+ sourcemap_path = os.path.join(task.path, *(task.build_path), *(task.cache_path), 'release')
+ sourcemap_file = os.path.join(sourcemap_path, 'sourceMaps.map')
+ if not os.path.exists(sourcemap_file):
+ info.result = options.TaskResult.failed
+ info.error_message = "sourcemap not found"
+ return False
+
+ return True
+
+
+def collect_compile_time(info, time_string):
+ time_min = 0.0
+ time_second = 0.0
+ time_millisecond = 0.0
+
+ time_items = time_string.split()
+ for idx, item in enumerate(time_items):
+ if item == 'min':
+ time_min = float(time_items[idx - 1]) * 60
+ if item == 's':
+ time_second = float(time_items[idx - 1])
+ if item == 'ms':
+ time_millisecond = round(float(time_items[idx - 1]) / 1000, 3)
+
+ info.time = round(time_min + time_second + time_millisecond, 3)
+
+
+def get_compile_output_file_path(task, is_debug):
+ output_file = ''
+
+ if is_debug:
+ output_file = os.path.join(task.path, *(task.build_path), *(task.output_hap_path))
+ else:
+ output_file = os.path.join(task.path, *(task.build_path), *(task.output_app_path))
+
+ return output_file
+
+
+def validate_compile_output(info, task, is_debug, output_file=''):
+ passed = False
+
+ if output_file == '':
+ output_file = get_compile_output_file_path(task, is_debug)
+ uncompressed_output_file = output_file + '.uncompressed'
+
+ if not os.path.exists(output_file):
+ logging.error("output file for task %s does not exist: %s", task.name, output_file)
+ passed = False
+
+ info.result = options.TaskResult.failed
+ info.error_message = "Hap not found"
+ return passed
+ try:
+ with zipfile.ZipFile(output_file, 'r') as zip_ref:
+ zip_ref.extractall(uncompressed_output_file)
+ except Exception as e:
+ logging.error(f"unzip exception: {e}")
+ logging.error(f"uncompressing output file for task {task.name} failed. output file: {output_file}")
+ passed = False
+
+ info.result = options.TaskResult.failed
+ info.error_message = "Hap decompression failed, cannot examine build products"
+ return passed
+
+ if utils.is_esmodule(task.type):
+ passed = validate_output_for_esmodule(info, task, uncompressed_output_file, is_debug)
+ else:
+ passed = validate_output_for_jsbundle(info, task, uncompressed_output_file, is_debug)
+
+ shutil.rmtree(uncompressed_output_file)
+
+ return passed
+
+
+def run_compile_output(info, task_path):
+ # TODO:
+ # 1) install hap
+ # 2) run hap and verify
+ return False
+
+
+def is_compile_success(compile_stdout):
+ pattern = r"BUILD SUCCESSFUL in (\d+ min )?(\d+ s )?(\d+ ms)?"
+ # e.g. matches "BUILD SUCCESSFUL in 1 min 2 s 345 ms"; each duration field is optional
+ match_result = re.search(pattern, compile_stdout)
+ if not match_result:
+ return [False, '']
+
+ return [True, match_result.group(0)]
+
+
+def validate(compilation_info, task, is_debug, stdout, stderr, output_file=''):
+ info = {}
+ if is_debug:
+ info = compilation_info.debug_info
+ else:
+ info = compilation_info.release_info
+
+ # the return code is 1 whenever anything was written to stderr, so use the
+ # "BUILD SUCCESSFUL" line in stdout to judge success instead
+ [is_success, time_string] = is_compile_success(stdout)
+ if not is_success:
+ info.result = options.TaskResult.failed
+ info.error_message = stderr
+ return False
+
+ passed = validate_compile_output(info, task, is_debug, output_file)
+
+ if options.arguments.run_haps:
+ passed &= run_compile_output(info, task.path)
+
+ if passed:
+ collect_compile_time(info, time_string)
+ info.result = options.TaskResult.passed
+
+ return passed
+
+
+def get_hvigor_path(project_path):
+ hvigor = ''
+ if utils.is_windows():
+ hvigor = os.path.join(project_path, 'hvigorw.bat')
+ else:
+ hvigor = os.path.join(project_path, 'hvigorw')
+ return hvigor
+
+
+def get_hvigor_compile_cmd(project_path, is_debug):
+ cmd = [get_hvigor_path(project_path)]
+ if is_debug:
+ cmd.append('assembleHap')
+ else:
+ cmd.append('assembleApp')
+ return cmd
+
+
+def compile_project(task, is_debug, cmd=''):
+ if not cmd:
+ cmd = get_hvigor_compile_cmd(task.path, is_debug)
+
+ logging.debug(f'cmd: {cmd}')
+ logging.debug(f"cmd execution path {task.path}")
+ process = subprocess.Popen(cmd, shell=False, cwd=task.path,
+ stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ stdout, stderr = process.communicate(timeout=options.arguments.compile_timeout)
+ stdout_utf8 = stdout.decode("utf-8", errors="ignore")
+ stderr_utf8 = stderr.decode("utf-8", errors="ignore")
+ logging.debug(f"cmd stdout: {stdout_utf8}")
+ logging.debug(f"cmd stderr: {stderr_utf8}")
+
+ return [stdout_utf8, stderr_utf8]
+
+
+def clean_compile(task):
+ cmd = [get_hvigor_path(task.path), 'clean']
+ logging.debug(f'cmd: {cmd}')
+ logging.debug(f"cmd execution path {task.path}")
+ process = subprocess.Popen(cmd, shell=False, cwd=task.path,
+ stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ out, err = process.communicate(timeout=options.arguments.compile_timeout)
+
+
+def compile_incremental(task, is_debug):
+ logging.info(f"==========> Running task: {task.name} in incremental compilation")
+ [stdout, stderr] = compile_project(task, is_debug)
+
+ [is_success, time_string] = is_compile_success(stdout)
+ if not is_success:
+ logging.error("Incremental compile tests skipped: the initial full compile failed!")
+ return
+
+ if options.arguments.compile_mode == 'incremental':
+ passed = validate(task.full_compilation_info, task, is_debug, stdout, stderr)
+ if not passed:
+ logging.error("Incremental compile tests skipped: the initial full compile failed validation!")
+ return
+
+ backup_compile_output(task, is_debug)
+ backup_compile_cache(task, is_debug)
+
+ IncrementalTest.compile_incremental_no_modify(task, is_debug)
+ IncrementalTest.compile_incremental_add_oneline(task, is_debug)
+ IncrementalTest.compile_incremental_add_file(task, is_debug)
+ IncrementalTest.compile_incremental_delete_file(task, is_debug)
+ IncrementalTest.compile_incremental_reverse_hap_mode(task, is_debug)
+ IncrementalTest.compile_incremental_modify_module_name(task, is_debug)
+
+
+def backup_compile_output(task, is_debug):
+ backup_path = task.backup_info.cache_path
+ if not os.path.exists(backup_path):
+ os.mkdir(backup_path)
+
+ if is_debug:
+ if len(task.backup_info.output_debug) == 2:
+ return
+
+ backup_output_path = 
os.path.join(backup_path, 'output', 'debug') + if not os.path.exists(backup_output_path): + os.makedirs(backup_output_path) + + else: + if len(task.backup_info.output_release) == 2: + return + + backup_output_path = os.path.join(backup_path, 'output', 'release') + if not os.path.exists(backup_output_path): + os.makedirs(backup_output_path) + + output_file = get_compile_output_file_path(task, is_debug) + shutil.copy(output_file, backup_output_path) + backup_output = os.path.join(backup_output_path, os.path.basename(output_file)) + backup_time_output = backup_output + '-' + utils.get_time_string() + shutil.move(backup_output, backup_time_output) + + if is_debug: + task.backup_info.output_debug.append(backup_time_output) + else: + task.backup_info.output_release.append(backup_time_output) + + +def backup_compile_cache(task, is_debug): + backup_path = task.backup_info.cache_path + if not os.path.exists(backup_path): + os.mkdir(backup_path) + + backup_cache_path = os.path.join(backup_path, 'cache') + if not os.path.exists(backup_cache_path): + os.mkdir(backup_cache_path) + cache_files = os.path.join(task.path, *(task.build_path), *(task.cache_path)) + + if is_debug: + if task.backup_info.cache_debug != '': + return + + cache_files = os.path.join(cache_files, 'debug') + backup_cache_file = os.path.join(backup_cache_path, 'debug') + shutil.copytree(cache_files, backup_cache_file) + task.backup_info.cache_debug = backup_cache_file + else: + if task.backup_info.cache_release != '': + return + + cache_files = os.path.join(cache_files, 'release') + backup_cache_file = os.path.join(backup_cache_path, 'release') + shutil.copytree(cache_files, backup_cache_file) + task.backup_info.cache_release = backup_cache_file + + +def execute_full_compile(task): + logging.info(f"==========> Running task: {task.name} in full compilation") + clean_compile(task) + passed = False + if options.arguments.hap_mode in ['all', 'release']: + [stdout, stderr] = compile_project(task, False) + passed = validate(task.full_compilation_info, task, False, stdout, stderr) + if passed: + backup_compile_output(task, False) + clean_compile(task) + if options.arguments.hap_mode in ['all', 'debug']: + [stdout, stderr] = compile_project(task, True) + passed = validate(task.full_compilation_info, task, True, stdout, stderr) + if passed: + backup_compile_output(task, True) + clean_compile(task) + + return passed + + +def execute_incremental_compile(task): + logging.info(f"==========> Running task: {task.name} in incremental compilation") + if options.arguments.hap_mode in ['all', 'release']: + compile_incremental(task, False) + if options.arguments.hap_mode in ['all', 'debug']: + compile_incremental(task, True) + clean_compile(task) + + +def clean_backup(task): + if os.path.exists(task.backup_info.cache_path): + shutil.rmtree(task.backup_info.cache_path) + return + + +def execute(test_tasks): + for task in test_tasks: + try: + logging.info(f"======> Running task: {task.name}") + if options.arguments.compile_mode in ['all', 'full']: + if not execute_full_compile(task): + logging.info("Full compile failed, skip other tests!") + continue + + if options.arguments.compile_mode in ['all', 'incremental']: + execute_incremental_compile(task) + + OtherTest.verify_binary_consistency(task) + + # for these tests, use one hapMode maybe enough + is_debug = True if options.arguments.hap_mode == 'debug' else False + OtherTest.execute_break_compile(task, is_debug) + if 'error' in task.type: + OtherTest.compile_full_with_error(task, is_debug) + + if 
'exceed_length_error' in task.type:
+ OtherTest.compile_with_exceed_length(task, is_debug)
+
+ if 'ohosTest' in task.type:
+ OtherTest.compile_ohos_test(task)
+
+ logging.info(f"======> Running task: {task.name} finished")
+ except Exception as e:
+ logging.exception(e)
+ finally:
+ clean_backup(task)
diff --git a/test_suite/options.py b/test/scripts/sdk_test/options.py
similarity index 81%
rename from test_suite/options.py
rename to test/scripts/sdk_test/options.py
index ae1c544184..4d724f3203 100644
--- a/test_suite/options.py
+++ b/test/scripts/sdk_test/options.py
@@ -21,12 +21,16 @@ Description: process options and configs for test suite
 import argparse
 import logging
 import os
+from enum import Enum
+
 import yaml
-from enum import Enum
 from utils import init_logger
-YAML_PATH = './config.yaml'
+
+arguments = {}
+configs = {}
+
 class TaskResult(Enum):
 undefind = 0
@@ -84,7 +88,7 @@ class TestTask:
 def parse_args():
 parser = argparse.ArgumentParser()
 parser.add_argument('--sdkPath', type=str, dest='sdk_path', default='',
- help='specify sdk path if need to update sdk')
+ help='specify sdk path if the sdk needs to be updated. Defaults to the sdk specified in config.yaml')
 parser.add_argument('--buildMode', type=str, dest='build_mode', default='all',
 choices=['all', 'assemble', 'preview', 'hotreload', 'hotfix'],
 help='specify build mode')
@@ -99,38 +103,31 @@ def parse_args():
 help='specify test cases')
 parser.add_argument('--testHap', type=str, dest='test_hap', default='all',
 help="specify test haps, option can be 'all' or a list of haps seperated by ','")
- parser.add_argument('--imagePath', type=str, dest='image_path', default='all',
- help='specify image path if need to update rk/phone images')
- parser.add_argument('--packOnly', type=bool, dest='pack_only', default=True,
- help='specify how to verify. if packOnly is true, will not verify results by running haps')
- parser.add_argument('--outputBinarySize', type=bool, dest='output_binary_size', default=True,
- help='specify whether to output binary size to the result')
- parser.add_argument('--outputCompileTime', type=bool, dest='output_compile_time', default=True,
- help='specify whether to output compilation time to the result')
- parser.add_argument('--emailResult', type=bool, dest='email_result', default=True,
- help='specify whether to send result by email')
- parser.add_argument('--resultFile', type=str, dest='result_file', default='',
- help='specify whether to output results to a file')
+ parser.add_argument('--imagePath', type=str, dest='image_path', default='',
+ help='specify image path if rk/phone images need to be updated. Defaults to not updating images')
+ parser.add_argument('--runHaps', dest='run_haps', action='store_true', default=False,
+ help='specify whether to verify by running the haps on board.')
 parser.add_argument('--logLevel', type=str, dest='log_level', default='error',
 choices=['debug', 'info', 'warn', 'error'],
 help='specify log level of test suite')
 parser.add_argument('--logFile', type=str, dest='log_file', default='',
 help='specify the file log outputs to, empty string will output to console')
- parser.add_argument('--compileTimeout', type=int, dest='compile_timeout', default=600,
+ parser.add_argument('--compileTimeout', type=int, dest='compile_timeout', default=1800,
 help='specify deveco compilation timeout')
 global arguments
 arguments = parser.parse_args()
 
 
 def parse_configs():
- with open(YAML_PATH, 'r') as config_file:
+ config_yaml = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'config.yaml')
+ with open(config_yaml, 'r') as config_file:
 global configs
 configs = yaml.safe_load(config_file)
 
 
 def create_test_tasks():
 task_list = []
- haps_list = configs['haps']
+ haps_list = configs.get('haps')
 test_cases = 'all' if arguments.test_case == 'all' else []
 test_haps = 'all' if arguments.test_hap == 'all' else []
 if test_cases != 'all':
@@ -143,7 +140,7 @@ def create_test_tasks():
 or (test_cases and (hap['type'][0] in test_cases)) \
 or (test_haps and (hap['name'] in test_haps)):
 if not os.path.exists(hap['path']):
- logging.warn("Path of hap %s dosen't exist: %s" % (hap['name'], hap['path']))
+ logging.warning("Path of hap %s doesn't exist: %s", hap['name'], hap['path'])
 continue
 task = TestTask()
 task.name = hap['name']
diff --git a/test_suite/preparation.py b/test/scripts/sdk_test/preparation.py
similarity index 73%
rename from test_suite/preparation.py
rename to test/scripts/sdk_test/preparation.py
index ec382fc7bd..d22cf36100 100644
--- a/test_suite/preparation.py
+++ b/test/scripts/sdk_test/preparation.py
@@ -23,53 +23,45 @@ import os
 import shutil
 import sys
 import tarfile
-import validators
 import zipfile
+
+import validators
+
 import options
 from utils import is_linux, is_mac, is_windows, get_time_string, get_api_version, npm_install, check_gzip_file, download
+
 def setup_env():
 old_env = os.environ.copy()
 old_env_path = old_env['PATH']
- java_home = os.path.join(options.configs['deveco_path'], 'jbr')
- node_js_path = options.configs['node_js_path']
+ java_home = os.path.join(options.configs.get('deveco_path'), 'jbr')
+ node_js_path = options.configs.get('node_js_path')
 java_path = os.path.join(java_home, 'bin')
 os.environ['PATH'] = os.pathsep.join([java_path, node_js_path]) + os.pathsep + old_env_path
 os.environ['JAVA_HOME'] = java_home
-
- logging.debug('old env %s', old_env)
- logging.debug('new env %s', os.environ.copy())
-
def check_deveco_env():
 if is_linux():
 return False
- if is_mac() or (is_windows() and not options.arguments.pack_only):
- deveco_path = os.path.join(options.configs['deveco_path'], 'bin', 'devecostudio64.exe')
- if not os.path.exists(deveco_path):
- logging.error("DevEco not found!")
- return False
-
- java_path = os.path.join(options.configs['deveco_path'], 'jbr')
+ java_path = os.path.join(options.configs.get('deveco_path'), 'jbr')
 if not os.path.exists(java_path):
 logging.error("Java not found!")
 return False
- if not os.path.exists(options.configs['node_js_path']):
+ if not os.path.exists(options.configs.get('node_js_path')):
 logging.error("Node js not found!")
 return False
 return True
-def GetSdkFromRemote(sdk_url):
- deveco_sdk_path = 
options.configs['deveco_sdk_path'] +def get_sdk_from_remote(sdk_url): + deveco_sdk_path = options.configs.get('deveco_sdk_path') temp_floder = deveco_sdk_path + '_temp' - sdk_floder = os.path.join(temp_floder, 'SDK') sdk_temp_file = os.path.join(temp_floder, 'ohos-sdk-full.tar.gz') if os.path.exists(temp_floder): @@ -78,21 +70,27 @@ def GetSdkFromRemote(sdk_url): download(sdk_url, sdk_temp_file, 'ohos-sdk-full.tar.gz') if not check_gzip_file(sdk_temp_file): logging.error('The downloaded file is not a valid gzip file.') - sys.exit(1) + return '', '' with tarfile.open(sdk_temp_file, 'r:gz') as tar: tar.extractall(temp_floder) + + sdk_floder = os.path.join(temp_floder, 'SDK') for item in os.listdir(os.path.join(*[temp_floder, 'ohos-sdk', 'windows'])): - with zipfile.ZipFile(os.path.join(*[temp_floder, 'ohos-sdk', 'windows', item])) as zip: - zip.extractall(os.path.join(sdk_floder)) - npm_install(os.path.join(*[sdk_floder, 'ets', 'build-tools', 'ets-loader'])) - npm_install(os.path.join(*[sdk_floder, 'js', 'build-tools', 'ace-loader'])) + with zipfile.ZipFile(os.path.join(*[temp_floder, 'ohos-sdk', 'windows', item])) as zip_file: + zip_file.extractall(os.path.join(sdk_floder)) + + if not npm_install(os.path.join(*[sdk_floder, 'ets', 'build-tools', 'ets-loader'])) or \ + not npm_install(os.path.join(*[sdk_floder, 'js', 'build-tools', 'ace-loader'])): + return '', '' + api_version = get_api_version(os.path.join(*[sdk_floder, 'ets', 'oh-uni-package.json'])) return sdk_floder, api_version + def update_sdk_to_deveco(sdk_path, api_version): if not api_version: api_version = '9' - deveco_sdk_path = options.configs['deveco_sdk_path'] + deveco_sdk_path = options.configs.get('deveco_sdk_path') deveco_sdk_version_path = os.path.join(deveco_sdk_path, api_version) if os.path.exists(deveco_sdk_version_path): shutil.move(deveco_sdk_version_path, deveco_sdk_version_path + '-' + get_time_string()) @@ -108,9 +106,9 @@ def prepare_sdk(): api_version = '' sdk_path = sdk_arg if validators.url(sdk_arg): - sdk_path, api_version = GetSdkFromRemote(sdk_arg) + sdk_path, api_version = get_sdk_from_remote(sdk_arg) - if not os.path.exists(sdk_path): + if not sdk_path or not os.path.exists(sdk_path): return False update_sdk_to_deveco(sdk_path, api_version) @@ -118,7 +116,7 @@ def prepare_sdk(): def prepare_image(): - if options.arguments.pack_only: + if options.arguments.run_haps: return True ## TODO: 1)download image, 2)flash image diff --git a/test/scripts/sdk_test/readme.md b/test/scripts/sdk_test/readme.md new file mode 100644 index 0000000000..7174c8a21b --- /dev/null +++ b/test/scripts/sdk_test/readme.md @@ -0,0 +1,26 @@ + +# SDK Test Suite Overview +This test suite can perform end-to-end SDK test verification. There are two ways to perform verification: +1) Verify if abc and sourcemap are generated in the compiled package. +2) Verify if the application of the compiled package can run normally (this feature is under development). + +## SDK Test Suite Usage +### Operating Environment +The SDK test automation script runs on Windows, Python 3.7 and above. The MAC version has not been verified yet. + +### Test Preparation +1. Ensure that Deveco is installed in the environment. +2. Install dependencies of the test suite: +`python3 -m pip install pyyaml validators requests httpx tqdm json5 pandas` +3. Modify the configuration file `config.yaml`, configure relevant parameters of Deveco and the test application. Detailed configuration instructions can be found in the file. 
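+
+For reference, here is a minimal sketch (not part of the suite itself) of how `options.py` loads this configuration with `yaml.safe_load`; the printed keys are examples of settings the suite reads:
+
+```python
+import os
+import yaml
+
+# load config.yaml from the directory containing this script,
+# mirroring parse_configs in options.py
+config_yaml = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'config.yaml')
+with open(config_yaml, 'r') as config_file:
+    configs = yaml.safe_load(config_file)
+
+# e.g. the DevEco paths used by the environment checks
+print(configs.get('deveco_path'))
+print(configs.get('deveco_sdk_path'))
+```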
+ +### Running Tests +The test suite supports daily and manual runs. + +#### Daily Run +The daily run will download the SDK built on the current day from the trunk branch and use it to perform a full test verification. +Run the command: `python entry.py` + +#### Manual Run +Run the command: `python run.py` +By default, it will run all the test items. Optional parameters can be viewed through `--help`. \ No newline at end of file diff --git a/test/scripts/sdk_test/readme_zh.md b/test/scripts/sdk_test/readme_zh.md new file mode 100644 index 0000000000..54885d08b4 --- /dev/null +++ b/test/scripts/sdk_test/readme_zh.md @@ -0,0 +1,25 @@ +# SDK测试套说明 +本测试套可以执行端到端的SDK测试验证。 +验证方式有两种: +1) 验证编译打包的产物中,abc和sourcemap是否生成。 +2) 验证编译产物的应用是否可以正常运行(该功能在开发中)。 + +## SDK测试套使用 +### 运行环境 +SDK测试自动化脚本运行环境为windows,python3.7及以上。MAC版本的运行暂未验证。 +### 测试准备 +1. 确保环境中已安装Deveco +2. 安装测试套依赖: +`python3 -m pip install pyyaml validators requests httpx tqdm json5 pandas` +3. 修改配置文件config.yaml,配置Deveco和测试应用的相关参数。各项配置说明详见该文件。 + +### 测试运行 +测试套支持daily运行和手动单次运行。 +#### daily运行 +daily运行将从主干分支下载当日构建的sdk,使用该sdk进行全量的测试项验证: +执行命令: +`python entry.py` +#### 手动运行 +执行命令: +`python run.py` +不带参数默认跑全量的测试项。可选参数可通过`--help`查看。 \ No newline at end of file diff --git a/test_suite/result.py b/test/scripts/sdk_test/result.py similarity index 43% rename from test_suite/result.py rename to test/scripts/sdk_test/result.py index c95ed374a6..2082871990 100644 --- a/test_suite/result.py +++ b/test/scripts/sdk_test/result.py @@ -17,17 +17,33 @@ limitations under the License. Description: output test results """ + +import copy import logging -import pandas -import smtplib +import os import time -from email.header import Header -from email.mime.application import MIMEApplication -from email.mime.multipart import MIMEMultipart -from email.mime.text import MIMEText + +import pandas import options + +incremetal_compile_tests = ["no_change", + "add_oneline", + "add_file", + "delete_file", + "reverse_hap_mode", + "change_module_name" + ] + +other_tests = ["binary_consistency", + "break_continue_compile", + "compile_with_error", + "compile_with_exceed_length", + "ohos_test" + ] + + class TestResult: def __init__(self): self.passed = [] @@ -35,7 +51,7 @@ class TestResult: self.time = 0.0 -def print_test_result(test_result, test_tasks): +def print_result(test_result, test_tasks): logging.info("========================================") logging.info("Test finished. 
The result is as following:") logging.info("=====> Summary") @@ -92,7 +108,7 @@ def print_test_result(test_result, test_tasks): def is_full_compilation_passed(task_info): if not options.arguments.compile_mode in ['all', 'full']: - return True, True + return True passed_debug = True passed_release = True @@ -124,12 +140,11 @@ def is_incremental_compilation_passed(task_info): def is_task_passed(task): - passed = True + passed = is_full_compilation_passed(task.full_compilation_info) and \ + is_incremental_compilation_passed(task.incre_compilation_info) - passed = passed and is_full_compilation_passed(task.full_compilation_info) - passed = passed and is_incremental_compilation_passed(task.incre_compilation_info) for test in task.other_tests.values(): - passed = passed and (test.result == options.TaskResult.passed) + passed = passed and (test.result == options.TaskResult.passed) return passed @@ -145,101 +160,142 @@ def collect_result(test_result, test_tasks, start_time): test_result.time = round(end_time - start_time, 3) -def email_result(test_result, test_tasks): - sender = '' - password = '' - receiver = [] - subject = 'SDK Test Daily Report' +def get_result_symbol(result_type): + if result_type == options.TaskResult.passed: + return '√' + elif result_type == options.TaskResult.failed: + return '×' + else: + return '-' - msg = MIMEMultipart() - msg['From'] = 'wuhailong' - msg['To'] = ", ".join(receiver) - msg['Subject'] = Header(subject, 'utf-8') + +def generate_summary_data(test_result, test_tasks): + ## collect summary data + passed_task_name_list = [] + for task in test_result.passed: + passed_task_name_list.append(task.name) + failed_task_name_list = [] + for task in test_result.failed: + failed_task_name_list.append(task.name) summary_data = { - 'Total test number': [len(test_tasks)], - 'Took time (s)': [test_result.time], - 'Passed test number': [len(test_result.passed)], - 'Failed test number': [len(test_result.failed)] + 'Total Test Number': len(test_tasks), + 'Passed Test Number': len(test_result.passed), + 'Failed Test Number': len(test_result.failed), + 'Passed Tests': ','.join(passed_task_name_list), + 'Failed Tests': ','.join(failed_task_name_list), + 'Test Took Time(s)': test_result.time } - detail_data = [] - idx = 1 + return summary_data + + +def generate_detail_data(test_tasks): + time_size_data = [] + result_data = [] + + idx = 0 for task in test_tasks: - task_data = { - 'Task index': idx, - 'Task name': task.name, - 'Task type': task.type + idx += 1 + task_time_size_data = { + 'Task Index': idx, + 'Task Name': task.name } - + task_result_data = copy.deepcopy(task_time_size_data) + task_result_data['Task Type'] = ','.join(task.type) + full_compilation_debug = task.full_compilation_info.debug_info full_compilation_release = task.full_compilation_info.release_info - task_data['Full Compilation - Debug'] = { - 'Result': full_compilation_debug.result, - 'ABC Size': full_compilation_debug.abc_size, - 'Error Message': full_compilation_debug.error_message - } - task_data['Full Compilation - Release'] = { - 'Result': full_compilation_release.result, - 'ABC Size': full_compilation_release.abc_size, - 'Error Message': full_compilation_release.error_message - } - - incremental_compilation = task.incre_compilation_info - for inc_task_name, inc_task_info in incremental_compilation.items(): - inc_task_debug = inc_task_info.debug_info - inc_task_release = inc_task_info.release_info - task_data[f'Incremental Compilation - {inc_task_name} - Debug'] = { - 'Result': inc_task_debug.result, - 
- 'ABC Size': inc_task_debug.abc_size,
- 'Error Message': inc_task_debug.error_message
- }
- task_data[f'Incremental Compilation - {inc_task_name} - Release'] = {
- 'Result': inc_task_release.result,
- 'ABC Size': inc_task_release.abc_size,
- 'Error Message': inc_task_release.error_message
- }
-
- for other_test_name, other_test_info in task.other_tests.items():
- task_data[f'Other Test - {other_test_name}'] = {
- 'Result': other_test_info.result,
- 'Error Message': other_test_info.error_message
- }
-
- detail_data.append(task_data)
-
- summary_df = pandas.DataFrame(summary_data)
- detail_df = pandas.DataFrame(detail_data)
-
- detail_table = '<table>'
- detail_table += '<tr>'
- for column in detail_df.columns:
- detail_table += f'<th>{column}</th>'
- detail_table += '</tr>'
- for _, row in detail_df.iterrows():
- detail_table += '<tr>'
- for column, value in row.items():
- if isinstance(value, dict):
- detail_table += '<td><table class="sub-table">'
- for sub_column, sub_value in value.items():
- detail_table += f'<tr><td>{sub_column}</td><td>{sub_value}</td></tr>'
- detail_table += '</table></td>'
- elif isinstance(value, list):
- detail_table += '<td><table class="sub-table">'
- for sub_value in value:
- detail_table += f'<tr><td>{sub_value}</td></tr>'
- detail_table += '</table></td>'
- else:
- detail_table += f'<td>{value}</td>'
- detail_table += '</tr>'
- detail_table += '</table>'
-
- summary_table = MIMEText(summary_df.to_html(index=False), 'html')
- msg.attach(summary_table)
+ task_time_size_data['[Full Compilation]\n[Debug]\n[Compilation Time(s)]'] = full_compilation_debug.time
+ task_time_size_data['[Full Compilation]\n[Release]\n[Compilation Time(s)]'] = full_compilation_release.time
+ task_result_data['[Debug]'] = get_result_symbol(full_compilation_debug.result)
+ task_result_data['[Release]'] = get_result_symbol(full_compilation_release.result)
+
+ for test in incremetal_compile_tests:
+ debug_result = options.TaskResult.undefind
+ release_result = options.TaskResult.undefind
+ if test in task.incre_compilation_info.keys():
+ inc_task_info = task.incre_compilation_info[test]
+ debug_result = inc_task_info.debug_info.result
+ release_result = inc_task_info.release_info.result
+ task_result_data[f'[Debug]\n{test}'] = get_result_symbol(debug_result)
+ task_result_data[f'[Release]\n{test}'] = get_result_symbol(release_result)
+
+ if test == 'add_oneline':
+ debug_test_time = 0
+ release_test_time = 0
+ if test in task.incre_compilation_info.keys():
+ inc_task_info = task.incre_compilation_info[test]
+ debug_test_time = inc_task_info.debug_info.time
+ release_test_time = inc_task_info.release_info.time
+
+ task_time_size_data['[Incremental Compilation]\n[Debug]\n[Compilation Time(s)]'] = debug_test_time
+ task_time_size_data['[Incremental Compilation]\n[Release]\n[Compilation Time(s)]'] = release_test_time
+
+ for test in other_tests:
+ result = options.TaskResult.undefind
+ if test in task.other_tests.keys():
+ task_info = task.other_tests[test]
+ result = task_info.result
+ task_result_data[f'{test}'] = get_result_symbol(result)
+
+ task_time_size_data['[Abc Size(byte)]\n[Debug]'] = full_compilation_debug.abc_size
+ task_time_size_data['[Abc Size(byte)]\n[Release]'] = full_compilation_release.abc_size
+ time_size_data.append(task_time_size_data)
+ result_data.append(task_result_data)
+
+ detail_data = {
+ 'result_data': result_data,
+ 'time_size_data': time_size_data
+ }
+ return detail_data
+
+
+def generate_data_html(summary_data, detail_data):
+ # summary table
+ key_value_pairs = [f'<tr><td>{key}</td><td>{value}</td></tr>' for key, value in summary_data.items()]
+ summary_table_content = ''.join(key_value_pairs)
+ summary_table = f'<table>{summary_table_content}</table>'
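+ # note: generate_data_html only assembles raw HTML strings; they are
+ # stitched into the full report page by generate_report_html below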
+
+ # time and size table
+ time_size_data = detail_data.get('time_size_data')
+ time_size_df = pandas.DataFrame(time_size_data)
+
+ time_size_table_header = '<tr>' + \
+ ''.join([f'<th rowspan="2">{column}</th>' for column in time_size_df.columns[:2]])
+ time_size_table_header += '<th colspan="2">Full Compilation Time(s)</th>' + \
+ f'<th colspan="2">Incremental Compilation Time(s)</th>' + \
+ f'<th colspan="2">Abc Size(byte)</th>' + '</tr>'
+ time_size_table_sub_header = '<tr>' + f'<th>[Debug]</th><th>[Release]</th>' * 3 + '</tr>'
+
+ time_size_table_content = ''.join([
+ '<tr>' + ''.join([f'<td>{value}</td>' for _, value in row.items()]) + '</tr>'
+ for _, row in time_size_df.iterrows()
+ ])
+ time_size_table = f'<table>{time_size_table_header}{time_size_table_sub_header}{time_size_table_content}</table>'
+
+ # result table
+ result_data = detail_data.get('result_data')
+ result_df = pandas.DataFrame(result_data)
+
+ result_table_header = '<tr>' + ''.join([f'<th rowspan="2">{column}</th>' for column in result_df.columns[:3]])
+ result_table_header += '<th colspan="2">Full Compilation</th>' + \
+ f'<th colspan="{len(incremetal_compile_tests) * 2}">Incremental Compilation</th>' + \
+ f'<th colspan="{len(other_tests)}">Other Tests</th>' + '</tr>'
+
+ result_table_sub_header = '<tr>' + \
+ ''.join([f'<th>{column}</th>' for column in result_df.columns[3:]]) + '</tr>'
+ result_table_content = ''.join([
+ '<tr>' + ''.join([f'<td>{value}</td>' for _, value in row.items()]) + '</tr>'
+ for _, row in result_df.iterrows()
+ ])
+ result_table = f'<table>{result_table_header}{result_table_sub_header}{result_table_content}</table>'
+
+ return summary_table, time_size_table, result_table
+
+
+def generate_report_html(summary_data, detail_data):
+ [summary_table, time_size_table, result_table] = generate_data_html(summary_data, detail_data)
 
 html_content = f'''
 <html>
 <head>
@@ -249,74 +305,72 @@ def email_result(test_result, test_tasks):
 font-family: Arial, sans-serif;
 margin: 20px;
 }}
- h2 {{ color: #333; }}
- table {{ border-collapse: collapse; width: 100%; margin-bottom: 20px; }}
- table th, table td {{ padding: 8px; border: 1px solid #ddd; }}
- table th {{ background-color: #f2f2f2; font-weight: bold; }}
-
- .sub-table {{
- border-collapse: collapse;
- width: 100%;
- }}
-
- .sub-table td {{
- padding: 4px;
- border: 1px solid #ddd;
+ tr:nth-child(odd) {{
+ background-color: #f9f9f9;
 }}
 </style>
 </head>
 <body>
- <h2>Summary</h2>
+ <h1>SDK Test Results</h1>
+ <h2>Summary</h2>
 {summary_table}
- <h2>Detail Information</h2>
- {detail_table}
+ <h2>Detail Information</h2>
+ <h3>Test Result</h3>
+ {result_table}
+ <h3>Compilation Time And Abc Size</h3>
+ {time_size_table}
+ <p>
+ Notes:<br/>
+ 1. Incremental compilation time refers to the 'add_oneline' incremental compile.<br/>
+ 2. For detailed compile output and error messages during compile, please refer to the attached log file.<br/>
+ 3. For sdk commit tags, please refer to the attached manifest file (to be added).
+ </p>
 </body>
 </html>
''' - today_date = time.strftime("%Y%m%d") - daily_report_file=f'SDK-test-report-{today_date}.html' - with open(daily_report_file, 'w') as report: + daily_report_file = options.configs.get('output_html_file') + with open(daily_report_file, 'w', encoding='utf-8') as report: report.write(html_content) - with open(daily_report_file, 'rb') as mesg: - attach_txt = MIMEApplication(mesg.read()) - attach_txt.add_header('Content-Disposition', 'attachment', filename = daily_report_file) - msg.attach(attach_txt) - logging.info('Sending email') - smtp_server = 'smtp.163.com' - smtp = smtplib.SMTP(smtp_server, 25) - smtp.login(sender, password) - smtp.sendmail(sender, receiver, msg.as_string()) - smtp.quit() - logging.info('Sent email successfully!') +def generate_log_file(): + logger = logging.getLogger() + log_file = logger.handlers[0].baseFilename + logger.handlers[0].close() + output_log_file = options.configs.get('log_file') + if os.path.exists(output_log_file): + os.remove(output_log_file) + os.rename(log_file, output_log_file) + + +def generate_result_reports(test_result, test_tasks): + summary_data = generate_summary_data(test_result, test_tasks) + detail_data = generate_detail_data(test_tasks) + generate_report_html(summary_data, detail_data) + generate_log_file() def process_test_result(test_tasks, start_time): test_result = TestResult() collect_result(test_result, test_tasks, start_time) - print_test_result(test_result, test_tasks) - - # TODO: add baseline comparison - # TODO: add write result to a file - - if options.arguments.email_result: - email_result(test_result, test_tasks) + print_result(test_result, test_tasks) + generate_result_reports(test_result, test_tasks) diff --git a/test_suite/run.py b/test/scripts/sdk_test/run.py similarity index 99% rename from test_suite/run.py rename to test/scripts/sdk_test/run.py index eb92911f89..35077749ed 100644 --- a/test_suite/run.py +++ b/test/scripts/sdk_test/run.py @@ -28,6 +28,7 @@ from options import process_options from preparation import prepare_test_env from result import process_test_result + def run(): old_env = os.environ.copy() try: diff --git a/test_suite/utils.py b/test/scripts/sdk_test/utils.py similarity index 86% rename from test_suite/utils.py rename to test/scripts/sdk_test/utils.py index bfe777cd93..3ee96d7c5f 100644 --- a/test_suite/utils.py +++ b/test/scripts/sdk_test/utils.py @@ -19,31 +19,40 @@ Description: utils for test suite """ import datetime -import gzip -import httpx import json import logging import os -import requests import shutil import time -import tqdm import subprocess import sys +import gzip +import httpx +import requests +import tqdm + + +def get_log_level(arg_log_level): + log_level_dict = { + 'debug': logging.DEBUG, + 'info': logging.INFO, + 'warn': logging.WARN, + 'error': logging.ERROR + } + if arg_log_level not in log_level_dict.keys(): + return logging.ERROR # use error as default log level + else: + return log_level_dict[arg_log_level] -log_level_dict = { - 'debug': logging.DEBUG, - 'info': logging.INFO, - 'warn': logging.WARN, - 'error': logging.ERROR -} def init_logger(log_level, log_file): logging.basicConfig(filename=log_file, - level=log_level_dict[log_level], + level=get_log_level(log_level), encoding='utf-8', format='[%(asctime)s %(filename)s:%(lineno)d]: [%(levelname)s] %(message)s') + logging.info("Test command:") + logging.info(" ".join(sys.argv)) def is_windows(): @@ -88,7 +97,7 @@ def get_sdk_url(): } downnload_job['startTime'] = str(last_hour) downnload_job['endTime'] = str(now_time) - 
post_result = requests.post(url, data = downnload_job) + post_result = requests.post(url, data=downnload_job) post_data = json.loads(post_result.text) sdk_url_suffix = '' for ohos_sdk_list in post_data['result']['dailyBuildVos']: @@ -112,8 +121,9 @@ def npm_install(loader_path): except subprocess.CalledProcessError as e: logging.exception(e) logging.error(f'npm install failed. Please check the local configuration environment.') - sys.exit(1) + return False os.chdir(os.path.dirname(__file__)) + return True def get_api_version(json_path): @@ -128,7 +138,8 @@ def check_gzip_file(file_path): try: with gzip.open(file_path, 'rb') as gzfile: gzfile.read(1) - except (gzip.BadGzipFile, OSError): + except Exception as e: + logging.exception(e) return False return True diff --git a/test/scripts/send_email.py b/test/scripts/send_email.py index ed107160d9..44cfdd000f 100755 --- a/test/scripts/send_email.py +++ b/test/scripts/send_email.py @@ -30,16 +30,16 @@ def add_content(content, file_name, test_part): if not os.path.exists(file_name): content += f'

{test_part} run failed

' return content - with open(file_name, 'r') as f: + with open(file_name, 'r', encoding='utf-8') as f: content += f.read() return content - - + + def add_attachment(msg, file_list): for file in file_list: if os.path.exists(file): - with open(file, 'r') as f: - msg.add_attachment(f.read(), 'html', filename=os.path.basename(file)) + with open(file, 'r', encoding='utf-8') as f: + msg.add_attachment(f.read(), 'html', filename=os.path.basename(file)) def send_email(): @@ -55,12 +55,12 @@ def send_email(): perf_test = data["perf_report_file"] attachment_files = data["attatchment_files"] yl.close() - + msg = EmailMessage() msg['From'] = sender msg['To'] = ", ".join(receiver) msg['Subject'] = "Arkcompiler Test" - + html = "" dividing_line = '
' html = add_content(html, xts_test, "xts_test") @@ -71,7 +71,7 @@ def send_email(): msg.add_related(html, "html") add_attachment(msg, attachment_files) - + smtp = smtplib.SMTP(smtp_server, smtp_port) smtp.login(sender, auth_code) smtp.sendmail(sender, receiver, msg.as_string()) diff --git a/test/scripts/timer.py b/test/scripts/timer.py index e734372c57..f27fca80a3 100755 --- a/test/scripts/timer.py +++ b/test/scripts/timer.py @@ -30,7 +30,7 @@ if __name__ == "__main__": os.chdir(os.path.dirname(os.path.realpath(__file__))) #do preparations schedule.every().day.at("20:00").do(job, cmd=r'.\auto_xts_test\run.bat').tag('daily_xts_task') - #do sdk_test + schedule.every().day.at("20:00").do(job, cmd=r'python .\sdk_test\entry.py').tag('daily_sdk_task') #do perf_test schedule.every().day.at("20:00").do(job, cmd=r'python .\send_email.py').tag("send_email") schedule.run_all() diff --git a/test_suite/execution.py b/test_suite/execution.py deleted file mode 100644 index fe3773d1ee..0000000000 --- a/test_suite/execution.py +++ /dev/null @@ -1,718 +0,0 @@ -#!/usr/bin/env python3 -# coding: utf-8 - -""" -Copyright (c) 2023 Huawei Device Co., Ltd. -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. - -Description: execute test tasks -""" - -import logging -import json5 -import os -import re -import shutil -import signal -import subprocess -import zipfile - -import options -import utils - - -def validate_output_for_jsbundle(info, uncompressed_output_path, is_debug): - abc_files = [] - for root, dirs, files in os.walk(uncompressed_output_path): - for file in files: - if file.endswith('.abc'): - abc_files.append(os.path.join(root, file)) - - total_size = 0 - for file in abc_files: - total_size += os.path.getsize(os.path.join(uncompressed_output_path, file)) - if total_size == 0: - info.result = options.TaskResult.failed - info.error_message = "abc not found or abc size is 0" - return False - else: - info.abc_size = total_size - - if is_debug: - for file in abc_files: - sourcemap_file = file.replace('.abc', '.js.map') - if not os.path.exists(os.path.join(uncompressed_output_path, sourcemap_file)): - info.result = options.TaskResult.failed - info.error_message = "sourcemap not found" - return False - - return True - - -def validate_output_for_esmodule(info, task_type, uncompressed_output_path, is_debug): - abc_sourcemap_path = os.path.join(uncompressed_output_path, 'ets') - - modules_abc_path = os.path.join(abc_sourcemap_path, 'modules.abc') - if not os.path.exists(modules_abc_path): - info.result = options.TaskResult.failed - info.error_message = "modules.abc not found" - return False - - modules_abc_size = os.path.getsize(modules_abc_path) - if modules_abc_size <= 0: - info.result = options.TaskResult.failed - info.error_message = "modules.abc size is 0" - return False - info.abc_size = modules_abc_size - - if 'widget' in task_type: - widget_abc_path = os.path.join(abc_sourcemap_path, 'widgets.abc') - if not os.path.exists(widget_abc_path): - info.result = options.TaskResult.failed - info.error_message = "widgets.abc not found" - return 
False - - widgets_abc_size = os.path.getsize(widget_abc_path) - if widgets_abc_size <= 0: - info.result = options.TaskResult.failed - info.error_message = "widgets.abc size is 0" - return False - else: - info.abc_size += widgets_abc_size - - if is_debug: - sourcemap_path = os.path.join(abc_sourcemap_path, 'sourceMaps.map') - if not os.path.exists(sourcemap_path): - info.result = options.TaskResult.failed - info.error_message = "sourcemap not found" - return False - - return True - - -def collect_compile_time(info, time_string): - time_second = 0 - time_millisecond = 0 - - time_items = time_string.split() - for i in range(0, len(time_items)): - if time_items[i] == 's': - time_second = float(time_items[i - 1]) - if time_items[i] == 'ms': - time_millisecond = round(float(time_items[i - 1])/1000, 3) - - info.time = round(time_second + time_millisecond, 3) - - -def get_compile_output_file_path(task, is_debug): - output_file = '' - - if is_debug: - output_file = os.path.join(task.path, *(task.build_path), *(task.output_hap_path)) - else: - output_file = os.path.join(task.path, *(task.build_path), *(task.output_app_path)) - - return output_file - - -def validate_compile_output(info, task, is_debug): - passed = False - - output_file = get_compile_output_file_path(task, is_debug) - uncompressed_output_file = output_file + '.uncompressed' - - if not os.path.exists(output_file): - logging.error("output file for task %s not exists: %s", task.name, output_file) - passed = False - - info.result = options.TaskResult.failed - info.error_message = "Hap not found" - return [passed, uncompressed_output_file] - try: - with zipfile.ZipFile(output_file, 'r') as zip_ref: - zip_ref.extractall(uncompressed_output_file) - except Exception as e: - logging.error("unzip exception: %s", e) - logging.error("uncompressed output file for task %s failed. output file: %s", task.name, output_file) - passed = False - - info.result = options.TaskResult.failed - info.error_message = "Hap uncompressed failed, cannot exam build products" - return [passed, uncompressed_output_file] - - if utils.is_esmodule(task.type): - passed = validate_output_for_esmodule(info, task.type, uncompressed_output_file, is_debug) - else: - passed = validate_output_for_jsbundle(info, uncompressed_output_file, is_debug) - - shutil.rmtree(uncompressed_output_file) - - return passed - - -def run_compile_output(info, task_path): - ## TODO: - # 1)install hap - # 2)run hap and verify - return - - -def is_compile_success(compile_stdout): - pattern = r"BUILD SUCCESSFUL in (\d+ s )?(\d+ ms)?" 
- match_result = re.search(pattern, compile_stdout) - if not match_result: - return [False, ''] - - return [True, match_result.group(0)] - - -def validate(compilation_info, task, is_debug, stdout, stderr): - info = {} - if is_debug: - info = compilation_info.debug_info - else: - info = compilation_info.release_info - - # ret_code will be 1 if there's stderr, use "COMPILE SUCCESSFUL" as a flag to make a judge - [is_success, time_string] = is_compile_success(stdout) - if not is_success: - info.result = options.TaskResult.failed - info.error_message = stderr - return - - passed = False - passed = validate_compile_output(info, task, is_debug) - - if not options.arguments.pack_only: - passed = run_compile_output(info) - - if passed: - collect_compile_time(info, time_string) - info.result = options.TaskResult.passed - - return passed - - -def get_hvigor_compile_cmd(is_debug): - cmd = ['hvigorw'] - if is_debug: - cmd.append('assembleHap') - else: - cmd.append('assembleApp') - return cmd - - -def compile(task, is_debug): - cmd = get_hvigor_compile_cmd(is_debug) - - logging.debug('cmd: %s', cmd) - logging.debug("cmd execution path %s", task.path) - process = subprocess.Popen(cmd, shell = True, cwd = task.path, - stdout = subprocess.PIPE, - stderr = subprocess.PIPE) - stdout, stderr = process.communicate(timeout=options.arguments.compile_timeout) - stdout_utf8 = stdout.decode("utf-8", errors="ignore") - stderr_utf8 = stderr.decode("utf-8", errors="ignore") - logging.debug("cmd stdout: {}".format(stdout_utf8)) - logging.debug("cmd stderr: {}".format(stderr_utf8)) - - return [stdout_utf8, stderr_utf8] - - -def clean_compile(task): - cmd = 'hvigorw clean' - logging.debug('cmd: %s', cmd) - logging.debug("cmd execution path %s", task.path) - process = subprocess.Popen(cmd, shell = True, cwd = task.path, - stdout = subprocess.PIPE, - stderr = subprocess.PIPE) - out, err = process.communicate(timeout=options.arguments.compile_timeout) - - -def validate_compile_incremental_file(task, inc_task, is_debug, modified_files): - cache_extention = '.protoBin' - modified_cache_files = [] - - # modified_files is a list of file with relative path to .../debug/release - for file in modified_files: - name, ext = os.path.splitext(file) - modified_cache_files.append(name + cache_extention) - - if is_debug: - cache_path = os.path.join(task.path, *(task.build_path), *(task.cache_path), 'debug') - backup_path = task.backup_info.cache_debug - inc_info = inc_task.debug_info - else: - cache_path = os.path.join(task.path, *(task.build_path), *(task.cache_path), 'release') - backup_path = task.backup_info.cache_release - inc_info = inc_task.release_info - - for root, dirs, files in os.walk(cache_path): - for file in files: - name, extension = os.path.splitext(file) - if extension == cache_extention: - file_absolute_path = os.path.join(root, file) - file_relative_path = os.path.relpath(file_absolute_path, cache_path) - backup_file = os.path.join(backup_path, file_relative_path) - - if not os.path.exists(backup_file): - logging.debug("backup file not exits: %s", backup_file) - continue - - logging.debug("time stamp same: %s", utils.is_file_timestamps_same(file_absolute_path, backup_file)) - logging.debug("file_relative_path %s", file_relative_path) - logging.debug("file not in list: %s", file_relative_path not in modified_cache_files) - logging.debug("file list: %s", modified_cache_files) - - if not utils.is_file_timestamps_same(file_absolute_path, backup_file) and \ - file_relative_path not in modified_cache_files: - 
inc_info.result = options.TaskResult.failed - inc_info.error_message = 'Incremental compile found unexpected file timestamp changed. Changed file: ' + file_relative_path - return - - -def prepare_incremental_task(task, test_name): - if test_name in task.incre_compilation_info: - inc_task = task.incre_compilation_info[test_name] - else: - inc_task = options.IncCompilationInfo() - inc_task.name = test_name - task.incre_compilation_info[test_name] = inc_task - return inc_task - - -def compile_incremental_no_modify(task, is_debug): - test_name = 'no_change' - inc_task = prepare_incremental_task(task, test_name) - - logging.info("==========> Running %s for task: %s", test_name, task.name) - [stdout, stderr] = compile(task, is_debug) - passed = validate(inc_task, task, is_debug, stdout, stderr) - if passed: - validate_compile_incremental_file(task, inc_task, is_debug, []) - - -def compile_incremental_add_oneline(task, is_debug): - test_name = 'add_oneline' - inc_task = prepare_incremental_task(task, test_name) - - logging.info("==========> Running %s for task: %s", test_name, task.name) - modify_file_item = task.inc_modify_file - modify_file = os.path.join(task.path, *modify_file_item) - modify_file_backup = modify_file + ".bak" - shutil.copyfile(modify_file, modify_file_backup) - - with open(modify_file, 'a', encoding='utf-8') as file: - file.write(options.configs['patch_content']['patch_lines_2']['tail']) - - [stdout, stderr] = compile(task, is_debug) - passed = validate(inc_task, task, is_debug, stdout, stderr) - if passed: - modified_files = [os.path.join(*modify_file_item)] - validate_compile_incremental_file(task, inc_task, is_debug, modified_files) - - shutil.move(modify_file_backup, modify_file) - - -def compile_incremental_add_file(task, is_debug): - test_name = 'add_file' - inc_task = prepare_incremental_task(task, test_name) - - logging.info("==========> Running %s for task: %s", test_name, task.name) - modify_file_item = task.inc_modify_file - modify_file = os.path.join(task.path, *modify_file_item) - modify_file_backup = modify_file + ".bak" - shutil.copyfile(modify_file, modify_file_backup) - - modify_dir = os.path.dirname(modify_file) - new_file_name = options.configs['patch_content']['patch_new_file']['name'] - new_file_content = options.configs['patch_content']['patch_new_file']['content'] - new_file = os.path.join(modify_dir, new_file_name) - - with open(new_file, 'w', encoding='utf-8') as file: - file.writelines(new_file_content) - - with open(modify_file, 'r+', encoding='utf-8') as file: - old_content = file.read() - file.seek(0) - file.write(options.configs['patch_content']['patch_lines_1']['head']) - file.write(old_content) - file.write(options.configs['patch_content']['patch_lines_1']['tail']) - - [stdout, stderr] = compile(task, is_debug) - passed = validate(inc_task, task, is_debug, stdout, stderr) - if passed: - modified_files = [os.path.join(*modify_file_item)] - validate_compile_incremental_file(task, inc_task, is_debug, modified_files) - - shutil.move(modify_file_backup, modify_file) - os.remove(new_file) - - -def compile_incremental_delete_file(task, is_debug): - test_name = 'delete_file' - inc_task = prepare_incremental_task(task, test_name) - - logging.info("==========> Running %s for task: %s", test_name, task.name) - # this test is after 'add_file', and in test 'add_file' already done remove file, - # so here just call compile - [stdout, stderr] = compile(task, is_debug) - passed = validate(inc_task, task, is_debug, stdout, stderr) - if passed: - 
modify_file_item = task.inc_modify_file - modified_files = [os.path.join(*modify_file_item)] - validate_compile_incremental_file(task, inc_task, is_debug, modified_files) - - -def compile_incremental_reverse_hap_mode(task, is_debug): - test_name = 'reverse_hap_mode' - inc_task = prepare_incremental_task(task, test_name) - - logging.info("==========> Running %s for task: %s", test_name, task.name) - hap_mode = not is_debug - [stdout, stderr] = compile(task, hap_mode) - validate(inc_task, task, hap_mode, stdout, stderr) - - -def compile_incremental_modify_bundle_name(task, is_debug): - # TODO: this needs to modify bundle name and disasm abc for compare - return - - -def compile_incremental(task, is_debug): - logging.info("==========> Running task: %s in incremental compilation", task.name) - [stdout, stderr] = compile(task, is_debug) - - [is_success, time_string] = is_compile_success(stdout) - if not is_success: - logging.error("Incremental compile failed due to first compile failed!") - return - - if options.arguments.compile_mode == 'incremental': - passed = validate(task.full_compilation_info, task, is_debug, stdout, stderr) - if not passed: - logging.error("Incremental compile failed due to first compile failed!") - return - - backup_compile_output(task, is_debug) - backup_compile_cache(task, is_debug) - - compile_incremental_no_modify(task, is_debug) - compile_incremental_add_oneline(task, is_debug) - compile_incremental_add_file(task, is_debug) - compile_incremental_delete_file(task, is_debug) - compile_incremental_reverse_hap_mode(task, is_debug) - # TODO: compile_incremental_modify_bundle_name(task, is_debug) - - -def backup_compile_output(task, is_debug): - backup_path = task.backup_info.cache_path - if not os.path.exists(backup_path): - os.mkdir(backup_path) - - if is_debug: - if len(task.backup_info.output_debug) == 2: - return - - backup_output_path = os.path.join(backup_path, 'output', 'debug') - if not os.path.exists(backup_output_path): - os.makedirs(backup_output_path) - - else: - if len(task.backup_info.output_release) == 2: - return - - backup_output_path = os.path.join(backup_path, 'output', 'release') - if not os.path.exists(backup_output_path): - os.makedirs(backup_output_path) - - output_file = get_compile_output_file_path(task, is_debug) - shutil.copy(output_file, backup_output_path) - backup_output = os.path.join(backup_output_path, os.path.basename(output_file)) - backup_time_output = backup_output + '-' + utils.get_time_string() - shutil.move(backup_output, backup_time_output) - - if is_debug: - task.backup_info.output_debug.append(backup_time_output) - else: - task.backup_info.output_release.append(backup_time_output) - - -def backup_compile_cache(task, is_debug): - backup_path = task.backup_info.cache_path - if not os.path.exists(backup_path): - os.mkdir(backup_path) - - backup_cache_path = os.path.join(backup_path, 'cache') - if not os.path.exists(backup_cache_path): - os.mkdir(backup_cache_path) - cache_files = os.path.join(task.path, *(task.build_path), *(task.cache_path)) - - if is_debug: - if task.backup_info.cache_debug != '': - return - - cache_files = os.path.join(cache_files, 'debug') - backup_cache_file = os.path.join(backup_cache_path, 'debug') - shutil.copytree(cache_files, backup_cache_file) - task.backup_info.cache_debug = backup_cache_file - else: - if task.backup_info.cache_release != '': - return - - cache_files = os.path.join(cache_files, 'release') - backup_cache_file = os.path.join(backup_cache_path, 'release') - shutil.copytree(cache_files, 
backup_cache_file) - task.backup_info.cache_release = backup_cache_file - - -def is_abc_same_in_haps(hap_1, hap_2): - hap_1_abc_files = [] - hap_2_abc_files = [] - with zipfile.ZipFile(hap_1) as zf1, zipfile.ZipFile(hap_2) as zf2: - for file in zf1.namelist(): - if file.endswith('.abc'): - hap_1_abc_files.append(file) - for file in zf2.namelist(): - if file.endswith('.abc'): - hap_2_abc_files.append(file) - - hap_1_abc_files.sort() - hap_2_abc_files.sort() - - if len(hap_1_abc_files) != len(hap_2_abc_files): - return False - - for idx in range(len(hap_1_abc_files)): - with zf1.open(hap_1_abc_files[idx]) as f1, zf2.open(hap_2_abc_files[idx]) as f2: - data1 = f1.read() - data2 = f2.read() - if data1 != data2: - return False - - return True - - -def execute_full_compile(task): - logging.info("==========> Running task: %s in full compilation", task.name) - clean_compile(task) - passed = False - if options.arguments.hap_mode in ['all', 'release']: - [stdout, stderr] = compile(task, False) - passed = validate(task.full_compilation_info, task, False, stdout, stderr) - if passed: - backup_compile_output(task, False) - clean_compile(task) - if options.arguments.hap_mode in ['all', 'debug']: - [stdout, stderr] = compile(task, True) - passed = validate(task.full_compilation_info, task, True, stdout, stderr) - if passed: - backup_compile_output(task, True) - clean_compile(task) - - return passed - - -def execute_incremental_compile(task): - logging.info("==========> Running task: %s in incremental compilation", task.name) - if options.arguments.hap_mode in ['all', 'release']: - compile_incremental(task, False) - if options.arguments.hap_mode in ['all', 'debug']: - compile_incremental(task, True) - clean_compile(task) - - -def verify_binary_consistency(task): - test_name = 'binary_consistency' - test_info = options.CompilationInfo() - debug_consistency = True - release_consistency = True - - logging.info("==========> Running %s for task: %s", test_name, task.name) - if options.arguments.hap_mode in ['all', 'release']: - # will have at lease 1 output from full compile - if len(task.backup_info.output_release) == 1: - compile(task, False) - backup_compile_output(task, False) - - if len(task.backup_info.output_release) == 2: - release_consistency = is_abc_same_in_haps(task.backup_info.output_release[0], - task.backup_info.output_release[1]) - else: - release_consistency = False - logging.debug("release consistency: %s", release_consistency) - - if options.arguments.hap_mode in ['all', 'debug']: - if len(task.backup_info.output_debug) == 1: - compile(task, True) - backup_compile_output(task, True) - - if len(task.backup_info.output_debug) == 2: - debug_consistency = is_abc_same_in_haps(task.backup_info.output_debug[0], - task.backup_info.output_debug[1]) - else: - debug_consistency = False - logging.debug("debug consistency: %s", debug_consistency) - - if debug_consistency and release_consistency: - test_info.result = options.TaskResult.passed - else: - test_info.result = options.TaskResult.failed - - task.other_tests[test_name] = test_info - - -def execute_break_compile(task, is_debug): - test_name = 'break_continue_compile' - test_info = options.CompilationInfo() - - logging.info("==========> Running %s for task: %s", test_name, task.name) - clean_compile(task) - cmd = get_hvigor_compile_cmd(is_debug) - logging.debug('cmd: %s', cmd) - logging.debug("cmd execution path %s", task.path) - process = subprocess.Popen(cmd, shell = True, cwd = task.path, - stdout = subprocess.PIPE, - stderr = subprocess.PIPE) - 
-
-
-def execute_full_compile(task):
-    logging.info("==========> Running task: %s in full compilation", task.name)
-    clean_compile(task)
-    passed = False
-    if options.arguments.hap_mode in ['all', 'release']:
-        [stdout, stderr] = compile(task, False)
-        passed = validate(task.full_compilation_info, task, False, stdout, stderr)
-        if passed:
-            backup_compile_output(task, False)
-        clean_compile(task)
-    if options.arguments.hap_mode in ['all', 'debug']:
-        [stdout, stderr] = compile(task, True)
-        passed = validate(task.full_compilation_info, task, True, stdout, stderr)
-        if passed:
-            backup_compile_output(task, True)
-        clean_compile(task)
-
-    return passed
-
-
-def execute_incremental_compile(task):
-    logging.info("==========> Running task: %s in incremental compilation", task.name)
-    if options.arguments.hap_mode in ['all', 'release']:
-        compile_incremental(task, False)
-    if options.arguments.hap_mode in ['all', 'debug']:
-        compile_incremental(task, True)
-    clean_compile(task)
-
-
-def verify_binary_consistency(task):
-    test_name = 'binary_consistency'
-    test_info = options.CompilationInfo()
-    debug_consistency = True
-    release_consistency = True
-
-    logging.info("==========> Running %s for task: %s", test_name, task.name)
-    if options.arguments.hap_mode in ['all', 'release']:
-        # there will be at least 1 output from the full compile
-        if len(task.backup_info.output_release) == 1:
-            compile(task, False)
-            backup_compile_output(task, False)
-
-        if len(task.backup_info.output_release) == 2:
-            release_consistency = is_abc_same_in_haps(task.backup_info.output_release[0],
-                                                      task.backup_info.output_release[1])
-        else:
-            release_consistency = False
-        logging.debug("release consistency: %s", release_consistency)
-
-    if options.arguments.hap_mode in ['all', 'debug']:
-        if len(task.backup_info.output_debug) == 1:
-            compile(task, True)
-            backup_compile_output(task, True)
-
-        if len(task.backup_info.output_debug) == 2:
-            debug_consistency = is_abc_same_in_haps(task.backup_info.output_debug[0],
-                                                    task.backup_info.output_debug[1])
-        else:
-            debug_consistency = False
-        logging.debug("debug consistency: %s", debug_consistency)
-
-    if debug_consistency and release_consistency:
-        test_info.result = options.TaskResult.passed
-    else:
-        test_info.result = options.TaskResult.failed
-
-    task.other_tests[test_name] = test_info
-
-
-def execute_break_compile(task, is_debug):
-    test_name = 'break_continue_compile'
-    test_info = options.CompilationInfo()
-
-    logging.info("==========> Running %s for task: %s", test_name, task.name)
-    clean_compile(task)
-    cmd = get_hvigor_compile_cmd(is_debug)
-    logging.debug('cmd: %s', cmd)
-    logging.debug("cmd execution path %s", task.path)
-    process = subprocess.Popen(cmd, shell=True, cwd=task.path,
-                               stdout=subprocess.PIPE,
-                               stderr=subprocess.PIPE)
-
-    # TODO: this signal seems to be sent after the build process has already
-    # finished; verify this later with an app that takes longer to build
-    for line in iter(process.stdout.readline, b''):
-        if b'CompileArkTS' in line:
-            logging.debug("terminate signal sent")
-            process.send_signal(signal.SIGTERM)
-            break
-
-    [stdout, stderr] = process.communicate()
-
-    logging.debug("first compile: stdout: %s", stdout.decode('utf-8', errors="ignore"))
-    logging.debug("first compile: stderr: %s", stderr.decode('utf-8', errors="ignore"))
-
-    logging.debug("another compile")
-    [stdout, stderr] = compile(task, is_debug)
-
-    [is_success, time_string] = is_compile_success(stdout)
-    if not is_success:
-        test_info.result = options.TaskResult.failed
-        test_info.error_message = stderr
-    else:
-        passed = validate_compile_output(test_info, task, is_debug)
-        if passed:
-            test_info.result = options.TaskResult.passed
-
-    task.other_tests[test_name] = test_info
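
The interrupted-build test above hinges on spotting a marker in the child's
output and terminating the build mid-flight. Below is a self-contained dry run
of that pattern, with a throwaway Python child standing in for the hvigor
build; the marker string is reused from the test, everything else is
illustrative:

    import subprocess
    import sys

    child_src = "import time; print('CompileArkTS', flush=True); time.sleep(60)"
    process = subprocess.Popen([sys.executable, '-c', child_src],
                               stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    for line in iter(process.stdout.readline, b''):
        if b'CompileArkTS' in line:
            process.terminate()  # portable stand-in for SIGTERM
            break
    process.communicate()  # reap the child and close the pipes
    print('child exit code:', process.returncode)

Note that the test starts the build with shell=True, so the termination signal
is delivered to the shell rather than to the build tool itself on some
platforms, which may be related to the timing issue flagged in the TODO above.
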
-
-
-def compile_full_with_error(task, is_debug):
-    test_name = 'compile_with_error'
-    test_info = options.CompilationInfo()
-
-    logging.info("==========> Running %s for task: %s", test_name, task.name)
-    modify_file_item = task.inc_modify_file
-    modify_file = os.path.join(task.path, *modify_file_item)
-    modify_file_backup = modify_file + ".bak"
-    shutil.copyfile(modify_file, modify_file_backup)
-
-    with open(modify_file, 'a', encoding='utf-8') as file:
-        file.write(options.configs['patch_content']['patch_lines_error']['tail'])
-
-    [stdout, stderr] = compile(task, is_debug)
-    expected_error_message = options.configs['patch_content']['patch_lines_error']['expected_error']
-
-    if expected_error_message in stderr:
-        test_info.result = options.TaskResult.passed
-    else:
-        test_info.result = options.TaskResult.failed
-        test_info.error_message = "expected error message: {}, but got {}".format(expected_error_message, stderr)
-
-    task.other_tests[test_name] = test_info
-
-    shutil.move(modify_file_backup, modify_file)
-
-
-def compile_with_exceed_length(task, is_debug):
-    test_name = 'compile_with_exceed_length'
-    test_info = options.CompilationInfo()
-
-    logging.info("==========> Running %s for task: %s", test_name, task.name)
-    # locate build-profile.json5 in the entry directory
-    entry_item = task.build_path[:-2]  # drop trailing components to get the entry path
-    profile_file = os.path.join(task.path, *entry_item, 'build-profile.json5')
-    profile_file_backup = profile_file + ".bak"
-    shutil.copyfile(profile_file, profile_file_backup)
-
-    with open(profile_file, 'r') as file:
-        profile_data = json5.load(file)
-
-    long_str = 'default123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890'
-    profile_data['targets'][0]['name'] = long_str
-
-    with open(profile_file, 'w') as file:
-        json5.dump(profile_data, file)
-
-    [stdout, stderr] = compile(task, is_debug)
-    expected_error_message = 'The length of path exceeds the maximum length: 259'
-
-    if expected_error_message in stderr:
-        test_info.result = options.TaskResult.passed
-    else:
-        test_info.result = options.TaskResult.failed
-        test_info.error_message = "expected error message: {}, but got {}".format(expected_error_message, stderr)
-
-    task.other_tests[test_name] = test_info
-
-    shutil.move(profile_file_backup, profile_file)
-
-
-def compile_ohos_test(task):
-    # TODO: ohosTest compilation is not implemented yet
-    return
-
-
-def clean_backup(task):
-    if os.path.exists(task.backup_info.cache_path):
-        shutil.rmtree(task.backup_info.cache_path)
-    return
-
-
-def execute(test_tasks):
-    for task in test_tasks:
-        try:
-            # TODO: add sdk path checking (the sdk path used by the hap
-            # should be the same as the one in config.yaml)
-            logging.info("======> Running task: %s", task.name)
-            if options.arguments.compile_mode in ['all', 'full']:
-                if not execute_full_compile(task):
-                    logging.info("Full compile failed, skip other tests!")
-                    continue
-
-            if options.arguments.compile_mode in ['all', 'incremental']:
-                execute_incremental_compile(task)
-
-            verify_binary_consistency(task)
-
-            # for these tests, running in one hap mode should be enough
-            is_debug = options.arguments.hap_mode == 'debug'
-            execute_break_compile(task, is_debug)
-            if 'error' in task.type:
-                compile_full_with_error(task, is_debug)
-
-            if 'exceed_length_error' in task.type:
-                compile_with_exceed_length(task, is_debug)
-
-            if 'ohosTest' in task.type:
-                compile_ohos_test(task)
-
-            logging.info("======> Running task: %s finished", task.name)
-        except Exception as e:
-            logging.exception(e)
-        finally:
-            clean_backup(task)
\ No newline at end of file
--
Gitee