diff --git a/build/pack-App/pack.py b/build/pack-App/pack.py index 991fed889c352ebf63f4d1ec18cdabc000854d14..f49602a21ad65607c03c8e05831b995e6bb65fd1 100644 --- a/build/pack-App/pack.py +++ b/build/pack-App/pack.py @@ -26,6 +26,8 @@ import logging import subprocess sys.path.append('../pack-Config') from Config_pre import gen_data_for_sign +# set logging level for INFO +logging.basicConfig(level=logging.INFO) COMPRESS_LEVEL = 9 @@ -69,11 +71,24 @@ def run_cmd(command): sys.exit(1) -def run_clean(): +def run_clean(file_name): """ delete build files """ - cmd = ["rm", "libcombine.so", "config", "manifest.txt", "configs.xml"] - run_cmd(cmd) - logging.critical("success to clean") + files_to_remove = [ + "libcombine.so", + "config", + "manifest.txt", + "configs.xml", + "data_for_sign", + "{}.tar".format(file_name), + "{}.tar.gz".format(file_name) + ] + + for file in files_to_remove: + if os.path.exists(file): + cmd = ["rm", file] + run_cmd(cmd) + + logging.info("success to clean") def get_file_type(file_name): @@ -86,10 +101,8 @@ def get_file_type(file_name): if ("java" in file_name_list) or (".class" in file_name_list): file_type_num = "7" # means java return file_type_num - logging.info("warning : the input file type cannot be distinguished. \ - need python or java") - logging.info("warning: the default value is python") - file_type_num = "6" # default python + logging.info("info: the default file type value is raw executable type") + file_type_num = "8" # default raw executable return file_type_num @@ -112,6 +125,37 @@ def whitelist_check(intput_str): return 0 +def make_package(file_name, tar_file_name, file_path, tgz_file_name): + """ make package """ + # 1. make package + try: + cmd = ["tar", "cvf", tar_file_name, file_path] + run_cmd(cmd) + tar_file_size = os.path.getsize(tar_file_name) + except RuntimeError: + logging.error("pack failed in packaging.") + run_clean(file_name) + return 1 + + # 2. make tgz file + try: + compress(tar_file_name, tgz_file_name, COMPRESS_LEVEL) + except RuntimeError: + logging.error("pack failed in compression.") + run_clean(file_name) + return 1 + + # 3. change tgz file to libcombine.so and add head + tgz_version = 1 + try: + add_head_to_tgz(tgz_file_name, tar_file_size, tgz_version) + except RuntimeError: + logging.error("pack failed in header addition.") + run_clean(file_name) + return 1 + return 0 + + def main(): """ main process """ if len(sys.argv) < 2: @@ -119,7 +163,9 @@ def main(): sys.exit(1) file_name = sys.argv[1] work_path = os.getcwd() - output_path = work_path + input_path = sys.argv[2] if len(sys.argv) > 2 else work_path + output_path = sys.argv[3] if len(sys.argv) > 3 else work_path + file_path = os.path.join(input_path, file_name) # combine input_path and file_name if whitelist_check(file_name): logging.error("file name is incorrect") sys.exit(1) @@ -129,50 +175,51 @@ def main(): signtool_path = "{}/../signtools/signtool_v3.py".format(work_path) ini_file_path = "{}/../signtools/config_cloud_app.ini".format(work_path) - # 0. clean by user and check input - if file_name == "clean": - run_clean() - sys.exit(0) - elif os.path.exists(file_name): - logging.critical("start pack %s", file_name) + # clean before pack app + run_clean(file_name) + if os.path.exists(file_path): + logging.info("start pack %s", file_path) else: - logging.error("%s is not exist, please check", file_name) + logging.error("%s is not exist, please check", file_path) sys.exit(1) # 1. get file type file_type_num = get_file_type(file_name) # 2. 
make package - cmd = ["tar", "cvf", tar_file_name, file_name] - run_cmd(cmd) - tar_file_size = os.path.getsize(tar_file_name) - - # 3. make tgz file - compress(tar_file_name, tgz_file_name, COMPRESS_LEVEL) - - # 4. change tgz file to libcombine.so and add head - tgz_version = 1 - add_head_to_tgz(tgz_file_name, tar_file_size, tgz_version) + if make_package(file_name, tar_file_name, file_path, tgz_file_name): + logging.error("packing failed") + sys.exit(1) - # 5. replace file name type content + # 3. replace file name type content replace_file_content("pack_tools/manifest_mask.txt", "manifest.txt", \ file_name, file_type_num) replace_file_content("pack_tools/configs_mask.xml", "configs.xml", \ file_name, file_type_num) - # 6. build config + # 4. build config config_path = "{}/config".format(work_path) # this parameter is not required but must exist. - gen_data_for_sign(work_path, ta_cert_file_path, config_path) - cmd = ["mv", "data_for_sign", "config"] - run_cmd(cmd) + try: + gen_data_for_sign(work_path, ta_cert_file_path, config_path) + cmd = ["mv", "data_for_sign", "config"] + run_cmd(cmd) + except RuntimeError: + logging.error("pack failed in config building.") + run_clean(file_name) + sys.exit(1) - # 7. do sign process - cmd = ["python3", "-B", signtool_path, work_path, output_path, "--privateCfg", ini_file_path] - run_cmd(cmd) + # 5. do sign process + try: + cmd = ["python3", "-B", signtool_path, work_path, output_path, "--privateCfg", ini_file_path] + run_cmd(cmd) + except RuntimeError: + logging.error("pack failed in signing.") + run_clean(file_name) + sys.exit(1) - # 8. do clean - run_clean() - logging.critical("success to packing %s", file_name) + # 6. do clean + run_clean(file_name) + logging.info("success to packing %s", file_name) if __name__ == '__main__': main() diff --git a/build/pack-Config/Config_pre.py b/build/pack-Config/Config_pre.py index c305bf744e45b2522748994e3394d435f10a087c..4b87e5adb9e0c7f5c15883925ae858e5b96367ff 100644 --- a/build/pack-Config/Config_pre.py +++ b/build/pack-Config/Config_pre.py @@ -19,6 +19,7 @@ import os import stat import sys import hashlib +import configparser import subprocess import re import logging @@ -33,6 +34,8 @@ BASE_POLICY_VERSION_TEE = 0b001 XML2TLV_PARSE_TOOL_INDEX = 1 XML2TLV_PY_VALUE = 1 << XML2TLV_PARSE_TOOL_INDEX +# set logging level for INFO +logging.basicConfig(level=logging.INFO) def get_policy_version(): @@ -55,6 +58,7 @@ def whitelist_check(intput_str): class load_config_header: + """ load config.ini """ str = struct.Struct('IHHIIIIIIIII') def __init__(self, data): @@ -172,7 +176,7 @@ def convert_xml2tlv(xml_file, tlv_file, input_path): parser_config_xml(xml_file, tag_parse_dict_file_path, \ tlv_file, input_path) if os.path.isfile(tlv_file): - logging.critical("convert xml to tlv success") + logging.info("convert xml to tlv success") else: logging.error("convert xml to tlv failed") raise RuntimeError @@ -195,9 +199,31 @@ def get_target_type_in_config(config_path, in_path): conf.write(ans) +def get_sign_conf_alg(file_name): + """ get sign conf alg from config """ + parser = configparser.ConfigParser() + parser.read(file_name) + if parser.has_option("signSecPublicCfg", "secSignConfigAlg"): + return parser.get("signSecPublicCfg", "secSignConfigAlg") + return 2 + + +def write_sign_data(input_path, config_header, ta_cert_buf, tlv_config_buf): + """ write data for sign to temp file """ + data_for_sign = os.path.join(input_path, "data_for_sign") + fd_sign = os.open(data_for_sign, os.O_WRONLY | os.O_CREAT, \ + stat.S_IWUSR | 
stat.S_IRUSR) + data_for_sign_fp = os.fdopen(fd_sign, "wb") + data_for_sign_fp.write(config_header.get_packed_data()) + data_for_sign_fp.write(ta_cert_buf) + data_for_sign_fp.write(tlv_config_buf) + data_for_sign_fp.close() + + def gen_data_for_sign(input_path, ta_cert_path, config_cert_path): ''' convert xml to tlv ''' - logging.critical(os.getcwd()) + sign_conf_alg = get_sign_conf_alg(os.path.join(input_path, "config_cloud.ini")) + logging.info(os.getcwd()) creat_temp_folder(input_path) tlv_dynconf_data = os.path.join(input_path, "config_tlv") xml_config_file = os.path.join(input_path, "configs.xml") @@ -207,8 +233,7 @@ def gen_data_for_sign(input_path, ta_cert_path, config_cert_path): dyn_conf_xml_file_path = os.path.join(input_path, 'temp/dyn_perm.xml') # may be use abspath csv_dir = os.path.realpath(os.path.join(os.getcwd(), 'xml2tlv_tools/csv')) - tag_parse_dict_file_path = \ - os.path.join(csv_dir, 'tag_parse_dict.csv') + tag_parse_dict_file_path = os.path.join(csv_dir, 'tag_parse_dict.csv') parser_dyn_conf(dyn_conf_xml_file_path, "", tag_parse_dict_file_path, input_path) convert_xml2tlv(xml_config_file, tlv_config_file, input_path) src_file_path = os.path.join(input_path, 'temp/configs_bak.xml') @@ -223,11 +248,9 @@ def gen_data_for_sign(input_path, ta_cert_path, config_cert_path): if os.path.exists(tlv_dynconf_data): with open(tlv_config_file, 'rb') as tlv_config_fp: - tlv_config_buf = \ - tlv_config_fp.read(os.path.getsize(tlv_config_file)) + tlv_config_buf = tlv_config_fp.read(os.path.getsize(tlv_config_file)) with open(tlv_dynconf_data, 'rb') as tlv_dynconf_fp: - tlv_config_buf = tlv_config_buf + \ - tlv_dynconf_fp.read(os.path.getsize(tlv_dynconf_data)) + b"\n" + tlv_config_buf = tlv_config_buf + tlv_dynconf_fp.read(os.path.getsize(tlv_dynconf_data)) + b"\n" tlv_data_size = len(tlv_config_buf) else: tlv_data_size = os.path.getsize(tlv_config_file) @@ -238,7 +261,10 @@ def gen_data_for_sign(input_path, ta_cert_path, config_cert_path): with open(ta_cert_path, 'rb') as ta_cert_fp: ta_cert_buf = struct.pack('I', 1) + ta_cert_fp.read(ta_cert_size) - sign_data_size = 4 + 4 + 4 + config_cert_size + 512 + if sign_conf_alg == '3': + sign_data_size = 4 + 4 + 4 + config_cert_size + 64 + else: + sign_data_size = 4 + 4 + 4 + config_cert_size + 512 config_hd_len = 44 context_size = ta_cert_size + tlv_data_size + sign_data_size @@ -246,15 +272,8 @@ def gen_data_for_sign(input_path, ta_cert_path, config_cert_path): CONFIG_VERSION, get_policy_version(), \ context_size, ta_cert_size, tlv_data_size, sign_data_size) - logging.critical(os.getcwd()) - data_for_sign = os.path.join(input_path, "data_for_sign") - fd_sign = os.open(data_for_sign, os.O_WRONLY | os.O_CREAT, \ - stat.S_IWUSR | stat.S_IRUSR) - data_for_sign_fp = os.fdopen(fd_sign, "wb") - data_for_sign_fp.write(config_header.get_packed_data()) - data_for_sign_fp.write(ta_cert_buf) - data_for_sign_fp.write(tlv_config_buf) - data_for_sign_fp.close() + logging.info(os.getcwd()) + write_sign_data(input_path, config_header, ta_cert_buf, tlv_config_buf) delete_temp_folder(input_path) diff --git a/build/signtools/auth_conf_parser.py b/build/signtools/auth_conf_parser.py index 3dc586c4cf9c8a91a16b12be5f6aa10e5e129d1d..dc39b9412d12ba3a19904dd8019fd4468724728c 100644 --- a/build/signtools/auth_conf_parser.py +++ b/build/signtools/auth_conf_parser.py @@ -24,7 +24,7 @@ from ctypes import c_uint32 from ctypes import sizeof from ctypes import memmove from ctypes import byref -from defusedxml import ElementTree as ET +import xml.etree.ElementTree as ET 
logging.basicConfig(level=logging.INFO, format='%(asctime)s line:%(lineno)d %(levelname)s:%(name)s:%(message)s', diff --git a/build/signtools/config_cloud.ini b/build/signtools/config_cloud.ini index b366ad84e807d88c72e804fe8a410c108fd2047a..2fd85d21797e813890be6fd7ff3f5c969af61d4c 100644 --- a/build/signtools/config_cloud.ini +++ b/build/signtools/config_cloud.ini @@ -9,6 +9,7 @@ secSignKeyLen = 4096 ;[fixed value] ;0 means SHA256 hash type ;1 means SHA512 hash type +;2 means SM3 hash type secHashType = 0 ;;; ; [fixed value] @@ -28,6 +29,34 @@ secEncryptKey = rsa_public_key_cloud.pem ;;; ;public key length secEncryptKeyLen = 3072 +;the algorithm used to encrypt TA content +;0 means AES-CBC +;1 means SM4-CBC +;[fixed value] +secEncryptContentAlg = 0 +;;; +;the scheme for protecting private key +;2 means use OEMK with ECDH +;3 means use OEMK with GM +;[fixed value] +secKeyProtectVersion = 2 +;;; +;the algorithm used to encrypt AES key info +;0 means RSA-OAEP +;1 means SM2 +;[fixed value] +secEncryptKeyInfoAlg = 0 +;the algorithm used to sign TA +;1 means ECDSA +;2 means RSA +;3 means SM2 +secSignTaAlg = 2 +;;; +;the algorithm used to sign config +;1 means ECDSA-SHA256 +;2 means RSA-PKCSV15-SHA256 +;3 means SM2 +secSignConfigAlg = 2 [signSecPublicCfg] ;;; diff --git a/build/signtools/dyn_conf_checker.py b/build/signtools/dyn_conf_checker.py index 0b37bcec87b3285385635087d8f250e1b4fbe168..bf7b38b459f9a6819815e579fceb0974a342a10a 100644 --- a/build/signtools/dyn_conf_checker.py +++ b/build/signtools/dyn_conf_checker.py @@ -47,7 +47,7 @@ def check_csv_sym(value): raise RuntimeError("has invalid sym in csv", value) -def classify_uuid_list(value): +def classify_uuid_list(dyn_key, attrib, value, origin_value): ans = "" uuid_list = value.split(',') @@ -110,18 +110,18 @@ def check_and_classify_attr(old_item, attr, value): value = do_split_and_classify(old_item, attr, 0, value) if attr == "uuid": - value = classify_uuid_list(value) + value = classify_uuid_list(0, 0, value, 0) return value -def check_iomap_range(iomap_range): +def check_iomap_range(dyn_key, attrib, value, origin_value): - if len(iomap_range) == 0: + if len(value) == 0: raise RuntimeError("you must define iomap_range") - iomap_range.replace(" ", "") - iomap_ranges = iomap_range.split(";") + value.replace(" ", "") + iomap_ranges = value.split(";") for iomap in iomap_ranges: addrs = iomap.split(",") # check if range is start,end format @@ -130,11 +130,11 @@ def check_iomap_range(iomap_range): if len(addrs) != 2: raise RuntimeError("iomap must be start1,end1;\ -start2,end2....", addrs) + start2,end2....", addrs) if '0x' not in addrs[0] or '0x' not in addrs[1]: raise RuntimeError("addr must be hex like \ -0xF8555000", addrs[0], addrs[1]) + 0xF8555000", addrs[0], addrs[1]) # check if addr is 4K aligned start = int(addrs[0], 16) @@ -145,12 +145,12 @@ start2,end2....", addrs) raise RuntimeError("addr must be 4K aligned", addrs[0], addrs[1]) if end <= start: raise RuntimeError("iomap range start must \ -smaller than end ", addrs[0], addrs[1]) + smaller than end ", addrs[0], addrs[1]) return 0 -def check_thread_limit(value): +def check_thread_limit(dyn_key, attrib, value, origin_value): if len(value) > 0: thread_limit = int(value) @@ -158,34 +158,41 @@ def check_thread_limit(value): raise RuntimeError("thread_limit is invalid", thread_limit) -def check_upgrade(value): +def check_upgrade(dyn_key, attrib, value, origin_value): if len(value) > 0: if value.lower() != 'true' and value.lower() != 'false': raise RuntimeError("upgrade must be true or 
false", value) -def check_virt2phys(value): +def check_virt2phys(dyn_key, attrib, value, origin_value): if len(value) > 0: if value.lower() != 'true' and value.lower() != 'false': raise RuntimeError("virt2phys must be true or false", value) -def check_get_vsrootinfo(value): +def check_ioremap_ns(dyn_key, attrib, value, origin_value): + ''' check ioremap ns ''' + if len(value) > 0: + if value.lower() != 'true' and value.lower() != 'false': + raise RuntimeError("ioremap ns must be true or false", value) + + +def check_get_vsrootinfo(dyn_key, attrib, value, origin_value): ''' check get vsrootinfo ''' if len(value) > 0: if value.lower() != 'true' and value.lower() != 'false': raise RuntimeError("get_vsrootinfo must be true or false", value) -def check_exception_mode(value): +def check_exception_mode(dyn_key, attrib, value, origin_value): if value != "restart" and value != "syscrash" and value != "ddos": raise RuntimeError("unknown exception mode", value) -def check_chip_type(attrib, value): +def check_chip_type(dyn_key, attrib, value, origin_value): if len(value) == 0: raise RuntimeError("chip_type cannot be NULL") @@ -215,7 +222,7 @@ def check_drv_name(value): length larger than 31", value) -def check_irq(value): +def check_irq(dyn_key, attrib, value, origin_value): if len(value) == 0: raise RuntimeError("irq cannot be NULL") @@ -230,7 +237,7 @@ def check_irq(value): raise RuntimeError("irq shoule not smaller than 32", value) -def check_map_secure_uuid(attrib, value): +def check_map_secure_uuid(dyn_key, attrib, value, origin_value): if len(value) != 36: raise RuntimeError("uuid len is invalid", value) @@ -244,7 +251,7 @@ def check_map_secure_uuid(attrib, value): raise RuntimeError("please set region in map secure item", attrib) -def check_map_secure_region(attrib, value): +def check_map_secure_region(dyn_key, attrib, value, origin_value): if len(value) == 0: raise RuntimeError("region cannot be NULL") @@ -257,32 +264,34 @@ def check_map_secure_region(attrib, value): if flag == 0: raise RuntimeError("please set uuid in map secure item", attrib) - check_iomap_range(value) + check_iomap_range(dyn_key, attrib, value, origin_value) -def check_drv_cmd_perm_info_item_permission(attrs, perm): +def check_drv_cmd_perm_info_item_permission(dyn_key, attrib, value, origin_value): - if len(perm) == 0: + if len(value) == 0: raise RuntimeError("permssion len should not be NULL") - if not re.match(r"^[0-9]*$", perm): - raise RuntimeError("there has invalid sym in perm", perm) + if not re.match(r"^[0-9]*$", value): + raise RuntimeError("there has invalid sym in perm", value) - if int(perm, 10) > 64 or int(perm, 10) < 1: - raise RuntimeError("perm can only in range 1-64", perm) + if int(value, 10) > 64 or int(value, 10) < 1: + raise RuntimeError("perm can only in range 1-64", value) flag = 0 - for attr in attrs: - if attr == "cmd" and len(attrs[attr]) != 0: + for attr in attrib: + if attr == "cmd" and len(attrib[attr]) != 0: flag = 1 break if flag == 0: raise RuntimeError("you should set cmd while you set cmd permission") + check_permssion_unique(value, origin_value) -def check_drv_cmd_perm_info_item_cmd(attrs, dyn_key): + +def check_drv_cmd_perm_info_item_cmd(dyn_key, attrib, value, origin_value): if len(dyn_key) == 0: raise RuntimeError("dyn_key len should not be 0") @@ -290,55 +299,59 @@ def check_drv_cmd_perm_info_item_cmd(attrs, dyn_key): flag = 0 cmd = "" - for attr in attrs: - if attr == "permission" and len(attrs[attr]) != 0: + for attr in attrib: + if attr == "permission" and len(attrib[attr]) != 0: 
flag = 1 - if attr == "cmd" and len(attrs[attr]) != 0: - cmd = attrs[attr] - if (dyn_key, attrs[attr]) in unique_list: + if attr == "cmd" and len(attrib[attr]) != 0: + cmd = attrib[attr] + if (dyn_key, attrib[attr]) in unique_list: raise RuntimeError("one cmd can only set \ -permission once", attrs[attr]) + permission once", attrib[attr]) unique_list.append((dyn_key, cmd)) if flag == 0: raise RuntimeError("you should set permission while \ -you set cmd permission") + you set cmd permission") + check_cmd_unique(value, origin_value) -def check_mac_info_item_permission(attrs, perm): - if len(perm) == 0: +def check_mac_info_item_permission(dyn_key, attrib, value, origin_value): + + if len(value) == 0: raise RuntimeError("permssion len should not be 0") - if ',' in perm or ';' in perm: - raise RuntimeError("multi permssion can only split by | ", perm) + if ',' in value or ';' in value: + raise RuntimeError("multi permssiom can only split by | ", value) flag = 0 - for attr in attrs: - if attr == "uuid" and len(attrs[attr]) != 0: + for attr in attrib: + if attr == "uuid" and len(attrib[attr]) != 0: flag = 1 break if flag == 0: raise RuntimeError("you should set uuid while \ -you set drvcall's permission") + you set drvcall's permission") - for perm_num in perm.split("|"): + for perm_num in value.split("|"): if int(perm_num, 10) > 64 or int(perm_num, 10) < 1: - raise RuntimeError("perm can only in range 1-64", perm) + raise RuntimeError("perm can only in range 1-64", value) + check_permssion_unique(value, origin_value) -def check_mac_info_item_uuid(attrs, dyn_key): + +def check_mac_info_item_uuid(dyn_key, attrib, value, origin_value): if len(dyn_key) == 0: raise RuntimeError("dyn_key len should not be 0") uuid_str = "" - for attr in attrs: - if attr == "uuid" and len(attrs[attr]) != 0: - uuid_str = attrs[attr] + for attr in attrib: + if attr == "uuid" and len(attrib[attr]) != 0: + uuid_str = attrib[attr] if ',' in uuid_str: raise RuntimeError("uuid in mac can only set one", uuid_str) if (dyn_key, uuid_str) in unique_list: @@ -377,124 +390,99 @@ def check_cmd_unique(value, origin_value): cmd_unique_dict[value_list[i]] = origin_value_list[i] -def check_perm_apply_item(attrs, perm): +def check_perm_apply_item(dyn_key, attrib, value, origin_value): - if len(perm) == 0: + if len(value) == 0: raise RuntimeError("permssion len should not be 0") flag = 0 - for attr in attrs: - if attr == "name" and len(attrs[attr]) != 0: + for attr in attrib: + if attr == "name" and len(attrib[attr]) != 0: flag = 1 break if flag == 0: raise RuntimeError("you should set drv's name while \ -you set drv's permission") + you set drv's permission") + check_permssion_unique(value, origin_value) -def check_ta_config_service_name(service_name): - if len(service_name) == 0 or len(service_name) >= 40: - raise Exception("service name is invalid", service_name) +def check_ta_config_service_name(dyn_key, attrib, value, origin_value): + if len(value) == 0 or len(value) >= 40: + raise Exception("service name is invalid", value) -def check_ta_config_stack_size(stack_size): - if int(stack_size, 10) > 0xffffffff or int(stack_size, 10) <= 0: - raise Exception("stack size is invalid", stack_size) +def check_ta_config_stack_size(dyn_key, attrib, value, origin_value): + if int(value, 10) > 0xffffffff or int(value, 10) <= 0: + raise Exception("stack size is invalid", value) -def check_ta_config_heap_size(heap_size): - if int(heap_size, 10) > 0xffffffff or int(heap_size, 10) <= 0: - raise Exception("heap size is invalid", heap_size) +def 
check_ta_config_heap_size(dyn_key, attrib, value, origin_value): + if int(value, 10) > 0xffffffff or int(value, 10) <= 0: + raise Exception("heap size is invalid", value) -def check_ta_config_rpmb_size(rpmb_size): - if int(rpmb_size, 10) > 0xffffffff or int(rpmb_size, 10) <= 0: - raise Exception("rpmb size is invalid", rpmb_size) +def check_ta_config_rpmb_size(dyn_key, attrib, value, origin_value): + if int(value, 10) > 0xffffffff or int(value, 10) <= 0: + raise Exception("rpmb size is invalid", value) -def check_ta_config_device_id(device_id): - if len(device_id) != 64: - raise Exception("device_id len is invalid", device_id) +def check_ta_config_device_id(dyn_key, attrib, value, origin_value): - for sym in device_id: + if len(value) != 64: + raise Exception("device_id len is invalid", value) + + for sym in value: if sym >= 'A' and sym <= 'Z': continue elif sym >= '0' and sym <= '9': continue else: - raise RuntimeError("has invalid sym in device_id", sym, device_id) + raise RuntimeError("has invalid sym in device_id", sym, value) + + +check_fun_list = { + 'drv_perm/drv_basic_info/thread_limit': check_thread_limit, + 'drv_perm/drv_basic_info/upgrade': check_upgrade, + 'drv_perm/drv_basic_info/virt2phys': check_virt2phys, + 'drv_perm/drv_basic_info/get_vsrootinfo': check_get_vsrootinfo, + 'drv_perm/drv_basic_info/exception_mode': check_exception_mode, + 'drv_perm/drv_basic_info/ioremap_ns': check_ioremap_ns, + 'drv_perm/drv_io_map/item/chip_type': check_chip_type, + 'drv_perm/drv_io_map/item/iomap': check_iomap_range, + 'drv_perm/irq/item/irq': check_irq, + 'drv_perm/map_secure/item/chip_type': check_chip_type, + 'drv_perm/map_secure/item/uuid': check_map_secure_uuid, + 'drv_perm/map_secure/item/region': check_map_secure_region, + 'drv_perm/map_nosecure/item/chip_type': check_chip_type, + 'drv_perm/drv_cmd_perm_info/item/cmd': check_drv_cmd_perm_info_item_cmd, + 'drv_perm/drv_cmd_perm_info/item/permission': check_drv_cmd_perm_info_item_permission, + 'drv_perm/drv_mac_info/item/uuid': check_mac_info_item_uuid, + 'drv_perm/drv_mac_info/item/permission': check_mac_info_item_permission, + 'drvcall_conf/drvcall_perm_apply/item/permission': check_perm_apply_item, + 'ConfigInfo/TA_Basic_Info/service_name/service_name': check_ta_config_service_name, + 'ConfigInfo/TA_Basic_Info/uuid/uuid': classify_uuid_list, + 'ConfigInfo/TA_Manifest_Info/stack_size/stack_size': check_ta_config_stack_size, + 'ConfigInfo/TA_Manifest_Info/heap_size/heap_size': check_ta_config_heap_size, + 'ConfigInfo/TA_Control_Info/RPMB_Info/RPMB_size/RPMB_size': check_ta_config_rpmb_size, + 'ConfigInfo/TA_Control_Info/DEBUG_Info/DEBUG_device_id/DEBUG_device_id': check_ta_config_device_id, +} + + +def check_fun_default(dyn_key, attrib, value, origin_value): + ''' check fun default ''' + return def dyn_perm_check(dyn_key, attrib, value, origin_value): - if dyn_key == 'drv_perm/drv_basic_info/thread_limit': - check_thread_limit(value) - elif dyn_key == 'drv_perm/drv_basic_info/upgrade': - check_upgrade(value) - elif dyn_key == 'drv_perm/drv_basic_info/virt2phys': - check_virt2phys(value) - elif dyn_key == 'drv_perm/drv_basic_info/get_vsrootinfo': - check_get_vsrootinfo(value) - elif dyn_key == 'drv_perm/drv_basic_info/exception_mode': - check_exception_mode(value) - elif dyn_key == 'drv_perm/drv_io_map/item/chip_type': - check_chip_type(attrib, value) - elif dyn_key == 'drv_perm/drv_io_map/item/iomap': - check_iomap_range(value) - elif dyn_key == 'drv_perm/irq/item/irq': - check_irq(value) - elif dyn_key == 
'drv_perm/map_secure/item/chip_type': - check_chip_type(attrib, value) - elif dyn_key == 'drv_perm/map_secure/item/uuid': - check_map_secure_uuid(attrib, value) - return - elif dyn_key == 'drv_perm/map_secure/item/region': - check_map_secure_region(attrib, value) - elif dyn_key == 'drv_perm/map_nosecure/item/chip_type': - check_chip_type(attrib, value) - elif dyn_key == 'drv_perm/map_nosecure/item/uuid': - # uuid has been checked in classify_uuid() - return - elif dyn_key == 'drv_perm/drv_cmd_perm_info/item/cmd': - # cmd has been trans by csv, so it must be valied - check_drv_cmd_perm_info_item_cmd(attrib, dyn_key) - check_cmd_unique(value, origin_value) - return - elif dyn_key == 'drv_perm/drv_cmd_perm_info/item/permission': - check_drv_cmd_perm_info_item_permission(attrib, value) - check_permssion_unique(value, origin_value) - elif dyn_key == 'drv_perm/drv_mac_info/item/uuid': - # uuid has been checked in classify_uuid() - check_mac_info_item_uuid(attrib, dyn_key) - return - elif dyn_key == 'drv_perm/drv_mac_info/item/permission': - check_mac_info_item_permission(attrib, value) - check_permssion_unique(value, origin_value) - elif dyn_key == 'drvcall_conf/drvcall_perm_apply/item/permission': - check_perm_apply_item(attrib, value) - check_permssion_unique(value, origin_value) - elif dyn_key == 'ConfigInfo/TA_Basic_Info/service_name/service_name': - check_ta_config_service_name(value) - elif dyn_key == 'ConfigInfo/TA_Basic_Info/uuid/uuid': - classify_uuid_list(value) - elif dyn_key == 'ConfigInfo/TA_Manifest_Info/stack_size/stack_size': - check_ta_config_stack_size(value) - elif dyn_key == 'ConfigInfo/TA_Manifest_Info/heap_size/heap_size': - check_ta_config_heap_size(value) - elif dyn_key == 'ConfigInfo/TA_Control_Info/RPMB_Info/RPMB_size/RPMB_size': - check_ta_config_rpmb_size(value) - elif dyn_key == \ - 'ConfigInfo/TA_Control_Info/DEBUG_Info/DEBUG_device_id/DEBUG_device_id': - check_ta_config_device_id(value) - else: - return + check_fun_list.get(dyn_key, check_fun_default)(dyn_key, attrib, value, origin_value) def check_text_ava(old_item, text): diff --git a/build/signtools/dyn_conf_parser.py b/build/signtools/dyn_conf_parser.py index 9e1302fbe602b73e541e8da7cd64dbf6445c0912..f26b82acaad9ed0293715f145b2d98b1d36aaefd 100644 --- a/build/signtools/dyn_conf_parser.py +++ b/build/signtools/dyn_conf_parser.py @@ -18,7 +18,7 @@ import string import os import stat import logging -from defusedxml import ElementTree as ET +import xml.etree.ElementTree as ET from dyn_conf_checker import dyn_perm_check from dyn_conf_checker import check_and_classify_attr from dyn_conf_checker import check_csv_sym diff --git a/build/signtools/generate_signature.py b/build/signtools/generate_signature.py index 8e55f5a1b4df7b096fc013d6477c92829a2977ad..4ac277b688aedb61d537432a6fda5073c33f2460 100644 --- a/build/signtools/generate_signature.py +++ b/build/signtools/generate_signature.py @@ -17,13 +17,75 @@ import os import stat import subprocess +import base64 +import binascii import logging from generate_hash import gen_hash +from gmssl import sm2 +from gmssl import sm3, func + + +def sm3_z(sm2_crypt, data): + """ use sm3 cal hash """ + sm3_zt = '0080' + '31323334353637383132333435363738' + \ + sm2_crypt.ecc_table['a'] + sm2_crypt.ecc_table['b'] + sm2_crypt.ecc_table['g'] + \ + sm2_crypt.public_key + sm3_zt = binascii.a2b_hex(sm3_zt) + sm3_za = sm3.sm3_hash(func.bytes_to_list(sm3_zt)) + sm3_m = (sm3_za + data.hex()).encode('utf-8') + sm3_e = sm3.sm3_hash(func.bytes_to_list(binascii.a2b_hex(sm3_m))) + return sm3_e + + 
+def sign_with_sm3(sm2_crypt, data, random_hex_str): + """ use sm3 sign """ + sign_data = binascii.a2b_hex(sm3_z(sm2_crypt, data).encode('utf-8')) + if random_hex_str is None: + random_hex_str = func.random_hex(sm2_crypt.para_len) + sign = sm2_crypt.sign(sign_data, random_hex_str) + return sign + + +def read_key_pem(file_path, arr): + """ get key from pem file """ + with open(file_path, 'r') as pem_fp: + pem_p = pem_fp.read() + begin = pem_p.find(arr[0]) + arr[1] + end = pem_p.find(arr[2]) + key = pem_p[begin:end].replace('\n', '') + + key = base64.b64decode(key).hex()[arr[3]:arr[4]] + return key + + +def get_array(str_array): + """ converting strings to arrays """ + hex_array = [] + for tmp in str_array: + hex_array.append(int(tmp, 16)) + return hex_array + + +def get_str_array(code_str): + """ converting strings to arrays """ + str_list = list(code_str) + index = 0 + prefix_bit = 2 + tmp = 0 + while index < len(str_list): + if tmp == prefix_bit: + str_list.insert(index, ',') + prefix_bit = 3 + tmp = 0 + tmp += 1 + index += 1 + code_str = ''.join(str_list) + return get_array(code_str.split(',')) def gen_ta_signature(cfg, uuid_str, raw_data, raw_data_path, hash_file_path, \ - out_file_path, out_path, key_info_data, is_big_ending, temp_path): + out_file_path, out_path, key_info_data, temp_path): msg_file = os.path.join(out_path, "temp", "config_msg") fd_msg = os.open(msg_file, os.O_WRONLY | os.O_CREAT, \ stat.S_IWUSR | stat.S_IRUSR) @@ -31,27 +93,45 @@ def gen_ta_signature(cfg, uuid_str, raw_data, raw_data_path, hash_file_path, \ msg_file_fp.write(raw_data) msg_file_fp.close() if cfg.sign_type == '1': # signed with local key - if cfg.padding_type == '0': - gen_hash(cfg.hash_type, raw_data, hash_file_path) - cmd = "openssl pkeyutl -sign -inkey {} -in {} -out {}".\ - format(cfg.sign_key, hash_file_path, out_file_path) - elif cfg.padding_type == '1': - if cfg.hash_type == '0': - cmd = "openssl dgst -sign {} -sha256 -sigopt \ - rsa_padding_mode:pss -sigopt rsa_pss_saltlen:-1 \ - -out {} {}".format(cfg.sign_key, out_file_path, msg_file) - else: - cmd = "openssl dgst -sign {} -sha512 -sigopt \ - rsa_padding_mode:pss -sigopt rsa_pss_saltlen:-1 \ - -out {} {}".format(cfg.sign_key, out_file_path, msg_file) - try: - subprocess.check_output(cmd.split(), shell=False) - except Exception: - logging.error("sign operation failed") - raise RuntimeError + if cfg.sign_ta_alg == '3': # SM2 + array = ['-----BEGIN EC PRIVATE KEY-----', 31, '-----END EC PRIVATE KEY-----', 14, 78] + priv_key = read_key_pem(cfg.sign_key, array) + array_pub = ['-----BEGIN EC PRIVATE KEY-----', 31, '-----END EC PRIVATE KEY-----', 114, 242] + pub_key = read_key_pem(cfg.sign_key, array_pub) + sm2_crypt = sm2.CryptSM2(public_key=pub_key, private_key=priv_key) + sm2_crypt.mode = 1 + random_hex_str = func.random_hex(sm2_crypt.para_len) + signature = sign_with_sm3(sm2_crypt, raw_data, random_hex_str) + temp_sig = get_str_array(signature) + final_sig = b''.join(map(lambda x:int.to_bytes(x, 1, 'little'), temp_sig)) + + fd_out = os.open(out_file_path, os.O_WRONLY | os.O_CREAT, \ + stat.S_IWUSR | stat.S_IRUSR) + out_fp = os.fdopen(fd_out, "wb") + out_fp.write(final_sig) + out_fp.close() + else: + if cfg.padding_type == '0': + gen_hash(cfg.hash_type, raw_data, hash_file_path) + cmd = "openssl pkeyutl -sign -inkey {} -in {} -out {}".\ + format(cfg.sign_key, hash_file_path, out_file_path) + elif cfg.padding_type == '1': + if cfg.hash_type == '0': + cmd = "openssl dgst -sign {} -sha256 -sigopt \ + rsa_padding_mode:pss -sigopt rsa_pss_saltlen:-1 \ 
+ -out {} {}".format(cfg.sign_key, out_file_path, msg_file) + else: + cmd = "openssl dgst -sign {} -sha512 -sigopt \ + rsa_padding_mode:pss -sigopt rsa_pss_saltlen:-1 \ + -out {} {}".format(cfg.sign_key, out_file_path, msg_file) + try: + print("========================== sign success =====================================") + subprocess.check_output(cmd.split(), shell=False) + except Exception: + logging.error("sign operation failed") + print("========================== sign error =====================================") + raise RuntimeError else: logging.error("unhandled signtype %s", cfg.sign_type) return - - diff --git a/build/signtools/get_ta_elf_hash.py b/build/signtools/get_ta_elf_hash.py index 59034d40f238afc18b8db3d8139297f1f2af9b03..4b971cb403a05a681d4e703da015a8e64594e540 100644 --- a/build/signtools/get_ta_elf_hash.py +++ b/build/signtools/get_ta_elf_hash.py @@ -273,6 +273,7 @@ def get_code_segment_from_elf(elf_file_name, sign_data, out_hash_file_name, out_ logging.error("file name is incorrect.") return + elf_file_name = os.path.realpath(elf_file_name) if check_if_pack_app(elf_file_name, sign_data, out_hash_file_name, out_hash_file_path) is True: return diff --git a/build/signtools/manifest.py b/build/signtools/manifest.py index 43214f81770bab14ffabb97f56afbf43422d4c9d..12055df3bd36c770dea481a3ca09cc1776a75a38 100755 --- a/build/signtools/manifest.py +++ b/build/signtools/manifest.py @@ -29,6 +29,9 @@ PRODUCT_CLIENT_IMAGE = 4 PRODUCT_DRIVER_IMAGE = 5 PRODUCT_PYTHON_IMAGE = 6 PRODUCT_JAVA_IMAGE = 7 +PRODUCT_RAW_EXECUTABLE_IMAGE = 8 +# set logging level for INFO +logging.basicConfig(level=logging.INFO) class PackUuid: @@ -46,9 +49,9 @@ class PackUuid: PackUuid.data = struct.Struct('>IHH8b') def print_values(self): - logging.critical("ATTRIBUTE / VALUE") + logging.info("ATTRIBUTE / VALUE") for attr, value in self.__dict__.items(): - logging.critical(attr, value) + logging.info(attr, value) def get_pack_data(self): values = [self.time_low, @@ -81,9 +84,9 @@ class Manifest: Manifest.data = struct.Struct('>' + 'I' * 6) def print_values(self): - logging.critical("ATTRIBUTE / VALUE") + logging.info("ATTRIBUTE / VALUE") for attr, value in self.__dict__.items(): - logging.critical(attr, value) + logging.info(attr, value) def get_pack_data(self): values = [self.single_instance, @@ -101,7 +104,7 @@ class Manifest: # verify property name in manifest file #---------------------------------------------------------------------------- def verify_property_name(str_line): - logging.critical("verify property name") + logging.info("verify property name") alphas = string.ascii_letters + string.digits cont = "".join([alphas, '-', '_', '.']) if len(str_line) > 1: @@ -124,7 +127,7 @@ def verify_property_name(str_line): # verify property value in manifest file #---------------------------------------------------------------------------- def verify_property_value(str_line): - logging.critical("verify property value") + logging.info("verify property value") filt_letter = chr(0) + chr(10) + chr(13) for thechar in str_line: if thechar in filt_letter: @@ -137,11 +140,11 @@ def verify_property_value(str_line): # remove tabs and space in property value #---------------------------------------------------------------------------- def trailing_space_tabs(str_line): - logging.critical("trailing space tabs in value head and trail") + logging.info("trailing space tabs in value head and trail") space_tabs = chr(9) + chr(32) + chr(160) space_tabs_newlines = space_tabs + chr(10) + chr(13) - logging.critical("str in: 
%s", str_line) + logging.info("str in: %s", str_line) index = 0 for thechar in str_line: if thechar in space_tabs: @@ -161,23 +164,65 @@ def trailing_space_tabs(str_line): break str_ret = headvalue[0:strlen + 1] + chr(10) - logging.critical("str ret: %s", str_ret) + logging.info("str ret: %s", str_ret) return str_ret -#---------------------------------------------------------------------------- -# verify manifest file, parse manifest file, generate a new manfiest file -#---------------------------------------------------------------------------- -def parser_manifest(manifest, manifest_data_path, mani_ext, big_endian=False): - logging.critical("verify manifest") - target_type = PRODUCT_TA_IMAGE +def update_target_type(target_info): + ''' update target type value. ''' + dyn_conf_target_type = target_info.dyn_conf_target_type + service_name = target_info.service_name + target_type = target_info.target_type + service_name_len = len(service_name) + logging.info("service name: %s", service_name) + logging.info("service name len: %d", service_name_len) + + max_service_len = 36 + if dyn_conf_target_type == 1: + target_type = PRODUCT_DRIVER_IMAGE + if dyn_conf_target_type == 3: + target_type = PRODUCT_SERVICE_IMAGE + if dyn_conf_target_type == 4: + target_type = PRODUCT_CLIENT_IMAGE + if dyn_conf_target_type == 6: + target_type = PRODUCT_PYTHON_IMAGE + if dyn_conf_target_type == 7: + target_type = PRODUCT_JAVA_IMAGE + if dyn_conf_target_type == 8: + target_type = PRODUCT_RAW_EXECUTABLE_IMAGE + + if not re.match(r"^[A-Za-z0-9_-]*$", service_name): + logging.error("service name only can use [A-Z] [a-z] [0-9] '-' and '_'") + return (False, 0) + + if service_name_len > max_service_len: + logging.error("service name len cannot larger than %s", str(max_service_len)) + return (False, 0) + return (True, target_type) + + +class TargetInfo: + ''' Class representing target info ''' + def __init__(self, dyn_conf_target_type, service_name, target_type, uuid_val): + self.dyn_conf_target_type = dyn_conf_target_type + self.service_name = service_name + self.target_type = target_type + self.uuid_val = uuid_val + def print_values(self): + ''' print values ''' + logging.info("ATTRIBUTE / VALUE") + for attr, value in self.__dict__.items(): + logging.info(attr, value) + + +def init_data_val(big_endian): + """ Init data value. 
""" uuid_val = PackUuid('\0' * 16, big_endian) - #manifest default + # manifest default manifest_val = Manifest('\0' * 24, big_endian) - manifest_val.single_instance = 1 manifest_val.multi_session = 0 manifest_val.multi_command = 0 @@ -185,15 +230,155 @@ def parser_manifest(manifest, manifest_data_path, mani_ext, big_endian=False): manifest_val.heap_size = 16384 manifest_val.stack_size = 2048 + target_type = PRODUCT_TA_IMAGE service_name = 'external_service' dyn_conf_target_type = 0 + target_info = TargetInfo(dyn_conf_target_type, service_name, target_type, uuid_val) + + return manifest_val, target_info + + +def update_manifest_info(prop_value_v, val, prop_name_low): + ''' update manifest information ''' + prop_value_low = prop_value_v.lower() + if 'true' == prop_value_low: + val = 1 + elif 'false' == prop_value_low: + val = 0 + else: + logging.error("%s value error!", prop_name_low) + return val + + +def check_prop_info(prop_name, prop_value_v): + ''' check property information ''' + if verify_property_name(prop_name) is False: + logging.error("manifest format invalid, please check it") + return False + + if verify_property_value(prop_value_v) is False: + logging.error("manifest format invalid, please check it") + return False + return True + + +class PropInfo: + ''' get Prop info ''' + def __init__(self, prop_name, prop_name_t, prop_value_t): + self.prop_name = prop_name + self.prop_name_t = prop_name_t + self.prop_value_t = prop_value_t + + def get_prop_value(self): + ''' get Prop value ''' + prop_value_t = self.prop_value_t + prop_value = trailing_space_tabs(prop_value_t) + prop_len = len(prop_value) + prop_value_v = prop_value[0:prop_len - 1] + logging.info("prop value_v: %s", prop_value_v) + return prop_value, prop_value_v + + +def parse_prop_info(manifest_val, prop_info, mani_ext_fp, target_info): + ''' parse property information ''' + prop_value, prop_value_v = PropInfo.get_prop_value(prop_info) + prop_name = prop_info.prop_name + prop_name_t = prop_info.prop_name_t + + if not check_prop_info(prop_name, prop_value_v): + return (False, 0, 0) + # name:value to lowcase, and parse manifest + prop_name_low = prop_name.lower() + logging.info("name lower: %s", prop_name_low) + if 'gpd.ta.appid' == prop_name_low: + logging.info("compare name is srv id") + target_info.uuid_val = uuid.UUID(prop_value_v) + logging.info("uuid str %s", target_info.uuid_val) + logging.info("val fields %s", target_info.uuid_val.fields) + elif 'gpd.ta.singleinstance' == prop_name_low: + manifest_val.single_instance = update_manifest_info(prop_value_v, manifest_val.single_instance, \ + prop_name_low) + elif 'gpd.ta.multisession' == prop_name_low: + manifest_val.multi_session = update_manifest_info(prop_value_v, manifest_val.multi_session, \ + prop_name_low) + elif 'gpd.ta.multicommand' == prop_name_low: + manifest_val.multi_command = update_manifest_info(prop_value_v, manifest_val.multi_command, \ + prop_name_low) + elif 'gpd.ta.instancekeepalive' == prop_name_low: + manifest_val.instancekeepalive = update_manifest_info(prop_value_v, manifest_val.instancekeepalive, \ + prop_name_low) + elif 'gpd.ta.datasize' == prop_name_low: + manifest_val.heap_size = int(prop_value_v) + logging.info('b') + elif 'gpd.ta.stacksize' == prop_name_low: + manifest_val.stack_size = int(prop_value_v) + logging.info('b') + elif 'gpd.ta.service_name' == prop_name_low: + target_info.service_name = prop_value_v + logging.info('b') + elif 'gpd.ta.dynconf' == prop_name_low: + logging.error("gpd.ta.dynConf is reserved, cannot set") + return 
(False, 0, 0) + else: + logging.info('b') + #write have not paresed manifest into sample.manifest file + mani_ext_fp.write(str.encode(prop_name_t)) + mani_ext_fp.write(str.encode(prop_value)) + if 'gpd.ta.is_lib' == prop_name_low: + if 'true' == prop_value_v.lower(): + target_info.target_type = PRODUCT_DYN_LIB + elif 'gpd.ta.target_type' == prop_name_low: + target_info.dyn_conf_target_type = int(prop_value_v) + if target_info.dyn_conf_target_type > 0xFFFF or target_info.dyn_conf_target_type < 0: + logging.error("gpd.ta.target_type must in range [0, 0xFFFF]") + return (False, 0, 0) + return (True, manifest_val, target_info) + + +def gen_product_name(uuid_val, target_info): + ''' generate product name. ''' + service_name = target_info.service_name + target_type = target_info.target_type + uuid_str = str(uuid_val) + product_name = str(uuid_val) + if target_type == PRODUCT_TA_IMAGE: + logging.info("product type is ta image") + product_name = "".join([uuid_str, ".sec"]) + elif target_type == PRODUCT_DRIVER_IMAGE: + logging.info("product type is driver") + product_name = "".join([service_name, ".sec"]) + elif target_type == PRODUCT_SERVICE_IMAGE: + logging.info("product type is service") + product_name = "".join([service_name, ".sec"]) + elif target_type == PRODUCT_CLIENT_IMAGE: + logging.info("product type is client") + product_name = "".join([service_name, ".so.sec"]) + elif target_type == PRODUCT_DYN_LIB: + logging.info("product type is dyn lib") + product_name = "".join([uuid_str, service_name, ".so.sec"]) + elif target_type == PRODUCT_PYTHON_IMAGE or target_type == PRODUCT_JAVA_IMAGE \ + or target_type == PRODUCT_RAW_EXECUTABLE_IMAGE: + logging.info("product type is python, java or raw_executable packing") + product_name = "".join([service_name, ".sec"]) + else: + logging.error("invalid product type!") + return (False, 0, 0) + return (True, product_name, uuid_str) + + +#---------------------------------------------------------------------------- +# verify manifest file, parse manifest file, generate a new manfiest file +#---------------------------------------------------------------------------- +def parser_manifest(manifest, manifest_data_path, mani_ext, big_endian=False): + logging.info("verify manifest") + manifest_val, target_info = init_data_val(big_endian) with open(manifest, 'r') as mani_fp: fd_ext = os.open(mani_ext, os.O_WRONLY | os.O_CREAT, \ stat.S_IWUSR | stat.S_IRUSR) mani_ext_fp = os.fdopen(fd_ext, "wb") for each_line in mani_fp: - logging.critical(each_line) + logging.info(each_line) if each_line.startswith("#") or not each_line.strip(): continue index = each_line.find(':', 1, len(each_line)) @@ -201,169 +386,36 @@ def parser_manifest(manifest, manifest_data_path, mani_ext, big_endian=False): prop_name = each_line[0:index] prop_name_t = each_line[0:index + 1] prop_value_t = each_line[index + 1:] - logging.critical("name is: %s; value is: %s", prop_name, prop_value_t) - - prop_value = trailing_space_tabs(prop_value_t) - prop_len = len(prop_value) - prop_value_v = prop_value[0:prop_len - 1] - logging.critical("prop value_v: %s", prop_value_v) - - if verify_property_name(prop_name) is False: - logging.error("manifest format invalid, please check it") - mani_ext_fp.close() - return (False, 0, 0) - - if verify_property_value(prop_value_v) is False: - logging.error("manifest format invalid, please check it") + prop_info = PropInfo(prop_name, prop_name_t, prop_value_t) + logging.info("name is: %s; value is: %s", prop_name, prop_value_t) + result, manifest_val, target_info = 
parse_prop_info(manifest_val, prop_info, \ + mani_ext_fp, target_info) + if result is False: mani_ext_fp.close() return (False, 0, 0) - - # name:value to lowcase, and parse manifest - prop_name_low = prop_name.lower() - logging.critical("name lower: %s", prop_name_low) - if 'gpd.ta.appid' == prop_name_low: - logging.critical("compare name is srv id") - uuid_val = uuid.UUID(prop_value_v) - logging.critical("uuid str %s", uuid_val) - logging.critical("val fields %s", uuid_val.fields) - - elif 'gpd.ta.singleinstance' == prop_name_low: - prop_value_low = prop_value_v.lower() - if 'true' == prop_value_low: - manifest_val.single_instance = 1 - elif 'false' == prop_value_low: - manifest_val.single_instance = 0 - else: - logging.error("single_instance value error!") - - elif 'gpd.ta.multisession' == prop_name_low: - prop_value_low = prop_value_v.lower() - if 'true' == prop_value_low: - manifest_val.multi_session = 1 - elif 'false' == prop_value_low: - manifest_val.multi_session = 0 - else: - logging.error("multi_session value error!") - - elif 'gpd.ta.multicommand' == prop_name_low: - prop_value_low = prop_value_v.lower() - if 'true' == prop_value_low: - manifest_val.multi_command = 1 - elif 'false' == prop_value_low: - manifest_val.multi_command = 0 - else: - logging.error("multi_command value error!") - - elif 'gpd.ta.instancekeepalive' == prop_name_low: - prop_value_low = prop_value_v.lower() - if 'true' == prop_value_low: - manifest_val.instancekeepalive = 1 - elif 'false' == prop_value_low: - manifest_val.instancekeepalive = 0 - else: - logging.error("instancekeepalive value error!") - - elif 'gpd.ta.datasize' == prop_name_low: - manifest_val.heap_size = int(prop_value_v) - logging.critical('b') - - elif 'gpd.ta.stacksize' == prop_name_low: - manifest_val.stack_size = int(prop_value_v) - logging.critical('b') - - elif 'gpd.ta.service_name' == prop_name_low: - service_name = prop_value_v - logging.critical('b') - - elif 'gpd.ta.dynconf' == prop_name_low: - mani_ext_fp.close() - logging.error("gpd.ta.dynConf is reserved, cannot set") - return (False, 0, 0) - - else: - logging.critical('b') - #write have not paresed manifest into sample.manifest file - mani_ext_fp.write(str.encode(prop_name_t)) - mani_ext_fp.write(str.encode(prop_value)) - if 'gpd.ta.is_lib' == prop_name_low: - prop_value_low = prop_value_v.lower() - if 'true' == prop_value_low: - target_type = PRODUCT_DYN_LIB - elif 'gpd.ta.target_type' == prop_name_low: - dyn_conf_target_type = int(prop_value_v) - if dyn_conf_target_type > 0xFFFF or \ - dyn_conf_target_type < 0: - mani_ext_fp.close() - logging.error("gpd.ta.target_type must \ - in range [0, 0xFFFF]") - return (False, 0, 0) - mani_ext_fp.close() #write the whole parsed manifest into sample.manifest file - - service_name_len = len(service_name) - logging.critical("service name: %s", service_name) - logging.critical("service name len: %s", service_name_len) - - max_service_len = 64 - - # dyn_conf_target_type is 1 means that is drv - if dyn_conf_target_type == 1: - max_service_len = 32 - target_type = PRODUCT_DRIVER_IMAGE - if not re.match(r"^[A-Za-z0-9_]*$", service_name): - logging.error("drv's name only can use [A-Z] [a-z] [0-9] and '_'") - return (False, 0, 0) - - if dyn_conf_target_type == 3: - max_service_len = 32 - target_type = PRODUCT_SERVICE_IMAGE - if not re.match(r"^[A-Za-z0-9_]*$", service_name): - logging.error("drv's name only can use \ - [A-Z] [a-z] [0-9] and '_'") - return (False, 0, 0) - if dyn_conf_target_type == 4: - max_service_len = 32 - target_type = 
PRODUCT_CLIENT_IMAGE - if not re.match(r"^[A-Za-z0-9_]*$", service_name): - logging.error("drv's name only can use \ - [A-Z] [a-z] [0-9] and '_'") - return (False, 0, 0) - if dyn_conf_target_type == 6: - max_service_len = 32 - target_type = PRODUCT_PYTHON_IMAGE - if not re.match(r"^[A-Za-z0-9_]*$", service_name): - logging.error("python dir's name only can use \ - [A-Z] [a-z] [0-9] and '_'") - return (False, 0, 0) - if dyn_conf_target_type == 7: - max_service_len = 32 - target_type = PRODUCT_JAVA_IMAGE - if not re.match(r"^[A-Za-z0-9_]*$", service_name): - logging.error("python dir's name only can use \ - [A-Z] [a-z] [0-9] and '_'") - return (False, 0, 0) - - if service_name_len > max_service_len: - logging.error("service name len cannot larger than %s", str(max_service_len)) + uuid_val = target_info.uuid_val + ret, target_info.target_type = update_target_type(target_info) + if ret is False: return (False, 0, 0) # get manifest string file len manifest_str_size = os.path.getsize(mani_ext) - logging.critical('manifest str size %s', manifest_str_size) + logging.info('manifest str size %d', manifest_str_size) # 2> manifest + service_name if big_endian: - logging.critical("bytes len %s", len(uuid_val.bytes)) + logging.info("bytes len %d", len(uuid_val.bytes)) else: - logging.critical("bytes len %s", len(uuid_val.bytes_le)) - logging.critical("bytes len %s", len(manifest_val.get_pack_data())) - logging.critical("bytes len %s", len(service_name)) + logging.info("bytes len %d", len(uuid_val.bytes_le)) + logging.info("bytes len %d", len(manifest_val.get_pack_data())) + logging.info("bytes len %d", len(target_info.service_name)) # 3> unparsed manifest, string manifest with open(mani_ext, 'rb') as string_mani_fp: - logging.critical("read manifest string size %s", manifest_str_size) + logging.info("read manifest string size %d", manifest_str_size) manifest_string_buf = string_mani_fp.read(manifest_str_size) - logging.critical("manifest strint: %s", manifest_string_buf) + logging.info("manifest strint: %s", manifest_string_buf) #---- write manifest parse context to manifest file fd_out = os.open(manifest_data_path, os.O_WRONLY | os.O_CREAT, \ @@ -373,35 +425,14 @@ def parser_manifest(manifest, manifest_data_path, mani_ext, big_endian=False): out_manifest_fp.write(uuid_val.bytes) else: out_manifest_fp.write(uuid_val.bytes_le) - out_manifest_fp.write(str.encode(service_name)) + out_manifest_fp.write(str.encode(target_info.service_name)) out_manifest_fp.write(manifest_val.get_pack_data()) out_manifest_fp.close() - uuid_str = str(uuid_val) - product_name = str(uuid_val) - if target_type == PRODUCT_TA_IMAGE: - logging.critical("product type is ta image") - product_name = "".join([uuid_str, ".sec"]) - elif target_type == PRODUCT_DRIVER_IMAGE: - logging.critical("product type is driver") - product_name = "".join([service_name, ".sec"]) - elif target_type == PRODUCT_SERVICE_IMAGE: - logging.critical("product type is service") - product_name = "".join([service_name, ".sec"]) - elif target_type == PRODUCT_CLIENT_IMAGE: - logging.critical("product type is client") - product_name = "".join([service_name, ".so.sec"]) - elif target_type == PRODUCT_DYN_LIB: - logging.critical("product type is dyn lib") - product_name = "".join([uuid_str, service_name, ".so.sec"]) - elif target_type == PRODUCT_PYTHON_IMAGE or target_type == PRODUCT_JAVA_IMAGE: - logging.critical("product type is python or java packing") - product_name = "".join([service_name, ".sec"]) - else: - logging.error("invalid product type!") + ret, 
product_name, uuid_str = gen_product_name(uuid_val, target_info) + if ret is False: return (False, 0, 0) - - return (True, product_name, uuid_str) + return (ret, product_name, uuid_str) class ManifestInfo: @@ -418,7 +449,7 @@ def process_manifest_file(xml_config_path, manifest_path, \ manifest_txt_exist = True if not os.path.exists(manifest_path): - logging.critical("xml trans manifest cfg") + logging.info("xml trans manifest cfg") manifest_txt_exist = False from xml_trans_manifest import trans_xml_to_manifest trans_xml_to_manifest(xml_config_path, manifest_path) diff --git a/build/signtools/signtool_v3.py b/build/signtools/signtool_v3.py index 39bec9ecae53f536c26c33e9e3f84b303528bf2d..906320f86a6a703bb56a22949e2da2c4bddd8d19 100755 --- a/build/signtools/signtool_v3.py +++ b/build/signtools/signtool_v3.py @@ -22,6 +22,7 @@ import shutil import argparse import configparser import re +import base64 import logging @@ -66,14 +67,32 @@ ELF_BLOCK_ALIGN = 0x1000 SEC_HEADER_BYTES = 16 SING_BIG_ENDIAN = False +# set logging level for INFO +logging.basicConfig(level=logging.INFO) -def whitelist_check(intput_str): - if not re.match(r"^[A-Za-z0-9\/\-_.]+$", intput_str): +def check_cfg_whitelist_format(input_str): + ''' input_str can be an empty string ''' + if input_str != "": + if whitelist_check(input_str): + return 1 + return 0 + + +def whitelist_check(input_str): + if not re.match(r"^[A-Za-z0-9\/\-_.]+$", input_str): return 1 return 0 +def check_cfg_integer_format(input_str): + ''' input_str can be an empty string ''' + if input_str != "": + if integer_check(input_str): + return 1 + return 0 + + def integer_check(intput_str): if not str(intput_str).isdigit(): return 1 @@ -104,15 +123,17 @@ def verify_elf_header(elf_path): (elf_type != 1 and elf_type != 2)): logging.error("invliad elf format") raise RuntimeError - return class AllCfg: release_type = "1" otrp_flag = "0" sign_type = "0" + enc_key_alg = "0" public_key = "" pub_key_len = "" + enc_cont_alg = "0" + key_protect_v = "2" re_sign_flag = "0" server_ip = "" config_path = "" @@ -123,6 +144,8 @@ class AllCfg: ta_version = 3 in_path = "" out_path = "" + sign_plat = 0 + sign_ta_alg = "0" class PublicCfg: @@ -174,77 +197,89 @@ class PrivateCfg: if parser.has_option(cfg_section, "secSignAlg"): all_cfg.sign_alg = parser.get(cfg_section, "secSignAlg") + if parser.has_option(cfg_section, "secEncryptKeyInfoAlg"): + all_cfg.enc_key_alg = parser.get(cfg_section, "secEncryptKeyInfoAlg") + if parser.has_option(cfg_section, "secEncryptContentAlg"): + all_cfg.enc_cont_alg = parser.get(cfg_section, "secEncryptContentAlg") + if parser.has_option(cfg_section, "secKeyProtectVersion"): + all_cfg.key_protect_v = parser.get(cfg_section, "secKeyProtectVersion") + if parser.has_option(cfg_section, "secSignTaAlg"): + all_cfg.sign_ta_alg = parser.get(cfg_section, "secSignTaAlg") + + +def check_key_info(cfg): + ''' check ini key info ''' + ret = 0 + if check_cfg_whitelist_format(cfg.sign_key): + logging.error("secSignKey is invalid.") + ret = 1 + if check_cfg_integer_format(cfg.sign_key_len): + logging.error("secSignKeyLen is invalid.") + ret = 1 + if check_cfg_whitelist_format(cfg.public_key): + logging.error("secEncryptKey is invalid.") + ret = 1 + if check_cfg_integer_format(cfg.pub_key_len): + logging.error("secEncryptKeyLen is invalid.") + ret = 1 + if check_cfg_integer_format(cfg.re_sign_flag): + logging.error("secReSignFlag is invalid.") + ret = 1 + return ret + def check_cfg(cfg): + ''' check setting info ''' ret = 0 - if cfg.release_type != "": - if 
integer_check(cfg.release_type): - logging.error("secReleaseType is invalid.") - ret = 1 - if cfg.otrp_flag != "": - if integer_check(cfg.otrp_flag): - logging.error("secOtrpFlag is invalid.") - ret = 1 - if cfg.sign_type != "": - if integer_check(cfg.sign_type): - logging.error("secSignType is invalid.") - ret = 1 - if cfg.server_ip != "": - if whitelist_check(cfg.server_ip): - logging.error("secSignServerIp is invalid.") - ret = 1 - if cfg.config_path != "": - if whitelist_check(cfg.config_path): - logging.error("configPath is invalid.") - ret = 1 - if cfg.sign_key != "": - if whitelist_check(cfg.sign_key): - logging.error("secSignKey is invalid.") - ret = 1 - if cfg.public_key != "": - if whitelist_check(cfg.public_key): - logging.error("secEncryptKey is invalid.") - ret = 1 - if cfg.pub_key_len != "": - if integer_check(cfg.pub_key_len): - logging.error("secEncryptKeyLen is invalid.") - ret = 1 - if cfg.re_sign_flag != "": - if integer_check(cfg.re_sign_flag): - logging.error("secReSignFlag is invalid.") - ret = 1 - if cfg.hash_type != "": - if integer_check(cfg.hash_type): - logging.error("secHashType is invalid.") - ret = 1 - if cfg.sign_key_len != "": - if integer_check(cfg.sign_key_len): - logging.error("secSignKeyLen is invalid.") - ret = 1 - if cfg.padding_type != "": - if integer_check(cfg.padding_type): - logging.error("secPaddingType is invalid.") - ret = 1 - if cfg.sign_alg != "": - if whitelist_check(cfg.sign_alg): - logging.error("secSignAlg is invalid.") - ret = 1 + if check_cfg_integer_format(cfg.release_type): + logging.error("secReleaseType is invalid.") + ret = 1 + if check_cfg_integer_format(cfg.otrp_flag): + logging.error("secOtrpFlag is invalid.") + ret = 1 + if check_cfg_integer_format(cfg.sign_type): + logging.error("secSignType is invalid.") + ret = 1 + if check_cfg_whitelist_format(cfg.server_ip): + logging.error("secSignServerIp is invalid.") + ret = 1 + if check_cfg_whitelist_format(cfg.config_path): + logging.error("configPath is invalid.") + ret = 1 + if check_cfg_integer_format(cfg.hash_type): + logging.error("secHashType is invalid.") + ret = 1 + if check_cfg_integer_format(cfg.padding_type): + logging.error("secPaddingType is invalid.") + ret = 1 + if check_cfg_whitelist_format(cfg.sign_alg): + logging.error("secSignAlg is invalid.") + ret = 1 + if check_key_info(cfg) != 0: + ret = 1 return ret def gen_key_version(cfg): ''' gen key version ''' + key_version = 0 + key_version = key_version | (int(cfg.enc_cont_alg) << 24) + key_version = key_version | (int(cfg.enc_key_alg) << 16) if cfg.pub_key_len == '4096': - return int(0x0302) + key_version = key_version | 0x0300 elif cfg.pub_key_len == '3072': - return int(0x0202) + key_version = key_version | 0x0200 elif cfg.pub_key_len == '2048': - return int(0x0002) + key_version = key_version | 0x0000 + elif cfg.pub_key_len == '256': + key_version = key_version | 0x0100 elif cfg.pub_key_len == '': return int(0x0000) - - logging.error("unhandled pulic key len %s", cfg.pub_key_len) - raise RuntimeError + else: + logging.info("unhandled pulic key len %s", cfg.pub_key_len) + raise RuntimeError + key_version = key_version | (int(cfg.key_protect_v)) + return key_version def gen_header(content_len, cfg): @@ -261,12 +296,14 @@ def get_sign_alg(cfg): sign_alg = 0 sign_alg = sign_alg | (int(cfg.release_type) << 28) sign_alg = sign_alg | (int(cfg.padding_type) << 27) - sign_alg = sign_alg | (int(cfg.hash_type) << 26) + sign_alg = sign_alg | ((int(cfg.hash_type) & 1) << 26) + sign_alg = sign_alg | ((int(cfg.hash_type) & 2) << 
25)
+    sign_alg = sign_alg | (int(cfg.sign_ta_alg) << 20)
     if cfg.sign_alg == "RSA":
         sign_alg = sign_alg | (2 << 20)
     elif cfg.sign_alg == "ECDSA":
         sign_alg = sign_alg | (1 << 20)
-    if cfg.sign_type == '4' or cfg.sign_type == '5' or cfg.sign_type == '6' :
+    if cfg.sign_type in ('4', '5'):
         sign_alg = sign_alg | 0x0000C000
     else:
         if cfg.sign_key_len == "2048":
@@ -280,13 +317,18 @@ def gen_aes_key_info(cfg):
     iv_data = get_random_bytes(16)
-    key_data = get_random_bytes(32)
     if SING_BIG_ENDIAN:
         aes_tag = '>3I'
     else:
         aes_tag = '<3I'
+    if cfg.enc_cont_alg == '1':
+        random_size = 16
+        key_data = get_random_bytes(16)
+    else:
+        random_size = 32
+        key_data = get_random_bytes(32)
     sign_alg = get_sign_alg(cfg)
-    key_info = struct.pack(aes_tag, 32, 16, sign_alg)
+    key_info = struct.pack(aes_tag, random_size, 16, sign_alg)
     key_info += key_data
     key_info += iv_data
     return key_data, iv_data, key_info
@@ -294,7 +336,7 @@ def gen_aes_key_info(cfg):

 def gen_sign_alg_info(cfg, out_file_path):
     sign_alg = get_sign_alg(cfg)
-    logging.critical("sign_alg value is 0x%x", sign_alg)
+    logging.info("sign_alg value is 0x%x", sign_alg)
     if SING_BIG_ENDIAN:
         info_tag = '>I'
     else:
@@ -307,22 +349,43 @@ def gen_sign_alg_info(cfg, out_file_path):
     out_file.write(struct.pack(info_tag, sign_alg))
     out_file.close()
-    return


+def read_key_pem(file_path, arr):
+    ''' read key pem '''
+    with open(file_path, 'r') as pem_fp:
+        pem_p = pem_fp.read()
+    begin = pem_p.find(arr[0]) + arr[1]
+    end = pem_p.find(arr[2])
+    key = pem_p[begin:end].replace('\n', '')
+
+    key = base64.b64decode(key).hex()[arr[3]:arr[4]]
+    return key

-def encrypt_aes_key(pubkey_path, in_data, out_path):
-    with open(pubkey_path, 'rb') as pubkey_file_fd:
-        pubkey_file = pubkey_file_fd.read(os.path.getsize(pubkey_path))
-        pubkey = RSA.importKey(pubkey_file)
-        cipher = PKCS1_OAEP.new(pubkey)
-        ciphertext = cipher.encrypt(in_data)
+
+def encrypt_aes_key(pubkey_path, in_data, out_path, cfg):
+    ''' encrypt aes key '''
+    if cfg.enc_cont_alg == '1':
+        from gmssl import sm2
+        array = ['-----BEGIN PUBLIC KEY-----', 27, '-----END PUBLIC KEY-----', 52, 183]
+        pubkey = read_key_pem(pubkey_path, array)
+
+        not_use = '000000000000000000000000000000000000000000000000000000000000000000'
+        # skip the fixed two-character '04' prefix of the uncompressed public key
+        sm2_crypt = sm2.CryptSM2(public_key=pubkey[2:], private_key=not_use)
+        sm2_crypt.mode = 1
+        ciphertext = sm2_crypt.encrypt(in_data)
+    else:
+        with open(pubkey_path, 'rb') as pubkey_file_fd:
+            pubkey_file = pubkey_file_fd.read(os.path.getsize(pubkey_path))
+            pubkey = RSA.importKey(pubkey_file)
+            cipher = PKCS1_OAEP.new(pubkey)
+            ciphertext = cipher.encrypt(in_data)
     fd_out = os.open(out_path, os.O_WRONLY | os.O_CREAT, \
         stat.S_IWUSR | stat.S_IRUSR)
     out_file = os.fdopen(fd_out, "wb")
     out_file.write(ciphertext)
     out_file.close()
-    return


 def gen_signature(cfg, uuid_str, data_for_sign, key_info_data, temp_path):
@@ -332,7 +395,7 @@ def gen_signature(cfg, uuid_str, data_for_sign, key_info_data, temp_path):
     signature_path = os.path.join(temp_path, "signature.bin")

     gen_ta_signature(cfg, uuid_str, data_for_sign, raw_data_path, \
-        hash_file_path, signature_path, cfg.out_path, key_info_data, SING_BIG_ENDIAN, temp_path)
+        hash_file_path, signature_path, cfg.out_path, key_info_data, temp_path)
     os.chmod(signature_path, stat.S_IWUSR | stat.S_IRUSR)
@@ -373,18 +436,23 @@ def gen_raw_data(manifest_data_path, manifest_ext_path, elf_file_path, \
     with open(config_path, 'rb') as config:
         file_op.write(config.read(config_size))
     file_op.close()
-    return


-def
aes_encrypt(key_data, iv_data, in_file_path, out_file_path): +def aes_encrypt(key_data, iv_data, in_file_path, out_file_path, cfg): + """ encrypt aes key info """ in_size = os.path.getsize(in_file_path) with open(in_file_path, 'rb') as in_file: in_data = in_file.read(in_size) - padding = 16 - in_size % 16 - in_data += bytes([padding]) * padding - - cipher = AES.new(key_data, AES.MODE_CBC, iv_data) - ciphertext = cipher.encrypt(in_data) + if cfg.enc_cont_alg == '1': + from gmssl.sm4 import CryptSM4, SM4_ENCRYPT + crypt_sm4 = CryptSM4() + crypt_sm4.set_key(key_data, SM4_ENCRYPT) + ciphertext = crypt_sm4.crypt_cbc(iv_data, in_data) + else: + padding = 16 - in_size % 16 + in_data += bytes([padding]) * padding + cipher = AES.new(key_data, AES.MODE_CBC, iv_data) + ciphertext = cipher.encrypt(in_data) fd_out = os.open(out_file_path, os.O_WRONLY | os.O_CREAT, \ stat.S_IWUSR | stat.S_IRUSR) @@ -392,8 +460,6 @@ def aes_encrypt(key_data, iv_data, in_file_path, out_file_path): out_file.write(ciphertext) out_file.close() - return - def parser_api_level(mk_compile_cfg, cmake_compile_cfg): default_api_level = 1 @@ -408,7 +474,7 @@ def parser_api_level(mk_compile_cfg, cmake_compile_cfg): elif os.path.exists(cmake_compile_cfg): compile_cfg_file = cmake_compile_cfg else: - logging.critical("Build config file doesn't exist, ignore it") + logging.info("Build config file doesn't exist, ignore it") return default_api_level with open(compile_cfg_file) as file_op: @@ -416,11 +482,11 @@ def parser_api_level(mk_compile_cfg, cmake_compile_cfg): if line.startswith("#") or "-DAPI_LEVEL" not in line: continue key, value = line.strip().split("-DAPI_LEVEL=") - logging.critical("key info %s", key) - logging.critical("ta_api_level = %s", value[0]) + logging.info("key info %s", key) + logging.info("ta_api_level = %s", value[0]) return value[0] - logging.critical("Build Config file doesn't define API_LEVEL") + logging.info("Build Config file doesn't define API_LEVEL") return default_api_level @@ -516,11 +582,10 @@ def get_sign_cert_block_buffer(cfg, signature_path, signature_size): def get_ta_sign_len(cfg): ''' get ta sign len ''' if cfg.sign_type == '4': - return 9219 + from generate_signature import get_ta_cms_sign_len + return get_ta_cms_sign_len() if cfg.sign_type == '5': return 0 - if cfg.sign_type == '6': - return 9227 if int(cfg.sign_key_len) == 256: return 72 return int(cfg.sign_key_len) / 8 @@ -559,10 +624,11 @@ def get_key_info_data(cfg, raw_file_path, key_data_path, raw_data_path): is_encrypt_sec = False if is_encrypt_sec is True: - # generate AES key info to encrypt raw data + # generate AES or SM4 key info to encrypt raw data key_data, iv_data, key_info_data = gen_aes_key_info(cfg) - encrypt_aes_key(cfg.public_key, key_info_data, key_data_path) - aes_encrypt(key_data, iv_data, raw_file_path, raw_data_path) + encrypt_aes_key(cfg.public_key, key_info_data, key_data_path, cfg) + # use AES-CBC or SM4-CBC to encrypt TA content + aes_encrypt(key_data, iv_data, raw_file_path, raw_data_path, cfg) else: gen_sign_alg_info(cfg, key_data_path) with open(key_data_path, 'rb') as key_info_fp: @@ -612,12 +678,22 @@ def get_data_path(cfg, temp_path): return key_data_path, raw_data_path +def find_so(in_path_1): + ''' find so ''' + path_list = os.listdir(in_path_1) + for file_name in path_list: + if os.path.splitext(file_name)[1] == ".so": + logging.info(file_name) + return file_name + return "" + + def prepare_data(cfg, temp_path): ''' get sec image ''' manifest_path = os.path.join(cfg.in_path, "manifest.txt") manifest_data_path 
= os.path.join(temp_path, "manifestData.bin") manifest_ext_path = os.path.join(temp_path, "manifestExt.bin") - elf_file_path = os.path.join(cfg.in_path, "libcombine.so") + elf_file_path = os.path.join(cfg.in_path, find_so(cfg.in_path)) raw_file_path = os.path.join(temp_path, "rawData") key_data_path, raw_data_path = get_data_path(cfg, temp_path) @@ -632,7 +708,7 @@ def prepare_data(cfg, temp_path): # 3. update_otrp_flag if cfg.otrp_flag == "1": - logging.critical("package otrp sec file\n") + logging.info("package otrp sec file\n") update_otrp_flag(manifest_ext_path) # 4. parser_dyn_conf @@ -708,10 +784,10 @@ def pack_sec_img(cfg, manifest_info, temp_path): sec_image.write(raw_data_fp.read(os.path.getsize(raw_data_path))) sec_image.truncate(int(SEC_HEADER_BYTES) + int(content_len)) sec_image.close() - logging.critical("=========================SUCCESS============================") - logging.critical("generate sec(common format) load image success: ") - logging.critical(sec_img_path) - logging.critical("============================================================") + logging.info("=========================SUCCESS============================") + logging.info("generate sec(common format) load image success: ") + logging.info(sec_img_path) + logging.info("============================================================") def gen_sec_image(temp_path, cfg): @@ -724,7 +800,7 @@ def gen_sec_image(temp_path, cfg): uuid_str = manifest_info.uuid_str uuid_str = uuid_str[0:36] - logging.critical("uuid str %s", uuid_str) + logging.info("uuid str %s", uuid_str) gen_signature(cfg, uuid_str, data_for_sign, key_info_data, temp_path) pack_sec_img(cfg, manifest_info, temp_path) @@ -740,32 +816,6 @@ def print_file(file_path): logging.error("%s", output) -def check_signature(temp_path, check_path): - ''' check ta signature ''' - temp_hash_path = os.path.join(temp_path, "rawDataHash.bin") - check_hash_path = os.path.join(check_path, "rawDataHash.bin") - - temp_hash_size = os.path.getsize(temp_hash_path) - check_hash_size = os.path.getsize(check_hash_path) - if temp_hash_size != check_hash_size: - logging.error("hash file size is diff: %d, %d", temp_hash_size, check_hash_size) - return -1 - - with open(temp_hash_path, 'rb') as temp_hash_fp: - temp_hash_info = temp_hash_fp.read(temp_hash_size) - with open(check_hash_path, 'rb') as check_hash_fp: - check_hash_info = check_hash_fp.read(check_hash_size) - if temp_hash_info != check_hash_info: - logging.error("hash file content is diff:") - logging.error("temp_hash_info:") - print_file(temp_hash_path) - logging.error("check_hash_info:") - print_file(check_hash_path) - return -1 - - return 0 - - def check_inout_path(in_path, out_path): ''' check inpath or outpath valid ''' if not os.path.exists(in_path): @@ -784,9 +834,8 @@ def check_inout_path(in_path, out_path): return 0 -def main(): - global SING_BIG_ENDIAN - sign_tool_dir = os.path.dirname(os.path.realpath(__file__)) +def define_parser(): + ''' define parser ''' parser = argparse.ArgumentParser() parser.add_argument("in_path", help="input path of data to be signed. 
\ (libcombine.so; manifest.txt; ...", type=str) @@ -798,7 +847,14 @@ def main(): help="sign cfg for product developer", type=str) parser.add_argument("--sign_endian", \ help="sign endian (little/big default little)", type=str) - args = parser.parse_args() + parser.add_argument("--sign_plat", \ + help="sign plat (pnf/wireless_hert/wireless_marp/wireless_debug default pnf)", type=str) + return parser + + +def init_cfg(args): + ''' init cfg ''' + global SING_BIG_ENDIAN cfg = AllCfg() if args.privateCfg: PrivateCfg(args.privateCfg, cfg) @@ -813,33 +869,49 @@ def main(): if args.sign_endian and args.sign_endian == "big": SING_BIG_ENDIAN = True + if args.sign_plat and args.sign_plat == "wireless_hert": + cfg.sign_plat = 1 + if args.sign_plat and args.sign_plat == "wireless_marp": + cfg.sign_plat = 2 + if args.sign_plat and args.sign_plat == "wireless_debug": + cfg.sign_plat = 3 + if args.sign_plat and args.sign_plat == "optics": + cfg.sign_plat = 4 + if args.sign_plat and args.sign_plat == "microwave": + cfg.sign_plat = 5 if check_cfg(cfg): logging.error("the configuration file field is incorrect.") - exit() + raise RuntimeError cfg.in_path = os.path.realpath(args.in_path) cfg.out_path = os.path.realpath(args.out_path) if check_inout_path(cfg.in_path, cfg.out_path): - exit() + raise RuntimeError + return cfg + + +def main(): + sign_tool_dir = os.path.dirname(os.path.realpath(__file__)) + parser = define_parser() + args = parser.parse_args() + cfg = init_cfg(args) os.chdir(sign_tool_dir) if cfg.re_sign_flag == "1": from re_generate_signature import re_sign_sec_img re_sign_sec_img(cfg.in_path, cfg.out_path, cfg) else: - if SING_BIG_ENDIAN: + if cfg.sign_type == '4': + from generate_signature import check_signature retry_time = 0 result = -1 - while retry_time <= 3 and result != 0: + while retry_time <= 10 and result != 0: temp_path = os.path.join(cfg.out_path, "temp") - check_path = os.path.join(cfg.out_path, "check") gen_sec_image(temp_path, cfg) - gen_sec_image(check_path, cfg) - result = check_signature(temp_path, check_path) - shutil.rmtree(check_path) + result = check_signature(temp_path) shutil.rmtree(temp_path) retry_time += 1 - if retry_time > 3 and result != 0: + if retry_time > 10 and result != 0: raise RuntimeError else: temp_path = os.path.join(cfg.out_path, "temp") diff --git a/build/signtools/xml_trans_manifest.py b/build/signtools/xml_trans_manifest.py index f9eb36a910b8ca092c24399c4e16f714a4ac64cc..40872bcfd01b176c5f3768d9f3d999ff83849843 100644 --- a/build/signtools/xml_trans_manifest.py +++ b/build/signtools/xml_trans_manifest.py @@ -16,8 +16,7 @@ import os import logging -from defusedxml import ElementTree as ET - +import xml.etree.ElementTree as ET type_trans = {"TYPE_NONE": "-1", "TYPE_CLASS": "0", diff --git a/build/tools/docker_config_tools/README b/build/tools/docker_config_tools/README new file mode 100644 index 0000000000000000000000000000000000000000..1e78e0c263cc1fc7e74980316ce915f2f415e7b8 --- /dev/null +++ b/build/tools/docker_config_tools/README @@ -0,0 +1,6 @@ +1. move start.sh and stop.sh to /var/lib/docker/hooks, and add executable right for scripts +2. move prestart_container.sh and poststop_container.sh to /var/lib/docker/hooks, and add executable right for scripts. This item is optional, if the container key isolation feature is not used, delete the configuration items related to the two scripts from the config.json file. +3. use gen_config.py to generate config.json, for example. 
python3 gen_config.py /root/config --memory=200M --cpus=10000 --cpuset-cpus="0,1" --disk-size=300M
+   1) /root/config is the destination directory where config.json will be generated
+   2) the other parameters configure the TEE cgroup resources
+4. docker run --hook-spec=xxx/config.json ....
diff --git a/build/tools/docker_config_tools/config_template.json b/build/tools/docker_config_tools/config_template.json
new file mode 100644
index 0000000000000000000000000000000000000000..c9431e5f85e48fc581127e33c3aaf7e1c0761420
--- /dev/null
+++ b/build/tools/docker_config_tools/config_template.json
@@ -0,0 +1,27 @@
+{
+    "prestart": [
+        {
+            "path": "/var/lib/docker/hooks/start.sh",
+            "args":[],
+            "env":[]
+        },
+        {
+            "path": "/var/lib/docker/hooks/prestart_container.sh",
+            "args":[],
+            "env":[]
+        }
+    ],
+    "poststart":[],
+    "poststop":[
+        {
+            "path": "/var/lib/docker/hooks/stop.sh",
+            "args":[],
+            "env":[]
+        },
+        {
+            "path": "/var/lib/docker/hooks/poststop_container.sh",
+            "args":[],
+            "env":[]
+        }
+    ]
+}
diff --git a/build/tools/docker_config_tools/gen_config.py b/build/tools/docker_config_tools/gen_config.py
new file mode 100644
index 0000000000000000000000000000000000000000..6d66dc4873c0e800fb321d0aea8dbf12ceff597a
--- /dev/null
+++ b/build/tools/docker_config_tools/gen_config.py
@@ -0,0 +1,56 @@
+#!/usr/bin/env python3
+# coding=utf-8
+#----------------------------------------------------------------------------
+# Copyright (c) Huawei Technologies Co., Ltd. 2023-2023. All rights reserved.
+# Licensed under the Mulan PSL v2.
+# You can use this software according to the terms and conditions of the Mulan
+# PSL v2.
+# You may obtain a copy of Mulan PSL v2 at:
+# http://license.coscl.org.cn/MulanPSL2
+# THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY
+# KIND, EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO
+# NON-INFRINGEMENT, MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
+# See the Mulan PSL v2 for more details.
+# Description: tool for generating the docker hook config.json
+#----------------------------------------------------------------------------
+"""Description: gen config.json
+"""
+import sys
+import os
+import stat
+import logging
+import json
+
+DST_FILE = "config.json"
+start_args = ["start.sh"]
+stop_args = ["stop.sh"]
+
+
+def main():
+    """ must specify dest dir """
+    if len(sys.argv) < 2:
+        logging.error("too few arguments!")
+        raise RuntimeError
+
+    start_args.extend(sys.argv[1:])
+    stop_args.extend(sys.argv[1:2])
+    dirs = sys.argv[1]
+    if not os.path.exists(dirs):
+        os.makedirs(dirs)
+    file_name = os.path.join(dirs, DST_FILE)
+    if os.path.isfile(file_name):
+        os.remove(file_name)
+
+    with open("config_template.json", "r") as file:
+        content = json.load(file)
+        content["prestart"][0]["args"] = start_args
+        content["poststop"][0]["args"] = stop_args
+
+    flags = os.O_RDWR | os.O_CREAT
+    modes = stat.S_IRUSR | stat.S_IWUSR | stat.S_IRGRP | stat.S_IROTH
+    with os.fdopen(os.open(file_name, flags, modes), 'w+') as json_file:
+        json.dump(content, json_file, indent=4)
+
+if __name__ == '__main__':
+    main()
+
diff --git a/build/tools/docker_config_tools/poststop_container.sh b/build/tools/docker_config_tools/poststop_container.sh
new file mode 100644
index 0000000000000000000000000000000000000000..565f20df8aaeb53f1518da686d16c0c9bae63d26
--- /dev/null
+++ b/build/tools/docker_config_tools/poststop_container.sh
@@ -0,0 +1,16 @@
+#!/bin/bash
+# Description: Add files related to ccos derived keys.
+# Copyright Huawei Technologies Co., Ltd. 2023-2023.
All rights reserved. +# iTrustee licensed under the Mulan PSL v2. +# You can use this software according to the terms and conditions of the Mulan +# PSL v2. +# You may obtain a copy of Mulan PSL v2 at: +# http://license.coscl.org.cn/MulanPSL2 +# THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY +# KIND, EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO +# NON-INFRINGEMENT, MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE. +# See the Mulan PSL v2 for more details. +container_id=$(pwd | awk -F '/' '{print $NF}') + +# del data by container_id +tee_teleport --containerid=$container_id --clean diff --git a/build/tools/docker_config_tools/prestart_container.sh b/build/tools/docker_config_tools/prestart_container.sh new file mode 100644 index 0000000000000000000000000000000000000000..c902b5d6d7a8c38d5701503d941bdd0c17db92d6 --- /dev/null +++ b/build/tools/docker_config_tools/prestart_container.sh @@ -0,0 +1,16 @@ +#!/bin/bash +# Description: Add files related to ccos derived keys. +# Copyright Huawei Technologies Co., Ltd. 2023-2023. All rights reserved. +# iTrustee licensed under the Mulan PSL v2. +# You can use this software according to the terms and conditions of the Mulan +# PSL v2. +# You may obtain a copy of Mulan PSL v2 at: +# http://license.coscl.org.cn/MulanPSL2 +# THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY +# KIND, EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO +# NON-INFRINGEMENT, MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE. +# See the Mulan PSL v2 for more details. +container_id=$(pwd | awk -F '/' '{print $NF}') +nsid=$(lsns -t pid -o ns,ppid | grep $PPID | awk '{print $1}') + +tee_teleport --nsid=${nsid} --containerid=$container_id --config-container diff --git a/build/tools/docker_config_tools/start.sh b/build/tools/docker_config_tools/start.sh new file mode 100644 index 0000000000000000000000000000000000000000..c9fe894197d69616b500a48c1fc5b4e0f426911a --- /dev/null +++ b/build/tools/docker_config_tools/start.sh @@ -0,0 +1,33 @@ +#!/bin/bash +# Description: this script is used to config tee resource when docker prestart +# Copyright Huawei Technologies Co., Ltd. 2023-2023. All rights reserved. +# iTrustee licensed under the Mulan PSL v2. +# You can use this software according to the terms and conditions of the Mulan +# PSL v2. +# You may obtain a copy of Mulan PSL v2 at: +# http://license.coscl.org.cn/MulanPSL2 +# THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY +# KIND, EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO +# NON-INFRINGEMENT, MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE. +# See the Mulan PSL v2 for more details. +set -e +nsid=$(lsns -t pid -o ns,ppid | grep $PPID | awk '{print $1}') + +cnt=1 +for i in $@ +do + if [ $cnt -gt 1 ] + then + cmd+=" $i" + fi + cnt=$(($cnt + 1)) +done +echo $cmd > $1/cmd.log +set +e +tee_teleport --config-resource --nsid=$nsid $cmd > $1/std.log +result=$? +echo $result > $1/id.txt +if [ $result -eq 255 ] +then + exit -1 +fi diff --git a/build/tools/docker_config_tools/stop.sh b/build/tools/docker_config_tools/stop.sh new file mode 100644 index 0000000000000000000000000000000000000000..d14b53874b38a37d2037178138849d2ccb528723 --- /dev/null +++ b/build/tools/docker_config_tools/stop.sh @@ -0,0 +1,14 @@ +#!/bin/bash +# Description: this script is used to clean tee resource when docker poststop +# Copyright Huawei Technologies Co., Ltd. 2023-2023. All rights reserved. +# iTrustee licensed under the Mulan PSL v2. 
+# You can use this software according to the terms and conditions of the Mulan +# PSL v2. +# You may obtain a copy of Mulan PSL v2 at: +# http://license.coscl.org.cn/MulanPSL2 +# THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY +# KIND, EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO +# NON-INFRINGEMENT, MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE. +# See the Mulan PSL v2 for more details. +id=$(cat $1/id.txt) +tee_teleport --clean --grpid=$id > stop.log diff --git a/include/TA/huawei_ext/crypto_wrapper.h b/include/TA/huawei_ext/crypto_wrapper.h index aba36c2541410ec3fa77e8fe65922db9e2f4ee96..68ecd1b35dd0f8b3dfa4c558ed87e3ecd398b1fc 100644 --- a/include/TA/huawei_ext/crypto_wrapper.h +++ b/include/TA/huawei_ext/crypto_wrapper.h @@ -16,7 +16,6 @@ #include #include #include "crypto_cert_wrapper.h" -#include "crypto_device_key_wrapper.h" #include "crypto_ec_wrapper.h" #include "crypto_ec_x509_wrapper.h" #include "crypto_rsa_wrapper.h" diff --git a/include/TA/huawei_ext/tee_crypto_hal.h b/include/TA/huawei_ext/tee_crypto_hal.h index bfc0be5fefe2dadd1de229614578760f8c66985c..f38a82cd4616241a76298b829e5387c024a57207 100644 --- a/include/TA/huawei_ext/tee_crypto_hal.h +++ b/include/TA/huawei_ext/tee_crypto_hal.h @@ -14,12 +14,15 @@ #ifndef TEE_CRYPTO_HAL_H #define TEE_CRYPTO_HAL_H #include "tee_crypto_api.h" +#define CRYPTO_ENGINE_FLAG_MAX 0xFFFF +#define CRYPTO_ENGINE_WITHOUT_COPY 0x10000 enum CRYPTO_ENGINE { DX_CRYPTO = 0, EPS_CRYPTO = 1, SOFT_CRYPTO = 2, SEC_CRYPTO = 3, - CRYPTO_ENGINE_MAX = 1024, + RNG_CRYPTO = 4, + CRYPTO_ENGINE_MAX, }; /* diff --git a/include/TA/huawei_ext/tee_ext_api.h b/include/TA/huawei_ext/tee_ext_api.h index 29add2af9ef493ac9a28cffbaf4ba9573ffffd78..f468f9a6bf87d865f4a936901fc469545bad5788 100644 --- a/include/TA/huawei_ext/tee_ext_api.h +++ b/include/TA/huawei_ext/tee_ext_api.h @@ -132,9 +132,18 @@ TEE_Result AddCaller_CA(const uint8_t *cainfo_hash, uint32_t length); */ TEE_Result AddCaller_TA_all(void); +/* + * caller info types: + * the SESSION_FROM_CA means caller is CA from REE side + * the SESSION_FROM_TA means caller is TA from TEE side + * the SESSION_FROM_NOT_SUPPORTED means the current TA task not found, for example, from TA sub thread + * the SESSION_FROM_UNKNOWN means the TA caller not found + */ #define SESSION_FROM_CA 0 #define SESSION_FROM_TA 1 +#define SESSION_FROM_NOT_SUPPORTED 0xFE #define SESSION_FROM_UNKNOWN 0xFF + /* * get cruurent session type * diff --git a/include/TA/tee_crypto_api.h b/include/TA/tee_crypto_api.h index 0908f83b6cee3bcf476662df699b5d198ed77a30..5c957afe7aad3f6134039092754c9af209fea6cc 100644 --- a/include/TA/tee_crypto_api.h +++ b/include/TA/tee_crypto_api.h @@ -306,8 +306,6 @@ typedef struct { size_t payload_len; } operation_ae_init; -typedef struct __TEE_OperationHandle *TEE_OperationHandle; - typedef struct __TEE_OperationHandle TEE_OperationHandleVar; typedef struct __TEE_ObjectHandle TEE_ObjectHandleVar; diff --git a/include/TA/tee_defines.h b/include/TA/tee_defines.h index ca5cdc2c8a140761b9518bc8670ac09d1db2c9e8..36ff389a6ed02b28b5364225b6a833aac59bc8af 100755 --- a/include/TA/tee_defines.h +++ b/include/TA/tee_defines.h @@ -1,5 +1,5 @@ /* - * Copyright (c) Huawei Technologies Co., Ltd. 2018-2020. All rights reserved. + * Copyright (C) Huawei Technologies Co., Ltd. 2018-2020. All rights reserved. * Licensed under the Mulan PSL v2. * You can use this software according to the terms and conditions of the Mulan PSL v2. 
* You may obtain a copy of Mulan PSL v2 at: @@ -242,6 +242,7 @@ enum TEE_Result_Value { TEE_ERROR_STORAGE_ENFILE = 0x80001005, /* opened files exceed max count in system */ TEE_ERROR_STORAGE_EMFILE = 0x80001006, /* opened files exceed max count for this process */ TEE_ERROR_STORAGE_EROFS = 0x80001007, /* stroage section is read only */ + TEE_ERROR_STORAGE_EROLLBACK = 0x80001008, /* file object has been rolled back */ TEE_ERROR_STORAGE_PATH_WRONG = 0x8000100A, /* File path error */ TEE_ERROR_MSG_QUEUE_OVERFLOW = 0x8000100B, /* sevice msg queue overflow */ TEE_ERROR_SUBTHREAD_ACCESS = 0x8000100C, /* The subthread created by TA cannot access the service */ @@ -362,7 +363,6 @@ typedef TEE_Result TEEC_Result; typedef uint32_t TEE_TASessionHandle; #endif -typedef struct __TEE_ObjectHandle *TEE_ObjectHandle; typedef struct __TEE_ObjectEnumHandle *TEE_ObjectEnumHandle; typedef struct __TEE_OperationHandle *TEE_OperationHandle; diff --git a/include/TA/tee_trusted_storage_api.h b/include/TA/tee_trusted_storage_api.h index a3d3396ddad0574f7bff1d67d097ab378747a148..680ea332bdd1046c7ab1ef35b3ef09ffc08a399c 100644 --- a/include/TA/tee_trusted_storage_api.h +++ b/include/TA/tee_trusted_storage_api.h @@ -36,6 +36,7 @@ typedef uint32_t TEE_Whence; */ enum Object_Storage_Constants { TEE_OBJECT_STORAGE_PRIVATE = 0x00000001, /* Separate private storage space for each application */ + TEE_OBJECT_STORAGE_ANTIROLLBACK = 0x80000003, /* Add for anti-rollback */ }; /* diff --git a/thirdparty/open_source/musl/libc/stddef.h b/thirdparty/open_source/musl/libc/stddef.h index f25b86396e80a015490a6aec521cb38ab36ab118..4955dc56feba5e45e75ae9f18d0c94ff843dfc64 100644 --- a/thirdparty/open_source/musl/libc/stddef.h +++ b/thirdparty/open_source/musl/libc/stddef.h @@ -24,4 +24,11 @@ #define offsetof(type, member) ((size_t)( (char *)&(((type *)0)->member) - (char *)0 )) #endif +#ifdef __clang__ +#ifdef __cplusplus +namespace std { typedef decltype(nullptr) nullptr_t; } +using ::std::nullptr_t; +#endif +#endif + #endif diff --git a/thirdparty/open_source/musl/libc/unistd.h b/thirdparty/open_source/musl/libc/unistd.h index 212263a7e800bd8d89d87eef9886222c005a06b3..fdcd76ce9c91dacad3b1b4e126c4631dfe0baa78 100644 --- a/thirdparty/open_source/musl/libc/unistd.h +++ b/thirdparty/open_source/musl/libc/unistd.h @@ -196,6 +196,17 @@ int euidaccess(const char *, int); int eaccess(const char *, int); ssize_t copy_file_range(int, off_t *, int, off_t *, size_t, unsigned); pid_t gettid(void); + +/* Evaluate EXPRESSION, and repeat as long as it returns -1 with `errno' + set to EINTR. */ + +# define TEMP_FAILURE_RETRY(expression) \ + (__extension__ \ + ({ long int __result; \ + do __result = (long int) (expression); \ + while (__result == -1L && errno == EINTR); \ + __result; })) + #endif #if defined(_LARGEFILE64_SOURCE) || defined(_GNU_SOURCE) diff --git a/tools/ca_auth_hash_tools/calc_ca_caller_hash.py b/tools/ca_auth_hash_tools/calc_ca_caller_hash.py index 13c06c9be729cd61383d66a201ea76e31a033967..3db2d84068462fc43d84ffcd92bab33b2f35a5fe 100644 --- a/tools/ca_auth_hash_tools/calc_ca_caller_hash.py +++ b/tools/ca_auth_hash_tools/calc_ca_caller_hash.py @@ -15,7 +15,7 @@ import binascii import struct import logging import os -from defusedxml import ElementTree as ET +import xml.etree.ElementTree as ET logging.basicConfig(level=logging.INFO, format='%(asctime)s line:%(lineno)d %(levelname)s:%(name)s:%(message)s',