diff --git a/build-tools/capi_parser/src/coreImpl/diff/diff.py b/build-tools/capi_parser/src/coreImpl/diff/diff.py
index 43f15d2cacb0b87ad2fe5b1d080b505f6adbd6df..375f7ab94be6ce616070cfe0d276a9b5125dadab 100644
--- a/build-tools/capi_parser/src/coreImpl/diff/diff.py
+++ b/build-tools/capi_parser/src/coreImpl/diff/diff.py
@@ -13,7 +13,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

-from coreImpl.parser.parser import parser_include_ast
+from coreImpl.parser.parser import diff_parser_include_ast
 from coreImpl.diff.diff_file import start_diff_file


@@ -42,5 +42,5 @@ def process_file_diff(old_file, new_file):
 def get_parser_data(file_path):
     root_start = file_path.split('sdk_c')[0]
     root_path = f'{root_start}sdk_c'
-    parser_data = parser_include_ast(root_path, [file_path])
+    parser_data = diff_parser_include_ast(root_path, [file_path])
     return parser_data
diff --git a/build-tools/capi_parser/src/coreImpl/diff/diff_file.py b/build-tools/capi_parser/src/coreImpl/diff/diff_file.py
index 0635db772b2671ede5285629946c7da01df24d34..2c4c0fb169ed2c0b71bed6886c5fd36873073477 100644
--- a/build-tools/capi_parser/src/coreImpl/diff/diff_file.py
+++ b/build-tools/capi_parser/src/coreImpl/diff/diff_file.py
@@ -19,14 +19,15 @@ import os
 import stat
 from collections import OrderedDict
 import openpyxl as op
-from coreImpl.parser.parser import parser_include_ast
+from coreImpl.parser.parser import diff_parser_include_ast
 from coreImpl.diff.diff_processor_node import judgment_entrance, change_data_total
-from typedef.diff.diff import OutputJson, ApiChangeData
+from typedef.diff.diff import OutputJson, ApiChangeData, IgnoreFileDirectory
 from bin.write_md import write_md_entrance

 global_old_dir = ''
 global_new_dir = ''
 diff_info_list = []
+syntax_file_list = []


 def get_modification_type_dict():
@@ -67,13 +68,15 @@ def get_api_change_obj(api_data):
             change_data_obj.set_kit_name(element.kit_name)
             change_data_obj.set_sub_system(element.sub_system)
             change_data_obj.set_is_api_change(element.is_api_change)
-            change_data_obj.set_class_name(element.class_name)
+            change_data_obj.set_current_api_type(element.current_api_type)
             change_data_obj.set_diff_type(element.operation_diff_type)
             change_data_obj.set_change_type(element.api_modification_type)
             change_data_obj.set_old_all_text(element.old_api_full_text)
             change_data_obj.set_new_all_text(element.new_api_full_text)
             change_data_obj.set_compatible_total(element.is_compatible)
             change_data_obj.set_is_system_api(element.is_system_api)
+            change_data_obj.set_open_close_api(element.open_close_api)
+            change_data_obj.set_is_third_party_api(element.is_third_party_api)
             key = 1
         else:
             old_all_text = '{}#&#{}'.format(change_data_obj.old_all_text, element.old_api_full_text)
@@ -120,7 +123,7 @@ def collect_node_api_change(api_change_info_list):
             api_change_info.kit_name,
             api_change_info.sub_system,
             api_change_info.is_api_change,
-            api_change_info.class_name,
+            api_change_info.current_api_type,
             api_change_info.diff_type,
             api_change_info.change_type,
             api_change_info.compatible,
@@ -129,18 +132,42 @@
             api_change_info.new_all_text,
             api_change_info.compatible_total,
             api_change_info.unique_id,
-            api_change_info.is_system_api
+            api_change_info.is_system_api,
+            api_change_info.open_close_api,
+            api_change_info.is_third_party_api
         ]
         change_data.append(info_data)
     return change_data


+def syntax_file_excel(output_path):
+    data = []
+    if syntax_file_list:
+        for syntax_dict in syntax_file_list:
+            info_data = [
+                syntax_dict.get('current_file'),
+                syntax_dict.get('error_message')
+            ]
+            data.append(info_data)
+
+    wb = op.Workbook()
+    ws = wb['Sheet']
+    ws.title = '语法错误文件信息'
+    ws.append(['当前文件', '错误信息'])
+    for element in data:
+        d = element[0], element[1]
+        ws.append(d)
+    output_path_xlsx = os.path.abspath(os.path.join(output_path, r'syntax_file_error.xlsx'))
+    wb.save(output_path_xlsx)
+
+
 def start_diff_file(old_dir, new_dir, output_path):
     result_info_list = global_assignment(old_dir, new_dir)
     total = change_data_total
     collect_api_change_data = collect_api_change(total)
     generate_excel(result_info_list, collect_api_change_data, output_path)
+    syntax_file_excel(output_path)
     write_md_entrance(result_info_list, output_path)
     result_json = result_to_json(result_info_list)
     diff_result_path = r'./diff_result.txt'
@@ -179,11 +206,11 @@ def generate_excel(result_info_list, api_change_data, output_path):
     ws_of_change = wb.create_sheet('api变更次数统计')
     ws_of_change.append(['api名称', 'kit名称', '归属子系统', '是否是api', 'api类型', '操作标记',
                          '变更类型', '兼容性', '变更次数', '差异项-旧版本', '差异项-新版本', '兼容性列表', '接口全路径',
-                         '是否为系统API'])
+                         '是否为系统API', '开源/闭源API', '是否是三方库api'])
     for element in change_data_list:
         change_data = element[0], element[1], element[2], element[3], element[4], element[5],\
             element[6], element[7], element[8], element[9], element[10], element[11],\
-            element[12], element[13]
+            element[12], element[13], element[14], element[15]
         ws_of_change.append(change_data)
     output_path_xlsx = os.path.abspath(os.path.join(output_path, 'diff.xlsx'))
     wb.save(output_path_xlsx)
@@ -224,6 +251,14 @@ def get_file_ext(file_name):
     return os.path.splitext(file_name)[1]


+def filter_ignore_file(file_path):
+    ignore_dict = IgnoreFileDirectory.ignore_file_dict
+    for key in ignore_dict.keys():
+        if key in file_path:
+            return False
+    return True
+
+
 def diff_list(old_file_list, new_file_list, old_dir, new_dir):
     all_list = set(old_file_list + new_file_list)
     if len(all_list) == 0:
@@ -246,8 +281,8 @@ def add_new_file(diff_file_path):
     if os.path.isdir(diff_file_path):
         add_file(diff_file_path)
-    else:
-        result_map = parse_file_result(parser_include_ast(global_new_dir, [diff_file_path], flag=1))
+    elif filter_ignore_file(diff_file_path):
+        result_map = parse_file_result(diff_parser_include_ast(global_new_dir, [diff_file_path], flag=1))
         for new_info in result_map.values():
             diff_info_list.extend(judgment_entrance(None, new_info))


@@ -255,8 +290,8 @@ def del_old_file(diff_file_path):
     if os.path.isdir(diff_file_path):
         del_file(diff_file_path)
-    else:
-        result_map = parse_file_result(parser_include_ast(global_old_dir, [diff_file_path], flag=0))
+    elif filter_ignore_file(diff_file_path):
+        result_map = parse_file_result(diff_parser_include_ast(global_old_dir, [diff_file_path], flag=0))
         for old_info in result_map.values():
             diff_info_list.extend(judgment_entrance(old_info, None))


@@ -278,12 +313,14 @@ def get_same_file_diff(target_file, old_file_list, new_file_list, old_dir, new_d


 def get_file_result_diff(old_target_file, new_target_file):
-    old_file_result_map = parse_file_result(parser_include_ast(global_old_dir, [old_target_file], flag=0))
-    new_file_result_map = parse_file_result(parser_include_ast(global_new_dir, [new_target_file], flag=1))
-    merged_dict = OrderedDict(list(old_file_result_map.items()) + list(new_file_result_map.items()))
-    all_key_list = merged_dict.keys()
-    for key in all_key_list:
-        diff_info_list.extend(judgment_entrance(old_file_result_map.get(key), new_file_result_map.get(key)))
+    if filter_ignore_file(old_target_file):
+        old_file_result_map = parse_file_result(diff_parser_include_ast(global_old_dir, [old_target_file], flag=0))
+        new_file_result_map = parse_file_result(diff_parser_include_ast(global_new_dir, [new_target_file], flag=1))
+        if old_file_result_map and new_file_result_map:
+            merged_dict = OrderedDict(list(old_file_result_map.items()) + list(new_file_result_map.items()))
+            all_key_list = merged_dict.keys()
+            for key in all_key_list:
+                diff_info_list.extend(judgment_entrance(old_file_result_map.get(key), new_file_result_map.get(key)))


 def del_file(dir_path):
@@ -294,8 +331,8 @@
         file_path = os.path.join(dir_path, i)
         if os.path.isdir(file_path):
             del_file(file_path)
-        if get_file_ext(i) == '.h':
-            result_map = parse_file_result(parser_include_ast(global_old_dir, [file_path], flag=0))
+        if get_file_ext(i) == '.h' and filter_ignore_file(file_path):
+            result_map = parse_file_result(diff_parser_include_ast(global_old_dir, [file_path], flag=0))
             for old_info in result_map.values():
                 diff_info_list.extend(judgment_entrance(old_info, None))

@@ -308,8 +345,8 @@ def add_file(dir_path):
         file_path = os.path.join(dir_path, i)
         if os.path.isdir(file_path):
             add_file(file_path)
-        if get_file_ext(i) == '.h':
-            result_map = parse_file_result(parser_include_ast(global_new_dir, [file_path], flag=1))
+        if get_file_ext(i) == '.h' and filter_ignore_file(file_path):
+            result_map = parse_file_result(diff_parser_include_ast(global_new_dir, [file_path], flag=1))
             for new_info in result_map.values():
                 diff_info_list.extend(judgment_entrance(None, new_info))

@@ -322,16 +359,36 @@ def parse_file_result(result, data_type=0):
     """
     result_map = {}
     for root_node in result:
+        if root_node['syntax_error'] != 'NA':
+            error_file_path = os.path.abspath(os.path.join(root_node['gn_path'], root_node['name']))
+            error_message_dict = {
+                'current_file': error_file_path,
+                'error_message': root_node['syntax_error']
+            }
+            syntax_file_list.append(error_message_dict)
+        result_map.setdefault(f'{root_node["name"]}-{root_node["kind"]}', root_node)
         if data_type != 1:
             parse_file_result_by_child(result_map, root_node)
-        result_map.setdefault(f'{root_node["name"]}-{root_node["kind"]}', root_node)
     return result_map


+def process_empty_name(data_info: dict, result_map):
+    data_current_file = os.path.split(data_info['location']['location_path'])[1]
+    if data_info['kind'] == 'ENUM_DECL' and 'members' in data_info and data_current_file in data_info['type']:
+        for element in data_info['members']:
+            result_map.setdefault(f'{data_current_file}-{element["name"]}', element)
+    elif data_info['kind'] == 'ENUM_DECL' and 'members' in data_info and (data_current_file not in data_info['type']):
+        result_map.setdefault(f'{data_current_file}-{data_info["type"]}', data_info)
+    elif (data_info['kind'] == 'STRUCT_DECL' or data_info['kind'] == 'UNION_DECL') and \
+            (data_current_file not in data_info['type']):
+        result_map.setdefault(f'{data_current_file}-{data_info["type"]}', data_info)
+
+
 def parse_file_result_by_child(result_map, root_node):
     children_list = root_node['children']
     for children in children_list:
         if children["name"] == '':
+            process_empty_name(children, result_map)
             continue
         result_map.setdefault(f'{children["name"]}-{children["kind"]}', children)
     del root_node['children']
diff --git a/build-tools/capi_parser/src/coreImpl/diff/diff_processor_node.py b/build-tools/capi_parser/src/coreImpl/diff/diff_processor_node.py
index a0deb5b436a09cf167b02143a8dd66277d237017..c4c1b6a3a217036472d4d7b0f4e6c52cc9325157 100644
--- a/build-tools/capi_parser/src/coreImpl/diff/diff_processor_node.py
+++ b/build-tools/capi_parser/src/coreImpl/diff/diff_processor_node.py
@@ -749,6 +749,27 @@ def process_typedef_child(old_child, new_child, diff_typedef_list):
         diff_typedef_list.extend(special_data)


+def process_anonymous_enum_member(old_info, new_info):
+    result_obj_list = []
+    if old_info and new_info:
+        if old_info['name'] != new_info['name']:
+            change_message_obj = DiffInfo(DiffType.ENUM_MEMBER_NAME_CHANGE, old_info['name'], new_info['name'])
+            result_obj_list.append(wrap_diff_info(old_info, new_info, change_message_obj))
+        if old_info['value'] != new_info['value']:
+            change_message_obj = DiffInfo(DiffType.ENUM_MEMBER_VALUE_CHANGE, old_info['value'], new_info['value'])
+            result_obj_list.append(wrap_diff_info(old_info, new_info, change_message_obj))
+    else:
+        if old_info:
+            change_message_obj = DiffInfo(DiffType.ENUM_MEMBER_REDUCE, old_info['node_content']['content'], 'NA')
+            result_obj_list.append(wrap_diff_info(old_info, new_info, change_message_obj))
+
+        elif new_info:
+            change_message_obj = DiffInfo(DiffType.ENUM_MEMBER_ADD, 'NA', new_info['node_content']['content'])
+            result_obj_list.append(wrap_diff_info(old_info, new_info, change_message_obj))
+
+    return result_obj_list
+
+
 process_data = {
     Scene.FUNCTION_DECL.value: process_function,
     Scene.MACRO_DEFINITION.value: process_define,
@@ -756,14 +777,38 @@
     Scene.UNION_DECL.value: process_union,
     Scene.ENUM_DECL.value: process_enum,
     Scene.VAR_DECL.value: process_variable_const,
-    Scene.TYPEDEF_DECL.value: process_typedef
+    Scene.TYPEDEF_DECL.value: process_typedef,
+    Scene.ENUM_CONSTANT_DECL.value: process_anonymous_enum_member,
 }


+def get_ch_api_kind(dict_key):
+    if dict_key == Scene.ENUM_CONSTANT_DECL.value:
+        key = 'ENUM_DECL'
+    else:
+        key = dict_key
+    api_kind_dict = {
+        'FUNCTION_DECL': '函数类型',
+        'MACRO_DEFINITION': '宏定义类型',
+        'STRUCT_DECL': '结构体类型',
+        'UNION_DECL': '联合体类型',
+        'ENUM_DECL': '枚举类型',
+        'VAR_DECL': '常/变量类型',
+        'TYPEDEF_DECL': '重命名类型',
+        'TRANSLATION_UNIT': 'NA'
+    }
+    return api_kind_dict.get(key)
+
+
 def collect_change_data_total(data: dict, diff_info_list):
     for element in diff_info_list:
         element.set_api_node_name(data['name'])
+        if (data['kind'] == Scene.STRUCT_DECL.value or data['kind'] == Scene.UNION_DECL.value) and (not data['name']):
+            element.set_api_node_name(data['type'])
         element.set_current_api_unique_id(data['unique_id'])
+        element.set_open_close_api(data['open_close_api'])
+        element.set_is_third_party_api(data['is_third_party_api'])
+        element.set_current_api_type(get_ch_api_kind(data['kind']))
     change_data_total.append(diff_info_list)


@@ -780,17 +825,18 @@ def process_add_node(add_infor, key_extern, struct_union_enum):
         return diff_info_list
     if 'is_extern' in add_infor and add_infor['is_extern']:
         key_extern = True
-    diff_type = DiffType.ADD_API
-    old_api_content = 'NA'
-    if add_infor['kind'] in struct_union_enum:
-        new_api_content = add_infor['type']
+    if add_infor['kind'] == Scene.ENUM_CONSTANT_DECL.value:
+        result_obj_list = process_anonymous_enum_member(old_infor, add_infor)
+        diff_info_list.extend(result_obj_list)
     else:
-        new_api_content = add_infor['node_content']['content']
-    diff_info_list.append(wrap_diff_info(old_infor, add_infor, DiffInfo(diff_type,
-                                                                        old_api_content, new_api_content)))
-    if diff_type == DiffType.ADD_API:
-        set_is_api_change_result(diff_info_list, key_extern)
-        collect_change_data_total(add_infor, diff_info_list)
+        if add_infor['kind'] in struct_union_enum:
+            new_api_content = add_infor['type']
+        else:
+            new_api_content = add_infor['node_content']['content']
+        diff_info_list.append(wrap_diff_info(old_infor, add_infor, DiffInfo(DiffType.ADD_API,
+                                                                            'NA', new_api_content)))
+    set_is_api_change_result(diff_info_list, key_extern)
+    collect_change_data_total(add_infor, diff_info_list)
     return diff_info_list


@@ -802,14 +848,16 @@
         return diff_info_list
     if 'is_extern' in reduce_infor and reduce_infor['is_extern']:
         key_extern = True
-    diff_type = DiffType.REDUCE_API
-    new_api_content = 'NA'
-    if reduce_infor['kind'] in struct_union_enum:
-        old_api_content = reduce_infor['type']
+    if reduce_infor['kind'] == Scene.ENUM_CONSTANT_DECL.value:
+        result_obj_list = process_anonymous_enum_member(reduce_infor, new_infor)
+        diff_info_list.extend(result_obj_list)
     else:
-        old_api_content = reduce_infor['node_content']['content']
-    diff_info_list.append(wrap_diff_info(reduce_infor, new_infor, DiffInfo(diff_type,
-                                                                           old_api_content, new_api_content)))
+        if reduce_infor['kind'] in struct_union_enum:
+            old_api_content = reduce_infor['type']
+        else:
+            old_api_content = reduce_infor['node_content']['content']
+        diff_info_list.append(wrap_diff_info(reduce_infor, new_infor, DiffInfo(DiffType.REDUCE_API,
+                                                                               old_api_content, 'NA')))
     set_is_api_change_result(diff_info_list, key_extern)
     collect_change_data_total(reduce_infor, diff_info_list)
diff --git a/build-tools/capi_parser/src/coreImpl/parser/generating_tables.py b/build-tools/capi_parser/src/coreImpl/parser/generating_tables.py
index 158d76bb828c789afb30e1a8d74fa2a1382b1a69..e109bfe95bdfd5aa02a9b379d9d05a90ca3a5245 100644
--- a/build-tools/capi_parser/src/coreImpl/parser/generating_tables.py
+++ b/build-tools/capi_parser/src/coreImpl/parser/generating_tables.py
@@ -85,8 +85,6 @@ def get_result_api(file_data, result_api):
     if 'children' in file_data:
         for item1 in file_data["children"]:  # 抛开根节点
             if (item1["kind"] == 'FUNCTION_DECL' or item1["kind"] == 'VAR_DECL') and item1["is_extern"]:
-                differ_infor = difference_api(item1)
-                item1['differ_infor'] = differ_infor
                 item = filter_func(item1)
                 result_api.append(item)

@@ -151,7 +149,8 @@
             api.get('location_path'),
             api.get('sub_system'),
             api.get('unique_id'),
-            api.get('differ_infor')
+            api.get('open_close_api'),
+            api.get('is_third_party_api')
         ]
         collated_data_total.append(collated_data)
     return collated_data_total
@@ -161,7 +160,7 @@
 def generate_excel(array, name, generate_json_unique, original_json_unique):
     first_line_infor = ['模块名', '类名', '方法名', '函数', '类型', '起始版本', '废弃版本', 'syscap', '错误码',
                        '是否为系统API', '模型限制', '权限', '是否支持跨平台', '是否支持卡片应用', '是否支持高阶API',
-                       '装饰器', 'kit', '文件路径', '子系统', '接口全路径', '开源/闭源/三方库API']
+                       '装饰器', 'kit', '文件路径', '子系统', '接口全路径', '开源/闭源API', '是否是三方库API']
     workbook = openpyxl.Workbook()
     work_sheet1 = workbook.active
     work_sheet1.title = '对比结果'
@@ -184,7 +183,7 @@ def write_information_to_worksheet(work_sheet, information_data):
         write_data = data[0], data[1], data[2], data[3], data[4], \
             data[5], data[6], data[7], data[8], data[9], \
             data[10], data[11], data[12], data[13], data[14], \
-            data[15], data[16], data[17], data[18], data[19], data[20]
+            data[15], data[16], data[17], data[18], data[19], data[20], data[21]
         work_sheet.append(write_data)
diff --git a/build-tools/capi_parser/src/coreImpl/parser/parse_include.py b/build-tools/capi_parser/src/coreImpl/parser/parse_include.py
index a067d98afeea9306f53ebf43223e9c9579411467..446ddfc8de86f5f943202a09946df3dc8000a873 100644
--- a/build-tools/capi_parser/src/coreImpl/parser/parse_include.py
+++ b/build-tools/capi_parser/src/coreImpl/parser/parse_include.py
@@ -25,7 +25,7 @@ from clang.cindex import CursorKind
 from clang.cindex import TypeKind
 from utils.constants import StringConstant
 from utils.constants import RegularExpressions
-from typedef.parser.parser import NodeKind
+from typedef.parser.parser import NodeKind, DifferApiInfor, DifferApiRegular

 line_dist = {}

@@ -149,13 +149,26 @@ def processing_def(cursor, data):  # 处理宏定义
             print('mar_define error, its content is none')
     if text:
         text = text.strip()  # 删除两边的字符(默认是删除左右空格)
-        data['text'] = text
+        data['text'] = text
     data["type"] = "def_no_type"


+def difference_api(api_data: dict):
+    api_name = api_data['name']
+    closed_pattern = DifferApiRegular.CLOSED_SOURCE_API_REGULAR.value
+    open_pattern = DifferApiRegular.OPEN_SOURCE_API_REGULAR.value
+    if re.search(closed_pattern, api_name, flags=re.IGNORECASE):
+        api_data['open_close_api'] = DifferApiInfor.CLOSED_SOURCE_API.value
+    elif re.search(open_pattern, api_name, flags=re.IGNORECASE):
+        api_data['open_close_api'] = DifferApiInfor.OPEN_SOURCE_API.value
+    else:
+        api_data['is_third_party_api'] = True
+
+
 def processing_func(cursor, data):  # 处理函数
     data["return_type"] = cursor.result_type.spelling  # 增加返回类型键值对
     judgment_extern(cursor, data)
+    difference_api(data)


 def processing_type(cursor, data):  # 没有类型的节点处理
@@ -197,6 +210,12 @@
 }


+def process_members_class_name(data: dict, parent_cursor):
+    file_name = os.path.split(data['location']['location_path'])[1]
+    if (not data['name']) and (file_name not in parent_cursor.type.spelling):
+        data['class_name'] = '{}-{}'.format(file_name, parent_cursor.type.spelling)
+
+
 def get_api_unique_id(cursor, loc, data):
     unique_id = ''
     if cursor.kind == CursorKind.MACRO_DEFINITION:
@@ -213,7 +232,7 @@
             parent_name_str = ''
         elif parent_of_cursor.kind.name in struct_union_enum:
             parent_name_str = parent_of_cursor.type.spelling
-            data['class_name'] = parent_of_cursor.spelling
+            process_members_class_name(data, parent_of_cursor)
         else:
             parent_name_str = parent_of_cursor.spelling
     except ValueError:
@@ -227,6 +246,20 @@
     return unique_id


+def get_node_class_name(data):
+    struct_union_enum = [NodeKind.STRUCT_DECL.value, NodeKind.UNION_DECL.value,
+                         NodeKind.ENUM_DECL.value]
+    current_file_name = os.path.split(data["location"]["location_path"])[1]
+    if data.get('kind') in struct_union_enum and 'class_name' in data:
+        class_name = '{}-{}'.format(current_file_name, data["name"])
+        if (not data["name"]) and (current_file_name not in data["type"]):
+            class_name = '{}-{}'.format(current_file_name, data["type"])
+    else:
+        class_name = current_file_name
+
+    return class_name
+
+
 def processing_special_node(cursor, data, key, gn_path):  # 处理需要特殊处理的节点
     if key == 0:
         location_path = cursor.spelling
@@ -244,9 +277,12 @@ def processing_special_node(cursor, data, key, gn_path):  # 处理需要特殊
         relative_path = os.path.relpath(location_path, gn_path)  # 获取头文件相对路
         loc["location_path"] = relative_path
     data["location"] = loc
+    data["class_name"] = get_node_class_name(data)
     data["unique_id"] = get_api_unique_id(cursor, loc, data)
     if key == 0:
         data["unique_id"] = data["name"]
+    syntax_error_message = diagnostic_callback(cursor.translation_unit.diagnostics, gn_path)
+    data["syntax_error"] = syntax_error_message
     if kind_name in special_node_process.keys():
         node_process = special_node_process[kind_name]
         node_process(cursor, data)  # 调用对应节点处理函数
@@ -302,11 +338,36 @@
         "form": 'NA',
         "atomic_service": 'NA',
         "decorator": 'NA',
-        "unique_id": ''
+        "unique_id": '',
+        "syntax_error": 'NA',
+        "open_close_api": 'NA',
+        "is_third_party_api": False
     }
     return data


+def diagnostic_callback(diagnostic, dir_path):
+    # 获取诊断信息的详细内容
+    syntax_error_message = 'NA'
+    key = 0
+    for dig in diagnostic:
+        file_path = f"{dig.location.file}"
+        try:
+            file_path = os.path.relpath(os.path.normpath(file_path), dir_path)
+        except ValueError:
+            pass
+        line = dig.location.line
+        message = dig.spelling
+        # 输出诊断信息
+        error_message = f"{file_path}:{line}\n错误信息:{message}"
+        if 0 == key:
+            syntax_error_message = error_message
+            key = 1
+        else:
+            syntax_error_message = '{}\n{}'.format(syntax_error_message, error_message)
+    return syntax_error_message
+
+
 def parser_data_assignment(cursor, current_file, gn_path, comment=None, key=0):
     data = get_default_node_data(cursor, gn_path)
     get_comment(cursor, data)
@@ -323,10 +384,6 @@
     data["kind"] = cursor.kind.name
     if cursor.kind.name == CursorKind.MACRO_DEFINITION.name:
         define_comment(cursor, current_file, data)
-    struct_union_enum = [NodeKind.STRUCT_DECL.value, NodeKind.UNION_DECL.value,
-                        NodeKind.ENUM_DECL.value]
-    if data.get('kind') in struct_union_enum and 'class_name' in data:
-        data['class_name'] = data.get('name')
     get_syscap_value(data)
     get_since_value(data)
     get_kit_value(data)
@@ -564,6 +621,7 @@ def api_entrance(share_lib, include_path, gn_path, link_path):  # 统计入口
     # options赋值为如下,代表宏定义解析数据也要
     args = ['-I{}'.format(path) for path in link_path]
     args.append('-std=c99')
+    args.append('--target=aarch64-linux-musl')
     options = clang.cindex.TranslationUnit.PARSE_DETAILED_PROCESSING_RECORD

     data_total = []  # 列表对象-用于统计
diff --git a/build-tools/capi_parser/src/coreImpl/parser/parser.py b/build-tools/capi_parser/src/coreImpl/parser/parser.py
index 5ab20372e1dafd9f410fa0fbfd89c0bf1208ffb5..1adac3f2ecd1379a1c0809afd72484445e0bbae5 100644
--- a/build-tools/capi_parser/src/coreImpl/parser/parser.py
+++ b/build-tools/capi_parser/src/coreImpl/parser/parser.py
@@ -267,10 +267,8 @@ def find_include(link_include_path):

 def copy_self_include(link_include_path, self_include_file):
     for dir_path, dir_name, file_name_list in os.walk(self_include_file):
-        for element in dir_name:
-            dir_path_name = os.path.abspath(os.path.join(dir_path, element))
-            if 'sysroot_myself' not in dir_path and dir_path_name not in link_include_path:
-                link_include_path.append(dir_path_name)
+        if 'sysroot_myself' not in dir_path and 'build-tools' not in dir_path and dir_path not in link_include_path:
+            link_include_path.append(dir_path)


 def delete_typedef_child(child):
@@ -294,19 +292,11 @@ def parser(directory_path):  # 目录路径
     return data_total


-def parser_include_ast(dire_file_path, include_path: list, flag=-1):  # 对于单独的.h解析接口
+def parser_include_ast(dire_file_path, include_path: list):  # 对于单独的.h解析接口
     correct_include_path = []
     link_include_path = [dire_file_path]
-    # 针对check
-    if -1 == flag:
-        copy_std_lib(link_include_path, dire_file_path)
-        link_include(dire_file_path, StringConstant.FUNK_NAME.value, link_include_path)
-    # 针对diff
-    else:
-        copy_std_lib(link_include_path)
-        find_include(link_include_path)
-        if len(link_include_path) <= 2:
-            copy_self_include(link_include_path, dire_file_path)
+    copy_std_lib(link_include_path, dire_file_path)
+    copy_self_include(link_include_path, dire_file_path)
     for item in include_path:
         split_path = os.path.splitext(item)
         if split_path[1] == '.h':  # 判断.h结尾
@@ -322,13 +312,25 @@ def parser_include_ast(dire_file_path, include_path: list, flag=-1):  #
     return data


+def diff_parser_include_ast(dire_file_path, include_path: list, flag=-1):  # 对于单独的.h解析接口
+    link_include_path = [dire_file_path]
+    copy_self_include(link_include_path, dire_file_path)
+    data = parse_include.get_include_file(include_path, link_include_path, dire_file_path)
+
+    for item in data:
+        if 'children' in item:
+            for child in item['children']:
+                delete_typedef_child(child)
+
+    return data
+
+
 def get_dir_file_path(dir_path):
     file_path_list = []
     link_include_path = []  # 装链接头文件路径
     for dir_path, dir_names, filenames in os.walk(dir_path):
-        for dir_name in dir_names:
-            if 'build-tools' not in dir_path and 'sysroot_myself' not in dir_path:
-                link_include_path.append(os.path.join(dir_path, dir_name))
+        if 'sysroot_myself' not in dir_path and 'build-tools' not in dir_path and dir_path not in link_include_path:
+            link_include_path.append(dir_path)
         for file in filenames:
             if 'build-tools' not in dir_path and 'sysroot_myself' not in dir_path and file.endswith('.h'):
                 file_path_list.append(os.path.join(dir_path, file))
diff --git a/build-tools/capi_parser/src/typedef/diff/diff.py b/build-tools/capi_parser/src/typedef/diff/diff.py
index 897c320ba71011f093e2668d18c51f3106accf7e..d9a7b4b3db1052683c80f587685ad2b2070307ba 100644
--- a/build-tools/capi_parser/src/typedef/diff/diff.py
+++ b/build-tools/capi_parser/src/typedef/diff/diff.py
@@ -116,6 +116,7 @@ class Scene(enum.Enum):
     VAR_DECL = 'VAR_DECL'
     TYPEDEF_DECL = 'TYPEDEF_DECL'
     TRANSLATION_UNIT = 'TRANSLATION_UNIT'
+    ENUM_CONSTANT_DECL = 'ENUM_CONSTANT_DECL'


 class TAGS(enum.Enum):
@@ -229,6 +230,7 @@ compatible_list = [
     DiffType.ADD_API,
     DiffType.ADD_DOC,
     DiffType.ADD_DOC_TAG,
+    DiffType.ENUM_MEMBER_ADD,
     DiffType.FUNCTION_PARAM_NAME_CHANGE,
     DiffType.DOC_TAG_ADDTOGROUP_NA_TO_HAVE,
     DiffType.DOC_TAG_ADDTOGROUP_HAVE_TO_NA,
@@ -386,6 +388,9 @@ class DiffInfo:
     operation_diff_type: str = ''
     old_differ_content: str = ''
     new_differ_content: str = ''
+    open_close_api: str = ''
+    is_third_party_api = False
+    current_api_type: str = ''

     def __init__(self, diff_type: DiffType, old_differ_content, new_differ_content):
         self.diff_type = diff_type
@@ -540,6 +545,24 @@
     def get_new_differ_content(self):
         return self.new_differ_content

+    def set_open_close_api(self, open_close_api):
+        self.open_close_api = open_close_api
+
+    def get_open_close_api(self):
+        return self.open_close_api
+
+    def set_is_third_party_api(self, is_third_party_api):
+        self.is_third_party_api = is_third_party_api
+
+    def get_is_third_party_api(self):
+        return self.is_third_party_api
+
+    def set_current_api_type(self, current_api_type):
+        self.current_api_type = current_api_type
+
+    def get_current_api_type(self):
+        return self.current_api_type
+

 class OutputJson:
     api_name: str = ''
@@ -595,6 +618,10 @@
     compatible_total = False
     unique_id: str = ''
     is_system_api = False
+    open_close_api: str = ''
+    is_third_party_api = False
+    api_type: str = ''
+    current_api_type: str = ''

     def set_api_name(self, api_name):
         self.api_name = api_name
@@ -679,3 +706,41 @@

     def get_is_system_api(self):
         return self.is_system_api
+
+    def set_open_close_api(self, open_close_api):
+        self.open_close_api = open_close_api
+
+    def get_open_close_api(self):
+        return self.open_close_api
+
+    def set_is_third_party_api(self, is_third_party_api):
+        self.is_third_party_api = is_third_party_api
+
+    def get_is_third_party_api(self):
+        return self.is_third_party_api
+
+    def set_api_type(self, api_type):
+        self.api_type = api_type
+
+    def get_api_type(self):
+        return self.api_type
+
+    def set_current_api_type(self, current_api_type):
+        self.current_api_type = current_api_type
+
+    def get_current_api_type(self):
+        return self.current_api_type
+
+
+class IgnoreFileDirectory:
+    ignore_file_dict = {
+        'arm-linux-ohos': '',
+        'aarch64-linux-ohos': '',
+        'x86_64-linux-ohos': '',
+        'i686-linux-ohos': '',
+        'tee': '',
+        'linux': ''
+    }
+
+    def get_ignore_file_dict(self):
+        return self.ignore_file_dict
diff --git a/build-tools/capi_parser/src/utils/constants.py b/build-tools/capi_parser/src/utils/constants.py
index 9c67d962b8669b9dda3a6868012349fb0d908c90..579737ce9a20acb19c77009a8558a2cc7aff7e5b 100644
--- a/build-tools/capi_parser/src/utils/constants.py
+++ b/build-tools/capi_parser/src/utils/constants.py
@@ -24,15 +24,15 @@ class StringConstant(enum.Enum):
     REPLACE_WAREHOUSE = '\\interface_sdk_c\\interface_sdk_c'  # 拉到本地仓路径(去掉磁盘)
     # 拉到本地仓的三方库绝对路径
     INCLUDE_LIB = r'.\third_party\musl\ndk_musl_include'
-    STD_INCLUDE = r'.\sysroot_myself\std_include_files'
-    CREATE_LIB_PATH = r'.\sysroot_myself\$is_headers_out_dir'
-    SELF_INCLUDE = r'.\sysroot_myself\self_include_files'
-    SELF_INCLUDE_OLD = r'.\sysroot_myself\self_include_files_old'
-    SELF_INCLUDE_NEW = r'.\sysroot_myself\self_include_files_new'
-    SYSROOT = r'.\sysroot_myself'
+    STD_INCLUDE = r'./sysroot_myself/std_include_files'
+    CREATE_LIB_PATH = r'./sysroot_myself/$is_headers_out_dir'
+    SELF_INCLUDE = r'./sysroot_myself/self_include_files'
+    SELF_INCLUDE_OLD = r'./sysroot_myself/self_include_files_old'
+    SELF_INCLUDE_NEW = r'./sysroot_myself/self_include_files_new'
+    SYSROOT = r'./sysroot_myself'
     RESULT_HEAD_NAME = "result_total.xlsx"
     PARSER_DIRECT_EXCEL_NAME = 'parser_direct_data.xlsx'
-    FILE_LEVEL_API_DATA = r'.\api_kit_c.json'
+    FILE_LEVEL_API_DATA = r'./api_kit_c.json'


 class RegularExpressions(enum.Enum):