#!/usr/bin/env python
# coding=utf-8
##############################################
# Copyright (c) 2021-2022 Huawei Device Co., Ltd.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##############################################

import json
import os
import glob
import re
import shutil
from utils.constants import StringConstant, RegularExpressions
from typedef.parser.parser import ParserGetResultTable, OneFileApiMessage, NodeKind
from coreImpl.parser import parse_include, generating_tables  # include-file parser and result-table generator


def find_gn_file(directory):  # find all GN files under the given directory
    gn_files = []
    for root, _, files in os.walk(directory):  # os.walk yields (dirpath, dirnames, filenames)
        for file in files:
            if file.endswith(".gn"):
                gn_files.append(os.path.join(root, file))
    return gn_files


def find_h_file(matches, f, sources):
    for mat in matches:
        # SOURCES matches sources = \[[^\]]*\] (the bracketed list: one or more non-']' characters);
        # \s* matches zero or more whitespace characters
        f.seek(mat.span()[0])
        content = f.read()
        pattern = RegularExpressions.SOURCES.value
        sources_match = re.search(pattern, content)
        if sources_match:
            sources_value = sources_match.group(0)  # the full matched string
            sources_value = re.sub(r'\s', '', sources_value)  # strip whitespace (newlines and spaces)
            pattern = RegularExpressions.INCLUDE_H.value  # match the quoted entries to find the .h files
            source = re.findall(pattern, sources_value)
            sources.extend(source)


def find_function_file(file, function_name):  # find the given function in a GN file and collect its sources values
    with open(file, 'r') as f:
        content = f.read()  # read the whole file
        pattern = ''.join([r'\b', re.escape(function_name), r'\b'])  # \b ensures an exact match of the function name
        matches = list(re.finditer(pattern, content))  # materialize the matches so they can be tested and reused
        f.seek(0)  # rewind to the start of the file
        sources = []  # all .h entries from matched sources (there may be more than one headers target)
        if matches:  # any match found?
            find_h_file(matches, f, sources)
        return matches, sources


def find_dest_dir(matches, content, f):
    sources_dir = []
    if matches:
        end = 0
        for _ in matches:
            pattern = RegularExpressions.DEST_DIR.value
            source_match = re.search(pattern, content)
            if source_match:
                con = source_match.group(1)
                sources_dir.append(con)
                end += source_match.end()  # record the end offset of this match
                f.seek(end)  # move the file cursor to that offset
                content = f.read()
    return sources_dir


def get_dest_dir(file, function_name):  # collect the dest_dir values
    with open(file, 'r') as f:
        content = f.read()  # read the whole file
        pattern = ''.join([r'\b', re.escape(function_name), r'\b'])  # \b ensures an exact match of the function name
        matches = re.findall(pattern, content)
        f.seek(0)
        sources_dir = find_dest_dir(matches, content, f)
    return sources_dir


def find_json_file(gn_file_match):  # find .json files in the same directory as the GN file
    match_json_file = []
    directory = os.path.dirname(gn_file_match)
    for file in glob.glob(os.path.join(directory, "*.json")):  # collect .json files
        match_json_file.append(file)
    return match_json_file


def dire_func(gn_file, func_name):  # collect the matching GN file, its json files and headers
    matches_file_total = []  # GN files that contain the headers function (ohos_ndk_headers)
    json_file_total = []  # json files next to those GN files
    source_include = []  # .h files listed in sources
    matches, source = find_function_file(gn_file, func_name)  # look for the function in the GN file
    if matches:  # the function was found (source may still be empty)
        source_include = source  # the header file list
        matches_file_total.append(gn_file)  # this GN file contains the function (and thus sources)
        json_file_total.extend(find_json_file(gn_file))  # .json files in the same directory

    return matches_file_total, json_file_total, source_include


def change_json_file(dict_data, name):  # write the parsed data to a json file
    file_name = name + '_new' + '.json'  # json file name
    with open(file_name, 'w', encoding='UTF-8') as f:
        # ensure_ascii=False keeps non-ASCII text readable; indent=4 pretty-prints the dict
        json.dump(dict_data, f, ensure_ascii=False, indent=4)
    return file_name


def change_abs(include_files, dire_path):  # build absolute paths for the .h files
    abs_path = []
    for j_item in include_files:  # join paths to build an absolute path
        # os.path.normpath(path) normalizes the path, replacing separators with the OS-specific one;
        # the normalized path is then joined with the GN file's directory
        if os.path.isabs(j_item):  # GN-style absolute path: prepend the drive; otherwise join with the GN directory
            head = os.path.splitdrive(dire_path)  # split off the Windows drive
            include_file = j_item
            change_path = head[1].split('interface_sdk_c')
            replace_path = os.path.normpath(os.path.join(change_path[0], 'interface_sdk_c'))
            if 'third_party/node/src' in j_item:
                include_file = include_file.replace('//',
                                                    '{}{}'.format(replace_path, '/'))
            else:
                # replace the leading '//interface/sdk_c' of the GN path with the interface_sdk_c root
                include_file = include_file.replace('//interface/sdk_c',
                                                    replace_path)
            if head:
                include_file = os.path.join(head[0], include_file)  # join the drive and the path
                include_file = os.path.normpath(include_file)
            abs_path.append(include_file)
        else:
            relative_path = os.path.abspath(os.path.join(dire_path, os.path.normpath(j_item)))  # resolves ../ and .
            abs_path.append(relative_path)
    return abs_path


def get_result_table(json_files, abs_path, link_path, gn_path):  # process the data and generate the result table
    compare_result_list = []
    head_name = ""
    generate_data_only = []
    original_data_only = []
    parser_file_data = []
    if json_files:
        file_name = os.path.split(json_files[0])  # take the first json name (json_files is a list)
        file_name = os.path.splitext(file_name[1])  # splitext returns a (stem, ext) tuple
        parser_file_data = parse_include.get_include_file(abs_path, link_path, gn_path)  # data returned by the parser
        parser_json_name = change_json_file(parser_file_data, file_name[0])  # write the parsed data to a json file
        # after parsing, compare the two json files and generate the data table
        (compare_result_list, head_name, generate_data_only,
         original_data_only) = generating_tables.get_json_file(parser_json_name, json_files)

    elif abs_path:
        file_name = os.path.splitext(os.path.split(abs_path[0])[1])
        parser_file_data = parse_include.get_include_file(abs_path, link_path, gn_path)
        parser_json_name = change_json_file(parser_file_data, file_name[0])  # write the parsed data to a json file
        (compare_result_list, head_name, generate_data_only,
         original_data_only) = generating_tables.get_parser_json_data(parser_json_name, parser_file_data)

    obj_data = ParserGetResultTable(compare_result_list, head_name,
                                    generate_data_only, original_data_only, parser_file_data)

    return obj_data


def create_dir(sources_dir, gn_file, function_name, link_include_file):
    if sources_dir:
        for item in sources_dir:
            directory = item
            new_dire = os.path.join('sysroot_myself', directory)
            new_dire = os.path.normpath(new_dire)
            if not os.path.exists(new_dire):
                os.makedirs(new_dire)
            if new_dire not in link_include_file:
                link_include_file.append(new_dire)  # add to the linked include directories
            match_files, json_files, include_files = dire_func(gn_file, function_name)
            dire_path = os.path.dirname(gn_file)  # directory of the GN file
            if match_files:
                dir_copy(include_files, dire_path, new_dire)
            else:
                print("create_dir: the GN file does not meet the conditions")
    else:
        print("the GN file has no ohos_sdk_headers")


def dir_copy(include_files, dire_path, new_dire):
    abs_path = change_abs(include_files, dire_path)  # absolute paths of the .h files
    for j_item in abs_path:
        shutil.copy(j_item, new_dire)


def link_include(directory_path, function_names, link_include_file):
    gn_file_total = find_gn_file(directory_path)  # find the GN files
    for item in gn_file_total:  # process each GN file
        sources_dir = get_dest_dir(item, function_names)
        if sources_dir:
            create_dir(sources_dir, item, function_names, link_include_file)


def main_entrance(directory_path, function_names, link_path):  # main entry
    gn_file_total = find_gn_file(directory_path)  # find the GN files
    compare_result_list_total = []
    generate_data_only_total = []
    original_data_only_total = []
    data_total = []  # all parsed data
    for item in gn_file_total:  # process each GN file
        match_files, json_files, include_files = dire_func(item, function_names)
        dire_path = os.path.dirname(item)  # directory of the GN file
        if include_files:  # the GN file meets the conditions
            abs_path = change_abs(include_files, dire_path)  # absolute paths of the .h files
            # receive the comparison result
            data_result = get_result_table(json_files, abs_path, link_path, directory_path)
            data_total.append(data_result.parser_data)
            if len(data_result.compare_result_list) != 0:
                compare_result_list_total.extend(data_result.compare_result_list)
                generate_data_only_total.extend(data_result.generate_data_only)
                original_data_only_total.extend(data_result.original_data_only)
            elif data_result.head_name == "":
                print("no json file next to the GN file")
            else:
                generate_data_only_total.extend(data_result.generate_data_only)
                original_data_only_total.extend(data_result.original_data_only)
                print("no matching items")
        else:
            print("the GN file has no headers function")
    generating_tables.generate_excel(compare_result_list_total, StringConstant.RESULT_HEAD_NAME.value,
                                     generate_data_only_total, original_data_only_total)

    obj_data_total = ParserGetResultTable(compare_result_list_total, '', generate_data_only_total,
                                          original_data_only_total, data_total)
    return obj_data_total


def copy_std_lib(link_include_file, root_path=''):
    if root_path:
        include_lib = os.path.abspath(os.path.join(root_path, StringConstant.INCLUDE_LIB.value))
    else:
        include_lib = StringConstant.INCLUDE_LIB.value
    std_include = StringConstant.STD_INCLUDE.value
    if not os.path.exists(std_include):
        try:
            shutil.copytree(include_lib, std_include)
        except OSError:
            pass
    if std_include not in link_include_file:
        link_include_file.append(std_include)


def find_include(link_include_path):
    for dir_path, _, _ in os.walk(StringConstant.CREATE_LIB_PATH.value):
        if dir_path not in link_include_path:
            link_include_path.append(dir_path)


def copy_self_include(link_include_path, self_include_file):
    for dir_path, dir_name, file_name_list in os.walk(self_include_file):
        for element in dir_name:
            dir_path_name = os.path.abspath(os.path.join(dir_path, element))
            if 'sysroot_myself' not in dir_path and dir_path_name not in link_include_path:
                link_include_path.append(dir_path_name)


def delete_typedef_child(child):
    if child['kind'] == 'TYPEDEF_DECL':
        if ('children' in child and len(child['children'])
                and child['children'][0]['kind'] in ('STRUCT_DECL', 'ENUM_DECL', 'UNION_DECL')):
            child['children'] = []


def parser(directory_path):  # directory path
    function_name = StringConstant.FUNK_NAME.value  # the function name to match

    link_include_path = []  # paths of linked include directories
    copy_std_lib(link_include_path)  # copy the standard headers into sysroot_myself
    find_include(link_include_path)
    link_include(directory_path, function_name, link_include_path)

    data_total = main_entrance(directory_path, function_name, link_include_path)  # call the main entry
    return data_total


def parser_include_ast(dire_file_path, include_path: list, flag=-1):  # parse interface for standalone .h files
    correct_include_path = []
    link_include_path = [dire_file_path]
    # for the check scenario
    if flag == -1:
        copy_std_lib(link_include_path, dire_file_path)
        link_include(dire_file_path, StringConstant.FUNK_NAME.value, link_include_path)
    # for the diff scenario
    else:
        copy_std_lib(link_include_path)
        find_include(link_include_path)
        if len(link_include_path) <= 2:
            copy_self_include(link_include_path, dire_file_path)
    for item in include_path:
        split_path = os.path.splitext(item)
        if split_path[1] == '.h':  # keep only .h files
            correct_include_path.append(item)

    data = parse_include.get_include_file(correct_include_path, link_include_path, dire_file_path)

    for item in data:
        if 'children' in item:
            for child in item['children']:
                delete_typedef_child(child)

    return data


def get_dir_file_path(dir_path):
    file_path_list = []
    link_include_path = []  # paths of linked include directories
    for root, dir_names, filenames in os.walk(dir_path):
        for dir_name in dir_names:
            if 'build-tools' not in root and 'sysroot_myself' not in root:
                link_include_path.append(os.path.join(root, dir_name))
        for file in filenames:
            if 'build-tools' not in root and 'sysroot_myself' not in root and file.endswith('.h'):
                file_path_list.append(os.path.join(root, file))

    return file_path_list, link_include_path


def get_file_api_num(file_data, kind_list):
    api_number = 0
    if 'children' in file_data:
        for child in file_data['children']:
            if 'kind' in child and child['kind'] in kind_list:
                api_number += 1
    return api_number


def get_file_api_dict(data_total):
    api_obj_total_list = []
    kind_list = [
        NodeKind.MACRO_DEFINITION.value,
        NodeKind.STRUCT_DECL.value,
        NodeKind.UNION_DECL.value,
        NodeKind.ENUM_DECL.value,
        NodeKind.FUNCTION_DECL.value,
        NodeKind.VAR_DECL.value
    ]
    for one_file_data in data_total:
        file_api_num = get_file_api_num(one_file_data, kind_list)
        if 'name' in one_file_data and 'kit_name' in one_file_data and 'sub_system' in one_file_data:
            api_message_obj = OneFileApiMessage(one_file_data['name'], one_file_data['kit_name'],
                                                one_file_data['sub_system'], file_api_num)
            api_message_obj.set_file_path(api_message_obj.get_file_path().replace('\\', '/'))
            current_file = os.path.dirname(__file__)
            kit_json_file_path = os.path.abspath(os.path.join(current_file,
                                                              r"kit_sub_system/c_file_kit_sub_system.json"))
            complete_kit_or_system(api_message_obj, kit_json_file_path)
            api_obj_total_list.append(api_message_obj)
    api_dict_total_list = obj_change_to_dict(api_obj_total_list)

    return api_dict_total_list


def obj_change_to_dict(obj_data: list):
    dict_list = []
    for element in obj_data:
        element_dict = {
            'filePath': element.file_path,
            'kitName': element.kit_name,
            'subSystem': element.sub_system,
            'apiNumber': element.api_number
        }
        dict_list.append(element_dict)

    return dict_list


def generate_file_api_json(dict_data, output_path=''):
    if not output_path:
        output_path = StringConstant.FILE_LEVEL_API_DATA.value
    with open(output_path, 'w', encoding='utf-8') as fs:
        json.dump(dict_data, fs, indent=4, ensure_ascii=False)


def complete_kit_or_system(api_message: OneFileApiMessage, json_path):
    if (not api_message.get_kit_name()) or (not api_message.get_sub_system()):
        kit_name, sub_system_name = parse_include.get_kit_system_data(json_path,
                                                                      api_message.get_file_path())
        if not api_message.get_kit_name():
            api_message.set_kit_name(kit_name)
        if not api_message.get_sub_system():
            api_message.set_sub_system(sub_system_name)


def parser_direct(path):  # directory path or a single .h file
    file_path_list = []
    link_include_path = []  # paths of linked include directories
    copy_std_lib(link_include_path)
    dir_path = ''
    if os.path.isdir(path):
        link_include_path.append(path)
        file_path_total, link_include_total = get_dir_file_path(path)
        file_path_list.extend(file_path_total)
        link_include_path.extend(link_include_total)
        dir_path = path
    else:
        if path.endswith('.h'):
            file_path_list.append(path)
            dir_path = os.path.dirname(path)
            link_include_path.append(dir_path)
    data_total = parse_include.get_include_file(file_path_list, link_include_path, dir_path)
    generating_tables.get_api_data(data_total, StringConstant.PARSER_DIRECT_EXCEL_NAME.value)

    return data_total


def parser_file_level(output_path):
    current_file = os.path.dirname(__file__)
    parser_path = os.path.abspath(os.path.join(current_file, r'../../../../..'))
    file_path_list = []
    link_include_path = []  # paths of linked include directories
    data_total = []
    if not os.path.isdir(parser_path):
        return data_total
    file_path_total, link_include_total = get_dir_file_path(parser_path)
    file_path_list.extend(file_path_total)
    link_include_path.extend(link_include_total)
    data_total = parse_include.get_include_file(file_path_list, link_include_path, parser_path)
    file_api_dict = get_file_api_dict(data_total)
    generate_file_api_json(file_api_dict, output_path)

    return data_total
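

# Illustrative usage sketch (an assumption, not part of the original module): it only shows
# how the public entry points parser(), parser_direct() and parser_file_level() defined above
# might be invoked by a caller. All paths used here are hypothetical placeholders.
if __name__ == '__main__':
    # Parse every GN-declared NDK header under an SDK interface directory (hypothetical path).
    sdk_result = parser('interface_sdk_c')
    # Parse a single header file directly and export the API table (hypothetical path).
    direct_result = parser_direct('interface_sdk_c/some_module/include/example.h')
    # Generate the file-level API statistics json at a chosen output location (hypothetical file).
    parser_file_level('file_api_data.json')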