#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2023 Huawei Device Co., Ltd.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import filecmp
import json
import os
import stat
import re
from collections import OrderedDict
import openpyxl as op
from coreImpl.parser.parser import diff_parser_include_ast
from coreImpl.diff.diff_processor_node import judgment_entrance, change_data_total
from typedef.diff.diff import OutputJson, ApiChangeData, IgnoreFileDirectory
from bin.write_md import write_md_entrance

global_old_dir = ''
global_new_dir = ''
diff_info_list = []
syntax_file_list = []


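# Module entry points (as defined below): start_diff_file() runs a full diff
# between two header directories and writes diff.xlsx, diff_result.txt, the
# write_md_entrance report and, when parsing fails, syntax_file_error.xlsx;
# check_diff_entrance() only collects and returns the diff records.

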
def get_modification_type_dict():
    modification_type_dict = {
        'API新增': 0,
        'API删除': 0,
        'API废弃': 0,
        'API修改': 0,
        'API修改(原型修改)': 0,
        'API修改(约束变化)': 0
    }
    return modification_type_dict


def get_compatible_dict():
    compatible_dict = {
        '兼容性': 0,
        '非兼容性': 0
    }
    return compatible_dict


def change_to_json(data):
    data_of_json = json.dumps(data, ensure_ascii=False, indent=4)
    return data_of_json


def get_api_change_obj(api_data):
    modification_type_dict = get_modification_type_dict()
    compatible_dict = get_compatible_dict()
    change_data_obj = ApiChangeData()
    key = 0
    api_unique_id = ''
    for element in api_data:
        api_unique_id = element.current_api_unique_id
        if key == 0:
            change_data_obj.set_api_name(element.api_node_name)
            change_data_obj.set_kit_name(element.kit_name)
            change_data_obj.set_sub_system(element.sub_system)
            change_data_obj.set_is_api_change(element.is_api_change)
            change_data_obj.set_current_api_type(element.current_api_type)
            change_data_obj.set_diff_type(element.operation_diff_type)
            change_data_obj.set_change_type(element.api_modification_type)
            change_data_obj.set_old_all_text(element.old_api_full_text)
            change_data_obj.set_new_all_text(element.new_api_full_text)
            change_data_obj.set_compatible_total(element.is_compatible)
            change_data_obj.set_is_system_api(element.is_system_api)
            change_data_obj.set_open_close_api(element.open_close_api)
            change_data_obj.set_is_third_party_api(element.is_third_party_api)
            key = 1
        else:
            old_all_text = '{}#&#{}'.format(change_data_obj.old_all_text, element.old_api_full_text)
            new_all_text = '{}#&#{}'.format(change_data_obj.new_all_text, element.new_api_full_text)
            diff_type_all = '{}#&#{}'.format(change_data_obj.get_diff_type(), element.operation_diff_type)
            change_type_all = '{}#&#{}'.format(change_data_obj.get_change_type(), element.api_modification_type)
            compatible_data_all = '{}#&#{}'.format(change_data_obj.get_compatible_total(), element.is_compatible)
            change_data_obj.set_old_all_text(old_all_text)
            change_data_obj.set_new_all_text(new_all_text)
            change_data_obj.set_diff_type(diff_type_all)
            change_data_obj.set_change_type(change_type_all)
            change_data_obj.set_compatible_total(compatible_data_all)
        if element.is_compatible and compatible_dict.get('兼容性') == 0:
            compatible_dict['兼容性'] = 1
        elif not element.is_compatible and compatible_dict.get('非兼容性') == 0:
            compatible_dict['非兼容性'] = 1
        if element.api_modification_type in modification_type_dict:
            modification_type_dict[element.api_modification_type] = 1
    if modification_type_dict.get('API修改(原型修改)') == 1 and modification_type_dict.get('API修改(约束变化)') == 1:
        modification_type_dict['API修改'] = 1
    compatible_str = change_to_json(compatible_dict)
    modification_type_str = change_to_json(modification_type_dict)
    change_data_obj.set_compatible(compatible_str)
    change_data_obj.set_change_num(modification_type_str)
    change_data_obj.set_unique_id(api_unique_id)

    return change_data_obj


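# Aggregation note for get_api_change_obj() above: when one API has several
# diff records, the first record supplies the metadata and the per-record
# texts, diff types, change types and compatibility flags are joined with the
# '#&#' separator, e.g. 'old prototype A#&#old prototype B' (illustrative
# values, not from a real header). compatible and change_num are stored as
# JSON strings built from get_compatible_dict() and get_modification_type_dict().

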
def collect_api_change(change_data: list):
    api_change_data = []
    for list_element in change_data:
        change_obj = get_api_change_obj(list_element)
        api_change_data.append(change_obj)

    return api_change_data


def collect_node_api_change(api_change_info_list):
    change_data = []
    for api_change_info in api_change_info_list:
        info_data = [
            api_change_info.api_name,
            api_change_info.kit_name,
            api_change_info.sub_system,
            api_change_info.is_api_change,
            api_change_info.current_api_type,
            api_change_info.diff_type,
            api_change_info.change_type,
            api_change_info.compatible,
            api_change_info.change_num,
            api_change_info.old_all_text,
            api_change_info.new_all_text,
            api_change_info.compatible_total,
            api_change_info.unique_id,
            api_change_info.is_system_api,
            api_change_info.open_close_api,
            api_change_info.is_third_party_api
        ]
        change_data.append(info_data)

    return change_data


def syntax_file_excel(output_path):
    data = []
    if syntax_file_list:
        for syntax_dict in syntax_file_list:
            info_data = [
                syntax_dict.get('current_file'),
                syntax_dict.get('error_message')
            ]
            data.append(info_data)

        wb = op.Workbook()
        ws = wb['Sheet']
        ws.title = '语法错误文件信息'
        ws.append(['当前文件', '错误信息'])
        for element in data:
            d = element[0], element[1]
            ws.append(d)
        output_path_xlsx = os.path.abspath(os.path.join(output_path, r'syntax_file_error.xlsx'))
        wb.save(output_path_xlsx)


def start_diff_file(old_dir, new_dir, output_path):
    result_info_list = global_assignment(old_dir, new_dir)
    total = change_data_total
    collect_api_change_data = collect_api_change(total)
    generate_excel(result_info_list, collect_api_change_data, output_path)
    syntax_file_excel(output_path)
    write_md_entrance(result_info_list, output_path)
    result_json = result_to_json(result_info_list)
    diff_result_path = r'./diff_result.txt'
    output_path_txt = os.path.abspath(os.path.join(output_path, diff_result_path))
    write_in_txt(result_json, output_path_txt)


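# Minimal usage sketch for start_diff_file() above (the paths below are
# placeholders, not values taken from this repository):
#
#     start_diff_file('./old/include', './new/include', './output')
#
# This writes diff.xlsx, diff_result.txt, the write_md_entrance report and,
# if any header failed to parse, syntax_file_error.xlsx into ./output.

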
def check_diff_entrance(old_dir, new_dir):
    result_info_list = global_assignment(old_dir, new_dir)

    return result_info_list


def disposal_result_data(result_info_list):
    data = []
    for diff_info in result_info_list:
        info_data = [
            diff_info.operation_diff_type,
            diff_info.old_api_full_text,
            diff_info.new_api_full_text,
            diff_info.api_file_path,
            diff_info.sub_system,
            diff_info.kit_name,
            diff_info.is_system_api
        ]
        data.append(info_data)

    return data


def generate_excel(result_info_list, api_change_data, output_path):
    data = disposal_result_data(result_info_list)
    wb = op.Workbook()
    ws = wb['Sheet']
    ws.title = 'api差异'
    ws.append(['操作标记', '差异项-旧版本', '差异项-新版本', '.h文件', '归属子系统', 'kit', '是否为系统API'])
    for title in data:
        d = title[0], title[1], title[2], title[3], title[4], title[5], title[6]
        ws.append(d)

    change_data_list = collect_node_api_change(api_change_data)
    ws_of_change = wb.create_sheet('api变更次数统计')
    ws_of_change.append(['api名称', 'kit名称', '归属子系统', '是否是api', 'api类型', '操作标记', '变更类型',
                         '兼容性', '变更次数', '差异项-旧版本', '差异项-新版本', '兼容性列表', '接口全路径',
                         '是否为系统API', '开源/闭源API', '是否是三方库api'])
    for element in change_data_list:
        change_data = element[0], element[1], element[2], element[3], element[4], element[5],\
                      element[6], element[7], element[8], element[9], element[10], element[11],\
                      element[12], element[13], element[14], element[15]
        ws_of_change.append(change_data)
    output_path_xlsx = os.path.abspath(os.path.join(output_path, 'diff.xlsx'))
    wb.save(output_path_xlsx)


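# Workbook layout produced by generate_excel() above: sheet 'api差异' holds one
# row per diff record from disposal_result_data(), and sheet 'api变更次数统计'
# holds one row per aggregated ApiChangeData object from
# collect_node_api_change(); the file is saved as <output_path>/diff.xlsx.

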
def global_assignment(old_dir, new_dir):
    global diff_info_list
    diff_info_list = []
    global global_old_dir
    global_old_dir = old_dir
    global global_new_dir
    global_new_dir = new_dir
    do_diff(old_dir, new_dir)
    return diff_info_list


def result_to_json(result_info_list):
    result_json = []
    for diff_info in result_info_list:
        result_json.append(OutputJson(diff_info))
    return json.dumps(result_json, default=lambda obj: obj.__dict__, indent=4, ensure_ascii=False)


def write_in_txt(check_result, output_path):
    modes = stat.S_IRWXO | stat.S_IRWXG | stat.S_IRWXU
    fd = os.open(output_path, os.O_WRONLY | os.O_CREAT | os.O_TRUNC, mode=modes)
    os.write(fd, check_result.encode())
    os.close(fd)


def do_diff(old_dir, new_dir):
    old_file_list = os.listdir(old_dir)
    new_file_list = os.listdir(new_dir)
    diff_list(old_file_list, new_file_list, old_dir, new_dir)


def get_file_ext(file_name):
    return os.path.splitext(file_name)[1]


def filter_ignore_file(file_path):
    norm_file_path = os.path.normpath(file_path)
    if os.name == 'nt':  # Windows
        pattern = re.compile(IgnoreFileDirectory.IGNORE_FILE_DIR_wd.value)
    else:  # Linux / macOS
        pattern = re.compile(IgnoreFileDirectory.IGNORE_FILE_DIR_lx.value)
    # Check whether the path matches the ignore pattern
    if pattern.search(norm_file_path):
        return False
    return True


def diff_list(old_file_list, new_file_list, old_dir, new_dir):
    all_list = set(old_file_list + new_file_list)
    if len(all_list) == 0:
        return
    for target_file in all_list:
        if (get_file_ext(target_file) != '.h'
                and get_file_ext(target_file) != ''):
            continue
        if (target_file in old_file_list
                and target_file not in new_file_list):
            diff_file_path = os.path.join(old_dir, target_file)
            del_old_file(diff_file_path)
        if (target_file in new_file_list
                and target_file not in old_file_list):
            diff_file_path = os.path.join(new_dir, target_file)
            add_new_file(diff_file_path)
        get_same_file_diff(target_file, old_file_list, new_file_list, old_dir, new_dir)


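# Traversal note for diff_list() above: it walks the union of both directory
# listings; names only in old_dir are reported through del_old_file(), names
# only in new_dir through add_new_file(), and names present in both are either
# recursed into (directories, via do_diff()) or parsed and compared (files,
# via get_file_result_diff()) when filecmp reports a content difference.

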
def add_new_file(diff_file_path):
    if os.path.isdir(diff_file_path):
        add_file(diff_file_path)
    else:
        result_map = parse_file_result(diff_parser_include_ast(global_new_dir, [diff_file_path], flag=1))
        for new_info in result_map.values():
            diff_info_list.extend(judgment_entrance(None, new_info))


def del_old_file(diff_file_path):
    if os.path.isdir(diff_file_path):
        del_file(diff_file_path)
    else:
        result_map = parse_file_result(diff_parser_include_ast(global_old_dir, [diff_file_path], flag=0))
        for old_info in result_map.values():
            diff_info_list.extend(judgment_entrance(old_info, None))


def get_same_file_diff(target_file, old_file_list, new_file_list, old_dir, new_dir):
    if (target_file in old_file_list
            and target_file in new_file_list):
        if (os.path.isdir(os.path.join(old_dir, target_file))
                and os.path.isdir(os.path.join(new_dir, target_file))):
            old_child_dir = os.path.join(old_dir, target_file)
            new_child_dir = os.path.join(new_dir, target_file)
            do_diff(old_child_dir, new_child_dir)
        if (os.path.isfile(os.path.join(old_dir, target_file))
                and os.path.isfile(os.path.join(new_dir, target_file))):
            old_target_file = os.path.join(old_dir, target_file)
            new_target_file = os.path.join(new_dir, target_file)
            if not filecmp.cmp(old_target_file, new_target_file):
                get_file_result_diff(old_target_file, new_target_file)


def get_file_result_diff(old_target_file, new_target_file):
    old_file_result_map = parse_file_result(diff_parser_include_ast(global_old_dir, [old_target_file], flag=0))
    new_file_result_map = parse_file_result(diff_parser_include_ast(global_new_dir, [new_target_file], flag=1))
    if old_file_result_map and new_file_result_map:
        merged_dict = OrderedDict(list(old_file_result_map.items()) + list(new_file_result_map.items()))
        all_key_list = merged_dict.keys()
        for key in all_key_list:
            diff_info_list.extend(judgment_entrance(old_file_result_map.get(key), new_file_result_map.get(key)))


def del_file(dir_path):
    file_list = os.listdir(dir_path)
    for i in file_list:
        if get_file_ext(i) != '.h' and get_file_ext(i) != '':
            continue
        file_path = os.path.join(dir_path, i)
        if os.path.isdir(file_path):
            del_file(file_path)
        if get_file_ext(i) == '.h':
            result_map = parse_file_result(diff_parser_include_ast(global_old_dir, [file_path], flag=0))
            for old_info in result_map.values():
                diff_info_list.extend(judgment_entrance(old_info, None))


def add_file(dir_path):
    file_list = os.listdir(dir_path)
    for i in file_list:
        if get_file_ext(i) != '.h' and get_file_ext(i) != '':
            continue
        file_path = os.path.join(dir_path, i)
        if os.path.isdir(file_path):
            add_file(file_path)
        if get_file_ext(i) == '.h':
            result_map = parse_file_result(diff_parser_include_ast(global_new_dir, [file_path], flag=1))
            for new_info in result_map.values():
                diff_info_list.extend(judgment_entrance(None, new_info))


def parse_file_result(result, data_type=0):
    """
    Args:
        result: parsed root-node dicts returned by diff_parser_include_ast
        data_type(int): data handling type. 1 - file added or deleted; 0 - other
    """
    result_map = {}
    for root_node in result:
        if root_node['syntax_error'] != 'NA':
            error_file_path = os.path.abspath(os.path.join(root_node['gn_path'], root_node['name']))
            error_message_dict = {
                'current_file': error_file_path,
                'error_message': root_node['syntax_error']
            }
            syntax_file_list.append(error_message_dict)
        result_map.setdefault(f'{root_node["name"]}-{root_node["kind"]}', root_node)
        if data_type != 1:
            parse_file_result_by_child(result_map, root_node)
    return result_map


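# Key format note for parse_file_result() above: nodes are indexed as
# '<name>-<kind>', e.g. a function named 'OH_Example_Func' of kind
# 'FUNCTION_DECL' would be stored under 'OH_Example_Func-FUNCTION_DECL'
# (the identifier is illustrative, not taken from a real header).

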
def process_empty_name(data_info: dict, result_map):
    data_current_file = os.path.split(data_info['location']['location_path'])[1]
    if data_info['kind'] == 'ENUM_DECL' and 'members' in data_info and data_current_file in data_info['type']:
        for element in data_info['members']:
            result_map.setdefault(f'{data_current_file}-{element["name"]}', element)
    elif data_info['kind'] == 'ENUM_DECL' and 'members' in data_info and (data_current_file not in data_info['type']):
        result_map.setdefault(f'{data_current_file}-{data_info["type"]}', data_info)
    elif (data_info['kind'] == 'STRUCT_DECL' or data_info['kind'] == 'UNION_DECL') and \
            (data_current_file not in data_info['type']):
        result_map.setdefault(f'{data_current_file}-{data_info["type"]}', data_info)


def parse_file_result_by_child(result_map, root_node):
    children_list = root_node['children']
    for children in children_list:
        if children["name"] == '':
            process_empty_name(children, result_map)
            continue
        result_map.setdefault(f'{children["name"]}-{children["kind"]}', children)
    del root_node['children']