• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1#!/usr/bin/env python3
2# -*- coding: utf-8 -*-
3#
4# Copyright (c) 2024 Huawei Device Co., Ltd.
5# Licensed under the Apache License, Version 2.0 (the "License");
6# you may not use this file except in compliance with the License.
7# You may obtain a copy of the License at
8#
9# http://www.apache.org/licenses/LICENSE-2.0
10#
11# Unless required by applicable law or agreed to in writing, software
12# distributed under the License is distributed on an "AS IS" BASIS,
13# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14# See the License for the specific language governing permissions and
15# limitations under the License.
16
17
18import re
19import json
20import os
21import subprocess
22
23
class Utils:
    """Shell-command helpers shared by the SDK linter test driver."""

    def run_command(self, cmd, path):
        """Run *cmd* through the shell with *path* as the working directory.

        Returns the subprocess.CompletedProcess with stdout/stderr captured
        as bytes.

        NOTE(security): *cmd* is executed with shell=True, so it must only
        come from trusted, hard-coded sources (as in this file).
        """
        return subprocess.run(cmd, cwd=os.path.abspath(path), shell=True,
                              stdout=subprocess.PIPE, stderr=subprocess.PIPE)

    def get_stderr(self, result):
        """Decode captured stderr as UTF-8, falling back to GBK.

        The fallback handles Chinese-locale Windows tool output that is not
        valid UTF-8.  Only decode failures are expected here, so the handler
        is narrowed from the original bare ``Exception``.
        """
        try:
            return result.stderr.decode()
        except UnicodeDecodeError:
            return result.stderr.decode('gbk')

    def remove_ansi_colors(self, text):
        """Strip ANSI escape sequences (colors, cursor moves) from *text*."""
        return re.sub(r'\x1b\[[0-9;]*[A-Za-z]', '', text)
39
40
class SDKLinterTest:
    """Drives a DevEco/hvigor build of a project, parses ArkTS linter
    WARN/ERROR reports out of the build log, and compares or updates
    per-file ``*-expected.json`` reports under ``expected_path``.
    """

    # Numeric severity codes stored in the report files.
    # Unrecognized levels map to 0 so sorting never sees a non-int
    # (the original code could leave a list in the 'severity' field).
    _SEVERITY = {'WARN': 1, 'ERROR': 2}

    def __init__(self, project_path='', expected_path='') -> None:
        # Normalize to forward slashes so path matching is platform-neutral.
        self.project_path = project_path.replace('\\', '/')
        self.expected_path = expected_path.replace('\\', '/')
        # Per-instance state.  These were mutable class attributes in the
        # original, which would leak data between instances.
        self.data = []
        self.build_info = []
        self.utils = Utils()

    def get_sdk_result(self):
        """Run ohpm install plus hvigor sync/clean/build and capture the
        build log into ``self.build_info`` (one entry per line, ANSI
        colors stripped)."""
        install_cmd = 'ohpm install --all'
        print(install_cmd)
        result = self.utils.run_command(install_cmd, self.project_path)
        log = result.stdout.decode() + self.utils.get_stderr(result)
        print(log)

        # Raw string: the original ".\hvigorw.bat" literal contained the
        # invalid escape sequence '\h'.
        hvigorw_tool = r".\hvigorw.bat" if os.name == "nt" else "./hvigorw"
        sync_cmd = f"{hvigorw_tool} --sync -p product=default --parallel;"
        print(sync_cmd)
        result = self.utils.run_command(sync_cmd, self.project_path)
        log = result.stdout.decode() + self.utils.get_stderr(result)
        print(log)

        clean_cmd = (f"{hvigorw_tool} -p product=default clean --info "
                     f"--verbose-analyze --parallel --incremental --daemon;")
        print(clean_cmd)
        self.utils.run_command(clean_cmd, self.project_path)
        build_cmd = (f"{hvigorw_tool} clean --mode module -p product=default "
                     f"assembleHap --parallel --incremental --daemon;")
        build_result = self.utils.run_command(build_cmd, self.project_path)
        log = build_result.stdout.decode() + self.utils.get_stderr(build_result)
        print(log)

        print('=' * 60)
        # splitlines() handles both '\r\n' and '\n'.  The original
        # split('\r\n') produced a single huge "line" on POSIX hosts,
        # silently yielding zero parsed errors there.
        self.build_info = self.utils.remove_ansi_colors(log).splitlines()

    def open_output(self):
        """Build the project and parse its log into ``self.data``:
        one dict per source file with a list of defect records."""
        self.get_sdk_result()
        errors = self._split_errors_list()
        self.data = []
        index_by_path = {}
        for error in errors:
            filepath = self._get_filepath(error)
            # Reports whose path is not under the project (e.g. SDK files)
            # are skipped.
            if not filepath:
                print('Report info filepath is not in DevEcoProject:\n', error)
                continue
            # Match '<file>.ets:<row>:<col>' or '<file>.ts:<row>:<col>'.
            # The original pattern '.[ts|ets]:' was a broken character class.
            row_col_info = re.findall(r'\.(?:ets|ts):(\d+:\d+)', error)
            row, col = row_col_info[0].split(':') if row_col_info else (-1, -1)

            # Everything after the first newline is the human-readable
            # description.
            error_info = re.findall(r'\n.*', error, re.DOTALL)
            level_match = re.findall(r'^([^:]+):', error)
            severity = self._SEVERITY.get(level_match[0], 0) if level_match else 0

            if filepath in index_by_path:
                group = self.data[index_by_path[filepath]]
            else:
                group = {'filePath': filepath, 'defects': []}
                index_by_path[filepath] = len(self.data)
                self.data.append(group)
            group['defects'].append({
                'origin': error.replace(self.project_path, ''),
                'severity': severity,
                'reportLine': int(row),
                'reportColumn': int(col),
                'description': error_info[0].strip().replace(self.project_path, '') if error_info else ''
            })
        self.data.extend(self.empty_report(index_by_path))

    def empty_report(self, hash_index_map):
        """Return placeholder entries (empty defect lists) for expected
        files whose source file produced no defects in this build."""
        empty_data = []
        for item in os.listdir(os.path.abspath(self.expected_path)):
            file_path = os.path.join(self.expected_path, item)
            if not os.path.isfile(file_path):
                continue
            with open(file_path, encoding='utf-8') as f:
                d_json = json.load(f)
            sdk = d_json.get('sdklinter')
            if sdk and sdk['filePath'] not in hash_index_map:
                empty_data.append({'filePath': sdk['filePath'], 'defects': []})
        return empty_data

    def update(self):
        """Write/refresh '<basename>-expected.json' files under
        ``expected_path``, preserving other sections in each file."""
        self._data_sort()
        for entry in self.data:
            base = os.path.splitext(os.path.basename(entry['filePath']))[0]
            file_path = os.path.join(self.expected_path, base + '-expected.json')
            read_data = {}
            if os.path.isfile(file_path):
                # Read with the same encoding we write, for consistency.
                with open(file_path, encoding='utf-8') as f:
                    read_data = json.load(f)
            read_data['sdklinter'] = entry
            with open(file_path, 'w', encoding='utf-8') as f:
                json.dump(read_data, f, indent=4)
        print('update done!')

    def verify(self):
        """Compare ``self.data`` against the expected files and print a
        pass/fail line per file (failures after a '=' separator)."""
        self._data_sort()
        FILEPATH, PASS_FLAG = 0, 1
        output = []
        for expected in self._load_expected_files():
            ok = any(expected['filePath'] == actual['filePath'] and expected == actual
                     for actual in self.data)
            output.append([expected['filePath'], ok])
        known = {row[FILEPATH] for row in output}
        for actual in self.data:
            if actual['filePath'] not in known:
                output.append([actual['filePath'], False])
        # Stable sort: passing files first, failures grouped at the end.
        output.sort(key=lambda row: row[PASS_FLAG] is False)
        printed_separator = False
        for row in output:
            if not printed_separator and row[PASS_FLAG] is False:
                print('=' * 60)
                printed_separator = True
            print(*row)

    def _split_errors_list(self):
        """Split ``self.build_info`` into error chunks.

        Each chunk starts at an ArkTS ERROR/WARN marker line and runs up to
        (but not including) the next marker; the final 'COMPILE RESULT:FAIL'
        line acts as a terminator only, so it never starts a chunk itself.
        """
        lines = self.build_info
        idx_list = [idx for idx, line in enumerate(lines)
                    if 'ArkTS:ERROR File' in line
                    or 'ArkTS:WARN File' in line
                    or 'COMPILE RESULT:FAIL' in line]
        return ['\n'.join(lines[start:end]) + '\n'
                for start, end in zip(idx_list, idx_list[1:])]

    def _get_filepath(self, error):
        """Extract the project-relative file path from an error chunk.

        Returns '' when the reported path is not under the project root
        (e.g. an SDK-internal file).
        """
        # re.escape: the project path is data, not a regex pattern.
        pattern = re.escape(self.project_path) + '/(.*?):'
        match = re.findall(pattern, error)
        if not match:
            # Retry with Windows-style backslashes normalized.
            match = re.findall(pattern, error.replace('\\', '/'))
        return match[0] if match else ''

    def _load_expected_files(self):
        """Load every expected JSON below ``expected_path``.

        Files without an 'sdklinter' section are skipped (the original
        raised KeyError on them, unlike ``empty_report`` which guarded).
        """
        expected_dicts = []
        for root, _dirs, files in os.walk(self.expected_path):
            for name in files:
                with open(os.path.join(root, name), encoding='utf-8') as f:
                    content = json.load(f)
                if 'sdklinter' in content:
                    expected_dicts.append(content['sdklinter'])
        return expected_dicts

    def _data_sort(self):
        """Sort each file's defects by position, then severity (high
        first), then the raw report text for a deterministic order."""
        for entry in self.data:
            entry['defects'].sort(key=lambda d: (
                d['reportLine'], d['reportColumn'], -d['severity'], d['origin']))
211