• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1#!/usr/bin/env python3
2# coding: utf-8
3
4"""
5Copyright (c) 2023 Huawei Device Co., Ltd.
6Licensed under the Apache License, Version 2.0 (the "License");
7you may not use this file except in compliance with the License.
8You may obtain a copy of the License at
9
10    http://www.apache.org/licenses/LICENSE-2.0
11
12Unless required by applicable law or agreed to in writing, software
13distributed under the License is distributed on an "AS IS" BASIS,
14WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15See the License for the specific language governing permissions and
16limitations under the License.
17
18Description: execute test tasks
19"""
20
21import logging
22import os
23import re
24import shutil
25import signal
26import subprocess
27import zipfile
28
29import json5
30
31import options
32import utils
33
34
class IncrementalTest:
    """Incremental-compilation test cases.

    Each static method mutates the project in a specific way (or not at all),
    triggers a rebuild via compile_project(), and validates both the build
    products and which compiler cache files changed on disk.
    """

    @staticmethod
    def validate_module_name_change(task, inc_task, is_debug, stdout, stderr, new_module_name):
        """Verify the hap built after renaming the module to new_module_name.

        Validates the renamed hap, then disassembles its modules.abc and
        checks that the new module name appears in the first '.function'
        definition of the disassembly.
        """
        output_file = get_compile_output_file_path(task, is_debug)
        output_dir = os.path.dirname(output_file)
        output_file_name = os.path.basename(output_file)
        output_file_name_items = output_file_name.split(
            '-')  # hap name format: entry-default.hap
        output_file_name_items[0] = new_module_name
        output_file_name = '-'.join(output_file_name_items)
        new_module_name_output_file = os.path.join(
            output_dir, output_file_name)

        logging.debug(f"new module hap file: {new_module_name_output_file}")

        passed = validate(inc_task, task, is_debug, stdout,
                          stderr, new_module_name_output_file)
        logging.debug(f"validate new module hap file, passed {passed}")
        if not passed:
            return

        if is_debug:
            inc_info = inc_task.debug_info
        else:
            inc_info = inc_task.release_info
        uncompressed_output_file = new_module_name_output_file + '.uncompressed'
        with zipfile.ZipFile(new_module_name_output_file, 'r') as zip_ref:
            zip_ref.extractall(uncompressed_output_file)

        abc_path = os.path.join(uncompressed_output_file, 'ets')
        modules_abc_path = os.path.join(abc_path, 'modules.abc')
        modules_pa = disasm_abc(modules_abc_path)
        if not modules_pa or not os.path.exists(modules_pa):
            inc_info.result = options.TaskResult.failed
            inc_info.error_message = 'ark_disasm failed, module name change not verified'
            return

        # Grab the first '.function' line of the disassembly; the module name
        # is expected to appear as one of the dot-separated name components.
        func_str = ''
        with open(modules_pa, 'r', encoding='utf-8') as pa:
            line = pa.readline()
            while line:
                if '.function' in line.strip():
                    func_str = line.strip()
                    break
                line = pa.readline()

        func_define_items = func_str.split('.')
        if new_module_name not in func_define_items:
            inc_info.result = options.TaskResult.failed
            inc_info.error_message = f'expected entry name {new_module_name} in function name, \
                                     actual function name: {func_str}'

        shutil.rmtree(uncompressed_output_file)

    @staticmethod
    def is_file_in_modified_files(task_type, backup_file_relative_path, modified_cache_files):
        """Return True if the changed cache file maps to one of the modified sources."""
        if 'stage' in task_type:
            return backup_file_relative_path in modified_cache_files
        else:
            # Non-stage builds keep their cache under a 'temporary' directory;
            # compare using the path relative to that directory.
            non_temporary_path = backup_file_relative_path.split("temporary")[
                1].lstrip(os.path.sep)
            logging.debug(f"non_temporary_path: {non_temporary_path}")
            for file in modified_cache_files:
                logging.debug(f"modified_cache_files file: {file}")
                if non_temporary_path in file:
                    return True
        return False

    @staticmethod
    def validate_compile_incremental_file(task, inc_task, is_debug, modified_files):
        """Check that only caches belonging to modified_files changed timestamps.

        Walks the build cache directory, compares each cache file's timestamp
        against the backup taken after the full build, and fails the test if
        a file changed that does not correspond to a modified source file.
        """
        # Map the task type to the cache file extension the compiler produces.
        # Fix: this variable was previously spelled both 'cache_extension' and
        # 'cache_extention', which raised NameError for unmatched task types.
        cache_extension = ''
        if 'stage' in task.type:
            cache_extension = '.protoBin'
        elif 'fa' in task.type or 'compatible8' in task.type:
            cache_extension = '.temp.abc'
        elif 'js' in task.type:
            cache_extension = '.abc'

        modified_cache_files = []
        # modified_files is a list of file with relative path to .../debug/release
        for file in modified_files:
            name, ext = os.path.splitext(file)
            modified_cache_files.append(name + cache_extension)

        logging.debug(f"modified_cache_files: {modified_cache_files}")

        if is_debug:
            cache_path = os.path.join(
                task.path, *(task.build_path), *(task.cache_path), 'debug')
            backup_path = task.backup_info.cache_debug
            inc_info = inc_task.debug_info
        else:
            cache_path = os.path.join(
                task.path, *(task.build_path), *(task.cache_path), 'release')
            backup_path = task.backup_info.cache_release
            inc_info = inc_task.release_info

        for root, dirs, files in os.walk(cache_path):
            for file in files:
                if not file.endswith(cache_extension):
                    continue
                file_absolute_path = os.path.join(root, file)
                file_relative_path = os.path.relpath(
                    file_absolute_path, cache_path)
                backup_file = os.path.join(backup_path, file_relative_path)

                if not os.path.exists(backup_file):
                    logging.debug(f"backup file not exists: {backup_file}")
                    continue

                if utils.is_file_timestamps_same(file_absolute_path, backup_file):
                    continue

                logging.debug(f"found file {file_relative_path} changed")
                is_file_in_list = IncrementalTest.is_file_in_modified_files(
                    task.type, file_relative_path, modified_cache_files)
                logging.debug(f"is file in list: {is_file_in_list}")
                if not is_file_in_list:
                    inc_info.result = options.TaskResult.failed
                    inc_info.error_message = f'Incremental compile found unexpected file timestamp changed. \
                                             Changed file: {file_relative_path}'
                    return

    @staticmethod
    def prepare_incremental_task(task, test_name):
        """Return the IncCompilationInfo for test_name, creating it if absent."""
        if test_name in task.incre_compilation_info:
            inc_task = task.incre_compilation_info[test_name]
        else:
            inc_task = options.IncCompilationInfo()
            inc_task.name = test_name
            task.incre_compilation_info[test_name] = inc_task
        return inc_task

    @staticmethod
    def compile_incremental_no_modify(task, is_debug):
        """Rebuild without touching any source; no cache file may change."""
        test_name = 'no_change'
        inc_task = IncrementalTest.prepare_incremental_task(task, test_name)

        logging.info(f"==========> Running {test_name} for task: {task.name}")
        [stdout, stderr] = compile_project(task, is_debug)
        passed = validate(inc_task, task, is_debug, stdout, stderr)
        if passed:
            IncrementalTest.validate_compile_incremental_file(
                task, inc_task, is_debug, [])

    @staticmethod
    def compile_incremental_add_oneline(task, is_debug):
        """Append one line to the designated modify-file and rebuild."""
        test_name = 'add_oneline'
        inc_task = IncrementalTest.prepare_incremental_task(task, test_name)

        logging.info(f"==========> Running {test_name} for task: {task.name}")
        modify_file_item = task.inc_modify_file
        modify_file = os.path.join(task.path, *modify_file_item)
        modify_file_backup = modify_file + ".bak"
        shutil.copyfile(modify_file, modify_file_backup)

        with open(modify_file, 'a', encoding='utf-8') as file:
            file.write(options.configs.get('patch_content').get(
                'patch_lines_2').get('tail'))

        [stdout, stderr] = compile_project(task, is_debug)
        passed = validate(inc_task, task, is_debug, stdout, stderr)
        if passed:
            modified_files = [os.path.join(*modify_file_item)]
            IncrementalTest.validate_compile_incremental_file(
                task, inc_task, is_debug, modified_files)

        # Restore the original source from the backup.
        shutil.move(modify_file_backup, modify_file)

    @staticmethod
    def compile_incremental_add_file(task, is_debug):
        """Add a new source file (imported by the modify-file) and rebuild."""
        test_name = 'add_file'
        inc_task = IncrementalTest.prepare_incremental_task(task, test_name)

        logging.info(f"==========> Running {test_name} for task: {task.name}")
        modify_file_item = task.inc_modify_file
        modify_file = os.path.join(task.path, *modify_file_item)
        modify_file_backup = modify_file + ".bak"
        shutil.copyfile(modify_file, modify_file_backup)

        modify_dir = os.path.dirname(modify_file)
        # Pick the patch matching the project language (js vs ets).
        if 'js' in task.type:
            patch_content = options.configs.get(
                'patch_content').get('patch_new_file_js')
            new_file_name = patch_content.get('name')
            new_file_content = patch_content.get('content')
        else:
            patch_content = options.configs.get(
                'patch_content').get('patch_new_file_ets')
            new_file_name = patch_content.get('name')
            new_file_content = patch_content.get('content')
        new_file = os.path.join(modify_dir, new_file_name)

        with open(new_file, 'w', encoding='utf-8') as file:
            file.writelines(new_file_content)

        # Wrap the original content with head/tail patches that reference
        # the newly added file.
        with open(modify_file, 'r+', encoding='utf-8') as file:
            old_content = file.read()
            file.seek(0)
            patch_lines = options.configs.get(
                'patch_content').get('patch_lines_1')
            file.write(patch_lines.get('head'))
            file.write(old_content)
            file.write(patch_lines.get('tail'))

        [stdout, stderr] = compile_project(task, is_debug)
        passed = validate(inc_task, task, is_debug, stdout, stderr)
        if passed:
            modified_files = [os.path.join(*modify_file_item)]
            IncrementalTest.validate_compile_incremental_file(
                task, inc_task, is_debug, modified_files)

        # Undo the modifications so the next test starts clean.
        shutil.move(modify_file_backup, modify_file)
        os.remove(new_file)

    @staticmethod
    def compile_incremental_delete_file(task, is_debug):
        """Rebuild after the file added by 'add_file' has been removed."""
        test_name = 'delete_file'
        inc_task = IncrementalTest.prepare_incremental_task(task, test_name)

        logging.info(f"==========> Running {test_name} for task: {task.name}")
        # this test is after 'add_file', and in test 'add_file' already done remove file,
        # so here just call compile
        [stdout, stderr] = compile_project(task, is_debug)
        passed = validate(inc_task, task, is_debug, stdout, stderr)
        if passed:
            modify_file_item = task.inc_modify_file
            modified_files = [os.path.join(*modify_file_item)]
            IncrementalTest.validate_compile_incremental_file(
                task, inc_task, is_debug, modified_files)

    @staticmethod
    def compile_incremental_reverse_hap_mode(task, is_debug):
        """Rebuild with the opposite hap mode (debug <-> release) and validate."""
        test_name = 'reverse_hap_mode'
        inc_task = IncrementalTest.prepare_incremental_task(task, test_name)

        logging.info(f"==========> Running {test_name} for task: {task.name}")
        hap_mode = not is_debug
        [stdout, stderr] = compile_project(task, hap_mode)
        validate(inc_task, task, hap_mode, stdout, stderr)

    @staticmethod
    def compile_incremental_modify_module_name(task, is_debug):
        """Rename the module in the project config files, rebuild, and verify.

        Stage-mode only: rewrites build-profile.json5 and module.json5 with a
        new module name, recompiles, and checks the renamed hap. The original
        config files are always restored afterwards.
        """
        if 'stage' not in task.type:
            return

        test_name = 'change_module_name'
        inc_task = IncrementalTest.prepare_incremental_task(task, test_name)

        logging.info(f"==========> Running {test_name} for task: {task.name}")
        # modify build-profile.json5
        profile_file = os.path.join(task.path, 'build-profile.json5')
        profile_file_backup = profile_file + ".bak"
        shutil.copyfile(profile_file, profile_file_backup)

        with open(profile_file, 'r') as file:
            profile_data = json5.load(file)
        new_module_name = "new_entry"
        logging.debug(f"profile_data is: {profile_data}")
        profile_data['modules'][0]['name'] = new_module_name
        with open(profile_file, 'w') as file:
            json5.dump(profile_data, file)

        # modify module.json5 for stage mode
        entry_item = task.build_path[:-2]  # to entry path
        config_file_dir = os.path.join(task.path, *entry_item, 'src', 'main')
        config_file = os.path.join(config_file_dir, 'module.json5')
        config_file_backup = config_file + ".bak"
        shutil.copyfile(config_file, config_file_backup)

        with open(config_file, 'r') as file:
            config_data = json5.load(file)
        config_data['module']['name'] = new_module_name
        with open(config_file, 'w') as file:
            json5.dump(config_data, file)

        try:
            [stdout, stderr] = compile_project(task, is_debug)
            IncrementalTest.validate_module_name_change(
                task, inc_task, is_debug, stdout, stderr, new_module_name)
        except Exception as e:
            logging.exception(e)
        finally:
            # Always restore the original config files.
            shutil.move(profile_file_backup, profile_file)
            shutil.move(config_file_backup, config_file)
320
321
class OtherTest:
    """Non-incremental test cases: binary consistency, interrupted builds,
    expected compile errors, path-length limits, and the ohosTest target.
    """

    @staticmethod
    def is_abc_same_in_haps(hap_1, hap_2):
        """Return True if both haps contain the same set of byte-identical .abc files."""
        hap_1_abc_files = []
        hap_2_abc_files = []
        with zipfile.ZipFile(hap_1) as zf1, zipfile.ZipFile(hap_2) as zf2:
            # Collect .abc entries from each archive.
            for file in zf1.namelist():
                if file.endswith('.abc'):
                    hap_1_abc_files.append(file)
            for file in zf2.namelist():
                if file.endswith('.abc'):
                    hap_2_abc_files.append(file)

            # Sort so files are compared pairwise regardless of archive order.
            hap_1_abc_files.sort()
            hap_2_abc_files.sort()

            if len(hap_1_abc_files) != len(hap_2_abc_files):
                return False

            for idx, abc_file in enumerate(hap_1_abc_files):
                with zf1.open(abc_file) as f1, zf2.open(hap_2_abc_files[idx]) as f2:
                    data1 = f1.read()
                    data2 = f2.read()
                    if data1 != data2:
                        return False

        return True

    @staticmethod
    def verify_binary_consistency(task):
        """Compile twice and check the .abc outputs are byte-identical.

        Only the modes selected by options.arguments.hap_mode are checked;
        an unchecked mode is treated as consistent.
        """
        test_name = 'binary_consistency'
        test_info = options.CompilationInfo()
        task.other_tests[test_name] = test_info
        debug_consistency = True
        release_consistency = True

        logging.info(f"==========> Running {test_name} for task: {task.name}")
        if options.arguments.hap_mode in ['all', 'release']:
            # will have at least 1 output from full compile; compile again to
            # obtain a second output for comparison
            if len(task.backup_info.output_release) == 1:
                compile_project(task, False)
                backup_compile_output(task, False)

            if len(task.backup_info.output_release) == 2:
                release_consistency = OtherTest.is_abc_same_in_haps(task.backup_info.output_release[0],
                                                                    task.backup_info.output_release[1])
            else:
                release_consistency = False
            logging.debug(f"release consistency: {release_consistency}")

        if options.arguments.hap_mode in ['all', 'debug']:
            if len(task.backup_info.output_debug) == 1:
                compile_project(task, True)
                backup_compile_output(task, True)

            if len(task.backup_info.output_debug) == 2:
                debug_consistency = OtherTest.is_abc_same_in_haps(task.backup_info.output_debug[0],
                                                                  task.backup_info.output_debug[1])
            else:
                debug_consistency = False
            logging.debug(f"debug consistency: {debug_consistency}")

        if debug_consistency and release_consistency:
            test_info.result = options.TaskResult.passed
        else:
            test_info.result = options.TaskResult.failed

    @staticmethod
    def execute_break_compile(task, is_debug):
        """Interrupt a clean build mid-way, then verify a rebuild succeeds.

        Starts a fresh hvigor build, sends SIGTERM as soon as the ArkTS
        compile stage is reached, then runs a second (full) compile and
        validates its output.
        """
        test_name = 'break_continue_compile'
        test_info = options.CompilationInfo()
        task.other_tests[test_name] = test_info

        logging.info(f"==========> Running {test_name} for task: {task.name}")
        clean_compile(task)
        cmd = get_hvigor_compile_cmd(task.path, is_debug)
        logging.debug(f'cmd: {cmd}')
        logging.debug(f"cmd execution path {task.path}")
        process = subprocess.Popen(cmd, shell=False, cwd=task.path,
                                   stdout=subprocess.PIPE,
                                   stderr=subprocess.PIPE)

        # Watch the build output and kill the process once the ArkTS
        # compile stage starts, simulating an interrupted build.
        for line in iter(process.stdout.readline, b''):
            if b'CompileArkTS' in line:
                logging.debug("terminate signal sent")
                process.send_signal(signal.SIGTERM)
                break

        [stdout, stderr] = process.communicate(
            timeout=options.arguments.compile_timeout)

        logging.debug("first compile: stdcout: %s",
                      stdout.decode('utf-8', errors="ignore"))
        logging.warning("first compile: stdcerr: %s",
                        stderr.decode('utf-8', errors="ignore"))

        # The second compile must recover from the interrupted state.
        logging.debug("another compile")
        [stdout, stderr] = compile_project(task, is_debug)

        [is_success, time_string] = is_compile_success(stdout)
        if not is_success:
            test_info.result = options.TaskResult.failed
            test_info.error_message = stderr
        else:
            passed = validate_compile_output(test_info, task, is_debug)
            if passed:
                test_info.result = options.TaskResult.passed

    @staticmethod
    def compile_full_with_error(task, is_debug):
        """Inject a known-bad patch and verify the expected compile error appears."""
        test_name = 'compile_with_error'
        test_info = options.CompilationInfo()
        task.other_tests[test_name] = test_info

        logging.info(f"==========> Running {test_name} for task: {task.name}")
        modify_file_item = task.inc_modify_file
        modify_file = os.path.join(task.path, *modify_file_item)
        modify_file_backup = modify_file + ".bak"
        shutil.copyfile(modify_file, modify_file_backup)

        patch_lines_error = options.configs.get(
            'patch_content').get('patch_lines_error')
        with open(modify_file, 'a', encoding='utf-8') as file:
            file.write(patch_lines_error.get('tail'))

        [stdout, stderr] = compile_project(task, is_debug)
        expected_errors = patch_lines_error.get('expected_error')

        # Pass if any one of the expected error messages shows up in stderr.
        passed = False
        for expected_error in expected_errors:
            if expected_error in stderr:
                passed = True
                break

        if passed:
            test_info.result = options.TaskResult.passed
        else:
            test_info.result = options.TaskResult.failed
            test_info.error_message = f"expected error message: {expected_errors}, but got {stderr}"

        # Restore the original source file.
        shutil.move(modify_file_backup, modify_file)

    @staticmethod
    def compile_with_exceed_length(task, is_debug):
        """Build with an over-long target name; expect a path-length error on Windows.

        On non-Windows platforms (no path-length limit) the build is expected
        to succeed and its output to validate normally.
        """
        test_name = 'compile_with_exceed_length'
        test_info = options.CompilationInfo()
        task.other_tests[test_name] = test_info

        logging.info(f"==========> Running {test_name} for task: {task.name}")
        # get build-profile.json5
        entry_item = task.build_path[:-2]  # to entry path
        profile_file = os.path.join(
            task.path, *entry_item, 'build-profile.json5')
        profile_file_backup = profile_file + ".bak"
        shutil.copyfile(profile_file, profile_file_backup)

        with open(profile_file, 'r') as file:
            profile_data = json5.load(file)

        # Long target name chosen to push the output path past the Windows
        # 259-character limit.
        long_str = 'default1234567890123456789012345678901234567890123456789012345678901234567890123456789' + \
                   '012345678901234567890123456789'
        logging.debug("long_str: %s", long_str)
        profile_data['targets'][0]['name'] = long_str

        with open(profile_file, 'w') as file:
            json5.dump(profile_data, file)

        [stdout, stderr] = compile_project(task, is_debug)
        # Only the Windows platform has a length limit
        if utils.is_windows():
            expected_error_message = 'The length of path exceeds the maximum length: 259'

            if expected_error_message in stderr:
                test_info.result = options.TaskResult.passed
            else:
                test_info.result = options.TaskResult.failed
                test_info.error_message = f"expected error message: {expected_error_message}, but got {stderr}"
        else:
            [is_success, time_string] = is_compile_success(stdout)
            if not is_success:
                test_info.result = options.TaskResult.failed
                test_info.error_message = stderr
            else:
                passed = validate_compile_output(test_info, task, is_debug)
                if passed:
                    test_info.result = options.TaskResult.passed

        # Restore the original build profile.
        shutil.move(profile_file_backup, profile_file)

    @staticmethod
    def compile_ohos_test(task):
        """Build the entry@ohosTest target and validate its hap output."""
        test_name = 'ohos_test'
        test_info = options.CompilationInfo()
        task.other_tests[test_name] = test_info

        logging.info(f"==========> Running {test_name} for task: {task.name}")
        # ohosTest has only debug mode
        cmd = [get_hvigor_path(task.path), '--mode', 'module',
               '-p', 'module=entry@ohosTest', 'assembleHap']
        [stdout, stderr] = compile_project(task, True, cmd)
        [is_success, time_string] = is_compile_success(stdout)
        if not is_success:
            test_info.result = options.TaskResult.failed
            test_info.error_message = stderr
        else:
            # Derive the ohosTest hap path from the default hap path by
            # swapping the target segment in both the file name and the
            # output directory.
            output_file = get_compile_output_file_path(task, True)
            output_dir = os.path.dirname(output_file)
            output_file_name = os.path.basename(output_file)

            ohos_test_str = 'ohosTest'
            output_file_name_items = output_file_name.split(
                '-')  # hap name format: entry-default-signed.hap
            # ohosTest hap format: entry-ohosTest-signed.hap
            output_file_name_items[-2] = ohos_test_str
            output_file_name = '-'.join(output_file_name_items)

            output_dir_items = output_dir.split(os.path.sep)
            output_dir_items[-1] = ohos_test_str
            if utils.is_windows():
                # for windows, need to add an empty string to mark between disk identifier and path
                output_dir_items.insert(1, os.path.sep)
            elif utils.is_mac():
                output_dir_items.insert(0, os.path.sep)
            ohos_test_output_file = os.path.join(
                *output_dir_items, output_file_name)

            passed = validate_compile_output(
                test_info, task, True, ohos_test_output_file)
            if passed:
                test_info.result = options.TaskResult.passed
552
553
def disasm_abc(abc_file):
    """Disassemble an .abc file with ark_disasm.

    Searches the DevEco SDK toolchains (API 10 first, then API 9) for the
    ark_disasm binary, runs it on abc_file, and returns the path of the
    generated .pa file, or '' if no disassembler was found.
    """
    sdk_path = options.configs.get('deveco_sdk_path')
    binary_name = 'ark_disasm.exe' if utils.is_windows() else 'ark_disasm'

    # Probe SDK API levels from newest to oldest for the disassembler.
    disasm_tool = ''
    for api_version in ('10', '9'):
        candidate = os.path.join(
            sdk_path, api_version, 'toolchains', binary_name)
        if os.path.exists(candidate):
            disasm_tool = candidate
            break
    if not disasm_tool:
        logging.error("ark_disasm executable not found")
        return ''

    pa_file = abc_file + '.pa'
    cmd = [disasm_tool, '--verbose', abc_file, pa_file]
    logging.debug(f'cmd: {cmd}')
    process = subprocess.Popen(
        cmd, shell=False, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    [stdout, stderr] = process.communicate(
        timeout=options.arguments.compile_timeout)

    logging.debug("disasm stdcout: %s",
                  stdout.decode('utf-8', errors="ignore"))
    logging.warning("disasm: stdcerr: %s",
                    stderr.decode('utf-8', errors="ignore"))

    return pa_file
586
587
def is_abc_debug_info_correct(abc_file, is_debug):
    """Check that an .abc file's debug info matches the build mode.

    Disassembles abc_file and looks for a LOCAL_VARIABLE_TABLE section:
    debug builds must contain it, release builds must not.
    """
    pa_file = disasm_abc(abc_file)
    if not os.path.exists(pa_file):
        logging.error(f"pa file not exist: {pa_file}")
        return False

    marker = 'LOCAL_VARIABLE_TABLE'
    with open(pa_file, 'r', encoding='utf-8') as pa:
        has_debug_info_block = any(marker in line.strip() for line in pa)

    # Presence of the block is expected only in debug mode.
    return has_debug_info_block if is_debug else not has_debug_info_block
608
609
def validate_output_for_jsbundle(info, task, uncompressed_output_path, is_debug):
    """Validate jsbundle build products under the uncompressed hap directory.

    Checks every .abc file for correct debug info (skipping compatible8
    tasks), that the total abc size is non-zero, and — in debug mode — that
    each .abc has a matching .js.map sourcemap. On failure sets info.result
    and info.error_message and returns False; otherwise records
    info.abc_size and returns True.
    """
    abc_files = []
    for root, dirs, files in os.walk(uncompressed_output_path):
        for file in files:
            abc_files.append(os.path.join(root, file)) if file.endswith('.abc') else None

    # Re-collect with plain loop for clarity (keep behavior identical).
    abc_files = []
    for root, dirs, files in os.walk(uncompressed_output_path):
        for file in files:
            if file.endswith('.abc'):
                abc_files.append(os.path.join(root, file))

    total_size = 0
    for file in abc_files:
        # Entries in abc_files are already absolute paths, so there is no
        # need to re-join them with uncompressed_output_path (fix: the old
        # code joined twice, which only worked because join() discards the
        # first component when the second is absolute).
        total_size += os.path.getsize(file)
        if 'compatible8' not in task.type and not is_abc_debug_info_correct(file, is_debug):
            # skip compatible8 outputs as disasm may failed
            info.result = options.TaskResult.failed
            info.error_message = f"{file} debug info not correct"
            return False

    if total_size == 0:
        info.result = options.TaskResult.failed
        info.error_message = "abc not found or abc size is 0"
        return False
    else:
        info.abc_size = total_size

    if is_debug:
        # Every abc must ship with its sourcemap in debug builds.
        for file in abc_files:
            sourcemap_file = file.replace('.abc', '.js.map')
            if not os.path.exists(sourcemap_file):
                info.result = options.TaskResult.failed
                info.error_message = "sourcemap not found"
                return False

    return True
643
644
def _validate_single_abc(info, abc_path, is_debug):
    """Validate one abc file: exists, non-empty, debug info matches mode.

    Returns the file size on success; on failure sets info.result and
    info.error_message and returns None.
    """
    file_name = os.path.basename(abc_path)
    if not os.path.exists(abc_path):
        info.result = options.TaskResult.failed
        info.error_message = f"{file_name} not found"
        return None

    abc_size = os.path.getsize(abc_path)
    if abc_size <= 0:
        info.result = options.TaskResult.failed
        info.error_message = f"{file_name} size is 0"
        return None

    if not is_abc_debug_info_correct(abc_path, is_debug):
        info.result = options.TaskResult.failed
        info.error_message = f"{file_name} debug info not correct"
        return None

    return abc_size


def validate_output_for_esmodule(info, task, uncompressed_output_path, is_debug):
    """Validate esmodule build products under the uncompressed hap directory.

    Checks modules.abc (and widgets.abc for widget tasks) for existence,
    non-zero size, and mode-appropriate debug info, then checks that the
    sourcemap file exists. On failure sets info.result/info.error_message
    and returns False; otherwise records info.abc_size and returns True.
    The duplicated per-abc checks are factored into _validate_single_abc.
    """
    abc_generated_path = os.path.join(uncompressed_output_path, 'ets')

    modules_abc_size = _validate_single_abc(
        info, os.path.join(abc_generated_path, 'modules.abc'), is_debug)
    if modules_abc_size is None:
        return False
    info.abc_size = modules_abc_size

    if 'widget' in task.type:
        widgets_abc_size = _validate_single_abc(
            info, os.path.join(abc_generated_path, 'widgets.abc'), is_debug)
        if widgets_abc_size is None:
            return False
        info.abc_size += widgets_abc_size

    # Debug sourcemaps sit next to the abc; release sourcemaps live in the
    # compiler's release cache directory.
    if is_debug:
        sourcemap_path = abc_generated_path
    else:
        sourcemap_path = os.path.join(
            task.path, *(task.build_path), *(task.cache_path), 'release')
    sourcemap_file = os.path.join(sourcemap_path, 'sourceMaps.map')
    if not os.path.exists(sourcemap_file):
        info.result = options.TaskResult.failed
        info.error_message = "sourcemap not found"
        return False

    return True
695
696
def collect_compile_time(info, time_string):
    """Parse a hvigor duration string like '1 min 2 s 300 ms' and store the
    total number of seconds (rounded to 3 decimals) on info.time.
    """
    # Seconds contributed by each unit; a later occurrence of the same unit
    # overwrites an earlier one, matching hvigor's output format.
    components = {'min': 0.0, 's': 0.0, 'ms': 0.0}

    tokens = time_string.split()
    for idx, token in enumerate(tokens):
        if token not in components:
            continue
        value = float(tokens[idx - 1])
        if token == 'min':
            components['min'] = value * 60
        elif token == 's':
            components['s'] = value
        else:
            components['ms'] = round(value / 1000, 3)

    info.time = round(sum(components.values()), 3)
712
713
def get_compile_output_file_path(task, is_debug):
    """Return the build product path for the task: the hap path in debug
    mode, the app path in release mode.
    """
    relative_parts = task.output_hap_path if is_debug else task.output_app_path
    return os.path.join(task.path, *task.build_path, *relative_parts)
725
726
def validate_compile_output(info, task, is_debug, output_file=''):
    """Unzip the build product and validate its contents.

    When output_file is empty, the default product path for the task and
    mode is used. On any failure sets info.result/info.error_message and
    returns False; otherwise returns the result of the format-specific
    validation. The temporary extraction directory is removed afterwards.
    """
    if output_file == '':
        output_file = get_compile_output_file_path(task, is_debug)
    extract_dir = output_file + '.uncompressed'

    # Guard: the product must exist before we try to unzip it.
    if not os.path.exists(output_file):
        logging.error("output file for task %s not exists: %s",
                      task.name, output_file)
        info.result = options.TaskResult.failed
        info.error_message = "Hap not found"
        return False

    try:
        with zipfile.ZipFile(output_file, 'r') as archive:
            archive.extractall(extract_dir)
    except Exception as e:
        logging.error(f"unzip exception: {e}")
        logging.error(
            f"uncompressed output file for task {task.name} failed. output file: {output_file}")
        info.result = options.TaskResult.failed
        info.error_message = "Hap uncompressed failed, cannot exam build products"
        return False

    # Dispatch to the validator matching the task's module format.
    if utils.is_esmodule(task.type):
        passed = validate_output_for_esmodule(
            info, task, extract_dir, is_debug)
    else:
        passed = validate_output_for_jsbundle(
            info, task, extract_dir, is_debug)

    shutil.rmtree(extract_dir)
    return passed
765
766
def run_compile_output(info, task_path):
    """Runtime verification of the built hap (install and launch).

    Not implemented yet, so it always reports failure.
    """
    # TODO:
    # 1)install hap
    # 2)run hap and verify
    return False
772
773
def is_compile_success(compile_stdout):
    """Look for the hvigor success banner in the captured stdout.

    Returns a two-item list [ok, banner]: banner is the matched
    'BUILD SUCCESSFUL in ...' text (including the elapsed time) on
    success, or '' when the build did not succeed.
    """
    success_banner = re.compile(
        r"BUILD SUCCESSFUL in (\d+ min )?(\d+ s )?(\d+ ms)?")
    found = success_banner.search(compile_stdout)
    if found:
        return [True, found.group(0)]
    return [False, '']
781
782
def validate(compilation_info, task, is_debug, stdout, stderr, output_file=''):
    """Validate one compilation: success banner, build products, optional run.

    Records the result (and on failure the error message) on the matching
    debug/release info object of compilation_info, plus the compile time
    when everything passed. Returns True only when all checks passed.
    """
    if is_debug:
        info = compilation_info.debug_info
    else:
        info = compilation_info.release_info

    # ret_code will be 1 if there's stderr; use the "BUILD SUCCESSFUL"
    # banner in stdout as the authoritative success flag instead
    [is_success, time_string] = is_compile_success(stdout)
    if not is_success:
        info.result = options.TaskResult.failed
        info.error_message = stderr
        return False

    passed = validate_compile_output(info, task, is_debug, output_file)

    if options.arguments.run_haps:
        # bug fix: run_compile_output requires the project path as its
        # second argument; calling it with only info raised a TypeError
        passed &= run_compile_output(info, task.path)

    if passed:
        collect_compile_time(info, time_string)
        info.result = options.TaskResult.passed

    return passed
807
808
def get_hvigor_path(project_path):
    """Locate the project's hvigor wrapper script.

    On Windows this is hvigorw.bat; elsewhere it is hvigorw, which is
    additionally marked executable before being returned.
    """
    if utils.is_windows():
        return os.path.join(project_path, 'hvigorw.bat')
    hvigor = os.path.join(project_path, 'hvigorw')
    utils.add_executable_permission(hvigor)
    return hvigor
817
818
def get_hvigor_compile_cmd(project_path, is_debug):
    """Build the hvigor command line for this project.

    Debug builds assemble a hap, release builds assemble an app.
    """
    build_target = 'assembleHap' if is_debug else 'assembleApp'
    return [get_hvigor_path(project_path), build_target]
826
827
def compile_project(task, is_debug, cmd=''):
    """Run the hvigor build for a task and capture its output.

    Uses the default assemble command unless an explicit cmd is given.
    Returns [stdout, stderr] decoded as utf-8 (undecodable bytes are
    ignored). Raises subprocess.TimeoutExpired when the build exceeds
    options.arguments.compile_timeout.
    """
    if not cmd:
        cmd = get_hvigor_compile_cmd(task.path, is_debug)

    logging.debug(f'cmd: {cmd}')
    logging.debug(f"cmd execution path {task.path}")
    process = subprocess.Popen(cmd, shell=False, cwd=task.path,
                               stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    try:
        stdout, stderr = process.communicate(
            timeout=options.arguments.compile_timeout)
    except subprocess.TimeoutExpired:
        # bug fix: kill and reap the hung build before propagating,
        # otherwise the child process would outlive the test run
        process.kill()
        process.communicate()
        raise
    stdout_utf8 = stdout.decode("utf-8", errors="ignore")
    stderr_utf8 = stderr.decode("utf-8", errors="ignore")
    logging.debug(f"cmd stdout: {stdout_utf8}")
    logging.debug(f"cmd stderr: {stderr_utf8}")

    return [stdout_utf8, stderr_utf8]
844
845
def clean_compile(task):
    """Run 'hvigorw clean' to remove the task's previous build products.

    Output is discarded. Raises subprocess.TimeoutExpired when the clean
    exceeds options.arguments.compile_timeout.
    """
    cmd = [get_hvigor_path(task.path), 'clean']
    logging.debug(f'cmd: {cmd}')
    logging.debug(f"cmd execution path {task.path}")
    process = subprocess.Popen(cmd, shell=False, cwd=task.path,
                               stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    try:
        process.communicate(timeout=options.arguments.compile_timeout)
    except subprocess.TimeoutExpired:
        # bug fix: kill and reap the hung process before propagating,
        # otherwise the child would be left running after the timeout
        process.kill()
        process.communicate()
        raise
853
854
def compile_incremental(task, is_debug):
    """Run the incremental-compilation test suite for a task.

    Performs a first (full) build, validates it when running in pure
    incremental mode, backs up the products and compile cache, then
    exercises each incremental scenario.
    """
    logging.info(
        f"==========> Running task: {task.name} in incremental compilation")
    [stdout, stderr] = compile_project(task, is_debug)

    [is_success, time_string] = is_compile_success(stdout)
    if not is_success:
        logging.error(
            "Incremental compile failed due to first compile failed!")
        return

    if options.arguments.compile_mode == 'incremental':
        # in pure incremental mode the first build was not validated by
        # execute_full_compile, so validate it here before going further
        passed = validate(task.full_compilation_info,
                          task, is_debug, stdout, stderr)
        if not passed:
            # bug fix: this message previously duplicated the one above,
            # even though here the build succeeded and only its products
            # failed validation
            logging.error(
                "Incremental compile failed due to validation of first compile failed!")
            return

    backup_compile_output(task, is_debug)
    backup_compile_cache(task, is_debug)

    IncrementalTest.compile_incremental_no_modify(task, is_debug)
    IncrementalTest.compile_incremental_add_oneline(task, is_debug)
    IncrementalTest.compile_incremental_add_file(task, is_debug)
    IncrementalTest.compile_incremental_delete_file(task, is_debug)
    IncrementalTest.compile_incremental_reverse_hap_mode(task, is_debug)
    IncrementalTest.compile_incremental_modify_module_name(task, is_debug)
883
884
def backup_compile_output(task, is_debug):
    """Copy the current build product into the task's backup directory.

    The copy is renamed with a timestamp and its path is recorded on
    task.backup_info; at most two backups are kept per mode, further
    calls are no-ops.
    """
    backup_root = task.backup_info.cache_path
    if not os.path.exists(backup_root):
        os.mkdir(backup_root)

    if is_debug:
        recorded_backups = task.backup_info.output_debug
        mode_dir_name = 'debug'
    else:
        recorded_backups = task.backup_info.output_release
        mode_dir_name = 'release'

    # two snapshots per mode are enough for the consistency checks
    if len(recorded_backups) == 2:
        return

    backup_output_path = os.path.join(backup_root, 'output', mode_dir_name)
    if not os.path.exists(backup_output_path):
        os.makedirs(backup_output_path)

    product = get_compile_output_file_path(task, is_debug)
    shutil.copy(product, backup_output_path)
    copied_product = os.path.join(
        backup_output_path, os.path.basename(product))
    timestamped_product = copied_product + '-' + utils.get_time_string()
    shutil.move(copied_product, timestamped_product)

    # recorded_backups aliases the list on task.backup_info, so this
    # appends to task.backup_info.output_debug/output_release directly
    recorded_backups.append(timestamped_product)
917
918
def backup_compile_cache(task, is_debug):
    """Snapshot the task's incremental compile cache into the backup area.

    Only one snapshot is kept per mode; subsequent calls for a mode that
    already has a backup are no-ops. The backup path is recorded on
    task.backup_info.cache_debug / cache_release.
    """
    backup_root = task.backup_info.cache_path
    if not os.path.exists(backup_root):
        os.mkdir(backup_root)

    backup_cache_root = os.path.join(backup_root, 'cache')
    if not os.path.exists(backup_cache_root):
        os.mkdir(backup_cache_root)
    source_cache = os.path.join(
        task.path, *(task.build_path), *(task.cache_path))

    if is_debug:
        if task.backup_info.cache_debug != '':
            return
        snapshot = os.path.join(backup_cache_root, 'debug')
        shutil.copytree(os.path.join(source_cache, 'debug'), snapshot)
        task.backup_info.cache_debug = snapshot
    else:
        if task.backup_info.cache_release != '':
            return
        snapshot = os.path.join(backup_cache_root, 'release')
        shutil.copytree(os.path.join(source_cache, 'release'), snapshot)
        task.backup_info.cache_release = snapshot
946
947
def execute_full_compile(task):
    """Run full (clean) compilations for every requested hap mode.

    Each requested mode (release and/or debug, per
    options.arguments.hap_mode) is compiled from a clean state,
    validated, and backed up on success. Returns True only when every
    requested mode passed.
    """
    logging.info(f"==========> Running task: {task.name} in full compilation")
    clean_compile(task)
    mode_results = []
    if options.arguments.hap_mode in ['all', 'release']:
        [stdout, stderr] = compile_project(task, False)
        mode_passed = validate(task.full_compilation_info,
                               task, False, stdout, stderr)
        if mode_passed:
            backup_compile_output(task, False)
        clean_compile(task)
        mode_results.append(mode_passed)
    if options.arguments.hap_mode in ['all', 'debug']:
        [stdout, stderr] = compile_project(task, True)
        mode_passed = validate(task.full_compilation_info,
                               task, True, stdout, stderr)
        if mode_passed:
            backup_compile_output(task, True)
        clean_compile(task)
        mode_results.append(mode_passed)

    # bug fix: in 'all' mode the debug result used to overwrite the
    # release result, so a failed release build could still report
    # success; require every requested mode to pass instead
    return bool(mode_results) and all(mode_results)
968
969
def execute_incremental_compile(task):
    """Run the incremental-compilation suite for every requested hap mode."""
    logging.info(
        f"==========> Running task: {task.name} in incremental compilation")
    # release first, then debug, matching the full-compile order
    for debug_mode, mode_name in ((False, 'release'), (True, 'debug')):
        if options.arguments.hap_mode in ['all', mode_name]:
            compile_incremental(task, debug_mode)
            clean_compile(task)
979
980
def clean_backup(task):
    """Delete the task's backup directory if one was created."""
    backup_dir = task.backup_info.cache_path
    if os.path.exists(backup_dir):
        shutil.rmtree(backup_dir)
985
986
def execute(test_tasks):
    """Run the whole test suite for every task.

    Per task: full compilation (skipping the rest on failure),
    incremental compilation, and the extra OtherTest checks, all gated
    by the command-line arguments. A failure in one task never stops
    the remaining tasks, and the task's backup directory is always
    cleaned up.
    """
    for task in test_tasks:
        try:
            logging.info(f"======> Running task: {task.name}")
            if options.arguments.compile_mode in ['all', 'full']:
                if not execute_full_compile(task):
                    logging.info("Full compile failed, skip other tests!")
                    continue

            if options.arguments.compile_mode in ['all', 'incremental']:
                execute_incremental_compile(task)

            OtherTest.verify_binary_consistency(task)

            # for these tests, use one hapMode maybe enough
            is_debug = options.arguments.hap_mode == 'debug'
            OtherTest.execute_break_compile(task, is_debug)
            if 'error' in task.type:
                OtherTest.compile_full_with_error(task, is_debug)

            if 'exceed_length_error' in task.type:
                OtherTest.compile_with_exceed_length(task, is_debug)

            if 'ohosTest' in task.type:
                OtherTest.compile_ohos_test(task)

            # bug fix: log message previously read 'finised'
            logging.info(f"======> Running task: {task.name} finished")
        except Exception as e:
            logging.exception(e)
        finally:
            clean_backup(task)
1018