1#!/usr/bin/env python3 2# -*- coding: utf-8 -*- 3# 4# Copyright (c) 2021-2022 Huawei Device Co., Ltd. 5# Licensed under the Apache License, Version 2.0 (the "License"); 6# you may not use this file except in compliance with the License. 7# You may obtain a copy of the License at 8# 9# http://www.apache.org/licenses/LICENSE-2.0 10# 11# Unless required by applicable law or agreed to in writing, software 12# distributed under the License is distributed on an "AS IS" BASIS, 13# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14# See the License for the specific language governing permissions and 15# limitations under the License. 16 17from glob import glob 18from os import path 19from enum import Enum 20import argparse 21import fnmatch 22import multiprocessing 23import os 24import re 25import shutil 26import subprocess 27import sys 28from config import API_VERSION_MAP, MIN_SUPPORT_BC_VERSION, MIX_COMPILE_ENTRY_POINT 29 30 31def is_directory(parser, arg): 32 if not path.isdir(arg): 33 parser.error("The directory '%s' does not exist" % arg) 34 35 return path.abspath(arg) 36 37 38def is_file(parser, arg): 39 if not path.isfile(arg): 40 parser.error("The file '%s' does not exist" % arg) 41 42 return path.abspath(arg) 43 44def prepare_tsc_testcases(test_root): 45 third_party_tsc = path.join(test_root, "TypeScript") 46 ohos_third_party_tsc = path.join(test_root, "../../../../third_party/typescript") 47 48 if not path.isdir(third_party_tsc): 49 if (path.isdir(ohos_third_party_tsc)): 50 return path.abspath(ohos_third_party_tsc) 51 subprocess.run( 52 f"git clone https://gitee.com/openharmony/third_party_typescript.git {third_party_tsc}", 53 shell=True, 54 stdout=subprocess.DEVNULL, 55 ) 56 else: 57 subprocess.run( 58 f"cd {third_party_tsc} && git clean -f > /dev/null 2>&1", 59 shell=True, 60 stdout=subprocess.DEVNULL, 61 ) 62 return third_party_tsc 63 64def check_timeout(value): 65 ivalue = int(value) 66 if ivalue <= 0: 67 raise argparse.ArgumentTypeError( 68 "%s is an invalid timeout value" % value) 69 return ivalue 70 71 72def get_args(): 73 parser = argparse.ArgumentParser(description="Regression test runner") 74 parser.add_argument( 75 'build_dir', type=lambda arg: is_directory(parser, arg), 76 help='panda build directory') 77 parser.add_argument( 78 '--error', action='store_true', dest='error', default=False, 79 help='capture stderr') 80 parser.add_argument( 81 '--abc-to-asm', action='store_true', dest='abc_to_asm', 82 default=False, help='run abc2asm tests') 83 parser.add_argument( 84 '--regression', '-r', action='store_true', dest='regression', 85 default=False, help='run regression tests') 86 parser.add_argument( 87 '--compiler', '-c', action='store_true', dest='compiler', 88 default=False, help='run compiler tests') 89 parser.add_argument( 90 '--tsc', action='store_true', dest='tsc', 91 default=False, help='run tsc tests') 92 parser.add_argument( 93 '--no-progress', action='store_false', dest='progress', default=True, 94 help='don\'t show progress bar') 95 parser.add_argument( 96 '--no-skip', action='store_false', dest='skip', default=True, 97 help='don\'t use skiplists') 98 parser.add_argument( 99 '--update', action='store_true', dest='update', default=False, 100 help='update skiplist') 101 parser.add_argument( 102 '--no-run-gc-in-place', action='store_true', dest='no_gip', default=False, 103 help='enable --run-gc-in-place mode') 104 parser.add_argument( 105 '--filter', '-f', action='store', dest='filter', 106 default="*", help='test filter regexp') 107 parser.add_argument( 108 '--es2panda-timeout', type=check_timeout, 109 dest='es2panda_timeout', default=60, help='es2panda translator timeout') 110 parser.add_argument( 111 '--paoc-timeout', type=check_timeout, 112 dest='paoc_timeout', default=600, help='paoc compiler timeout') 113 parser.add_argument( 114 '--timeout', type=check_timeout, 115 dest='timeout', default=10, help='JS runtime timeout') 116 parser.add_argument( 117 '--gc-type', dest='gc_type', default="g1-gc", help='Type of garbage collector') 118 parser.add_argument( 119 '--aot', action='store_true', dest='aot', default=False, 120 help='use AOT compilation') 121 parser.add_argument( 122 '--no-bco', action='store_false', dest='bco', default=True, 123 help='disable bytecodeopt') 124 parser.add_argument( 125 '--jit', action='store_true', dest='jit', default=False, 126 help='use JIT in interpreter') 127 parser.add_argument( 128 '--arm64-compiler-skip', action='store_true', dest='arm64_compiler_skip', default=False, 129 help='use skiplist for tests failing on aarch64 in AOT or JIT mode') 130 parser.add_argument( 131 '--arm64-qemu', action='store_true', dest='arm64_qemu', default=False, 132 help='launch all binaries in qemu aarch64') 133 parser.add_argument( 134 '--arm32-qemu', action='store_true', dest='arm32_qemu', default=False, 135 help='launch all binaries in qemu arm') 136 parser.add_argument( 137 '--test-list', dest='test_list', default=None, type=lambda arg: is_file(parser, arg), 138 help='run tests listed in file') 139 parser.add_argument( 140 '--aot-args', action='append', dest='aot_args', default=[], 141 help='Additional arguments that will passed to ark_aot') 142 parser.add_argument( 143 '--verbose', '-v', action='store_true', dest='verbose', default=False, 144 help='Enable verbose output') 145 parser.add_argument( 146 '--js-runtime', dest='js_runtime_path', default=None, type=lambda arg: is_directory(parser, arg), 147 help='the path of js vm runtime') 148 parser.add_argument( 149 '--LD_LIBRARY_PATH', dest='ld_library_path', default=None, help='LD_LIBRARY_PATH') 150 parser.add_argument( 151 '--tsc-path', dest='tsc_path', default=None, type=lambda arg: is_directory(parser, arg), 152 help='the path of tsc') 153 parser.add_argument('--hotfix', dest='hotfix', action='store_true', default=False, 154 help='run hotfix tests') 155 parser.add_argument('--hotreload', dest='hotreload', action='store_true', default=False, 156 help='run hotreload tests') 157 parser.add_argument('--coldfix', dest='coldfix', action='store_true', default=False, 158 help='run coldfix tests') 159 parser.add_argument('--coldreload', dest='coldreload', action='store_true', default=False, 160 help='run coldreload tests') 161 parser.add_argument('--base64', dest='base64', action='store_true', default=False, 162 help='run base64 tests') 163 parser.add_argument('--bytecode', dest='bytecode', action='store_true', default=False, 164 help='run bytecode tests') 165 parser.add_argument('--debugger', dest='debugger', action='store_true', default=False, 166 help='run debugger tests') 167 parser.add_argument('--debug', dest='debug', action='store_true', default=False, 168 help='run debug tests') 169 parser.add_argument('--enable-arkguard', action='store_true', dest='enable_arkguard', default=False, 170 help='enable arkguard for compiler tests') 171 parser.add_argument('--aop-transform', dest='aop_transform', action='store_true', default=False, 172 help='run debug tests') 173 parser.add_argument('--version-control', action='store_true', dest='version_control', default=False, 174 help='run api version control tests') 175 176 return parser.parse_args() 177 178 179def run_subprocess_with_beta3(test_obj, cmd): 180 has_target_api = False 181 has_version_12 = False 182 has_sub_version = False 183 is_es2abc_cmd = False 184 185 for param in cmd: 186 if "es2abc" in param: 187 is_es2abc_cmd = True 188 if "--target-api-sub-version" in param: 189 has_sub_version = True 190 if "--target-api-version" in param: 191 has_target_api = True 192 if "12" in param: 193 has_version_12 = True 194 if is_es2abc_cmd and (not has_target_api or (has_version_12 and not has_sub_version)): 195 cmd.append("--target-api-sub-version=beta3") 196 if test_obj: 197 test_obj.log_cmd(cmd) 198 return subprocess.Popen( 199 cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) 200 201 202class Test: 203 def __init__(self, test_path, flags): 204 self.path = test_path 205 self.flags = flags 206 self.output = None 207 self.error = None 208 self.passed = None 209 self.skipped = None 210 self.reproduce = "" 211 212 def log_cmd(self, cmd): 213 self.reproduce += "\n" + ' '.join(cmd) 214 215 def get_path_to_expected(self): 216 if self.path.find(".d.ts") == -1: 217 return "%s-expected.txt" % (path.splitext(self.path)[0]) 218 return "%s-expected.txt" % (self.path[:self.path.find(".d.ts")]) 219 220 def run(self, runner): 221 test_abc_name = ("%s.abc" % (path.splitext(self.path)[0])).replace("/", "_") 222 test_abc_path = path.join(runner.build_dir, test_abc_name) 223 cmd = runner.cmd_prefix + [runner.es2panda] 224 cmd.extend(self.flags) 225 cmd.extend(["--output=" + test_abc_path]) 226 cmd.append(self.path) 227 process = run_subprocess_with_beta3(self, cmd) 228 out, err = process.communicate() 229 self.output = out.decode("utf-8", errors="ignore") + err.decode("utf-8", errors="ignore") 230 231 expected_path = self.get_path_to_expected() 232 try: 233 with open(expected_path, 'r') as fp: 234 expected = fp.read() 235 self.passed = expected == self.output and process.returncode in [ 236 0, 1] 237 except Exception: 238 self.passed = False 239 240 if not self.passed: 241 self.error = err.decode("utf-8", errors="ignore") 242 243 if os.path.exists(test_abc_path): 244 os.remove(test_abc_path) 245 246 return self 247 248 249class TSCTest(Test): 250 def __init__(self, test_path, flags): 251 Test.__init__(self, test_path, flags) 252 self.options = self.parse_options() 253 254 def parse_options(self): 255 test_options = {} 256 257 with open(self.path, "r", encoding="latin1") as f: 258 lines = f.read() 259 options = re.findall(r"//\s?@\w+:.*\n", lines) 260 261 for option in options: 262 separated = option.split(":") 263 opt = re.findall(r"\w+", separated[0])[0].lower() 264 value = separated[1].strip().lower() 265 266 if opt == "filename": 267 if opt in options: 268 test_options[opt].append(value) 269 else: 270 test_options[opt] = [value] 271 272 elif opt == "lib" or opt == "module": 273 test_options[opt] = [each.strip() 274 for each in value.split(",")] 275 elif value == "true" or value == "false": 276 test_options[opt] = value.lower() == "true" 277 else: 278 test_options[opt] = value 279 280 # TODO: Possibility of error: all exports will be catched, even the commented ones 281 if 'module' not in test_options and re.search(r"export ", lines): 282 test_options['module'] = [] 283 284 return test_options 285 286 def run(self, runner): 287 cmd = runner.cmd_prefix + [runner.es2panda, '--parse-only'] 288 cmd.extend(self.flags) 289 if "module" in self.options: 290 cmd.append('--module') 291 cmd.append(self.path) 292 process = run_subprocess_with_beta3(self, cmd) 293 out, err = process.communicate() 294 self.output = out.decode("utf-8", errors="ignore") 295 296 self.passed = True if process.returncode == 0 else False 297 298 if not self.passed: 299 self.error = err.decode("utf-8", errors="ignore") 300 301 return self 302 303 304class TestAop: 305 def __init__(self, cmd, compare_str, compare_abc_str, remove_file): 306 self.cmd = cmd 307 self.compare_str = compare_str 308 self.compare_abc_str = compare_abc_str 309 self.remove_file = remove_file 310 self.path = '' 311 self.output = None 312 self.error = None 313 self.passed = None 314 self.skipped = None 315 self.reproduce = "" 316 317 def log_cmd(self, cmd): 318 self.reproduce += ''.join(["\n", ' '.join(cmd)]) 319 320 def run(self, runner): 321 cmd = self.cmd 322 process = run_subprocess_with_beta3(self, cmd) 323 out, err = process.communicate() 324 self.output = out.decode("utf-8", errors="ignore") + err.decode("utf-8", errors="ignore") 325 326 if self.compare_str == '': 327 self.passed = True 328 else : 329 self.passed = self.output.startswith(self.compare_str) and process.returncode in [0, 1] 330 if self.remove_file != '' and os.path.exists(self.remove_file): 331 os.remove(self.remove_file) 332 333 if not self.passed: 334 self.error = err.decode("utf-8", errors="ignore") 335 336 abc_path = path.join(os.getcwd(), 'test_aop.abc') 337 if os.path.exists(abc_path): 338 if self.compare_abc_str != '': 339 with open(abc_path, "r") as abc_file: 340 self.passed = self.passed and abc_file.read() == self.compare_abc_str 341 os.remove(abc_path) 342 343 return self 344 345 346class Runner: 347 def __init__(self, args, name): 348 self.test_root = path.dirname(path.abspath(__file__)) 349 self.args = args 350 self.name = name 351 self.tests = [] 352 self.failed = 0 353 self.passed = 0 354 self.es2panda = path.join(args.build_dir, 'es2abc') 355 self.build_dir = args.build_dir 356 self.cmd_prefix = [] 357 self.ark_js_vm = "" 358 self.ark_aot_compiler = "" 359 self.ld_library_path = "" 360 361 if args.js_runtime_path: 362 self.ark_js_vm = path.join(args.js_runtime_path, 'ark_js_vm') 363 self.ark_aot_compiler = path.join(args.js_runtime_path, 'ark_aot_compiler') 364 365 if args.ld_library_path: 366 self.ld_library_path = args.ld_library_path 367 368 if args.arm64_qemu: 369 self.cmd_prefix = ["qemu-aarch64", "-L", "/usr/aarch64-linux-gnu/"] 370 371 if args.arm32_qemu: 372 self.cmd_prefix = ["qemu-arm", "-L", "/usr/arm-linux-gnueabi"] 373 374 if not path.isfile(self.es2panda): 375 raise Exception("Cannot find es2panda binary: %s" % self.es2panda) 376 377 def add_directory(self, directory, extension, flags): 378 pass 379 380 def test_path(self, src): 381 pass 382 383 def run_test(self, test): 384 return test.run(self) 385 386 def run(self): 387 pool = multiprocessing.Pool() 388 result_iter = pool.imap_unordered( 389 self.run_test, self.tests, chunksize=32) 390 pool.close() 391 392 if self.args.progress: 393 from tqdm import tqdm 394 result_iter = tqdm(result_iter, total=len(self.tests)) 395 396 results = [] 397 for res in result_iter: 398 results.append(res) 399 400 self.tests = results 401 pool.join() 402 403 def deal_error(self, test): 404 path_str = test.path 405 err_col = {} 406 if test.error: 407 err_str = test.error.split('[')[0] if "patchfix" not in test.path else " patchfix throw error failed" 408 err_col = {"path" : [path_str], "status": ["fail"], "error" : [test.error], "type" : [err_str]} 409 else: 410 err_col = {"path" : [path_str], "status": ["fail"], "error" : ["Segmentation fault"], 411 "type" : ["Segmentation fault"]} 412 return err_col 413 414 def summarize(self): 415 print("") 416 fail_list = [] 417 success_list = [] 418 419 for test in self.tests: 420 assert(test.passed is not None) 421 if not test.passed: 422 fail_list.append(test) 423 else: 424 success_list.append(test) 425 426 if len(fail_list): 427 if self.args.error: 428 import pandas as pd 429 test_list = pd.DataFrame(columns=["path", "status", "error", "type"]) 430 for test in success_list: 431 suc_col = {"path" : [test.path], "status": ["success"], "error" : ["success"], "type" : ["success"]} 432 if self.args.error: 433 test_list = pd.concat([test_list, pd.DataFrame(suc_col)]) 434 print("Failed tests:") 435 for test in fail_list: 436 print(self.test_path(test.path)) 437 438 if self.args.error: 439 print("steps:", test.reproduce) 440 print("error:") 441 print(test.error) 442 print("\n") 443 err_col = self.deal_error(test) 444 test_list = pd.concat([test_list, pd.DataFrame(err_col)]) 445 446 if self.args.error: 447 test_list.to_csv('test_statistics.csv', index=False) 448 test_list["type"].value_counts().to_csv('type_statistics.csv', index_label="error") 449 print("Type statistics:\n", test_list["type"].value_counts()) 450 print("") 451 452 print("Summary(%s):" % self.name) 453 print("\033[37mTotal: %5d" % (len(self.tests))) 454 print("\033[92mPassed: %5d" % (len(self.tests) - len(fail_list))) 455 print("\033[91mFailed: %5d" % (len(fail_list))) 456 print("\033[0m") 457 458 return len(fail_list) 459 460 461class RegressionRunner(Runner): 462 def __init__(self, args): 463 Runner.__init__(self, args, "Regression") 464 465 def add_directory(self, directory, extension, flags, func=Test): 466 glob_expression = path.join( 467 self.test_root, directory, "*.%s" % (extension)) 468 files = glob(glob_expression) 469 files = fnmatch.filter(files, self.test_root + '**' + self.args.filter) 470 471 self.tests += list(map(lambda f: func(f, flags), files)) 472 473 def test_path(self, src): 474 return src 475 476 477class AbcToAsmRunner(Runner): 478 def __init__(self, args, is_debug): 479 Runner.__init__(self, args, "Abc2asm" if not is_debug else "Abc2asmDebug") 480 self.is_debug = is_debug 481 482 def add_directory(self, directory, extension, flags, func=Test): 483 glob_expression = path.join( 484 self.test_root, directory, "*.%s" % (extension)) 485 files = glob(glob_expression) 486 files = fnmatch.filter(files, self.test_root + '**' + self.args.filter) 487 488 self.tests += list(map(lambda f: AbcToAsmTest(f, flags, self.is_debug), files)) 489 490 def test_path(self, src): 491 return os.path.basename(src) 492 493 494class AbcToAsmTest(Test): 495 def __init__(self, test_path, flags, is_debug): 496 Test.__init__(self, test_path, flags) 497 self.is_debug = is_debug 498 499 def run(self, runner): 500 output_abc_file = ("%s.abc" % (path.splitext(self.path)[0])).replace("/", "_") 501 # source code compilation, generate an abc file 502 gen_abc_cmd = runner.cmd_prefix + [runner.es2panda] 503 if (self.is_debug): 504 gen_abc_cmd.extend(["--debug-info"]) 505 gen_abc_cmd.extend(["--module", "--dump-normalized-asm-program", "--output=" + output_abc_file]) 506 gen_abc_cmd.append(self.path) 507 process_gen_abc = run_subprocess_with_beta3(self, gen_abc_cmd) 508 gen_abc_out, gen_abc_err = process_gen_abc.communicate() 509 gen_abc_output = gen_abc_out.decode("utf-8", errors="ignore") 510 511 # If no abc file is generated, an error occurs during parser, but abc2asm function is normal. 512 if not os.path.exists(output_abc_file): 513 self.passed = True 514 return self 515 516 # abc file compilation 517 abc_to_asm_cmd = runner.cmd_prefix + [runner.es2panda] 518 if (self.is_debug): 519 abc_to_asm_cmd.extend(["--debug-info"]) 520 abc_to_asm_cmd.extend(["--module", "--dump-normalized-asm-program", "--enable-abc-input"]) 521 abc_to_asm_cmd.append(output_abc_file) 522 process_abc_to_asm = run_subprocess_with_beta3(self, abc_to_asm_cmd) 523 abc_to_asm_out, abc_to_asm_err = process_abc_to_asm.communicate() 524 abc_to_asm_output = abc_to_asm_out.decode("utf-8", errors="ignore") 525 526 self.passed = gen_abc_output == abc_to_asm_output and process_abc_to_asm.returncode in [0, 1] 527 if not self.passed: 528 self.error = "Comparison of dump results between source code compilation and abc file compilation failed." 529 if gen_abc_err: 530 self.error += "\n" + gen_abc_err.decode("utf-8", errors="ignore") 531 if abc_to_asm_err: 532 self.error += "\n" + abc_to_asm_err.decode("utf-8", errors="ignore") 533 534 os.remove(output_abc_file) 535 return self 536 537 538class TSCRunner(Runner): 539 def __init__(self, args): 540 Runner.__init__(self, args, "TSC") 541 542 if self.args.tsc_path: 543 self.tsc_path = self.args.tsc_path 544 else : 545 self.tsc_path = prepare_tsc_testcases(self.test_root) 546 547 self.add_directory("conformance", []) 548 self.add_directory("compiler", []) 549 550 def add_directory(self, directory, flags): 551 ts_suite_dir = path.join(self.tsc_path, 'tests/cases') 552 553 glob_expression = path.join( 554 ts_suite_dir, directory, "**/*.ts") 555 files = glob(glob_expression, recursive=True) 556 files = fnmatch.filter(files, ts_suite_dir + '**' + self.args.filter) 557 558 for f in files: 559 test_name = path.basename(f.split(".ts")[0]) 560 negative_references = path.join( 561 self.tsc_path, 'tests/baselines/reference') 562 is_negative = path.isfile(path.join(negative_references, 563 test_name + ".errors.txt")) 564 test = TSCTest(f, flags) 565 566 if 'target' in test.options: 567 targets = test.options['target'].replace(" ", "").split(',') 568 for target in targets: 569 if path.isfile(path.join(negative_references, 570 test_name + "(target=%s).errors.txt" % (target))): 571 is_negative = True 572 break 573 574 if is_negative or "filename" in test.options: 575 continue 576 577 with open(path.join(self.test_root, 'test_tsc_ignore_list.txt'), 'r') as failed_references: 578 if self.args.skip: 579 if path.relpath(f, self.tsc_path) in failed_references.read(): 580 continue 581 582 self.tests.append(test) 583 584 def test_path(self, src): 585 return src 586 587 588class CompilerRunner(Runner): 589 def __init__(self, args): 590 Runner.__init__(self, args, "Compiler") 591 592 def add_directory(self, directory, extension, flags): 593 if directory.endswith("projects"): 594 projects_path = path.join(self.test_root, directory) 595 for project in os.listdir(projects_path): 596 glob_expression = path.join(projects_path, project, "**/*.%s" % (extension)) 597 files = glob(glob_expression, recursive=True) 598 files = fnmatch.filter(files, self.test_root + '**' + self.args.filter) 599 self.tests.append(CompilerProjectTest(projects_path, project, files, flags)) 600 elif directory.endswith("protobin"): 601 test_path = path.join(self.test_root, directory) 602 for project in os.listdir(test_path): 603 self.tests.append(CompilerProtobinTest(path.join(test_path, project), flags)) 604 elif directory.endswith("abc2program"): 605 test_path = path.join(self.test_root, directory) 606 for project in os.listdir(test_path): 607 self.tests.append(CompilerAbcFileTest(path.join(test_path, project), flags)) 608 else: 609 glob_expression = path.join( 610 self.test_root, directory, "**/*.%s" % (extension)) 611 files = glob(glob_expression, recursive=True) 612 files = fnmatch.filter(files, self.test_root + '**' + self.args.filter) 613 self.tests += list(map(lambda f: CompilerTest(f, flags), files)) 614 615 def test_path(self, src): 616 return src 617 618 619class CompilerTest(Test): 620 def __init__(self, test_path, flags): 621 Test.__init__(self, test_path, flags) 622 623 def execute_arkguard(self, runner): 624 input_file_path = self.path 625 arkguard_root_dir = os.path.join(runner.test_root, "../../arkguard") 626 arkgurad_entry_path = os.path.join(arkguard_root_dir, "lib/cli/SecHarmony.js") 627 config_path = os.path.join(arkguard_root_dir, "test/compilerTestConfig.json") 628 arkguard_cmd = [ 629 'node', 630 '--no-warnings', 631 arkgurad_entry_path, 632 input_file_path, 633 '--config-path', 634 config_path, 635 '--inplace' 636 ] 637 self.log_cmd(arkguard_cmd) 638 process = subprocess.Popen(arkguard_cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) 639 out, err = process.communicate() 640 process.wait() 641 success = True 642 if err or process.returncode != 0: 643 success = False 644 self.passed = False 645 self.error = err.decode("utf-8", errors="ignore") 646 return success 647 648 def run(self, runner): 649 test_abc_name = ("%s.abc" % (path.splitext(self.path)[0])).replace("/", "_") 650 test_abc_path = path.join(runner.build_dir, test_abc_name) 651 es2abc_cmd = runner.cmd_prefix + [runner.es2panda] 652 es2abc_cmd.extend(self.flags) 653 es2abc_cmd.extend(["--output=" + test_abc_path]) 654 es2abc_cmd.append(self.path) 655 enable_arkguard = runner.args.enable_arkguard 656 if enable_arkguard: 657 success = self.execute_arkguard(runner) 658 if not success: 659 return self 660 661 process = run_subprocess_with_beta3(self, es2abc_cmd) 662 out, err = process.communicate() 663 if "--dump-assembly" in self.flags: 664 pa_expected_path = "".join([self.get_path_to_expected()[:self.get_path_to_expected().rfind(".txt")], 665 ".pa.txt"]) 666 self.output = out.decode("utf-8", errors="ignore") + err.decode("utf-8", errors="ignore") 667 try: 668 with open(pa_expected_path, 'r') as fp: 669 expected = fp.read() 670 self.passed = expected == self.output and process.returncode in [0, 1] 671 except Exception: 672 self.passed = False 673 if not self.passed: 674 self.error = err.decode("utf-8", errors="ignore") 675 if os.path.exists(test_abc_path): 676 os.remove(test_abc_path) 677 return self 678 if "--dump-debug-info" in self.flags: 679 self.output = out.decode("utf-8", errors="ignore") + err.decode("utf-8", errors="ignore") 680 try: 681 with open(self.get_path_to_expected(), 'r') as fp: 682 expected = fp.read() 683 self.passed = expected == self.output and process.returncode in [0, 1] 684 if os.path.exists(test_abc_path): 685 os.remove(test_abc_path) 686 return self 687 except Exception: 688 self.passed = False 689 if not self.passed: 690 self.error = err.decode("utf-8", errors="ignore") 691 if os.path.exists(test_abc_path): 692 os.remove(test_abc_path) 693 return self 694 if err: 695 self.passed = False 696 self.error = err.decode("utf-8", errors="ignore") 697 return self 698 699 ld_library_path = runner.ld_library_path 700 os.environ.setdefault("LD_LIBRARY_PATH", ld_library_path) 701 run_abc_cmd = [runner.ark_js_vm, '--enable-force-gc=false', test_abc_path] 702 self.log_cmd(run_abc_cmd) 703 704 process = subprocess.Popen(run_abc_cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) 705 out, err = process.communicate() 706 self.output = out.decode("utf-8", errors="ignore") + err.decode("utf-8", errors="ignore") 707 expected_path = self.get_path_to_expected() 708 try: 709 with open(expected_path, 'r') as fp: 710 expected = fp.read() 711 self.passed = expected == self.output and process.returncode in [0, 1] 712 except Exception: 713 self.passed = False 714 715 if not self.passed: 716 self.error = err.decode("utf-8", errors="ignore") 717 718 os.remove(test_abc_path) 719 720 return self 721 722 723class CompilerAbcFileTest(Test): 724 def __init__(self, test_dir, flags): 725 Test.__init__(self, test_dir, flags) 726 self.test_dir = test_dir 727 self.generated_path = os.path.join(self.test_dir, "gen") 728 if not path.exists(self.generated_path): 729 os.makedirs(self.generated_path) 730 self.original_abc_path = os.path.join(self.generated_path, "original.abc") 731 self.output_path = os.path.join(self.generated_path, "result.abc") 732 self.original_test = os.path.join(self.test_dir, "base.ts") 733 self.expected_path = os.path.join(self.test_dir, "expected.txt") 734 735 def remove_test_build(self, runner): 736 if path.exists(self.generated_path): 737 shutil.rmtree(self.generated_path) 738 739 def gen_abc(self, runner, test_path, output_path, flags): 740 es2abc_cmd = runner.cmd_prefix + [runner.es2panda] 741 es2abc_cmd.extend(['%s%s' % ("--output=", output_path)]) 742 es2abc_cmd.extend(flags) 743 es2abc_cmd.append(test_path) 744 process = run_subprocess_with_beta3(self, es2abc_cmd) 745 out, err = process.communicate() 746 if err: 747 self.passed = False 748 self.error = err.decode("utf-8", errors="ignore") 749 self.remove_test_build(runner) 750 return self 751 752 def run(self, runner): 753 new_flags = self.flags 754 # Generate 'abc' from the source file 755 self.gen_abc(runner, self.original_test, self.original_abc_path, new_flags) 756 # Generate 'abc' from the abc file 757 new_flags = self.flags 758 compile_context_info_path = path.join(self.test_dir, "compileContextInfo.json") 759 if path.exists(compile_context_info_path): 760 new_flags.append("%s%s" % ("--compile-context-info=", compile_context_info_path)) 761 es2abc_cmd = runner.cmd_prefix + [runner.es2panda] 762 es2abc_cmd.append('%s%s' % ("--output=", self.output_path)) 763 es2abc_cmd.append(self.original_abc_path) 764 es2abc_cmd.extend(new_flags) 765 process = run_subprocess_with_beta3(self, es2abc_cmd) 766 out, err = process.communicate() 767 self.output = out.decode("utf-8", errors="ignore") + err.decode("utf-8", errors="ignore") 768 try: 769 with open(self.expected_path, 'r') as fp: 770 expected = fp.read() 771 self.passed = expected == self.output and process.returncode in [0, 1] 772 except Exception: 773 self.passed = False 774 if not self.passed: 775 self.remove_test_build(runner) 776 return self 777 self.remove_test_build(runner) 778 return self 779 780 781class CompilerProtobinTest(Test): 782 def __init__(self, test_dir, flags): 783 Test.__init__(self, test_dir, flags) 784 self.test_dir = test_dir 785 self.generated_path = os.path.join(self.test_dir, "gen") 786 if not path.exists(self.generated_path): 787 os.makedirs(self.generated_path) 788 self.protobin_path = os.path.join(self.generated_path, "cache.protobin") 789 self.original_abc_path = os.path.join(self.generated_path, "base.abc") 790 self.output_path = os.path.join(self.generated_path, "module.abc") 791 self.original_test = os.path.join(self.test_dir, "base.ts") 792 self.modify_test = os.path.join(self.test_dir, "base_mod.ts") 793 self.expected_path = os.path.join(self.test_dir, "expected.txt") 794 795 def remove_test_build(self, runner): 796 if path.exists(self.generated_path): 797 shutil.rmtree(self.generated_path) 798 799 def gen_merge_abc(self, runner, test_path, need_cache, output_path): 800 es2abc_cmd = runner.cmd_prefix + [runner.es2panda] 801 es2abc_cmd.extend(["--merge-abc"]) 802 if need_cache: 803 es2abc_cmd.extend(["--enable-abc-input", '%s%s' % ("--cache-file=", self.protobin_path)]) 804 es2abc_cmd.extend(['%s%s' % ("--output=", output_path)]) 805 es2abc_cmd.append(test_path) 806 process = run_subprocess_with_beta3(self, es2abc_cmd) 807 out, err = process.communicate() 808 if err: 809 self.passed = False 810 self.error = err.decode("utf-8", errors="ignore") 811 self.remove_test_build(runner) 812 return self 813 814 def run(self, runner): 815 # Generate 'abc' from the source file before modifying it 816 self.gen_merge_abc(runner, self.original_test, False, self.original_abc_path) 817 # Generate protobin from the abc file before modifying it 818 self.gen_merge_abc(runner, self.original_abc_path, True, self.output_path) 819 # Modify the original abc file 820 self.gen_merge_abc(runner, self.modify_test, False, self.original_abc_path) 821 # Compile based on the modified abc file 822 self.gen_merge_abc(runner, self.original_abc_path, True, self.output_path) 823 ld_library_path = runner.ld_library_path 824 os.environ.setdefault("LD_LIBRARY_PATH", ld_library_path) 825 run_abc_cmd = [runner.ark_js_vm, '--entry-point=base', self.output_path] 826 self.log_cmd(run_abc_cmd) 827 828 process = subprocess.Popen(run_abc_cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) 829 out, err = process.communicate() 830 self.output = out.decode("utf-8", errors="ignore") + err.decode("utf-8", errors="ignore") 831 try: 832 with open(self.expected_path, 'r') as fp: 833 expected = fp.read() 834 self.passed = expected == self.output and process.returncode in [0, 1] 835 except Exception: 836 self.passed = False 837 if not self.passed: 838 self.remove_test_build(runner) 839 return self 840 self.remove_test_build(runner) 841 return self 842 843 844class CompilerProjectTest(Test): 845 def __init__(self, projects_path, project, test_paths, flags): 846 Test.__init__(self, "", flags) 847 self.projects_path = projects_path 848 self.project = project 849 self.test_paths = test_paths 850 self.files_info_path = os.path.join(os.path.join(self.projects_path, self.project), 'filesInfo.txt') 851 self.files_info_mod_path = os.path.join(os.path.join(self.projects_path, self.project), 'filesInfoModify.txt') 852 # Skip execution if --dump-assembly exists in flags 853 self.requires_execution = "--dump-assembly" not in self.flags 854 self.file_record_mapping = None 855 self.generated_abc_inputs_path = os.path.join(os.path.join(self.projects_path, self.project), "abcinputs_gen") 856 self.abc_input_filenames = None 857 self.protoBin_file_path = "" 858 self.record_names_path = os.path.join(os.path.join(self.projects_path, self.project), 'recordnames.txt') 859 self.abc_inputs_path = os.path.join(os.path.join(self.projects_path, self.project), 'abcinputs') 860 # Modify the hap file path 861 self.project_mod_path = os.path.join(os.path.join(self.projects_path, self.project), 'mod') 862 self.modules_cache_path = os.path.join(os.path.join(self.projects_path, self.project), 'modulescache.cache') 863 self.deps_json_path = os.path.join(os.path.join(self.projects_path, self.project), 'deps-json.json') 864 # Merge hap need to modify package name 865 self.modifyPkgNamePath = os.path.join(os.path.join(self.projects_path, self.project), 'modify_pkg_name.txt') 866 867 def remove_project(self, runner): 868 project_path = runner.build_dir + "/" + self.project 869 if path.exists(project_path): 870 shutil.rmtree(project_path) 871 if path.exists(self.files_info_path): 872 os.remove(self.files_info_path) 873 if path.exists(self.files_info_mod_path): 874 os.remove(self.files_info_mod_path) 875 if path.exists(self.generated_abc_inputs_path): 876 shutil.rmtree(self.generated_abc_inputs_path) 877 if path.exists(self.protoBin_file_path): 878 os.remove(self.protoBin_file_path) 879 if path.exists(self.modules_cache_path): 880 self.remove_cache_files() 881 882 def remove_cache_files(self): 883 if path.exists(self.modules_cache_path): 884 with open(self.modules_cache_path) as cache_fp: 885 cache_lines = cache_fp.readlines() 886 for cache_line in cache_lines: 887 cache_file_path = cache_line[:-1].split(";")[1] 888 if path.exists(cache_file_path): 889 os.remove(cache_file_path) 890 os.remove(self.modules_cache_path) 891 892 def get_file_absolute_path_and_name(self, runner): 893 sub_path = self.path[len(self.projects_path):] 894 file_relative_path = path.split(sub_path)[0] 895 file_name = path.split(sub_path)[1] 896 file_absolute_path = runner.build_dir + "/" + file_relative_path 897 return [file_absolute_path, file_name] 898 899 def gen_single_abc(self, runner): 900 for test_path in self.test_paths: 901 self.path = test_path 902 [file_absolute_path, file_name] = self.get_file_absolute_path_and_name(runner) 903 if not path.exists(file_absolute_path): 904 os.makedirs(file_absolute_path) 905 906 test_abc_name = ("%s.abc" % (path.splitext(file_name)[0])) 907 test_abc_path = path.join(file_absolute_path, test_abc_name) 908 es2abc_cmd = runner.cmd_prefix + [runner.es2panda] 909 es2abc_cmd.extend(self.flags) 910 es2abc_cmd.extend(['%s%s' % ("--output=", test_abc_path)]) 911 es2abc_cmd.append(self.path) 912 process = run_subprocess_with_beta3(self, es2abc_cmd) 913 out, err = process.communicate() 914 if err: 915 self.passed = False 916 self.error = err.decode("utf-8", errors="ignore") 917 self.remove_project(runner) 918 return self 919 920 def collect_record_mapping(self): 921 # Collect record mappings from recordnames.txt, file format: 922 # 'source_file_name:record_name\n' * n 923 if path.exists(self.record_names_path): 924 with open(self.record_names_path) as mapping_fp: 925 mapping_lines = mapping_fp.readlines() 926 self.file_record_mapping = {} 927 for mapping_line in mapping_lines: 928 cur_mapping = mapping_line[:-1].split(":") 929 self.file_record_mapping[cur_mapping[0]] = cur_mapping[1] 930 931 def get_record_name(self, test_path): 932 record_name = os.path.relpath(test_path, os.path.dirname(self.files_info_path)).split('.')[0] 933 if (self.file_record_mapping is not None and record_name in self.file_record_mapping): 934 record_name = self.file_record_mapping[record_name] 935 return record_name 936 937 def collect_abc_inputs(self, runner): 938 # Collect abc input information from the 'abcinputs' directory. Each txt file in the directory 939 # will generate a merged abc file with the same filename and serve as the final abc input. 940 # file format: 'source_file_name.ts\n' * n 941 if not path.exists(self.abc_inputs_path): 942 return 943 if not path.exists(self.generated_abc_inputs_path): 944 os.makedirs(self.generated_abc_inputs_path) 945 self.abc_input_filenames = {} 946 filenames = os.listdir(self.abc_inputs_path) 947 for filename in filenames: 948 if not filename.endswith('.txt'): 949 self.remove_project(runner) 950 raise Exception("Invalid abc input file: %s, only txt files are allowed in abcinputs directory: %s" 951 % (filename, self.abc_inputs_path)) 952 with open(path.join(self.abc_inputs_path, filename)) as abc_inputs_fp: 953 abc_inputs_lines = abc_inputs_fp.readlines() 954 for abc_input_line in abc_inputs_lines: 955 # filename is 'xxx.txt', remove '.txt' here 956 self.abc_input_filenames[abc_input_line[:-1]] = filename[:-len('.txt')] 957 958 def get_belonging_abc_input(self, test_path): 959 filename = os.path.relpath(test_path, os.path.dirname(self.files_info_path)) 960 if (self.abc_input_filenames is not None and filename in self.abc_input_filenames): 961 return self.abc_input_filenames[filename] 962 return None 963 964 def gen_abc_input_files_infos(self, runner, abc_files_infos, final_file_info_f, mod_files_info): 965 for abc_files_info_name in abc_files_infos: 966 abc_files_info = abc_files_infos[abc_files_info_name] 967 if len(abc_files_info) != 0: 968 abc_input_path = path.join(self.generated_abc_inputs_path, abc_files_info_name) 969 abc_files_info_path = ("%s-filesInfo.txt" % (abc_input_path)) 970 abc_files_info_fd = os.open(abc_files_info_path, os.O_RDWR | os.O_CREAT | os.O_TRUNC) 971 abc_files_info_f = os.fdopen(abc_files_info_fd, 'w') 972 abc_files_info_f.writelines(abc_files_info) 973 abc_line = '%s-abcinput.abc;;;;%s;\n' % (abc_input_path, abc_files_info_name) 974 mod_files_info.append(abc_line) 975 final_file_info_f.writelines(abc_line) 976 977 978 def gen_files_info(self, runner): 979 # After collect_record_mapping, self.file_record_mapping stores {'source file name' : 'source file record name'} 980 self.collect_record_mapping() 981 # After collect_abc_inputs, self.abc_input_filenames stores {'source file name' : 'belonging abc input name'} 982 self.collect_abc_inputs(runner) 983 984 fd = os.open(self.files_info_path, os.O_RDWR | os.O_CREAT | os.O_TRUNC) 985 f = os.fdopen(fd, 'w') 986 mod_files_info = [] 987 abc_files_infos = {} 988 for test_path in self.test_paths: 989 record_name = self.get_record_name(test_path) 990 module_kind = 'esm' 991 if (os.path.basename(test_path).startswith("commonjs")): 992 module_kind = 'commonjs' 993 is_shared_module = 'false' 994 if (os.path.basename(test_path).startswith("sharedmodule")): 995 is_shared_module = 'true' 996 file_info = ('%s;%s;%s;%s;%s;%s\n' % (test_path, record_name, module_kind, 997 os.path.relpath(test_path, self.projects_path), record_name, 998 is_shared_module)) 999 belonging_abc_input = self.get_belonging_abc_input(test_path) 1000 if belonging_abc_input is not None: 1001 if not belonging_abc_input in abc_files_infos: 1002 abc_files_infos[belonging_abc_input] = [] 1003 abc_files_infos[belonging_abc_input].append(file_info) 1004 elif test_path.startswith(self.project_mod_path): 1005 mod_files_info.append(file_info) 1006 else: 1007 mod_files_info.append(file_info) 1008 f.writelines(file_info) 1009 if (os.path.exists(self.deps_json_path)): 1010 record_name = self.get_record_name(self.deps_json_path) 1011 file_info = ('%s;%s;%s;%s;%s;%s\n' % (self.deps_json_path, record_name, 'esm', 1012 os.path.relpath(self.deps_json_path, self.projects_path), record_name, 1013 'false')) 1014 f.writelines(file_info) 1015 self.gen_abc_input_files_infos(runner, abc_files_infos, f, mod_files_info) 1016 f.close() 1017 if (os.path.exists(self.project_mod_path)): 1018 mod_fd = os.open(self.files_info_mod_path, os.O_RDWR | os.O_CREAT | os.O_TRUNC) 1019 mod_f = os.fdopen(mod_fd, 'w') 1020 for file_line in mod_files_info: 1021 mod_f.writelines(file_line) 1022 mod_f.close() 1023 1024 def gen_modules_cache(self, runner): 1025 if "--cache-file" not in self.flags or "--file-threads=0" in self.flags: 1026 return 1027 fd = os.open(self.modules_cache_path, os.O_RDWR | os.O_CREAT | os.O_TRUNC) 1028 f = os.fdopen(fd, 'w') 1029 abc_files = set() 1030 for test_path in self.test_paths: 1031 cache_info = ('%s;%s\n' % (test_path, f"{test_path.rsplit('.', 1)[0]}.protobin")) 1032 belonging_abc_input = self.get_belonging_abc_input(test_path) 1033 if belonging_abc_input is not None: 1034 abc_files.add(belonging_abc_input) 1035 else: 1036 f.writelines(cache_info) 1037 for abc_path in abc_files: 1038 abc_input_path = f"{path.join(self.generated_abc_inputs_path, abc_path)}-abcinput.abc" 1039 cache_info = ('%s;%s\n' % (abc_input_path, f"{abc_input_path.rsplit('.', 1)[0]}.protobin")) 1040 f.writelines(cache_info) 1041 f.close() 1042 1043 def gen_es2abc_cmd(self, runner, input_file, output_file): 1044 es2abc_cmd = runner.cmd_prefix + [runner.es2panda] 1045 1046 new_flags = self.flags 1047 if "--cache-file" in new_flags and len(self.test_paths) == 1: 1048 # Generate cache-file test case in single thread 1049 new_flags.remove("--cache-file") 1050 protobin_path = f"{self.test_paths[0].rsplit('.', 1)[0]}.protobin" 1051 self.protoBin_file_path = protobin_path 1052 es2abc_cmd.append('--cache-file=%s' % (protobin_path)) 1053 elif "--cache-file" in self.flags and output_file.endswith("-abcinput.abc"): 1054 # Generate abc for bytecode har 1055 new_flags = list(filter(lambda x: x != "--cache-file", new_flags)) 1056 elif "--cache-file" in self.flags: 1057 new_flags = list(filter(lambda x: x != "--cache-file", new_flags)) 1058 es2abc_cmd.append('--cache-file') 1059 es2abc_cmd.append('@%s' % (self.modules_cache_path)) 1060 1061 es2abc_cmd.extend(new_flags) 1062 es2abc_cmd.extend(['%s%s' % ("--output=", output_file)]) 1063 es2abc_cmd.append(input_file) 1064 return es2abc_cmd 1065 1066 def gen_merged_abc_for_abc_input(self, runner, files_info_name): 1067 self.passed = True 1068 if not files_info_name.endswith(".txt"): 1069 return 1070 abc_input_files_info_path = path.join(self.generated_abc_inputs_path, files_info_name) 1071 abc_input_merged_abc_path = path.join(self.generated_abc_inputs_path, 1072 '%s-abcinput.abc' % (files_info_name[:-len('-filesInfo.txt')])) 1073 1074 abc_input_file_path = '@' + abc_input_files_info_path 1075 if "unmerged_abc_input" in self.generated_abc_inputs_path: 1076 self.flags.remove("--merge-abc") 1077 with open(abc_input_files_info_path, 'r') as fp: 1078 abc_input_file_path = fp.read().split(';')[0] 1079 1080 es2abc_cmd = self.gen_es2abc_cmd(runner, abc_input_file_path, abc_input_merged_abc_path) 1081 process = run_subprocess_with_beta3(self, es2abc_cmd) 1082 out, err = process.communicate() 1083 if err: 1084 self.passed = False 1085 self.error = err.decode("utf-8", errors="ignore") 1086 1087 def gen_merged_abc(self, runner): 1088 # Generate abc inputs 1089 if (os.path.exists(self.generated_abc_inputs_path)): 1090 files_info_names = os.listdir(self.generated_abc_inputs_path) 1091 for filename in files_info_names: 1092 self.gen_merged_abc_for_abc_input(runner, filename) 1093 if (not self.passed): 1094 self.remove_project(runner) 1095 return self 1096 # Generate the abc to be tested 1097 for test_path in self.test_paths: 1098 self.path = test_path 1099 if (self.path.endswith("-exec.ts")) or (self.path.endswith("-exec.js")): 1100 exec_file_path = self.path 1101 [file_absolute_path, file_name] = self.get_file_absolute_path_and_name(runner) 1102 if not path.exists(file_absolute_path): 1103 os.makedirs(file_absolute_path) 1104 test_abc_name = ("%s.abc" % (path.splitext(file_name)[0])) 1105 output_abc_name = path.join(file_absolute_path, test_abc_name) 1106 if "merge_hap" in self.projects_path: 1107 exec_file_path = os.path.join(self.projects_path, self.project) 1108 exec_file_path = os.path.join(exec_file_path, "main_hap") 1109 [file_absolute_path, file_name] = self.get_file_absolute_path_and_name(runner) 1110 if not path.exists(file_absolute_path): 1111 os.makedirs(file_absolute_path) 1112 output_abc_name = path.join(file_absolute_path, "merge_hap.abc") 1113 # reverse merge-abc flag 1114 if "merge_abc_consistence_check" in self.path: 1115 if "--merge-abc" in self.flags: 1116 self.flags.remove("--merge-abc") 1117 else: 1118 self.flags.append("--merge-abc") 1119 1120 es2abc_cmd = self.gen_es2abc_cmd(runner, '@' + self.files_info_path, output_abc_name) 1121 if "--cache-file" in self.flags and len(self.test_paths) == 1: 1122 es2abc_cmd = self.gen_es2abc_cmd(runner, self.test_paths[0], output_abc_name) 1123 else: 1124 es2abc_cmd = self.gen_es2abc_cmd(runner, '@' + self.files_info_path, output_abc_name) 1125 compile_context_info_path = path.join(path.join(self.projects_path, self.project), "compileContextInfo.json") 1126 if path.exists(compile_context_info_path): 1127 es2abc_cmd.append("%s%s" % ("--compile-context-info=", compile_context_info_path)) 1128 if path.exists(self.modifyPkgNamePath): 1129 with open(self.modifyPkgNamePath, 'r') as file: 1130 modifyPkgName = file.readline().rstrip('\n') 1131 pkgNames = modifyPkgName.split(":") 1132 es2abc_cmd.append("--src-package-name=%s" % pkgNames[0]) 1133 es2abc_cmd.append("--dst-package-name=%s" % pkgNames[1]) 1134 process = run_subprocess_with_beta3(self, es2abc_cmd) 1135 self.path = exec_file_path 1136 out, err = [None, None] 1137 1138 # Check single-thread execution timeout when required 1139 if "--file-threads=0" in self.flags: 1140 try: 1141 out, err = process.communicate(timeout=60) 1142 except: 1143 process.kill() 1144 print("Generating the abc file timed out.") 1145 else: 1146 out, err = process.communicate() 1147 1148 if "--cache-file" in self.flags: 1149 # Firstly generate cache file, and generate abc from cache file 1150 if (os.path.exists(self.project_mod_path)): 1151 es2abc_cmd = self.gen_es2abc_cmd(runner, '@' + self.files_info_mod_path, output_abc_name) 1152 compile_context_info_path = path.join(path.join(self.projects_path, self.project), "compileContextInfo.json") 1153 if path.exists(compile_context_info_path): 1154 es2abc_cmd.append("%s%s" % ("--compile-context-info=", compile_context_info_path)) 1155 process = run_subprocess_with_beta3(self, es2abc_cmd) 1156 out, err = process.communicate() 1157 else: 1158 process = run_subprocess_with_beta3(self, es2abc_cmd) 1159 out, err = process.communicate() 1160 1161 # restore merge-abc flag 1162 if "merge_abc_consistence_check" in self.path and "--merge-abc" not in self.flags: 1163 self.flags.append("--merge-abc") 1164 1165 # Check dump-assembly outputs when required 1166 if "--dump-assembly" in self.flags: 1167 pa_expected_path = "".join([self.get_path_to_expected()[:self.get_path_to_expected().rfind(".txt")], 1168 ".pa.txt"]) 1169 self.output = out.decode("utf-8", errors="ignore") + err.decode("utf-8", errors="ignore") 1170 if "merge_abc_consistence_check" in self.path: 1171 self.output = self.output.split('.')[0] 1172 try: 1173 with open(pa_expected_path, 'r') as fp: 1174 expected = fp.read() 1175 self.passed = expected == self.output and process.returncode in [0, 1] 1176 except Exception: 1177 self.passed = False 1178 if not self.passed: 1179 self.error = err.decode("utf-8", errors="ignore") 1180 self.remove_project(runner) 1181 return self 1182 else: 1183 return self 1184 1185 if err: 1186 self.passed = False 1187 self.error = err.decode("utf-8", errors="ignore") 1188 self.remove_project(runner) 1189 return self 1190 1191 def run(self, runner): 1192 # Compile all ts source files in the project to abc files. 1193 if ("--merge-abc" in self.flags): 1194 self.gen_files_info(runner) 1195 self.gen_modules_cache(runner) 1196 self.gen_merged_abc(runner) 1197 else: 1198 self.gen_single_abc(runner) 1199 1200 if (not self.requires_execution): 1201 self.remove_project(runner) 1202 return self 1203 1204 # Run test files that need to be executed in the project. 1205 for test_path in self.test_paths: 1206 self.path = test_path 1207 if self.path.endswith("-exec.ts"): 1208 [file_absolute_path, file_name] = self.get_file_absolute_path_and_name(runner) 1209 1210 entry_point_name = path.splitext(file_name)[0] 1211 test_abc_name = ("%s.abc" % entry_point_name) 1212 test_abc_path = path.join(file_absolute_path, test_abc_name) 1213 1214 ld_library_path = runner.ld_library_path 1215 os.environ.setdefault("LD_LIBRARY_PATH", ld_library_path) 1216 run_abc_cmd = [runner.ark_js_vm] 1217 if ("--merge-abc" in self.flags): 1218 run_abc_cmd.extend(["--entry-point", entry_point_name]) 1219 run_abc_cmd.extend([test_abc_path]) 1220 self.log_cmd(run_abc_cmd) 1221 1222 process = subprocess.Popen(run_abc_cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) 1223 out, err = process.communicate() 1224 self.output = out.decode("utf-8", errors="ignore") + err.decode("utf-8", errors="ignore") 1225 expected_path = self.get_path_to_expected() 1226 try: 1227 with open(expected_path, 'r') as fp: 1228 expected = fp.read() 1229 self.passed = expected == self.output and process.returncode in [0, 1] 1230 except Exception: 1231 self.passed = False 1232 1233 if not self.passed: 1234 self.error = err.decode("utf-8", errors="ignore") 1235 self.remove_project(runner) 1236 return self 1237 1238 self.passed = True 1239 1240 self.remove_project(runner) 1241 return self 1242 1243 1244class FilesInfoRunner(Runner): 1245 def __init__(self, args): 1246 Runner.__init__(self, args, "FilesInfo") 1247 1248 def add_directory(self, directory, extension, flags): 1249 projects_path = path.join(self.test_root, directory) 1250 test_projects = ["base", "mod"] 1251 for project in test_projects: 1252 filesinfo_path = path.join(projects_path, project, "filesInfo.txt") 1253 self.tests.append(FilesInfoTest(projects_path, project, filesinfo_path, flags)) 1254 1255 def test_path(self, src): 1256 return src 1257 1258 1259class FilesInfoTest(Test): 1260 def __init__(self, projects_path, project, filesinfo_path, flags): 1261 Test.__init__(self, "", flags) 1262 self.projects_path = projects_path 1263 self.output_path = path.join(projects_path, "output") 1264 self.project = project 1265 self.origin_filesinfo_path = filesinfo_path 1266 self.files_info_path = path.join(self.output_path, self.project + "filesInfo.txt") 1267 self.path = path.join(self.projects_path, self.project) 1268 self.symbol_table_file = os.path.join(self.output_path, 'base.map') 1269 self.output_abc_name = path.join(self.output_path, self.project + ".abc") 1270 self.output_abc_name_of_input_abc = path.join(self.output_path, self.project + "_input.abc") 1271 1272 if not path.exists(self.output_path): 1273 os.makedirs(self.output_path) 1274 1275 def gen_files_info(self): 1276 with open(self.origin_filesinfo_path, 'r') as src, open(self.files_info_path, 'w') as dst: 1277 for line in src: 1278 dst.write(f"{path.join(self.projects_path, self.project)}/{line}") 1279 1280 def remove_output(self): 1281 shutil.rmtree(self.output_path) 1282 1283 def remove_project(self, runner): 1284 if self.project == "mod": # clear after all tests 1285 self.remove_output() 1286 1287 def gen_es2abc_cmd(self, runner, input_file, output_file): 1288 es2abc_cmd = runner.cmd_prefix + [runner.es2panda] 1289 es2abc_cmd.extend(self.flags) 1290 es2abc_cmd.extend(['%s%s' % ("--output=", output_file)]) 1291 es2abc_cmd.append(input_file) 1292 if ("base" == self.project): 1293 es2abc_cmd.extend(['--dump-symbol-table', self.symbol_table_file]) 1294 else: 1295 es2abc_cmd.extend(['--input-symbol-table', self.symbol_table_file]) 1296 return es2abc_cmd 1297 1298 def gen_es2abc_cmd_input_abc(self, runner, input_file, output_file): 1299 es2abc_cmd = runner.cmd_prefix + [runner.es2panda] 1300 es2abc_cmd.extend(self.flags) 1301 es2abc_cmd.extend(['%s%s' % ("--output=", output_file), "--enable-abc-input"]) 1302 es2abc_cmd.append(input_file) 1303 return es2abc_cmd 1304 1305 def gen_merged_abc(self, runner): 1306 # Generate the abc to be tested 1307 es2abc_cmd = self.gen_es2abc_cmd(runner, '@' + self.files_info_path, self.output_abc_name) 1308 process = run_subprocess_with_beta3(self, es2abc_cmd) 1309 out, err = process.communicate() 1310 1311 # Gen abc and verify it 1312 pa_expected_path = "".join([self.get_path_to_expected()[:self.get_path_to_expected().rfind(".txt")], 1313 ".pa.txt"]) 1314 self.output = out.decode("utf-8", errors="ignore") + err.decode("utf-8", errors="ignore") 1315 try: 1316 with open(pa_expected_path, 'r') as fp: 1317 expected = fp.read() 1318 self.passed = expected == self.output and process.returncode in [0, 1] 1319 except Exception: 1320 self.passed = False 1321 if not self.passed: 1322 self.error = err.decode("utf-8", errors="ignore") 1323 return self 1324 1325 # Input abc and verify it when it is base. 1326 if self.project == "base": 1327 self.input_abc(runner) 1328 return self 1329 1330 def input_abc(self, runner): 1331 es2abc_cmd = self.gen_es2abc_cmd_input_abc(runner, self.output_abc_name, self.output_abc_name_of_input_abc) 1332 process = run_subprocess_with_beta3(self, es2abc_cmd) 1333 out, err = process.communicate() 1334 pa_expected_path = "".join([self.path, "input_base-expected.pa.txt"]) 1335 self.output = out.decode("utf-8", errors="ignore") + err.decode("utf-8", errors="ignore") 1336 try: 1337 with open(pa_expected_path, 'r') as fp: 1338 expected = fp.read() 1339 self.passed = expected == self.output and process.returncode in [0, 1] 1340 except Exception: 1341 self.passed = False 1342 if not self.passed: 1343 self.error = err.decode("utf-8", errors="ignore") 1344 return self 1345 1346 1347 def run(self, runner): 1348 self.gen_files_info() 1349 self.gen_merged_abc(runner) 1350 self.remove_project(runner) 1351 return self 1352 1353 1354class TSDeclarationTest(Test): 1355 def get_path_to_expected(self): 1356 file_name = self.path[:self.path.find(".d.ts")] 1357 return "%s-expected.txt" % file_name 1358 1359 1360class BcVersionRunner(Runner): 1361 def __init__(self, args): 1362 Runner.__init__(self, args, "Target bc version") 1363 self.ts2abc = path.join(self.test_root, '..', 'scripts', 'ts2abc.js') 1364 1365 def add_cmd(self): 1366 api_sub_version_list = ["beta1", "beta2", "beta3"] 1367 for api_version in range(8, 17): 1368 cmd = self.cmd_prefix + [self.es2panda] 1369 cmd += ["--target-bc-version"] 1370 cmd += ["--target-api-version"] 1371 cmd += [str(api_version)] 1372 self.tests += [BcVersionTest(cmd, api_version)] 1373 node_cmd = ["node"] + [self.ts2abc] 1374 node_cmd += ["".join(["es2abc=", self.es2panda])] 1375 node_cmd += ["--target-api-version"] 1376 node_cmd += [str(api_version)] 1377 self.tests += [BcVersionTest(node_cmd, api_version)] 1378 1379 # Add tests for "--target-api-sub-version" option 1380 if api_version == 12: 1381 for api_sub_version in api_sub_version_list: 1382 new_cmd = cmd.copy() 1383 new_cmd += ["--target-api-sub-version", api_sub_version] 1384 self.tests += [BcVersionTest(new_cmd, str(api_version) + '_' + api_sub_version)] 1385 new_node_cmd = node_cmd.copy() 1386 new_node_cmd += ["--target-api-sub-version", api_sub_version] 1387 self.tests += [BcVersionTest(new_node_cmd, str(api_version) + '_' + api_sub_version)] 1388 1389 def run(self): 1390 for test in self.tests: 1391 test.run() 1392 1393 1394class BcVersionTest(Test): 1395 def __init__(self, cmd, api_version): 1396 Test.__init__(self, "", 0) 1397 self.cmd = cmd 1398 self.api_version = api_version 1399 # To avoid problems when api version is upgraded abruptly, 1400 # the corresponding bytecode version of the api version not written in isa.yaml is alaways the newest version. 1401 self.bc_version_expect = { 1402 8: "13.0.1.0", 1403 9: "9.0.0.0", 1404 10: "9.0.0.0", 1405 11: "11.0.2.0", 1406 12: "12.0.2.0", 1407 "12_beta1": "12.0.2.0", 1408 "12_beta2": "12.0.2.0", 1409 "12_beta3": "12.0.6.0", 1410 13: "12.0.6.0", 1411 14: "12.0.6.0", 1412 15: "12.0.6.0", 1413 16: "12.0.6.0", 1414 17: "12.0.6.0", 1415 18: "13.0.1.0" 1416 } 1417 self.es2abc_script_expect = { 1418 8: "0.0.0.2", 1419 9: "9.0.0.0", 1420 10: "9.0.0.0", 1421 11: "11.0.2.0", 1422 12: "12.0.2.0", 1423 "12_beta1": "12.0.2.0", 1424 "12_beta2": "12.0.2.0", 1425 "12_beta3": "12.0.6.0", 1426 13: "12.0.6.0", 1427 14: "12.0.6.0", 1428 15: "12.0.6.0", 1429 16: "12.0.6.0", 1430 17: "12.0.6.0", 1431 18: "13.0.1.0" 1432 } 1433 1434 def run(self): 1435 process = subprocess.Popen(self.cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) 1436 out, err = process.communicate() 1437 self.output = out.decode("utf-8", errors="ignore") + err.decode("utf-8", errors="ignore") 1438 if self.cmd[0] == "node": 1439 self.passed = self.es2abc_script_expect.get(self.api_version) == self.output and process.returncode in [0, 1] 1440 else: 1441 self.passed = self.bc_version_expect.get(self.api_version) == self.output and process.returncode in [0, 1] 1442 if not self.passed: 1443 self.error = err.decode("utf-8", errors="ignore") 1444 return self 1445 1446 1447class TransformerRunner(Runner): 1448 def __init__(self, args): 1449 Runner.__init__(self, args, "Transformer") 1450 1451 def add_directory(self, directory, extension, flags): 1452 glob_expression = path.join( 1453 self.test_root, directory, "**/*.%s" % (extension)) 1454 files = glob(glob_expression, recursive=True) 1455 files = fnmatch.filter(files, self.test_root + '**' + self.args.filter) 1456 1457 self.tests += list(map(lambda f: TransformerTest(f, flags), files)) 1458 1459 def test_path(self, src): 1460 return src 1461 1462 1463class TransformerInTargetApiVersion10Runner(Runner): 1464 def __init__(self, args): 1465 Runner.__init__(self, args, "TransformerInTargetApiVersion10") 1466 1467 def add_directory(self, directory, extension, flags): 1468 glob_expression = path.join( 1469 self.test_root, directory, "**/*.%s" % (extension)) 1470 files = glob(glob_expression, recursive=True) 1471 files = fnmatch.filter(files, self.test_root + '**' + self.args.filter) 1472 1473 self.tests += list(map(lambda f: TransformerTest(f, flags), files)) 1474 1475 def test_path(self, src): 1476 return src 1477 1478 1479class TransformerTest(Test): 1480 def __init__(self, test_path, flags): 1481 Test.__init__(self, test_path, flags) 1482 1483 def get_path_to_expected(self): 1484 return "%s-transformed-expected.txt" % (path.splitext(self.path)[0]) 1485 1486 def run(self, runner): 1487 cmd = runner.cmd_prefix + [runner.es2panda] 1488 cmd.extend(self.flags) 1489 cmd.append(self.path) 1490 process = run_subprocess_with_beta3(self, cmd) 1491 out, err = process.communicate() 1492 self.output = out.decode("utf-8", errors="ignore") + err.decode("utf-8", errors="ignore") 1493 1494 expected_path = self.get_path_to_expected() 1495 try: 1496 with open(expected_path, 'r') as fp: 1497 expected = fp.read() 1498 self.passed = expected == self.output and process.returncode in [0, 1] 1499 except Exception: 1500 self.passed = False 1501 1502 if not self.passed: 1503 self.error = err.decode("utf-8", errors="ignore") 1504 1505 return self 1506 1507 1508class PatchTest(Test): 1509 def __init__(self, test_path, mode_arg, target_version, preserve_files): 1510 Test.__init__(self, test_path, "") 1511 self.mode = mode_arg 1512 self.target_version = target_version 1513 self.preserve_files = preserve_files 1514 1515 def is_ts_test_cases(self): 1516 return 'ts_test_cases' in os.path.normpath(self.path).split(os.sep) 1517 1518 def need_to_merge(self): 1519 # If the test case is in the 'export and import' directory, it needs to be merged. 1520 if os.path.split(self.path)[-2] == 'export-and-import': 1521 return True 1522 else: 1523 return False 1524 1525 def gen_files_infos(self, modified=False): 1526 if not self.need_to_merge(): 1527 return 1528 1529 file_suffix = '.ts' if self.is_ts_test_cases() else '.js' 1530 file_name_list = [] 1531 for file_name in os.listdir(self.path): 1532 if file_name.endswith(file_suffix): 1533 if '_mod' in file_name and modified: 1534 file_name_list.append(file_name) 1535 elif not '_mod' in file_name and not modified: 1536 file_name_list.append(file_name) 1537 1538 files_info_txt = os.path.join(self.path, 'filesInfo.txt') 1539 with open(files_info_txt, 'w', encoding='utf-8') as file: 1540 for file_name in file_name_list: 1541 file_path = os.path.join(self.path, file_name) 1542 file_prev, file_extension = os.path.splitext(file_name) 1543 abc_file_path = os.path.join(self.path, f"{file_prev}.abc") 1544 file.write(f'{file_path};{file_name};{file_name};esm;{file_name};false\n') 1545 1546 def gen_cmd(self, runner): 1547 symbol_table_file = os.path.join(self.path, 'base.map') 1548 origin_input_file = 'base.js' if not self.is_ts_test_cases() else 'base.ts' 1549 origin_output_abc = os.path.join(self.path, 'base.abc') 1550 modified_input_file = 'base_mod.js' if not self.is_ts_test_cases() else 'base_mod.ts' 1551 modified_output_abc = os.path.join(self.path, 'patch.abc') 1552 files_info_txt = os.path.join(self.path, 'filesInfo.txt') 1553 target_version_cmd = "" 1554 if self.target_version > 0: 1555 target_version_cmd = "--target-api-version=" + str(self.target_version) 1556 1557 gen_base_cmd = runner.cmd_prefix + [runner.es2panda, '--module', target_version_cmd] 1558 if self.need_to_merge(): 1559 gen_base_cmd = gen_base_cmd.append('--merge-abc') 1560 if 'record-name-with-dots' in os.path.basename(self.path): 1561 gen_base_cmd.extend(['--merge-abc', '--record-name=record.name.with.dots']) 1562 gen_base_cmd.extend(['--dump-symbol-table', symbol_table_file]) 1563 gen_base_cmd.extend(['--output', origin_output_abc]) 1564 if not self.need_to_merge(): 1565 gen_base_cmd.extend([os.path.join(self.path, origin_input_file)]) 1566 else: 1567 gen_base_cmd.extend(['--debug-info', f'@{files_info_txt}']) 1568 self.log_cmd(gen_base_cmd) 1569 1570 if self.mode == 'hotfix': 1571 mode_arg = ["--generate-patch"] 1572 elif self.mode == 'hotreload': 1573 mode_arg = ["--hot-reload"] 1574 elif self.mode == 'coldfix': 1575 mode_arg = ["--generate-patch", "--cold-fix"] 1576 elif self.mode == 'coldreload': 1577 mode_arg = ["--cold-reload"] 1578 1579 patch_test_cmd = runner.cmd_prefix + [runner.es2panda, '--module', target_version_cmd] 1580 if self.need_to_merge(): 1581 patch_test_cmd = patch_test_cmd.append('--merge-abc') 1582 patch_test_cmd.extend(mode_arg) 1583 patch_test_cmd.extend(['--input-symbol-table', symbol_table_file]) 1584 patch_test_cmd.extend(['--output', modified_output_abc]) 1585 if not self.need_to_merge(): 1586 patch_test_cmd.extend([os.path.join(self.path, modified_input_file)]) 1587 else: 1588 patch_test_cmd.extend(['--debug-info', f'@{files_info_txt}']) 1589 if 'record-name-with-dots' in os.path.basename(self.path): 1590 patch_test_cmd.extend(['--merge-abc', '--record-name=record.name.with.dots']) 1591 dump_assembly_testname = [ 1592 'modify-anon-content-keep-origin-name', 1593 'modify-class-memeber-function', 1594 'exist-lexenv-3', 1595 'lexenv-reduce', 1596 'lexenv-increase'] 1597 for name in dump_assembly_testname: 1598 if name in os.path.basename(self.path): 1599 patch_test_cmd.extend(['--dump-assembly']) 1600 self.log_cmd(patch_test_cmd) 1601 1602 return gen_base_cmd, patch_test_cmd, symbol_table_file, origin_output_abc, modified_output_abc 1603 1604 def run(self, runner): 1605 gen_base_cmd, patch_test_cmd, symbol_table_file, origin_output_abc, modified_output_abc = self.gen_cmd(runner) 1606 1607 self.gen_files_infos(False) 1608 process_base = run_subprocess_with_beta3(None, gen_base_cmd) 1609 stdout_base, stderr_base = process_base.communicate(timeout=runner.args.es2panda_timeout) 1610 if stderr_base: 1611 self.passed = False 1612 self.error = stderr_base.decode("utf-8", errors="ignore") 1613 self.output = stdout_base.decode("utf-8", errors="ignore") + stderr_base.decode("utf-8", errors="ignore") 1614 else: 1615 self.gen_files_infos(True) 1616 process_patch = run_subprocess_with_beta3(None, patch_test_cmd) 1617 process_patch = subprocess.Popen(patch_test_cmd, stdout=subprocess.PIPE, 1618 stderr=subprocess.PIPE) 1619 stdout_patch, stderr_patch = process_patch.communicate(timeout=runner.args.es2panda_timeout) 1620 if stderr_patch: 1621 self.passed = False 1622 self.error = stderr_patch.decode("utf-8", errors="ignore") 1623 self.output = stdout_patch.decode("utf-8", errors="ignore") + stderr_patch.decode("utf-8", errors="ignore") 1624 1625 expected_path = os.path.join(self.path, 'expected.txt') 1626 try: 1627 with open(expected_path, 'r') as fp: 1628 # ignore license description lines and skip leading blank lines 1629 expected = (''.join((fp.readlines()[12:]))).lstrip() 1630 self.passed = expected == self.output 1631 except Exception: 1632 self.passed = False 1633 1634 if not self.passed: 1635 self.error = "expected output:" + os.linesep + expected + os.linesep + "actual output:" + os.linesep +\ 1636 self.output 1637 files_info_txt = os.path.join(self.path, 'filesInfo.txt') 1638 if not self.preserve_files: 1639 os.remove(symbol_table_file) 1640 os.remove(origin_output_abc) 1641 if (os.path.exists(modified_output_abc)): 1642 os.remove(modified_output_abc) 1643 if (os.path.exists(files_info_txt)): 1644 os.remove(files_info_txt) 1645 return self 1646 1647 1648class PatchRunner(Runner): 1649 def __init__(self, args, name): 1650 Runner.__init__(self, args, name) 1651 self.preserve_files = args.error 1652 self.tests_in_dirs = [] 1653 dirs = os.listdir(path.join(self.test_root, "patch")) 1654 for target_version_path in dirs: 1655 self.add_tests(target_version_path, name) 1656 1657 def add_tests(self, target_version_path, name): 1658 name_dir = os.path.join(self.test_root, "patch", target_version_path, name) 1659 if not os.path.exists(name_dir): 1660 return 1661 target_version = 0 1662 if target_version_path.isdigit(): 1663 target_version = int(target_version_path) 1664 for sub_path in os.listdir(name_dir): 1665 test_base_path = os.path.join(name_dir, sub_path) 1666 if name != "coldreload": 1667 for dirpath, dirnames, filenames in os.walk(test_base_path): 1668 if not dirnames: 1669 self.tests_in_dirs.append(dirpath) 1670 self.tests.append(PatchTest(dirpath, name, target_version, self.preserve_files)) 1671 else: 1672 self.tests_in_dirs.append(test_base_path) 1673 self.tests.append(PatchTest(test_base_path, name, target_version, self.preserve_files)) 1674 1675 def test_path(self, src): 1676 return os.path.basename(src) 1677 1678 1679class HotfixRunner(PatchRunner): 1680 def __init__(self, args): 1681 PatchRunner.__init__(self, args, "hotfix") 1682 1683 1684class HotreloadRunner(PatchRunner): 1685 def __init__(self, args): 1686 PatchRunner.__init__(self, args, "hotreload") 1687 1688 1689class ColdfixRunner(PatchRunner): 1690 def __init__(self, args): 1691 PatchRunner.__init__(self, args, "coldfix") 1692 1693 1694class ColdreloadRunner(PatchRunner): 1695 def __init__(self, args): 1696 PatchRunner.__init__(self, args, "coldreload") 1697 1698 1699class DebuggerTest(Test): 1700 def __init__(self, test_path, mode): 1701 Test.__init__(self, test_path, "") 1702 self.mode = mode 1703 1704 def run(self, runner): 1705 cmd = runner.cmd_prefix + [runner.es2panda, "--module"] 1706 input_file_name = 'base.js' 1707 if self.mode == "debug-mode": 1708 cmd.extend(['--debug-info']) 1709 cmd.extend([os.path.join(self.path, input_file_name)]) 1710 cmd.extend(['--dump-assembly']) 1711 process = run_subprocess_with_beta3(self, cmd) 1712 stdout, stderr = process.communicate(timeout=runner.args.es2panda_timeout) 1713 if stderr: 1714 self.passed = False 1715 self.error = stderr.decode("utf-8", errors="ignore") 1716 return self 1717 1718 self.output = stdout.decode("utf-8", errors="ignore") 1719 1720 expected_path = os.path.join(self.path, 'expected.txt') 1721 try: 1722 with open(expected_path, 'r') as fp: 1723 expected = (''.join((fp.readlines()[12:]))).lstrip() 1724 self.passed = expected == self.output 1725 except Exception: 1726 self.passed = False 1727 1728 if not self.passed: 1729 self.error = "expected output:" + os.linesep + expected + os.linesep + "actual output:" + os.linesep +\ 1730 self.output 1731 1732 if os.path.exists("base.abc"): 1733 os.remove("base.abc") 1734 1735 return self 1736 1737 1738class DebuggerRunner(Runner): 1739 def __init__(self, args): 1740 Runner.__init__(self, args, "debugger") 1741 self.test_directory = path.join(self.test_root, "debugger") 1742 self.add_test() 1743 1744 def add_test(self): 1745 self.tests = [] 1746 self.tests.append(DebuggerTest(os.path.join(self.test_directory, "debugger-in-debug"), "debug-mode")) 1747 self.tests.append(DebuggerTest(os.path.join(self.test_directory, "debugger-in-release"), "release-mode")) 1748 1749 1750class Base64Test(Test): 1751 def __init__(self, test_path, input_type): 1752 Test.__init__(self, test_path, "") 1753 self.input_type = input_type 1754 1755 def run(self, runner): 1756 cmd = runner.cmd_prefix + [runner.es2panda, "--base64Output"] 1757 if self.input_type == "file": 1758 input_file_name = 'input.js' 1759 cmd.extend(['--source-file', input_file_name]) 1760 cmd.extend([os.path.join(self.path, input_file_name)]) 1761 elif self.input_type == "string": 1762 input_file = os.path.join(self.path, "input.txt") 1763 try: 1764 with open(input_file, 'r') as fp: 1765 base64_input = (''.join((fp.readlines()[12:]))).lstrip() # ignore license description lines 1766 cmd.extend(["--base64Input", base64_input]) 1767 except Exception: 1768 self.passed = False 1769 elif self.input_type == "targetApiVersion": 1770 # base64 test for all available target api version. 1771 version = os.path.basename(self.path) 1772 cmd.extend(['--target-api-version', version]) 1773 if version == "12": 1774 cmd.append("--target-api-sub-version=beta3") 1775 input_file = os.path.join(self.path, "input.txt") 1776 try: 1777 with open(input_file, 'r') as fp: 1778 base64_input = (''.join((fp.readlines()[12:]))).lstrip() # ignore license description lines 1779 cmd.extend(["--base64Input", base64_input]) 1780 except Exception: 1781 self.passed = False 1782 else: 1783 self.error = "Unsupported base64 input type" 1784 self.passed = False 1785 return self 1786 1787 version = os.path.basename(self.path) 1788 if not self.input_type == "targetApiVersion": 1789 cmd.append("--target-api-sub-version=beta3") 1790 1791 self.log_cmd(cmd) 1792 1793 process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) 1794 stdout, stderr = process.communicate(timeout=runner.args.es2panda_timeout) 1795 if stderr: 1796 self.passed = False 1797 self.error = stderr.decode("utf-8", errors="ignore") 1798 return self 1799 1800 self.output = stdout.decode("utf-8", errors="ignore") 1801 1802 expected_path = os.path.join(self.path, 'expected.txt') 1803 try: 1804 with open(expected_path, 'r') as fp: 1805 expected = (''.join((fp.readlines()[12:]))).lstrip() 1806 self.passed = expected == self.output 1807 except Exception: 1808 self.passed = False 1809 1810 if not self.passed: 1811 self.error = "expected output:" + os.linesep + expected + os.linesep + "actual output:" + os.linesep +\ 1812 self.output 1813 1814 return self 1815 1816 1817class Base64Runner(Runner): 1818 def __init__(self, args): 1819 Runner.__init__(self, args, "Base64") 1820 self.test_directory = path.join(self.test_root, "base64") 1821 self.add_test() 1822 1823 def add_test(self): 1824 self.tests = [] 1825 self.tests.append(Base64Test(os.path.join(self.test_directory, "inputFile"), "file")) 1826 self.tests.append(Base64Test(os.path.join(self.test_directory, "inputString"), "string")) 1827 # current target api version is 12, once a new version is addded, a new testcase should be added here. 1828 current_version = 12 1829 available_target_api_versions = [9, 10, 11, current_version] 1830 for version in available_target_api_versions: 1831 self.tests.append(Base64Test(os.path.join(self.test_directory, "availableTargetApiVersion", str(version)), 1832 "targetApiVersion")) 1833 1834 def test_path(self, src): 1835 return os.path.basename(src) 1836 1837 1838class BytecodeRunner(Runner): 1839 def __init__(self, args): 1840 Runner.__init__(self, args, "Bytecode") 1841 1842 def add_directory(self, directory, extension, flags, func=Test): 1843 glob_expression = path.join( 1844 self.test_root, directory, "**/*.%s" % (extension)) 1845 files = glob(glob_expression, recursive=True) 1846 files = fnmatch.filter(files, self.test_root + '**' + self.args.filter) 1847 self.tests += list(map(lambda f: func(f, flags), files)) 1848 1849 def test_path(self, src): 1850 return src 1851 1852 1853class ArkJsVmDownload: # Obtain different versions of ark_js_vm and their dependent libraries 1854 def __init__(self, args): 1855 self.build_dir = args.build_dir 1856 self.url = "https://gitee.com/zhongmingwei123123/ark_js_vm_version.git" 1857 self.local_path = path.join(self.build_dir, "ark_js_vm_version") 1858 self.max_retries = 3 1859 1860 def run_cmd_cwd(self, cmd): 1861 try: 1862 proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) 1863 _, _ = proc.communicate() 1864 return proc.wait() 1865 except Exception as e: 1866 print(f"Error executing command: {e}") 1867 return -1 1868 1869 def git_clone(self, git_url, code_dir): 1870 cmd = ["git", "clone", git_url, code_dir, "--depth=1"] 1871 retries = 1 1872 while retries <= self.max_retries: 1873 ret = self.run_cmd_cwd(cmd) 1874 if ret == 0: 1875 break 1876 else: 1877 print(f"\n warning: Atempt: #{retries} to clone '{git_url}' failed. Try cloining again") 1878 retries += 1 1879 assert not ret, f"\n error: Cloning '{git_url}' failed." 1880 1881 def run(self): 1882 if not os.path.exists(self.local_path): 1883 print("\nstart downLoad ark_js_vm_version ...\n") 1884 self.git_clone(self.url, self.local_path) 1885 print("\ndownload finish.\n") 1886 1887 1888class AbcTestCasesPrepare: 1889 def __init__(self, args): 1890 self.test_root = path.dirname(path.abspath(__file__)) 1891 self.es2panda = path.join(args.build_dir, "es2abc") 1892 self.args = args 1893 self.valid_mode_list = ["non_merge_mode", "merge_mode"] 1894 self.test_abc_path_list = set() 1895 1896 @staticmethod 1897 def split_api_version(version_str): 1898 parts = version_str.split("API")[1].split("beta") 1899 main_part = parts[0] 1900 beta_part = "beta%s" % parts[1] if len(parts) > 1 else "" 1901 return (main_part, beta_part) 1902 1903 def add_abc_directory(self, directory, extension): 1904 test_directory = path.join(self.test_root, directory) 1905 glob_expression = path.join(test_directory, "*.%s" % (extension)) 1906 files = glob(glob_expression) 1907 files = fnmatch.filter(files, self.test_root + "**" + self.args.filter) 1908 return files 1909 1910 def gen_abc_versions(self, flags, source_path): 1911 for api_version in API_VERSION_MAP: 1912 main_version, beta_version = AbcTestCasesPrepare.split_api_version(api_version) 1913 output_path = "%s_version_API%s%s.abc" % ( 1914 path.splitext(source_path)[0], 1915 main_version, 1916 beta_version, 1917 ) 1918 self.test_abc_path_list.add(output_path) 1919 _, stderr = self.compile_for_target_version(flags, source_path, output_path, main_version, beta_version) 1920 if stderr: 1921 raise RuntimeError(f"abc generate error: " % (stderr.decode("utf-8", errors="ignore"))) 1922 1923 def gen_abc_tests(self, directory, extension, flags, abc_mode): 1924 if abc_mode not in self.valid_mode_list: 1925 raise ValueError(f"Invalid abc_mode value: {abc_mode}") 1926 test_source_list = self.add_abc_directory(directory, extension) 1927 for input_path in test_source_list: 1928 self.gen_abc_versions(flags, input_path) 1929 1930 def compile_for_target_version(self, flags, input_path, output_path, target_api_version, target_api_sub_version=""): 1931 cmd = [] 1932 cmd.append(self.es2panda) 1933 cmd.append(input_path) 1934 cmd.extend(flags) 1935 cmd.append("--target-api-version=%s" % (target_api_version)) 1936 cmd.extend(["--output=%s" % (output_path)]) 1937 if target_api_version != "": 1938 cmd.append("--target-api-sub-version=%s" % (target_api_sub_version)) 1939 process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) 1940 stdout, stderr = process.communicate(timeout=10) 1941 if stderr: 1942 stderr = "Error executing command: %s\n%s" % (cmd, stderr) 1943 return stdout, stderr 1944 1945 def remove_abc_tests(self): 1946 for abc_path in self.test_abc_path_list: 1947 if path.exists(abc_path): 1948 os.remove(abc_path) 1949 1950 1951class AbcVersionControlRunner(Runner): 1952 def __init__(self, args): 1953 super().__init__(args, "AbcVersionControl") 1954 self.valid_mode_list = ["non_merge_mode", "merge_mode", "mix_compile_mode"] 1955 1956 def add_directory(self, directory, extension, flags, abc_mode, is_discard=False): 1957 if abc_mode not in self.valid_mode_list: 1958 raise ValueError(f"Invalid abc_mode value: {abc_mode}") 1959 glob_expression = path.join(self.test_root, directory, "*.%s" % (extension)) 1960 files = glob(glob_expression) 1961 files = fnmatch.filter(files, self.test_root + "**" + self.args.filter) 1962 if abc_mode == "mix_compile_mode": 1963 files = [f for f in files if not f.endswith("-expected.txt")] 1964 self.tests += list(map(lambda f: TestAbcVersionControl(f, flags, abc_mode, is_discard), files)) 1965 1966 def test_path(self, src): 1967 return src 1968 1969 def run(self): 1970 for test in self.tests: 1971 test.run(self) 1972 self.args.abc_tests_prepare.remove_abc_tests() 1973 1974 1975class VersionControlRunner(Runner): 1976 def __init__(self, args): 1977 Runner.__init__(self, args, "VersionControl") 1978 1979 def add_directory(self, directory, extension, flags, test_version, feature_type, module_dir=None, func=Test): 1980 glob_expression = path.join(self.test_root, directory, "*.%s" % (extension)) 1981 files = glob(glob_expression) 1982 files = fnmatch.filter(files, self.test_root + "**" + self.args.filter) 1983 module_path_list = [] 1984 if module_dir is not None: 1985 module_path_list = self.add_module_path(module_dir) 1986 self.tests += list( 1987 map(lambda f: TestVersionControl(f, flags, test_version, feature_type, module_path_list), files) 1988 ) 1989 1990 def add_module_path(self, module_dir): 1991 module_path_list = [] 1992 glob_expression_ts = path.join(self.test_root, module_dir, "*.%s" % ("ts")) 1993 glob_expression_js = path.join(self.test_root, module_dir, "*.%s" % ("js")) 1994 module_path_list = glob(glob_expression_ts) 1995 module_path_list.extend(glob(glob_expression_js)) 1996 module_path_list = fnmatch.filter(module_path_list, self.test_root + "**" + self.args.filter) 1997 return module_path_list 1998 1999 def test_path(self, src): 2000 return src 2001 2002 def run(self): 2003 for test in self.tests: 2004 test.run(self) 2005 2006 2007class TestAbcVersionControl(Test): 2008 def __init__(self, test_path, flags, abc_mode, is_discard): 2009 super().__init__(test_path, flags) 2010 self.min_support_version_number = API_VERSION_MAP.get(MIN_SUPPORT_BC_VERSION) 2011 self.abc_mode = abc_mode 2012 self.is_discard = is_discard 2013 self.output = None 2014 self.process = None 2015 self.is_support = False 2016 self.test_abc_list = list() 2017 self.test_input = None 2018 self.target_abc_path = None 2019 self.entry_point = self.get_entry_point() 2020 2021 @staticmethod 2022 def compare_version_number(version1, version2): 2023 v1 = TestAbcVersionControl.version_number_to_tuple(version1) 2024 v2 = TestAbcVersionControl.version_number_to_tuple(version2) 2025 for num1, num2 in zip(v1, v2): 2026 if num1 > num2: 2027 return 1 2028 elif num1 < num2: 2029 return -1 2030 return 0 2031 2032 @staticmethod 2033 def version_number_to_tuple(version): 2034 return tuple(int(part) for part in version.split(".")) 2035 2036 def get_entry_point(self): 2037 if self.abc_mode == "merge_mode": 2038 base_name = os.path.basename(self.path) 2039 return os.path.splitext(base_name)[0] 2040 elif self.abc_mode == "mix_compile_mode": 2041 return MIX_COMPILE_ENTRY_POINT 2042 return "" 2043 2044 def get_path_to_expected(self, is_support=False, test_stage=""): 2045 support_name = "supported_" if is_support else "unsupported_" 2046 if self.abc_mode == "mix_compile_mode" and test_stage != "runtime": 2047 support_name = "" 2048 expected_name = path.splitext(self.path)[0].split("_version_API")[0] 2049 expected_path = "%s_%s%s-expected.txt" % (expected_name, support_name, test_stage) 2050 return expected_path 2051 2052 def run_process(self, cmd): 2053 self.process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) 2054 stdout, stderr = self.process.communicate() 2055 self.output = stdout.decode("utf-8", errors="ignore") + stderr.decode("utf-8", errors="ignore").split("\n")[0] 2056 if stderr: 2057 stderr = "Error executing command: %s\n%s" % (cmd, stderr) 2058 return stdout, stderr 2059 2060 def compile_for_target_version( 2061 self, runner, input_path, output_path, target_api_version, target_api_sub_version="" 2062 ): 2063 cmd = [] 2064 cmd.append(runner.es2panda) 2065 if self.abc_mode == "mix_compile_mode": 2066 input_path = "@%s" % (input_path) 2067 cmd.append(input_path) 2068 cmd.extend(self.flags) 2069 cmd.append("--target-api-version=%s" % (target_api_version)) 2070 cmd.extend(["--output=%s" % (output_path)]) 2071 if target_api_version != "": 2072 cmd.append("--target-api-sub-version=%s" % (target_api_sub_version)) 2073 stdout, stderr = self.run_process(cmd) 2074 return stdout, stderr 2075 2076 def generate_abc(self, runner, target_api_version, target_api_sub_version=""): 2077 compile_expected_path = None 2078 target_abc_name = ( 2079 "%s_target_%s%s.abc" % (path.splitext(self.path)[0], target_api_version, target_api_sub_version) 2080 ).replace("/", "_") 2081 self.target_abc_path = path.join(runner.build_dir, target_abc_name) 2082 _, stderr = self.compile_for_target_version( 2083 runner, self.path, self.target_abc_path, target_api_version, target_api_sub_version 2084 ) 2085 format_content = "" 2086 self.is_support = False 2087 2088 # Extract the API versions of the input abc files from the file name of the test case. 2089 input_api_versions = self.extract_api_versions(path.split(self.path)[1]) 2090 input_version_numbers = [API_VERSION_MAP.get(api) for api in input_api_versions] 2091 sorted(input_version_numbers, key=TestAbcVersionControl.version_number_to_tuple) 2092 min_input_version_number = input_version_numbers[0] 2093 max_input_version_number = input_version_numbers[-1] 2094 target_version = "API" + target_api_version + target_api_sub_version 2095 target_version_number = API_VERSION_MAP.get(target_version) 2096 2097 if TestAbcVersionControl.compare_version_number(target_version_number, self.min_support_version_number) < 0: 2098 compile_expected_path = self.get_path_to_expected( 2099 self.is_support, "compile_target_version_below_min_support" 2100 ) 2101 format_content = target_api_version 2102 elif ( 2103 TestAbcVersionControl.compare_version_number(min_input_version_number, self.min_support_version_number) < 0 2104 ): 2105 compile_expected_path = self.get_path_to_expected(self.is_support, "compile_cur_version_below_min_support") 2106 format_content = self.path 2107 elif TestAbcVersionControl.compare_version_number(target_version_number, max_input_version_number) < 0: 2108 compile_expected_path = self.get_path_to_expected(self.is_support, "compile_target_version_below_cur") 2109 format_content = self.path 2110 elif self.is_discard: 2111 compile_expected_path = self.get_path_to_expected(self.is_support, "compile_discard") 2112 else: 2113 self.is_support = True 2114 if stderr: 2115 self.passed = False 2116 return stderr 2117 2118 try: 2119 with open(compile_expected_path, "r") as fp: 2120 expected = fp.read() 2121 self.passed = expected.format(format_content) in self.output and self.process.returncode in [0, 1] 2122 except Exception: 2123 self.passed = False 2124 return stderr 2125 2126 def execute_abc(self, runner, vm_api_version, vm_api_sub_version="", entry_point=""): 2127 cmd = [] 2128 if vm_api_version != "12": 2129 vm_api_sub_version = "" 2130 # there is no virtual machine with version api12beta2 available. 2131 # chosen api12beta1 as a replacement. 2132 elif vm_api_version == "12" and vm_api_sub_version == "beta2": 2133 vm_api_sub_version = "beta1" 2134 ark_js_vm_dir = os.path.join( 2135 runner.build_dir, 2136 "ark_js_vm_version", 2137 "API%s%s" % (vm_api_version, vm_api_sub_version), 2138 ) 2139 ld_library_path = os.path.join(ark_js_vm_dir, "lib") 2140 os.environ["LD_LIBRARY_PATH"] = ld_library_path 2141 ark_js_vm_path = os.path.join(ark_js_vm_dir, "ark_js_vm") 2142 cmd.append(ark_js_vm_path) 2143 if entry_point != "": 2144 cmd.append("--entry-point=%s" % entry_point) 2145 cmd.append(self.target_abc_path) 2146 stdout, stderr = self.run_process(cmd) 2147 return stdout, stderr 2148 2149 def test_abc_execution(self, runner, target_api_version, target_api_sub_version=""): 2150 stderr = None 2151 target_version = "API" + target_api_version + target_api_sub_version 2152 target_version_number = API_VERSION_MAP.get(target_version) 2153 for api_version in API_VERSION_MAP: 2154 vm_api_version, vm_api_sub_version = AbcTestCasesPrepare.split_api_version(api_version) 2155 vm_version = "API" + vm_api_version + vm_api_sub_version 2156 vm_version_number = API_VERSION_MAP.get(vm_version) 2157 _, stderr = self.execute_abc(runner, vm_api_version, vm_api_sub_version, self.entry_point) 2158 self.is_support = ( 2159 TestAbcVersionControl.compare_version_number(vm_version_number, target_version_number) >= 0 2160 ) 2161 runtime_expect_path = self.get_path_to_expected(self.is_support, "runtime") 2162 try: 2163 with open(runtime_expect_path, "r") as fp: 2164 expected = fp.read() 2165 if self.is_support and self.abc_mode != "merge_mode": 2166 self.passed = expected == self.output and self.process.returncode in [0, 1, 255] 2167 else: 2168 self.passed = expected in self.output 2169 pass 2170 except Exception: 2171 self.passed = False 2172 if not self.passed: 2173 return stderr 2174 return stderr 2175 2176 def extract_api_versions(self, file_name): 2177 pattern = r"(API\d+)(beta\d+)?" 2178 matches = re.findall(pattern, file_name) 2179 api_versions = [f"{api}{f'{beta}' if beta else ''}" for api, beta in matches] 2180 return api_versions 2181 2182 def remove_abc(self, abc_path): 2183 if path.exists(abc_path): 2184 os.remove(abc_path) 2185 2186 def run(self, runner): 2187 for api_version in API_VERSION_MAP: 2188 target_api_version, target_api_sub_version = AbcTestCasesPrepare.split_api_version(api_version) 2189 stderr = self.generate_abc(runner, target_api_version, target_api_sub_version) 2190 if not self.passed: 2191 self.error = stderr.decode("utf-8", errors="ignore") 2192 return self 2193 if stderr: 2194 continue 2195 stderr = self.test_abc_execution(runner, target_api_version, target_api_sub_version) 2196 self.remove_abc(self.target_abc_path) 2197 if not self.passed: 2198 self.error = stderr.decode("utf-8", errors="ignore") 2199 return self 2200 return self 2201 2202 2203class TestVersionControl(Test): 2204 def __init__(self, test_path, flags, test_version, feature_type, module_path_list): 2205 Test.__init__(self, test_path, flags) 2206 self.beta_version_default = 3 2207 self.version_with_sub_version_list = [12] 2208 self.target_api_version_list = ["9", "10", "11", "12", "18"] 2209 self.target_api_sub_version_list = ["beta1", "beta2", "beta3"] 2210 self.specific_api_version_list = ["API11", "API12beta3"] 2211 self.output = None 2212 self.process = None 2213 self.test_version = test_version 2214 self.test_abc_path = None 2215 self.feature_type = feature_type 2216 self.module_path_list = module_path_list 2217 self.module_abc_path_set = set() 2218 2219 def split_version(self, version_str): 2220 parts = version_str.split("API")[1].split("beta") 2221 main_part = int(parts[0]) 2222 beta_part = int(parts[1]) if len(parts) > 1 else self.beta_version_default 2223 return (main_part, beta_part) 2224 2225 def compare_two_versions(self, version1, version2): 2226 version1_parsed = self.split_version(version1) 2227 version2_parsed = self.split_version(version2) 2228 2229 if version1_parsed < version2_parsed: 2230 return -1 2231 elif version1_parsed > version2_parsed: 2232 return 1 2233 else: 2234 return 0 2235 2236 def get_relative_path(self, from_dir, to_dir): 2237 from_dir = os.path.normpath(from_dir) 2238 to_dir = os.path.normpath(to_dir) 2239 from_dir = os.path.abspath(from_dir) 2240 to_dir = os.path.abspath(to_dir) 2241 from_parts = from_dir.split(os.sep) 2242 to_parts = to_dir.split(os.sep) 2243 common_prefix_length = 0 2244 for part1, part2 in zip(from_parts, to_parts): 2245 if part1 == part2: 2246 common_prefix_length += 1 2247 else: 2248 break 2249 relative_parts = [".."] * (len(from_parts) - common_prefix_length) + to_parts[common_prefix_length:] 2250 relative_path = os.path.join(*relative_parts) 2251 return relative_path 2252 2253 def generate_single_module_abc(self, runner, module_path, target_version): 2254 cmd = [] 2255 cmd.append(runner.es2panda) 2256 cmd.append(module_path) 2257 cmd.append("--module") 2258 main_version, sub_version = self.split_version(target_version) 2259 cmd.append("--target-api-version=%s" % (main_version)) 2260 if main_version == 12: 2261 cmd.append("--target-api-sub-version=beta%s" % (sub_version)) 2262 2263 basename = os.path.basename(module_path) 2264 module_abc_name = "%s.abc" % (path.splitext(basename)[0]) 2265 relative_path = self.get_relative_path(path.split(self.path)[0], path.split(module_path)[0]) 2266 module_abc_dir = path.join(runner.build_dir, relative_path) 2267 if not os.path.exists(module_abc_dir): 2268 os.makedirs(module_abc_dir) 2269 module_abc_path = path.join(module_abc_dir, module_abc_name) 2270 self.module_abc_path_set.add(module_abc_path) 2271 cmd.extend(["--output=%s" % (module_abc_path)]) 2272 2273 self.process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) 2274 proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) 2275 _, stderr = proc.communicate() 2276 proc.wait() 2277 if stderr: 2278 print(stderr.decode("utf-8", errors="ignore")) 2279 2280 def generate_module_abc(self, runner, target_version): 2281 for module_path in self.module_path_list: 2282 self.generate_single_module_abc(runner, module_path, target_version) 2283 2284 def remove_module_abc(self): 2285 for module_abc_path in self.module_abc_path_set: 2286 if path.exists(module_abc_path): 2287 os.remove(module_abc_path) 2288 2289 def get_path_to_expected( 2290 self, is_support, expected_stage, target_api_version="", specific_api_version="", dump_type="" 2291 ): 2292 support_name = "supported" if is_support else "unsupported" 2293 api_name = "" 2294 # Higher than the specific API version, expected results may differ 2295 if target_api_version != "" and specific_api_version != "": 2296 if self.compare_two_versions(target_api_version, specific_api_version) >= 0: 2297 api_name = "for_higher_or_equal_to_%s_" % (specific_api_version) 2298 else: 2299 api_name = "for_below_%s_" % (specific_api_version) 2300 if dump_type == "ast": 2301 dump_type = "" 2302 elif dump_type == "asm": 2303 dump_type = "asm_" 2304 expected_path = "%s_%s_%s_%s%sversion-expected.txt" % ( 2305 path.splitext(self.path)[0], 2306 support_name, 2307 expected_stage, 2308 api_name, 2309 dump_type, 2310 ) 2311 return expected_path 2312 2313 def get_path_to_runtime_output_below_version_expected(self): 2314 expected_path = "%s_runtime_below_abc_api_version-expected.txt" % ( 2315 path.splitext(self.path)[0]) 2316 return expected_path 2317 2318 def get_path_to_runtime_output_expected(self, is_support, target_api_version, is_below_abc_api_version): 2319 path_expected = None 2320 if is_below_abc_api_version: 2321 path_expected = self.get_path_to_runtime_output_below_version_expected() 2322 return path_expected 2323 for specific_api_version in self.specific_api_version_list: 2324 if self.compare_two_versions(target_api_version, specific_api_version) > 0: 2325 continue 2326 path_expected = self.get_path_to_expected(is_support, "runtime", target_api_version, specific_api_version) 2327 if path.exists(path_expected): 2328 return path_expected 2329 return self.get_path_to_expected(is_support, "runtime", target_api_version) 2330 2331 def get_path_to_compile_ast_output_expected(self, is_support): 2332 return self.get_path_to_expected(is_support, "compile") 2333 2334 def get_path_to_compile_asm_output_expected(self, is_support, target_api_version): 2335 path_expected = None 2336 for specific_api_version in self.specific_api_version_list: 2337 path_expected = self.get_path_to_expected( 2338 is_support, "compile", target_api_version, specific_api_version, "asm" 2339 ) 2340 if path.exists(path_expected): 2341 return path_expected 2342 return self.get_path_to_expected(is_support, "compile", "", "", "asm") 2343 2344 def run_process(self, cmd): 2345 self.process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) 2346 stdout, stderr = self.process.communicate() 2347 self.output = stdout.decode("utf-8", errors="ignore") + stderr.decode("utf-8", errors="ignore").split("\n")[0] 2348 return stdout, stderr 2349 2350 def run_process_compile(self, runner, target_api_version, target_api_sub_version="bata3", dump_type=""): 2351 cmd = [] 2352 cmd.append(runner.es2panda) 2353 cmd.append(self.path) 2354 cmd.extend(self.flags) 2355 cmd.append("--target-api-version=%s" % (target_api_version)) 2356 test_abc_name = ("%s.abc" % (path.splitext(self.path)[0])).replace("/", "_") 2357 self.test_abc_path = path.join(runner.build_dir, test_abc_name) 2358 cmd.extend(["--output=%s" % (self.test_abc_path)]) 2359 if target_api_version == "12": 2360 cmd.append("--target-api-sub-version=%s" % (target_api_sub_version)) 2361 if dump_type == "ast": 2362 cmd.append("--dump-ast") 2363 elif dump_type == "assembly": 2364 cmd.append("--dump-assembly") 2365 stdout, stderr = self.run_process(cmd) 2366 return stdout, stderr 2367 2368 def generate_ast_of_target_version(self, runner, target_api_version, target_api_sub_version="bata3"): 2369 return self.run_process_compile(runner, target_api_version, target_api_sub_version, dump_type="ast") 2370 2371 def generate_asm_of_target_version(self, runner, target_api_version, target_api_sub_version="bata3"): 2372 return self.run_process_compile(runner, target_api_version, target_api_sub_version, dump_type="assembly") 2373 2374 def runtime_for_target_version(self, runner, target_api_version, target_api_sub_version="bata3"): 2375 cmd = [] 2376 if target_api_version != "12": 2377 target_api_sub_version = "" 2378 # there is no virtual machine with version api12beta2 available. 2379 # We have chosen api12beta1 as a replacement. 2380 if target_api_version == "12" and target_api_sub_version == "beta2": 2381 target_api_sub_version = "beta1" 2382 ark_js_vm_dir = os.path.join( 2383 runner.build_dir, 2384 "ark_js_vm_version", 2385 "API%s%s" % (target_api_version, target_api_sub_version), 2386 ) 2387 ld_library_path = os.path.join(ark_js_vm_dir, "lib") 2388 os.environ["LD_LIBRARY_PATH"] = ld_library_path 2389 ark_js_vm_path = os.path.join(ark_js_vm_dir, "ark_js_vm") 2390 cmd.append(ark_js_vm_path) 2391 cmd.append(self.test_abc_path) 2392 self.process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) 2393 stdout, stderr = self.process.communicate() 2394 self.output = stdout.decode("utf-8", errors="ignore") + stderr.decode("utf-8", errors="ignore").split("\n")[0] 2395 return stdout, stderr 2396 2397 def run_for_single_version(self, runner, target_api_version, target_api_sub_version="beta3"): 2398 cur_api_version = "API" + target_api_version + target_api_sub_version 2399 is_support = True if self.compare_two_versions(cur_api_version, self.test_version) >= 0 else False 2400 compile_expected_path = None 2401 stderr = None 2402 if self.feature_type == "syntax_feature": 2403 compile_expected_path = self.get_path_to_compile_ast_output_expected(is_support) 2404 _, stderr = self.generate_ast_of_target_version(runner, target_api_version, target_api_sub_version) 2405 elif self.feature_type == "bytecode_feature": 2406 compile_expected_path = self.get_path_to_compile_asm_output_expected(is_support, cur_api_version) 2407 _, stderr = self.generate_asm_of_target_version( 2408 runner, target_api_version, target_api_sub_version 2409 ) 2410 try: 2411 with open(compile_expected_path, "r") as fp: 2412 expected = fp.read() 2413 self.passed = expected == self.output and self.process.returncode in [0, 1] 2414 except Exception: 2415 self.passed = False 2416 if not self.passed or (stderr and self.passed): 2417 return stderr 2418 for api_version in self.target_api_version_list: 2419 # The interception capability of API9 version of ark_js_vm has not yet been launched. 2420 if api_version == "9": 2421 continue 2422 for api_sub_version in self.target_api_sub_version_list: 2423 if not api_version in self.version_with_sub_version_list and api_sub_version != "beta3": 2424 continue 2425 cur_runtime_api_version = "API" + api_version + api_sub_version 2426 is_below_abc_version = ( 2427 False if self.compare_two_versions(cur_runtime_api_version, cur_api_version) >= 0 else True 2428 ) 2429 self.generate_module_abc(runner, cur_runtime_api_version) 2430 _, stderr = self.runtime_for_target_version(runner, api_version, api_sub_version) 2431 runtime_expected_path = self.get_path_to_runtime_output_expected( 2432 is_support, cur_api_version, is_below_abc_version 2433 ) 2434 self.remove_module_abc() 2435 try: 2436 with open(runtime_expected_path, "r") as fp: 2437 expected = fp.read() 2438 if is_below_abc_version: 2439 self.passed = expected in self.output 2440 else: 2441 self.passed = expected == self.output 2442 except Exception: 2443 self.passed = False 2444 if not self.passed: 2445 return stderr 2446 return stderr 2447 2448 def run(self, runner): 2449 for target_api_version in self.target_api_version_list: 2450 stderr = None 2451 if target_api_version == "12": 2452 for target_api_sub_version in self.target_api_sub_version_list: 2453 stderr = self.run_for_single_version(runner, target_api_version, target_api_sub_version) 2454 if path.exists(self.test_abc_path): 2455 os.remove(self.test_abc_path) 2456 if not self.passed: 2457 self.error = stderr.decode("utf-8", errors="ignore") 2458 return self 2459 else: 2460 stderr = self.run_for_single_version(runner, target_api_version) 2461 if not self.passed: 2462 self.error = stderr.decode("utf-8", errors="ignore") 2463 return self 2464 return self 2465 2466 2467class CompilerTestInfo(object): 2468 def __init__(self, directory, extension, flags): 2469 self.directory = directory 2470 self.extension = extension 2471 self.flags = flags 2472 2473 def update_dir(self, prefiex_dir): 2474 self.directory = os.path.sep.join([prefiex_dir, self.directory]) 2475 2476 2477# Copy compiler directory to test/.local directory, and do inplace obfuscation. 2478def prepare_for_obfuscation(compiler_test_infos, test_root): 2479 tmp_dir_name = ".local" 2480 tmp_path = os.path.join(test_root, tmp_dir_name) 2481 if not os.path.exists(tmp_path): 2482 os.mkdir(tmp_path) 2483 2484 test_root_dirs = set() 2485 for info in compiler_test_infos: 2486 root_dir = info.directory.split("/")[0] 2487 test_root_dirs.add(root_dir) 2488 2489 for test_dir in test_root_dirs: 2490 src_dir = os.path.join(test_root, test_dir) 2491 target_dir = os.path.join(tmp_path, test_dir) 2492 if os.path.exists(target_dir): 2493 shutil.rmtree(target_dir) 2494 shutil.copytree(src_dir, target_dir) 2495 2496 for info in compiler_test_infos: 2497 info.update_dir(tmp_dir_name) 2498 2499 2500def add_directory_for_version_control(runners, args): 2501 ark_js_vm_prepared = ArkJsVmDownload(args) 2502 ark_js_vm_prepared.run() 2503 runner = VersionControlRunner(args) 2504 runner.add_directory( 2505 "version_control/API11/syntax_feature", 2506 "js", 2507 ["--module"], 2508 "API11", 2509 "syntax_feature", 2510 ) 2511 runner.add_directory( 2512 "version_control/API11/syntax_feature", 2513 "ts", 2514 ["--module"], 2515 "API11", 2516 "syntax_feature", 2517 ) 2518 runner.add_directory( 2519 "version_control/API12beta1_and_beta2/syntax_feature", 2520 "ts", ["--module"], 2521 "API12beta1", 2522 "syntax_feature", 2523 ) 2524 runner.add_directory( 2525 "version_control/API12beta1_and_beta2/syntax_feature", 2526 "js", 2527 ["--module"], 2528 "API12beta1", 2529 "syntax_feature", 2530 ) 2531 runner.add_directory( 2532 "version_control/API12beta3/syntax_feature", 2533 "ts", 2534 ["--module"], 2535 "API12beta3", 2536 "syntax_feature", 2537 "version_control/API12beta3/syntax_feature/import_target", 2538 ) 2539 runner.add_directory( 2540 "version_control/API12beta3/syntax_feature", 2541 "js", 2542 ["--module"], 2543 "API12beta3", 2544 "syntax_feature", 2545 "version_control/API12beta3/syntax_feature/import_target", 2546 ) 2547 runner.add_directory( 2548 "version_control/API11/bytecode_feature", 2549 "ts", 2550 ["--module"], 2551 "API11", 2552 "bytecode_feature", 2553 ) 2554 runner.add_directory( 2555 "version_control/API11/bytecode_feature", 2556 "js", 2557 ["--module"], 2558 "API11", 2559 "bytecode_feature", 2560 ) 2561 runner.add_directory( 2562 "version_control/API12beta1_and_beta2/bytecode_feature", 2563 "ts", 2564 ["--module"], 2565 "API12beta1", 2566 "bytecode_feature", 2567 "version_control/API12beta1_and_beta2/bytecode_feature/import_target", 2568 ) 2569 runner.add_directory( 2570 "version_control/API12beta1_and_beta2/bytecode_feature", 2571 "js", 2572 ["--module"], 2573 "API12beta1", 2574 "bytecode_feature", 2575 "version_control/API12beta1_and_beta2/bytecode_feature/import_target", 2576 ) 2577 runner.add_directory( 2578 "version_control/API12beta3/bytecode_feature", 2579 "ts", 2580 ["--module"], 2581 "API12beta3", 2582 "bytecode_feature", 2583 "version_control/API12beta3/bytecode_feature/import_target", 2584 ) 2585 runner.add_directory( 2586 "version_control/API12beta3/bytecode_feature", 2587 "js", 2588 ["--module"], 2589 "API12beta3", 2590 "bytecode_feature", 2591 "version_control/API12beta3/bytecode_feature/import_target", 2592 ) 2593 runner.add_directory( 2594 "version_control/API16/bytecode_feature", 2595 "js", 2596 [], 2597 "API16", 2598 "bytecode_feature", 2599 ) 2600 runner.add_directory( 2601 "version_control/API16/bytecode_feature", 2602 "ts", 2603 ["--module"], 2604 "API16", 2605 "bytecode_feature", 2606 ) 2607 runners.append(runner) 2608 2609 abc_tests_prepare = AbcTestCasesPrepare(args) 2610 abc_tests_prepare.gen_abc_tests( 2611 "version_control/bytecode_version_control/non_merge_mode", 2612 "js", 2613 ["--module"], 2614 "non_merge_mode", 2615 ) 2616 abc_tests_prepare.gen_abc_tests( 2617 "version_control/bytecode_version_control/merge_mode", 2618 "js", 2619 ["--module", "--merge-abc"], 2620 "merge_mode", 2621 ) 2622 abc_tests_prepare.gen_abc_tests( 2623 "version_control/bytecode_version_control/mixed_compile", 2624 "js", 2625 ["--module", "--merge-abc"], 2626 "merge_mode", 2627 ) 2628 2629 args.abc_tests_prepare = abc_tests_prepare 2630 abc_version_control_runner = AbcVersionControlRunner(args) 2631 abc_version_control_runner.add_directory( 2632 "version_control/bytecode_version_control/non_merge_mode", 2633 "abc", 2634 ["--module", "--enable-abc-input"], 2635 "non_merge_mode", 2636 ) 2637 abc_version_control_runner.add_directory( 2638 "version_control/bytecode_version_control/merge_mode", 2639 "abc", 2640 ["--module", "--enable-abc-input", "--merge-abc"], 2641 "merge_mode", 2642 ) 2643 abc_version_control_runner.add_directory( 2644 "version_control/bytecode_version_control/mixed_compile", 2645 "txt", 2646 ["--module", "--enable-abc-input", "--merge-abc"], 2647 "mix_compile_mode", 2648 ) 2649 runners.append(abc_version_control_runner) 2650 2651def add_directory_for_regression(runners, args): 2652 runner = RegressionRunner(args) 2653 runner.add_directory("parser/concurrent", "js", ["--module", "--dump-ast"]) 2654 runner.add_directory("parser/js", "js", ["--parse-only", "--dump-ast"]) 2655 runner.add_directory("parser/script", "ts", ["--parse-only", "--dump-ast"]) 2656 runner.add_directory("parser/ts", "ts", 2657 ["--parse-only", "--module", "--dump-ast"]) 2658 runner.add_directory("parser/ts/type_checker", "ts", 2659 ["--parse-only", "--enable-type-check", "--module", "--dump-ast"]) 2660 runner.add_directory("parser/ts/cases/declaration", "d.ts", 2661 ["--parse-only", "--module", "--dump-ast"], TSDeclarationTest) 2662 runner.add_directory("parser/commonjs", "js", ["--commonjs", "--parse-only", "--dump-ast"]) 2663 runner.add_directory("parser/binder", "js", ["--dump-assembly", "--dump-literal-buffer", "--module", "--target-api-sub-version=beta3"]) 2664 runner.add_directory("parser/binder", "ts", ["--dump-assembly", "--dump-literal-buffer", "--module", "--target-api-sub-version=beta3"]) 2665 runner.add_directory("parser/binder/noModule", "ts", ["--dump-assembly", "--dump-literal-buffer", "--target-api-sub-version=beta3"]) 2666 runner.add_directory("parser/binder/api12beta2", "js", ["--dump-assembly", "--target-api-version=12", "--target-api-sub-version=beta2"]) 2667 runner.add_directory("parser/binder/debugInfo", "ts", ["--dump-assembly", "--dump-literal-buffer", "--debug-info", "--module"]) 2668 runner.add_directory("parser/js/emptySource", "js", ["--dump-assembly"]) 2669 runner.add_directory("parser/js/language/arguments-object", "js", ["--parse-only"]) 2670 runner.add_directory("parser/js/language/statements/for-statement", "js", ["--parse-only", "--dump-ast"]) 2671 runner.add_directory("parser/js/language/expressions/optional-chain", "js", ["--parse-only", "--dump-ast"]) 2672 runner.add_directory("parser/js/language/import/syntax/api18", "js", 2673 ["--parse-only", "--module", "--target-api-version=18"]) 2674 runner.add_directory("parser/js/language/import/syntax/api12/beta3", "js", 2675 ["--parse-only", "--module", "--target-api-version=12", "--target-api-sub-version=beta3"]) 2676 runner.add_directory("parser/js/language/import/syntax/api12/beta2", "js", 2677 ["--parse-only", "--module", "--target-api-version=12", "--target-api-sub-version=beta2"]) 2678 runner.add_directory("parser/js/language/import", "ts", 2679 ["--dump-assembly", "--dump-literal-buffer", "--module", "--target-api-version=18"]) 2680 runner.add_directory("parser/sendable_class", "ts", 2681 ["--dump-assembly", "--dump-literal-buffer", "--module", "--target-api-sub-version=beta3"]) 2682 runner.add_directory("parser/sendable_class/api12beta2", "ts", 2683 ["--dump-assembly", "--dump-literal-buffer", "--module", "--target-api-version=12", "--target-api-sub-version=beta2"]) 2684 runner.add_directory("parser/unicode", "js", ["--parse-only"]) 2685 runner.add_directory("parser/ts/stack_overflow", "ts", ["--parse-only", "--dump-ast"]) 2686 runner.add_directory("parser/js/module-record/module-record-field-name-option.js", "js", 2687 ["--module-record-field-name=abc", "--source-file=abc", "--module", "--dump-normalized-asm-program"]) 2688 runner.add_directory("parser/annotations", "ts", ["--module", "--dump-ast", "--enable-annotations"]) 2689 runner.add_directory("parser/ts/inline-property", "ts", ["--dump-assembly", "--module"]) 2690 2691 runners.append(runner) 2692 2693 transformer_runner = TransformerRunner(args) 2694 transformer_runner.add_directory("parser/ts/transformed_cases", "ts", 2695 ["--parse-only", "--module", "--dump-transformed-ast", 2696 "--check-transformed-ast-structure"]) 2697 2698 runners.append(transformer_runner) 2699 2700 bc_version_runner = BcVersionRunner(args) 2701 bc_version_runner.add_cmd() 2702 2703 runners.append(bc_version_runner) 2704 2705 transformer_api_version_10_runner = TransformerInTargetApiVersion10Runner(args) 2706 transformer_api_version_10_runner.add_directory("parser/ts/transformed_cases_api_version_10", "ts", 2707 ["--parse-only", "--module", "--target-api-version=10", 2708 "--dump-transformed-ast"]) 2709 2710 runners.append(transformer_api_version_10_runner) 2711 2712def add_directory_for_asm(runners, args, mode=""): 2713 runner = AbcToAsmRunner(args, True if mode == "debug" else False) 2714 runner.add_directory("abc2asm/js", "js", []) 2715 runner.add_directory("abc2asm/ts", "ts", []) 2716 runner.add_directory("compiler/js", "js", []) 2717 runner.add_directory("compiler/ts/cases/compiler", "ts", []) 2718 runner.add_directory("compiler/ts/projects", "ts", ["--module"]) 2719 runner.add_directory("compiler/ts/projects", "ts", ["--module", "--merge-abc"]) 2720 runner.add_directory("compiler/dts", "d.ts", ["--module", "--opt-level=0"]) 2721 runner.add_directory("compiler/commonjs", "js", ["--commonjs"]) 2722 runner.add_directory("parser/concurrent", "js", ["--module"]) 2723 runner.add_directory("parser/js", "js", []) 2724 runner.add_directory("parser/script", "ts", []) 2725 runner.add_directory("parser/ts", "ts", ["--module"]) 2726 runner.add_directory("parser/ts/type_checker", "ts", ["--enable-type-check", "--module"]) 2727 runner.add_directory("parser/commonjs", "js", ["--commonjs"]) 2728 runner.add_directory("parser/binder", "js", ["--dump-assembly", "--dump-literal-buffer", "--module"]) 2729 runner.add_directory("parser/binder", "ts", ["--dump-assembly", "--dump-literal-buffer", "--module"]) 2730 runner.add_directory("parser/binder/noModule", "ts", ["--dump-assembly", "--dump-literal-buffer"]) 2731 runner.add_directory("parser/js/emptySource", "js", []) 2732 runner.add_directory("parser/js/language/arguments-object", "js", []) 2733 runner.add_directory("parser/js/language/statements/for-statement", "js", []) 2734 runner.add_directory("parser/js/language/expressions/optional-chain", "js", []) 2735 runner.add_directory("parser/sendable_class", "ts", ["--module"]) 2736 runner.add_directory("parser/unicode", "js", []) 2737 runner.add_directory("parser/ts/stack_overflow", "ts", []) 2738 2739 runners.append(runner) 2740 2741 2742def add_directory_for_compiler(runners, args): 2743 runner = CompilerRunner(args) 2744 compiler_test_infos = [] 2745 compiler_test_infos.append(CompilerTestInfo("compiler/js", "js", ["--module"])) 2746 compiler_test_infos.append(CompilerTestInfo("compiler/ts/cases", "ts", [])) 2747 compiler_test_infos.append(CompilerTestInfo("compiler/ts/projects", "ts", ["--module"])) 2748 compiler_test_infos.append(CompilerTestInfo("compiler/ts/projects", "ts", ["--module", "--merge-abc"])) 2749 compiler_test_infos.append(CompilerTestInfo("compiler/annotations-projects", "ts", ["--module", "--enable-annotations", "--merge-abc"])) 2750 compiler_test_infos.append(CompilerTestInfo("compiler/dts", "d.ts", ["--module", "--opt-level=0"])) 2751 compiler_test_infos.append(CompilerTestInfo("compiler/commonjs", "js", ["--commonjs"])) 2752 compiler_test_infos.append(CompilerTestInfo("compiler/interpreter/lexicalEnv", "js", [])) 2753 compiler_test_infos.append(CompilerTestInfo("compiler/sendable", "ts", ["--module", "--target-api-sub-version=beta3"])) 2754 compiler_test_infos.append(CompilerTestInfo("optimizer/js/branch-elimination", "js", 2755 ["--module", "--branch-elimination", "--dump-assembly"])) 2756 compiler_test_infos.append(CompilerTestInfo("optimizer/js/opt-try-catch-func", "js", 2757 ["--module", "--dump-assembly"])) 2758 compiler_test_infos.append(CompilerTestInfo("optimizer/js/unused-inst-opt", "js", 2759 ["--module", "--dump-assembly"])) 2760 compiler_test_infos.append(CompilerTestInfo("compiler/debugInfo/", "js", 2761 ["--debug-info", "--dump-debug-info", "--source-file", "debug-info.js"])) 2762 compiler_test_infos.append(CompilerTestInfo("compiler/js/module-record-field-name-option.js", "js", 2763 ["--module", "--module-record-field-name=abc"])) 2764 compiler_test_infos.append(CompilerTestInfo("compiler/annotations", "ts", ["--module", "--enable-annotations"])) 2765 compiler_test_infos.append(CompilerTestInfo("compiler/generateCache-projects", "ts", 2766 ["--merge-abc", "--file-threads=0", "--cache-file"])) 2767 # Following directories of test cases are for dump-assembly comparison only, and is not executed. 2768 # Check CompilerProjectTest for more details. 2769 compiler_test_infos.append(CompilerTestInfo("optimizer/ts/branch-elimination/projects", "ts", 2770 ["--module", "--branch-elimination", "--merge-abc", "--dump-assembly", 2771 "--file-threads=8"])) 2772 compiler_test_infos.append(CompilerTestInfo("compiler/bytecodehar/projects", "ts", 2773 ["--merge-abc", "--dump-assembly", "--enable-abc-input", 2774 "--dump-deps-info", "--remove-redundant-file", 2775 "--dump-literal-buffer", "--dump-string", "--abc-class-threads=4"])) 2776 compiler_test_infos.append(CompilerTestInfo("compiler/bytecodehar/js/projects", "js", 2777 ["--merge-abc", "--dump-assembly", "--enable-abc-input", 2778 "--dump-deps-info", "--remove-redundant-file", 2779 "--dump-literal-buffer", "--dump-string", "--abc-class-threads=4"])) 2780 compiler_test_infos.append(CompilerTestInfo("compiler/bytecodehar/merge_abc_consistence_check/projects", "js", 2781 ["--merge-abc", "--dump-assembly", "--enable-abc-input", 2782 "--abc-class-threads=4"])) 2783 compiler_test_infos.append(CompilerTestInfo("compiler/cache_projects", "ts", 2784 ["--merge-abc", "--dump-assembly", "--enable-abc-input", 2785 "--dump-deps-info", "--remove-redundant-file", 2786 "--dump-literal-buffer", "--dump-string", "--abc-class-threads=4", 2787 "--cache-file"])) 2788 2789 compiler_test_infos.append(CompilerTestInfo("compiler/ts/shared_module/projects", "ts", 2790 ["--module", "--merge-abc", "--dump-assembly"])) 2791 compiler_test_infos.append(CompilerTestInfo("compiler/protobin", "ts", [])) 2792 compiler_test_infos.append(CompilerTestInfo("compiler/merge_hap/projects", "ts", 2793 ["--merge-abc", "--dump-assembly", "--enable-abc-input", 2794 "--dump-literal-buffer", "--dump-string", "--abc-class-threads=4"])) 2795 compiler_test_infos.append(CompilerTestInfo("compiler/abc2program", "ts", 2796 ["--merge-abc", "--module", "--dump-assembly", "--enable-abc-input", 2797 "--dump-literal-buffer", "--dump-string", "--source-file=source.ts", 2798 "--module-record-field-name=source"])) 2799 2800 if args.enable_arkguard: 2801 prepare_for_obfuscation(compiler_test_infos, runner.test_root) 2802 2803 for info in compiler_test_infos: 2804 runner.add_directory(info.directory, info.extension, info.flags) 2805 2806 filesinfo_compiler_infos = [] 2807 filesinfo_runner = FilesInfoRunner(args) 2808 filesinfo_compiler_infos.append(CompilerTestInfo("compiler/filesInfoTest/sourceLang", "txt", 2809 ["--module", "--merge-abc", "--dump-assembly"])) 2810 2811 for info in filesinfo_compiler_infos: 2812 filesinfo_runner.add_directory(info.directory, info.extension, info.flags) 2813 2814 runners.append(runner) 2815 runners.append(filesinfo_runner) 2816 2817 2818def add_directory_for_bytecode(runners, args): 2819 runner = BytecodeRunner(args) 2820 runner.add_directory("bytecode/commonjs", "js", ["--commonjs", "--dump-assembly"]) 2821 runner.add_directory("bytecode/js", "js", ["--dump-assembly"]) 2822 runner.add_directory("bytecode/ts/cases", "ts", ["--dump-assembly"]) 2823 runner.add_directory("bytecode/ts/ic", "ts", ["--dump-assembly"]) 2824 runner.add_directory("bytecode/ts/api11", "ts", ["--dump-assembly", "--module", "--target-api-version=11"]) 2825 runner.add_directory("bytecode/ts/api12", "ts", ["--dump-assembly", "--module", "--target-api-version=12"]) 2826 runner.add_directory("bytecode/ts/api18", "ts", ["--dump-assembly", "--module", "--target-api-version=18"]) 2827 runner.add_directory("bytecode/watch-expression", "js", ["--debugger-evaluate-expression", "--dump-assembly"]) 2828 2829 runners.append(runner) 2830 2831 2832def add_directory_for_debug(runners, args): 2833 runner = RegressionRunner(args) 2834 runner.add_directory("debug/parser", "js", ["--parse-only", "--dump-ast"]) 2835 2836 runners.append(runner) 2837 2838 2839def add_cmd_for_aop_transform(runners, args): 2840 runner = AopTransform(args) 2841 2842 aop_file_path = path.join(runner.test_root, "aop") 2843 lib_suffix = '.so' 2844 #cpp src, deal type, result compare str, abc compare str 2845 msg_list = [ 2846 ["correct_modify.cpp", "compile", "aop_transform_start", "new_abc_content"], 2847 ["correct_no_modify.cpp", "compile", "aop_transform_start", ""], 2848 ["exec_error.cpp", "compile", "Transform exec fail", ""], 2849 ["no_func_transform.cpp", "compile", "os::library_loader::ResolveSymbol get func Transform error", ""], 2850 ["error_format.cpp", "copy_lib", "os::library_loader::Load error", ""], 2851 ["".join(["no_exist", lib_suffix]), "dirct_use", "Failed to find file", ""], 2852 ["error_suffix.xxx", "direct_use", "aop transform file suffix support", ""] 2853 ] 2854 for msg in msg_list: 2855 cpp_file = path.join(aop_file_path, msg[0]) 2856 if msg[1] == 'compile': 2857 lib_file = cpp_file.replace('.cpp', lib_suffix) 2858 remove_file = lib_file 2859 runner.add_cmd(["g++", "--share", "-o", lib_file, cpp_file], "", "", "") 2860 elif msg[1] == 'copy_lib': 2861 lib_file = cpp_file.replace('.cpp', lib_suffix) 2862 remove_file = lib_file 2863 if not os.path.exists(lib_file): 2864 with open(cpp_file, "r") as source_file: 2865 fd = os.open(lib_file, os.O_RDWR | os.O_CREAT | os.O_TRUNC) 2866 target_file = os.fdopen(fd, 'w') 2867 target_file.write(source_file.read()) 2868 elif msg[1] == 'direct_use': 2869 lib_file = cpp_file 2870 remove_file = "" 2871 2872 js_file = path.join(aop_file_path, "test_aop.js") 2873 runner.add_cmd([runner.es2panda, "--merge-abc", "--transform-lib", lib_file, js_file], msg[2], msg[3], remove_file) 2874 2875 runners.append(runner) 2876 2877 2878class AopTransform(Runner): 2879 def __init__(self, args): 2880 Runner.__init__(self, args, "AopTransform") 2881 2882 def add_cmd(self, cmd, compare_str, compare_abc_str, remove_file, func=TestAop): 2883 self.tests += [func(cmd, compare_str, compare_abc_str, remove_file)] 2884 2885 def test_path(self, src): 2886 return src 2887 2888 2889def main(): 2890 args = get_args() 2891 2892 runners = [] 2893 2894 if args.regression: 2895 add_directory_for_regression(runners, args) 2896 2897 if args.abc_to_asm: 2898 add_directory_for_asm(runners, args) 2899 add_directory_for_asm(runners, args, "debug") 2900 2901 if args.tsc: 2902 runners.append(TSCRunner(args)) 2903 2904 if args.compiler: 2905 add_directory_for_compiler(runners, args) 2906 2907 if args.hotfix: 2908 runners.append(HotfixRunner(args)) 2909 2910 if args.hotreload: 2911 runners.append(HotreloadRunner(args)) 2912 2913 if args.coldfix: 2914 runners.append(ColdfixRunner(args)) 2915 2916 if args.coldreload: 2917 runners.append(ColdreloadRunner(args)) 2918 2919 if args.debugger: 2920 runners.append(DebuggerRunner(args)) 2921 2922 if args.base64: 2923 runners.append(Base64Runner(args)) 2924 2925 if args.bytecode: 2926 add_directory_for_bytecode(runners, args) 2927 2928 if args.aop_transform: 2929 add_cmd_for_aop_transform(runners, args) 2930 2931 if args.debug: 2932 add_directory_for_debug(runners, args) 2933 2934 if args.version_control: 2935 add_directory_for_version_control(runners, args) 2936 2937 failed_tests = 0 2938 2939 for runner in runners: 2940 runner.run() 2941 failed_tests += runner.summarize() 2942 2943 if failed_tests > 0: 2944 exit(1) 2945 exit(0) 2946 2947 2948if __name__ == "__main__": 2949 main() 2950