#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2024 Huawei Device Co., Ltd.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import subprocess
import sys
import os
import argparse
import shutil
import json
import time
import re
import urllib.request


def _get_args():
    parser = argparse.ArgumentParser(add_help=True)
    parser.add_argument("-op", "--out_path", default=r"./", type=str,
                        help="path of out. default: ./")
    parser.add_argument("-rp", "--root_path", default=r"./", type=str,
                        help="path of root. default: ./")
    parser.add_argument("-cl", "--components_list", default="", type=str,
                        help="components list: pass the component names separated by commas, "
                             "example: A,B,C. default: none")
    parser.add_argument("-bt", "--build_type", default=0, type=int,
                        help="build type. default: 0")
    parser.add_argument("-on", "--organization_name", default='ohos', type=str,
                        help="organization name. default: 'ohos'")
    parser.add_argument("-os", "--os_arg", default=r"linux", type=str,
                        help="target os. default: linux")
    parser.add_argument("-ba", "--build_arch", default=r"x86", type=str,
                        help="build arch. default: x86")
    parser.add_argument("-lt", "--local_test", default=0, type=int,
                        help="local test: 1 for local, 0 for not local. default: 0")
    args = parser.parse_args()
    return args


def _check_label(public_deps, value):
    for i in value["innerapis"]:
        if i:
            label = i.get("label")
            if public_deps == label:
                return label.split(':')[-1]
    return ""


def _get_public_external_deps(data, public_deps):
    if not isinstance(data, dict):
        return ""
    for key, value in data.items():
        if not isinstance(value, dict):
            continue
        _data = _check_label(public_deps, value)
        if _data:
            return key + ":" + _data
    return ""


def _is_innerkit(data, part, module):
    if not isinstance(data, dict):
        return False

    part_data = data.get(part)
    if not isinstance(part_data, dict):
        return False
    module_list = []
    for i in part_data["innerapis"]:
        if i:
            module_list.append(i["name"])
    if module in module_list:
        return True
    return False


def _get_components_json(out_path):
    jsondata = ""
    json_path = os.path.join(out_path, "build_configs", "parts_info", "components.json")
    with open(json_path, 'r') as f:
        try:
            jsondata = json.load(f)
        except Exception:
            print('--_get_components_json parse json error--')
    return jsondata


def _handle_one_layer_json(json_key, json_data, desc_list):
    data_list = json_data.get(json_key)
    if isinstance(data_list, list) and len(data_list) >= 1:
        desc_list.extend(data_list)
    else:
        desc_list.append(json_data.get(json_key))


def _handle_two_layer_json(json_key, json_data, desc_list):
    value_depth = len(json_data.get(json_key))
    for i in range(value_depth):
        _include_dirs = json_data.get(json_key)[i].get('include_dirs')
        if _include_dirs:
            desc_list.extend(_include_dirs)
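

# A minimal sketch of the publicinfo JSON consumed by the handlers above and
# by _get_json_data below. The field names appear in this script; the concrete
# values are illustrative only.
# {
#     "label": "//foundation/sample/part:module",
#     "type": "shared_library",
#     "out_name": "libmodule.z.so",
#     "public_deps": ["//third_party/dep_part:dep_module"],
#     "public_configs": [{"include_dirs": ["//foundation/sample/part/include"]}],
#     "all_dependent_configs": [{"include_dirs": ["//foundation/sample/part/include"]}]
# }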


def _get_json_data(args, module):
    json_path = os.path.join(args.get("out_path"),
                             args.get("subsystem_name"), args.get("part_name"), "publicinfo", module + ".json")
    jsondata = ""
    with open(json_path, 'r') as f:
        try:
            jsondata = json.load(f)
        except Exception:
            print(json_path)
            print('--_get_json_data parse json error--')
    return jsondata


def _handle_deps_data(json_data):
    dep_list = []
    if json_data.get('public_deps'):
        _handle_one_layer_json('public_deps', json_data, dep_list)
    return dep_list


def _handle_includes_data(json_data):
    include_list = []
    if json_data.get('public_configs'):
        _handle_two_layer_json('public_configs', json_data, include_list)
    if json_data.get('all_dependent_configs'):
        _handle_two_layer_json('all_dependent_configs', json_data, include_list)
    return include_list


def _get_static_lib_path(args, json_data):
    label = json_data.get('label')
    split_label = label.split("//")[1].split(":")[0]
    real_static_lib_path = os.path.join(args.get("out_path"), "obj",
                                        split_label, json_data.get('out_name'))
    return real_static_lib_path


def _copy_dir(src_path, target_path):
    if not os.path.isdir(src_path):
        return False
    filelist_src = os.listdir(src_path)
    for file in filelist_src:
        path = os.path.join(os.path.abspath(src_path), file)
        if os.path.isdir(path):
            if file.startswith("."):
                continue
            path1 = os.path.join(target_path, file)
            _copy_dir(path, path1)
        else:
            if not (path.endswith(".h") or path.endswith(".hpp") or path.endswith(".in")):
                continue
            with open(path, 'rb') as read_stream:
                contents = read_stream.read()
            if not os.path.exists(target_path):
                print('target_path', target_path)
                os.makedirs(target_path)
            path1 = os.path.join(target_path, file)
            with os.fdopen(os.open(path1, os.O_WRONLY | os.O_CREAT, mode=0o640), "wb") as write_stream:
                write_stream.write(contents)
    return True


def _copy_includes(args, module, includes: list):
    if module == 'ipc_single':
        includes = [
            "//foundation/communication/ipc/interfaces/innerkits/ipc_core/include",
            "//foundation/communication/ipc/ipc/native/src/core/include",
            "//foundation/communication/ipc/ipc/native/src/mock/include",
        ]
    includes_out_dir = os.path.join(args.get("out_path"), "component_package",
                                    args.get("part_path"), "innerapis", module, "includes")
    if not os.path.exists(includes_out_dir):
        os.makedirs(includes_out_dir)
    for include in includes:
        _sub_include = include.split(args.get("part_path") + '/')[-1]
        split_include = include.split("//")[1]
        real_include_path = os.path.join(args.get("root_path"), split_include)
        if args.get('part_name') == 'libunwind':
            _out_dir = os.path.join(includes_out_dir, _sub_include)
            _copy_dir(real_include_path, _out_dir)
            continue
        _copy_dir(real_include_path, includes_out_dir)
    print("_copy_includes has done")


def _copy_lib(args, json_data, module):
    so_path = ""
    if json_data.get('type') == 'static_library':
        so_path = _get_static_lib_path(args, json_data)
    else:
        so_path = os.path.join(args.get("out_path"), args.get("subsystem_name"),
                               args.get("part_name"), json_data.get('out_name'))
    if os.path.isfile(so_path):
        lib_out_dir = os.path.join(args.get("out_path"), "component_package",
                                   args.get("part_path"), "innerapis", module, "libs")
        if not os.path.exists(lib_out_dir):
            os.makedirs(lib_out_dir)
        shutil.copy(so_path, lib_out_dir)
        return True
    else:
        return False
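

# For orientation, a sketch of the layout this script builds up under
# <out_path>/component_package/<part_path>/ (directory and file names are
# taken from the code; the module name is illustrative):
#   innerapis/<module>/includes/     headers collected by _copy_includes
#   innerapis/<module>/libs/         binary copied by _copy_lib
#   innerapis/<module>/BUILD.gn      generated by _generate_build_gn
#   bundle.json, LICENSE, README.md  copied or rewritten at the part root
#   *.tgz                            produced later by `hpm pack`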


def _dirs_handler(bundlejson_out):
    dirs = dict()
    dirs['./'] = []
    directory = bundlejson_out
    for filename in os.listdir(directory):
        filepath = os.path.join(directory, filename)
        if os.path.isfile(filepath):
            dirs['./'].append(filename)
        else:
            dirs[filename] = [filename + "/*"]
    delete_list = ['LICENSE', 'README.md', 'README_zh.md', 'README_en.md', 'bundle.json']
    for delete_txt in delete_list:
        if delete_txt in dirs['./']:
            dirs['./'].remove(delete_txt)
    if dirs['./'] == []:
        del dirs['./']
    return dirs


def _copy_bundlejson(args, public_deps_list):
    bundlejson_out = os.path.join(args.get("out_path"), "component_package", args.get("part_path"))
    print("bundlejson_out : ", bundlejson_out)
    if not os.path.exists(bundlejson_out):
        os.makedirs(bundlejson_out)
    bundlejson = os.path.join(args.get("root_path"), args.get("part_path"), "bundle.json")
    dependencies_dict = {}
    for public_deps in public_deps_list:
        _public_dep = '@' + args.get('organization_name') + '/' + public_deps.split(':')[0]
        dependencies_dict.update({_public_dep: "*"})
    if os.path.isfile(bundlejson):
        with open(bundlejson, 'r') as f:
            bundle_data = json.load(f)
        bundle_data['publishAs'] = 'binary'
        bundle_data.update({'os': args.get('os')})
        bundle_data.update({'buildArch': args.get('buildArch')})
        dirs = _dirs_handler(bundlejson_out)
        bundle_data['dirs'] = dirs
        bundle_data['version'] = str(bundle_data['version'])
        if bundle_data['version'] == '':
            bundle_data['version'] = '1.0.0'
        pattern = r'^(\d+)\.(\d+)(-[a-zA-Z]+)?$'  # regex matching version strings of the form a.b[-suffix]
        match = re.match(pattern, bundle_data['version'])
        if match:
            a = match.group(1)
            b = match.group(2)
            suffix = match.group(3) if match.group(3) else ""
            bundle_data['version'] = f"{a}.{b}.0{suffix}"
        if args.get('build_type') in [0, 1]:
            bundle_data['version'] += '-snapshot'
        if args.get('organization_name'):
            _name_pattern = r'@(.*.)/'
            bundle_data['name'] = re.sub(_name_pattern, '@' + args.get('organization_name') + '/',
                                         bundle_data['name'])
        if bundle_data.get('scripts'):
            bundle_data.update({'scripts': {}})
        if bundle_data.get('licensePath'):
            del bundle_data['licensePath']
        if bundle_data.get('readmePath'):
            del bundle_data['readmePath']
        bundle_data['dependencies'] = dependencies_dict
        with os.fdopen(os.open(os.path.join(bundlejson_out, "bundle.json"), os.O_WRONLY | os.O_CREAT, mode=0o640),
                       "w",
                       encoding='utf-8') as fd:
            json.dump(bundle_data, fd, indent=4, ensure_ascii=False)
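

# Worked example of the version normalization in _copy_bundlejson (values are
# illustrative): "3.2" becomes "3.2.0" and "3.2-rc" becomes "3.2.0-rc", while
# a three-part version such as "3.2.1" is left unchanged because the regex
# only matches two-part versions. For build_type 0 or 1 a "-snapshot" suffix
# is then appended, e.g. "3.2.0-snapshot".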


def _copy_license(args):
    license_out = os.path.join(args.get("out_path"), "component_package", args.get("part_path"))
    print("license_out : ", license_out)
    if not os.path.exists(license_out):
        os.makedirs(license_out)
    license_file = os.path.join(args.get("root_path"), args.get("part_path"), "LICENSE")
    if os.path.isfile(license_file):
        shutil.copy(license_file, license_out)
    else:
        license_default = os.path.join(args.get("root_path"), "build", "LICENSE")
        shutil.copy(license_default, license_out)
    bundlejson_out = os.path.join(args.get("out_path"), "component_package", args.get("part_path"), 'bundle.json')
    with open(bundlejson_out, 'r') as f:
        bundle_data = json.load(f)
    bundle_data.update({"license": "Apache License 2.0"})
    if os.path.isfile(bundlejson_out):
        os.remove(bundlejson_out)
    with os.fdopen(os.open(bundlejson_out, os.O_WRONLY | os.O_CREAT, mode=0o640), "w",
                   encoding='utf-8') as fd:
        json.dump(bundle_data, fd, indent=4, ensure_ascii=False)


def _copy_readme(args):
    readme_out = os.path.join(args.get("out_path"), "component_package", args.get("part_path"))
    print("readme_out : ", readme_out)
    if not os.path.exists(readme_out):
        os.makedirs(readme_out)
    readme = os.path.join(args.get("root_path"), args.get("part_path"), "README.md")
    readme_zh = os.path.join(args.get("root_path"), args.get("part_path"), "README_zh.md")
    readme_en = os.path.join(args.get("root_path"), args.get("part_path"), "README_en.md")
    readme_out_file = os.path.join(readme_out, "README.md")
    if os.path.isfile(readme):
        shutil.copy(readme, readme_out)
    elif os.path.isfile(readme_zh):
        shutil.copy(readme_zh, readme_out_file)
    elif os.path.isfile(readme_en):
        shutil.copy(readme_en, readme_out_file)
    else:
        try:
            fd = os.open(readme_out_file, os.O_WRONLY | os.O_CREAT, mode=0o640)
            with os.fdopen(fd, 'w') as fp:
                fp.write('README')
        except FileExistsError:
            pass


def _generate_import(fp):
    fp.write('import("//build/ohos.gni")\n')


def _generate_configs(fp, module):
    fp.write('\nconfig("' + module + '_configs") {\n')
    fp.write('  visibility = [ ":*" ]\n')
    fp.write('  include_dirs = [\n')
    fp.write('    "includes",\n')
    if module == 'libunwind':
        fp.write('    "includes/src",\n')
        fp.write('    "includes/include",\n')
        fp.write('    "includes/include/tdep-arm",\n')
    fp.write('  ]\n')
    if module == 'libunwind':
        fp.write('  cflags = [\n')
        fp.write("""    "-D_GNU_SOURCE",
    "-DHAVE_CONFIG_H",
    "-DNDEBUG",
    "-DCC_IS_CLANG",
    "-fcommon",
    "-Werror",
    "-Wno-absolute-value",
    "-Wno-header-guard",
    "-Wno-unused-parameter",
    "-Wno-unused-variable",
    "-Wno-int-to-pointer-cast",
    "-Wno-pointer-to-int-cast",
    "-Wno-inline-asm",
    "-Wno-shift-count-overflow",
    "-Wno-tautological-constant-out-of-range-compare",
    "-Wno-unused-function",\n""")
        fp.write('  ]\n')
    fp.write('}\n')


def _generate_prebuilt_shared_library(fp, lib_type, module):
    if lib_type == 'static_library':
        fp.write('ohos_prebuilt_static_library("' + module + '") {\n')
    elif lib_type == 'executable':
        fp.write('ohos_prebuilt_executable("' + module + '") {\n')
    elif lib_type == 'etc':
        fp.write('ohos_prebuilt_etc("' + module + '") {\n')
    else:
        fp.write('ohos_prebuilt_shared_library("' + module + '") {\n')


def _generate_public_configs(fp, module):
    fp.write('  public_configs = [":' + module + '_configs"]\n')


def _generate_public_deps(fp, deps: list, components_json, public_deps_list: list):
    if not deps:
        return public_deps_list
    fp.write('  public_external_deps = [\n')
    for dep in deps:
        public_external_deps = _get_public_external_deps(components_json, dep)
        if len(public_external_deps) > 0:
            fp.write('    "' + public_external_deps + '",\n')
            public_deps_list.append(public_external_deps)
    fp.write('  ]\n')
    return public_deps_list


def _generate_other(fp, args, json_data, module):
    so_name = json_data.get('out_name')
    fp.write('  source = "libs/' + so_name + '"\n')
    fp.write('  part_name = "' + args.get("part_name") + '"\n')
    fp.write('  subsystem_name = "' + args.get("subsystem_name") + '"\n')


def _generate_end(fp):
    fp.write('}')
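

# A minimal sketch of the BUILD.gn the _generate_* helpers above emit for a
# prebuilt shared library; the module, dependency, and library names are
# illustrative:
#
#   import("//build/ohos.gni")
#
#   config("sample_module_configs") {
#     visibility = [ ":*" ]
#     include_dirs = [
#       "includes",
#     ]
#   }
#   ohos_prebuilt_shared_library("sample_module") {
#     public_configs = [":sample_module_configs"]
#     public_external_deps = [
#       "dep_part:dep_module",
#     ]
#     source = "libs/libsample_module.z.so"
#     part_name = "sample_part"
#     subsystem_name = "sample_subsystem"
#   }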


def _generate_build_gn(args, module, json_data, deps: list, components_json, public_deps_list):
    gn_path = os.path.join(args.get("out_path"), "component_package", args.get("part_path"),
                           "innerapis", module, "BUILD.gn")
    fd = os.open(gn_path, os.O_WRONLY | os.O_CREAT, mode=0o640)
    fp = os.fdopen(fd, 'w')
    _generate_import(fp)
    _generate_configs(fp, module)
    _generate_prebuilt_shared_library(fp, json_data.get('type'), module)
    _generate_public_configs(fp, module)
    _list = _generate_public_deps(fp, deps, components_json, public_deps_list)
    _generate_other(fp, args, json_data, module)
    _generate_end(fp)
    print("_generate_build_gn has done")
    fp.close()
    return _list


def _parse_module_list(args):
    module_list = []
    publicinfo_path = os.path.join(args.get("out_path"),
                                   args.get("subsystem_name"), args.get("part_name"), "publicinfo")
    print('publicinfo_path', publicinfo_path)
    if not os.path.exists(publicinfo_path):
        return module_list
    publicinfo_dir = os.listdir(publicinfo_path)
    for filename in publicinfo_dir:
        if filename.endswith(".json"):
            module_name = filename.split(".json")[0]
            module_list.append(module_name)
            print('filename', filename)
    print('module_list', module_list)
    return module_list


def _lib_special_handler(part_name, module, args):
    if part_name == 'mksh':
        mksh_file_path = os.path.join(args.get('out_path'), 'startup', 'init', 'sh')
        sh_out = os.path.join(args.get("out_path"), "thirdparty", "mksh")
        if os.path.isfile(mksh_file_path):
            shutil.copy(mksh_file_path, sh_out)
    if module == 'blkid':
        blkid_file_path = os.path.join(args.get('out_path'), 'filemanagement', 'storage_service', 'blkid')
        blkid_out = os.path.join(args.get("out_path"), "thirdparty", "e2fsprogs")
        if os.path.isfile(blkid_file_path):
            shutil.copy(blkid_file_path, blkid_out)
    if module == 'grpc_cpp_plugin':
        grpc_file_path = os.path.join(args.get('out_path'), 'clang_x64', 'thirdparty', 'grpc', 'grpc_cpp_plugin')
        grpc_out = os.path.join(args.get("out_path"), "thirdparty", "grpc")
        if os.path.isfile(grpc_file_path):
            shutil.copy(grpc_file_path, grpc_out)


def _generate_component_package(args, components_json):
    part_name = args.get("part_name")
    modules = _parse_module_list(args)
    print('modules', modules)
    if len(modules) == 0:
        return
    is_component_build = False
    _public_deps_list = []
    for module in modules:
        public_deps_list = []
        if not _is_innerkit(components_json, args.get("part_name"), module):
            continue
        json_data = _get_json_data(args, module)
        _lib_special_handler(part_name, module, args)
        lib_exists = _copy_lib(args, json_data, module)
        if lib_exists is False:
            continue
        is_component_build = True
        includes = _handle_includes_data(json_data)
        deps = _handle_deps_data(json_data)
        _copy_includes(args, module, includes)
        _list = _generate_build_gn(args, module, json_data, deps, components_json, public_deps_list)
        if _list:
            _public_deps_list.extend(_list)
    if is_component_build:
        _copy_bundlejson(args, _public_deps_list)
        _copy_license(args)
        _copy_readme(args)
        if args.get("build_type") in [0, 1]:
            _hpm_pack(args)
            _copy_hpm_pack(args)
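

# Illustrative shapes of the parts_info inputs read by the loaders below (the
# file names come from this script; the keys and values are made up):
#   part_subsystem.json:   {"init": "startup", ...}            part name -> subsystem name
#   parts_path_info.json:  {"init": "base/startup/init", ...}  part name -> part path in the source tree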


def _get_part_subsystem(out_path):
    jsondata = ""
    json_path = os.path.join(out_path, "build_configs", "parts_info", "part_subsystem.json")
    print("json_path", json_path)
    with open(json_path, 'r') as f:
        try:
            jsondata = json.load(f)
        except Exception:
            print('--_get_part_subsystem parse json error--')
    return jsondata


def _get_parts_path_info(out_path):
    jsondata = ""
    json_path = os.path.join(out_path, "build_configs", "parts_info", "parts_path_info.json")
    with open(json_path, 'r') as f:
        try:
            jsondata = json.load(f)
        except Exception:
            print('--_get_parts_path_info parse json error--')
    return jsondata


def _get_parts_path(json_data, part_name):
    parts_path = None
    if json_data.get(part_name) is not None:
        parts_path = json_data[part_name]
    return parts_path


def _hpm_pack(args):
    part_path = os.path.join(args.get("out_path"), "component_package", args.get("part_path"))
    cmd = ['hpm', 'pack']
    try:
        subprocess.run(cmd, shell=False, cwd=part_path)
    except Exception:
        print("{} pack fail".format(args.get("part_name")))
        return
    print("{} pack succ".format(args.get("part_name")))


def _copy_hpm_pack(args):
    hpm_packages_path = args.get('hpm_packages_path')
    part_path = os.path.join(args.get("out_path"), "component_package", args.get("part_path"))
    dirs = os.listdir(part_path)
    tgz_file_name = ''
    for file in dirs:
        if file.endswith(".tgz"):
            tgz_file_name = file
    tgz_file_out = os.path.join(part_path, tgz_file_name)
    if tgz_file_name:
        shutil.copy(tgz_file_out, hpm_packages_path)


def _make_hpm_packages_dir(root_path):
    _out_path = os.path.join(root_path, 'out')
    hpm_packages_path = os.path.join(_out_path, 'hpm_packages')
    os.makedirs(hpm_packages_path, exist_ok=True)
    return hpm_packages_path


def _del_exist_component_package(out_path):
    _component_package_path = os.path.join(out_path, 'component_package')
    if os.path.isdir(_component_package_path):
        try:
            print('del dir component_package start..')
            shutil.rmtree(_component_package_path)
            print('del dir component_package end..')
        except Exception:
            print('del dir component_package FAILED')


def _get_component_check() -> list:
    check_list = []
    contents = urllib.request.urlopen("https://ci.openharmony.cn/api/daily_build/component/check/list").read().decode(
        encoding="utf-8")
    _check_json = json.loads(contents)
    try:
        check_list.extend(_check_json["data"]["dep_list"])
        check_list.extend(_check_json["data"]["indep_list"])
    except Exception:
        print("The component check API returned something unexpected, please check the API response..")
    check_list = list(set(check_list))
    check_list = sorted(check_list)
    return check_list


def _generate_component_package_handler(args, components_list, parts_path_info, part_name, components_json, part_path):
    if not components_list:
        part_path = _get_parts_path(parts_path_info, part_name)
        if part_path is None:
            return
        _generate_component_package(args, components_json)
    for component in components_list:
        if part_name == component:
            if part_path is None:
                return
            _generate_component_package(args, components_json)


def generate_component_package(out_path, root_path, components_list=None, build_type=0, organization_name='ohos',
                               os_arg='linux', build_arch_arg='x86', local_test=0):
    """Generate binary component packages.

    Args:
        out_path: output path of the build, default: out/rk3568
        root_path: root path of the source code, default: oh/
        components_list: list of all components that need to be packaged
        build_type:
            0: default, pack and do not change organization_name
            1: pack and change organization_name
            2: do not pack and do not change organization_name
        organization_name: default 'ohos'; if different, the bundle name is rewritten
        os_arg: target os, default: linux
        build_arch_arg: build arch, default: x86
        local_test: 1 to enable local test, 0 to disable
    Returns:
        None
    """
    start_time = time.time()
    _check_list = _get_component_check()
    print(components_list)
    if not components_list:
        components_list = []
    elif local_test == 1 and components_list:
        components_list = [component for component in components_list.split(",")]
    else:
        components_list = [component for component in components_list.split(",") if component in _check_list]
        if not components_list:
            sys.exit("stop for no target to pack..")
    part_subsystem = _get_part_subsystem(out_path)
    parts_path_info = _get_parts_path_info(out_path)
    components_json = _get_components_json(out_path)
    hpm_packages_path = _make_hpm_packages_dir(root_path)
    # delete any existing component_package directory
    _del_exist_component_package(out_path)
    for key, value in part_subsystem.items():
        part_name = key
        subsystem_name = value
        part_path = _get_parts_path(parts_path_info, part_name)
        args = {"subsystem_name": subsystem_name, "part_name": part_name,
                "out_path": out_path, "root_path": root_path, "part_path": part_path,
                "os": os_arg, "buildArch": build_arch_arg, "hpm_packages_path": hpm_packages_path,
                "build_type": build_type, "organization_name": organization_name
                }
        # pack when components_list is empty or the part name is in components_list
        _generate_component_package_handler(args, components_list, parts_path_info, part_name,
                                            components_json,
                                            part_path)

    end_time = time.time()
    run_time = end_time - start_time
    print("generate_component_package out_path", out_path)
    print(f"Generating the binary component packages took {run_time} seconds")


def main():
    py_args = _get_args()
    generate_component_package(py_args.out_path,
                               py_args.root_path,
                               components_list=py_args.components_list,
                               build_type=py_args.build_type,
                               organization_name=py_args.organization_name,
                               os_arg=py_args.os_arg,
                               build_arch_arg=py_args.build_arch,
                               local_test=py_args.local_test)


if __name__ == '__main__':
    main()
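

# Example invocation (a sketch; the script file name and the paths below are
# illustrative and depend on where this file lives in the source tree):
#   python3 generate_component_package.py -op out/rk3568 -rp /path/to/oh \
#       -cl componentA,componentB -bt 0 -on ohos -os linux -ba x86 -lt 0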