#!/usr/bin/env python3
# -*- coding: utf-8 -*-

# Copyright (c) 2022 Huawei Device Co., Ltd.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Description : Generate the update.bin file
"""
import os
import struct
import hashlib
import subprocess
from log_exception import UPDATE_LOGGER
from utils import OPTIONS_MANAGER
from create_hashdata import HashType
from create_hashdata import CreateHash
from create_hashdata import HASH_BLOCK_SIZE
from create_chunk import CreateChunk
from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.asymmetric import padding

# Fixed byte sizes of the update.bin on-disk layout.
UPGRADE_FILE_HEADER_LEN = 180
UPGRADE_RESERVE_LEN = 16
SIGN_SHA256_LEN = 256
SIGN_SHA384_LEN = 384
UPGRADE_SIGNATURE_LEN = SIGN_SHA256_LEN + SIGN_SHA384_LEN
TLV_SIZE = 4
UPGRADE_PKG_HEADER_SIZE = 136
UPGRADE_PKG_TIME_SIZE = 32
UPGRADE_COMPINFO_SIZE = 71
UPGRADE_COMPINFO_SIZE_L2 = 87
COMPONENT_ADDR_SIZE = 16
COMPONENT_ADDR_SIZE_L2 = 32
COMPONENT_INFO_FMT_SIZE = 5
COMPONENT_VERSION_SIZE = 10
COMPONENT_SIZE_FMT_SIZE = 8
COMPONENT_DIGEST_SIZE = 32
BLOCK_SIZE = 8192
HEADER_TLV_TYPE = 0x11
HEADER_TLV_TYPE_L2 = 0x01
ZIP_TLV_TYPE = 0xaa
# signature algorithm
SIGN_ALGO_RSA = "SHA256withRSA"
SIGN_ALGO_PSS = "SHA256withPSS"
# chunkdata offset
CHUNK_INFO_OFFSET = 262

"""
Format
H: unsigned short
I: unsigned int
B: unsigned char
s: char[]
"""
TLV_FMT = "2H"
UPGRADE_PKG_HEADER_FMT = "2I64s64s"
UPGRADE_PKG_TIME_FMT = "16s16s"
COMPONENT_INFO_FMT = "H3B"
COMPONENT_SIZE_FMT = "iI"


class CreatePackage(object):
    """
    Create the update.bin file.

    Writes, in order: an optional embedded build-tools zip (stream update),
    the TLV package header, per-component info records, the reserve field,
    hash-check data, signatures, component payloads, and (for stream
    updates) chunk lists plus per-image hash/sign data. All offsets below
    are byte offsets into the output file and are advanced as data is
    written.
    """

    def __init__(self, head_list, component_list, save_path, key_path):
        # head_list / component_list are the parsed package-header and
        # component descriptors produced elsewhere in the build pipeline.
        self.head_list = head_list
        self.component_list = component_list
        self.save_path = save_path
        self.key_path = key_path
        # Running write offsets into the output .bin file.
        self.compinfo_offset = 0
        self.component_offset = 0
        self.sign_offset = 0
        self.hash_info_offset = 0
        self.chunk_info_offset = 0
        self.chunk_data_offset = 0
        self.hash_write_start_offset = 0
        self.hash_write_end_offset = 0
        self.chunk_hash_offset = 0
        self.chunk_sign_offset = 0

        # L2 devices use a wider component-info record and a different
        # header TLV type.
        if OPTIONS_MANAGER.not_l2:
            self.upgrade_compinfo_size = UPGRADE_COMPINFO_SIZE
            self.header_tlv_type = HEADER_TLV_TYPE
        else:
            self.upgrade_compinfo_size = UPGRADE_COMPINFO_SIZE_L2
            self.header_tlv_type = HEADER_TLV_TYPE_L2

    def verify_param(self):
        """
        Validate constructor inputs.

        :return: True when all parameters look usable, False otherwise.
        """
        if self.head_list is None or self.component_list is None or \
                self.save_path is None or self.key_path is None:
            UPDATE_LOGGER.print_log("Check param failed!", UPDATE_LOGGER.ERROR_LOG)
            return False
        if os.path.isdir(self.key_path):
            UPDATE_LOGGER.print_log("Invalid keyname", UPDATE_LOGGER.ERROR_LOG)
            return False
        # NOTE(review): object.__sizeof__() is always positive, so this
        # check can never fail; the intent was probably an emptiness/length
        # check on head_list / component_list — confirm the expected types
        # before tightening it.
        if self.head_list.__sizeof__() <= 0 or self.component_list.__sizeof__() <= 0:
            UPDATE_LOGGER.print_log("Invalid param", UPDATE_LOGGER.ERROR_LOG)
            return False
        return True

    def write_update_zip(self, package_file):
        """
        Embed the signed build-tools zip as a TLV record at the start of
        the package (stream-update mode only).

        :param package_file: writable binary file object for update.bin.
        :return: number of bytes written (TLV header + zip payload).
        :raises RuntimeError: when the zip cannot be read or written.
        """
        try:
            build_tools_zip_path = OPTIONS_MANAGER.signed_package
            # Informational message; the original logged this at ERROR level.
            UPDATE_LOGGER.print_log(
                "build_tools_zip_path = %s" % build_tools_zip_path,
                UPDATE_LOGGER.INFO_LOG)
            with open(build_tools_zip_path, 'rb') as f:
                value_data = f.read()
            value_length = len(value_data)
            # '<HI' = little-endian u16 type + u32 length (6-byte TLV
            # header, wider than the 4-byte TLV_FMT used for the package
            # header because the zip length can exceed 64 KiB).
            tlv_header = struct.pack('<HI', ZIP_TLV_TYPE, value_length)
            package_file.write(tlv_header)
            package_file.write(value_data)
            UPDATE_LOGGER.print_log(
                f"Successfully wrote build_tools.zip (Type={ZIP_TLV_TYPE}, Length={value_length} bytes)",
                UPDATE_LOGGER.INFO_LOG
            )

            offset = len(tlv_header) + len(value_data)

        except Exception as e:
            UPDATE_LOGGER.print_log(
                f"Failed to write build_tools.zip: {str(e)}",
                UPDATE_LOGGER.ERROR_LOG
            )
            # Chain the original exception so the root cause is preserved.
            raise RuntimeError("write build_tools.zip failed") from e

        return offset

    def write_pkginfo(self, package_file, offset):
        """
        Write the package-info header (header TLV + pkg header + time TLV
        + time + component TLV) at the given file offset.

        :param package_file: writable binary file object for update.bin.
        :param offset: byte offset at which the header starts (0, or the
                       size of the embedded zip in stream-update mode).
        :return: True on success, False on pack or write failure.
        """
        try:
            # Type is 1 for package header in TLV format
            header_tlv = struct.pack(TLV_FMT, self.header_tlv_type, UPGRADE_PKG_HEADER_SIZE)
            pkg_info_length = \
                UPGRADE_RESERVE_LEN + TLV_SIZE + TLV_SIZE + TLV_SIZE + \
                UPGRADE_PKG_HEADER_SIZE + UPGRADE_PKG_TIME_SIZE + \
                self.upgrade_compinfo_size * self.head_list.entry_count
            upgrade_pkg_header = struct.pack(
                UPGRADE_PKG_HEADER_FMT, pkg_info_length, self.head_list.update_file_version,
                self.head_list.product_update_id, self.head_list.software_version)

            # Type is 2 for time in TLV format
            time_tlv = struct.pack(TLV_FMT, 0x02, UPGRADE_PKG_TIME_SIZE)
            upgrade_pkg_time = struct.pack(
                UPGRADE_PKG_TIME_FMT, self.head_list.date, self.head_list.time)

            # Type is 5 for component in TLV format
            component_tlv = struct.pack(
                TLV_FMT, 0x05, self.upgrade_compinfo_size * self.head_list.entry_count)
        except struct.error:
            UPDATE_LOGGER.print_log("Pack fail!", log_type=UPDATE_LOGGER.ERROR_LOG)
            return False

        # write pkginfo
        pkginfo = header_tlv + upgrade_pkg_header + time_tlv + upgrade_pkg_time + component_tlv
        try:
            package_file.seek(offset)
            package_file.write(pkginfo)
        except IOError:
            UPDATE_LOGGER.print_log("write fail!", log_type=UPDATE_LOGGER.ERROR_LOG)
            return False
        UPDATE_LOGGER.print_log("Write package header complete")
        return True

    def write_component_info(self, component, package_file):
        """
        Write one component-info record (addr, id/type flags, version,
        sizes, digest) at self.compinfo_offset, advancing the offset by
        each field's fixed size.

        :param component: component descriptor to serialize.
        :param package_file: writable binary file object for update.bin.
        :return: True on success, False on pack or write failure.
        """
        UPDATE_LOGGER.print_log("component information StartOffset:%s"
                                % self.compinfo_offset)
        if OPTIONS_MANAGER.not_l2:
            component_addr_size = COMPONENT_ADDR_SIZE
        else:
            component_addr_size = COMPONENT_ADDR_SIZE_L2

        try:
            package_file.seek(self.compinfo_offset)
            package_file.write(component.component_addr)
            self.compinfo_offset += component_addr_size

            package_file.seek(self.compinfo_offset)
            component_info = struct.pack(
                COMPONENT_INFO_FMT, component.id, component.res_type,
                component.flags, component.type)
            package_file.write(component_info)
            self.compinfo_offset += COMPONENT_INFO_FMT_SIZE

            package_file.seek(self.compinfo_offset)
            package_file.write(component.version)
            self.compinfo_offset += COMPONENT_VERSION_SIZE

            package_file.seek(self.compinfo_offset)
            component_size = struct.pack(
                COMPONENT_SIZE_FMT, component.size, component.original_size)
            package_file.write(component_size)
            self.compinfo_offset += COMPONENT_SIZE_FMT_SIZE

            package_file.seek(self.compinfo_offset)
            package_file.write(component.digest)
            self.compinfo_offset += COMPONENT_DIGEST_SIZE
        except (struct.error, IOError):
            return False
        return True

    def write_component(self, component, package_file):
        """
        Append one component's payload file at self.component_offset and
        advance the offset by its length.

        :param component: component descriptor (provides file_path).
        :param package_file: writable binary file object for update.bin.
        :return: True on success, False on I/O failure.
        """
        UPDATE_LOGGER.print_log("Add component to package StartOffset:%s"
                                % self.component_offset)
        try:
            with open(component.file_path, "rb") as component_file:
                component_data = component_file.read()
                package_file.seek(self.component_offset)
                package_file.write(component_data)
                component_len = len(component_data)
                self.component_offset += component_len
        except IOError:
            return False
        UPDATE_LOGGER.print_log("Write component complete ComponentSize:%s"
                                % component_len)
        return True

    def calculate_hash(self, package_file):
        """
        SHA-256 over bytes [0, self.component_offset) of the package,
        read in BLOCK_SIZE chunks.

        :param package_file: readable binary file object for update.bin.
        :return: 32-byte SHA-256 digest.
        """
        hash_sha256 = hashlib.sha256()
        remain_len = self.component_offset

        package_file.seek(0)
        while remain_len > BLOCK_SIZE:
            hash_sha256.update(package_file.read(BLOCK_SIZE))
            remain_len -= BLOCK_SIZE
        if remain_len > 0:
            hash_sha256.update(package_file.read(remain_len))
        return hash_sha256.digest()

    def calculate_header_hash(self, package_file):
        """
        SHA-256 over the header area: self.hash_info_offset bytes starting
        at the embedded-zip offset (0 for non-stream packages).

        :param package_file: readable binary file object for update.bin.
        :return: 32-byte SHA-256 digest.
        """
        hash_sha256 = hashlib.sha256()
        remain_len = self.hash_info_offset
        package_file.seek(OPTIONS_MANAGER.zip_offset)
        while remain_len > BLOCK_SIZE:
            hash_sha256.update(package_file.read(BLOCK_SIZE))
            remain_len -= BLOCK_SIZE
        if remain_len > 0:
            hash_sha256.update(package_file.read(remain_len))
        return hash_sha256.digest()

    def sign_digest_with_pss(self, digest):
        """
        Sign a digest with RSA-PSS (MGF1/SHA-256, max salt length) using
        the PEM private key at self.key_path.

        :param digest: bytes to sign.
        :return: signature bytes, or False on key-load/sign failure.
        """
        try:
            with open(self.key_path, 'rb') as f_r:
                key_data = f_r.read()
            private_key = serialization.load_pem_private_key(
                key_data,
                password=None,
                backend=default_backend())

            pad = padding.PSS(
                mgf=padding.MGF1(hashes.SHA256()),
                salt_length=padding.PSS.MAX_LENGTH)

            signature = private_key.sign(digest, pad, hashes.SHA256())
        except (OSError, ValueError):
            return False
        return signature

    def sign_digest(self, digest):
        """
        Sign a digest with RSA PKCS#1 v1.5 / SHA-256 using the PEM private
        key at self.key_path.

        :param digest: bytes to sign.
        :return: signature bytes, or False on key-load/sign failure.
        """
        try:
            with open(self.key_path, 'rb') as f_r:
                key_data = f_r.read()
            private_key = serialization.load_pem_private_key(
                key_data,
                password=None,
                backend=default_backend())
            signature = private_key.sign(digest, padding.PKCS1v15(), hashes.SHA256())
        except (OSError, ValueError):
            return False
        return signature

    def sign(self, sign_algo):
        """
        Hash the package body and write its signature in-place at
        self.sign_offset (shifted by SIGN_SHA256_LEN for 384-byte
        signatures so the slot matches the signature size).

        :param sign_algo: SIGN_ALGO_RSA or SIGN_ALGO_PSS.
        :return: True on success, False otherwise.
        """
        with open(self.save_path, "rb+") as package_file:
            # calculate hash for .bin package
            digest = self.calculate_hash(package_file)
            if not digest:
                UPDATE_LOGGER.print_log("calculate hash for .bin package failed",
                                        log_type=UPDATE_LOGGER.ERROR_LOG)
                return False

            # sign .bin package
            if sign_algo == SIGN_ALGO_RSA:
                signature = self.sign_digest(digest)
            elif sign_algo == SIGN_ALGO_PSS:
                signature = self.sign_digest_with_pss(digest)
            else:
                UPDATE_LOGGER.print_log("invalid sign_algo!", log_type=UPDATE_LOGGER.ERROR_LOG)
                return False
            if not signature:
                UPDATE_LOGGER.print_log("sign .bin package failed!", log_type=UPDATE_LOGGER.ERROR_LOG)
                return False

            # A 384-byte signature is stored after the 256-byte slot.
            if len(signature) == SIGN_SHA384_LEN:
                self.sign_offset += SIGN_SHA256_LEN

            # write signed .bin package
            package_file.seek(self.sign_offset)
            package_file.write(signature)
            UPDATE_LOGGER.print_log(
                ".bin package signing success! SignOffset: %s" % self.sign_offset)
            return True

    def sign_header(self, sign_algo, hash_check_data, package_file):
        """
        Hash the package header, sign it, and append the wrapped sign data
        at self.hash_info_offset (advancing the offset).

        :param sign_algo: SIGN_ALGO_RSA or SIGN_ALGO_PSS.
        :param hash_check_data: CreateHash object that wraps the signature.
        :param package_file: writable binary file object for update.bin.
        :return: True on success, False otherwise.
        """
        # calculate hash for .bin package
        digest = self.calculate_header_hash(package_file)
        if not digest:
            UPDATE_LOGGER.print_log("calculate hash for .bin package failed",
                                    log_type=UPDATE_LOGGER.ERROR_LOG)
            return False

        # sign .bin header
        if sign_algo == SIGN_ALGO_RSA:
            signature = self.sign_digest(digest)
        elif sign_algo == SIGN_ALGO_PSS:
            signature = self.sign_digest_with_pss(digest)
        else:
            UPDATE_LOGGER.print_log("invalid sign_algo!", log_type=UPDATE_LOGGER.ERROR_LOG)
            return False
        if not signature:
            UPDATE_LOGGER.print_log("sign .bin package failed!", log_type=UPDATE_LOGGER.ERROR_LOG)
            return False

        # write signed .bin header
        hash_check_data.write_signdata(signature)
        package_file.seek(self.hash_info_offset)
        package_file.write(hash_check_data.signdata)
        self.hash_info_offset += len(hash_check_data.signdata)
        UPDATE_LOGGER.print_log(
            ".bin package header signing success! SignOffset: %s" % self.hash_info_offset)
        return True

    def calculate_hash_all_image(self, package_file):
        """
        Calculate the SHA-256 hash for a specified range of data in the package file.
        :param package_file: The file object representing the package from which to calculate the hash.
        :return: The SHA-256 hash digest of the specified data range.
        """
        hash_sha256 = hashlib.sha256()
        # Hashing starts at the hash-info header.
        # NOTE(review): the +1 makes the range inclusive of both end
        # offsets — confirm this matches the on-device verifier before
        # changing it.
        remain_len = self.hash_write_end_offset - self.hash_write_start_offset + 1
        UPDATE_LOGGER.print_log("remain_len:%s" % remain_len,
                                UPDATE_LOGGER.INFO_LOG)
        package_file.seek(self.hash_write_start_offset)
        while remain_len > BLOCK_SIZE:
            hash_sha256.update(package_file.read(BLOCK_SIZE))
            remain_len -= BLOCK_SIZE
        if remain_len > 0:
            hash_sha256.update(package_file.read(remain_len))
        return hash_sha256.digest()

    def sign_all_imgae_hash(self, sign_algo, hash_check_data, package_file):
        """
        Sign the hash of all images using the specified signing algorithm.
        :param sign_algo: The signing algorithm to use (e.g., RSA or PSS).
        :param hash_check_data: An object that holds hash check data and methods to write signature data.
        :param package_file: The file object representing the package where the signature will be written.
        :return: True if signing is successful, False otherwise.
        """
        digest = self.calculate_hash_all_image(package_file)
        if not digest:
            UPDATE_LOGGER.print_log("calculate hash for all image hash failed",
                                    log_type=UPDATE_LOGGER.ERROR_LOG)
            return False

        # sign all image hash
        if sign_algo == SIGN_ALGO_RSA:
            signature = self.sign_digest(digest)
        elif sign_algo == SIGN_ALGO_PSS:
            signature = self.sign_digest_with_pss(digest)
        else:
            UPDATE_LOGGER.print_log("invalid sign_algo!", log_type=UPDATE_LOGGER.ERROR_LOG)
            return False
        if not signature:
            UPDATE_LOGGER.print_log("sign .bin package failed!", log_type=UPDATE_LOGGER.ERROR_LOG)
            return False
        UPDATE_LOGGER.print_log("write signature:%s" % signature,
                                UPDATE_LOGGER.INFO_LOG)
        # Wrap and write the signature of all image hashes (including the
        # hash-info header).
        hash_check_data.write_all_image_signdata(signature)
        UPDATE_LOGGER.print_log(
            "start write full image sign offset: %s" % self.chunk_sign_offset)
        package_file.seek(self.chunk_sign_offset)
        package_file.write(hash_check_data.signdata)
        self.chunk_sign_offset += len(hash_check_data.signdata)
        UPDATE_LOGGER.print_log(
            ".bin package header signing success! SignOffset: %s" % self.chunk_sign_offset)
        return True

    def handle_stream_update(self, package_file):
        """
        Write streaming-update chunk data: incremental images first (when
        present), then full images.

        :param package_file: writable binary file object for update.bin.
        :return: True on success, False on I/O failure.
        """
        # Incremental streaming update
        if OPTIONS_MANAGER.stream_update and OPTIONS_MANAGER.incremental_img_list:
            try:
                self.create_incremental_package(package_file)
                UPDATE_LOGGER.print_log("Write incremental streaming update chunk complete!",
                                        log_type=UPDATE_LOGGER.INFO_LOG)

            except IOError:
                UPDATE_LOGGER.print_log("Add Chunk data info failed!", log_type=UPDATE_LOGGER.ERROR_LOG)
                return False

        # Full streaming update
        if OPTIONS_MANAGER.stream_update and len(OPTIONS_MANAGER.full_img_list):
            try:
                self.create_full_package(package_file)
                UPDATE_LOGGER.print_log("Write full streaming update chunk complete!",
                                        log_type=UPDATE_LOGGER.INFO_LOG)

            except IOError:
                UPDATE_LOGGER.print_log("Add Chunk data info failed!", log_type=UPDATE_LOGGER.ERROR_LOG)
                return False

        return True

    def create_package(self):
        """
        Create the update.bin file
        return: update package creation result
        """
        if not self.verify_param():
            UPDATE_LOGGER.print_log("verify param failed!", UPDATE_LOGGER.ERROR_LOG)
            return False

        hash_check_data = CreateHash(HashType.SHA256, self.head_list.entry_count)
        hash_check_data.write_hashinfo()
        UPDATE_LOGGER.print_log("self.save_path: %s"
                                % self.save_path, UPDATE_LOGGER.INFO_LOG)
        # NOTE(review): 0o755 marks the output data file executable —
        # confirm whether downstream tooling relies on this mode.
        package_fd = os.open(self.save_path, os.O_RDWR | os.O_CREAT, 0o755)
        with os.fdopen(package_fd, "wb+") as package_file:
            # For a streaming update, embed the build-tools zip at the
            # head of update.bin; everything else shifts by its size.
            if OPTIONS_MANAGER.stream_update:
                zip_file_offset = self.write_update_zip(package_file)
            else:
                zip_file_offset = 0
            OPTIONS_MANAGER.zip_offset = zip_file_offset
            # Add information to package
            if not self.write_pkginfo(package_file, zip_file_offset):
                UPDATE_LOGGER.print_log("Write pkginfo failed!", log_type=UPDATE_LOGGER.ERROR_LOG)
                return False
            # Add component to package
            self.compinfo_offset = UPGRADE_FILE_HEADER_LEN + zip_file_offset
            UPDATE_LOGGER.print_log("self.compinfo_offset: %s"
                                    % self.compinfo_offset, UPDATE_LOGGER.INFO_LOG)
            self.component_offset = UPGRADE_FILE_HEADER_LEN + \
                self.head_list.entry_count * self.upgrade_compinfo_size + \
                UPGRADE_RESERVE_LEN + SIGN_SHA256_LEN + SIGN_SHA384_LEN
            for i in range(0, self.head_list.entry_count):
                UPDATE_LOGGER.print_log("Add component %s" % self.component_list[i].component_addr)
                if not self.write_component_info(self.component_list[i], package_file):
                    UPDATE_LOGGER.print_log("write component info failed: %s"
                                            % self.component_list[i].component_addr, UPDATE_LOGGER.ERROR_LOG)
                    return False
                if OPTIONS_MANAGER.sd_card and (not hash_check_data.write_component_hash_data(self.component_list[i])):
                    UPDATE_LOGGER.print_log("write component hash data failed: %s"
                                            % self.component_list[i].component_addr, UPDATE_LOGGER.ERROR_LOG)
                    return False

            try:
                # Add descriptPackageId to package
                package_file.seek(self.compinfo_offset)
                package_file.write(
                    (self.head_list.describe_package_id.decode().ljust(UPGRADE_RESERVE_LEN, "\0")).encode())
            except IOError:
                UPDATE_LOGGER.print_log("Add descriptPackageId failed!", log_type=UPDATE_LOGGER.ERROR_LOG)
                return False
            self.hash_info_offset = self.compinfo_offset + UPGRADE_RESERVE_LEN
            if OPTIONS_MANAGER.sd_card:
                try:
                    # Add hash check data to package
                    hash_check_data.write_hashdata()
                    package_file.seek(self.hash_info_offset)
                    package_file.write(hash_check_data.hashinfo_value + hash_check_data.hashdata)
                    self.hash_info_offset += len(hash_check_data.hashinfo_value + hash_check_data.hashdata)

                except IOError:
                    UPDATE_LOGGER.print_log("Add hash check data failed!", log_type=UPDATE_LOGGER.ERROR_LOG)
                    return False
            self.sign_header(SIGN_ALGO_RSA, hash_check_data, package_file)
            self.component_offset = self.hash_info_offset
            if not OPTIONS_MANAGER.stream_update:
                if not self.write_component_list(package_file):
                    return False
            self.chunk_info_offset = self.component_offset

            if not self.handle_stream_update(package_file):
                UPDATE_LOGGER.print_log("Handle stream update failed!", log_type=UPDATE_LOGGER.ERROR_LOG)
                return False

        return True

    def write_component_list(self, package_file):
        """
        Write components to the package file.

        :param package_file: The file object to write components to.
        :return: Boolean indicating success or failure.
        """
        for i in range(0, self.head_list.entry_count):
            if not self.write_component(self.component_list[i], package_file):
                UPDATE_LOGGER.print_log("write component failed: %s"
                                        % self.component_list[i].component_addr,
                                        UPDATE_LOGGER.ERROR_LOG)
                return False
        return True

    def create_incremental_package(self, package_file):
        """
        Create the incremental update.bin file
        return: incremental update package creation result
        """
        chunk_check_data = CreateChunk(1, 1)
        # Adding chunk list of pkg chunks
        # Determine if a no_map file exists
        if OPTIONS_MANAGER.no_map_image_exist:
            UPDATE_LOGGER.print_log("OPTIONS_MANAGER.no_map_file_list:%s" % OPTIONS_MANAGER.no_map_file_list,
                                    log_type=UPDATE_LOGGER.INFO_LOG)
            self.chunk_data_offset = self.chunk_info_offset
            # Add the no map mirror chunk command
            for each_image in OPTIONS_MANAGER.no_map_file_list:
                self.chunk_data_offset = chunk_check_data.write_chunklist_full_image(
                    each_image, package_file,
                    OPTIONS_MANAGER.image_chunk[each_image],
                    OPTIONS_MANAGER.image_block_sets[each_image],
                    self.chunk_data_offset)
                UPDATE_LOGGER.print_log("write pkg no map chunk partition name is %s" % each_image,
                                        log_type=UPDATE_LOGGER.INFO_LOG)
        else:
            self.chunk_data_offset = self.chunk_info_offset

        # Remove no map file list element
        reduce_no_map_list = [item for item in OPTIONS_MANAGER.incremental_img_list
                              if item not in OPTIONS_MANAGER.no_map_file_list]
        for each_image in reduce_no_map_list:
            UPDATE_LOGGER.print_log("write pkg chunk partition name is %s" % each_image, log_type=UPDATE_LOGGER.INFO_LOG)
            self.chunk_data_offset = chunk_check_data.write_chunklist(each_image, package_file, self.chunk_data_offset)
        self.chunk_hash_offset = self.chunk_data_offset

        # Record where the hash starts writing
        self.hash_write_start_offset = self.chunk_hash_offset

        # Add the hash info header
        image_number = len(OPTIONS_MANAGER.incremental_img_list)
        self.chunk_hash_offset = chunk_check_data.write_hash_info(image_number, package_file, self.chunk_hash_offset)

        # Add hash for each image and the large data
        for each_image in OPTIONS_MANAGER.incremental_img_list:
            self.chunk_hash_offset = chunk_check_data.write_image_hashdata(each_image, package_file, self.chunk_hash_offset)
            UPDATE_LOGGER.print_log("write image hashdata complete!", log_type=UPDATE_LOGGER.INFO_LOG)
            self.chunk_hash_offset = chunk_check_data.write_image_large(each_image, package_file, self.chunk_hash_offset)
            UPDATE_LOGGER.print_log("write image large complete!", log_type=UPDATE_LOGGER.INFO_LOG)
        self.chunk_sign_offset = self.chunk_hash_offset

        # Record the location of the end-of-hash write
        self.hash_write_end_offset = self.chunk_sign_offset

        # Add the sign of the encapsulated hash for all images
        self.sign_all_imgae_hash(SIGN_ALGO_RSA, chunk_check_data, package_file)

    def create_full_package(self, package_file):
        """
        Create the full update.bin file
        return: full update package creation result
        """
        chunk_check_data = CreateChunk(1, 1)
        # Adding chunk list of pkg chunks
        for each_img_name in OPTIONS_MANAGER.full_img_name_list:
            # Strip the 4-character extension (e.g. ".img") to get the
            # partition name.
            each_img = each_img_name[:-4]
            self.chunk_info_offset = chunk_check_data.write_chunklist_full_image(
                each_img, package_file,
                OPTIONS_MANAGER.full_chunk[each_img],
                OPTIONS_MANAGER.full_block_sets[each_img],
                self.chunk_info_offset)
            UPDATE_LOGGER.print_log("Write full streaming update [%s] chunk complete!" % each_img,
                                    log_type=UPDATE_LOGGER.INFO_LOG)
        self.chunk_hash_offset = self.chunk_info_offset

        # Record where the hash starts writing
        self.hash_write_start_offset = self.chunk_hash_offset

        # Add the hash info header
        image_number = len(OPTIONS_MANAGER.full_img_name_list)
        self.chunk_hash_offset = chunk_check_data.write_hash_info(image_number, package_file, self.chunk_hash_offset)

        # Add hash for each image and the large data
        for each_image_name in OPTIONS_MANAGER.full_img_name_list:
            each_img = os.path.splitext(each_image_name)[0]
            self.chunk_hash_offset = chunk_check_data.write_image_hashdata(each_img, package_file, self.chunk_hash_offset)
            UPDATE_LOGGER.print_log("write image hashdata complete!", log_type=UPDATE_LOGGER.INFO_LOG)
            self.chunk_hash_offset = chunk_check_data.write_image_large(each_img, package_file, self.chunk_hash_offset)
            UPDATE_LOGGER.print_log("write image large complete!", log_type=UPDATE_LOGGER.INFO_LOG)
        self.chunk_sign_offset = self.chunk_hash_offset

        # Record the location of the end-of-hash write
        self.hash_write_end_offset = self.chunk_sign_offset

        # Add the sign of the encapsulated hash for all images
        self.sign_all_imgae_hash(SIGN_ALGO_RSA, chunk_check_data, package_file)