#!/usr/bin/env python3
# -*- coding: utf-8 -*-

# Copyright (c) 2023 Huawei Device Co., Ltd.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Description : Generate the update.bin hash check data
"""
import os
import struct
import hashlib
import enum
from log_exception import UPDATE_LOGGER

# hash data sample
# hash info module:
# hash info:1 32 3 4194304
# hash value module:
# /version_list (32 bytes)
# 1 176
# 1 176 bf10259a1fc1b2f780a49ce6XXXXXXXX
# hash sign module:
# hash sign:45ef8ec12e56e3b82c9a05XXXXXX

HashType = enum.Enum('HashType', ('SHA256', 'SHA384', 'SHA512'))
HashAlgo = {HashType.SHA256: hashlib.sha256,
            HashType.SHA384: hashlib.sha384,
            HashType.SHA512: hashlib.sha512}
HASH_TYPE_SIZE = 2
HASH_LENGTH_SIZE = 4
HASH_TLV_SIZE = HASH_TYPE_SIZE + HASH_LENGTH_SIZE
UPGRADE_HASHINFO_SIZE = 10
HASH_DATA_HEADER_SIZE = 38
HASH_DATA_ADDR_SIZE = 8
COMPONENT_NAME_SIZE = 32
# hash block size
HASH_BLOCK_SIZE = 4 * 1024 * 1024

"""
Format
H: unsigned short
I: unsigned int
B: unsigned char
s: char[]
"""
HASH_TLV_FMT = "<HI"
HASH_INFO_FMT = "<3HI"
HASH_DATA_HEADER_FMT = "<32sHI"
HASH_DATA_ADDR_FMT = "<2I"


class CreateHash(object):
    """
    Create the component hash data
    """

    def __init__(self, hash_type, count):
        self.hashinfo_tlv_type = 0x06
        self.hashdata_tlv_type = 0x07
        self.sign_tlv_type = 0x08
        self.hash_type = hash_type
        self.hash_digest_size = HashAlgo[hash_type]().digest_size
        self.component_num = count
        self.block_size = HASH_BLOCK_SIZE
        self.hashinfo_value = bytes()
        self.hashdata = bytes()
        self.signdata = bytes()
        self.hashdata_list = []

    def write_hashinfo(self):
        try:
            hashinfo_tlv = struct.pack(HASH_TLV_FMT, self.hashinfo_tlv_type, UPGRADE_HASHINFO_SIZE)
            hashinfo_header = struct.pack(HASH_INFO_FMT, self.hash_type.value, self.hash_digest_size,
                                          self.component_num, self.block_size)
        except struct.error:
            UPDATE_LOGGER.print_log("Pack fail!", log_type=UPDATE_LOGGER.ERROR_LOG)
            return False

        # write hashinfo
        self.hashinfo_value = hashinfo_tlv + hashinfo_header
        return True

    def write_hashdata(self):
        try:
            hashdata_len = len(self.hashdata)
            hashdata_tlv = struct.pack(HASH_TLV_FMT, self.hashdata_tlv_type, hashdata_len)
        except struct.error:
            UPDATE_LOGGER.print_log("Pack fail!", log_type=UPDATE_LOGGER.ERROR_LOG)
            return False

        UPDATE_LOGGER.print_log("Write hashdata hash len %d" % hashdata_len)
        # write hashdata
        self.hashdata = hashdata_tlv + self.hashdata
        UPDATE_LOGGER.print_log("Write hashdata hash tlv complete")
        return True

    def write_signdata(self, signdata):
        try:
            signdata_len = len(signdata)
            signdata_tlv = struct.pack(HASH_TLV_FMT, self.sign_tlv_type, signdata_len)
        except struct.error:
            UPDATE_LOGGER.print_log("Pack fail!", log_type=UPDATE_LOGGER.ERROR_LOG)
            return False

        # write signdata
        self.signdata = signdata_tlv + signdata
        UPDATE_LOGGER.print_log("Write hashdata sign tlv complete")
        return True

    def calculate_hash_data(self, data):
        hash_algo = HashAlgo[self.hash_type]()
        hash_algo.update(data)
        return hash_algo.digest()

    def write_component_hash_data(self, component):
        UPDATE_LOGGER.print_log("calc component hash")
        try:
            with open(component.file_path, "rb") as component_file:
                component_len = os.path.getsize(component.file_path)
                block_num = component_len // HASH_BLOCK_SIZE
                component_name = component.component_addr.decode().ljust(COMPONENT_NAME_SIZE, "\0")
                UPDATE_LOGGER.print_log("calc component hash component name:%s %d" %
                                        (component_name, len(component_name)))
                total_block = block_num + 1 if component_len % HASH_BLOCK_SIZE > 0 else block_num
                self.hashdata += struct.pack(HASH_DATA_HEADER_FMT, component_name.encode(),
                                             total_block, component_len)
                UPDATE_LOGGER.print_log("calc component hash block_num:%d" % total_block)
                write_len = 0
                for i in range(0, block_num):
                    component_file.seek(write_len)
                    component_data = component_file.read(HASH_BLOCK_SIZE)
                    write_len += HASH_BLOCK_SIZE
                    # pack the inclusive [start, end] byte range of this block, then its digest
                    self.hashdata += struct.pack(HASH_DATA_ADDR_FMT, i * HASH_BLOCK_SIZE,
                                                 write_len - 1) + self.calculate_hash_data(component_data)
                if component_len - write_len > 0:
                    # trailing partial block
                    component_file.seek(write_len)
                    component_data = component_file.read(component_len - write_len)
                    self.hashdata += struct.pack(HASH_DATA_ADDR_FMT, write_len,
                                                 component_len - 1) + self.calculate_hash_data(component_data)
        except (struct.error, IOError):
            return False
        UPDATE_LOGGER.print_log("calc component hash complete ComponentSize:%d" % component_len)
        return True

    def parse_hashinfo(self, data):
        # parse hashinfo
        hash_type_value = 0
        try:
            hash_type_value, self.hash_digest_size, self.component_num, self.block_size = \
                struct.unpack(HASH_INFO_FMT, data[:UPGRADE_HASHINFO_SIZE])
            self.hash_type = HashType(hash_type_value)
        except struct.error:
            return False

        UPDATE_LOGGER.print_log("parse hashinfo complete, %d %d %d %d" % (hash_type_value,
                                self.hash_digest_size, self.component_num, self.block_size))
        return True

    def parse_hashdata(self, data):
        offset = 0
        try:
            for i in range(0, self.component_num):
                img_name, hash_num, img_size = struct.unpack(HASH_DATA_HEADER_FMT,
                                                             data[offset: HASH_DATA_HEADER_SIZE + offset])
                UPDATE_LOGGER.print_log("parse hashdata component header, %s %d %d" % (img_name,
                                        hash_num, img_size))
                offset += HASH_DATA_HEADER_SIZE
                self.hashdata_list.append((img_name.decode(), hash_num, img_size))
                for j in range(0, hash_num):
                    hash_data_start, hash_data_end = struct.unpack(HASH_DATA_ADDR_FMT,
                                                                   data[offset: HASH_DATA_ADDR_SIZE + offset])
                    hash_data = data[HASH_DATA_ADDR_SIZE + offset:
                                     HASH_DATA_ADDR_SIZE + self.hash_digest_size + offset]
                    offset += (HASH_DATA_ADDR_SIZE + self.hash_digest_size)
                    self.hashdata_list.append((hash_data_start, hash_data_end, hash_data))
        except struct.error:
            return False

        UPDATE_LOGGER.print_log("parse hashdata complete")
        return True

    def parse_signdata(self, data):
        # parse signdata
        self.signdata = data
        UPDATE_LOGGER.print_log("parse hashdata sign complete")
        return True

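    # parse_print_hashdata below walks self.hashdata_list as built by parse_hashdata:
    # a flat list where each component contributes one header tuple
    #   (img_name, hash_num, img_size)
    # immediately followed by hash_num block tuples
    #   (block_start, block_end, block_digest)
    # so "offset" always points at the next component header.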
    def parse_print_hashdata(self, save_path):
        with open(os.path.join(save_path, "hash_check_file_parse"), "wb+") as hash_check_file_p:
            hash_check_file_p.write(("hash info:").encode())
            hash_check_file_p.write((self.hash_type.name + ' ' + str(self.hash_digest_size) +
                                     ' ' + str(self.component_num) + ' ' + str(self.block_size) + '\n').encode())

            offset = 0
            for i in range(0, self.component_num):
                hash_check_file_p.write((self.hashdata_list[offset][0] + '\n').encode())
                hash_check_file_p.write((str(self.hashdata_list[offset][1]) + ' ' +
                                         str(self.hashdata_list[offset][2]) + '\n').encode())
                for j in range(0, self.hashdata_list[offset][1]):
                    index = offset + 1
                    hashdata_hexstr = "".join("%02x" % b for b in self.hashdata_list[j + index][2])
                    hash_check_file_p.write((str(self.hashdata_list[j + index][0]) + ' ' +
                                             str(self.hashdata_list[j + index][1]) + ' ' + hashdata_hexstr +
                                             '\n').encode())
                offset += (1 + self.hashdata_list[offset][1])

            signdata_hexstr = "".join("%02x" % b for b in self.signdata)
            hash_check_file_p.write(("hash sign:").encode())
            hash_check_file_p.write(signdata_hexstr.encode())
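
# Usage sketch (illustrative only, not invoked by this module). It assumes a
# component object exposing the two attributes write_component_hash_data reads
# (file_path and component_addr) and a signature produced by an external signer;
# "component" and "signature_bytes" below are hypothetical names. The caller is
# assumed to assemble the final check data from the three TLV buffers:
#
#   creator = CreateHash(HashType.SHA256, 1)
#   creator.write_component_hash_data(component)   # append header + per-block digests
#   creator.write_hashdata()                       # prepend the 0x07 hash data TLV
#   creator.write_hashinfo()                       # build the 0x06 hash info TLV
#   creator.write_signdata(signature_bytes)        # wrap the signature in the 0x08 TLV
#   hash_check_data = creator.hashinfo_value + creator.hashdata + creator.signdata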