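"""Parse a TLV-structured update package (update.bin).

Each record in the package is a TLV (type-length-value) entry: a small
struct header giving the record type and payload length, followed by the
payload bytes. The parsers below walk the chunk list, hash info, hash data
and signature sections and dump what they find into local files
(new_patch.dat, new_new.dat, new_transfer.list, new_hash.txt, new_sign.txt,
new_full.list, new_image.img).
"""
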
import binascii
import hashlib
import struct


class ChunkParser:
    def __init__(self):
        self.chunkdata_partition_tlv_type = 0x01  # Example type
        self.chunkdata_cmd_tlv_type = 0x02  # Example type
        self.chunkdata_value_tlv_type = 0x03  # Example type
        self.chunkhash_info_tlv_type = 0x04  # Example type

        # Formats for struct.pack and struct.unpack
        self.chunkdata_partition_fmt = '<HH'  # type (u16), length (u16)
        self.chunkdata_cmd_fmt = '<HH'        # type (u16), length (u16)
        self.chunkdata_data_fmt = '<HI'       # type (u16), length (u32)
        self.chunkhash_header_fmt = '<3H'     # type, number of images, image number (u16 each)
        self.chunkhash_data_fmt = '<HH'       # type (u16), length (u16)
        self.chunksign_value_fmt = '<HI'      # type (u16), length (u32)


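    # Record layout expected by the parsers below (inferred from the format
    # strings above): a little-endian type/length header followed by the raw
    # value bytes. For example, a record for a hypothetical "system" partition
    # would be packed roughly as:
    #     struct.pack('<HH', 18, len(b'system')) + b'system'
    # where 18 is the partition record type checked in parse_chunklist.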
    def parse_chunklist(self, package_file, offset):
        print(f'start reading chunklist, offset: {offset}')
        package_file.seek(offset)  # Jump to the start of the chunk list
        while True:
            # Read partition TLV
            partition_data = package_file.read(struct.calcsize(self.chunkdata_partition_fmt))
            print(f'partition_data: {partition_data}')
            if not partition_data:
                break  # End of file

            partition_type, partition_length = struct.unpack(self.chunkdata_partition_fmt, partition_data[:4])
            partition_name = package_file.read(partition_length).decode('utf-8')
            print(f"Partition Type: {partition_type}, Partition Name: {partition_name}")
            if partition_type != 18:
                print('stop reading chunklist')
                break

            # Read command TLV
            cmd_data = package_file.read(struct.calcsize(self.chunkdata_cmd_fmt))
            cmd_type, cmd_length = struct.unpack(self.chunkdata_cmd_fmt, cmd_data[:4])
            cmd_info = package_file.read(cmd_length).decode('utf-8')
            print(f"Command Type: {cmd_type}, Command Info: {cmd_info}")

            # Read data TLV
            data_header = package_file.read(struct.calcsize(self.chunkdata_data_fmt))
            print(f'data_header: {data_header}')
            data_type, data_length = struct.unpack(self.chunkdata_data_fmt, data_header[:6])
            data_value = package_file.read(data_length)
            print(f"Data Type: {data_type}, Data Length: {data_length}, Data Value: {data_value}")

            # Route the payload based on cmd_info (pkgdiff patch vs. new data)
            if 'pkgdiff' in cmd_info:
                with open('new_patch.dat', 'ab') as patch_file:
                    patch_file.write(data_value)
            elif 'new' in cmd_info:
                with open('new_new.dat', 'ab') as new_file:
                    new_file.write(data_value)
            with open('new_transfer.list', 'ab') as list_file:
                list_file.write((cmd_info + '\n').encode())

    def parse_hash_info(self, package_file, offset):
        print(f'start reading hash_info, offset: {offset}')
        package_file.seek(offset)  # Jump to the start of the hash info section
        while True:
            hash_info_data = package_file.read(struct.calcsize(self.chunkhash_header_fmt))
            if not hash_info_data:
                break  # End of file

            hash_type, num_images, image_number = struct.unpack(self.chunkhash_header_fmt, hash_info_data)
            print(f"Hash Type: {hash_type}, Number of Images: {num_images}, Image Number: {image_number}")
            if hash_type != 22:
                print('stop reading hash_info')
                break

    def parse_hash_data(self, package_file, offset):
        print(f'start reading hash_data, offset: {offset}')
        package_file.seek(offset)
        while True:
            partition_data = package_file.read(struct.calcsize(self.chunkdata_partition_fmt))
            print(f'partition_data: {partition_data}')
            if not partition_data:
                break  # End of file

            partition_type, partition_length = struct.unpack(self.chunkdata_partition_fmt, partition_data[:4])
            if partition_type != 23:
                print('stop reading hash_data')
                break
            partition_name = package_file.read(partition_length).decode('utf-8')
            print(f"Partition Type: {partition_type}, Partition Name: {partition_name}")

            hash_header = package_file.read(struct.calcsize(self.chunkhash_data_fmt))
            print(f'hash_header: {hash_header}')
            data_type, data_length = struct.unpack(self.chunkhash_data_fmt, hash_header[:4])
            data_value = package_file.read(data_length)
            print(f"Data Type: {data_type}, Data Length: {data_length}, Data Value: {data_value}")

            large_data = package_file.read(struct.calcsize('<2HI'))
            print(f'large_data: {large_data}')
            large_data_type, large_data_length, large_data_value = struct.unpack('<2HI', large_data[:8])
            print(f"Large Image Type: {large_data_type}, Large Image Length: {large_data_length}, Large Image Value: {large_data_value}")

            with open('new_hash.txt', 'ab') as file:
                file.write(data_value)

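    # The signature section is a sequence of '<HI' records (type u16, length u32);
    # records of type 26 carry signature bytes, which are hex-encoded and
    # appended to new_sign.txt.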
    def parse_full_sign(self, package_file, offset):
        print(f'start reading full image sign, offset: {offset}')
        package_file.seek(offset)
        while True:
            sign_data = package_file.read(struct.calcsize(self.chunksign_value_fmt))
            print(f'sign_data: {sign_data}')
            if not sign_data:
                break  # End of file
            data_type, data_length = struct.unpack(self.chunksign_value_fmt, sign_data[:6])
            data_value = package_file.read(data_length)
            print(f"Data Type: {data_type}, Data Length: {data_length}, Data Value: {data_value}")
            if data_type != 26:
                print('stop reading full image sign')
                break
            with open('new_sign.txt', 'ab') as file:
                hex_data = binascii.hexlify(data_value)
                file.write(hex_data + b'\n')


    def parse_full_stream(self, package_file, offset):
        print(f'start reading full stream, offset: {offset}')
        package_file.seek(offset)
        while True:
            partition_header = package_file.read(struct.calcsize(self.chunkdata_partition_fmt))
            print(f'partition_header: {partition_header}')
            if not partition_header:
                break  # End of file
            partition_type, partition_length = struct.unpack(self.chunkdata_partition_fmt, partition_header[:4])
            partition_value = package_file.read(partition_length)
            print(f"Partition Type: {partition_type}, Partition Length: {partition_length}, Partition Value: {partition_value}")
            if partition_type != 18:
                print('stop reading full image')
                break
            cmd = package_file.read(struct.calcsize(self.chunkdata_cmd_fmt))
            print(f'cmd_header: {cmd}')
            cmd_type, cmd_length = struct.unpack(self.chunkdata_cmd_fmt, cmd[:4])
            cmd_value = package_file.read(cmd_length).decode('utf-8')
            print(f"Command Type: {cmd_type}, Command Length: {cmd_length}, Command Value: {cmd_value}")
            with open('new_full.list', 'ab') as file:
                file.write((cmd_value + '\n').encode())

            image_data = package_file.read(struct.calcsize(self.chunkdata_data_fmt))
            data_type, data_length = struct.unpack(self.chunkdata_data_fmt, image_data[:6])
            data_value = package_file.read(data_length)
            with open('new_image.img', 'ab') as file:
                file.write(data_value)


def get_file_sha256(update_package):
    sha256obj = hashlib.sha256()
    maxbuf = 8192
    with open(update_package, 'rb') as package_file:
        while True:
            buf = package_file.read(maxbuf)
            if not buf:
                break
            sha256obj.update(buf)
    return sha256obj.hexdigest().upper()


def calculate_file_hash(file_path):
    hash_sha256 = hashlib.sha256()  # You can change this to sha1(), md5(), etc. if needed
    with open(file_path, 'rb') as f:
        # Read the file in chunks to avoid using too much memory
        for chunk in iter(lambda: f.read(4096), b""):
            hash_sha256.update(chunk)
    return hash_sha256.hexdigest()


def read_update_bin(file_path):
    with open(file_path, 'rb') as f:
        return f.read()


def parse_update_bin(data, offsets):
    parsed_data = {}
    for name, (offset, length) in offsets.items():
        parsed_data[name] = data[offset:offset + length]
    return parsed_data


def reconstruct_patch(parsed_data):
    # Assumes the patch is a byte string; adjust as needed
    patch = b''
    for name in parsed_data:
        patch += parsed_data[name]
    return patch


def verify_patch(original_patch, reconstructed_patch):
    return original_patch == reconstructed_patch


def main():
    chunk_parser = ChunkParser()
    with open('update.bin', 'rb') as package_file:
        # Old full streaming-image steps (offsets kept for historical reference only);
        # the offsets must be taken from the print output of the package-building tool:
        # Step 1 - parse_full_stream at offset 632: chunk_parser.parse_full_stream(package_file, 632)
        # Step 2 - parse_hash_info at offset 1975016790: chunk_parser.parse_hash_info(package_file, 1975016790)
        # Step 3 - parse_hash_data at offset 1975016796: chunk_parser.parse_hash_data(package_file, 1975016796)
        # Step 4 - parse_full_sign at offset 1975017344: chunk_parser.parse_full_sign(package_file, 1975017344)
        chunk_parser.parse_chunklist(package_file, 632)
        chunk_parser.parse_hash_info(package_file, 21590734)
        chunk_parser.parse_hash_data(package_file, 21590740)
        chunk_parser.parse_full_sign(package_file, 21590922)


if __name__ == "__main__":
    main()