#!/usr/bin/env python3
# -*- coding: utf-8 -*-

# Copyright (c) 2021 Huawei Device Co., Ltd.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import multiprocessing
import subprocess
import tempfile
import zipfile
from ctypes import pointer
from log_exception import UPDATE_LOGGER
from blocks_manager import BlocksManager
from transfers_manager import ActionType
from update_package import PkgHeader
from update_package import PkgComponent
from utils import OPTIONS_MANAGER
from utils import ON_SERVER
from utils import DIFF_EXE_PATH
from patch_package_chunk import PatchPackageChunk
from create_chunk import get_chunk_sha256

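# Suffixes of the per-partition artifacts packed into the OTA zip:
# raw data for new blocks, the diff patch data and the transfer command list.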
NEW_DAT = "new.dat"
PATCH_DAT = "patch.dat"
TRANSFER_LIST = "transfer.list"


class PatchProcess:
    def __init__(self, partition, tgt_image, src_image,
                 actions_list):
        self.actions_list = actions_list
        self.worker_threads = multiprocessing.cpu_count() // 2
        self.partition = partition
        self.tgt_img_obj = tgt_image
        self.src_img_obj = src_image
        self.version = 1
        self.touched_src_ranges = BlocksManager()
        self.touched_src_sha256 = None
        self.package_patch_zip = PackagePatchZip(partition)
        # ab copy param
        self.chunk_data_list = []
        self.chunk_new_list = []
        self.transfer_content_in_chunk = []

    @staticmethod
    def get_transfer_content(max_stashed_blocks, total_blocks_count,
                             transfer_content):
        """
        Get the transfer content.
        """
        transfer_content = ''.join(transfer_content)
        transfer_content = \
            transfer_content.replace("TOTAL_MARK", str(total_blocks_count))
        transfer_content = \
            transfer_content.replace("MAX_STASH_MARK", str(max_stashed_blocks))
        transfer_content = \
            transfer_content.replace("ActionType.MOVE", "move")
        transfer_content = \
            transfer_content.replace("ActionType.ZERO", "zero")
        transfer_content = \
            transfer_content.replace("ActionType.NEW", "new")
        return transfer_content

    @staticmethod
    def check_partition(total, seq):
        so_far = BlocksManager()
        for i in seq:
            if so_far.is_overlaps(i):
                raise RuntimeError
            so_far = so_far.get_union_with_other(i)
        if so_far != total:
            raise RuntimeError

    def write_split_transfers(self, transfer_content, type_str, target_blocks,
                              each_img_file):
        """
        Limit the size of operand in command 'new' and 'zero' to 1024 blocks.
        :param transfer_content: transfer content list
        :param type_str: type of the action to be processed.
        :param target_blocks: BlocksManager of the target blocks
        :param each_img_file: image file from which 'new' block data is read
        :return: total
        """
        if type_str not in (ActionType.NEW, ActionType.ZERO):
            raise RuntimeError
        if OPTIONS_MANAGER.stream_update and type_str == ActionType.NEW:
            blocks_limit = OPTIONS_MANAGER.chunk_limit
        else:
            blocks_limit = 1024
        total = 0
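        # Emit one command per slice of at most blocks_limit blocks until the
        # whole target range has been covered.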
        while target_blocks.size() != 0:
            blocks_to_write = target_blocks.get_first_block_obj(blocks_limit)
            # Find the corresponding new.dat from the set of blocks
            new_data = b''
            if type_str == ActionType.NEW:
                new_data = self.process_new_blocks(
                    new_data, blocks_to_write, each_img_file)
                self.chunk_new_list.append(new_data)
            UPDATE_LOGGER.print_log("blocks_to_write: %s! and blocks_limit: %d" % (
                                        blocks_to_write.to_string_raw(), blocks_limit))
            # Add a hash value for the 'new' command in a streaming update
            if OPTIONS_MANAGER.stream_update and type_str == ActionType.NEW:
                transfer_content.append(
                    "%s %s %s\n" % (type_str, get_chunk_sha256(new_data),
                                    blocks_to_write.to_string_raw()))
            else:
                transfer_content.append(
                    "%s %s\n" % (type_str, blocks_to_write.to_string_raw()))
            total += blocks_to_write.size()
            target_blocks = \
                target_blocks.get_subtract_with_other(blocks_to_write)
        return total

    def process_new_blocks(self, new_data, blocks_to_write, each_img_file):
        """
        Read the data of the new blocks from the image file.
        :param new_data: bytes buffer to which the read data is appended
        :param blocks_to_write: the blocks to write
        :param each_img_file: the image file to read from
        :return: new_data
        """
        # Parse the blocks to be written
        parse_range = self.parse_ranges(blocks_to_write.to_string_raw())
        with open(each_img_file, 'rb') as img_file:
            target_img_content = img_file.read()
        for start, end in parse_range:
            start_index = start * 4096
            end_index = end * 4096
            print(f'start_index: {start_index}, end_index: {end_index}')
            new_data += target_img_content[start_index:end_index]
        OPTIONS_MANAGER.len_block += len(new_data)
        print(f'new_data: {len(new_data)}')
        print(f'total block length: {OPTIONS_MANAGER.len_block}')
        return new_data

    @staticmethod
    def apply_compute_patch(src_file, tgt_file, limit, pkgdiff=False):
        """
        Compute the patch between the source and target files.
        :param src_file: source file name
        :param tgt_file: target file name
        :param limit: limit value passed to the diff executable via '-l'
        :param pkgdiff: whether to execute pkgdiff judgment
        :return: patch content, pkgdiff flag
        """
        patch_file_obj = \
            tempfile.NamedTemporaryFile(prefix="patch-", mode='wb')

        OPTIONS_MANAGER.incremental_temp_file_obj_list.append(
            patch_file_obj)
        cmd = [DIFF_EXE_PATH] if pkgdiff else [DIFF_EXE_PATH, '-b', '1']

        cmd.extend(['-s', src_file, '-d', tgt_file,
                    '-p', patch_file_obj.name, '-l', f'{limit}'])
        sub_p = subprocess.Popen(cmd, stdout=subprocess.PIPE,
                                 stderr=subprocess.STDOUT)
        output, _ = sub_p.communicate()
        sub_p.wait()
        patch_file_obj.seek(0)

        if sub_p.returncode != 0:
            raise ValueError(output)

        with open(patch_file_obj.name, 'rb') as file_read:
            patch_content = file_read.read()
        return patch_content, pkgdiff

    def patch_process(self, each_img_file):
        """
        Generate patches through calculation.
        """
        UPDATE_LOGGER.print_log("Patch Process!")

        new_dat_file_obj, patch_dat_file_obj, transfer_list_file_obj = \
            self.package_patch_zip.get_file_obj()

        stashes = {}
        total_blocks_count = 0
        stashed_blocks = 0
        max_stashed_blocks = 0
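        # Transfer list header: version, total block count placeholder, a
        # constant 0 and the max-stash placeholder; the *_MARK placeholders
        # are filled in later by get_transfer_content().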
        transfer_content = ["%d\n" % self.version, "TOTAL_MARK\n",
                            "0\n", "MAX_STASH_MARK\n"]

        diff_offset = 0
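        # Translate every action into transfer-list commands: stash/free
        # bookkeeping plus one of the zero, new or diff/move commands.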
        for each_action in self.actions_list:
            max_stashed_blocks, stashed_blocks = self.add_stash_command(
                each_action, max_stashed_blocks, stashed_blocks, stashes,
                transfer_content)

            free_commands_list, free_size, src_str_list = \
                self.add_free_command(each_action, stashes)

            src_str = " ".join(src_str_list)
            tgt_size = each_action.tgt_block_set.size()

            if each_action.type_str == ActionType.ZERO:
                total_blocks_count = \
                    self.apply_zero_type(each_action, total_blocks_count,
                                         transfer_content, each_img_file)
            elif each_action.type_str == ActionType.NEW:
                total_blocks_count = \
                    self.apply_new_type(each_action, new_dat_file_obj,
                                        tgt_size, total_blocks_count,
                                        transfer_content, each_img_file)
            elif each_action.type_str == ActionType.DIFFERENT:
                max_stashed_blocks, stashed_blocks, total_blocks_count, diff_offset = \
                    self.apply_diff_style(
                        diff_offset, each_action, max_stashed_blocks,
                        patch_dat_file_obj, src_str, stashed_blocks, tgt_size,
                        total_blocks_count, transfer_content, self.chunk_data_list, each_img_file)
            else:
                UPDATE_LOGGER.print_log("Unknown action type: %s!" %
                                        each_action.type_str)
                raise RuntimeError
            if free_commands_list:
                transfer_content.append("".join(free_commands_list))
                stashed_blocks -= free_size

        self.after_for_process(max_stashed_blocks, total_blocks_count,
                               transfer_content, transfer_list_file_obj, each_img_file)

    def apply_new_type(self, each_action, new_dat_file_obj, tgt_size,
                       total_blocks_count, transfer_content, each_img_file):
        self.tgt_img_obj.write_range_data_2_fd(
            each_action.tgt_block_set, new_dat_file_obj)
        UPDATE_LOGGER.print_log("%7s %s %s" % (
            each_action.type_str, each_action.tgt_name,
            str(each_action.tgt_block_set)))
        temp_size = self.write_split_transfers(
            transfer_content,
            each_action.type_str, each_action.tgt_block_set, each_img_file)
        if tgt_size != temp_size:
            raise RuntimeError
        total_blocks_count += temp_size
        return total_blocks_count

    def apply_zero_type(self, each_action, total_blocks_count,
                        transfer_content, each_img_file):
        UPDATE_LOGGER.print_log("%7s %s %s" % (
            each_action.type_str, each_action.tgt_name,
            str(each_action.tgt_block_set)))
        to_zero = \
            each_action.tgt_block_set.get_subtract_with_other(
                each_action.src_block_set)
        if self.write_split_transfers(transfer_content, each_action.type_str,
                                      to_zero, each_img_file) != to_zero.size():
            raise RuntimeError
        total_blocks_count += to_zero.size()
        return total_blocks_count

    def apply_diff_style(self, *args):
        """
        Process actions of the diff type.
        """
        diff_offset, each_action, max_stashed_blocks,\
            patch_dat_file_obj, src_str, stashed_blocks, tgt_size,\
            total_blocks_count, transfer_content, chunk_data_list, each_img_file = args
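        # If the source and target ranges hash to the same value, no patch is
        # needed and the action is downgraded to a plain move.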
        if self.tgt_img_obj. \
                range_sha256(each_action.tgt_block_set) == \
                self.src_img_obj.\
                range_sha256(each_action.src_block_set):
            each_action.type_str = ActionType.MOVE
            UPDATE_LOGGER.print_log("%7s %s %s (from %s %s)" % (
                each_action.type_str, each_action.tgt_name,
                str(each_action.tgt_block_set),
                each_action.src_name,
                str(each_action.src_block_set)))

            max_stashed_blocks, stashed_blocks, total_blocks_count = \
                self.add_move_command(
                    each_action, max_stashed_blocks, src_str,
                    stashed_blocks, tgt_size, total_blocks_count,
                    transfer_content)
        elif each_action.tgt_block_set.size() > 125 * 1024:  # target_file_size > 125 * 1024 * 4KB = 500M
            each_action.type_str = ActionType.NEW
            new_dat_file_obj, patch_dat_file_obj, transfer_list_file_obj = \
                self.package_patch_zip.get_file_obj()
            total_blocks_count = \
                self.apply_new_type(each_action, new_dat_file_obj,
                                    tgt_size, total_blocks_count,
                                    transfer_content, each_img_file)
        else:
            # Streaming update for files larger than 45KB, sliced
            do_pkg_diff, patch_value, diff_offset = self.compute_diff_patch(
                each_action, patch_dat_file_obj, diff_offset, src_str, transfer_content,
                chunk_data_list, tgt_size, total_blocks_count, each_img_file)
            if len(patch_value) > 0:
                stashed_blocks, max_stashed_blocks = \
                    self.update_stashed_blocks(
                        each_action, stashed_blocks, max_stashed_blocks)

                self.add_diff_command(diff_offset, do_pkg_diff,
                                      each_action, patch_value, src_str,
                                      transfer_content)
                # Add dependency patch
                self.chunk_data_list.append(patch_value)

                diff_offset += len(patch_value)
                total_blocks_count += tgt_size
            else:
                UPDATE_LOGGER.print_log("0 patch: %s" % patch_value)
        return max_stashed_blocks, stashed_blocks, total_blocks_count, diff_offset

    def update_stashed_blocks(self, each_action, stashed_blocks,
                              max_stashed_blocks):
        """
        Update the stashed blocks based on overlaps between source and target block sets.
        """
        if each_action.src_block_set.is_overlaps(
                each_action.tgt_block_set):
            stashed_blocks = \
                stashed_blocks + each_action.src_block_set.size()
            if stashed_blocks > max_stashed_blocks:
                max_stashed_blocks = stashed_blocks
        return stashed_blocks, max_stashed_blocks

    def after_for_process(self, max_stashed_blocks, total_blocks_count,
                          transfer_content, transfer_list_file_obj, each_img):
        """
        Finish the transfer list after the actions_list loop.
        :param max_stashed_blocks: maximum number of stashed blocks in actions
        :param total_blocks_count: total number of blocks
        :param transfer_content: transfer content
        :param transfer_list_file_obj: transfer file object
        :param each_img: image file used when splitting 'zero'/'new' transfers
        :return:
        """
        self.touched_src_sha256 = self.src_img_obj.range_sha256(
            self.touched_src_ranges)
        if self.tgt_img_obj.extended_range:
            if self.write_split_transfers(
                    transfer_content, ActionType.ZERO,
                    self.tgt_img_obj.extended_range, each_img) != \
                    self.tgt_img_obj.extended_range.size():
                raise RuntimeError
            total_blocks_count += self.tgt_img_obj.extended_range.size()
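        # Blocks that are neither written by any action nor part of the cared
        # block range do not need to be preserved and can be erased.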
        all_tgt = BlocksManager(
            range_data=(0, self.tgt_img_obj.total_blocks))
        all_tgt_minus_extended = all_tgt.get_subtract_with_other(
            self.tgt_img_obj.extended_range)
        new_not_care = all_tgt_minus_extended.get_subtract_with_other(
            self.tgt_img_obj.care_block_range)
        self.add_erase_content(new_not_care, transfer_content)
        transfer_content = self.get_transfer_content(
            max_stashed_blocks, total_blocks_count, transfer_content)
        if OPTIONS_MANAGER.stream_update:
            # Do not write the transfer list yet; it is written in one go
            # after the copy commands have been processed.
            self.transfer_content_in_chunk = transfer_content
            OPTIONS_MANAGER.max_stash_size = max(
                max_stashed_blocks * 4096, OPTIONS_MANAGER.max_stash_size)
        else:
            transfer_list_file_obj.write(transfer_content.encode())
            OPTIONS_MANAGER.max_stash_size = max(
                max_stashed_blocks * 4096, OPTIONS_MANAGER.max_stash_size)

    def add_diff_command(self, *args):
        """
        Add the diff command.
        """
        diff_offset, do_pkg_diff, each_action,\
            patch_value, src_str, transfer_content = args
        self.touched_src_ranges = self.touched_src_ranges.get_union_with_other(
            each_action.src_block_set)
        diff_type = "pkgdiff" if do_pkg_diff else "bsdiff"
        transfer_content.append("%s %d %d %s %s %s %s\n" % (
            diff_type,
            diff_offset, len(patch_value),
            self.src_img_obj.range_sha256(each_action.src_block_set),
            self.tgt_img_obj.range_sha256(each_action.tgt_block_set),
            each_action.tgt_block_set.to_string_raw(), src_str))

    def compute_diff_patch(self, each_action, patch_dat_file_obj, diff_offset,
                           src_str, transfer_content, chunk_data_list,
                           tgt_size, total_blocks_count, each_img_file):
        """
        Run the command to calculate the differential patch.
        """
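        # Dump the source and target block ranges into temporary files so that
        # the external diff executable can operate on them.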
        src_file_obj = \
            tempfile.NamedTemporaryFile(prefix="src-", mode='wb')
        self.src_img_obj.write_range_data_2_fd(
            each_action.src_block_set, src_file_obj)
        src_file_obj.seek(0)
        tgt_file_obj = tempfile.NamedTemporaryFile(
            prefix="tgt-", mode='wb')
        self.tgt_img_obj.write_range_data_2_fd(
            each_action.tgt_block_set, tgt_file_obj)
        tgt_file_obj.seek(0)
        OPTIONS_MANAGER.incremental_temp_file_obj_list.append(
            src_file_obj)
        OPTIONS_MANAGER.incremental_temp_file_obj_list.append(
            tgt_file_obj)
        do_pkg_diff = True
        try:
            patch_value, do_pkg_diff = self.apply_compute_patch(
                src_file_obj.name, tgt_file_obj.name, 4096, do_pkg_diff)
            # If the patch is larger than 45kb
            if OPTIONS_MANAGER.stream_update and \
                    len(patch_value) > OPTIONS_MANAGER.chunk_limit * 4096:
                self.touched_src_ranges = self.touched_src_ranges.get_union_with_other(
                    each_action.src_block_set)
                if each_action.tgt_block_set.size() % 10 == 1 or \
                        each_action.src_block_set.size() <= 1:
                    each_action.type_str = ActionType.NEW
                    new_dat_file_obj, patch_dat_file_obj, transfer_list_file_obj = \
                        self.package_patch_zip.get_file_obj()
                    total_blocks_count = \
                        self.apply_new_type(each_action, new_dat_file_obj,
                                            tgt_size, total_blocks_count,
                                            transfer_content, each_img_file)
                else:
                    # Do the slicing here
                    patch_package_chunk_obj = PatchPackageChunk(
                        src_file_obj.name, tgt_file_obj.name, do_pkg_diff,
                        transfer_content, diff_offset, patch_dat_file_obj,
                        self.src_img_obj, self.tgt_img_obj, each_action,
                        chunk_data_list)
                    diff_offset = patch_package_chunk_obj.diff_offset
                patch_value = b''
            src_file_obj.close()
            tgt_file_obj.close()
        except ValueError:
            UPDATE_LOGGER.print_log("Patch process Failed!")
            UPDATE_LOGGER.print_log("%7s %s %s (from %s %s)" % (
                each_action.type_str, each_action.tgt_name,
                str(each_action.tgt_block_set),
                each_action.src_name,
                str(each_action.src_block_set)),
                                    UPDATE_LOGGER.ERROR_LOG)
            raise ValueError
        if len(patch_value) > 0:
            patch_dat_file_obj.write(patch_value)

        return do_pkg_diff, patch_value, diff_offset

    def add_move_command(self, *args):
        """
        Add the move command.
        """
        each_action, max_stashed_blocks, src_str,\
            stashed_blocks, tgt_size, total_blocks_count,\
            transfer_content = args
        src_block_set = each_action.src_block_set
        tgt_block_set = each_action.tgt_block_set
        if src_block_set != tgt_block_set:
            if src_block_set.is_overlaps(tgt_block_set):
                stashed_blocks = stashed_blocks + \
                                 src_block_set.size()
                if stashed_blocks > max_stashed_blocks:
                    max_stashed_blocks = stashed_blocks

            self.touched_src_ranges = \
                self.touched_src_ranges.get_union_with_other(src_block_set)

            transfer_content.append(
                "{type_str} {tgt_hash} {tgt_string} {src_str}\n".
                format(type_str=each_action.type_str,
                       tgt_hash=self.tgt_img_obj.
                       range_sha256(each_action.tgt_block_set),
                       tgt_string=tgt_block_set.to_string_raw(),
                       src_str=src_str))
            total_blocks_count += tgt_size
        return max_stashed_blocks, stashed_blocks, total_blocks_count

    def add_free_command(self, each_action, stashes):
        """
        Add the free command.
        :param each_action: action object to be processed
        :param stashes: Stash dict
        :return: free_commands_list, free_size, src_str_list
        """
        free_commands_list = []
        free_size = 0
        src_blocks_size = each_action.src_block_set.size()
        src_str_list = [str(src_blocks_size)]
        un_stashed_src_ranges = each_action.src_block_set
        mapped_stashes = []
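        # Consume every stash used by this action: subtract the stashed ranges
        # from the raw source ranges and emit a 'free' command once a stash's
        # reference count drops to zero.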
        for _, each_stash_before in each_action.use_stash:
            un_stashed_src_ranges = \
                un_stashed_src_ranges.get_subtract_with_other(
                    each_stash_before)
            src_range_sha = \
                self.src_img_obj.range_sha256(each_stash_before)
            each_stash_before = \
                each_action.src_block_set.get_map_within(each_stash_before)
            mapped_stashes.append(each_stash_before)
            if src_range_sha not in stashes:
                raise RuntimeError
            src_str_list.append(
                "%s:%s" % (src_range_sha, each_stash_before.to_string_raw()))
            stashes[src_range_sha] -= 1
            if stashes[src_range_sha] == 0:
                free_commands_list.append("free %s\n" % (src_range_sha,))
                free_size += each_stash_before.size()
                stashes.pop(src_range_sha)
        self.apply_stashed_range(each_action, mapped_stashes, src_blocks_size,
                                 src_str_list, un_stashed_src_ranges)
        return free_commands_list, free_size, src_str_list

    def apply_stashed_range(self, *args):
        each_action, mapped_stashes, src_blocks_size,\
            src_str_list, un_stashed_src_ranges = args
        if un_stashed_src_ranges.size() != 0:
            src_str_list.insert(1, un_stashed_src_ranges.to_string_raw())
            if each_action.use_stash:
                mapped_un_stashed = each_action.src_block_set.get_map_within(
                    un_stashed_src_ranges)
                src_str_list.insert(2, mapped_un_stashed.to_string_raw())
                mapped_stashes.append(mapped_un_stashed)
                self.check_partition(
                    BlocksManager(range_data=(0, src_blocks_size)),
                    mapped_stashes)
        else:
            src_str_list.insert(1, "-")
            self.check_partition(
                BlocksManager(range_data=(0, src_blocks_size)), mapped_stashes)

    def add_stash_command(self, each_action, max_stashed_blocks,
                          stashed_blocks, stashes, transfer_content):
        """
        Add the stash command.
        :param each_action: action object to be processed
        :param max_stashed_blocks: number of max stash blocks in all actions
        :param stashed_blocks: number of stash blocks
        :param stashes: Stash dict
        :param transfer_content: transfer content list
        :return: max_stashed_blocks, stashed_blocks
        """
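        # Reference-count identical stash ranges by their SHA-256 so that the
        # same range is only stashed once.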
        for _, each_stash_before in each_action.stash_before:
            src_range_sha = \
                self.src_img_obj.range_sha256(each_stash_before)
            if src_range_sha in stashes:
                stashes[src_range_sha] += 1
            else:
                stashes[src_range_sha] = 1
                stashed_blocks += each_stash_before.size()
                self.touched_src_ranges = \
                    self.touched_src_ranges.\
                    get_union_with_other(each_stash_before)
                transfer_content.append("stash %s %s\n" % (
                    src_range_sha, each_stash_before.to_string_raw()))
        if stashed_blocks > max_stashed_blocks:
            max_stashed_blocks = stashed_blocks
        return max_stashed_blocks, stashed_blocks

    def write_script(self, partition, script_check_cmd_list,
                     script_write_cmd_list, verse_script):
        """
        Add command content to the script.
        :param partition: image name
        :param script_check_cmd_list: incremental check command list
        :param script_write_cmd_list: incremental write command list
        :param verse_script: verse script object
        :return:
        """
        ranges_str = self.touched_src_ranges.to_string_raw()
        expected_sha = self.touched_src_sha256

        sha_check_cmd = verse_script.sha_check(
            ranges_str, expected_sha, partition)

        first_block_check_cmd = verse_script.first_block_check(partition)

        abort_cmd = verse_script.abort(partition)

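        # Abort the update if either the touched-range hash check or the
        # first-block check fails.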
        cmd = 'if ({sha_check_cmd} != 0 || ' \
              '{first_block_check_cmd} != 0)' \
              '{{\n    {abort_cmd}}}\n'.format(
                sha_check_cmd=sha_check_cmd,
                first_block_check_cmd=first_block_check_cmd,
                abort_cmd=abort_cmd)

        script_check_cmd_list.append(cmd)

        block_update_cmd = verse_script.block_update(partition)

        cmd = '%s_WRITE_FLAG%s' % (partition, block_update_cmd)
        script_write_cmd_list.append(cmd)

    def add_erase_content(self, new_not_care, transfer_content):
        """
        Add the erase command.
        :param new_not_care: blocks that don't need to be cared about
        :param transfer_content: transfer content list
        :return:
        """
        erase_first = new_not_care.\
            get_subtract_with_other(self.touched_src_ranges)
        if erase_first.size() != 0:
            transfer_content.insert(
                4, "erase %s\n" % (erase_first.to_string_raw(),))
        erase_last = new_not_care.get_subtract_with_other(erase_first)
        if erase_last.size() != 0:
            transfer_content.append(
                "erase %s\n" % (erase_last.to_string_raw(),))

    def add_ab_copy_content(self, blocks_length, need_copy_blocks_list, transfer_content):
        """
        Add the copy command.
        :param blocks_length: length of the blocks to be copied
        :param need_copy_blocks_list: blocks not operated on in the action_list
        :param transfer_content: transfer content
        :return: transfer_content
        """
        new_dat_file_obj, patch_dat_file_obj, transfer_list_file_obj = \
            self.package_patch_zip.get_file_obj()
        transfer_lines = transfer_content.splitlines()
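        # Build a 'copy' command for the blocks that need plain copying; when
        # the list is empty, fall back to a minimal single-block copy command.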
        if need_copy_blocks_list:
            tgt_string = ','.join(map(str, need_copy_blocks_list))

            new_line = "{type_str} {tgt_string_len},{tgt_string} {length} {src_str_len},{src_str}".format(
                type_str='copy',
                tgt_string_len=len(need_copy_blocks_list),
                tgt_string=tgt_string,
                length=blocks_length,
                src_str_len=len(need_copy_blocks_list),
                src_str=tgt_string)
        else:
            new_line = "copy 2,0,1 1 2,0,1"
        if len(transfer_lines) >= 4:  # Check if there are at least 4 lines
            transfer_lines.insert(4, new_line)  # Insert after the fourth line
        else:
            transfer_lines.append(new_line)  # Otherwise just append
        transfer_content = "\n".join(transfer_lines)
        transfer_list_file_obj.write(transfer_content.encode())
        UPDATE_LOGGER.print_log('AB Copy block[%s]' % need_copy_blocks_list,
                                UPDATE_LOGGER.INFO_LOG)
        return transfer_content

    def parse_ranges(self, ranges_str):
        """Parse the range string and return a list of (start, end) tuples."""
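        # The first field of the raw range string is a count and is skipped;
        # the remaining values are consumed as (start, end) pairs.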
        ranges = []
        parts = ranges_str.split(',')[1:]  # Skip the first part
        # Iterate through the parts in pairs
        for i in range(0, len(parts), 2):
            start = int(parts[i])
            end = int(parts[i + 1]) if (i + 1) < len(parts) else start + 1  # Handle last range case
            # Append the range as (start, end) where end is exclusive
            ranges.append((start, end))
        print(f'Parsed block ranges: {ranges}')
        return ranges

    def get_chunk_pkgdiff_list(self):
        return self.chunk_data_list

    def get_chunk_new_list(self):
        return self.chunk_new_list

    def get_transfer_content_in_chunk(self):
        return self.transfer_content_in_chunk


class PackagePatchZip:
    """
    Compress the patch files generated by the
    differential calculation into a *.zip file.
    """
    def __init__(self, partition):
        self.partition = partition
        self.partition_new_dat_file_name = "%s.%s" % (partition, NEW_DAT)
        self.partition_patch_dat_file_name = "%s.%s" % (partition, PATCH_DAT)
        self.partition_transfer_file_name = "%s.%s" % (partition, TRANSFER_LIST)

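        # The three artifacts are staged in temporary files created under the
        # target package directory.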
        self.new_dat_file_obj = tempfile.NamedTemporaryFile(
            dir=OPTIONS_MANAGER.target_package, prefix="%s-" % NEW_DAT, mode='wb')
        self.patch_dat_file_obj = tempfile.NamedTemporaryFile(
            dir=OPTIONS_MANAGER.target_package, prefix="%s-" % PATCH_DAT, mode='wb')
        self.transfer_list_file_obj = tempfile.NamedTemporaryFile(
            dir=OPTIONS_MANAGER.target_package, prefix="%s-" % TRANSFER_LIST, mode='wb')

        OPTIONS_MANAGER.incremental_temp_file_obj_list.append(
            self.new_dat_file_obj)
        OPTIONS_MANAGER.incremental_temp_file_obj_list.append(
            self.patch_dat_file_obj)
        OPTIONS_MANAGER.incremental_temp_file_obj_list.append(
            self.transfer_list_file_obj)

        self.partition_file_obj = tempfile.NamedTemporaryFile(
            dir=OPTIONS_MANAGER.target_package, prefix="partition_patch-")

    def get_file_obj(self):
        """
        Obtain file objects.
        """
        self.new_dat_file_obj.flush()
        self.patch_dat_file_obj.flush()
        self.transfer_list_file_obj.flush()
        return self.new_dat_file_obj, self.patch_dat_file_obj, \
            self.transfer_list_file_obj

    def package_block_patch(self, zip_file):
        self.new_dat_file_obj.flush()
        self.patch_dat_file_obj.flush()
        self.transfer_list_file_obj.flush()
        if not OPTIONS_MANAGER.stream_update:
            # add new.dat to ota.zip
            zip_file.write(self.new_dat_file_obj.name, self.partition_new_dat_file_name)
            # add patch.dat to ota.zip
            zip_file.write(self.patch_dat_file_obj.name, self.partition_patch_dat_file_name)
            # add transfer.list to ota.zip
            zip_file.write(self.transfer_list_file_obj.name, self.partition_transfer_file_name)
697