/*
 * Copyright (c) 2021-2022 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.ohos.hapsigntool.utils;

import com.ohos.hapsigntool.hap.entity.Pair;
import com.ohos.hapsigntool.hap.entity.SigningBlock;
import com.ohos.hapsigntool.hap.exception.SignatureNotFoundException;
import com.ohos.hapsigntool.hap.sign.ContentDigestAlgorithm;
import com.ohos.hapsigntool.hap.sign.SignHap;
import com.ohos.hapsigntool.zip.MessageDigestZipDataOutput;
import com.ohos.hapsigntool.zip.ZipDataInput;
import com.ohos.hapsigntool.zip.ZipDataOutput;
import com.ohos.hapsigntool.zip.ZipFileInfo;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.bouncycastle.util.Arrays;

import java.io.ByteArrayOutputStream;
import java.io.FileInputStream;
import java.io.IOException;
import java.nio.BufferUnderflowException;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.security.DigestException;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.Collections;

/**
 * Hap util, parse hap, find signature block.
 *
 * @since 2021/12/20
 */
public class HapUtils {
    private static final Logger LOGGER = LogManager.getLogger(HapUtils.class);

    /**
     * ID of hap signature blocks of version 1
     */
    public static final int HAP_SIGNATURE_SCHEME_V1_BLOCK_ID = 0x20000000;

    /**
     * ID of hap proof of rotation block
     */
    public static final int HAP_PROOF_OF_ROTATION_BLOCK_ID = 0x20000001;

    /**
     * ID of profile block
     */
    public static final int HAP_PROFILE_BLOCK_ID = 0x20000002;

    /**
     * ID of property block
     */
    public static final int HAP_PROPERTY_BLOCK_ID = 0x20000003;

    /**
     * The max size of a data chunk used to compute digests
     */
    public static final int CONTENT_DIGESTED_CHUNK_MAX_SIZE_BYTES = 1024 * 1024;

    /**
     * Content version
     */
    public static final int CONTENT_VERSION = 2;

    /**
     * bit size
     */
    public static final int BIT_SIZE = 8;

    /**
     * half bit size
     */
    public static final int HALF_BIT_SIZE = 4;

    /**
     * int size
     */
    public static final int INT_SIZE = 4;

    /**
     * block number
     */
    public static final int BLOCK_NUMBER = 1;

    /**
     * hap sign scheme v2 signature block version
     */
    public static final int HAP_SIGN_SCHEME_V2_BLOCK_VERSION = 2;

    /**
     * hap sign scheme v3 signature block version
     */
    public static final int HAP_SIGN_SCHEME_V3_BLOCK_VERSION = 3;

    /**
     * The value of the lower 8 bytes of the old (v2) magic word
     */
    public static final long HAP_SIG_BLOCK_MAGIC_LO_V2 = 0x2067695320504148L;

    /**
     * The value of the higher 8 bytes of the old (v2) magic word
     */
    public static final long HAP_SIG_BLOCK_MAGIC_HI_V2 = 0x3234206b636f6c42L;
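
    // Note (derived from the constants themselves): read as little-endian, the two v2 magic longs
    // above are the ASCII bytes of "HAP Sig Block 42", and the v3 magic longs declared below are
    // the ASCII bytes of "<hap sign block>", matching HAP_SIGNING_BLOCK_MAGIC_V2 / _V3.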
    private HapUtils() {
    }

    /**
     * The set of IDs of optional blocks in hap signature block.
     */
    private static final Set<Integer> HAP_SIGNATURE_OPTIONAL_BLOCK_IDS;

    /**
     * Minimum api version for hap sign scheme v3.
     */
    private static final int MIN_COMPATIBLE_VERSION_FOR_SCHEMA_V3 = 8;

    /**
     * Magic word of hap signature block v2
     */
    private static final byte[] HAP_SIGNING_BLOCK_MAGIC_V2 =
        new byte[] {0x48, 0x41, 0x50, 0x20, 0x53, 0x69, 0x67, 0x20, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x20, 0x34, 0x32};

    /**
     * Magic word of hap signature block v3
     */
    private static final byte[] HAP_SIGNING_BLOCK_MAGIC_V3 =
        new byte[] {0x3c, 0x68, 0x61, 0x70, 0x20, 0x73, 0x69, 0x67, 0x6e, 0x20, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x3e};

    /**
     * The value of the lower 8 bytes of the v3 magic word
     */
    public static final long HAP_SIG_BLOCK_MAGIC_LO_V3 = 0x676973207061683cL;

    /**
     * The value of the higher 8 bytes of the v3 magic word
     */
    public static final long HAP_SIG_BLOCK_MAGIC_HI_V3 = 0x3e6b636f6c62206eL;

    /**
     * Size of hap signature block header
     */
    public static final int HAP_SIG_BLOCK_HEADER_SIZE = 32;

    /**
     * The min size of hap signature block
     */
    public static final int HAP_SIG_BLOCK_MIN_SIZE = HAP_SIG_BLOCK_HEADER_SIZE;

    private static final byte ZIP_FIRST_LEVEL_CHUNK_PREFIX = 0x5a;
    private static final byte ZIP_SECOND_LEVEL_CHUNK_PREFIX = (byte) 0xa5;
    private static final int DIGEST_PRIFIX_LENGTH = 5;
    private static final int BUFFER_LENGTH = 4096;

    /**
     * Get HAP_SIGNATURE_OPTIONAL_BLOCK_IDS
     *
     * @return HAP_SIGNATURE_OPTIONAL_BLOCK_IDS
     */
    public static Set<Integer> getHapSignatureOptionalBlockIds() {
        return HAP_SIGNATURE_OPTIONAL_BLOCK_IDS;
    }

    /**
     * Get HAP_SIGNING_BLOCK_MAGIC
     *
     * @param compatibleVersion compatible api version
     * @return HAP_SIGNING_BLOCK_MAGIC
     */
    public static byte[] getHapSigningBlockMagic(int compatibleVersion) {
        if (compatibleVersion >= MIN_COMPATIBLE_VERSION_FOR_SCHEMA_V3) {
            return HAP_SIGNING_BLOCK_MAGIC_V3.clone();
        }
        return HAP_SIGNING_BLOCK_MAGIC_V2.clone();
    }

    /**
     * Get version number of hap signature block
     *
     * @param compatibleVersion compatible api version
     * @return signature block version matching the compatible api version
     */
    public static int getHapSigningBlockVersion(int compatibleVersion) {
        if (compatibleVersion >= MIN_COMPATIBLE_VERSION_FOR_SCHEMA_V3) {
            return HAP_SIGN_SCHEME_V3_BLOCK_VERSION;
        }
        return HAP_SIGN_SCHEME_V2_BLOCK_VERSION;
    }
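
    // Illustrative sketch of how the two helpers above pair up (example values only):
    // a compatible api version of MIN_COMPATIBLE_VERSION_FOR_SCHEMA_V3 (8) or higher selects
    // the v3 constants, anything lower selects the v2 constants, e.g.
    //     byte[] magic = HapUtils.getHapSigningBlockMagic(8);    // "<hap sign block>" bytes
    //     int version = HapUtils.getHapSigningBlockVersion(8);   // HAP_SIGN_SCHEME_V3_BLOCK_VERSION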
    /**
     * The set of IDs of optional blocks in hap signature block.
     */
    static {
        Set<Integer> blockIds = new HashSet<Integer>();
        blockIds.add(HAP_PROOF_OF_ROTATION_BLOCK_ID);
        blockIds.add(HAP_PROFILE_BLOCK_ID);
        blockIds.add(HAP_PROPERTY_BLOCK_ID);
        HAP_SIGNATURE_OPTIONAL_BLOCK_IDS = Collections.unmodifiableSet(blockIds);
    }

    /**
     * Read data from hap file.
     *
     * @param file input file path.
     * @return byte array read from the file.
     * @throws IOException on error.
     */
    public static byte[] readFileToByte(String file) throws IOException {
        try (FileInputStream in = new FileInputStream(file);
            ByteArrayOutputStream out = new ByteArrayOutputStream(in.available())) {
            byte[] buf = new byte[BUFFER_LENGTH];
            int len = 0;
            while ((len = in.read(buf)) != -1) {
                out.write(buf, 0, len);
            }
            return out.toByteArray();
        }
    }

    private static long getChunkCount(ZipDataInput[] contents) {
        long chunkCount = 0L;
        for (ZipDataInput content : contents) {
            chunkCount += ((content.size() + CONTENT_DIGESTED_CHUNK_MAX_SIZE_BYTES - 1)
                / CONTENT_DIGESTED_CHUNK_MAX_SIZE_BYTES);
        }
        return chunkCount;
    }

    /**
     * compute digests of contents
     *
     * @param digestAlgorithms algorithm of digest
     * @param zipData content used to get digest
     * @param optionalBlocks list of optional blocks used to get digest
     * @return digests
     * @throws DigestException digest error
     * @throws IOException if an IO error occurs when computing hap file digests
     */
    public static Map<ContentDigestAlgorithm, byte[]> computeDigests(
        Set<ContentDigestAlgorithm> digestAlgorithms, ZipDataInput[] zipData, List<SigningBlock> optionalBlocks)
            throws DigestException, IOException {
        long chunkCountLong = getChunkCount(zipData);
        if (chunkCountLong > Integer.MAX_VALUE) {
            throw new DigestException("Input too long: " + chunkCountLong + " chunks");
        }
        int chunkCount = (int) chunkCountLong;
        ContentDigestAlgorithm[] contentDigestAlgorithms = digestAlgorithms.toArray(
            new ContentDigestAlgorithm[digestAlgorithms.size()]);
        MessageDigest[] messageDigests = new MessageDigest[contentDigestAlgorithms.length];
        int[] digestOutputSizes = new int[contentDigestAlgorithms.length];
        byte[][] digestOfChunks = new byte[contentDigestAlgorithms.length][];
        initComputeItem(chunkCount, contentDigestAlgorithms, messageDigests, digestOutputSizes, digestOfChunks);
        int chunkIndex = 0;
        byte[] chunkContentPrefix = new byte[DIGEST_PRIFIX_LENGTH];
        chunkContentPrefix[0] = ZIP_SECOND_LEVEL_CHUNK_PREFIX;
        byte[] buf = new byte[CONTENT_DIGESTED_CHUNK_MAX_SIZE_BYTES];
        ZipDataOutput digests = new MessageDigestZipDataOutput(messageDigests);
        for (ZipDataInput content : zipData) {
            long offset = 0L;
            long remaining = content.size();
            while (remaining > 0) {
                int chunkSize = (int) Math.min(buf.length, remaining);
                setUInt32ToByteArrayWithLittleEngian(chunkSize, chunkContentPrefix, 1);
                for (int i = 0; i < contentDigestAlgorithms.length; i++) {
                    messageDigests[i].update(chunkContentPrefix);
                }
                try {
                    content.copyTo(offset, chunkSize, digests);
                } catch (IOException e) {
                    throw new IOException("Failed to read chunk #" + chunkIndex, e);
                }

                getDigests(contentDigestAlgorithms, digestOutputSizes, messageDigests, digestOfChunks, chunkIndex);
                offset += chunkSize;
                remaining -= chunkSize;
                chunkIndex++;
            }
        }
        return getContentDigestAlgorithmMap(optionalBlocks, contentDigestAlgorithms, messageDigests, digestOfChunks);
    }
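
    // Digest layout built by computeDigests above and its helpers below (a summary of this code,
    // not an external format spec): each chunk of at most CONTENT_DIGESTED_CHUNK_MAX_SIZE_BYTES
    // is hashed as
    //     chunkDigest = H(0xa5 || uint32-LE(chunkSize) || chunkData)
    // and the final digest per algorithm is
    //     H(0x5a || uint32-LE(chunkCount) || chunkDigest_0 || chunkDigest_1 || ... || optional block values)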
    private static void getDigests(ContentDigestAlgorithm[] contentDigestAlgorithms, int[] digestOutputSizes,
        MessageDigest[] messageDigests, byte[][] digestOfChunks, int chunkIndex) throws DigestException {
        for (int i = 0; i < contentDigestAlgorithms.length; i++) {
            int expectedDigestSizeBytes = digestOutputSizes[i];
            int actualDigestSizeBytes = messageDigests[i].digest(digestOfChunks[i],
                chunkIndex * expectedDigestSizeBytes + DIGEST_PRIFIX_LENGTH, expectedDigestSizeBytes);
            if (actualDigestSizeBytes != expectedDigestSizeBytes) {
                throw new DigestException("Unexpected output size of " + messageDigests[i].getAlgorithm()
                    + " digest: " + actualDigestSizeBytes);
            }
        }
    }

    private static void initComputeItem(int chunkCount, ContentDigestAlgorithm[] contentDigestAlgorithms,
        MessageDigest[] messageDigests, int[] digestOutputSizes,
        byte[][] digestOfChunks) throws DigestException {
        try {
            for (int i = 0; i < contentDigestAlgorithms.length; i++) {
                int digestOutputSizeBytes = contentDigestAlgorithms[i].getDigestOutputByteSize();
                byte[] concatenationOfChunkCountAndChunkDigests =
                    new byte[DIGEST_PRIFIX_LENGTH + chunkCount * digestOutputSizeBytes];
                concatenationOfChunkCountAndChunkDigests[0] = ZIP_FIRST_LEVEL_CHUNK_PREFIX;
                setUInt32ToByteArrayWithLittleEngian(chunkCount, concatenationOfChunkCountAndChunkDigests, 1);
                digestOfChunks[i] = concatenationOfChunkCountAndChunkDigests;
                messageDigests[i] = MessageDigest.getInstance(contentDigestAlgorithms[i].getDigestAlgorithm());
                digestOutputSizes[i] = contentDigestAlgorithms[i].getDigestOutputByteSize();
            }
        } catch (NoSuchAlgorithmException e) {
            throw new DigestException("Digest algorithm not supported", e);
        }
    }

    private static Map<ContentDigestAlgorithm, byte[]> getContentDigestAlgorithmMap(List<SigningBlock> optionalBlocks,
        ContentDigestAlgorithm[] contentDigestAlgorithms, MessageDigest[] messageDigests, byte[][] digestOfChunks) {
        Map<ContentDigestAlgorithm, byte[]> result = new HashMap<>(contentDigestAlgorithms.length);
        for (int i = 0; i < contentDigestAlgorithms.length; i++) {
            messageDigests[i].update(digestOfChunks[i]);
            for (SigningBlock signingBlock : optionalBlocks) {
                messageDigests[i].update(signingBlock.getValue());
            }
            result.put(contentDigestAlgorithms[i], messageDigests[i].digest());
        }
        return result;
    }

    private static void setUInt32ToByteArrayWithLittleEngian(int value, byte[] result, int offset) {
        for (int i = 0; i < INT_SIZE; i++) {
            result[offset + i] = (byte) ((value >> (BIT_SIZE * i)) & 0xff);
        }
    }
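
    // Worked example for the little-endian helper above (illustrative values only):
    // setUInt32ToByteArrayWithLittleEngian(0x12345678, buf, 1) writes
    // buf[1] = 0x78, buf[2] = 0x56, buf[3] = 0x34, buf[4] = 0x12.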
    private static final char[] HEX_CHAR_ARRAY = "0123456789ABCDEF".toCharArray();

    /**
     * Slice buffer to target size.
     *
     * @param source input data buffer
     * @param targetSize target buffer's size
     * @return target buffer of target size
     */
    public static ByteBuffer sliceBuffer(ByteBuffer source, int targetSize) {
        int limit = source.limit();
        int position = source.position();
        int targetLimit = position + targetSize;
        if ((targetLimit < position) || (targetLimit > limit)) {
            LOGGER.error("targetSize: " + targetSize);
            throw new BufferUnderflowException();
        }
        try {
            source.limit(targetLimit);
            ByteBuffer target = source.slice();
            target.order(source.order());
            return target;
        } finally {
            source.position(targetLimit);
            source.limit(limit);
        }
    }

    private static ByteBuffer sliceBuffer(ByteBuffer source, int startPos, int endPos) {
        int capacity = source.capacity();
        if (startPos < 0 || endPos < startPos || endPos > capacity) {
            throw new IllegalArgumentException(
                "startPos: " + startPos + ", endPos: " + endPos + ", capacity: " + capacity);
        }
        int limit = source.limit();
        int position = source.position();
        try {
            source.position(0);
            source.limit(endPos);
            source.position(startPos);
            ByteBuffer target = source.slice();
            target.order(source.order());
            return target;
        } finally {
            source.limit(limit);
            source.position(position);
        }
    }

    /**
     * Slice buffer from startPos to endPos, and then reverse it.
     *
     * @param hapSigningBlock input buffer used to slice.
     * @param startPos start position of slice buffer.
     * @param endPos end position of slice buffer.
     * @return new buffer.
     */
    public static ByteBuffer reverseSliceBuffer(ByteBuffer hapSigningBlock, int startPos, int endPos) {
        ByteBuffer header = HapUtils.sliceBuffer(hapSigningBlock, startPos, endPos);
        byte[] signatureBlockBytes = new byte[header.capacity()];
        header.get(signatureBlockBytes, 0, signatureBlockBytes.length);
        return ByteBuffer.wrap(Arrays.reverse(signatureBlockBytes));
    }
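
    // Illustrative usage of the slicing helpers above (hypothetical buffer contents):
    //     ByteBuffer head = HapUtils.sliceBuffer(signingBlock, HAP_SIG_BLOCK_HEADER_SIZE);
    // returns a HAP_SIG_BLOCK_HEADER_SIZE-byte view that keeps the source's byte order and
    // advances the source position past that region, while reverseSliceBuffer(block, start, end)
    // wraps a reversed copy of the [start, end) region in a new buffer.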
    /**
     * Check whether buffer is little endian.
     *
     * @param buffer ByteBuffer used to check
     */
    public static void checkBufferLittleEndian(ByteBuffer buffer) {
        if (buffer.order() == ByteOrder.LITTLE_ENDIAN) {
            return;
        }
        throw new IllegalArgumentException("ByteBuffer is not little endian");
    }

    /**
     * TLV encode list of pairs
     *
     * @param pairList input list of pairs
     * @return byte array after encoding
     */
    public static byte[] encodeListOfPairsToByteArray(List<Pair<Integer, byte[]>> pairList) {
        int encodeSize = 0;
        encodeSize += INT_SIZE + INT_SIZE;
        for (Pair<Integer, byte[]> pair : pairList) {
            encodeSize += INT_SIZE + INT_SIZE + INT_SIZE + pair.getSecond().length;
        }
        ByteBuffer encodeBytes = ByteBuffer.allocate(encodeSize);
        encodeBytes.order(ByteOrder.LITTLE_ENDIAN);
        encodeBytes.putInt(CONTENT_VERSION); // version
        encodeBytes.putInt(BLOCK_NUMBER); // block number
        for (Pair<Integer, byte[]> pair : pairList) {
            byte[] second = pair.getSecond();
            encodeBytes.putInt(INT_SIZE + INT_SIZE + second.length);
            encodeBytes.putInt(pair.getFirst());
            encodeBytes.putInt(second.length);
            encodeBytes.put(second);
        }
        return encodeBytes.array();
    }
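
    // Resulting byte layout of encodeListOfPairsToByteArray, all fields little-endian
    // (a summary of the code above, not an external format spec):
    //     [uint32 version][uint32 block number]
    //     then per pair: [uint32 length = 8 + value.length][uint32 id][uint32 value.length][value bytes]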
    /**
     * Translate value to Hex string.
     *
     * @param value input byte array.
     * @param separator symbol inserted between two bytes.
     * @return a hex-values string.
     */
    public static String toHex(byte[] value, String separator) {
        StringBuilder sb = new StringBuilder(value.length + value.length);
        String useSeparator = separator == null ? "" : separator;
        int len = value.length;
        for (int i = 0; i < len; i++) {
            int hi = (value[i] & 0xff) >>> HALF_BIT_SIZE;
            int lo = value[i] & 0x0f;
            sb.append(HEX_CHAR_ARRAY[hi]).append(HEX_CHAR_ARRAY[lo]);
            if (i != len - 1) {
                sb.append(useSeparator);
            }
        }
        return sb.toString();
    }

    /**
     * find signing block from hap file
     *
     * @param hap ZipDataInput object of zip file
     * @param zipInfo ZipFileInfo object of hap file
     * @return HapSignBlockInfo containing offset, version and content of the signing block
     * @throws SignatureNotFoundException No signing block is found
     * @throws IOException file operation error
     */
    public static HapSignBlockInfo findHapSigningBlock(ZipDataInput hap, ZipFileInfo zipInfo)
        throws SignatureNotFoundException, IOException {
        long centralDirectoryStartOffset = zipInfo.getCentralDirectoryOffset();
        long centralDirectorySize = zipInfo.getCentralDirectorySize();
        long eocdOffset = zipInfo.getEocdOffset();
        long centralDirectoryEndOffset = centralDirectoryStartOffset + centralDirectorySize;
        if (eocdOffset != centralDirectoryEndOffset) {
            throw new SignatureNotFoundException("ZIP Central Directory is not immediately followed by End of"
                + " Central Directory. CD end: " + centralDirectoryEndOffset + ", EoCD start: " + eocdOffset);
        }
        if (centralDirectoryStartOffset < HAP_SIG_BLOCK_MIN_SIZE) {
            throw new SignatureNotFoundException("Hap too small for Hap Signing Block. ZIP Central Directory offset: "
                + centralDirectoryStartOffset);
        }
        long hapSigningBlockHeaderOffset = centralDirectoryStartOffset - HAP_SIG_BLOCK_HEADER_SIZE;
        ByteBuffer hapSigningBlockHeader = hap.createByteBuffer(hapSigningBlockHeaderOffset, HAP_SIG_BLOCK_HEADER_SIZE);
        hapSigningBlockHeader.order(ByteOrder.LITTLE_ENDIAN);
        int blockCount = hapSigningBlockHeader.getInt();
        long hapSigBlockSize = hapSigningBlockHeader.getLong();
        long hapSignBlockMagicLo = hapSigningBlockHeader.getLong();
        long hapSignBlockMagicHi = hapSigningBlockHeader.getLong();
        int version = hapSigningBlockHeader.getInt();
        if (!isVersionAndMagicNumValid(version, hapSignBlockMagicLo, hapSignBlockMagicHi)) {
            throw new SignatureNotFoundException("No Hap Signing Block before ZIP Central Directory");
        }
        if ((hapSigBlockSize < HAP_SIG_BLOCK_HEADER_SIZE)
            || (hapSigBlockSize > Integer.MAX_VALUE - SignHap.getBlockSize())) {
            throw new SignatureNotFoundException("Hap Signing Block size out of range: " + hapSigBlockSize);
        }
        int totalSize = (int) hapSigBlockSize;
        long hapSigningBlockOffset = centralDirectoryStartOffset - totalSize;
        if (hapSigningBlockOffset < 0) {
            throw new SignatureNotFoundException("Hap Signing Block offset out of range: " + hapSigningBlockOffset);
        }
        ByteBuffer hapSigningBlockByteBuffer = hap.createByteBuffer(hapSigningBlockOffset, totalSize)
            .order(ByteOrder.LITTLE_ENDIAN);
        LOGGER.info("Find Hap Signing Block success, version: {}, block count: {}", version, blockCount);
        return new HapSignBlockInfo(hapSigningBlockOffset, version, hapSigningBlockByteBuffer);
    }

    private static boolean isVersionAndMagicNumValid(int version, long hapSignBlockMagicLo, long hapSignBlockMagicHi) {
        if (version < HAP_SIGN_SCHEME_V3_BLOCK_VERSION) {
            return hapSignBlockMagicLo == HAP_SIG_BLOCK_MAGIC_LO_V2 && hapSignBlockMagicHi == HAP_SIG_BLOCK_MAGIC_HI_V2;
        }
        return hapSignBlockMagicLo == HAP_SIG_BLOCK_MAGIC_LO_V3 && hapSignBlockMagicHi == HAP_SIG_BLOCK_MAGIC_HI_V3;
    }

    /**
     * Hap sign block info
     */
    public static class HapSignBlockInfo {
        private final long offset;
        private final int version;
        private final ByteBuffer content;

        public HapSignBlockInfo(long offset, int version, ByteBuffer content) {
            this.offset = offset;
            this.version = version;
            this.content = content;
        }

        public int getVersion() {
            return version;
        }

        public ByteBuffer getContent() {
            return content;
        }

        public long getOffset() {
            return offset;
        }
    }
}