/*
 * Copyright (c) 2021-2022 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.ohos.hapsigntool.utils;

import com.ohos.hapsigntool.hap.entity.Pair;
import com.ohos.hapsigntool.hap.entity.SigningBlock;
import com.ohos.hapsigntool.hap.exception.SignatureNotFoundException;
import com.ohos.hapsigntool.hap.sign.ContentDigestAlgorithm;
import com.ohos.hapsigntool.hap.sign.SignHap;
import com.ohos.hapsigntool.zip.MessageDigestZipDataOutput;
import com.ohos.hapsigntool.zip.ZipDataInput;
import com.ohos.hapsigntool.zip.ZipDataOutput;
import com.ohos.hapsigntool.zip.ZipFileInfo;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.bouncycastle.util.Arrays;

import java.io.ByteArrayOutputStream;
import java.io.FileInputStream;
import java.io.IOException;
import java.nio.BufferUnderflowException;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.security.DigestException;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

/**
 * Hap utilities: parse a hap file and find its signature block.
 *
 * @since 2021/12/20
 */
public class HapUtils {
    private static final Logger LOGGER = LogManager.getLogger(HapUtils.class);

    /**
     * ID of hap signature blocks of version 1
     */
    public static final int HAP_SIGNATURE_SCHEME_V1_BLOCK_ID = 0x20000000;

    /**
     * ID of hap proof of rotation block
     */
    public static final int HAP_PROOF_OF_ROTATION_BLOCK_ID = 0x20000001;

    /**
     * ID of profile block
     */
    public static final int HAP_PROFILE_BLOCK_ID = 0x20000002;

    /**
     * ID of property block
     */
    public static final int HAP_PROPERTY_BLOCK_ID = 0x20000003;

    /**
     * The max size of a data chunk used to compute digests
     */
    public static final int CONTENT_DIGESTED_CHUNK_MAX_SIZE_BYTES = 1024 * 1024;

    /**
     * Content version
     */
    public static final int CONTENT_VERSION = 2;

    /**
     * Number of bits in a byte
     */
    public static final int BIT_SIZE = 8;

    /**
     * Half the number of bits in a byte
     */
    public static final int HALF_BIT_SIZE = 4;

    /**
     * Size of an int in bytes
     */
    public static final int INT_SIZE = 4;

    /**
     * Number of blocks
     */
    public static final int BLOCK_NUMBER = 1;

    private HapUtils() {
    }

    /**
     * The set of IDs of optional blocks in hap signature block.
     */
    private static final Set<Integer> HAP_SIGNATURE_OPTIONAL_BLOCK_IDS;

    /**
     * Magic word of hap signature block.
     */
    private static final byte[] HAP_SIGNING_BLOCK_MAGIC =
        new byte[] {0x3c, 0x68, 0x61, 0x70, 0x20, 0x73, 0x69, 0x67, 0x6e, 0x20, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x3e};
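
    // The 16-byte magic word above is the ASCII string "<hap sign block>"; the two constants below
    // hold its lower and upper 8 bytes read as little-endian long values.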

    /**
     * The value of lower 8 bytes of magic word
     */
    public static final long HAP_SIG_BLOCK_MAGIC_LO = 0x676973207061683cL;

    /**
     * The value of higher 8 bytes of magic word
     */
    public static final long HAP_SIG_BLOCK_MAGIC_HI = 0x3e6b636f6c62206eL;

    /**
     * Size of hap signature block header
     */
    public static final int HAP_SIG_BLOCK_HEADER_SIZE = 32;

    /**
     * The min size of hap signature block
     */
    public static final int HAP_SIG_BLOCK_MIN_SIZE = HAP_SIG_BLOCK_HEADER_SIZE;
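
    // As parsed by findHapSigningBlock below, the 32-byte signing block header consists of an int32
    // block count, an int64 signing block size, the 16-byte magic word and an int32 version,
    // all in little-endian byte order.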

    private static final byte ZIP_FIRST_LEVEL_CHUNK_PREFIX = 0x5a;
    private static final byte ZIP_SECOND_LEVEL_CHUNK_PREFIX = (byte) 0xa5;
    private static final int DIGEST_PRIFIX_LENGTH = 5;
    private static final int BUFFER_LENGTH = 4096;

    /**
     * Get HAP_SIGNATURE_OPTIONAL_BLOCK_IDS
     *
     * @return HAP_SIGNATURE_OPTIONAL_BLOCK_IDS
     */
    public static Set<Integer> getHapSignatureOptionalBlockIds() {
        return HAP_SIGNATURE_OPTIONAL_BLOCK_IDS;
    }

    /**
     * Get HAP_SIGNING_BLOCK_MAGIC
     *
     * @return HAP_SIGNING_BLOCK_MAGIC
     */
    public static byte[] getHapSigningBlockMagic() {
        return HAP_SIGNING_BLOCK_MAGIC;
    }

    /**
     * Initialize the set of IDs of optional blocks in hap signature block.
     */
    static {
        Set<Integer> blockIds = new HashSet<Integer>();
        blockIds.add(HAP_PROOF_OF_ROTATION_BLOCK_ID);
        blockIds.add(HAP_PROFILE_BLOCK_ID);
        blockIds.add(HAP_PROPERTY_BLOCK_ID);
        HAP_SIGNATURE_OPTIONAL_BLOCK_IDS = Collections.unmodifiableSet(blockIds);
    }

    /**
     * Read data from hap file.
     *
     * @param file input file path.
     * @return byte array of the file content.
     * @throws IOException on error.
     */
    public static byte[] readFileToByte(String file) throws IOException {
        try (FileInputStream in = new FileInputStream(file);
            ByteArrayOutputStream out = new ByteArrayOutputStream(in.available())) {
            byte[] buf = new byte[BUFFER_LENGTH];
            int len = 0;
            while ((len = in.read(buf)) != -1) {
                out.write(buf, 0, len);
            }
            return out.toByteArray();
        }
    }

    private static long getChunkCount(ZipDataInput[] contents) {
        long chunkCount = 0L;
        for (ZipDataInput content : contents) {
            chunkCount += ((content.size() + CONTENT_DIGESTED_CHUNK_MAX_SIZE_BYTES - 1)
                    / CONTENT_DIGESTED_CHUNK_MAX_SIZE_BYTES);
        }
        return chunkCount;
    }
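
    // Digest scheme implemented by computeDigests: the content is split into chunks of at most
    // CONTENT_DIGESTED_CHUNK_MAX_SIZE_BYTES bytes; each chunk is hashed with the 5-byte prefix
    // {0xa5, uint32 chunk size (little-endian)}, and the final digest is computed over
    // {0x5a, uint32 chunk count (little-endian), all chunk digests} followed by the optional blocks.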

    /**
     * Compute digests of contents.
     *
     * @param digestAlgorithms digest algorithms to use
     * @param zipData content used to compute digests
     * @param optionalBlocks list of optional blocks included in the digests
     * @return map from each digest algorithm to the computed digest
     * @throws DigestException digest error
     * @throws IOException if an IO error occurs when computing the hap file digest
     */
    public static Map<ContentDigestAlgorithm, byte[]> computeDigests(
            Set<ContentDigestAlgorithm> digestAlgorithms, ZipDataInput[] zipData, List<SigningBlock> optionalBlocks)
            throws DigestException, IOException {
        long chunkCountLong = getChunkCount(zipData);
        if (chunkCountLong > Integer.MAX_VALUE) {
            throw new DigestException("Input too long: " + chunkCountLong + " chunks");
        }
        int chunkCount = (int) chunkCountLong;
        ContentDigestAlgorithm[] contentDigestAlgorithms = digestAlgorithms.toArray(
                new ContentDigestAlgorithm[digestAlgorithms.size()]);
        MessageDigest[] messageDigests = new MessageDigest[contentDigestAlgorithms.length];
        int[] digestOutputSizes = new int[contentDigestAlgorithms.length];
        byte[][] digestOfChunks = new byte[contentDigestAlgorithms.length][];
        initComputeItem(chunkCount, contentDigestAlgorithms, messageDigests, digestOutputSizes, digestOfChunks);
        int chunkIndex = 0;
        byte[] chunkContentPrefix = new byte[DIGEST_PRIFIX_LENGTH];
        chunkContentPrefix[0] = ZIP_SECOND_LEVEL_CHUNK_PREFIX;
        byte[] buf = new byte[CONTENT_DIGESTED_CHUNK_MAX_SIZE_BYTES];
        ZipDataOutput digests = new MessageDigestZipDataOutput(messageDigests);
        for (ZipDataInput content : zipData) {
            long offset = 0L;
            long remaining = content.size();
            while (remaining > 0) {
                int chunkSize = (int) Math.min(buf.length, remaining);
                setUInt32ToByteArrayWithLittleEngian(chunkSize, chunkContentPrefix, 1);
                for (int i = 0; i < contentDigestAlgorithms.length; i++) {
                    messageDigests[i].update(chunkContentPrefix);
                }
                try {
                    content.copyTo(offset, chunkSize, digests);
                } catch (IOException e) {
                    throw new IOException("Failed to read chunk #" + chunkIndex, e);
                }
                getDigests(contentDigestAlgorithms, digestOutputSizes, messageDigests, digestOfChunks, chunkIndex);
                offset += chunkSize;
                remaining -= chunkSize;
                chunkIndex++;
            }
        }
        return getContentDigestAlgorithmMap(optionalBlocks, contentDigestAlgorithms, messageDigests, digestOfChunks);
    }

    private static void getDigests(ContentDigestAlgorithm[] contentDigestAlgorithms, int[] digestOutputSizes,
            MessageDigest[] messageDigests, byte[][] digestOfChunks, int chunkIndex) throws DigestException {
        for (int i = 0; i < contentDigestAlgorithms.length; i++) {
            int expectedDigestSizeBytes = digestOutputSizes[i];
            int actualDigestSizeBytes = messageDigests[i].digest(digestOfChunks[i],
                    chunkIndex * expectedDigestSizeBytes + DIGEST_PRIFIX_LENGTH, expectedDigestSizeBytes);
            if (actualDigestSizeBytes != expectedDigestSizeBytes) {
                throw new DigestException("Unexpected output size of " + messageDigests[i].getAlgorithm()
                        + " digest: " + actualDigestSizeBytes);
            }
        }
    }

    private static void initComputeItem(int chunkCount, ContentDigestAlgorithm[] contentDigestAlgorithms,
            MessageDigest[] messageDigests, int[] digestOutputSizes, byte[][] digestOfChunks) throws DigestException {
        try {
            for (int i = 0; i < contentDigestAlgorithms.length; i++) {
                int digestOutputSizeBytes = contentDigestAlgorithms[i].getDigestOutputByteSize();
                byte[] concatenationOfChunkCountAndChunkDigests =
                        new byte[DIGEST_PRIFIX_LENGTH + chunkCount * digestOutputSizeBytes];
                concatenationOfChunkCountAndChunkDigests[0] = ZIP_FIRST_LEVEL_CHUNK_PREFIX;
                setUInt32ToByteArrayWithLittleEngian(chunkCount, concatenationOfChunkCountAndChunkDigests, 1);
                digestOfChunks[i] = concatenationOfChunkCountAndChunkDigests;
                messageDigests[i] = MessageDigest.getInstance(contentDigestAlgorithms[i].getDigestAlgorithm());
                digestOutputSizes[i] = digestOutputSizeBytes;
            }
        } catch (NoSuchAlgorithmException e) {
            throw new DigestException("Digest algorithm not supported", e);
        }
    }

    private static Map<ContentDigestAlgorithm, byte[]> getContentDigestAlgorithmMap(List<SigningBlock> optionalBlocks,
            ContentDigestAlgorithm[] contentDigestAlgorithms, MessageDigest[] messageDigests,
            byte[][] digestOfChunks) {
        Map<ContentDigestAlgorithm, byte[]> result = new HashMap<>(contentDigestAlgorithms.length);
        for (int i = 0; i < contentDigestAlgorithms.length; i++) {
            messageDigests[i].update(digestOfChunks[i]);
            for (SigningBlock signingBlock : optionalBlocks) {
                messageDigests[i].update(signingBlock.getValue());
            }
            result.put(contentDigestAlgorithms[i], messageDigests[i].digest());
        }
        return result;
    }

    private static void setUInt32ToByteArrayWithLittleEngian(int value, byte[] result, int offset) {
        for (int i = 0; i < INT_SIZE; i++) {
            result[offset + i] = (byte) ((value >> (BIT_SIZE * i)) & 0xff);
        }
    }
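
    // For example, setUInt32ToByteArrayWithLittleEngian(0x00010203, result, 1) writes
    // {0x03, 0x02, 0x01, 0x00} into result[1..4], i.e. the value in little-endian byte order.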

    private static final char[] HEX_CHAR_ARRAY = "0123456789ABCDEF".toCharArray();

    /**
     * Slice buffer to target size.
     *
     * @param source input data buffer
     * @param targetSize target buffer's size
     * @return target buffer of target size
     */
    public static ByteBuffer sliceBuffer(ByteBuffer source, int targetSize) {
        int limit = source.limit();
        int position = source.position();
        int targetLimit = position + targetSize;
        if ((targetLimit < position) || (targetLimit > limit)) {
            LOGGER.error("targetSize: " + targetSize);
            throw new BufferUnderflowException();
        }
        try {
            source.limit(targetLimit);
            ByteBuffer target = source.slice();
            target.order(source.order());
            return target;
        } finally {
            source.position(targetLimit);
            source.limit(limit);
        }
    }

    private static ByteBuffer sliceBuffer(ByteBuffer source, int startPos, int endPos) {
        int capacity = source.capacity();
        if (startPos < 0 || endPos < startPos || endPos > capacity) {
            throw new IllegalArgumentException(
                    "startPos: " + startPos + ", endPos: " + endPos + ", capacity: " + capacity);
        }
        int limit = source.limit();
        int position = source.position();
        try {
            source.position(0);
            source.limit(endPos);
            source.position(startPos);
            ByteBuffer target = source.slice();
            target.order(source.order());
            return target;
        } finally {
            source.limit(limit);
            source.position(position);
        }
    }

    /**
     * Slice buffer from startPos to endPos, and then reverse it.
     *
     * @param hapSigningBlock input buffer used to slice.
     * @param startPos start position of slice buffer.
     * @param endPos end position of slice buffer.
     * @return new buffer.
     */
    public static ByteBuffer reverseSliceBuffer(ByteBuffer hapSigningBlock, int startPos, int endPos) {
        ByteBuffer header = HapUtils.sliceBuffer(hapSigningBlock, startPos, endPos);
        byte[] signatureBlockBytes = new byte[header.capacity()];
        header.get(signatureBlockBytes, 0, signatureBlockBytes.length);
        return ByteBuffer.wrap(Arrays.reverse(signatureBlockBytes));
    }

    /**
     * Check whether buffer is little-endian.
     *
     * @param buffer ByteBuffer used to check
     */
    public static void checkBufferLittleEndian(ByteBuffer buffer) {
        if (buffer.order() == ByteOrder.LITTLE_ENDIAN) {
            return;
        }
        throw new IllegalArgumentException("ByteBuffer is not little endian");
    }

    /**
     * TLV encode a list of pairs.
     *
     * @param pairList input list of pairs
     * @return byte array after encoding
     */
    public static byte[] encodeListOfPairsToByteArray(List<Pair<Integer, byte[]>> pairList) {
        int encodeSize = 0;
        encodeSize += INT_SIZE + INT_SIZE;
        for (Pair<Integer, byte[]> pair : pairList) {
            encodeSize += INT_SIZE + INT_SIZE + INT_SIZE + pair.getSecond().length;
        }
        ByteBuffer encodeBytes = ByteBuffer.allocate(encodeSize);
        encodeBytes.order(ByteOrder.LITTLE_ENDIAN);
        encodeBytes.putInt(CONTENT_VERSION); // version
        encodeBytes.putInt(BLOCK_NUMBER); // block number
        for (Pair<Integer, byte[]> pair : pairList) {
            byte[] second = pair.getSecond();
            encodeBytes.putInt(INT_SIZE + INT_SIZE + second.length);
            encodeBytes.putInt(pair.getFirst());
            encodeBytes.putInt(second.length);
            encodeBytes.put(second);
        }
        return encodeBytes.array();
    }
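
    // Layout produced by encodeListOfPairsToByteArray (all fields little-endian):
    // [int32 version][int32 block number], then for each pair:
    // [int32 length = 8 + value.length][int32 id][int32 value.length][value bytes].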

    /**
     * Translate value to hex string.
     *
     * @param value input byte array.
     * @param separator symbol inserted between two bytes.
     * @return a hex string.
     */
    public static String toHex(byte[] value, String separator) {
        StringBuilder sb = new StringBuilder(value.length + value.length);
        String useSeparator = separator == null ? "" : separator;
        int len = value.length;
        for (int i = 0; i < len; i++) {
            int hi = (value[i] & 0xff) >>> HALF_BIT_SIZE;
            int lo = value[i] & 0x0f;
            sb.append(HEX_CHAR_ARRAY[hi]).append(HEX_CHAR_ARRAY[lo]);
            if (i != len - 1) {
                sb.append(useSeparator);
            }
        }
        return sb.toString();
    }
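
    // For example, toHex(new byte[] {0x0a, (byte) 0xff}, ":") returns "0A:FF";
    // a null separator simply concatenates the bytes: "0AFF".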

    /**
     * Find signing block in hap file.
     *
     * @param hap ZipDataInput object of zip file
     * @param zipInfo ZipFileInfo object of hap file
     * @return pair of offset of signing block and data of signing block
     * @throws SignatureNotFoundException if no signing block is found
     * @throws IOException file operation error
     */
    public static Pair<Long, ByteBuffer> findHapSigningBlock(ZipDataInput hap, ZipFileInfo zipInfo)
            throws SignatureNotFoundException, IOException {
        long centralDirectoryStartOffset = zipInfo.getCentralDirectoryOffset();
        long centralDirectorySize = zipInfo.getCentralDirectorySize();
        long eocdOffset = zipInfo.getEocdOffset();
        long centralDirectoryEndOffset = centralDirectoryStartOffset + centralDirectorySize;
        if (eocdOffset != centralDirectoryEndOffset) {
            throw new SignatureNotFoundException("ZIP Central Directory is not immediately followed by End of"
                    + " Central Directory. CD end: " + centralDirectoryEndOffset + ", EoCD start: " + eocdOffset);
        }
        if (centralDirectoryStartOffset < HAP_SIG_BLOCK_MIN_SIZE) {
            throw new SignatureNotFoundException("Hap too small for Hap Signing Block. ZIP Central Directory offset: "
                    + centralDirectoryStartOffset);
        }
        long hapSigningBlockHeaderOffset = centralDirectoryStartOffset - HAP_SIG_BLOCK_HEADER_SIZE;
        ByteBuffer hapSigningBlockHeader = hap.createByteBuffer(hapSigningBlockHeaderOffset,
                HAP_SIG_BLOCK_HEADER_SIZE);
        hapSigningBlockHeader.order(ByteOrder.LITTLE_ENDIAN);
        int blockCount = hapSigningBlockHeader.getInt();
        long hapSigBlockSize = hapSigningBlockHeader.getLong();
        long hapSignBlockMagicLo = hapSigningBlockHeader.getLong();
        long hapSignBlockMagicHi = hapSigningBlockHeader.getLong();
        int version = hapSigningBlockHeader.getInt();
        if ((hapSignBlockMagicLo != HAP_SIG_BLOCK_MAGIC_LO) || (hapSignBlockMagicHi != HAP_SIG_BLOCK_MAGIC_HI)) {
            throw new SignatureNotFoundException("No Hap Signing Block before ZIP Central Directory");
        }
        if ((hapSigBlockSize < HAP_SIG_BLOCK_HEADER_SIZE)
                || (hapSigBlockSize > Integer.MAX_VALUE - SignHap.getBlockSize())) {
            throw new SignatureNotFoundException("Hap Signing Block size out of range: " + hapSigBlockSize);
        }
        int totalSize = (int) hapSigBlockSize;
        long hapSigningBlockOffset = centralDirectoryStartOffset - totalSize;
        if (hapSigningBlockOffset < 0) {
            throw new SignatureNotFoundException("Hap Signing Block offset out of range: " + hapSigningBlockOffset);
        }
        ByteBuffer hapSigningBlockByteBuffer = hap.createByteBuffer(hapSigningBlockOffset, totalSize)
                .order(ByteOrder.LITTLE_ENDIAN);
        LOGGER.info("Find Hap Signing Block success, version: {}, block count: {}", version, blockCount);
        return Pair.create(hapSigningBlockOffset, hapSigningBlockByteBuffer);
    }
}