/*
 * Copyright (c) 2021-2023 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.ohos.hapsigntool.hap.utils;

import com.ohos.hapsigntool.entity.Pair;
import com.ohos.hapsigntool.hap.entity.SigningBlock;
import com.ohos.hapsigntool.error.SignatureNotFoundException;
import com.ohos.hapsigntool.entity.ContentDigestAlgorithm;
import com.ohos.hapsigntool.zip.MessageDigestZipDataOutput;
import com.ohos.hapsigntool.zip.ZipDataInput;
import com.ohos.hapsigntool.zip.ZipDataOutput;
import com.ohos.hapsigntool.zip.ZipFileInfo;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.bouncycastle.util.Arrays;

import java.io.ByteArrayOutputStream;
import java.io.FileInputStream;
import java.io.IOException;
import java.nio.BufferUnderflowException;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.security.DigestException;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

/**
 * Hap utilities: parse a hap file and locate its signing block.
 *
 * @since 2021/12/20
 */
public class HapUtils {
    private static final Logger LOGGER = LogManager.getLogger(HapUtils.class);

    /**
     * ID of hap signature blocks of version 1
     */
    public static final int HAP_SIGNATURE_SCHEME_V1_BLOCK_ID = 0x20000000;

    /**
     * ID of hap proof of rotation block
     */
    public static final int HAP_PROOF_OF_ROTATION_BLOCK_ID = 0x20000001;

    /**
     * ID of profile block
     */
    public static final int HAP_PROFILE_BLOCK_ID = 0x20000002;

    /**
     * ID of property block
     */
    public static final int HAP_PROPERTY_BLOCK_ID = 0x20000003;

    /**
     * ID of code sign block
     */
    public static final int HAP_CODE_SIGN_BLOCK_ID = 0x30000001;

    /**
     * The maximum size of a data chunk used to compute digests
     */
    public static final int CONTENT_DIGESTED_CHUNK_MAX_SIZE_BYTES = 1024 * 1024;

    /**
     * Content version
     */
    public static final int CONTENT_VERSION = 2;

    /**
     * Number of bits in a byte
     */
    public static final int BIT_SIZE = 8;

    /**
     * Number of bits in a half byte
     */
    public static final int HALF_BIT_SIZE = 4;

    /**
     * Size of an int in bytes
     */
    public static final int INT_SIZE = 4;

    /**
     * Block count written into an encoded block header
     */
    public static final int BLOCK_NUMBER = 1;

    /**
     * hap sign scheme v2 signature block version
     */
    public static final int HAP_SIGN_SCHEME_V2_BLOCK_VERSION = 2;

    /**
     * hap sign scheme v3 signature block version
     */
    public static final int HAP_SIGN_SCHEME_V3_BLOCK_VERSION = 3;

    /**
     * The value of the lower 8 bytes of the old (v2) magic word
     */
    public static final long HAP_SIG_BLOCK_MAGIC_LO_V2 = 0x2067695320504148L;

    /**
     * The value of the higher 8 bytes of the old (v2) magic word
     */
    public static final long HAP_SIG_BLOCK_MAGIC_HI_V2 = 0x3234206b636f6c42L;

    /**
     * The value of the lower 8 bytes of the v3 magic word
     */
    public static final long HAP_SIG_BLOCK_MAGIC_LO_V3 = 0x676973207061683cL;

    /**
     * The value of the higher 8 bytes of the v3 magic word
     */
    public static final long HAP_SIG_BLOCK_MAGIC_HI_V3 = 0x3e6b636f6c62206eL;

    /**
     * Size of hap signature block header
     */
    public static final int HAP_SIG_BLOCK_HEADER_SIZE = 32;

    /**
     * The min size of hap signature block
     */
    public static final int HAP_SIG_BLOCK_MIN_SIZE = HAP_SIG_BLOCK_HEADER_SIZE;

    /**
     * hap block size
     */
    public static final int BLOCK_SIZE = 8;

    /**
     * The set of IDs of optional blocks in hap signature block.
     */
    private static final Set<Integer> HAP_SIGNATURE_OPTIONAL_BLOCK_IDS;

    /**
     * Minimum api version for hap sign scheme v3.
     */
    private static final int MIN_COMPATIBLE_VERSION_FOR_SCHEMA_V3 = 8;

    /**
     * Magic word of hap signature block v2 ("HAP Sig Block 42")
     */
    private static final byte[] HAP_SIGNING_BLOCK_MAGIC_V2 =
            new byte[] {0x48, 0x41, 0x50, 0x20, 0x53, 0x69, 0x67, 0x20, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x20, 0x34, 0x32};

    /**
     * Magic word of hap signature block v3 ({@code <hap sign block>})
     */
    private static final byte[] HAP_SIGNING_BLOCK_MAGIC_V3 =
            new byte[] {0x3c, 0x68, 0x61, 0x70, 0x20, 0x73, 0x69, 0x67, 0x6e, 0x20, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x3e};

    private static final byte ZIP_FIRST_LEVEL_CHUNK_PREFIX = 0x5a;
    private static final byte ZIP_SECOND_LEVEL_CHUNK_PREFIX = (byte) 0xa5;
    private static final int DIGEST_PRIFIX_LENGTH = 5;
    private static final int BUFFER_LENGTH = 4096;
    private static final char[] HEX_CHAR_ARRAY = "0123456789ABCDEF".toCharArray();

    /**
     * Initialize the set of IDs of optional blocks in hap signature block.
     */
    static {
        Set<Integer> blockIds = new HashSet<Integer>();
        blockIds.add(HAP_PROOF_OF_ROTATION_BLOCK_ID);
        blockIds.add(HAP_PROFILE_BLOCK_ID);
        blockIds.add(HAP_PROPERTY_BLOCK_ID);
        HAP_SIGNATURE_OPTIONAL_BLOCK_IDS = Collections.unmodifiableSet(blockIds);
    }

    private HapUtils() {
    }

    /**
     * Get HAP_SIGNATURE_OPTIONAL_BLOCK_IDS
     *
     * @return HAP_SIGNATURE_OPTIONAL_BLOCK_IDS
     */
    public static Set<Integer> getHapSignatureOptionalBlockIds() {
        return HAP_SIGNATURE_OPTIONAL_BLOCK_IDS;
    }

    /**
     * Get HAP_SIGNING_BLOCK_MAGIC
     *
     * @param compatibleVersion compatible api version
     * @return HAP_SIGNING_BLOCK_MAGIC
     */
    public static byte[] getHapSigningBlockMagic(int compatibleVersion) {
        if (compatibleVersion >= MIN_COMPATIBLE_VERSION_FOR_SCHEMA_V3) {
            return HAP_SIGNING_BLOCK_MAGIC_V3.clone();
        }
        return HAP_SIGNING_BLOCK_MAGIC_V2.clone();
    }

    /**
     * Get version number of hap signature block
     *
     * @param compatibleVersion compatible api version
     * @return version number of the hap signature block
     */
    public static int getHapSigningBlockVersion(int compatibleVersion) {
        if (compatibleVersion >= MIN_COMPATIBLE_VERSION_FOR_SCHEMA_V3) {
            return HAP_SIGN_SCHEME_V3_BLOCK_VERSION;
        }
        return HAP_SIGN_SCHEME_V2_BLOCK_VERSION;
    }
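
    /*
     * Selection sketch with illustrative values: a compatible api version of 8 or
     * above selects the v3 block format, anything lower falls back to v2.
     *
     *   int v3 = HapUtils.getHapSigningBlockVersion(8);       // 3
     *   byte[] magicV3 = HapUtils.getHapSigningBlockMagic(8); // "<hap sign block>"
     *   int v2 = HapUtils.getHapSigningBlockVersion(7);       // 2
     *   byte[] magicV2 = HapUtils.getHapSigningBlockMagic(7); // "HAP Sig Block 42"
     */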

    /**
     * Read data from hap file.
     *
     * @param file input file path.
     * @return byte array of the file content.
     * @throws IOException on error.
     */
    public static byte[] readFileToByte(String file) throws IOException {
        try (FileInputStream in = new FileInputStream(file);
             ByteArrayOutputStream out = new ByteArrayOutputStream(in.available());) {
            byte[] buf = new byte[BUFFER_LENGTH];
            int len = 0;
            while ((len = in.read(buf)) != -1) {
                out.write(buf, 0, len);
            }
            return out.toByteArray();
        }
    }

    // Number of chunks needed to cover all input contents, with each chunk at most
    // CONTENT_DIGESTED_CHUNK_MAX_SIZE_BYTES bytes.
    private static long getChunkCount(ZipDataInput[] contents) {
        long chunkCount = 0L;
        for (ZipDataInput content : contents) {
            chunkCount += ((content.size() + CONTENT_DIGESTED_CHUNK_MAX_SIZE_BYTES - 1)
                    / CONTENT_DIGESTED_CHUNK_MAX_SIZE_BYTES);
        }
        return chunkCount;
    }

    /**
     * Compute digests of contents
     *
     * @param digestAlgorithms algorithms used to compute digests
     * @param zipData content used to compute digests
     * @param optionalBlocks list of optional blocks included in the digests
     * @return map from digest algorithm to the computed digest
     * @throws DigestException digest error
     * @throws IOException if an IO error occurs while computing the hap file digests
     */
    public static Map<ContentDigestAlgorithm, byte[]> computeDigests(
            Set<ContentDigestAlgorithm> digestAlgorithms, ZipDataInput[] zipData, List<SigningBlock> optionalBlocks)
            throws DigestException, IOException {
        long chunkCountLong = getChunkCount(zipData);
        if (chunkCountLong > Integer.MAX_VALUE) {
            throw new DigestException("Input too long: " + chunkCountLong + " chunks");
        }
        int chunkCount = (int) chunkCountLong;
        ContentDigestAlgorithm[] contentDigestAlgorithms = digestAlgorithms.toArray(
                new ContentDigestAlgorithm[digestAlgorithms.size()]);
        MessageDigest[] messageDigests = new MessageDigest[contentDigestAlgorithms.length];
        int[] digestOutputSizes = new int[contentDigestAlgorithms.length];
        byte[][] digestOfChunks = new byte[contentDigestAlgorithms.length][];
        initComputeItem(chunkCount, contentDigestAlgorithms, messageDigests, digestOutputSizes, digestOfChunks);
        int chunkIndex = 0;
        byte[] chunkContentPrefix = new byte[DIGEST_PRIFIX_LENGTH];
        chunkContentPrefix[0] = ZIP_SECOND_LEVEL_CHUNK_PREFIX;
        byte[] buf = new byte[CONTENT_DIGESTED_CHUNK_MAX_SIZE_BYTES];
        ZipDataOutput digests = new MessageDigestZipDataOutput(messageDigests);
        for (ZipDataInput content : zipData) {
            long offset = 0L;
            long remaining = content.size();
            while (remaining > 0) {
                int chunkSize = (int) Math.min(buf.length, remaining);
                setUInt32ToByteArrayWithLittleEngian(chunkSize, chunkContentPrefix, 1);
                for (int i = 0; i < contentDigestAlgorithms.length; i++) {
                    messageDigests[i].update(chunkContentPrefix);
                }
                try {
                    content.copyTo(offset, chunkSize, digests);
                } catch (IOException e) {
                    throw new IOException("Failed to read chunk #" + chunkIndex, e);
                }

                getDigests(contentDigestAlgorithms, digestOutputSizes, messageDigests, digestOfChunks, chunkIndex);
                offset += chunkSize;
                remaining -= chunkSize;
                chunkIndex++;
            }
        }
        return getContentDigestAlgorithmMap(optionalBlocks, contentDigestAlgorithms, messageDigests, digestOfChunks);
    }
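
    /*
     * Digest scheme implemented above: each input is split into chunks of at most
     * CONTENT_DIGESTED_CHUNK_MAX_SIZE_BYTES bytes; every chunk is digested with the
     * prefix 0xa5 || uint32-LE(chunkSize); the per-chunk digests are concatenated
     * behind the prefix 0x5a || uint32-LE(chunkCount) and, together with the values
     * of the optional blocks, digested once more per algorithm.
     *
     * A minimal call sketch, assuming digestAlgorithms, zipData and optionalBlocks
     * have been prepared by the caller's signing flow:
     *
     *   Map<ContentDigestAlgorithm, byte[]> digests =
     *           HapUtils.computeDigests(digestAlgorithms, zipData, optionalBlocks);
     *   String hex = HapUtils.toHex(digests.values().iterator().next(), "");
     */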

    // Finalize the digest of the current chunk for every algorithm and store it at
    // its slot in digestOfChunks.
    private static void getDigests(ContentDigestAlgorithm[] contentDigestAlgorithms, int[] digestOutputSizes,
        MessageDigest[] messageDigests, byte[][] digestOfChunks, int chunkIndex) throws DigestException {
        for (int i = 0; i < contentDigestAlgorithms.length; i++) {
            int expectedDigestSizeBytes = digestOutputSizes[i];
            int actualDigestSizeBytes = messageDigests[i].digest(digestOfChunks[i],
                    chunkIndex * expectedDigestSizeBytes + DIGEST_PRIFIX_LENGTH, expectedDigestSizeBytes);
            if (actualDigestSizeBytes != expectedDigestSizeBytes) {
                throw new DigestException("Unexpected output size of " + messageDigests[i].getAlgorithm()
                        + " digest: " + actualDigestSizeBytes);
            }
        }
    }

    // Create a MessageDigest per algorithm and allocate the buffer that will hold the
    // 0x5a prefix, the chunk count and all chunk digests.
    private static void initComputeItem(int chunkCount, ContentDigestAlgorithm[] contentDigestAlgorithms,
                                        MessageDigest[] messageDigests, int[] digestOutputSizes,
                                        byte[][] digestOfChunks) throws DigestException {
        try {
            for (int i = 0; i < contentDigestAlgorithms.length; i++) {
                int digestOutputSizeBytes = contentDigestAlgorithms[i].getDigestOutputByteSize();
                byte[] concatenationOfChunkCountAndChunkDigests =
                        new byte[DIGEST_PRIFIX_LENGTH + chunkCount * digestOutputSizeBytes];
                concatenationOfChunkCountAndChunkDigests[0] = ZIP_FIRST_LEVEL_CHUNK_PREFIX;
                setUInt32ToByteArrayWithLittleEngian(chunkCount, concatenationOfChunkCountAndChunkDigests, 1);
                digestOfChunks[i] = concatenationOfChunkCountAndChunkDigests;
                messageDigests[i] = MessageDigest.getInstance(contentDigestAlgorithms[i].getDigestAlgorithm());
                digestOutputSizes[i] = contentDigestAlgorithms[i].getDigestOutputByteSize();
            }
        } catch (NoSuchAlgorithmException e) {
            throw new DigestException("Digest algorithm not supported", e);
        }
    }

    // Produce the final digest per algorithm over the concatenated chunk digests and
    // the values of the optional blocks.
    private static Map<ContentDigestAlgorithm, byte[]> getContentDigestAlgorithmMap(List<SigningBlock> optionalBlocks,
        ContentDigestAlgorithm[] contentDigestAlgorithms, MessageDigest[] messageDigests, byte[][] digestOfChunks) {
        Map<ContentDigestAlgorithm, byte[]> result = new HashMap<>(contentDigestAlgorithms.length);
        for (int i = 0; i < contentDigestAlgorithms.length; i++) {
            messageDigests[i].update(digestOfChunks[i]);
            for (SigningBlock signingBlock : optionalBlocks) {
                messageDigests[i].update(signingBlock.getValue());
            }
            result.put(contentDigestAlgorithms[i], messageDigests[i].digest());
        }
        return result;
    }

    // Write the lower 32 bits of value into result at offset in little-endian order,
    // e.g. 0x00012345 becomes {0x45, 0x23, 0x01, 0x00}.
    private static void setUInt32ToByteArrayWithLittleEngian(int value, byte[] result, int offset) {
        for (int i = 0; i < INT_SIZE; i++) {
            result[offset + i] = (byte) ((value >> (BIT_SIZE * i)) & 0xff);
        }
    }

    /**
     * Slice buffer to target size.
     *
     * @param source input data buffer
     * @param targetSize target buffer's size
     * @return target buffer of target size
     */
    public static ByteBuffer sliceBuffer(ByteBuffer source, int targetSize) {
        int limit = source.limit();
        int position = source.position();
        int targetLimit = position + targetSize;
        if ((targetLimit < position) || (targetLimit > limit)) {
            LOGGER.error("targetSize: " + targetSize);
            throw new BufferUnderflowException();
        }
        try {
            source.limit(targetLimit);
            ByteBuffer target = source.slice();
            target.order(source.order());
            return target;
        } finally {
            source.position(targetLimit);
            source.limit(limit);
        }
    }
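
    /*
     * Behaviour sketch with illustrative values: for a freshly allocated 16-byte
     * buffer, sliceBuffer(source, 4) returns a 4-byte view that shares the source's
     * content and byte order, advances the source's position past the slice and
     * restores the source's original limit.
     *
     *   ByteBuffer source = ByteBuffer.allocate(16).order(ByteOrder.LITTLE_ENDIAN);
     *   ByteBuffer head = HapUtils.sliceBuffer(source, 4);
     *   // head.remaining() == 4, source.position() == 4, source.limit() == 16
     */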

    // Slice [startPos, endPos) of source into a new buffer that keeps source's byte
    // order; source's position and limit are restored before returning.
    private static ByteBuffer sliceBuffer(ByteBuffer source, int startPos, int endPos) {
        int capacity = source.capacity();
        if (startPos < 0 || endPos < startPos || endPos > capacity) {
            throw new IllegalArgumentException(
                    "startPos: " + startPos + ", endPos: " + endPos + ", capacity: " + capacity);
        }
        int limit = source.limit();
        int position = source.position();
        try {
            source.position(0);
            source.limit(endPos);
            source.position(startPos);
            ByteBuffer target = source.slice();
            target.order(source.order());
            return target;
        } finally {
            source.limit(limit);
            source.position(position);
        }
    }

    /**
     * Slice buffer from startPos to endPos, and then reverse it.
     *
     * @param hapSigningBlock input buffer used to slice.
     * @param startPos start position of slice buffer.
     * @param endPos end position of slice buffer.
     * @return new buffer.
     */
    public static ByteBuffer reverseSliceBuffer(ByteBuffer hapSigningBlock, int startPos, int endPos) {
        ByteBuffer header = HapUtils.sliceBuffer(hapSigningBlock, startPos, endPos);
        byte[] signatureBlockBytes = new byte[header.capacity()];
        header.get(signatureBlockBytes, 0, signatureBlockBytes.length);
        return ByteBuffer.wrap(Arrays.reverse(signatureBlockBytes));
    }

    /**
     * Check whether buffer is little endian.
     *
     * @param buffer ByteBuffer used to check
     * @throws IllegalArgumentException if the buffer is not little endian
     */
    public static void checkBufferLittleEndian(ByteBuffer buffer) {
        if (buffer.order() == ByteOrder.LITTLE_ENDIAN) {
            return;
        }
        throw new IllegalArgumentException("ByteBuffer is not little endian");
    }

    /**
     * TLV encode list of pairs
     *
     * @param pairList input list of pairs
     * @return byte array after encoding
     */
    public static byte[] encodeListOfPairsToByteArray(List<Pair<Integer, byte[]>> pairList) {
        int encodeSize = 0;
        encodeSize += INT_SIZE + INT_SIZE;
        for (Pair<Integer, byte[]> pair : pairList) {
            encodeSize += INT_SIZE + INT_SIZE + INT_SIZE + pair.getSecond().length;
        }
        ByteBuffer encodeBytes = ByteBuffer.allocate(encodeSize);
        encodeBytes.order(ByteOrder.LITTLE_ENDIAN);
        encodeBytes.putInt(CONTENT_VERSION); // version
        encodeBytes.putInt(BLOCK_NUMBER); // block number
        for (Pair<Integer, byte[]> pair : pairList) {
            byte[] second = pair.getSecond();
            encodeBytes.putInt(INT_SIZE + INT_SIZE + second.length);
            encodeBytes.putInt(pair.getFirst());
            encodeBytes.putInt(second.length);
            encodeBytes.put(second);
        }
        return encodeBytes.array();
    }
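
    /*
     * Layout sketch of the encoded array produced above (all fields little-endian):
     *
     *   int32 version       = CONTENT_VERSION (2)
     *   int32 blockNumber   = BLOCK_NUMBER (1)
     *   for each pair:
     *     int32 blockSize   = 8 + value.length (id field + length field + value)
     *     int32 blockId     = pair.getFirst()
     *     int32 valueLength = value.length
     *     bytes value       = pair.getSecond()
     *
     * A minimal call, assuming pairList has been built by the caller:
     *
     *   byte[] encoded = HapUtils.encodeListOfPairsToByteArray(pairList);
     */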

    /**
     * Translate value to Hex string.
     *
     * @param value input byte array.
     * @param separator symbol inserted between every two bytes.
     * @return a hex-values string.
     */
    public static String toHex(byte[] value, String separator) {
        StringBuilder sb = new StringBuilder(value.length + value.length);
        String useSeparator = separator == null ? "" : separator;
        int len = value.length;
        for (int i = 0; i < len; i++) {
            int hi = (value[i] & 0xff) >>> HALF_BIT_SIZE;
            int lo = value[i] & 0x0f;
            sb.append(HEX_CHAR_ARRAY[hi]).append(HEX_CHAR_ARRAY[lo]);
            if (i != len - 1) {
                sb.append(useSeparator);
            }
        }
        return sb.toString();
    }
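
    /*
     * Worked example (illustrative values):
     *
     *   HapUtils.toHex(new byte[] {0x0a, (byte) 0xff}, ":");  // "0A:FF"
     *   HapUtils.toHex(new byte[] {0x0a, (byte) 0xff}, null); // "0AFF"
     */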

    /**
     * Find signing block from hap file
     *
     * @param hap ZipDataInput object of zip file
     * @param zipInfo ZipFileInfo object of hap file
     * @return offset, version and content of the signing block
     * @throws SignatureNotFoundException if no signing block is found
     * @throws IOException file operation error
     */
    public static HapSignBlockInfo findHapSigningBlock(ZipDataInput hap, ZipFileInfo zipInfo)
            throws SignatureNotFoundException, IOException {
        long centralDirectoryStartOffset = zipInfo.getCentralDirectoryOffset();
        long centralDirectorySize = zipInfo.getCentralDirectorySize();
        long eocdOffset = zipInfo.getEocdOffset();
        long centralDirectoryEndOffset = centralDirectoryStartOffset + centralDirectorySize;
        if (eocdOffset != centralDirectoryEndOffset) {
            throw new SignatureNotFoundException("ZIP Central Directory is not immediately followed by End of Central"
                    + " Directory. CD end: " + centralDirectoryEndOffset + ", EoCD start: " + eocdOffset);
        }
        if (centralDirectoryStartOffset < HAP_SIG_BLOCK_MIN_SIZE) {
            throw new SignatureNotFoundException("Hap too small for Hap Signing Block. ZIP Central Directory offset: "
                    + centralDirectoryStartOffset);
        }
        long hapSigningBlockHeaderOffset = centralDirectoryStartOffset - HAP_SIG_BLOCK_HEADER_SIZE;
        ByteBuffer hapSigningBlockHeader = hap.createByteBuffer(hapSigningBlockHeaderOffset, HAP_SIG_BLOCK_HEADER_SIZE);
        hapSigningBlockHeader.order(ByteOrder.LITTLE_ENDIAN);
        int blockCount = hapSigningBlockHeader.getInt();
        long hapSigBlockSize = hapSigningBlockHeader.getLong();
        long hapSignBlockMagicLo = hapSigningBlockHeader.getLong();
        long hapSignBlockMagicHi = hapSigningBlockHeader.getLong();
        int version = hapSigningBlockHeader.getInt();
        long hapSigningBlockOffset = verifySignBlock(hapSigBlockSize,
                hapSignBlockMagicLo, hapSignBlockMagicHi, version, centralDirectoryStartOffset);
        ByteBuffer hapSigningBlockByteBuffer = hap.createByteBuffer(hapSigningBlockOffset, (int) hapSigBlockSize)
                .order(ByteOrder.LITTLE_ENDIAN);
        LOGGER.info("Find Hap Signing Block success, version: {}, block count: {}", version, blockCount);
        return new HapSignBlockInfo(hapSigningBlockOffset, version, hapSigningBlockByteBuffer);
    }
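
    /*
     * The 32-byte header read above sits immediately before the ZIP Central
     * Directory and is laid out as (little-endian):
     *
     *   int32 blockCount | int64 hapSigBlockSize | int64 magicLo | int64 magicHi | int32 version
     *
     * A minimal lookup sketch, assuming hap and zipInfo have been produced by the
     * caller's zip parsing code:
     *
     *   HapSignBlockInfo signBlock = HapUtils.findHapSigningBlock(hap, zipInfo);
     *   ByteBuffer content = signBlock.getContent(); // little-endian view of the whole block
     *   long offset = signBlock.getOffset();         // file offset where the block starts
     */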

    // Validate the header fields and return the file offset at which the hap signing
    // block starts (central directory offset minus the block size).
    private static long verifySignBlock(long hapSigBlockSize, long hapSignBlockMagicLo,
        long hapSignBlockMagicHi, int version, long centralDirectoryStartOffset) throws SignatureNotFoundException {
        if (!isVersionAndMagicNumValid(version, hapSignBlockMagicLo, hapSignBlockMagicHi)) {
            throw new SignatureNotFoundException("No Hap Signing Block before ZIP Central Directory");
        }
        if ((hapSigBlockSize < HAP_SIG_BLOCK_HEADER_SIZE)
                || (hapSigBlockSize > Integer.MAX_VALUE - BLOCK_SIZE)) {
            throw new SignatureNotFoundException("Hap Signing Block size out of range: " + hapSigBlockSize);
        }
        int totalSize = (int) hapSigBlockSize;
        long hapSigningBlockOffset = centralDirectoryStartOffset - totalSize;
        if (hapSigningBlockOffset < 0) {
            throw new SignatureNotFoundException("Hap Signing Block offset out of range: " + hapSigningBlockOffset);
        }
        return hapSigningBlockOffset;
    }

    // Versions below v3 must carry the v2 magic word, v3 and above the v3 magic word.
    private static boolean isVersionAndMagicNumValid(int version, long hapSignBlockMagicLo, long hapSignBlockMagicHi) {
        if (version < HAP_SIGN_SCHEME_V3_BLOCK_VERSION) {
            return hapSignBlockMagicLo == HAP_SIG_BLOCK_MAGIC_LO_V2 && hapSignBlockMagicHi == HAP_SIG_BLOCK_MAGIC_HI_V2;
        }
        return hapSignBlockMagicLo == HAP_SIG_BLOCK_MAGIC_LO_V3 && hapSignBlockMagicHi == HAP_SIG_BLOCK_MAGIC_HI_V3;
    }

    /**
     * Hap sign block info
     */
    public static class HapSignBlockInfo {
        private final long offset;
        private final int version;
        private final ByteBuffer content;

        public HapSignBlockInfo(long offset, int version, ByteBuffer content) {
            this.offset = offset;
            this.version = version;
            this.content = content;
        }

        public int getVersion() {
            return version;
        }

        public ByteBuffer getContent() {
            return content;
        }

        public long getOffset() {
            return offset;
        }
    }
}