• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 /*
2  * Copyright (c) 2021-2023 Huawei Device Co., Ltd.
3  * Licensed under the Apache License, Version 2.0 (the "License");
4  * you may not use this file except in compliance with the License.
5  * You may obtain a copy of the License at
6  *
7  *     http://www.apache.org/licenses/LICENSE-2.0
8  *
9  * Unless required by applicable law or agreed to in writing, software
10  * distributed under the License is distributed on an "AS IS" BASIS,
11  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12  * See the License for the specific language governing permissions and
13  * limitations under the License.
14  */
15 
16 package com.ohos.hapsigntool.hap.utils;
17 
18 import com.ohos.hapsigntool.entity.Pair;
19 import com.ohos.hapsigntool.hap.entity.SigningBlock;
20 import com.ohos.hapsigntool.error.SignatureNotFoundException;
21 import com.ohos.hapsigntool.entity.ContentDigestAlgorithm;
22 import com.ohos.hapsigntool.utils.LogUtils;
23 import com.ohos.hapsigntool.zip.MessageDigestZipDataOutput;
24 import com.ohos.hapsigntool.zip.ZipDataInput;
25 import com.ohos.hapsigntool.zip.ZipDataOutput;
26 import com.ohos.hapsigntool.zip.ZipFileInfo;
27 
28 import org.bouncycastle.util.Arrays;
29 
30 import java.io.ByteArrayOutputStream;
31 import java.io.FileInputStream;
32 import java.io.IOException;
33 import java.nio.BufferUnderflowException;
34 import java.nio.ByteBuffer;
35 import java.nio.ByteOrder;
36 import java.security.DigestException;
37 import java.security.MessageDigest;
38 import java.security.NoSuchAlgorithmException;
39 import java.util.Collections;
40 import java.util.HashMap;
41 import java.util.HashSet;
42 import java.util.List;
43 import java.util.Map;
44 import java.util.Set;
45 
46 /**
47  * Hap util, parse hap, find signature block.
48  *
49  * @since 2021/12/20
50  */
51 public class HapUtils {
52     private static final LogUtils LOGGER = new LogUtils(HapUtils.class);
53 
54     /**
55      * ID of hap signature blocks of version 1
56      */
57     public static final int HAP_SIGNATURE_SCHEME_V1_BLOCK_ID = 0x20000000;
58 
59     /**
60      * ID of hap proof of rotation block
61      */
62     public static final int HAP_PROOF_OF_ROTATION_BLOCK_ID = 0x20000001;
63 
64     /**
65      * ID of profile block
66      */
67     public static final int HAP_PROFILE_BLOCK_ID = 0x20000002;
68 
69     /**
70      * ID of property block
71      */
72     public static final int HAP_PROPERTY_BLOCK_ID = 0x20000003;
73 
74     /**
75      * ID of code sign block
76      */
77     public static final int HAP_CODE_SIGN_BLOCK_ID = 0x30000001;
78 
79     /**
80      * The size of data block used to get digest
81      */
82 
83     public static final int CONTENT_DIGESTED_CHUNK_MAX_SIZE_BYTES = 1024 * 1024;
84 
85     /**
86      * Content version
87      */
88     public static final int CONTENT_VERSION = 2;
89 
90     /**
91      * bit size
92      */
93     public static final int BIT_SIZE = 8;
94 
95     /**
96      * half bit size
97      */
98     public static final int HALF_BIT_SIZE = 4;
99 
100     /**
101      * int size
102      */
103     public static final int INT_SIZE = 4;
104 
105     /**
106      * block number
107      */
108     public static final int BLOCK_NUMBER = 1;
109 
110     /**
111      * hap sign schema v2 signature block version
112      */
113     public static final int HAP_SIGN_SCHEME_V2_BLOCK_VERSION = 2;
114 
115     /**
116      * hap sign schema v3 signature block version
117      */
118     public static final int HAP_SIGN_SCHEME_V3_BLOCK_VERSION = 3;
119 
120     /**
121      * The value of lower 8-bytes of old magic word
122      */
123     public static final long HAP_SIG_BLOCK_MAGIC_LO_V2 = 0x2067695320504148L;
124 
125     /**
126      * The value of higher 8-bytes of old magic word
127      */
128     public static final long HAP_SIG_BLOCK_MAGIC_HI_V2 = 0x3234206b636f6c42L;
129 
130     /**
131      * The value of lower 8 bytes of magic word
132      */
133     public static final long HAP_SIG_BLOCK_MAGIC_LO_V3 = 0x676973207061683cL;
134 
135     /**
136      * The value of higher 8 bytes of magic word
137      */
138     public static final long HAP_SIG_BLOCK_MAGIC_HI_V3 = 0x3e6b636f6c62206eL;
139 
140     /**
141      * Size of hap signature block header
142      */
143     public static final int HAP_SIG_BLOCK_HEADER_SIZE = 32;
144 
145     /**
146      * The min size of hap signature block
147      */
148     public static final int HAP_SIG_BLOCK_MIN_SIZE = HAP_SIG_BLOCK_HEADER_SIZE;
149 
150     /**
151      * hap block size
152      */
153     public static final int BLOCK_SIZE = 8;
154 
155     /**
156      * The set of IDs of optional blocks in hap signature block.
157      */
158     private static final Set<Integer> HAP_SIGNATURE_OPTIONAL_BLOCK_IDS ;
159 
160     /**
161      * Minimum api version for hap sign schema v3.
162      */
163     private static final int MIN_COMPATIBLE_VERSION_FOR_SCHEMA_V3 = 8;
164 
165     /**
166      * Magic word of hap signature block v2
167      */
168     private static final byte[] HAP_SIGNING_BLOCK_MAGIC_V2 =
169             new byte[] {0x48, 0x41, 0x50, 0x20, 0x53, 0x69, 0x67, 0x20, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x20, 0x34, 0x32};
170 
171     /**
172      * Magic word of hap signature block
173      */
174     private static final byte[] HAP_SIGNING_BLOCK_MAGIC_V3 =
175             new byte[] {0x3c, 0x68, 0x61, 0x70, 0x20, 0x73, 0x69, 0x67, 0x6e, 0x20, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x3e};
176 
177     private static final byte ZIP_FIRST_LEVEL_CHUNK_PREFIX = 0x5a;
178     private static final byte ZIP_SECOND_LEVEL_CHUNK_PREFIX = (byte) 0xa5;
179     private static final int DIGEST_PRIFIX_LENGTH = 5;
180     private static final int BUFFER_LENGTH = 4096;
181     private static final char[] HEX_CHAR_ARRAY = "0123456789ABCDEF".toCharArray();
182 
    /**
     * Populates {@code HAP_SIGNATURE_OPTIONAL_BLOCK_IDS} with the IDs of the optional
     * blocks (proof-of-rotation, profile, property) and freezes it as unmodifiable.
     * NOTE(review): HAP_CODE_SIGN_BLOCK_ID is not included here — presumably intentional,
     * confirm against the consumers of getHapSignatureOptionalBlockIds().
     */
    static {
        Set<Integer> blockIds = new HashSet<Integer>();
        blockIds.add(HAP_PROOF_OF_ROTATION_BLOCK_ID);
        blockIds.add(HAP_PROFILE_BLOCK_ID);
        blockIds.add(HAP_PROPERTY_BLOCK_ID);
        HAP_SIGNATURE_OPTIONAL_BLOCK_IDS = Collections.unmodifiableSet(blockIds);
    }
193 
    // Utility class: all members are static, so instantiation is disallowed.
    private HapUtils() {
    }
196 
    /**
     * Get the set of IDs of optional blocks in the hap signature block.
     *
     * @return unmodifiable set of the optional block IDs
     */
    public static Set<Integer> getHapSignatureOptionalBlockIds() {
        return HAP_SIGNATURE_OPTIONAL_BLOCK_IDS;
    }
205 
206     /**
207      * Get HAP_SIGNING_BLOCK_MAGIC
208      *
209      * @param compatibleVersion compatible api version
210      * @return HAP_SIGNING_BLOCK_MAGIC
211      */
getHapSigningBlockMagic(int compatibleVersion)212     public static byte[] getHapSigningBlockMagic(int compatibleVersion) {
213         if (compatibleVersion >= MIN_COMPATIBLE_VERSION_FOR_SCHEMA_V3) {
214             return HAP_SIGNING_BLOCK_MAGIC_V3.clone();
215         }
216         return HAP_SIGNING_BLOCK_MAGIC_V2.clone();
217     }
218 
219     /**
220      * Get version number of hap signature block
221      *
222      * @param compatibleVersion compatible api version
223      * @return magic to number
224      */
getHapSigningBlockVersion(int compatibleVersion)225     public static int getHapSigningBlockVersion(int compatibleVersion) {
226         if (compatibleVersion >= MIN_COMPATIBLE_VERSION_FOR_SCHEMA_V3) {
227             return HAP_SIGN_SCHEME_V3_BLOCK_VERSION;
228         }
229         return HAP_SIGN_SCHEME_V2_BLOCK_VERSION;
230     }
231 
232     /**
233      * Read data from hap file.
234      *
235      * @param file input file path.
236      * @return true, if read successfully.
237      * @throws IOException on error.
238      */
readFileToByte(String file)239     public static byte[] readFileToByte(String file) throws IOException {
240         try (FileInputStream in = new FileInputStream(file);
241              ByteArrayOutputStream out = new ByteArrayOutputStream(in.available());) {
242             byte[] buf = new byte[BUFFER_LENGTH];
243             int len = 0;
244             while ((len = in.read(buf)) != -1) {
245                 out.write(buf, 0, len);
246             }
247             return out.toByteArray();
248         }
249     }
250 
getChunkCount(ZipDataInput[] contents)251     private static long getChunkCount(ZipDataInput[] contents) {
252         long chunkCount = 0L;
253         for (ZipDataInput content : contents) {
254             chunkCount += ((content.size() + CONTENT_DIGESTED_CHUNK_MAX_SIZE_BYTES - 1)
255                     / CONTENT_DIGESTED_CHUNK_MAX_SIZE_BYTES);
256         }
257         return chunkCount;
258     }
259 
    /**
     * compute digests of contents
     *
     * @param digestAlgorithms algorithm of digest
     * @param zipData content used to get digest
     * @param optionalBlocks list of optional blocks used to get digest
     * @return map from each digest algorithm to the final digest value
     * @throws DigestException digest error
     * @throws IOException if an IO error occurs when compute hap file digest
     */
    public static Map<ContentDigestAlgorithm, byte[]> computeDigests(
            Set<ContentDigestAlgorithm> digestAlgorithms, ZipDataInput[] zipData, List<SigningBlock> optionalBlocks)
            throws DigestException, IOException {
        long chunkCountLong = getChunkCount(zipData);
        // digestOfChunks indexes chunks with an int, so the count must fit in one.
        if (chunkCountLong > Integer.MAX_VALUE) {
            throw new DigestException("Input too long: " + chunkCountLong + " chunks");
        }
        int chunkCount = (int) chunkCountLong;
        ContentDigestAlgorithm[] contentDigestAlgorithms = digestAlgorithms.toArray(
                new ContentDigestAlgorithm[digestAlgorithms.size()]);
        MessageDigest[] messageDigests = new MessageDigest[contentDigestAlgorithms.length];
        int[] digestOutputSizes = new int[contentDigestAlgorithms.length];
        byte[][] digestOfChunks = new byte[contentDigestAlgorithms.length][];
        initComputeItem(chunkCount, contentDigestAlgorithms, messageDigests, digestOutputSizes, digestOfChunks);
        int chunkIndex = 0;
        // Per-chunk prefix: 1 tag byte (0xa5) followed by the chunk size as little-endian uint32.
        byte[] chunkContentPrefix = new byte[DIGEST_PRIFIX_LENGTH];
        chunkContentPrefix[0] = ZIP_SECOND_LEVEL_CHUNK_PREFIX;
        // buf is used only for its length (the maximum chunk size).
        byte[] buf = new byte[CONTENT_DIGESTED_CHUNK_MAX_SIZE_BYTES];
        // Fans each chunk's bytes out to every message digest at once.
        ZipDataOutput digests = new MessageDigestZipDataOutput(messageDigests);
        for (ZipDataInput content : zipData) {
            long offset = 0L;
            long remaining = content.size();
            while (remaining > 0) {
                int chunkSize = (int) Math.min(buf.length, remaining);
                setUInt32ToByteArrayWithLittleEngian(chunkSize, chunkContentPrefix, 1);
                for (int i = 0; i < contentDigestAlgorithms.length; i++) {
                    messageDigests[i].update(chunkContentPrefix);
                }
                try {
                    content.copyTo(offset, chunkSize, digests);
                } catch (IOException e) {
                    throw new IOException("Failed to read chunk #" + chunkIndex, e);
                }

                // Finalize each algorithm's digest for this chunk into its slot of digestOfChunks.
                getDigests(contentDigestAlgorithms, digestOutputSizes, messageDigests, digestOfChunks, chunkIndex);
                offset += chunkSize;
                remaining -= chunkSize;
                chunkIndex++;
            }
        }
        return getContentDigestAlgorithmMap(optionalBlocks, contentDigestAlgorithms, messageDigests, digestOfChunks);
    }
312 
    /**
     * Finish the per-chunk digest for every algorithm and store it into the
     * concatenation buffer at the slot belonging to {@code chunkIndex}.
     *
     * @param contentDigestAlgorithms algorithms being computed
     * @param digestOutputSizes expected digest length in bytes per algorithm
     * @param messageDigests digest instances already updated with the chunk data
     * @param digestOfChunks per-algorithm buffer: 5-byte prefix then fixed-size chunk digests
     * @param chunkIndex index of the current chunk
     * @throws DigestException if a digest writes an unexpected number of bytes
     */
    private static void getDigests(ContentDigestAlgorithm[] contentDigestAlgorithms, int[] digestOutputSizes,
        MessageDigest[] messageDigests, byte[][] digestOfChunks, int chunkIndex) throws DigestException {
        for (int i = 0; i < contentDigestAlgorithms.length; i++) {
            int expectedDigestSizeBytes = digestOutputSizes[i];
            // digest(buf, off, len) writes in place and resets the digest for the next chunk.
            int actualDigestSizeBytes = messageDigests[i].digest(digestOfChunks[i],
                    chunkIndex * expectedDigestSizeBytes + DIGEST_PRIFIX_LENGTH, expectedDigestSizeBytes);
            if (actualDigestSizeBytes != expectedDigestSizeBytes) {
                throw new DigestException("Unexpected output size of " + messageDigests[i].getAlgorithm()
                        + " digest: " + actualDigestSizeBytes);
            }
        }
    }
325 
initComputeItem(int chunkCount, ContentDigestAlgorithm[] contentDigestAlgorithms, MessageDigest[] messageDigests, int[] digestOutputSizes, byte[][] digestOfChunks)326     private static void initComputeItem(int chunkCount, ContentDigestAlgorithm[] contentDigestAlgorithms,
327                                         MessageDigest[] messageDigests, int[] digestOutputSizes,
328                                         byte[][] digestOfChunks) throws DigestException {
329         try {
330             for (int i = 0; i < contentDigestAlgorithms.length; i++) {
331                 int digestOutputSizeBytes = contentDigestAlgorithms[i].getDigestOutputByteSize();
332                 byte[] concatenationOfChunkCountAndChunkDigests =
333                         new byte[DIGEST_PRIFIX_LENGTH + chunkCount * digestOutputSizeBytes];
334                 concatenationOfChunkCountAndChunkDigests[0] = ZIP_FIRST_LEVEL_CHUNK_PREFIX;
335                 setUInt32ToByteArrayWithLittleEngian(chunkCount, concatenationOfChunkCountAndChunkDigests, 1);
336                 digestOfChunks[i] = concatenationOfChunkCountAndChunkDigests;
337                 messageDigests[i] = MessageDigest.getInstance(contentDigestAlgorithms[i].getDigestAlgorithm());
338                 digestOutputSizes[i] = contentDigestAlgorithms[i].getDigestOutputByteSize();
339             }
340         } catch (NoSuchAlgorithmException e) {
341             throw new DigestException("Digest algorithm not supported", e);
342         }
343     }
344 
getContentDigestAlgorithmMap(List<SigningBlock> optionalBlocks, ContentDigestAlgorithm[] contentDigestAlgorithms, MessageDigest[] messageDigests, byte[][] digestOfChunks)345     private static Map<ContentDigestAlgorithm, byte[]> getContentDigestAlgorithmMap(List<SigningBlock> optionalBlocks,
346         ContentDigestAlgorithm[] contentDigestAlgorithms, MessageDigest[] messageDigests, byte[][] digestOfChunks) {
347         Map<ContentDigestAlgorithm, byte[]> result = new HashMap<>(contentDigestAlgorithms.length);
348         for (int i = 0; i < contentDigestAlgorithms.length; i++) {
349             messageDigests[i].update(digestOfChunks[i]);
350             for (SigningBlock signingBlock : optionalBlocks) {
351                 messageDigests[i].update(signingBlock.getValue());
352             }
353             result.put(contentDigestAlgorithms[i], messageDigests[i].digest());
354         }
355         return result;
356     }
357 
setUInt32ToByteArrayWithLittleEngian(int value, byte[] result, int offset)358     private static void setUInt32ToByteArrayWithLittleEngian(int value, byte[] result, int offset) {
359         for (int i = 0; i < INT_SIZE; i++) {
360             result[offset + i] = (byte) ((value >> (BIT_SIZE * i)) & 0xff);
361         }
362     }
363 
    /**
     * Slice buffer to target size.
     *
     * <p>Returns a {@code targetSize}-byte slice starting at the source's current
     * position, preserving the source's byte order. On return the source position
     * has advanced past the slice and the original limit is restored.
     *
     * @param source input data buffer
     * @param targetSize target buffer's size
     * @return target buffer of target size
     * @throws BufferUnderflowException if fewer than {@code targetSize} bytes remain
     */
    public static ByteBuffer sliceBuffer(ByteBuffer source, int targetSize) {
        int limit = source.limit();
        int position = source.position();
        int targetLimit = position + targetSize;
        // targetLimit < position also catches int overflow and a negative targetSize.
        if ((targetLimit < position) || (targetLimit > limit)) {
            LOGGER.error("targetSize: " + targetSize);
            throw new BufferUnderflowException();
        }
        try {
            // Temporarily shrink the limit so slice() covers exactly targetSize bytes.
            source.limit(targetLimit);
            ByteBuffer target = source.slice();
            target.order(source.order());
            return target;
        } finally {
            // Always restore the source: consume the sliced bytes, reset the original limit.
            source.position(targetLimit);
            source.limit(limit);
        }
    }
389 
    /**
     * Slice the absolute range {@code [startPos, endPos)} out of {@code source}
     * without changing the source's observable position/limit (both restored on exit).
     *
     * @param source buffer to slice
     * @param startPos inclusive start index, relative to index 0
     * @param endPos exclusive end index
     * @return slice covering the requested range, with the source's byte order
     * @throws IllegalArgumentException if the range is invalid for this buffer
     */
    private static ByteBuffer sliceBuffer(ByteBuffer source, int startPos, int endPos) {
        int capacity = source.capacity();
        if (startPos < 0 || endPos < startPos || endPos > capacity) {
            throw new IllegalArgumentException(
                    "startPos: " + startPos + ", endPos: " + endPos + ", capacity: " + capacity);
        }
        int limit = source.limit();
        int position = source.position();
        try {
            // Rewind first so the new limit/position can be applied regardless of the
            // source's current position; slice() then covers exactly [startPos, endPos).
            source.position(0);
            source.limit(endPos);
            source.position(startPos);
            ByteBuffer target = source.slice();
            target.order(source.order());
            return target;
        } finally {
            // Restore limit before position: position must never exceed the limit.
            source.limit(limit);
            source.position(position);
        }
    }
410 
411     /**
412      * Slice buffer from startPos to endPos, and then reverse it.
413      *
414      * @param hapSigningBlock input buffer used to slice.
415      * @param startPos start position of slice buffer.
416      * @param endPos end position of slice buffer.
417      * @return new buffer.
418      */
reverseSliceBuffer(ByteBuffer hapSigningBlock, int startPos, int endPos)419     public static ByteBuffer reverseSliceBuffer(ByteBuffer hapSigningBlock, int startPos, int endPos) {
420         ByteBuffer header = HapUtils.sliceBuffer(hapSigningBlock, startPos, endPos);
421         byte[] signatureBlockBytes = new byte[header.capacity()];
422         header.get(signatureBlockBytes, 0, signatureBlockBytes.length);
423         return ByteBuffer.wrap(Arrays.reverse(signatureBlockBytes));
424     }
425 
426     /**
427      * Check whether buffer is little endian.
428      *
429      * @param buffer ByteBuffer used to check
430      */
checkBufferLittleEndian(ByteBuffer buffer)431     public static void checkBufferLittleEndian(ByteBuffer buffer) {
432         if (buffer.order() == ByteOrder.LITTLE_ENDIAN) {
433             return;
434         }
435         throw new IllegalArgumentException("ByteBuffer is not little endian");
436     }
437 
438     /**
439      * TLV encode list of pairs
440      *
441      * @param pairList input list of pairs
442      * @return byte array after encoding
443      */
encodeListOfPairsToByteArray(List<Pair<Integer, byte[]>> pairList)444     public static byte[] encodeListOfPairsToByteArray(List<Pair<Integer, byte[]>> pairList) {
445         int encodeSize = 0;
446         encodeSize += INT_SIZE + INT_SIZE;
447         for (Pair<Integer, byte[]> pair : pairList) {
448             encodeSize += INT_SIZE + INT_SIZE + INT_SIZE + pair.getSecond().length;
449         }
450         ByteBuffer encodeBytes = ByteBuffer.allocate(encodeSize);
451         encodeBytes.order(ByteOrder.LITTLE_ENDIAN);
452         encodeBytes.putInt(CONTENT_VERSION); // version
453         encodeBytes.putInt(BLOCK_NUMBER); // block number
454         for (Pair<Integer, byte[]> pair : pairList) {
455             byte[] second = pair.getSecond();
456             encodeBytes.putInt(INT_SIZE + INT_SIZE + second.length);
457             encodeBytes.putInt(pair.getFirst());
458             encodeBytes.putInt(second.length);
459             encodeBytes.put(second);
460         }
461         return encodeBytes.array();
462     }
463 
464     /**
465      * Translate value to Hex string.
466      *
467      * @param value input byte array.
468      * @param separator symbol insert between two bytes.
469      * @return a hex-values string.
470      */
toHex(byte[] value, String separator)471     public static String toHex(byte[] value, String separator) {
472         StringBuilder sb = new StringBuilder(value.length + value.length);
473         String useSeparator = separator == null ? "" : separator;
474         int len = value.length;
475         for (int i = 0; i < len; i++) {
476             int hi = (value[i] & 0xff) >>> HALF_BIT_SIZE;
477             int lo = value[i] & 0x0f;
478             sb.append(HEX_CHAR_ARRAY[hi]).append(HEX_CHAR_ARRAY[lo]);
479             if (i != len - 1) {
480                 sb.append(useSeparator);
481             }
482         }
483         return sb.toString();
484     }
485 
486     /**
487      * find signing block from hap file
488      *
489      * @param hap ZipDataInput object of zip file
490      * @param zipInfo ZipFileInfo object of hap file
491      * @return pair of offset of signing block and data of signing block
492      * @throws SignatureNotFoundException No signing block is found
493      * @throws IOException file operation error
494      */
findHapSigningBlock(ZipDataInput hap, ZipFileInfo zipInfo)495     public static HapSignBlockInfo findHapSigningBlock(ZipDataInput hap, ZipFileInfo zipInfo)
496             throws SignatureNotFoundException, IOException {
497         long centralDirectoryStartOffset = zipInfo.getCentralDirectoryOffset();
498         long centralDirectorySize = zipInfo.getCentralDirectorySize();
499         long eocdOffset = zipInfo.getEocdOffset();
500         long centralDirectoryEndOffset = centralDirectoryStartOffset + centralDirectorySize;
501         if (eocdOffset != centralDirectoryEndOffset) {
502             throw new SignatureNotFoundException("ZIP Central Directory is not immediately followed by End of Central"
503                     + "Directory. CD end: " + centralDirectoryEndOffset + ", EoCD start: " + eocdOffset);
504         }
505         if (centralDirectoryStartOffset < HAP_SIG_BLOCK_MIN_SIZE) {
506             throw new SignatureNotFoundException("Hap too small for Hap Signing Block. ZIP Central Directory offset: "
507                     + centralDirectoryStartOffset);
508         }
509         long hapSigningBlockHeaderOffset = centralDirectoryStartOffset - HAP_SIG_BLOCK_HEADER_SIZE;
510         ByteBuffer hapSigningBlockHeader = hap.createByteBuffer(hapSigningBlockHeaderOffset, HAP_SIG_BLOCK_HEADER_SIZE);
511         hapSigningBlockHeader.order(ByteOrder.LITTLE_ENDIAN);
512         int blockCount = hapSigningBlockHeader.getInt();
513         long hapSigBlockSize = hapSigningBlockHeader.getLong();
514         long hapSignBlockMagicLo = hapSigningBlockHeader.getLong();
515         long hapSignBlockMagicHi = hapSigningBlockHeader.getLong();
516         int version = hapSigningBlockHeader.getInt();
517         long hapSigningBlockOffset = verifySignBlock(hapSigBlockSize,
518                 hapSignBlockMagicLo, hapSignBlockMagicHi, version, centralDirectoryStartOffset);
519         ByteBuffer hapSigningBlockByteBuffer = hap.createByteBuffer(hapSigningBlockOffset, (int) hapSigBlockSize)
520                 .order(ByteOrder.LITTLE_ENDIAN);
521         LOGGER.info("Find Hap Signing Block success, version: {}, block count: {}", version, blockCount);
522         return new HapSignBlockInfo(hapSigningBlockOffset, version, hapSigningBlockByteBuffer);
523     }
524 
verifySignBlock(long hapSigBlockSize, long hapSignBlockMagicLo, long hapSignBlockMagicHi, int version, long centralDirectoryStartOffset)525     private static long verifySignBlock(long hapSigBlockSize, long hapSignBlockMagicLo,
526         long hapSignBlockMagicHi, int version, long centralDirectoryStartOffset) throws SignatureNotFoundException {
527         if (!isVersionAndMagicNumValid(version, hapSignBlockMagicLo, hapSignBlockMagicHi)) {
528             throw new SignatureNotFoundException("No Hap Signing Block before ZIP Central Directory");
529         }
530         if ((hapSigBlockSize < HAP_SIG_BLOCK_HEADER_SIZE)
531                 || (hapSigBlockSize > Integer.MAX_VALUE - BLOCK_SIZE)) {
532             throw new SignatureNotFoundException("Hap Signing Block size out of range: " + hapSigBlockSize);
533         }
534         int totalSize = (int) hapSigBlockSize;
535         long hapSigningBlockOffset = centralDirectoryStartOffset - totalSize;
536         if (hapSigningBlockOffset < 0) {
537             throw new SignatureNotFoundException("Hap Signing Block offset out of range: " + hapSigningBlockOffset);
538         }
539         return hapSigningBlockOffset;
540     }
541 
isVersionAndMagicNumValid(int version, long hapSignBlockMagicLo, long hapSignBlockMagicHi)542     private static boolean isVersionAndMagicNumValid(int version, long hapSignBlockMagicLo, long hapSignBlockMagicHi) {
543         if (version < HAP_SIGN_SCHEME_V3_BLOCK_VERSION) {
544             return hapSignBlockMagicLo == HAP_SIG_BLOCK_MAGIC_LO_V2 && hapSignBlockMagicHi == HAP_SIG_BLOCK_MAGIC_HI_V2;
545         }
546         return hapSignBlockMagicLo == HAP_SIG_BLOCK_MAGIC_LO_V3 && hapSignBlockMagicHi == HAP_SIG_BLOCK_MAGIC_HI_V3;
547     }
548 
    /**
     * Hap sign block info: immutable holder for the signing block's file offset,
     * header version, and raw content buffer.
     */
    public static class HapSignBlockInfo {
        private final long offset;
        private final int version;
        private final ByteBuffer content;

        /**
         * Construct a hap sign block info.
         *
         * @param offset file offset where the signing block starts
         * @param version signing block version read from the header
         * @param content raw bytes of the signing block
         */
        public HapSignBlockInfo(long offset, int version, ByteBuffer content) {
            this.offset = offset;
            this.version = version;
            this.content = content;
        }

        /**
         * @return signing block version
         */
        public int getVersion() {
            return version;
        }

        /**
         * @return raw content of the signing block
         */
        public ByteBuffer getContent() {
            return content;
        }

        /**
         * @return file offset where the signing block starts
         */
        public long getOffset() {
            return offset;
        }
    }
575 }