• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 /*
2  * Copyright 2015-2016 The OpenSSL Project Authors. All Rights Reserved.
3  *
4  * Licensed under the OpenSSL license (the "License").  You may not use
5  * this file except in compliance with the License.  You can obtain a copy
6  * in the file LICENSE in the source distribution or at
7  * https://www.openssl.org/source/license.html
8  */
9 
10 #include <limits.h>
11 #include <stdlib.h>
12 #include <string.h>
13 
14 #include <algorithm>
15 #include <string>
16 #include <vector>
17 
18 #include <gtest/gtest.h>
19 
20 #include <openssl/aes.h>
21 #include <openssl/cipher.h>
22 #include <openssl/err.h>
23 #include <openssl/nid.h>
24 #include <openssl/rand.h>
25 #include <openssl/sha.h>
26 #include <openssl/span.h>
27 
28 #include "../internal.h"
29 #include "../test/file_test.h"
30 #include "../test/test_util.h"
31 #include "../test/wycheproof_util.h"
32 #include "./internal.h"
33 
34 
GetCipher(const std::string & name)35 static const EVP_CIPHER *GetCipher(const std::string &name) {
36   if (name == "DES-CBC") {
37     return EVP_des_cbc();
38   } else if (name == "DES-ECB") {
39     return EVP_des_ecb();
40   } else if (name == "DES-EDE") {
41     return EVP_des_ede();
42   } else if (name == "DES-EDE3") {
43     return EVP_des_ede3();
44   } else if (name == "DES-EDE-CBC") {
45     return EVP_des_ede_cbc();
46   } else if (name == "DES-EDE3-CBC") {
47     return EVP_des_ede3_cbc();
48   } else if (name == "RC4") {
49     return EVP_rc4();
50   } else if (name == "AES-128-ECB") {
51     return EVP_aes_128_ecb();
52   } else if (name == "AES-256-ECB") {
53     return EVP_aes_256_ecb();
54   } else if (name == "AES-128-CBC") {
55     return EVP_aes_128_cbc();
56   } else if (name == "AES-128-GCM") {
57     return EVP_aes_128_gcm();
58   } else if (name == "AES-128-OFB") {
59     return EVP_aes_128_ofb();
60   } else if (name == "AES-192-CBC") {
61     return EVP_aes_192_cbc();
62   } else if (name == "AES-192-CTR") {
63     return EVP_aes_192_ctr();
64   } else if (name == "AES-192-ECB") {
65     return EVP_aes_192_ecb();
66   } else if (name == "AES-192-GCM") {
67     return EVP_aes_192_gcm();
68   } else if (name == "AES-192-OFB") {
69     return EVP_aes_192_ofb();
70   } else if (name == "AES-256-CBC") {
71     return EVP_aes_256_cbc();
72   } else if (name == "AES-128-CTR") {
73     return EVP_aes_128_ctr();
74   } else if (name == "AES-256-CTR") {
75     return EVP_aes_256_ctr();
76   } else if (name == "AES-256-GCM") {
77     return EVP_aes_256_gcm();
78   } else if (name == "AES-256-OFB") {
79     return EVP_aes_256_ofb();
80   }
81   return nullptr;
82 }
83 
// Operation describes which direction(s) of a cipher operation a test vector
// exercises, and whether the operation is expected to succeed.
enum class Operation {
  // kBoth tests both encryption and decryption.
  kBoth,
  // kEncrypt tests encryption. The result of encryption should always
  // successfully decrypt, so this should only be used if the test file has a
  // matching decrypt-only vector.
  kEncrypt,
  // kDecrypt tests decryption. This should only be used if the test file has a
  // matching encrypt-only input, or if multiple ciphertexts are valid for
  // a given plaintext and this is a non-canonical ciphertext.
  kDecrypt,
  // kInvalidDecrypt tests decryption and expects it to fail, e.g. due to
  // invalid tag or padding.
  kInvalidDecrypt,
};
99 
OperationToString(Operation op)100 static const char *OperationToString(Operation op) {
101   switch (op) {
102     case Operation::kBoth:
103       return "Both";
104     case Operation::kEncrypt:
105       return "Encrypt";
106     case Operation::kDecrypt:
107       return "Decrypt";
108     case Operation::kInvalidDecrypt:
109       return "InvalidDecrypt";
110   }
111   abort();
112 }
113 
114 // MaybeCopyCipherContext, if |copy| is true, replaces |*ctx| with a, hopefully
115 // equivalent, copy of it.
MaybeCopyCipherContext(bool copy,bssl::UniquePtr<EVP_CIPHER_CTX> * ctx)116 static bool MaybeCopyCipherContext(bool copy,
117                                    bssl::UniquePtr<EVP_CIPHER_CTX> *ctx) {
118   if (!copy) {
119     return true;
120   }
121   bssl::UniquePtr<EVP_CIPHER_CTX> ctx2(EVP_CIPHER_CTX_new());
122   if (!ctx2 || !EVP_CIPHER_CTX_copy(ctx2.get(), ctx->get())) {
123     return false;
124   }
125   *ctx = std::move(ctx2);
126   return true;
127 }
128 
// TestCipherAPI runs one cipher operation through the |EVP_CIPHER_CTX| API and
// checks the output (and, for AEADs, the tag) against the test vector.
//
// |op| selects encryption, decryption, or a decryption that must fail.
// |padding| leaves PKCS#5-style padding enabled rather than disabling it.
// |copy| exercises |EVP_CIPHER_CTX_copy| at each stage of the operation.
// |in_place| makes the input and output buffers alias.
// |use_evp_cipher| drives the operation via the legacy |EVP_Cipher| entry
// point instead of |EVP_CipherUpdate|/|EVP_CipherFinal_ex|.
// |chunk_size| is the streaming granularity; zero means single-shot.
// |aad| and |tag| are only meaningful for AEAD (GCM) ciphers.
static void TestCipherAPI(const EVP_CIPHER *cipher, Operation op, bool padding,
                          bool copy, bool in_place, bool use_evp_cipher,
                          size_t chunk_size, bssl::Span<const uint8_t> key,
                          bssl::Span<const uint8_t> iv,
                          bssl::Span<const uint8_t> plaintext,
                          bssl::Span<const uint8_t> ciphertext,
                          bssl::Span<const uint8_t> aad,
                          bssl::Span<const uint8_t> tag) {
  bool encrypt = op == Operation::kEncrypt;
  bool is_custom_cipher =
      EVP_CIPHER_flags(cipher) & EVP_CIPH_FLAG_CUSTOM_CIPHER;
  // When decrypting, the roles of |plaintext| and |ciphertext| swap.
  bssl::Span<const uint8_t> in = encrypt ? plaintext : ciphertext;
  bssl::Span<const uint8_t> expected = encrypt ? ciphertext : plaintext;
  bool is_aead = EVP_CIPHER_mode(cipher) == EVP_CIPH_GCM_MODE;

  // Some |EVP_CIPHER|s take a variable-length key, and need to first be
  // configured with the key length, which requires configuring the cipher.
  bssl::UniquePtr<EVP_CIPHER_CTX> ctx(EVP_CIPHER_CTX_new());
  ASSERT_TRUE(ctx);
  ASSERT_TRUE(EVP_CipherInit_ex(ctx.get(), cipher, /*engine=*/nullptr,
                                /*key=*/nullptr, /*iv=*/nullptr,
                                encrypt ? 1 : 0));
  ASSERT_TRUE(EVP_CIPHER_CTX_set_key_length(ctx.get(), key.size()));
  if (!padding) {
    ASSERT_TRUE(EVP_CIPHER_CTX_set_padding(ctx.get(), 0));
  }

  // Configure the key.
  ASSERT_TRUE(MaybeCopyCipherContext(copy, &ctx));
  ASSERT_TRUE(EVP_CipherInit_ex(ctx.get(), /*cipher=*/nullptr,
                                /*engine=*/nullptr, key.data(), /*iv=*/nullptr,
                                /*enc=*/-1));

  // Configure the IV to run the actual operation. Callers that wish to use a
  // key for multiple, potentially concurrent, operations will likely copy at
  // this point. The |EVP_CIPHER_CTX| API uses the same type to represent a
  // pre-computed key schedule and a streaming operation.
  ASSERT_TRUE(MaybeCopyCipherContext(copy, &ctx));
  if (is_aead) {
    // AEADs take a variable-length IV, configured via ctrl before InitEx.
    ASSERT_LE(iv.size(), size_t{INT_MAX});
    ASSERT_TRUE(EVP_CIPHER_CTX_ctrl(ctx.get(), EVP_CTRL_AEAD_SET_IVLEN,
                                    static_cast<int>(iv.size()), nullptr));
    ASSERT_EQ(EVP_CIPHER_CTX_iv_length(ctx.get()), iv.size());
  } else {
    ASSERT_EQ(iv.size(), EVP_CIPHER_CTX_iv_length(ctx.get()));
  }
  ASSERT_TRUE(EVP_CipherInit_ex(ctx.get(), /*cipher=*/nullptr,
                                /*engine=*/nullptr,
                                /*key=*/nullptr, iv.data(), /*enc=*/-1));

  // Decryption must be told the expected tag up front.
  if (is_aead && !encrypt) {
    ASSERT_TRUE(EVP_CIPHER_CTX_ctrl(ctx.get(), EVP_CTRL_AEAD_SET_TAG,
                                    tag.size(),
                                    const_cast<uint8_t *>(tag.data())));
  }

  // Note: the deprecated |EVP_CIPHER|-based AEAD API is sensitive to whether
  // parameters are NULL, so it is important to skip the |in| and |aad|
  // |EVP_CipherUpdate| calls when empty.
  while (!aad.empty()) {
    size_t todo =
        chunk_size == 0 ? aad.size() : std::min(aad.size(), chunk_size);
    if (use_evp_cipher) {
      // AEADs always use the "custom cipher" return value convention. Passing a
      // null output pointer triggers the AAD logic.
      ASSERT_TRUE(is_custom_cipher);
      ASSERT_EQ(static_cast<int>(todo),
                EVP_Cipher(ctx.get(), nullptr, aad.data(), todo));
    } else {
      int len;
      ASSERT_TRUE(EVP_CipherUpdate(ctx.get(), nullptr, &len, aad.data(), todo));
      // Although it doesn't output anything, |EVP_CipherUpdate| should claim to
      // output the input length.
      EXPECT_EQ(len, static_cast<int>(todo));
    }
    aad = aad.subspan(todo);
  }

  // Set up the output buffer. When encrypting with padding enabled, the output
  // may be up to one block longer than the input.
  size_t max_out = in.size();
  size_t block_size = EVP_CIPHER_CTX_block_size(ctx.get());
  if (block_size > 1 &&
      (EVP_CIPHER_CTX_flags(ctx.get()) & EVP_CIPH_NO_PADDING) == 0 &&
      EVP_CIPHER_CTX_encrypting(ctx.get())) {
    max_out += block_size - (max_out % block_size);
  }
  std::vector<uint8_t> result(max_out);
  if (in_place) {
    // Alias |in| onto the output buffer to test in-place operation.
    std::copy(in.begin(), in.end(), result.begin());
    in = bssl::MakeConstSpan(result).first(in.size());
  }

  size_t total = 0;
  int len;
  while (!in.empty()) {
    size_t todo = chunk_size == 0 ? in.size() : std::min(in.size(), chunk_size);
    EXPECT_LE(todo, static_cast<size_t>(INT_MAX));
    ASSERT_TRUE(MaybeCopyCipherContext(copy, &ctx));
    if (use_evp_cipher) {
      // |EVP_Cipher| sometimes returns the number of bytes written, or -1 on
      // error, and sometimes 1 or 0, implicitly writing |in_len| bytes.
      if (is_custom_cipher) {
        len = EVP_Cipher(ctx.get(), result.data() + total, in.data(), todo);
      } else {
        ASSERT_EQ(
            1, EVP_Cipher(ctx.get(), result.data() + total, in.data(), todo));
        len = static_cast<int>(todo);
      }
    } else {
      ASSERT_TRUE(EVP_CipherUpdate(ctx.get(), result.data() + total, &len,
                                   in.data(), static_cast<int>(todo)));
    }
    ASSERT_GE(len, 0);
    total += static_cast<size_t>(len);
    in = in.subspan(todo);
  }
  if (op == Operation::kInvalidDecrypt) {
    if (use_evp_cipher) {
      // Only the "custom cipher" return value convention can report failures.
      // Passing all nulls should act like |EVP_CipherFinal_ex|.
      ASSERT_TRUE(is_custom_cipher);
      EXPECT_EQ(-1, EVP_Cipher(ctx.get(), nullptr, nullptr, 0));
    } else {
      // Invalid padding and invalid tags all appear as a failed
      // |EVP_CipherFinal_ex|.
      EXPECT_FALSE(EVP_CipherFinal_ex(ctx.get(), result.data() + total, &len));
    }
  } else {
    if (use_evp_cipher) {
      if (is_custom_cipher) {
        // Only the "custom cipher" convention has an |EVP_CipherFinal_ex|
        // equivalent.
        len = EVP_Cipher(ctx.get(), nullptr, nullptr, 0);
      } else {
        len = 0;
      }
    } else {
      ASSERT_TRUE(EVP_CipherFinal_ex(ctx.get(), result.data() + total, &len));
    }
    ASSERT_GE(len, 0);
    total += static_cast<size_t>(len);
    result.resize(total);
    EXPECT_EQ(Bytes(expected), Bytes(result));
    // On encryption, additionally check the tag an AEAD produced.
    if (encrypt && is_aead) {
      uint8_t rtag[16];
      ASSERT_LE(tag.size(), sizeof(rtag));
      ASSERT_TRUE(MaybeCopyCipherContext(copy, &ctx));
      ASSERT_TRUE(EVP_CIPHER_CTX_ctrl(ctx.get(), EVP_CTRL_AEAD_GET_TAG,
                                      tag.size(), rtag));
      EXPECT_EQ(Bytes(tag), Bytes(rtag, tag.size()));
    }
  }
}
282 
// TestLowLevelAPI runs the test vector through the low-level AES APIs
// (|AES_ctr128_encrypt|, |AES_cbc_encrypt|, or |AES_ofb128_encrypt|) when
// |cipher| is an AES-CTR, AES-CBC, or AES-OFB variant. All other ciphers are
// silently skipped. |chunk_size| of zero means single-shot; otherwise the
// input is fed in |chunk_size|-byte pieces.
static void TestLowLevelAPI(
    const EVP_CIPHER *cipher, Operation op, bool in_place, size_t chunk_size,
    bssl::Span<const uint8_t> key, bssl::Span<const uint8_t> iv,
    bssl::Span<const uint8_t> plaintext, bssl::Span<const uint8_t> ciphertext) {
  bool encrypt = op == Operation::kEncrypt;
  bssl::Span<const uint8_t> in = encrypt ? plaintext : ciphertext;
  bssl::Span<const uint8_t> expected = encrypt ? ciphertext : plaintext;
  // Select the applicable low-level API by NID.
  int nid = EVP_CIPHER_nid(cipher);
  bool is_ctr = nid == NID_aes_128_ctr || nid == NID_aes_192_ctr ||
                nid == NID_aes_256_ctr;
  bool is_cbc = nid == NID_aes_128_cbc || nid == NID_aes_192_cbc ||
                nid == NID_aes_256_cbc;
  bool is_ofb = nid == NID_aes_128_ofb128 || nid == NID_aes_192_ofb128 ||
                nid == NID_aes_256_ofb128;
  if (!is_ctr && !is_cbc && !is_ofb) {
    return;
  }

  // Invalid ciphertexts are not possible in any of the ciphers where this API
  // applies.
  ASSERT_NE(op, Operation::kInvalidDecrypt);

  // CTR and OFB only ever use the encryption key schedule; CBC needs the
  // decryption schedule when decrypting.
  AES_KEY aes;
  if (encrypt || !is_cbc) {
    ASSERT_EQ(0, AES_set_encrypt_key(key.data(), key.size() * 8, &aes));
  } else {
    ASSERT_EQ(0, AES_set_decrypt_key(key.data(), key.size() * 8, &aes));
  }

  std::vector<uint8_t> result;
  if (in_place) {
    // In-place: start the output buffer with a copy of the input.
    result.assign(in.begin(), in.end());
  } else {
    result.resize(expected.size());
  }
  bssl::Span<uint8_t> out = bssl::MakeSpan(result);
  // Input and output sizes for all the low-level APIs should match.
  ASSERT_EQ(in.size(), out.size());

  // The low-level APIs all use block-size IVs.
  ASSERT_EQ(iv.size(), size_t{AES_BLOCK_SIZE});
  // The low-level APIs mutate the IV buffer in place, so use a local copy.
  uint8_t ivec[AES_BLOCK_SIZE];
  OPENSSL_memcpy(ivec, iv.data(), iv.size());

  if (is_ctr) {
    unsigned num = 0;
    uint8_t ecount_buf[AES_BLOCK_SIZE];
    if (chunk_size == 0) {
      AES_ctr128_encrypt(in.data(), out.data(), in.size(), &aes, ivec,
                         ecount_buf, &num);
    } else {
      // |num| and |ecount_buf| carry the partial-block state across chunks.
      do {
        size_t todo = std::min(in.size(), chunk_size);
        AES_ctr128_encrypt(in.data(), out.data(), todo, &aes, ivec, ecount_buf,
                           &num);
        in = in.subspan(todo);
        out = out.subspan(todo);
      } while (!in.empty());
    }
    EXPECT_EQ(Bytes(expected), Bytes(result));
  } else if (is_cbc && chunk_size % AES_BLOCK_SIZE == 0) {
    // Note |AES_cbc_encrypt| requires block-aligned chunks.
    if (chunk_size == 0) {
      AES_cbc_encrypt(in.data(), out.data(), in.size(), &aes, ivec, encrypt);
    } else {
      do {
        size_t todo = std::min(in.size(), chunk_size);
        AES_cbc_encrypt(in.data(), out.data(), todo, &aes, ivec, encrypt);
        in = in.subspan(todo);
        out = out.subspan(todo);
      } while (!in.empty());
    }
    EXPECT_EQ(Bytes(expected), Bytes(result));
  } else if (is_ofb) {
    int num = 0;
    if (chunk_size == 0) {
      AES_ofb128_encrypt(in.data(), out.data(), in.size(), &aes, ivec, &num);
    } else {
      do {
        size_t todo = std::min(in.size(), chunk_size);
        AES_ofb128_encrypt(in.data(), out.data(), todo, &aes, ivec, &num);
        in = in.subspan(todo);
        out = out.subspan(todo);
      } while (!in.empty());
    }
    EXPECT_EQ(Bytes(expected), Bytes(result));
  }
}
371 
// TestCipher runs one test vector against |cipher| across the full matrix of
// configurations: both directions (when |input_op| is kBoth), a range of chunk
// sizes including single-shot, in-place and out-of-place buffers, with and
// without |EVP_CIPHER_CTX_copy|, through both the |EVP_CipherUpdate| and
// legacy |EVP_Cipher| entry points, and through the low-level AES APIs where
// applicable.
static void TestCipher(const EVP_CIPHER *cipher, Operation input_op,
                       bool padding, bssl::Span<const uint8_t> key,
                       bssl::Span<const uint8_t> iv,
                       bssl::Span<const uint8_t> plaintext,
                       bssl::Span<const uint8_t> ciphertext,
                       bssl::Span<const uint8_t> aad,
                       bssl::Span<const uint8_t> tag) {
  size_t block_size = EVP_CIPHER_block_size(cipher);
  // kBoth expands to an encrypt pass and a decrypt pass.
  std::vector<Operation> ops;
  if (input_op == Operation::kBoth) {
    ops = {Operation::kEncrypt, Operation::kDecrypt};
  } else {
    ops = {input_op};
  }
  for (Operation op : ops) {
    SCOPED_TRACE(OperationToString(op));
    // Zero indicates a single-shot API.
    static const size_t kChunkSizes[] = {0,  1,  2,  5,  7,  8,  9,  15, 16,
                                         17, 31, 32, 33, 63, 64, 65, 512};
    for (size_t chunk_size : kChunkSizes) {
      SCOPED_TRACE(chunk_size);
      // Skip chunk sizes larger than every input; they add no coverage beyond
      // the single-shot case.
      if (chunk_size > plaintext.size() && chunk_size > ciphertext.size() &&
          chunk_size > aad.size()) {
        continue;
      }
      for (bool in_place : {false, true}) {
        SCOPED_TRACE(in_place);
        for (bool copy : {false, true}) {
          SCOPED_TRACE(copy);
          TestCipherAPI(cipher, op, padding, copy, in_place,
                        /*use_evp_cipher=*/false, chunk_size, key, iv,
                        plaintext, ciphertext, aad, tag);
          // |EVP_Cipher| requires block-aligned chunks and no padding.
          if (!padding && chunk_size % block_size == 0) {
            TestCipherAPI(cipher, op, padding, copy, in_place,
                          /*use_evp_cipher=*/true, chunk_size, key, iv,
                          plaintext, ciphertext, aad, tag);
          }
        }
        if (!padding) {
          TestLowLevelAPI(cipher, op, in_place, chunk_size, key, iv, plaintext,
                          ciphertext);
        }
      }
    }
  }
}
418 
// CipherFileTest parses one FileTest stanza (Cipher, Key, Plaintext,
// Ciphertext, an IV when the cipher takes one, AAD and Tag for GCM, and an
// optional Operation) and runs it through |TestCipher| with padding disabled.
static void CipherFileTest(FileTest *t) {
  std::string cipher_str;
  ASSERT_TRUE(t->GetAttribute(&cipher_str, "Cipher"));
  const EVP_CIPHER *cipher = GetCipher(cipher_str);
  ASSERT_TRUE(cipher);

  std::vector<uint8_t> key, iv, plaintext, ciphertext, aad, tag;
  ASSERT_TRUE(t->GetBytes(&key, "Key"));
  ASSERT_TRUE(t->GetBytes(&plaintext, "Plaintext"));
  ASSERT_TRUE(t->GetBytes(&ciphertext, "Ciphertext"));
  if (EVP_CIPHER_iv_length(cipher) > 0) {
    ASSERT_TRUE(t->GetBytes(&iv, "IV"));
  }
  if (EVP_CIPHER_mode(cipher) == EVP_CIPH_GCM_MODE) {
    ASSERT_TRUE(t->GetBytes(&aad, "AAD"));
    ASSERT_TRUE(t->GetBytes(&tag, "Tag"));
  }

  // Without an explicit Operation attribute, test both directions. The CAVP
  // files use upper-case ENCRYPT/DECRYPT spellings, so accept both cases.
  Operation op = Operation::kBoth;
  if (t->HasAttribute("Operation")) {
    const std::string &str = t->GetAttributeOrDie("Operation");
    if (str == "Encrypt" || str == "ENCRYPT") {
      op = Operation::kEncrypt;
    } else if (str == "Decrypt" || str == "DECRYPT") {
      op = Operation::kDecrypt;
    } else if (str == "InvalidDecrypt") {
      op = Operation::kInvalidDecrypt;
    } else {
      FAIL() << "Unknown operation: " << str;
    }
  }

  TestCipher(cipher, op, /*padding=*/false, key, iv, plaintext, ciphertext, aad,
             tag);
}
454 
// Runs the general cipher test vectors checked into the source tree.
TEST(CipherTest, TestVectors) {
  FileTestGTest("crypto/cipher_extra/test/cipher_tests.txt", CipherFileTest);
}
458 
// Runs the NIST CAVP AES-128-CBC test vectors.
TEST(CipherTest, CAVP_AES_128_CBC) {
  FileTestGTest("crypto/cipher_extra/test/nist_cavp/aes_128_cbc.txt",
                CipherFileTest);
}
463 
// Runs the NIST CAVP AES-128-CTR test vectors.
TEST(CipherTest, CAVP_AES_128_CTR) {
  FileTestGTest("crypto/cipher_extra/test/nist_cavp/aes_128_ctr.txt",
                CipherFileTest);
}
468 
// Runs the NIST CAVP AES-192-CBC test vectors.
TEST(CipherTest, CAVP_AES_192_CBC) {
  FileTestGTest("crypto/cipher_extra/test/nist_cavp/aes_192_cbc.txt",
                CipherFileTest);
}
473 
// Runs the NIST CAVP AES-192-CTR test vectors.
TEST(CipherTest, CAVP_AES_192_CTR) {
  FileTestGTest("crypto/cipher_extra/test/nist_cavp/aes_192_ctr.txt",
                CipherFileTest);
}
478 
// Runs the NIST CAVP AES-256-CBC test vectors.
TEST(CipherTest, CAVP_AES_256_CBC) {
  FileTestGTest("crypto/cipher_extra/test/nist_cavp/aes_256_cbc.txt",
                CipherFileTest);
}
483 
// Runs the NIST CAVP AES-256-CTR test vectors.
TEST(CipherTest, CAVP_AES_256_CTR) {
  FileTestGTest("crypto/cipher_extra/test/nist_cavp/aes_256_ctr.txt",
                CipherFileTest);
}
488 
// Runs the NIST CAVP Triple-DES-CBC test vectors.
TEST(CipherTest, CAVP_TDES_CBC) {
  FileTestGTest("crypto/cipher_extra/test/nist_cavp/tdes_cbc.txt",
                CipherFileTest);
}
493 
// Runs the NIST CAVP Triple-DES-ECB test vectors.
TEST(CipherTest, CAVP_TDES_ECB) {
  FileTestGTest("crypto/cipher_extra/test/nist_cavp/tdes_ecb.txt",
                CipherFileTest);
}
498 
// Runs the Wycheproof AES-CBC-PKCS5 vectors. Unlike the other file tests,
// these use padding, and invalid vectors are expected to fail decryption.
TEST(CipherTest, WycheproofAESCBC) {
  FileTestGTest("third_party/wycheproof_testvectors/aes_cbc_pkcs5_test.txt",
                [](FileTest *t) {
                  t->IgnoreInstruction("type");
                  t->IgnoreInstruction("ivSize");

                  // Select the AES variant from the keySize instruction.
                  std::string key_size;
                  ASSERT_TRUE(t->GetInstruction(&key_size, "keySize"));
                  const EVP_CIPHER *cipher;
                  switch (atoi(key_size.c_str())) {
                    case 128:
                      cipher = EVP_aes_128_cbc();
                      break;
                    case 192:
                      cipher = EVP_aes_192_cbc();
                      break;
                    case 256:
                      cipher = EVP_aes_256_cbc();
                      break;
                    default:
                      FAIL() << "Unsupported key size: " << key_size;
                  }

                  std::vector<uint8_t> key, iv, msg, ct;
                  ASSERT_TRUE(t->GetBytes(&key, "key"));
                  ASSERT_TRUE(t->GetBytes(&iv, "iv"));
                  ASSERT_TRUE(t->GetBytes(&msg, "msg"));
                  ASSERT_TRUE(t->GetBytes(&ct, "ct"));
                  WycheproofResult result;
                  ASSERT_TRUE(GetWycheproofResult(t, &result));
                  // Invalid vectors (e.g. bad padding) must fail to decrypt.
                  TestCipher(cipher,
                             result.IsValid() ? Operation::kBoth
                                              : Operation::kInvalidDecrypt,
                             /*padding=*/true, key, iv, msg, ct, /*aad=*/{},
                             /*tag=*/{});
                });
}
536 
// Tests that |EVP_sha1_final_with_secret_suffix| produces the same digest as a
// plain SHA-1 over prefix || suffix for all interesting combinations of public
// prefix length, secret suffix length, and public length bound. The input and
// the suffix length are marked secret so constant-time instrumentation can
// flag data-dependent behavior.
TEST(CipherTest, SHA1WithSecretSuffix) {
  uint8_t buf[SHA_CBLOCK * 4];
  RAND_bytes(buf, sizeof(buf));
  // Hashing should run in time independent of the bytes.
  CONSTTIME_SECRET(buf, sizeof(buf));

  // Exhaustively testing interesting cases in this function is cubic in the
  // block size, so we test in 3-byte increments.
  constexpr size_t kSkip = 3;
  // This value should be less than 8 to test the edge case when the 8-byte
  // length wraps to the next block.
  static_assert(kSkip < 8, "kSkip is too large");

  // |EVP_sha1_final_with_secret_suffix| is sensitive to the public length of
  // the partial block previously hashed. In TLS, this is the HMAC prefix, the
  // header, and the public minimum padding length.
  for (size_t prefix = 0; prefix < SHA_CBLOCK; prefix += kSkip) {
    SCOPED_TRACE(prefix);
    // The first block is treated differently, so we run with up to three
    // blocks of length variability.
    for (size_t max_len = 0; max_len < 3 * SHA_CBLOCK; max_len += kSkip) {
      SCOPED_TRACE(max_len);
      for (size_t len = 0; len <= max_len; len += kSkip) {
        SCOPED_TRACE(len);

        // Compute the reference digest with an ordinary SHA-1.
        uint8_t expected[SHA_DIGEST_LENGTH];
        SHA1(buf, prefix + len, expected);
        CONSTTIME_DECLASSIFY(expected, sizeof(expected));

        // Make a copy of the secret length to avoid interfering with the loop.
        size_t secret_len = len;
        CONSTTIME_SECRET(&secret_len, sizeof(secret_len));

        SHA_CTX ctx;
        SHA1_Init(&ctx);
        SHA1_Update(&ctx, buf, prefix);
        uint8_t computed[SHA_DIGEST_LENGTH];
        ASSERT_TRUE(EVP_sha1_final_with_secret_suffix(
            &ctx, computed, buf + prefix, secret_len, max_len));

        CONSTTIME_DECLASSIFY(computed, sizeof(computed));
        EXPECT_EQ(Bytes(expected), Bytes(computed));
      }
    }
  }
}
583 
// SHA-256 analogue of SHA1WithSecretSuffix: checks that
// |EVP_sha256_final_with_secret_suffix| matches a plain SHA-256 over
// prefix || suffix across prefix lengths, secret suffix lengths, and public
// length bounds, with secrets annotated for constant-time instrumentation.
TEST(CipherTest, SHA256WithSecretSuffix) {
  uint8_t buf[SHA256_CBLOCK * 4];
  RAND_bytes(buf, sizeof(buf));
  // Hashing should run in time independent of the bytes.
  CONSTTIME_SECRET(buf, sizeof(buf));

  // Exhaustively testing interesting cases in this function is cubic in the
  // block size, so we test in 3-byte increments.
  constexpr size_t kSkip = 3;
  // This value should be less than 8 to test the edge case when the 8-byte
  // length wraps to the next block.
  static_assert(kSkip < 8, "kSkip is too large");

  // |EVP_sha256_final_with_secret_suffix| is sensitive to the public length of
  // the partial block previously hashed. In TLS, this is the HMAC prefix, the
  // header, and the public minimum padding length.
  for (size_t prefix = 0; prefix < SHA256_CBLOCK; prefix += kSkip) {
    SCOPED_TRACE(prefix);
    // The first block is treated differently, so we run with up to three
    // blocks of length variability.
    for (size_t max_len = 0; max_len < 3 * SHA256_CBLOCK; max_len += kSkip) {
      SCOPED_TRACE(max_len);
      for (size_t len = 0; len <= max_len; len += kSkip) {
        SCOPED_TRACE(len);

        // Compute the reference digest with an ordinary SHA-256.
        uint8_t expected[SHA256_DIGEST_LENGTH];
        SHA256(buf, prefix + len, expected);
        CONSTTIME_DECLASSIFY(expected, sizeof(expected));

        // Make a copy of the secret length to avoid interfering with the loop.
        size_t secret_len = len;
        CONSTTIME_SECRET(&secret_len, sizeof(secret_len));

        SHA256_CTX ctx;
        SHA256_Init(&ctx);
        SHA256_Update(&ctx, buf, prefix);
        uint8_t computed[SHA256_DIGEST_LENGTH];
        ASSERT_TRUE(EVP_sha256_final_with_secret_suffix(
            &ctx, computed, buf + prefix, secret_len, max_len));

        CONSTTIME_DECLASSIFY(computed, sizeof(computed));
        EXPECT_EQ(Bytes(expected), Bytes(computed));
      }
    }
  }
}
630 
// Tests cipher lookup by NID and by name, including case-insensitive name
// matching and the tcpdump-specific "3des" alias.
TEST(CipherTest, GetCipher) {
  const EVP_CIPHER *cipher = EVP_get_cipherbynid(NID_aes_128_gcm);
  ASSERT_TRUE(cipher);
  EXPECT_EQ(NID_aes_128_gcm, EVP_CIPHER_nid(cipher));

  cipher = EVP_get_cipherbyname("aes-128-gcm");
  ASSERT_TRUE(cipher);
  EXPECT_EQ(NID_aes_128_gcm, EVP_CIPHER_nid(cipher));

  // Lookup by name is case-insensitive.
  cipher = EVP_get_cipherbyname("AES-128-GCM");
  ASSERT_TRUE(cipher);
  EXPECT_EQ(NID_aes_128_gcm, EVP_CIPHER_nid(cipher));

  // We support a tcpdump-specific alias for 3DES.
  cipher = EVP_get_cipherbyname("3des");
  ASSERT_TRUE(cipher);
  EXPECT_EQ(NID_des_ede3_cbc, EVP_CIPHER_nid(cipher));
}
649 
650 // Test the AES-GCM EVP_CIPHER's internal IV management APIs. OpenSSH uses these
651 // APIs.
TEST(CipherTest,GCMIncrementingIV)652 TEST(CipherTest, GCMIncrementingIV) {
653   const EVP_CIPHER *kCipher = EVP_aes_128_gcm();
654   static const uint8_t kKey[16] = {0, 1, 2,  3,  4,  5,  6,  7,
655                                    8, 9, 10, 11, 12, 13, 14, 15};
656   static const uint8_t kInput[] = {'h', 'e', 'l', 'l', 'o'};
657 
658   auto expect_iv = [&](EVP_CIPHER_CTX *ctx, bssl::Span<const uint8_t> iv,
659                        bool enc) {
660     // Make a reference ciphertext.
661     bssl::ScopedEVP_CIPHER_CTX ref;
662     ASSERT_TRUE(EVP_EncryptInit_ex(ref.get(), kCipher, /*impl=*/nullptr,
663                                    kKey, /*iv=*/nullptr));
664     ASSERT_TRUE(EVP_CIPHER_CTX_ctrl(ref.get(), EVP_CTRL_AEAD_SET_IVLEN,
665                                     static_cast<int>(iv.size()), nullptr));
666     ASSERT_TRUE(EVP_EncryptInit_ex(ref.get(), /*cipher=*/nullptr,
667                                    /*impl=*/nullptr, /*key=*/nullptr,
668                                    iv.data()));
669     uint8_t ciphertext[sizeof(kInput)];
670     int ciphertext_len;
671     ASSERT_TRUE(EVP_EncryptUpdate(ref.get(), ciphertext, &ciphertext_len,
672                                   kInput, sizeof(kInput)));
673     int extra_len;
674     ASSERT_TRUE(EVP_EncryptFinal_ex(ref.get(), nullptr, &extra_len));
675     ASSERT_EQ(extra_len, 0);
676     uint8_t tag[16];
677     ASSERT_TRUE(EVP_CIPHER_CTX_ctrl(ref.get(), EVP_CTRL_AEAD_GET_TAG,
678                                     sizeof(tag), tag));
679 
680     if (enc) {
681       uint8_t actual[sizeof(kInput)];
682       int actual_len;
683       ASSERT_TRUE(
684           EVP_EncryptUpdate(ctx, actual, &actual_len, kInput, sizeof(kInput)));
685       ASSERT_TRUE(EVP_EncryptFinal_ex(ctx, nullptr, &extra_len));
686       ASSERT_EQ(extra_len, 0);
687       EXPECT_EQ(Bytes(actual, actual_len), Bytes(ciphertext, ciphertext_len));
688       uint8_t actual_tag[16];
689       ASSERT_TRUE(EVP_CIPHER_CTX_ctrl(ctx, EVP_CTRL_AEAD_GET_TAG,
690                                       sizeof(actual_tag), actual_tag));
691       EXPECT_EQ(Bytes(actual_tag), Bytes(tag));
692     } else {
693       ASSERT_TRUE(EVP_CIPHER_CTX_ctrl(ctx, EVP_CTRL_AEAD_SET_TAG, sizeof(tag),
694                                       const_cast<uint8_t *>(tag)));
695       uint8_t actual[sizeof(kInput)];
696       int actual_len;
697       ASSERT_TRUE(EVP_DecryptUpdate(ctx, actual, &actual_len, ciphertext,
698                                     sizeof(ciphertext)));
699       ASSERT_TRUE(EVP_DecryptFinal_ex(ctx, nullptr, &extra_len));
700       ASSERT_EQ(extra_len, 0);
701       EXPECT_EQ(Bytes(actual, actual_len), Bytes(kInput));
702     }
703   };
704 
705   {
706     // Passing in a fixed IV length of -1 sets the whole IV, but then configures
707     // |EVP_CIPHER_CTX| to increment the bottom 8 bytes of the IV.
708     static const uint8_t kIV1[12] = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12};
709     static const uint8_t kIV2[12] = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 13};
710     static const uint8_t kIV3[12] = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 14};
711     static const uint8_t kIV4[12] = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 15};
712 
713     bssl::ScopedEVP_CIPHER_CTX ctx;
714     ASSERT_TRUE(EVP_EncryptInit_ex(ctx.get(), kCipher, /*impl=*/nullptr, kKey,
715                                    /*iv=*/nullptr));
716     ASSERT_TRUE(EVP_CIPHER_CTX_ctrl(ctx.get(), EVP_CTRL_AEAD_SET_IV_FIXED, -1,
717                                     const_cast<uint8_t *>(kIV1)));
718 
719     // EVP_CTRL_GCM_IV_GEN both configures and returns the IV.
720     uint8_t iv[12];
721     ASSERT_TRUE(
722         EVP_CIPHER_CTX_ctrl(ctx.get(), EVP_CTRL_GCM_IV_GEN, sizeof(iv), iv));
723     EXPECT_EQ(Bytes(iv), Bytes(kIV1));
724     ASSERT_NO_FATAL_FAILURE(expect_iv(ctx.get(), kIV1, /*enc=*/true));
725 
726     // Continuing to run EVP_CTRL_GCM_IV_GEN should increment the IV.
727     ASSERT_TRUE(
728         EVP_CIPHER_CTX_ctrl(ctx.get(), EVP_CTRL_GCM_IV_GEN, sizeof(iv), iv));
729     EXPECT_EQ(Bytes(iv), Bytes(kIV2));
730     ASSERT_NO_FATAL_FAILURE(expect_iv(ctx.get(), kIV2, /*enc=*/true));
731 
732     // Passing in a shorter length outputs the suffix portion.
733     uint8_t suffix[8];
734     ASSERT_TRUE(EVP_CIPHER_CTX_ctrl(ctx.get(), EVP_CTRL_GCM_IV_GEN,
735                                     sizeof(suffix), suffix));
736     EXPECT_EQ(Bytes(suffix),
737               Bytes(bssl::MakeConstSpan(kIV3).last(sizeof(suffix))));
738     ASSERT_NO_FATAL_FAILURE(expect_iv(ctx.get(), kIV3, /*enc=*/true));
739 
740     // A length of -1 returns the whole IV.
741     ASSERT_TRUE(EVP_CIPHER_CTX_ctrl(ctx.get(), EVP_CTRL_GCM_IV_GEN, -1, iv));
742     EXPECT_EQ(Bytes(iv), Bytes(kIV4));
743     ASSERT_NO_FATAL_FAILURE(expect_iv(ctx.get(), kIV4, /*enc=*/true));
744   }
745 
746   {
747     // Similar to the above, but for decrypting.
748     static const uint8_t kIV1[12] = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12};
749     static const uint8_t kIV2[12] = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 13};
750 
751     bssl::ScopedEVP_CIPHER_CTX ctx;
752     ASSERT_TRUE(EVP_DecryptInit_ex(ctx.get(), kCipher, /*impl=*/nullptr, kKey,
753                                    /*iv=*/nullptr));
754     ASSERT_TRUE(EVP_CIPHER_CTX_ctrl(ctx.get(), EVP_CTRL_AEAD_SET_IV_FIXED, -1,
755                                     const_cast<uint8_t *>(kIV1)));
756 
757     uint8_t iv[12];
758     ASSERT_TRUE(
759         EVP_CIPHER_CTX_ctrl(ctx.get(), EVP_CTRL_GCM_IV_GEN, sizeof(iv), iv));
760     EXPECT_EQ(Bytes(iv), Bytes(kIV1));
761     ASSERT_NO_FATAL_FAILURE(expect_iv(ctx.get(), kIV1, /*enc=*/false));
762 
763     ASSERT_TRUE(
764         EVP_CIPHER_CTX_ctrl(ctx.get(), EVP_CTRL_GCM_IV_GEN, sizeof(iv), iv));
765     EXPECT_EQ(Bytes(iv), Bytes(kIV2));
766     ASSERT_NO_FATAL_FAILURE(expect_iv(ctx.get(), kIV2, /*enc=*/false));
767   }
768 
769   {
770     // Test that only the bottom 8 bytes are used as a counter.
771     static const uint8_t kIV1[12] = {0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
772                                      0xff, 0xff, 0xff, 0xff, 0xff, 0xff};
773     static const uint8_t kIV2[12] = {0xff, 0xff, 0xff, 0xff, 0x00, 0x00,
774                                      0x00, 0x00, 0x00, 0x00, 0x00, 0x00};
775     static const uint8_t kIV3[12] = {0xff, 0xff, 0xff, 0xff, 0x00, 0x00,
776                                      0x00, 0x00, 0x00, 0x00, 0x00, 0x01};
777 
778     bssl::ScopedEVP_CIPHER_CTX ctx;
779     ASSERT_TRUE(EVP_EncryptInit_ex(ctx.get(), kCipher, /*impl=*/nullptr, kKey,
780                                    /*iv=*/nullptr));
781     ASSERT_TRUE(EVP_CIPHER_CTX_ctrl(ctx.get(), EVP_CTRL_AEAD_SET_IV_FIXED, -1,
782                                     const_cast<uint8_t *>(kIV1)));
783 
784     uint8_t iv[12];
785     ASSERT_TRUE(
786         EVP_CIPHER_CTX_ctrl(ctx.get(), EVP_CTRL_GCM_IV_GEN, sizeof(iv), iv));
787     EXPECT_EQ(Bytes(iv), Bytes(kIV1));
788     ASSERT_NO_FATAL_FAILURE(expect_iv(ctx.get(), kIV1, /*enc=*/true));
789 
790     ASSERT_TRUE(
791         EVP_CIPHER_CTX_ctrl(ctx.get(), EVP_CTRL_GCM_IV_GEN, sizeof(iv), iv));
792     EXPECT_EQ(Bytes(iv), Bytes(kIV2));
793     ASSERT_NO_FATAL_FAILURE(expect_iv(ctx.get(), kIV2, /*enc=*/true));
794 
795     ASSERT_TRUE(
796         EVP_CIPHER_CTX_ctrl(ctx.get(), EVP_CTRL_GCM_IV_GEN, sizeof(iv), iv));
797     EXPECT_EQ(Bytes(iv), Bytes(kIV3));
798     ASSERT_NO_FATAL_FAILURE(expect_iv(ctx.get(), kIV3, /*enc=*/true));
799   }
800 
801   {
802     // Test with a longer IV length.
803     static const uint8_t kIV1[16] = {0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
804                                      0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
805                                      0xff, 0xff, 0xff, 0xff};
806     static const uint8_t kIV2[16] = {0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
807                                      0xff, 0xff, 0x00, 0x00, 0x00, 0x00,
808                                      0x00, 0x00, 0x00, 0x00};
809     static const uint8_t kIV3[16] = {0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
810                                      0xff, 0xff, 0x00, 0x00, 0x00, 0x00,
811                                      0x00, 0x00, 0x00, 0x01};
812 
813     bssl::ScopedEVP_CIPHER_CTX ctx;
814     ASSERT_TRUE(EVP_EncryptInit_ex(ctx.get(), kCipher, /*impl=*/nullptr, kKey,
815                                    /*iv=*/nullptr));
816     ASSERT_TRUE(EVP_CIPHER_CTX_ctrl(ctx.get(), EVP_CTRL_AEAD_SET_IVLEN,
817                                     sizeof(kIV1), nullptr));
818     ASSERT_TRUE(EVP_CIPHER_CTX_ctrl(ctx.get(), EVP_CTRL_AEAD_SET_IV_FIXED, -1,
819                                     const_cast<uint8_t *>(kIV1)));
820 
821     uint8_t iv[16];
822     ASSERT_TRUE(
823         EVP_CIPHER_CTX_ctrl(ctx.get(), EVP_CTRL_GCM_IV_GEN, sizeof(iv), iv));
824     EXPECT_EQ(Bytes(iv), Bytes(kIV1));
825     ASSERT_NO_FATAL_FAILURE(expect_iv(ctx.get(), kIV1, /*enc=*/true));
826 
827     ASSERT_TRUE(
828         EVP_CIPHER_CTX_ctrl(ctx.get(), EVP_CTRL_GCM_IV_GEN, sizeof(iv), iv));
829     EXPECT_EQ(Bytes(iv), Bytes(kIV2));
830     ASSERT_NO_FATAL_FAILURE(expect_iv(ctx.get(), kIV2, /*enc=*/true));
831 
832     ASSERT_TRUE(
833         EVP_CIPHER_CTX_ctrl(ctx.get(), EVP_CTRL_GCM_IV_GEN, sizeof(iv), iv));
834     EXPECT_EQ(Bytes(iv), Bytes(kIV3));
835     ASSERT_NO_FATAL_FAILURE(expect_iv(ctx.get(), kIV3, /*enc=*/true));
836   }
837 
838   {
839     // When decrypting, callers are expected to configure the fixed half and
840     // invocation half separately. The two will get stitched together into the
841     // final IV.
842     const uint8_t kIV[12] = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12};
843 
844     bssl::ScopedEVP_CIPHER_CTX ctx;
845     ASSERT_TRUE(EVP_DecryptInit_ex(ctx.get(), kCipher, /*impl=*/nullptr, kKey,
846                                    /*iv=*/nullptr));
847     ASSERT_TRUE(EVP_CIPHER_CTX_ctrl(ctx.get(), EVP_CTRL_AEAD_SET_IV_FIXED, 4,
848                                     const_cast<uint8_t *>(kIV)));
849     ASSERT_TRUE(EVP_CIPHER_CTX_ctrl(ctx.get(), EVP_CTRL_GCM_SET_IV_INV, 8,
850                                     const_cast<uint8_t *>(kIV + 4)));
851     // EVP_CTRL_GCM_SET_IV_INV is sufficient to configure the IV. There is no
852     // need to call EVP_CTRL_GCM_IV_GEN.
853     ASSERT_NO_FATAL_FAILURE(expect_iv(ctx.get(), kIV, /*enc=*/false));
854   }
855 
856   {
857     // Stitching together a decryption IV that exceeds the standard IV length.
858     const uint8_t kIV[16] = {1, 2,  3,  4,  5,  6,  7,  8,
859                              9, 10, 11, 12, 13, 14, 15, 16};
860 
861     bssl::ScopedEVP_CIPHER_CTX ctx;
862     ASSERT_TRUE(EVP_DecryptInit_ex(ctx.get(), kCipher, /*impl=*/nullptr, kKey,
863                                    /*iv=*/nullptr));
864     ASSERT_TRUE(EVP_CIPHER_CTX_ctrl(ctx.get(), EVP_CTRL_AEAD_SET_IVLEN,
865                                     sizeof(kIV), nullptr));
866 
867     ASSERT_TRUE(EVP_CIPHER_CTX_ctrl(ctx.get(), EVP_CTRL_AEAD_SET_IV_FIXED, 4,
868                                     const_cast<uint8_t *>(kIV)));
869     ASSERT_TRUE(EVP_CIPHER_CTX_ctrl(ctx.get(), EVP_CTRL_GCM_SET_IV_INV, 12,
870                                     const_cast<uint8_t *>(kIV + 4)));
871     // EVP_CTRL_GCM_SET_IV_INV is sufficient to configure the IV. There is no
872     // need to call EVP_CTRL_GCM_IV_GEN.
873     ASSERT_NO_FATAL_FAILURE(expect_iv(ctx.get(), kIV, /*enc=*/false));
874   }
875 
876   {
877     // Fixed IVs must be at least 4 bytes and admit at least an 8 byte counter.
878     const uint8_t kIV[16] = {1, 2,  3,  4,  5,  6,  7,  8,
879                              9, 10, 11, 12, 13, 14, 15, 16};
880 
881     bssl::ScopedEVP_CIPHER_CTX ctx;
882     ASSERT_TRUE(EVP_DecryptInit_ex(ctx.get(), kCipher, /*impl=*/nullptr, kKey,
883                                    /*iv=*/nullptr));
884 
885     // This means the default IV length only allows a 4/8 split.
886     EXPECT_FALSE(EVP_CIPHER_CTX_ctrl(ctx.get(), EVP_CTRL_AEAD_SET_IV_FIXED, 0,
887                                      const_cast<uint8_t *>(kIV)));
888     EXPECT_FALSE(EVP_CIPHER_CTX_ctrl(ctx.get(), EVP_CTRL_AEAD_SET_IV_FIXED, 3,
889                                      const_cast<uint8_t *>(kIV)));
890     EXPECT_TRUE(EVP_CIPHER_CTX_ctrl(ctx.get(), EVP_CTRL_AEAD_SET_IV_FIXED, 4,
891                                     const_cast<uint8_t *>(kIV)));
892     EXPECT_FALSE(EVP_CIPHER_CTX_ctrl(ctx.get(), EVP_CTRL_AEAD_SET_IV_FIXED, 5,
893                                      const_cast<uint8_t *>(kIV)));
894     EXPECT_FALSE(EVP_CIPHER_CTX_ctrl(ctx.get(), EVP_CTRL_AEAD_SET_IV_FIXED, 16,
895                                      const_cast<uint8_t *>(kIV)));
896 
897     // A longer IV allows a wider range.
898     ASSERT_TRUE(
899         EVP_CIPHER_CTX_ctrl(ctx.get(), EVP_CTRL_AEAD_SET_IVLEN, 16, nullptr));
900     EXPECT_FALSE(EVP_CIPHER_CTX_ctrl(ctx.get(), EVP_CTRL_AEAD_SET_IV_FIXED, 0,
901                                      const_cast<uint8_t *>(kIV)));
902     EXPECT_FALSE(EVP_CIPHER_CTX_ctrl(ctx.get(), EVP_CTRL_AEAD_SET_IV_FIXED, 3,
903                                      const_cast<uint8_t *>(kIV)));
904     EXPECT_TRUE(EVP_CIPHER_CTX_ctrl(ctx.get(), EVP_CTRL_AEAD_SET_IV_FIXED, 4,
905                                     const_cast<uint8_t *>(kIV)));
906     EXPECT_TRUE(EVP_CIPHER_CTX_ctrl(ctx.get(), EVP_CTRL_AEAD_SET_IV_FIXED, 6,
907                                     const_cast<uint8_t *>(kIV)));
908     EXPECT_TRUE(EVP_CIPHER_CTX_ctrl(ctx.get(), EVP_CTRL_AEAD_SET_IV_FIXED, 8,
909                                     const_cast<uint8_t *>(kIV)));
910     EXPECT_FALSE(EVP_CIPHER_CTX_ctrl(ctx.get(), EVP_CTRL_AEAD_SET_IV_FIXED, 9,
911                                      const_cast<uint8_t *>(kIV)));
912     EXPECT_FALSE(EVP_CIPHER_CTX_ctrl(ctx.get(), EVP_CTRL_AEAD_SET_IV_FIXED, 16,
913                                      const_cast<uint8_t *>(kIV)));
914   }
915 
916   {
917     // When encrypting, setting a fixed IV randomizes the counter portion.
918     const uint8_t kFixedIV[4] = {1, 2, 3, 4};
919     bssl::ScopedEVP_CIPHER_CTX ctx;
920     ASSERT_TRUE(EVP_EncryptInit_ex(ctx.get(), kCipher, /*impl=*/nullptr, kKey,
921                                    /*iv=*/nullptr));
922     ASSERT_TRUE(EVP_CIPHER_CTX_ctrl(ctx.get(), EVP_CTRL_AEAD_SET_IV_FIXED,
923                                     sizeof(kFixedIV),
924                                     const_cast<uint8_t *>(kFixedIV)));
925     uint8_t counter[8];
926     ASSERT_TRUE(EVP_CIPHER_CTX_ctrl(ctx.get(), EVP_CTRL_GCM_IV_GEN,
927                                     sizeof(counter), counter));
928 
929     uint8_t iv[12];
930     memcpy(iv, kFixedIV, sizeof(kFixedIV));
931     memcpy(iv + sizeof(kFixedIV), counter, sizeof(counter));
932     ASSERT_NO_FATAL_FAILURE(expect_iv(ctx.get(), iv, /*enc=*/true));
933 
934     // The counter continues to act as a counter.
935     uint8_t counter2[8];
936     ASSERT_TRUE(EVP_CIPHER_CTX_ctrl(ctx.get(), EVP_CTRL_GCM_IV_GEN,
937                                     sizeof(counter2), counter2));
938     EXPECT_EQ(CRYPTO_load_u64_be(counter2), CRYPTO_load_u64_be(counter) + 1);
939     memcpy(iv + sizeof(kFixedIV), counter2, sizeof(counter2));
940     ASSERT_NO_FATAL_FAILURE(expect_iv(ctx.get(), iv, /*enc=*/true));
941   }
942 
943     {
944     // Same as above, but with a larger IV.
945     const uint8_t kFixedIV[8] = {1, 2, 3, 4, 5, 6, 7, 8};
946     bssl::ScopedEVP_CIPHER_CTX ctx;
947     ASSERT_TRUE(EVP_EncryptInit_ex(ctx.get(), kCipher, /*impl=*/nullptr, kKey,
948                                    /*iv=*/nullptr));
949     ASSERT_TRUE(EVP_CIPHER_CTX_ctrl(ctx.get(), EVP_CTRL_AEAD_SET_IVLEN,
950                                     sizeof(kFixedIV) + 8, nullptr));
951     ASSERT_TRUE(EVP_CIPHER_CTX_ctrl(ctx.get(), EVP_CTRL_AEAD_SET_IV_FIXED,
952                                     sizeof(kFixedIV),
953                                     const_cast<uint8_t *>(kFixedIV)));
954     uint8_t counter[8];
955     ASSERT_TRUE(EVP_CIPHER_CTX_ctrl(ctx.get(), EVP_CTRL_GCM_IV_GEN,
956                                     sizeof(counter), counter));
957 
958     uint8_t iv[16];
959     memcpy(iv, kFixedIV, sizeof(kFixedIV));
960     memcpy(iv + sizeof(kFixedIV), counter, sizeof(counter));
961     ASSERT_NO_FATAL_FAILURE(expect_iv(ctx.get(), iv, /*enc=*/true));
962 
963     // The counter continues to act as a counter.
964     uint8_t counter2[8];
965     ASSERT_TRUE(EVP_CIPHER_CTX_ctrl(ctx.get(), EVP_CTRL_GCM_IV_GEN,
966                                     sizeof(counter2), counter2));
967     EXPECT_EQ(CRYPTO_load_u64_be(counter2), CRYPTO_load_u64_be(counter) + 1);
968     memcpy(iv + sizeof(kFixedIV), counter2, sizeof(counter2));
969     ASSERT_NO_FATAL_FAILURE(expect_iv(ctx.get(), iv, /*enc=*/true));
970   }
971 }
972