# Test hashlib module
#
# $Id$
#
# Copyright (C) 2005-2010 Gregory P. Smith (greg@krypto.org)
# Licensed to PSF under a Contributor Agreement.
#

import array
import hashlib
import itertools
import sys
try:
    import threading
except ImportError:
    # threading may be absent on minimal builds; the threaded test is
    # skipped in that case (see the skipUnless on test_threaded_hashing).
    threading = None
import unittest
import warnings
from binascii import unhexlify

from test import test_support
from test.test_support import _4G, precisionbigmemtest

# Were we compiled --with-pydebug or with #define Py_DEBUG?
COMPILED_WITH_PYDEBUG = hasattr(sys, 'gettotalrefcount')


def hexstr(s):
    """Return the lowercase hexadecimal encoding of byte string *s*.

    Serves as an independent reference implementation used to
    cross-check hash objects' hexdigest() against digest().
    """
    import string
    h = string.hexdigits
    r = ''
    for c in s:
        i = ord(c)
        # string.hexdigits begins with '0123456789abcdef', so indexing
        # with a 0-15 nibble value yields lowercase hex characters.
        r = r + h[(i >> 4) & 0xF] + h[i & 0xF]
    return r


class HashLibTestCase(unittest.TestCase):
    """Tests for the hash constructors and digests provided by hashlib."""

    # Algorithm names to exercise; each appears in lower and upper case
    # because tests below pass both spellings to hashlib.new().
    supported_hash_names = ( 'md5', 'MD5', 'sha1', 'SHA1',
                             'sha224', 'SHA224', 'sha256', 'SHA256',
                             'sha384', 'SHA384', 'sha512', 'SHA512' )

    # Only warn about extension-module import failures on debug builds,
    # where a failed C extension compile is more likely a real problem.
    _warn_on_extension_import = COMPILED_WITH_PYDEBUG

    def _conditional_import_module(self, module_name):
        """Import a module and return a reference to it or None on failure."""
        try:
            # exec is used so that the module name can be a variable; in
            # Python 2, exec inside a function stores the imported module
            # in locals(), where it is retrieved below.
            exec('import '+module_name)
        except ImportError, error:
            if self._warn_on_extension_import:
                warnings.warn('Did a C extension fail to compile? %s' % error)
        return locals().get(module_name)

    def __init__(self, *args, **kwargs):
        # Build self.constructors_to_test: a dict mapping each lowercase
        # algorithm name to the set of constructors that must all produce
        # identical digests for that algorithm.
        algorithms = set()
        for algorithm in self.supported_hash_names:
            algorithms.add(algorithm.lower())
        self.constructors_to_test = {}
        for algorithm in algorithms:
            self.constructors_to_test[algorithm] = set()

        # For each algorithm, test the direct constructor and the use
        # of hashlib.new given the algorithm name.
        for algorithm, constructors in self.constructors_to_test.items():
            constructors.add(getattr(hashlib, algorithm))
            # _alg=algorithm binds the loop variable at definition time so
            # each closure keeps its own algorithm name.
            def _test_algorithm_via_hashlib_new(data=None, _alg=algorithm):
                if data is None:
                    return hashlib.new(_alg)
                return hashlib.new(_alg, data)
            constructors.add(_test_algorithm_via_hashlib_new)

        _hashlib = self._conditional_import_module('_hashlib')
        if _hashlib:
            # These two algorithms should always be present when this module
            # is compiled.  If not, something was compiled wrong.
            assert hasattr(_hashlib, 'openssl_md5')
            assert hasattr(_hashlib, 'openssl_sha1')
            # Add the OpenSSL-backed constructors for whichever algorithms
            # this _hashlib build exposes.
            for algorithm, constructors in self.constructors_to_test.items():
                constructor = getattr(_hashlib, 'openssl_'+algorithm, None)
                if constructor:
                    constructors.add(constructor)

        # Also test the pure-C fallback extension modules, when built.
        _md5 = self._conditional_import_module('_md5')
        if _md5:
            self.constructors_to_test['md5'].add(_md5.new)
        _sha = self._conditional_import_module('_sha')
        if _sha:
            self.constructors_to_test['sha1'].add(_sha.new)
        _sha256 = self._conditional_import_module('_sha256')
        if _sha256:
            self.constructors_to_test['sha224'].add(_sha256.sha224)
            self.constructors_to_test['sha256'].add(_sha256.sha256)
        _sha512 = self._conditional_import_module('_sha512')
        if _sha512:
            self.constructors_to_test['sha384'].add(_sha512.sha384)
            self.constructors_to_test['sha512'].add(_sha512.sha512)

        super(HashLibTestCase, self).__init__(*args, **kwargs)

    def test_hash_array(self):
        # Constructors must accept buffer-API objects such as array.array.
        a = array.array("b", range(10))
        constructors = self.constructors_to_test.itervalues()
        for cons in itertools.chain.from_iterable(constructors):
            c = cons(a)
            c.hexdigest()

    def test_algorithms_attribute(self):
        # hashlib.algorithms must list exactly the lowercase names, in the
        # order they appear in supported_hash_names.
        self.assertEqual(hashlib.algorithms,
            tuple([_algo for _algo in self.supported_hash_names if
                   _algo.islower()]))

    def test_algorithms_guaranteed(self):
        # algorithms_guaranteed is a set, so order is irrelevant here.
        self.assertEqual(hashlib.algorithms_guaranteed,
            set(_algo for _algo in self.supported_hash_names
                if _algo.islower()))

    def test_algorithms_available(self):
        # Everything guaranteed must also be reported as available.
        self.assertTrue(set(hashlib.algorithms_guaranteed).
                            issubset(hashlib.algorithms_available))

    def test_unknown_hash(self):
        # Unknown names raise ValueError; non-string names raise TypeError.
        self.assertRaises(ValueError, hashlib.new, 'spam spam spam spam spam')
        self.assertRaises(TypeError, hashlib.new, 1)

    def test_get_builtin_constructor(self):
        """__get_builtin_constructor must fail cleanly for bad or missing names."""
        get_builtin_constructor = hashlib.__dict__[
                '__get_builtin_constructor']
        self.assertRaises(ValueError, get_builtin_constructor, 'test')
        try:
            import _md5
        except ImportError:
            pass
        # This forces an ImportError for "import _md5" statements
        sys.modules['_md5'] = None
        try:
            self.assertRaises(ValueError, get_builtin_constructor, 'md5')
        finally:
            # Restore the real module if the import above succeeded,
            # otherwise remove the None placeholder we installed.
            if '_md5' in locals():
                sys.modules['_md5'] = _md5
            else:
                del sys.modules['_md5']
        self.assertRaises(TypeError, get_builtin_constructor, 3)

    def test_hexdigest(self):
        # hexdigest() must agree with a manual hex encoding of digest().
        for name in self.supported_hash_names:
            h = hashlib.new(name)
            self.assertTrue(hexstr(h.digest()) == h.hexdigest())

    def test_large_update(self):
        """Incremental update() calls must match one-shot hashing."""
        aas = 'a' * 128
        bees = 'b' * 127
        cees = 'c' * 126
        abcs = aas + bees + cees

        for name in self.supported_hash_names:
            # Three separate updates...
            m1 = hashlib.new(name)
            m1.update(aas)
            m1.update(bees)
            m1.update(cees)

            # ...must equal a single update of the concatenation...
            m2 = hashlib.new(name)
            m2.update(abcs)
            self.assertEqual(m1.digest(), m2.digest(), name+' update problem.')

            # ...and equal passing the data directly to the constructor.
            m3 = hashlib.new(name, abcs)
            self.assertEqual(m1.digest(), m3.digest(), name+' new problem.')

    def check(self, name, data, digest):
        """Assert every registered constructor for *name* hashes *data* to *digest*."""
        constructors = self.constructors_to_test[name]
        # 2 is for hashlib.name(...) and hashlib.new(name, ...)
        self.assertGreaterEqual(len(constructors), 2)
        for hash_object_constructor in constructors:
            computed = hash_object_constructor(data).hexdigest()
            self.assertEqual(
                    computed, digest,
                    "Hash algorithm %s constructed using %s returned hexdigest"
                    " %r for %d byte input data that should have hashed to %r."
                    % (name, hash_object_constructor,
                       computed, len(data), digest))

    def check_update(self, name, data, digest):
        """Like check(), but feeds *data* via update() after construction."""
        constructors = self.constructors_to_test[name]
        # 2 is for hashlib.name(...) and hashlib.new(name, ...)
        self.assertGreaterEqual(len(constructors), 2)
        for hash_object_constructor in constructors:
            h = hash_object_constructor()
            h.update(data)
            computed = h.hexdigest()
            self.assertEqual(
                    computed, digest,
                    "Hash algorithm %s using %s when updated returned hexdigest"
                    " %r for %d byte input data that should have hashed to %r."
                    % (name, hash_object_constructor,
                       computed, len(data), digest))

    def check_unicode(self, algorithm_name):
        # Unicode objects are not allowed as input.
        # str() coerces u'spam' to a byte string here, so the expected
        # digest is that of the encoded bytes.
        expected = hashlib.new(algorithm_name, str(u'spam')).hexdigest()
        self.check(algorithm_name, u'spam', expected)

    def test_unicode(self):
        # In python 2.x unicode is auto-encoded to the system default encoding
        # when passed to hashlib functions.
        self.check_unicode('md5')
        self.check_unicode('sha1')
        self.check_unicode('sha224')
        self.check_unicode('sha256')
        self.check_unicode('sha384')
        self.check_unicode('sha512')

    def test_case_md5_0(self):
        self.check('md5', '', 'd41d8cd98f00b204e9800998ecf8427e')

    def test_case_md5_1(self):
        self.check('md5', 'abc', '900150983cd24fb0d6963f7d28e17f72')

    def test_case_md5_2(self):
        self.check('md5', 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789',
                   'd174ab98d277d9f5a5611c2c9f419d9f')

    # Inputs just over and just under 4 GiB exercise the 32-bit length
    # boundaries inside the hash implementations.
    @unittest.skipIf(sys.maxsize < _4G + 5, 'test cannot run on 32-bit systems')
    @precisionbigmemtest(size=_4G + 5, memuse=1, dry_run=False)
    def test_case_md5_huge(self, size):
        self.check('md5', 'A'*size, 'c9af2dff37468ce5dfee8f2cfc0a9c6d')

    @unittest.skipIf(sys.maxsize < _4G + 5, 'test cannot run on 32-bit systems')
    @precisionbigmemtest(size=_4G + 5, memuse=1, dry_run=False)
    def test_case_md5_huge_update(self, size):
        self.check_update('md5', 'A'*size, 'c9af2dff37468ce5dfee8f2cfc0a9c6d')

    @unittest.skipIf(sys.maxsize < _4G - 1, 'test cannot run on 32-bit systems')
    @precisionbigmemtest(size=_4G - 1, memuse=1, dry_run=False)
    def test_case_md5_uintmax(self, size):
        self.check('md5', 'A'*size, '28138d306ff1b8281f1a9067e1a1a2b3')

    # use the three examples from Federal Information Processing Standards
    # Publication 180-1, Secure Hash Standard, 1995 April 17
    # http://www.itl.nist.gov/div897/pubs/fip180-1.htm

    def test_case_sha1_0(self):
        self.check('sha1', "",
                   "da39a3ee5e6b4b0d3255bfef95601890afd80709")

    def test_case_sha1_1(self):
        self.check('sha1', "abc",
                   "a9993e364706816aba3e25717850c26c9cd0d89d")

    def test_case_sha1_2(self):
        self.check('sha1', "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq",
                   "84983e441c3bd26ebaae4aa1f95129e5e54670f1")

    def test_case_sha1_3(self):
        self.check('sha1', "a" * 1000000,
                   "34aa973cd4c4daa4f61eeb2bdbad27316534016f")

    @precisionbigmemtest(size=_4G + 5, memuse=1)
    def test_case_sha1_huge(self, size):
        # The size guard lets this run only when bigmem actually granted
        # the full allocation.
        if size == _4G + 5:
            try:
                self.check('sha1', 'A'*size,
                           '87d745c50e6b2879ffa0fb2c930e9fbfe0dc9a5b')
            except OverflowError:
                pass # 32-bit arch

    @precisionbigmemtest(size=_4G + 5, memuse=1)
    def test_case_sha1_huge_update(self, size):
        if size == _4G + 5:
            try:
                self.check_update('sha1', 'A'*size,
                                  '87d745c50e6b2879ffa0fb2c930e9fbfe0dc9a5b')
            except OverflowError:
                pass # 32-bit arch

    # use the examples from Federal Information Processing Standards
    # Publication 180-2, Secure Hash Standard, 2002 August 1
    # http://csrc.nist.gov/publications/fips/fips180-2/fips180-2.pdf

    def test_case_sha224_0(self):
        self.check('sha224', "",
          "d14a028c2a3a2bc9476102bb288234c415a2b01f828ea62ac5b3e42f")

    def test_case_sha224_1(self):
        self.check('sha224', "abc",
          "23097d223405d8228642a477bda255b32aadbce4bda0b3f7e36c9da7")

    def test_case_sha224_2(self):
        self.check('sha224',
          "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq",
          "75388b16512776cc5dba5da1fd890150b0c6455cb4f58b1952522525")

    def test_case_sha224_3(self):
        self.check('sha224', "a" * 1000000,
          "20794655980c91d8bbb4c1ea97618a4bf03f42581948b2ee4ee7ad67")


    def test_case_sha256_0(self):
        self.check('sha256', "",
          "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855")

    def test_case_sha256_1(self):
        self.check('sha256', "abc",
          "ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad")

    def test_case_sha256_2(self):
        self.check('sha256',
          "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq",
          "248d6a61d20638b8e5c026930c3e6039a33ce45964ff2167f6ecedd419db06c1")

    def test_case_sha256_3(self):
        self.check('sha256', "a" * 1000000,
          "cdc76e5c9914fb9281a1c7e284d73e67f1809a48a497200e046d39ccc7112cd0")


    def test_case_sha384_0(self):
        self.check('sha384', "",
          "38b060a751ac96384cd9327eb1b1e36a21fdb71114be07434c0cc7bf63f6e1da"+
          "274edebfe76f65fbd51ad2f14898b95b")

    def test_case_sha384_1(self):
        self.check('sha384', "abc",
          "cb00753f45a35e8bb5a03d699ac65007272c32ab0eded1631a8b605a43ff5bed"+
          "8086072ba1e7cc2358baeca134c825a7")

    def test_case_sha384_2(self):
        self.check('sha384',
          "abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmn"+
          "hijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu",
          "09330c33f71147e83d192fc782cd1b4753111b173b3b05d22fa08086e3b0f712"+
          "fcc7c71a557e2db966c3e9fa91746039")

    def test_case_sha384_3(self):
        self.check('sha384', "a" * 1000000,
          "9d0e1809716474cb086e834e310a4a1ced149e9c00f248527972cec5704c2a5b"+
          "07b8b3dc38ecc4ebae97ddd87f3d8985")


    def test_case_sha512_0(self):
        self.check('sha512', "",
          "cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce"+
          "47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e")

    def test_case_sha512_1(self):
        self.check('sha512', "abc",
          "ddaf35a193617abacc417349ae20413112e6fa4e89a97ea20a9eeee64b55d39a"+
          "2192992a274fc1a836ba3c23a3feebbd454d4423643ce80e2a9ac94fa54ca49f")

    def test_case_sha512_2(self):
        self.check('sha512',
          "abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmn"+
          "hijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu",
          "8e959b75dae313da8cf4f72814fc143f8f7779c6eb9f7fa17299aeadb6889018"+
          "501d289e4900f7e4331b99dec4b5433ac7d329eeb6dd26545e96e55b874be909")

    def test_case_sha512_3(self):
        self.check('sha512', "a" * 1000000,
          "e718483d0ce769644e2e42c7bc15b4638e1f98b13b2044285632a803afa973eb"+
          "de0ff244877ea60a4cb0432ce577c31beb009c5c2c49aa2e4eadb217ad8cc09b")

    @unittest.skipUnless(threading, 'Threading required for this test.')
    @test_support.reap_threads
    def test_threaded_hashing(self):
        # Updating the same hash object from several threads at once
        # using data chunk sizes containing the same byte sequences.
        #
        # If the internal locks are working to prevent multiple
        # updates on the same object from running at once, the resulting
        # hash will be the same as doing it single threaded upfront.
        hasher = hashlib.sha1()
        num_threads = 5
        smallest_data = 'swineflu'
        data = smallest_data*200000
        expected_hash = hashlib.sha1(data*num_threads).hexdigest()

        def hash_in_chunks(chunk_size, event):
            # Feed all of `data` into the shared hasher in chunk_size pieces,
            # then signal completion via `event`.
            index = 0
            while index < len(data):
                hasher.update(data[index:index+chunk_size])
                index += chunk_size
            event.set()

        events = []
        for threadnum in xrange(num_threads):
            # Each thread uses a different chunk size (whole buffer, 1/10th,
            # 1/100th, ...), all multiples of the repeating unit so the total
            # bytes hashed are identical regardless of interleaving.
            chunk_size = len(data) // (10**threadnum)
            assert chunk_size > 0
            assert chunk_size % len(smallest_data) == 0
            event = threading.Event()
            events.append(event)
            threading.Thread(target=hash_in_chunks,
                             args=(chunk_size, event)).start()

        for event in events:
            event.wait()

        self.assertEqual(expected_hash, hasher.hexdigest())


class KDFTests(unittest.TestCase):
    """Tests for hashlib.pbkdf2_hmac()."""

    # (password, salt, iteration count, dklen); a None dklen means "use the
    # digest's default output size".  Per-digest expected results may
    # override dklen (see pbkdf2_results below).
    pbkdf2_test_vectors = [
        (b'password', b'salt', 1, None),
        (b'password', b'salt', 2, None),
        (b'password', b'salt', 4096, None),
        # too slow, it takes over a minute on a fast CPU.
        #(b'password', b'salt', 16777216, None),
        (b'passwordPASSWORDpassword', b'saltSALTsaltSALTsaltSALTsaltSALTsalt',
         4096, -1),
        (b'pass\0word', b'sa\0lt', 4096, 16),
    ]

    # digest name -> list of (expected output, dklen override) parallel to
    # pbkdf2_test_vectors.
    pbkdf2_results = {
        "sha1": [
            # official test vectors from RFC 6070
            (unhexlify('0c60c80f961f0e71f3a9b524af6012062fe037a6'), None),
            (unhexlify('ea6c014dc72d6f8ccd1ed92ace1d41f0d8de8957'), None),
            (unhexlify('4b007901b765489abead49d926f721d065a429c1'), None),
            #(unhexlify('eefe3d61cd4da4e4e9945b3d6ba2158c2634e984'), None),
            (unhexlify('3d2eec4fe41c849b80c8d83662c0e44a8b291a964c'
                       'f2f07038'), 25),
            (unhexlify('56fa6aa75548099dcc37d7f03425e0c3'), None),],
        "sha256": [
            (unhexlify('120fb6cffcf8b32c43e7225256c4f837'
                       'a86548c92ccc35480805987cb70be17b'), None),
            (unhexlify('ae4d0c95af6b46d32d0adff928f06dd0'
                       '2a303f8ef3c251dfd6e2d85a95474c43'), None),
            (unhexlify('c5e478d59288c841aa530db6845c4c8d'
                       '962893a001ce4e11a4963873aa98134a'), None),
            #(unhexlify('cf81c66fe8cfc04d1f31ecb65dab4089'
            #           'f7f179e89b3b0bcb17ad10e3ac6eba46'), None),
            (unhexlify('348c89dbcbd32b2f32d814b8116e84cf2b17'
                       '347ebc1800181c4e2a1fb8dd53e1c635518c7dac47e9'), 40),
            (unhexlify('89b69d0516f829893c696226650a8687'), None),],
        "sha512": [
            (unhexlify('867f70cf1ade02cff3752599a3a53dc4af34c7a669815ae5'
                       'd513554e1c8cf252c02d470a285a0501bad999bfe943c08f'
                       '050235d7d68b1da55e63f73b60a57fce'), None),
            (unhexlify('e1d9c16aa681708a45f5c7c4e215ceb66e011a2e9f004071'
                       '3f18aefdb866d53cf76cab2868a39b9f7840edce4fef5a82'
                       'be67335c77a6068e04112754f27ccf4e'), None),
            (unhexlify('d197b1b33db0143e018b12f3d1d1479e6cdebdcc97c5c0f8'
                       '7f6902e072f457b5143f30602641b3d55cd335988cb36b84'
                       '376060ecd532e039b742a239434af2d5'), None),
            (unhexlify('8c0511f4c6e597c6ac6315d8f0362e225f3c501495ba23b8'
                       '68c005174dc4ee71115b59f9e60cd9532fa33e0f75aefe30'
                       '225c583a186cd82bd4daea9724a3d3b8'), 64),
            (unhexlify('9d9e9c4cd21fe4be24d5b8244c759665'), None),],
    }

    def test_pbkdf2_hmac(self):
        """Run every test vector against every supported digest."""
        for digest_name, results in self.pbkdf2_results.items():
            for i, vector in enumerate(self.pbkdf2_test_vectors):
                password, salt, rounds, dklen = vector
                expected, overwrite_dklen = results[i]
                # Some results carry a digest-specific dklen (e.g. replacing
                # the -1 placeholder in the shared vector list).
                if overwrite_dklen:
                    dklen = overwrite_dklen
                out = hashlib.pbkdf2_hmac(
                    digest_name, password, salt, rounds, dklen)
                self.assertEqual(out, expected,
                                 (digest_name, password, salt, rounds, dklen))


def test_main():
    """Entry point used by the regression-test framework."""
    test_support.run_unittest(HashLibTestCase, KDFTests)

if __name__ == "__main__":
    test_main()