# Test the hashlib module.
#
# Copyright (C) 2005-2010 Gregory P. Smith (greg@krypto.org)
# Licensed to PSF under a Contributor Agreement.

import array
import hashlib
import itertools
import sys
try:
    import threading
except ImportError:
    threading = None
import unittest
import warnings

from test import test_support
from test.test_support import _4G, precisionbigmemtest

# Were we compiled --with-pydebug or with #define Py_DEBUG?
COMPILED_WITH_PYDEBUG = hasattr(sys, 'gettotalrefcount')
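# (Only debug builds define sys.gettotalrefcount; compiled extension modules
# are more likely to be missing or stale under such builds, which is why the
# failed-import warning below is only enabled for them.)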


def hexstr(s):
    # Build the lowercase hex representation of byte string s, one byte
    # (two hex digits) at a time.
    import string
    h = string.hexdigits
    r = ''
    for c in s:
        i = ord(c)
        r = r + h[(i >> 4) & 0xF] + h[i & 0xF]
    return r
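    # For example, hexstr('\x00\xff') returns '00ff' -- the same encoding
    # that a hash object's hexdigest() applies to its digest() output.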


class HashLibTestCase(unittest.TestCase):
    supported_hash_names = ( 'md5', 'MD5', 'sha1', 'SHA1',
                             'sha224', 'SHA224', 'sha256', 'SHA256',
                             'sha384', 'SHA384', 'sha512', 'SHA512' )

    _warn_on_extension_import = COMPILED_WITH_PYDEBUG

    def _conditional_import_module(self, module_name):
        """Import a module and return a reference to it or None on failure."""
        try:
            exec('import '+module_name)
        except ImportError, error:
            if self._warn_on_extension_import:
                warnings.warn('Did a C extension fail to compile? %s' % error)
        return locals().get(module_name)
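        # Callers receive the module object when the extension imported
        # cleanly, and None otherwise, so every use below is guarded.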

    def __init__(self, *args, **kwargs):
        algorithms = set()
        for algorithm in self.supported_hash_names:
            algorithms.add(algorithm.lower())
        self.constructors_to_test = {}
        for algorithm in algorithms:
            self.constructors_to_test[algorithm] = set()

        # For each algorithm, test the direct constructor and the use
        # of hashlib.new given the algorithm name.
        for algorithm, constructors in self.constructors_to_test.items():
            constructors.add(getattr(hashlib, algorithm))
            def _test_algorithm_via_hashlib_new(data=None, _alg=algorithm):
                if data is None:
                    return hashlib.new(_alg)
                return hashlib.new(_alg, data)
            constructors.add(_test_algorithm_via_hashlib_new)
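            # Note that _alg=algorithm deliberately binds the current loop
            # value as a default argument; a plain closure over 'algorithm'
            # would leave every helper hashing with the last name iterated.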

        _hashlib = self._conditional_import_module('_hashlib')
        if _hashlib:
            # These two algorithms should always be present when this module
            # is compiled.  If not, something was compiled wrong.
            assert hasattr(_hashlib, 'openssl_md5')
            assert hasattr(_hashlib, 'openssl_sha1')
            for algorithm, constructors in self.constructors_to_test.items():
                constructor = getattr(_hashlib, 'openssl_'+algorithm, None)
                if constructor:
                    constructors.add(constructor)

        _md5 = self._conditional_import_module('_md5')
        if _md5:
            self.constructors_to_test['md5'].add(_md5.new)
        _sha = self._conditional_import_module('_sha')
        if _sha:
            self.constructors_to_test['sha1'].add(_sha.new)
        _sha256 = self._conditional_import_module('_sha256')
        if _sha256:
            self.constructors_to_test['sha224'].add(_sha256.sha224)
            self.constructors_to_test['sha256'].add(_sha256.sha256)
        _sha512 = self._conditional_import_module('_sha512')
        if _sha512:
            self.constructors_to_test['sha384'].add(_sha512.sha384)
            self.constructors_to_test['sha512'].add(_sha512.sha512)
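        # _md5, _sha, _sha256 and _sha512 are the legacy built-in hash
        # modules; they are normally only compiled when Python is built
        # without OpenSSL, hence the guards above.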

        super(HashLibTestCase, self).__init__(*args, **kwargs)

    def test_hash_array(self):
        a = array.array("b", range(10))
        constructors = self.constructors_to_test.itervalues()
        for cons in itertools.chain.from_iterable(constructors):
            c = cons(a)
            c.hexdigest()

    def test_algorithms_attribute(self):
        self.assertEqual(hashlib.algorithms,
            tuple([_algo for _algo in self.supported_hash_names
                   if _algo.islower()]))

    def test_unknown_hash(self):
        try:
            hashlib.new('spam spam spam spam spam')
        except ValueError:
            pass
        else:
            self.fail("hashlib didn't reject bogus hash name")

    def test_hexdigest(self):
        for name in self.supported_hash_names:
            h = hashlib.new(name)
            self.assertEqual(hexstr(h.digest()), h.hexdigest())

    def test_large_update(self):
        aas = 'a' * 128
        bees = 'b' * 127
        cees = 'c' * 126
        abcs = aas + bees + cees

        for name in self.supported_hash_names:
            # Feeding the data piecewise must match feeding it all at once.
            m1 = hashlib.new(name)
            m1.update(aas)
            m1.update(bees)
            m1.update(cees)

            m2 = hashlib.new(name)
            m2.update(abcs)
            self.assertEqual(m1.digest(), m2.digest(), name+' update problem.')

            m3 = hashlib.new(name, abcs)
            self.assertEqual(m1.digest(), m3.digest(), name+' new problem.')

    def check(self, name, data, digest):
        constructors = self.constructors_to_test[name]
        # 2 is for hashlib.<name>(...) and hashlib.new(name, ...)
        self.assertGreaterEqual(len(constructors), 2)
        for hash_object_constructor in constructors:
            computed = hash_object_constructor(data).hexdigest()
            self.assertEqual(
                    computed, digest,
                    "Hash algorithm %s constructed using %s returned hexdigest"
                    " %r for %d byte input data that should have hashed to %r."
                    % (name, hash_object_constructor,
                       computed, len(data), digest))

    def check_unicode(self, algorithm_name):
        # ASCII-only unicode input must produce the same digest as the
        # equivalent byte string.
        expected = hashlib.new(algorithm_name, str(u'spam')).hexdigest()
        self.check(algorithm_name, u'spam', expected)

    def test_unicode(self):
        # In Python 2.x unicode is auto-encoded to the system default
        # encoding when passed to hashlib functions.
        self.check_unicode('md5')
        self.check_unicode('sha1')
        self.check_unicode('sha224')
        self.check_unicode('sha256')
        self.check_unicode('sha384')
        self.check_unicode('sha512')
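        # With the usual ASCII default codec, u'spam' therefore hashes
        # exactly like 'spam', while non-ASCII unicode raises
        # UnicodeEncodeError rather than silently hashing other bytes.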

    def test_case_md5_0(self):
        self.check('md5', '', 'd41d8cd98f00b204e9800998ecf8427e')

    def test_case_md5_1(self):
        self.check('md5', 'abc', '900150983cd24fb0d6963f7d28e17f72')

    def test_case_md5_2(self):
        self.check('md5',
                   'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789',
                   'd174ab98d277d9f5a5611c2c9f419d9f')

    @precisionbigmemtest(size=_4G + 5, memuse=1)
    def test_case_md5_huge(self, size):
        if size == _4G + 5:
            try:
                self.check('md5', 'A'*size, 'c9af2dff37468ce5dfee8f2cfc0a9c6d')
            except OverflowError:
                pass # 32-bit arch

    @precisionbigmemtest(size=_4G - 1, memuse=1)
    def test_case_md5_uintmax(self, size):
        if size == _4G - 1:
            try:
                self.check('md5', 'A'*size, '28138d306ff1b8281f1a9067e1a1a2b3')
            except OverflowError:
                pass # 32-bit arch

    # use the three examples from Federal Information Processing Standards
    # Publication 180-1, Secure Hash Standard, 1995 April 17
    # http://www.itl.nist.gov/div897/pubs/fip180-1.htm

    def test_case_sha1_0(self):
        self.check('sha1', "",
                   "da39a3ee5e6b4b0d3255bfef95601890afd80709")

    def test_case_sha1_1(self):
        self.check('sha1', "abc",
                   "a9993e364706816aba3e25717850c26c9cd0d89d")

    def test_case_sha1_2(self):
        self.check('sha1',
                   "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq",
                   "84983e441c3bd26ebaae4aa1f95129e5e54670f1")

    def test_case_sha1_3(self):
        self.check('sha1', "a" * 1000000,
                   "34aa973cd4c4daa4f61eeb2bdbad27316534016f")

    # use the examples from Federal Information Processing Standards
    # Publication 180-2, Secure Hash Standard, 2002 August 1
    # http://csrc.nist.gov/publications/fips/fips180-2/fips180-2.pdf

    def test_case_sha224_0(self):
        self.check('sha224', "",
                   "d14a028c2a3a2bc9476102bb288234c415a2b01f828ea62ac5b3e42f")

    def test_case_sha224_1(self):
        self.check('sha224', "abc",
                   "23097d223405d8228642a477bda255b32aadbce4bda0b3f7e36c9da7")

    def test_case_sha224_2(self):
        self.check('sha224',
                   "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq",
                   "75388b16512776cc5dba5da1fd890150b0c6455cb4f58b1952522525")

    def test_case_sha224_3(self):
        self.check('sha224', "a" * 1000000,
                   "20794655980c91d8bbb4c1ea97618a4bf03f42581948b2ee4ee7ad67")

    def test_case_sha256_0(self):
        self.check('sha256', "",
                   "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855")

    def test_case_sha256_1(self):
        self.check('sha256', "abc",
                   "ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad")

    def test_case_sha256_2(self):
        self.check('sha256',
                   "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq",
                   "248d6a61d20638b8e5c026930c3e6039a33ce45964ff2167f6ecedd419db06c1")

    def test_case_sha256_3(self):
        self.check('sha256', "a" * 1000000,
                   "cdc76e5c9914fb9281a1c7e284d73e67f1809a48a497200e046d39ccc7112cd0")

    def test_case_sha384_0(self):
        self.check('sha384', "",
                   "38b060a751ac96384cd9327eb1b1e36a21fdb71114be07434c0cc7bf63f6e1da"+
                   "274edebfe76f65fbd51ad2f14898b95b")

    def test_case_sha384_1(self):
        self.check('sha384', "abc",
                   "cb00753f45a35e8bb5a03d699ac65007272c32ab0eded1631a8b605a43ff5bed"+
                   "8086072ba1e7cc2358baeca134c825a7")

    def test_case_sha384_2(self):
        self.check('sha384',
                   "abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmn"+
                   "hijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu",
                   "09330c33f71147e83d192fc782cd1b4753111b173b3b05d22fa08086e3b0f712"+
                   "fcc7c71a557e2db966c3e9fa91746039")

    def test_case_sha384_3(self):
        self.check('sha384', "a" * 1000000,
                   "9d0e1809716474cb086e834e310a4a1ced149e9c00f248527972cec5704c2a5b"+
                   "07b8b3dc38ecc4ebae97ddd87f3d8985")

    def test_case_sha512_0(self):
        self.check('sha512', "",
                   "cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce"+
                   "47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e")

    def test_case_sha512_1(self):
        self.check('sha512', "abc",
                   "ddaf35a193617abacc417349ae20413112e6fa4e89a97ea20a9eeee64b55d39a"+
                   "2192992a274fc1a836ba3c23a3feebbd454d4423643ce80e2a9ac94fa54ca49f")

    def test_case_sha512_2(self):
        self.check('sha512',
                   "abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmn"+
                   "hijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu",
                   "8e959b75dae313da8cf4f72814fc143f8f7779c6eb9f7fa17299aeadb6889018"+
                   "501d289e4900f7e4331b99dec4b5433ac7d329eeb6dd26545e96e55b874be909")

    def test_case_sha512_3(self):
        self.check('sha512', "a" * 1000000,
                   "e718483d0ce769644e2e42c7bc15b4638e1f98b13b2044285632a803afa973eb"+
                   "de0ff244877ea60a4cb0432ce577c31beb009c5c2c49aa2e4eadb217ad8cc09b")

    @unittest.skipUnless(threading, 'Threading required for this test.')
    @test_support.reap_threads
    def test_threaded_hashing(self):
        # Updating the same hash object from several threads at once
        # using data chunk sizes containing the same byte sequences.
        #
        # If the internal locks are working to prevent multiple
        # updates on the same object from running at once, the resulting
        # hash will be the same as doing it single threaded upfront.
        hasher = hashlib.sha1()
        num_threads = 5
        smallest_data = 'swineflu'
        data = smallest_data*200000
        expected_hash = hashlib.sha1(data*num_threads).hexdigest()

        def hash_in_chunks(chunk_size, event):
            index = 0
            while index < len(data):
                hasher.update(data[index:index+chunk_size])
                index += chunk_size
            event.set()

        events = []
        for threadnum in xrange(num_threads):
            chunk_size = len(data) // (10**threadnum)
            assert chunk_size > 0
            assert chunk_size % len(smallest_data) == 0
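            # Every chunk is a whole multiple of smallest_data, and data is
            # smallest_data repeated, so any serialized interleaving of the
            # threads' updates feeds the hasher exactly data*num_threads.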
            event = threading.Event()
            events.append(event)
            threading.Thread(target=hash_in_chunks,
                             args=(chunk_size, event)).start()

        for event in events:
            event.wait()

        self.assertEqual(expected_hash, hasher.hexdigest())


def test_main():
    test_support.run_unittest(HashLibTestCase)


if __name__ == "__main__":
    test_main()