# Test hashlib module
#
# $Id$
#
# Copyright (C) 2005-2009  Gregory P. Smith (greg@krypto.org)
# Licensed to PSF under a Contributor Agreement.
#

import hashlib
import StringIO
try:
    import threading
except ImportError:
    threading = None
import unittest
from test import test_support
from test.test_support import _4G, precisionbigmemtest

def hexstr(s):
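    # Pure-Python hex encoding of a byte string; test_hexdigest uses it to
    # cross-check the hexdigest() output of each hash object.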
    import string
    h = string.hexdigits
    r = ''
    for c in s:
        i = ord(c)
        r = r + h[(i >> 4) & 0xF] + h[i & 0xF]
    return r


class HashLibTestCase(unittest.TestCase):
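    # hashlib.new() must accept both the lowercase and uppercase spelling of
    # every supported algorithm name.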
    supported_hash_names = ( 'md5', 'MD5', 'sha1', 'SHA1',
                             'sha224', 'SHA224', 'sha256', 'SHA256',
                             'sha384', 'SHA384', 'sha512', 'SHA512' )

    def test_unknown_hash(self):
        try:
            hashlib.new('spam spam spam spam spam')
        except ValueError:
            pass
        else:
            self.fail("hashlib didn't reject bogus hash name")

    def test_hexdigest(self):
        for name in self.supported_hash_names:
            h = hashlib.new(name)
            self.assertEqual(hexstr(h.digest()), h.hexdigest())

    def test_large_update(self):
        aas = 'a' * 128
        bees = 'b' * 127
        cees = 'c' * 126
        abcs = aas + bees + cees

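        # Hashing the pieces with repeated update() calls, hashing the
        # concatenation in a single update(), and passing the data to the
        # constructor must all yield the same digest.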
        for name in self.supported_hash_names:
            m1 = hashlib.new(name)
            m1.update(aas)
            m1.update(bees)
            m1.update(cees)

            m2 = hashlib.new(name)
            m2.update(abcs)
            self.assertEqual(m1.digest(), m2.digest(), name+' update problem.')

            m3 = hashlib.new(name, abcs)
            self.assertEqual(m1.digest(), m3.digest(), name+' new problem.')

    def check(self, name, data, digest):
        # test the direct constructors
        computed = getattr(hashlib, name)(data).hexdigest()
        self.assertEqual(computed, digest)
        # test the general new() interface
        computed = hashlib.new(name, data).hexdigest()
        self.assertEqual(computed, digest)

    def check_no_unicode(self, algorithm_name):
        # Unicode objects are not allowed as input.
        self.assertRaises(TypeError, getattr(hashlib, algorithm_name), u'spam')
        self.assertRaises(TypeError, hashlib.new, algorithm_name, u'spam')

    def test_no_unicode(self):
        self.check_no_unicode('md5')
        self.check_no_unicode('sha1')
        self.check_no_unicode('sha224')
        self.check_no_unicode('sha256')
        self.check_no_unicode('sha384')
        self.check_no_unicode('sha512')

    def test_case_md5_0(self):
        self.check('md5', '', 'd41d8cd98f00b204e9800998ecf8427e')

    def test_case_md5_1(self):
        self.check('md5', 'abc', '900150983cd24fb0d6963f7d28e17f72')

    def test_case_md5_2(self):
        self.check('md5',
                   'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789',
                   'd174ab98d277d9f5a5611c2c9f419d9f')

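    # The two cases below hash inputs around the 4 GiB boundary; on 32-bit
    # builds the update may raise OverflowError, which is tolerated.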
    @precisionbigmemtest(size=_4G + 5, memuse=1)
    def test_case_md5_huge(self, size):
        if size == _4G + 5:
            try:
                self.check('md5', 'A'*size, 'c9af2dff37468ce5dfee8f2cfc0a9c6d')
            except OverflowError:
                pass  # 32-bit arch

    @precisionbigmemtest(size=_4G - 1, memuse=1)
    def test_case_md5_uintmax(self, size):
        if size == _4G - 1:
            try:
                self.check('md5', 'A'*size, '28138d306ff1b8281f1a9067e1a1a2b3')
            except OverflowError:
                pass  # 32-bit arch

    # use the three examples from Federal Information Processing Standards
    # Publication 180-1, Secure Hash Standard, 1995 April 17
    # http://www.itl.nist.gov/div897/pubs/fip180-1.htm

    def test_case_sha1_0(self):
        self.check('sha1', "",
                   "da39a3ee5e6b4b0d3255bfef95601890afd80709")

    def test_case_sha1_1(self):
        self.check('sha1', "abc",
                   "a9993e364706816aba3e25717850c26c9cd0d89d")

    def test_case_sha1_2(self):
        self.check('sha1',
                   "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq",
                   "84983e441c3bd26ebaae4aa1f95129e5e54670f1")

    def test_case_sha1_3(self):
        self.check('sha1', "a" * 1000000,
                   "34aa973cd4c4daa4f61eeb2bdbad27316534016f")

    # use the examples from Federal Information Processing Standards
    # Publication 180-2, Secure Hash Standard, 2002 August 1
    # http://csrc.nist.gov/publications/fips/fips180-2/fips180-2.pdf

    def test_case_sha224_0(self):
        self.check('sha224', "",
                   "d14a028c2a3a2bc9476102bb288234c415a2b01f828ea62ac5b3e42f")

    def test_case_sha224_1(self):
        self.check('sha224', "abc",
                   "23097d223405d8228642a477bda255b32aadbce4bda0b3f7e36c9da7")

    def test_case_sha224_2(self):
        self.check('sha224',
                   "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq",
                   "75388b16512776cc5dba5da1fd890150b0c6455cb4f58b1952522525")

    def test_case_sha224_3(self):
        self.check('sha224', "a" * 1000000,
                   "20794655980c91d8bbb4c1ea97618a4bf03f42581948b2ee4ee7ad67")

    def test_case_sha256_0(self):
        self.check('sha256', "",
                   "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855")

    def test_case_sha256_1(self):
        self.check('sha256', "abc",
                   "ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad")

    def test_case_sha256_2(self):
        self.check('sha256',
                   "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq",
                   "248d6a61d20638b8e5c026930c3e6039a33ce45964ff2167f6ecedd419db06c1")

    def test_case_sha256_3(self):
        self.check('sha256', "a" * 1000000,
                   "cdc76e5c9914fb9281a1c7e284d73e67f1809a48a497200e046d39ccc7112cd0")

    def test_case_sha384_0(self):
        self.check('sha384', "",
                   "38b060a751ac96384cd9327eb1b1e36a21fdb71114be07434c0cc7bf63f6e1da" +
                   "274edebfe76f65fbd51ad2f14898b95b")

    def test_case_sha384_1(self):
        self.check('sha384', "abc",
                   "cb00753f45a35e8bb5a03d699ac65007272c32ab0eded1631a8b605a43ff5bed" +
                   "8086072ba1e7cc2358baeca134c825a7")

    def test_case_sha384_2(self):
        self.check('sha384',
                   "abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmn" +
                   "hijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu",
                   "09330c33f71147e83d192fc782cd1b4753111b173b3b05d22fa08086e3b0f712" +
                   "fcc7c71a557e2db966c3e9fa91746039")

    def test_case_sha384_3(self):
        self.check('sha384', "a" * 1000000,
                   "9d0e1809716474cb086e834e310a4a1ced149e9c00f248527972cec5704c2a5b" +
                   "07b8b3dc38ecc4ebae97ddd87f3d8985")

    def test_case_sha512_0(self):
        self.check('sha512', "",
                   "cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce" +
                   "47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e")

    def test_case_sha512_1(self):
        self.check('sha512', "abc",
                   "ddaf35a193617abacc417349ae20413112e6fa4e89a97ea20a9eeee64b55d39a" +
                   "2192992a274fc1a836ba3c23a3feebbd454d4423643ce80e2a9ac94fa54ca49f")

    def test_case_sha512_2(self):
        self.check('sha512',
                   "abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmn" +
                   "hijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu",
                   "8e959b75dae313da8cf4f72814fc143f8f7779c6eb9f7fa17299aeadb6889018" +
                   "501d289e4900f7e4331b99dec4b5433ac7d329eeb6dd26545e96e55b874be909")

    def test_case_sha512_3(self):
        self.check('sha512', "a" * 1000000,
                   "e718483d0ce769644e2e42c7bc15b4638e1f98b13b2044285632a803afa973eb" +
                   "de0ff244877ea60a4cb0432ce577c31beb009c5c2c49aa2e4eadb217ad8cc09b")

    def test_threaded_hashing(self):
        if not threading:
            raise unittest.SkipTest('No threading module.')

        # Updating the same hash object from several threads at once
        # using data chunk sizes containing the same byte sequences.
        #
        # If the internal locks are working to prevent multiple
        # updates on the same object from running at once, the resulting
        # hash will be the same as doing it single threaded upfront.
        hasher = hashlib.sha1()
        num_threads = 5
        smallest_data = 'swineflu'
        data = smallest_data*200000
        expected_hash = hashlib.sha1(data*num_threads).hexdigest()

        def hash_in_chunks(chunk_size, event):
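            # Feed the shared hasher the whole data string in chunk_size
            # pieces, then signal completion on the event.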
            index = 0
            while index < len(data):
                hasher.update(data[index:index+chunk_size])
                index += chunk_size
            event.set()

        events = []
        for threadnum in xrange(num_threads):
            chunk_size = len(data) // (10**threadnum)
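            # Each successive thread hashes in chunks ten times smaller than
            # the previous one, and every chunk is a whole number of copies
            # of smallest_data.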
            assert chunk_size > 0
            assert chunk_size % len(smallest_data) == 0
            event = threading.Event()
            events.append(event)
            threading.Thread(target=hash_in_chunks,
                             args=(chunk_size, event)).start()

        for event in events:
            event.wait()

        self.assertEqual(expected_hash, hasher.hexdigest())

@test_support.reap_threads
def test_main():
    test_support.run_unittest(HashLibTestCase)


if __name__ == "__main__":
    test_main()