crypto: sha3-generic - deal with oversize stack frames
crypto/sha3_generic.c
/*
 * Cryptographic API.
 *
 * SHA-3, as specified in
 * http://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.202.pdf
 *
 * SHA-3 code by Jeff Garzik <jeff@garzik.org>
 *              Ard Biesheuvel <ard.biesheuvel@linaro.org>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 */
#include <crypto/internal/hash.h>
#include <linux/init.h>
#include <linux/module.h>
#include <linux/types.h>
#include <crypto/sha3.h>
#include <asm/unaligned.h>
/*
 * On some 32-bit architectures (mn10300 and h8300), GCC ends up using
 * over 1 KB of stack if we inline the round calculation into the loop
 * in keccakf(). On the other hand, on 64-bit architectures with plenty
 * of [64-bit wide] general purpose registers, not inlining it severely
 * hurts performance. So let's use 64-bitness as a heuristic to decide
 * whether to inline or not.
 */
#ifdef CONFIG_64BIT
#define SHA3_INLINE	inline
#else
#define SHA3_INLINE	noinline
#endif

#define KECCAK_ROUNDS 24
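
/*
 * Iota step round constants from the Keccak reference, one per round;
 * keccakf() XORs keccakf_rndc[round] into lane st[0] after each round.
 */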
static const u64 keccakf_rndc[24] = {
	0x0000000000000001ULL, 0x0000000000008082ULL, 0x800000000000808aULL,
	0x8000000080008000ULL, 0x000000000000808bULL, 0x0000000080000001ULL,
	0x8000000080008081ULL, 0x8000000000008009ULL, 0x000000000000008aULL,
	0x0000000000000088ULL, 0x0000000080008009ULL, 0x000000008000000aULL,
	0x000000008000808bULL, 0x800000000000008bULL, 0x8000000000008089ULL,
	0x8000000000008003ULL, 0x8000000000008002ULL, 0x8000000000000080ULL,
	0x000000000000800aULL, 0x800000008000000aULL, 0x8000000080008081ULL,
	0x8000000000008080ULL, 0x0000000080000001ULL, 0x8000000080008008ULL
};

/* update the state with given number of rounds */
static SHA3_INLINE void keccakf_round(u64 st[25])
{
	u64 t[5], tt, bc[5];
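
	/*
	 * st[] is the 5x5 Keccak-f[1600] state, stored as 25 64-bit lanes
	 * (st[x + 5 * y]); bc[] holds the theta column parities (and is
	 * reused in the chi step below), t[] the per-column theta terms.
	 */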

	/* Theta */
	bc[0] = st[0] ^ st[5] ^ st[10] ^ st[15] ^ st[20];
	bc[1] = st[1] ^ st[6] ^ st[11] ^ st[16] ^ st[21];
	bc[2] = st[2] ^ st[7] ^ st[12] ^ st[17] ^ st[22];
	bc[3] = st[3] ^ st[8] ^ st[13] ^ st[18] ^ st[23];
	bc[4] = st[4] ^ st[9] ^ st[14] ^ st[19] ^ st[24];

	t[0] = bc[4] ^ rol64(bc[1], 1);
	t[1] = bc[0] ^ rol64(bc[2], 1);
	t[2] = bc[1] ^ rol64(bc[3], 1);
	t[3] = bc[2] ^ rol64(bc[4], 1);
	t[4] = bc[3] ^ rol64(bc[0], 1);

	st[0] ^= t[0];
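
	/*
	 * The theta XOR for the remaining 24 lanes is folded into the
	 * combined Rho Pi step below (the st[i] ^ t[...] inside rol64()),
	 * which is why only st[0] is updated here.
	 */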

	/* Rho Pi */
	tt = st[1];
	st[ 1] = rol64(st[ 6] ^ t[1], 44);
	st[ 6] = rol64(st[ 9] ^ t[4], 20);
	st[ 9] = rol64(st[22] ^ t[2], 61);
	st[22] = rol64(st[14] ^ t[4], 39);
	st[14] = rol64(st[20] ^ t[0], 18);
	st[20] = rol64(st[ 2] ^ t[2], 62);
	st[ 2] = rol64(st[12] ^ t[2], 43);
	st[12] = rol64(st[13] ^ t[3], 25);
	st[13] = rol64(st[19] ^ t[4],  8);
	st[19] = rol64(st[23] ^ t[3], 56);
	st[23] = rol64(st[15] ^ t[0], 41);
	st[15] = rol64(st[ 4] ^ t[4], 27);
	st[ 4] = rol64(st[24] ^ t[4], 14);
	st[24] = rol64(st[21] ^ t[1],  2);
	st[21] = rol64(st[ 8] ^ t[3], 55);
	st[ 8] = rol64(st[16] ^ t[1], 45);
	st[16] = rol64(st[ 5] ^ t[0], 36);
	st[ 5] = rol64(st[ 3] ^ t[3], 28);
	st[ 3] = rol64(st[18] ^ t[3], 21);
	st[18] = rol64(st[17] ^ t[2], 15);
	st[17] = rol64(st[11] ^ t[1], 10);
	st[11] = rol64(st[ 7] ^ t[2],  6);
	st[ 7] = rol64(st[10] ^ t[0],  3);
	st[10] = rol64(   tt ^ t[1],  1);
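
	/*
	 * Chi: within each row of five lanes, st[x] ^= ~st[x + 1] & st[x + 2]
	 * (indices taken mod 5 within the row); bc[] buffers the terms so
	 * each row is updated in place.
	 */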

	/* Chi */
	bc[ 0] = ~st[ 1] & st[ 2];
	bc[ 1] = ~st[ 2] & st[ 3];
	bc[ 2] = ~st[ 3] & st[ 4];
	bc[ 3] = ~st[ 4] & st[ 0];
	bc[ 4] = ~st[ 0] & st[ 1];
	st[ 0] ^= bc[ 0];
	st[ 1] ^= bc[ 1];
	st[ 2] ^= bc[ 2];
	st[ 3] ^= bc[ 3];
	st[ 4] ^= bc[ 4];

	bc[ 0] = ~st[ 6] & st[ 7];
	bc[ 1] = ~st[ 7] & st[ 8];
	bc[ 2] = ~st[ 8] & st[ 9];
	bc[ 3] = ~st[ 9] & st[ 5];
	bc[ 4] = ~st[ 5] & st[ 6];
	st[ 5] ^= bc[ 0];
	st[ 6] ^= bc[ 1];
	st[ 7] ^= bc[ 2];
	st[ 8] ^= bc[ 3];
	st[ 9] ^= bc[ 4];

	bc[ 0] = ~st[11] & st[12];
	bc[ 1] = ~st[12] & st[13];
	bc[ 2] = ~st[13] & st[14];
	bc[ 3] = ~st[14] & st[10];
	bc[ 4] = ~st[10] & st[11];
	st[10] ^= bc[ 0];
	st[11] ^= bc[ 1];
	st[12] ^= bc[ 2];
	st[13] ^= bc[ 3];
	st[14] ^= bc[ 4];

	bc[ 0] = ~st[16] & st[17];
	bc[ 1] = ~st[17] & st[18];
	bc[ 2] = ~st[18] & st[19];
	bc[ 3] = ~st[19] & st[15];
	bc[ 4] = ~st[15] & st[16];
	st[15] ^= bc[ 0];
	st[16] ^= bc[ 1];
	st[17] ^= bc[ 2];
	st[18] ^= bc[ 3];
	st[19] ^= bc[ 4];

	bc[ 0] = ~st[21] & st[22];
	bc[ 1] = ~st[22] & st[23];
	bc[ 2] = ~st[23] & st[24];
	bc[ 3] = ~st[24] & st[20];
	bc[ 4] = ~st[20] & st[21];
	st[20] ^= bc[ 0];
	st[21] ^= bc[ 1];
	st[22] ^= bc[ 2];
	st[23] ^= bc[ 3];
	st[24] ^= bc[ 4];
}

static void __attribute__((__optimize__("O3"))) keccakf(u64 st[25])
{
	int round;

	for (round = 0; round < KECCAK_ROUNDS; round++) {
		keccakf_round(st);
		/* Iota */
		st[0] ^= keccakf_rndc[round];
	}
}

int crypto_sha3_init(struct shash_desc *desc)
{
	struct sha3_state *sctx = shash_desc_ctx(desc);
	unsigned int digest_size = crypto_shash_digestsize(desc->tfm);
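
	/*
	 * Sponge rate (block size) in bytes: the Keccak-f[1600] state is
	 * 200 bytes and SHA-3 reserves a capacity of twice the digest size,
	 * so rsiz = 200 - 2 * digest_size; rsizw is the same rate in 64-bit
	 * words.
	 */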
	sctx->rsiz = 200 - 2 * digest_size;
	sctx->rsizw = sctx->rsiz / 8;
	sctx->partial = 0;

	memset(sctx->st, 0, sizeof(sctx->st));

	return 0;
}
EXPORT_SYMBOL(crypto_sha3_init);

int crypto_sha3_update(struct shash_desc *desc, const u8 *data,
		       unsigned int len)
{
	struct sha3_state *sctx = shash_desc_ctx(desc);
	unsigned int done;
	const u8 *src;

	done = 0;
	src = data;
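
	/*
	 * Absorb data only once a full rsiz-byte block is available; any
	 * remainder stays buffered in sctx->buf for the next update() or
	 * final() call.
	 */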
	if ((sctx->partial + len) > (sctx->rsiz - 1)) {
		if (sctx->partial) {
			done = -sctx->partial;
			memcpy(sctx->buf + sctx->partial, data,
			       done + sctx->rsiz);
			src = sctx->buf;
		}

		do {
			unsigned int i;

			for (i = 0; i < sctx->rsizw; i++)
				sctx->st[i] ^= get_unaligned_le64(src + 8 * i);
			keccakf(sctx->st);

			done += sctx->rsiz;
			src = data + done;
		} while (done + (sctx->rsiz - 1) < len);

		sctx->partial = 0;
	}
	memcpy(sctx->buf + sctx->partial, src, len - done);
	sctx->partial += (len - done);

	return 0;
}
EXPORT_SYMBOL(crypto_sha3_update);

int crypto_sha3_final(struct shash_desc *desc, u8 *out)
{
	struct sha3_state *sctx = shash_desc_ctx(desc);
	unsigned int i, inlen = sctx->partial;
	unsigned int digest_size = crypto_shash_digestsize(desc->tfm);
	__le64 *digest = (__le64 *)out;
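
	/*
	 * SHA-3 padding: append the 01 domain-separation bits followed by
	 * the pad10*1 rule.  For byte-aligned input this is 0x06 in the
	 * first padding byte and 0x80 ORed into the last byte of the block
	 * (both land in the same byte when only one padding byte fits).
	 */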
	sctx->buf[inlen++] = 0x06;
	memset(sctx->buf + inlen, 0, sctx->rsiz - inlen);
	sctx->buf[sctx->rsiz - 1] |= 0x80;

	for (i = 0; i < sctx->rsizw; i++)
		sctx->st[i] ^= get_unaligned_le64(sctx->buf + 8 * i);

	keccakf(sctx->st);

	for (i = 0; i < digest_size / 8; i++)
		put_unaligned_le64(sctx->st[i], digest++);

	if (digest_size & 4)
		put_unaligned_le32(sctx->st[i], (__le32 *)digest);

	memset(sctx, 0, sizeof(*sctx));
	return 0;
}
EXPORT_SYMBOL(crypto_sha3_final);

static struct shash_alg algs[] = { {
	.digestsize		= SHA3_224_DIGEST_SIZE,
	.init			= crypto_sha3_init,
	.update			= crypto_sha3_update,
	.final			= crypto_sha3_final,
	.descsize		= sizeof(struct sha3_state),
	.base.cra_name		= "sha3-224",
	.base.cra_driver_name	= "sha3-224-generic",
	.base.cra_flags		= CRYPTO_ALG_TYPE_SHASH,
	.base.cra_blocksize	= SHA3_224_BLOCK_SIZE,
	.base.cra_module	= THIS_MODULE,
}, {
	.digestsize		= SHA3_256_DIGEST_SIZE,
	.init			= crypto_sha3_init,
	.update			= crypto_sha3_update,
	.final			= crypto_sha3_final,
	.descsize		= sizeof(struct sha3_state),
	.base.cra_name		= "sha3-256",
	.base.cra_driver_name	= "sha3-256-generic",
	.base.cra_flags		= CRYPTO_ALG_TYPE_SHASH,
	.base.cra_blocksize	= SHA3_256_BLOCK_SIZE,
	.base.cra_module	= THIS_MODULE,
}, {
	.digestsize		= SHA3_384_DIGEST_SIZE,
	.init			= crypto_sha3_init,
	.update			= crypto_sha3_update,
	.final			= crypto_sha3_final,
	.descsize		= sizeof(struct sha3_state),
	.base.cra_name		= "sha3-384",
	.base.cra_driver_name	= "sha3-384-generic",
	.base.cra_flags		= CRYPTO_ALG_TYPE_SHASH,
	.base.cra_blocksize	= SHA3_384_BLOCK_SIZE,
	.base.cra_module	= THIS_MODULE,
}, {
	.digestsize		= SHA3_512_DIGEST_SIZE,
	.init			= crypto_sha3_init,
	.update			= crypto_sha3_update,
	.final			= crypto_sha3_final,
	.descsize		= sizeof(struct sha3_state),
	.base.cra_name		= "sha3-512",
	.base.cra_driver_name	= "sha3-512-generic",
	.base.cra_flags		= CRYPTO_ALG_TYPE_SHASH,
	.base.cra_blocksize	= SHA3_512_BLOCK_SIZE,
	.base.cra_module	= THIS_MODULE,
} };

static int __init sha3_generic_mod_init(void)
{
	return crypto_register_shashes(algs, ARRAY_SIZE(algs));
}

static void __exit sha3_generic_mod_fini(void)
{
	crypto_unregister_shashes(algs, ARRAY_SIZE(algs));
}

module_init(sha3_generic_mod_init);
module_exit(sha3_generic_mod_fini);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("SHA-3 Secure Hash Algorithm");

MODULE_ALIAS_CRYPTO("sha3-224");
MODULE_ALIAS_CRYPTO("sha3-224-generic");
MODULE_ALIAS_CRYPTO("sha3-256");
MODULE_ALIAS_CRYPTO("sha3-256-generic");
MODULE_ALIAS_CRYPTO("sha3-384");
MODULE_ALIAS_CRYPTO("sha3-384-generic");
MODULE_ALIAS_CRYPTO("sha3-512");
MODULE_ALIAS_CRYPTO("sha3-512-generic");
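
/*
 * Usage sketch: one way a kernel caller might compute a SHA3-256 digest
 * through the shash API backed by the "sha3-256" algorithm registered
 * above.  The helper name and error handling are illustrative only;
 * crypto_alloc_shash(), SHASH_DESC_ON_STACK(), crypto_shash_digest() and
 * crypto_free_shash() are the standard kernel interfaces.
 */
static int __maybe_unused sha3_256_digest_example(const u8 *data,
						  unsigned int len, u8 *out)
{
	struct crypto_shash *tfm;
	int err;

	/* Look up the registered "sha3-256" shash implementation. */
	tfm = crypto_alloc_shash("sha3-256", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	{
		/* Descriptor (including struct sha3_state) on the stack. */
		SHASH_DESC_ON_STACK(desc, tfm);

		desc->tfm = tfm;
		/* One-shot init + update + final over the whole buffer. */
		err = crypto_shash_digest(desc, data, len, out);
		shash_desc_zero(desc);
	}

	crypto_free_shash(tfm);
	return err;
}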