/* crypto/aes_generic.c */

/*
 * Cryptographic API.
 *
 * AES Cipher Algorithm.
 *
 * Based on Brian Gladman's code.
 *
 * Linux developers:
 *  Alexander Kjeldaas <astor@fast.no>
 *  Herbert Valerio Riedel <hvr@hvrlab.org>
 *  Kyle McMartin <kyle@debian.org>
 *  Adam J. Richter <adam@yggdrasil.com> (conversion to 2.5 API).
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * ---------------------------------------------------------------------------
 * Copyright (c) 2002, Dr Brian Gladman <brg@gladman.me.uk>, Worcester, UK.
 * All rights reserved.
 *
 * LICENSE TERMS
 *
 * The free distribution and use of this software in both source and binary
 * form is allowed (with or without changes) provided that:
 *
 *   1. distributions of this source code include the above copyright
 *      notice, this list of conditions and the following disclaimer;
 *
 *   2. distributions in binary form include the above copyright
 *      notice, this list of conditions and the following disclaimer
 *      in the documentation and/or other associated materials;
 *
 *   3. the copyright holder's name is not used to endorse products
 *      built using this software without specific written permission.
 *
 * ALTERNATIVELY, provided that this notice is retained in full, this product
 * may be distributed under the terms of the GNU General Public License (GPL),
 * in which case the provisions of the GPL apply INSTEAD OF those given above.
 *
 * DISCLAIMER
 *
 * This software is provided 'as is' with no explicit or implied warranties
 * in respect of its properties, including, but not limited to, correctness
 * and/or fitness for purpose.
 * ---------------------------------------------------------------------------
 */
#include <crypto/aes.h>
#include <linux/module.h>
#include <linux/init.h>
#include <linux/types.h>
#include <linux/errno.h>
#include <linux/crypto.h>
#include <asm/byteorder.h>
static inline u8 byte(const u32 x, const unsigned n)
{
	return x >> (n << 3);
}
static u8 pow_tab[256] __initdata;
static u8 log_tab[256] __initdata;
static u8 sbx_tab[256] __initdata;
static u8 isb_tab[256] __initdata;
static u32 rco_tab[10];

u32 crypto_ft_tab[4][256];
u32 crypto_fl_tab[4][256];
u32 crypto_it_tab[4][256];
u32 crypto_il_tab[4][256];

EXPORT_SYMBOL_GPL(crypto_ft_tab);
EXPORT_SYMBOL_GPL(crypto_fl_tab);
EXPORT_SYMBOL_GPL(crypto_it_tab);
EXPORT_SYMBOL_GPL(crypto_il_tab);
static inline u8 __init f_mult(u8 a, u8 b)
{
	u8 aa = log_tab[a], cc = aa + log_tab[b];

	return pow_tab[cc + (cc < aa ? 1 : 0)];
}

#define ff_mult(a, b)	(a && b ? f_mult(a, b) : 0)
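
/*
 * Illustrative sketch, not part of the original file: f_mult() multiplies in
 * GF(2^8) modulo the AES polynomial 0x11b by adding discrete logs and looking
 * up the antilog in pow_tab[], and ff_mult() short-circuits a zero operand.
 * ff_mult_spotcheck() is a hypothetical helper; the expected values come from
 * the worked example in FIPS-197 section 4.2.
 */
#if 0
static void __init ff_mult_spotcheck(void)
{
	/* gen_tabs() must already have filled pow_tab[] and log_tab[] */
	BUG_ON(ff_mult(0x57, 0x83) != 0xc1);	/* {57} . {83} = {c1} */
	BUG_ON(ff_mult(0x57, 0x02) != 0xae);	/* xtime({57}) */
	BUG_ON(ff_mult(0x57, 0x00) != 0x00);	/* zero operand short-circuits */
}
#endif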
static void __init gen_tabs(void)
{
	u32 i, t;
	u8 p, q;

	/*
	 * log and power tables for GF(2**8) finite field with
	 * 0x011b as modular polynomial - the simplest primitive
	 * root is 0x03, used here to generate the tables
	 */

	for (i = 0, p = 1; i < 256; ++i) {
		pow_tab[i] = (u8) p;
		log_tab[p] = (u8) i;

		p ^= (p << 1) ^ (p & 0x80 ? 0x01b : 0);
	}

	log_tab[1] = 0;

	for (i = 0, p = 1; i < 10; ++i) {
		rco_tab[i] = p;

		p = (p << 1) ^ (p & 0x80 ? 0x01b : 0);
	}

	for (i = 0; i < 256; ++i) {
		p = (i ? pow_tab[255 - log_tab[i]] : 0);
		q = ((p >> 7) | (p << 1)) ^ ((p >> 6) | (p << 2));
		p ^= 0x63 ^ q ^ ((q >> 6) | (q << 2));
		sbx_tab[i] = p;
		isb_tab[p] = (u8) i;
	}

	for (i = 0; i < 256; ++i) {
		p = sbx_tab[i];

		t = p;
		crypto_fl_tab[0][i] = t;
		crypto_fl_tab[1][i] = rol32(t, 8);
		crypto_fl_tab[2][i] = rol32(t, 16);
		crypto_fl_tab[3][i] = rol32(t, 24);

		t = ((u32) ff_mult(2, p)) |
		    ((u32) p << 8) |
		    ((u32) p << 16) | ((u32) ff_mult(3, p) << 24);

		crypto_ft_tab[0][i] = t;
		crypto_ft_tab[1][i] = rol32(t, 8);
		crypto_ft_tab[2][i] = rol32(t, 16);
		crypto_ft_tab[3][i] = rol32(t, 24);

		p = isb_tab[i];

		t = p;
		crypto_il_tab[0][i] = t;
		crypto_il_tab[1][i] = rol32(t, 8);
		crypto_il_tab[2][i] = rol32(t, 16);
		crypto_il_tab[3][i] = rol32(t, 24);

		t = ((u32) ff_mult(14, p)) |
		    ((u32) ff_mult(9, p) << 8) |
		    ((u32) ff_mult(13, p) << 16) |
		    ((u32) ff_mult(11, p) << 24);

		crypto_it_tab[0][i] = t;
		crypto_it_tab[1][i] = rol32(t, 8);
		crypto_it_tab[2][i] = rol32(t, 16);
		crypto_it_tab[3][i] = rol32(t, 24);
	}
}
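
/*
 * Illustrative sketch, not part of the original file: crypto_fl_tab[k][x] is
 * simply the S-box output rotated into byte position k, so four lookups XORed
 * together apply SubBytes to a whole column; crypto_ft_tab additionally folds
 * in the MixColumns multiplications by {02} and {03}.  table_spotcheck() is a
 * hypothetical helper; S(0x53) = 0xed is the SubBytes example from FIPS-197.
 */
#if 0
static void __init table_spotcheck(void)
{
	BUG_ON(sbx_tab[0x53] != 0xed);
	BUG_ON(crypto_fl_tab[0][0x53] != 0x000000ed);
	BUG_ON(crypto_fl_tab[1][0x53] != 0x0000ed00);	/* same byte, rotated */
}
#endif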
/* initialise the key schedule from the user supplied key */

#define star_x(x) (((x) & 0x7f7f7f7f) << 1) ^ ((((x) & 0x80808080) >> 7) * 0x1b)

#define imix_col(y, x)	do {		\
	u = star_x(x);			\
	v = star_x(u);			\
	w = star_x(v);			\
	t = w ^ (x);			\
	(y) = u ^ v ^ w;		\
	(y) ^= ror32(u ^ t, 8) ^	\
	       ror32(v ^ t, 16) ^	\
	       ror32(t, 24);		\
} while (0)

#define ls_box(x)			\
	crypto_fl_tab[0][byte(x, 0)] ^	\
	crypto_fl_tab[1][byte(x, 1)] ^	\
	crypto_fl_tab[2][byte(x, 2)] ^	\
	crypto_fl_tab[3][byte(x, 3)]

#define loop4(i)	do {		\
	t = ror32(t, 8);		\
	t = ls_box(t) ^ rco_tab[i];	\
	t ^= ctx->key_enc[4 * i];	\
	ctx->key_enc[4 * i + 4] = t;	\
	t ^= ctx->key_enc[4 * i + 1];	\
	ctx->key_enc[4 * i + 5] = t;	\
	t ^= ctx->key_enc[4 * i + 2];	\
	ctx->key_enc[4 * i + 6] = t;	\
	t ^= ctx->key_enc[4 * i + 3];	\
	ctx->key_enc[4 * i + 7] = t;	\
} while (0)

#define loop6(i)	do {		\
	t = ror32(t, 8);		\
	t = ls_box(t) ^ rco_tab[i];	\
	t ^= ctx->key_enc[6 * i];	\
	ctx->key_enc[6 * i + 6] = t;	\
	t ^= ctx->key_enc[6 * i + 1];	\
	ctx->key_enc[6 * i + 7] = t;	\
	t ^= ctx->key_enc[6 * i + 2];	\
	ctx->key_enc[6 * i + 8] = t;	\
	t ^= ctx->key_enc[6 * i + 3];	\
	ctx->key_enc[6 * i + 9] = t;	\
	t ^= ctx->key_enc[6 * i + 4];	\
	ctx->key_enc[6 * i + 10] = t;	\
	t ^= ctx->key_enc[6 * i + 5];	\
	ctx->key_enc[6 * i + 11] = t;	\
} while (0)

#define loop8(i)	do {		\
	t = ror32(t, 8);		\
	t = ls_box(t) ^ rco_tab[i];	\
	t ^= ctx->key_enc[8 * i];	\
	ctx->key_enc[8 * i + 8] = t;	\
	t ^= ctx->key_enc[8 * i + 1];	\
	ctx->key_enc[8 * i + 9] = t;	\
	t ^= ctx->key_enc[8 * i + 2];	\
	ctx->key_enc[8 * i + 10] = t;	\
	t ^= ctx->key_enc[8 * i + 3];	\
	ctx->key_enc[8 * i + 11] = t;	\
	t = ctx->key_enc[8 * i + 4] ^ ls_box(t); \
	ctx->key_enc[8 * i + 12] = t;	\
	t ^= ctx->key_enc[8 * i + 5];	\
	ctx->key_enc[8 * i + 13] = t;	\
	t ^= ctx->key_enc[8 * i + 6];	\
	ctx->key_enc[8 * i + 14] = t;	\
	t ^= ctx->key_enc[8 * i + 7];	\
	ctx->key_enc[8 * i + 15] = t;	\
} while (0)
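
/*
 * Illustrative sketch, not part of the original file: the first two lines of
 * loop4()/loop6()/loop8() implement the standard key-expansion "g" step,
 * SubWord(RotWord(w)) ^ Rcon.  key_expansion_g() is a hypothetical helper
 * spelling that out; with words loaded little-endian, RotWord is a right
 * rotate by 8 and ls_box() performs SubWord via the crypto_fl tables.
 */
#if 0
static u32 key_expansion_g(u32 w, unsigned int rcon_idx)
{
	u32 t = ror32(w, 8);			/* RotWord */

	return ls_box(t) ^ rco_tab[rcon_idx];	/* SubWord + round constant */
}
#endif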
int crypto_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
		unsigned int key_len)
{
	struct crypto_aes_ctx *ctx = crypto_tfm_ctx(tfm);
	const __le32 *key = (const __le32 *)in_key;
	u32 *flags = &tfm->crt_flags;
	u32 i, t, u, v, w, j;

	if (key_len % 8) {
		*flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}

	ctx->key_length = key_len;

	ctx->key_dec[key_len + 24] = ctx->key_enc[0] = le32_to_cpu(key[0]);
	ctx->key_dec[key_len + 25] = ctx->key_enc[1] = le32_to_cpu(key[1]);
	ctx->key_dec[key_len + 26] = ctx->key_enc[2] = le32_to_cpu(key[2]);
	ctx->key_dec[key_len + 27] = ctx->key_enc[3] = le32_to_cpu(key[3]);

	switch (key_len) {
	case 16:
		t = ctx->key_enc[3];
		for (i = 0; i < 10; ++i)
			loop4(i);
		break;

	case 24:
		ctx->key_enc[4] = le32_to_cpu(key[4]);
		t = ctx->key_enc[5] = le32_to_cpu(key[5]);
		for (i = 0; i < 8; ++i)
			loop6(i);
		break;

	case 32:
		ctx->key_enc[4] = le32_to_cpu(key[4]);
		ctx->key_enc[5] = le32_to_cpu(key[5]);
		ctx->key_enc[6] = le32_to_cpu(key[6]);
		t = ctx->key_enc[7] = le32_to_cpu(key[7]);
		for (i = 0; i < 7; ++i)
			loop8(i);
		break;
	}

	ctx->key_dec[0] = ctx->key_enc[key_len + 24];
	ctx->key_dec[1] = ctx->key_enc[key_len + 25];
	ctx->key_dec[2] = ctx->key_enc[key_len + 26];
	ctx->key_dec[3] = ctx->key_enc[key_len + 27];

	for (i = 4; i < key_len + 24; ++i) {
		j = key_len + 24 - (i & ~3) + (i & 3);
		imix_col(ctx->key_dec[j], ctx->key_enc[i]);
	}

	return 0;
}
EXPORT_SYMBOL_GPL(crypto_aes_set_key);
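
/*
 * Illustrative sketch, not part of the original file: the schedule built above
 * holds 4 * (rounds + 1) = key_len + 28 32-bit words, and the imix_col() loop
 * stores InvMixColumns of encryption round key r at decryption round
 * (rounds - r), so aes_decrypt() below can walk key_dec[] forwards (the
 * "equivalent inverse cipher").  schedule_words() is a hypothetical helper
 * restating that arithmetic.
 */
#if 0
static unsigned int schedule_words(unsigned int key_len)
{
	unsigned int rounds = 6 + key_len / 4;	/* 10, 12 or 14 */

	return 4 * (rounds + 1);		/* 44, 52 or 60 == key_len + 28 */
}
#endif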
/* encrypt a block of text */

#define f_rn(bo, bi, n, k)	do {				\
	bo[n] = crypto_ft_tab[0][byte(bi[n], 0)] ^		\
		crypto_ft_tab[1][byte(bi[(n + 1) & 3], 1)] ^	\
		crypto_ft_tab[2][byte(bi[(n + 2) & 3], 2)] ^	\
		crypto_ft_tab[3][byte(bi[(n + 3) & 3], 3)] ^ *(k + n); \
} while (0)

#define f_nround(bo, bi, k)	do {\
	f_rn(bo, bi, 0, k);	\
	f_rn(bo, bi, 1, k);	\
	f_rn(bo, bi, 2, k);	\
	f_rn(bo, bi, 3, k);	\
	k += 4;			\
} while (0)

#define f_rl(bo, bi, n, k)	do {				\
	bo[n] = crypto_fl_tab[0][byte(bi[n], 0)] ^		\
		crypto_fl_tab[1][byte(bi[(n + 1) & 3], 1)] ^	\
		crypto_fl_tab[2][byte(bi[(n + 2) & 3], 2)] ^	\
		crypto_fl_tab[3][byte(bi[(n + 3) & 3], 3)] ^ *(k + n); \
} while (0)

#define f_lround(bo, bi, k)	do {\
	f_rl(bo, bi, 0, k);	\
	f_rl(bo, bi, 1, k);	\
	f_rl(bo, bi, 2, k);	\
	f_rl(bo, bi, 3, k);	\
} while (0)
static void aes_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
{
	const struct crypto_aes_ctx *ctx = crypto_tfm_ctx(tfm);
	const __le32 *src = (const __le32 *)in;
	__le32 *dst = (__le32 *)out;
	u32 b0[4], b1[4];
	const u32 *kp = ctx->key_enc + 4;
	const int key_len = ctx->key_length;

	b0[0] = le32_to_cpu(src[0]) ^ ctx->key_enc[0];
	b0[1] = le32_to_cpu(src[1]) ^ ctx->key_enc[1];
	b0[2] = le32_to_cpu(src[2]) ^ ctx->key_enc[2];
	b0[3] = le32_to_cpu(src[3]) ^ ctx->key_enc[3];

	if (key_len > 24) {
		f_nround(b1, b0, kp);
		f_nround(b0, b1, kp);
	}

	if (key_len > 16) {
		f_nround(b1, b0, kp);
		f_nround(b0, b1, kp);
	}

	f_nround(b1, b0, kp);
	f_nround(b0, b1, kp);
	f_nround(b1, b0, kp);
	f_nround(b0, b1, kp);
	f_nround(b1, b0, kp);
	f_nround(b0, b1, kp);
	f_nround(b1, b0, kp);
	f_nround(b0, b1, kp);
	f_nround(b1, b0, kp);
	f_lround(b0, b1, kp);

	dst[0] = cpu_to_le32(b0[0]);
	dst[1] = cpu_to_le32(b0[1]);
	dst[2] = cpu_to_le32(b0[2]);
	dst[3] = cpu_to_le32(b0[3]);
}
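
/*
 * Illustrative sketch, not part of the original file: the fixed sequence above
 * is 9 full rounds plus the final f_lround(), and each "if (key_len > ...)"
 * block contributes 2 more full rounds, giving the 10/12/14 rounds required
 * for 128/192/256-bit keys.  nr_rounds() is a hypothetical helper restating
 * that count.
 */
#if 0
static int nr_rounds(int key_len)
{
	int rounds = 10;	/* AES-128 */

	if (key_len > 16)
		rounds += 2;	/* AES-192 */
	if (key_len > 24)
		rounds += 2;	/* AES-256 */
	return rounds;
}
#endif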
/* decrypt a block of text */

#define i_rn(bo, bi, n, k)	do {				\
	bo[n] = crypto_it_tab[0][byte(bi[n], 0)] ^		\
		crypto_it_tab[1][byte(bi[(n + 3) & 3], 1)] ^	\
		crypto_it_tab[2][byte(bi[(n + 2) & 3], 2)] ^	\
		crypto_it_tab[3][byte(bi[(n + 1) & 3], 3)] ^ *(k + n); \
} while (0)

#define i_nround(bo, bi, k)	do {\
	i_rn(bo, bi, 0, k);	\
	i_rn(bo, bi, 1, k);	\
	i_rn(bo, bi, 2, k);	\
	i_rn(bo, bi, 3, k);	\
	k += 4;			\
} while (0)

#define i_rl(bo, bi, n, k)	do {				\
	bo[n] = crypto_il_tab[0][byte(bi[n], 0)] ^		\
		crypto_il_tab[1][byte(bi[(n + 3) & 3], 1)] ^	\
		crypto_il_tab[2][byte(bi[(n + 2) & 3], 2)] ^	\
		crypto_il_tab[3][byte(bi[(n + 1) & 3], 3)] ^ *(k + n); \
} while (0)

#define i_lround(bo, bi, k)	do {\
	i_rl(bo, bi, 0, k);	\
	i_rl(bo, bi, 1, k);	\
	i_rl(bo, bi, 2, k);	\
	i_rl(bo, bi, 3, k);	\
} while (0)
static void aes_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
{
	const struct crypto_aes_ctx *ctx = crypto_tfm_ctx(tfm);
	const __le32 *src = (const __le32 *)in;
	__le32 *dst = (__le32 *)out;
	u32 b0[4], b1[4];
	const int key_len = ctx->key_length;
	const u32 *kp = ctx->key_dec + 4;

	b0[0] = le32_to_cpu(src[0]) ^ ctx->key_dec[0];
	b0[1] = le32_to_cpu(src[1]) ^ ctx->key_dec[1];
	b0[2] = le32_to_cpu(src[2]) ^ ctx->key_dec[2];
	b0[3] = le32_to_cpu(src[3]) ^ ctx->key_dec[3];

	if (key_len > 24) {
		i_nround(b1, b0, kp);
		i_nround(b0, b1, kp);
	}

	if (key_len > 16) {
		i_nround(b1, b0, kp);
		i_nround(b0, b1, kp);
	}

	i_nround(b1, b0, kp);
	i_nround(b0, b1, kp);
	i_nround(b1, b0, kp);
	i_nround(b0, b1, kp);
	i_nround(b1, b0, kp);
	i_nround(b0, b1, kp);
	i_nround(b1, b0, kp);
	i_nround(b0, b1, kp);
	i_nround(b1, b0, kp);
	i_lround(b0, b1, kp);

	dst[0] = cpu_to_le32(b0[0]);
	dst[1] = cpu_to_le32(b0[1]);
	dst[2] = cpu_to_le32(b0[2]);
	dst[3] = cpu_to_le32(b0[3]);
}
static struct crypto_alg aes_alg = {
	.cra_name		= "aes",
	.cra_driver_name	= "aes-generic",
	.cra_priority		= 100,
	.cra_flags		= CRYPTO_ALG_TYPE_CIPHER,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct crypto_aes_ctx),
	.cra_alignmask		= 3,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(aes_alg.cra_list),
	.cra_u			= {
		.cipher = {
			.cia_min_keysize	= AES_MIN_KEY_SIZE,
			.cia_max_keysize	= AES_MAX_KEY_SIZE,
			.cia_setkey		= crypto_aes_set_key,
			.cia_encrypt		= aes_encrypt,
			.cia_decrypt		= aes_decrypt
		}
	}
};
static int __init aes_init(void)
{
	gen_tabs();
	return crypto_register_alg(&aes_alg);
}

static void __exit aes_fini(void)
{
	crypto_unregister_alg(&aes_alg);
}

module_init(aes_init);
module_exit(aes_fini);

MODULE_DESCRIPTION("Rijndael (AES) Cipher Algorithm");
MODULE_LICENSE("Dual BSD/GPL");
MODULE_ALIAS("aes");
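
/*
 * Minimal usage sketch, not part of this module: how a kernel caller would
 * exercise this cipher through the single-block crypto API, checked against
 * the AES-128 test vector from FIPS-197 appendix C.1.  aes_usage_example()
 * is a hypothetical function and error handling is abbreviated.
 */
#if 0
static int __init aes_usage_example(void)
{
	static const u8 key[16] = {
		0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
		0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f
	};
	static const u8 pt[16] = {
		0x00, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77,
		0x88, 0x99, 0xaa, 0xbb, 0xcc, 0xdd, 0xee, 0xff
	};
	/* expected ciphertext: 69 c4 e0 d8 6a 7b 04 30 d8 cd b7 80 70 b4 c5 5a */
	u8 ct[16], back[16];
	struct crypto_cipher *tfm;
	int err;

	tfm = crypto_alloc_cipher("aes", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	err = crypto_cipher_setkey(tfm, key, sizeof(key));
	if (!err) {
		crypto_cipher_encrypt_one(tfm, ct, pt);
		crypto_cipher_decrypt_one(tfm, back, ct);
		/* back[] should now match pt[] again */
	}

	crypto_free_cipher(tfm);
	return err;
}
#endif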