ACPI: fix memory leak in acpi_thermal_add() error path
[linux-2.6/linux-acpi-2.6/ibm-acpi-2.6.git] / crypto / tea.c
bloba6a02b30e47072b76f41a695343709a82e21a4d3
/*
 * Cryptographic API.
 *
 * TEA, XTEA, and XETA crypto algorithms
 *
 * The TEA and Xtended TEA algorithms were developed by David Wheeler
 * and Roger Needham at the Computer Laboratory of Cambridge University.
 *
 * Due to the order of evaluation in XTEA many people have incorrectly
 * implemented it. XETA (XTEA in the wrong order), exists for
 * compatibility with these implementations.
 *
 * Copyright (c) 2004 Aaron Grothe ajgrothe@yahoo.com
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 */
22 #include <linux/init.h>
23 #include <linux/module.h>
24 #include <linux/mm.h>
25 #include <asm/byteorder.h>
26 #include <asm/scatterlist.h>
27 #include <linux/crypto.h>
28 #include <linux/types.h>
/* TEA and XTEA share the same key/block geometry and magic delta
 * (0x9e3779b9 == 2^32 / golden ratio, per the original papers). */
#define TEA_KEY_SIZE		16
#define TEA_BLOCK_SIZE		8
#define TEA_ROUNDS		32
#define TEA_DELTA		0x9e3779b9

#define XTEA_KEY_SIZE		16
#define XTEA_BLOCK_SIZE		8
#define XTEA_ROUNDS		32
#define XTEA_DELTA		0x9e3779b9
40 struct tea_ctx {
41 u32 KEY[4];
44 struct xtea_ctx {
45 u32 KEY[4];
48 static int tea_setkey(void *ctx_arg, const u8 *in_key,
49 unsigned int key_len, u32 *flags)
51 struct tea_ctx *ctx = ctx_arg;
52 const __le32 *key = (const __le32 *)in_key;
54 if (key_len != 16)
56 *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
57 return -EINVAL;
60 ctx->KEY[0] = le32_to_cpu(key[0]);
61 ctx->KEY[1] = le32_to_cpu(key[1]);
62 ctx->KEY[2] = le32_to_cpu(key[2]);
63 ctx->KEY[3] = le32_to_cpu(key[3]);
65 return 0;
69 static void tea_encrypt(void *ctx_arg, u8 *dst, const u8 *src)
71 u32 y, z, n, sum = 0;
72 u32 k0, k1, k2, k3;
74 struct tea_ctx *ctx = ctx_arg;
75 const __le32 *in = (const __le32 *)src;
76 __le32 *out = (__le32 *)dst;
78 y = le32_to_cpu(in[0]);
79 z = le32_to_cpu(in[1]);
81 k0 = ctx->KEY[0];
82 k1 = ctx->KEY[1];
83 k2 = ctx->KEY[2];
84 k3 = ctx->KEY[3];
86 n = TEA_ROUNDS;
88 while (n-- > 0) {
89 sum += TEA_DELTA;
90 y += ((z << 4) + k0) ^ (z + sum) ^ ((z >> 5) + k1);
91 z += ((y << 4) + k2) ^ (y + sum) ^ ((y >> 5) + k3);
94 out[0] = cpu_to_le32(y);
95 out[1] = cpu_to_le32(z);
98 static void tea_decrypt(void *ctx_arg, u8 *dst, const u8 *src)
100 u32 y, z, n, sum;
101 u32 k0, k1, k2, k3;
102 struct tea_ctx *ctx = ctx_arg;
103 const __le32 *in = (const __le32 *)src;
104 __le32 *out = (__le32 *)dst;
106 y = le32_to_cpu(in[0]);
107 z = le32_to_cpu(in[1]);
109 k0 = ctx->KEY[0];
110 k1 = ctx->KEY[1];
111 k2 = ctx->KEY[2];
112 k3 = ctx->KEY[3];
114 sum = TEA_DELTA << 5;
116 n = TEA_ROUNDS;
118 while (n-- > 0) {
119 z -= ((y << 4) + k2) ^ (y + sum) ^ ((y >> 5) + k3);
120 y -= ((z << 4) + k0) ^ (z + sum) ^ ((z >> 5) + k1);
121 sum -= TEA_DELTA;
124 out[0] = cpu_to_le32(y);
125 out[1] = cpu_to_le32(z);
128 static int xtea_setkey(void *ctx_arg, const u8 *in_key,
129 unsigned int key_len, u32 *flags)
131 struct xtea_ctx *ctx = ctx_arg;
132 const __le32 *key = (const __le32 *)in_key;
134 if (key_len != 16)
136 *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
137 return -EINVAL;
140 ctx->KEY[0] = le32_to_cpu(key[0]);
141 ctx->KEY[1] = le32_to_cpu(key[1]);
142 ctx->KEY[2] = le32_to_cpu(key[2]);
143 ctx->KEY[3] = le32_to_cpu(key[3]);
145 return 0;
149 static void xtea_encrypt(void *ctx_arg, u8 *dst, const u8 *src)
151 u32 y, z, sum = 0;
152 u32 limit = XTEA_DELTA * XTEA_ROUNDS;
154 struct xtea_ctx *ctx = ctx_arg;
155 const __le32 *in = (const __le32 *)src;
156 __le32 *out = (__le32 *)dst;
158 y = le32_to_cpu(in[0]);
159 z = le32_to_cpu(in[1]);
161 while (sum != limit) {
162 y += ((z << 4 ^ z >> 5) + z) ^ (sum + ctx->KEY[sum&3]);
163 sum += XTEA_DELTA;
164 z += ((y << 4 ^ y >> 5) + y) ^ (sum + ctx->KEY[sum>>11 &3]);
167 out[0] = cpu_to_le32(y);
168 out[1] = cpu_to_le32(z);
171 static void xtea_decrypt(void *ctx_arg, u8 *dst, const u8 *src)
173 u32 y, z, sum;
174 struct tea_ctx *ctx = ctx_arg;
175 const __le32 *in = (const __le32 *)src;
176 __le32 *out = (__le32 *)dst;
178 y = le32_to_cpu(in[0]);
179 z = le32_to_cpu(in[1]);
181 sum = XTEA_DELTA * XTEA_ROUNDS;
183 while (sum) {
184 z -= ((y << 4 ^ y >> 5) + y) ^ (sum + ctx->KEY[sum>>11 & 3]);
185 sum -= XTEA_DELTA;
186 y -= ((z << 4 ^ z >> 5) + z) ^ (sum + ctx->KEY[sum & 3]);
189 out[0] = cpu_to_le32(y);
190 out[1] = cpu_to_le32(z);
194 static void xeta_encrypt(void *ctx_arg, u8 *dst, const u8 *src)
196 u32 y, z, sum = 0;
197 u32 limit = XTEA_DELTA * XTEA_ROUNDS;
199 struct xtea_ctx *ctx = ctx_arg;
200 const __le32 *in = (const __le32 *)src;
201 __le32 *out = (__le32 *)dst;
203 y = le32_to_cpu(in[0]);
204 z = le32_to_cpu(in[1]);
206 while (sum != limit) {
207 y += (z << 4 ^ z >> 5) + (z ^ sum) + ctx->KEY[sum&3];
208 sum += XTEA_DELTA;
209 z += (y << 4 ^ y >> 5) + (y ^ sum) + ctx->KEY[sum>>11 &3];
212 out[0] = cpu_to_le32(y);
213 out[1] = cpu_to_le32(z);
216 static void xeta_decrypt(void *ctx_arg, u8 *dst, const u8 *src)
218 u32 y, z, sum;
219 struct tea_ctx *ctx = ctx_arg;
220 const __le32 *in = (const __le32 *)src;
221 __le32 *out = (__le32 *)dst;
223 y = le32_to_cpu(in[0]);
224 z = le32_to_cpu(in[1]);
226 sum = XTEA_DELTA * XTEA_ROUNDS;
228 while (sum) {
229 z -= (y << 4 ^ y >> 5) + (y ^ sum) + ctx->KEY[sum>>11 & 3];
230 sum -= XTEA_DELTA;
231 y -= (z << 4 ^ z >> 5) + (z ^ sum) + ctx->KEY[sum & 3];
234 out[0] = cpu_to_le32(y);
235 out[1] = cpu_to_le32(z);
238 static struct crypto_alg tea_alg = {
239 .cra_name = "tea",
240 .cra_flags = CRYPTO_ALG_TYPE_CIPHER,
241 .cra_blocksize = TEA_BLOCK_SIZE,
242 .cra_ctxsize = sizeof (struct tea_ctx),
243 .cra_alignmask = 3,
244 .cra_module = THIS_MODULE,
245 .cra_list = LIST_HEAD_INIT(tea_alg.cra_list),
246 .cra_u = { .cipher = {
247 .cia_min_keysize = TEA_KEY_SIZE,
248 .cia_max_keysize = TEA_KEY_SIZE,
249 .cia_setkey = tea_setkey,
250 .cia_encrypt = tea_encrypt,
251 .cia_decrypt = tea_decrypt } }
254 static struct crypto_alg xtea_alg = {
255 .cra_name = "xtea",
256 .cra_flags = CRYPTO_ALG_TYPE_CIPHER,
257 .cra_blocksize = XTEA_BLOCK_SIZE,
258 .cra_ctxsize = sizeof (struct xtea_ctx),
259 .cra_alignmask = 3,
260 .cra_module = THIS_MODULE,
261 .cra_list = LIST_HEAD_INIT(xtea_alg.cra_list),
262 .cra_u = { .cipher = {
263 .cia_min_keysize = XTEA_KEY_SIZE,
264 .cia_max_keysize = XTEA_KEY_SIZE,
265 .cia_setkey = xtea_setkey,
266 .cia_encrypt = xtea_encrypt,
267 .cia_decrypt = xtea_decrypt } }
270 static struct crypto_alg xeta_alg = {
271 .cra_name = "xeta",
272 .cra_flags = CRYPTO_ALG_TYPE_CIPHER,
273 .cra_blocksize = XTEA_BLOCK_SIZE,
274 .cra_ctxsize = sizeof (struct xtea_ctx),
275 .cra_alignmask = 3,
276 .cra_module = THIS_MODULE,
277 .cra_list = LIST_HEAD_INIT(xtea_alg.cra_list),
278 .cra_u = { .cipher = {
279 .cia_min_keysize = XTEA_KEY_SIZE,
280 .cia_max_keysize = XTEA_KEY_SIZE,
281 .cia_setkey = xtea_setkey,
282 .cia_encrypt = xeta_encrypt,
283 .cia_decrypt = xeta_decrypt } }
286 static int __init init(void)
288 int ret = 0;
290 ret = crypto_register_alg(&tea_alg);
291 if (ret < 0)
292 goto out;
294 ret = crypto_register_alg(&xtea_alg);
295 if (ret < 0) {
296 crypto_unregister_alg(&tea_alg);
297 goto out;
300 ret = crypto_register_alg(&xeta_alg);
301 if (ret < 0) {
302 crypto_unregister_alg(&tea_alg);
303 crypto_unregister_alg(&xtea_alg);
304 goto out;
307 out:
308 return ret;
311 static void __exit fini(void)
313 crypto_unregister_alg(&tea_alg);
314 crypto_unregister_alg(&xtea_alg);
315 crypto_unregister_alg(&xeta_alg);
318 MODULE_ALIAS("xtea");
319 MODULE_ALIAS("xeta");
321 module_init(init);
322 module_exit(fini);
324 MODULE_LICENSE("GPL");
325 MODULE_DESCRIPTION("TEA, XTEA & XETA Cryptographic Algorithms");