/* Glue code for DES encryption optimized for sparc64 crypto opcodes.
 *
 * Copyright (C) 2012 David S. Miller <davem@davemloft.net>
 */

#define pr_fmt(fmt) KBUILD_MODNAME ": " fmt
#include <linux/crypto.h>
#include <linux/init.h>
#include <linux/module.h>
#include <linux/mm.h>
#include <linux/types.h>
#include <crypto/algapi.h>
#include <crypto/des.h>

#include <asm/fpumacro.h>
#include <asm/pstate.h>
#include <asm/elf.h>

#include "opcodes.h"
struct des_sparc64_ctx {
        u64 encrypt_expkey[DES_EXPKEY_WORDS / 2];
        u64 decrypt_expkey[DES_EXPKEY_WORDS / 2];
};
struct des3_ede_sparc64_ctx {
        u64 encrypt_expkey[DES3_EDE_EXPKEY_WORDS / 2];
        u64 decrypt_expkey[DES3_EDE_EXPKEY_WORDS / 2];
};
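/* DES reuses the encryption round keys for decryption, just in reverse
 * order.  encrypt_to_decrypt() builds the decryption schedule by copying
 * the per-round 64-bit subkey words back to front.
 */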
static void encrypt_to_decrypt(u64 *d, const u64 *e)
{
        const u64 *s = e + (DES_EXPKEY_WORDS / 2) - 1;
        int i;

        for (i = 0; i < DES_EXPKEY_WORDS / 2; i++)
                *d++ = *s--;
}
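/* The des_sparc64_*() and des3_ede_sparc64_*() routines declared below are
 * implemented in assembly (des_asm.S in this directory) on top of the
 * sparc64 DES crypto opcodes.
 */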
extern void des_sparc64_key_expand(const u32 *input_key, u64 *key);

static int des_set_key(struct crypto_tfm *tfm, const u8 *key,
                       unsigned int keylen)
{
        struct des_sparc64_ctx *dctx = crypto_tfm_ctx(tfm);
        u32 *flags = &tfm->crt_flags;
        u32 tmp[DES_EXPKEY_WORDS];
        int ret;

        /* Even though we have special instructions for key expansion,
         * we call des_ekey() so that we don't have to write our own
         * weak key detection code.
         */
        ret = des_ekey(tmp, key);
        if (unlikely(ret == 0) && (*flags & CRYPTO_TFM_REQ_WEAK_KEY)) {
                *flags |= CRYPTO_TFM_RES_WEAK_KEY;
                return -EINVAL;
        }

        des_sparc64_key_expand((const u32 *) key, &dctx->encrypt_expkey[0]);
        encrypt_to_decrypt(&dctx->decrypt_expkey[0], &dctx->encrypt_expkey[0]);

        return 0;
}
extern void des_sparc64_crypt(const u64 *key, const u64 *input,
                              u64 *output);

static void des_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
        struct des_sparc64_ctx *ctx = crypto_tfm_ctx(tfm);
        const u64 *K = ctx->encrypt_expkey;

        des_sparc64_crypt(K, (const u64 *) src, (u64 *) dst);
}

static void des_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
        struct des_sparc64_ctx *ctx = crypto_tfm_ctx(tfm);
        const u64 *K = ctx->decrypt_expkey;

        des_sparc64_crypt(K, (const u64 *) src, (u64 *) dst);
}
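/* Bulk paths: the expanded key is loaded once with des_sparc64_load_keys()
 * and the ecb/cbc helpers below then process whole runs of blocks per call.
 * The key material sits in FPU registers while this happens, so each path
 * finishes with fprs_write(0) to mark the FPU state clean again.
 */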
extern void des_sparc64_load_keys(const u64 *key);

extern void des_sparc64_ecb_crypt(const u64 *input, u64 *output,
                                  unsigned int len);

#define DES_BLOCK_MASK  (~(DES_BLOCK_SIZE - 1))
static int __ecb_crypt(struct blkcipher_desc *desc,
                       struct scatterlist *dst, struct scatterlist *src,
                       unsigned int nbytes, bool encrypt)
{
        struct des_sparc64_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk walk;
        int err;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        err = blkcipher_walk_virt(desc, &walk);
        desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

        if (encrypt)
                des_sparc64_load_keys(&ctx->encrypt_expkey[0]);
        else
                des_sparc64_load_keys(&ctx->decrypt_expkey[0]);
        while ((nbytes = walk.nbytes)) {
                unsigned int block_len = nbytes & DES_BLOCK_MASK;

                if (likely(block_len)) {
                        des_sparc64_ecb_crypt((const u64 *)walk.src.virt.addr,
                                              (u64 *) walk.dst.virt.addr,
                                              block_len);
                }
                nbytes &= DES_BLOCK_SIZE - 1;
                err = blkcipher_walk_done(desc, &walk, nbytes);
        }
        fprs_write(0);
        return err;
}
static int ecb_encrypt(struct blkcipher_desc *desc,
                       struct scatterlist *dst, struct scatterlist *src,
                       unsigned int nbytes)
{
        return __ecb_crypt(desc, dst, src, nbytes, true);
}

static int ecb_decrypt(struct blkcipher_desc *desc,
                       struct scatterlist *dst, struct scatterlist *src,
                       unsigned int nbytes)
{
        return __ecb_crypt(desc, dst, src, nbytes, false);
}
extern void des_sparc64_cbc_encrypt(const u64 *input, u64 *output,
                                    unsigned int len, u64 *iv);

static int cbc_encrypt(struct blkcipher_desc *desc,
                       struct scatterlist *dst, struct scatterlist *src,
                       unsigned int nbytes)
{
        struct des_sparc64_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk walk;
        int err;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        err = blkcipher_walk_virt(desc, &walk);
        desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

        des_sparc64_load_keys(&ctx->encrypt_expkey[0]);
        while ((nbytes = walk.nbytes)) {
                unsigned int block_len = nbytes & DES_BLOCK_MASK;

                if (likely(block_len)) {
                        des_sparc64_cbc_encrypt((const u64 *)walk.src.virt.addr,
                                                (u64 *) walk.dst.virt.addr,
                                                block_len, (u64 *) walk.iv);
                }
                nbytes &= DES_BLOCK_SIZE - 1;
                err = blkcipher_walk_done(desc, &walk, nbytes);
        }
        fprs_write(0);
        return err;
}
extern void des_sparc64_cbc_decrypt(const u64 *input, u64 *output,
                                    unsigned int len, u64 *iv);

static int cbc_decrypt(struct blkcipher_desc *desc,
                       struct scatterlist *dst, struct scatterlist *src,
                       unsigned int nbytes)
{
        struct des_sparc64_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk walk;
        int err;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        err = blkcipher_walk_virt(desc, &walk);
        desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

        des_sparc64_load_keys(&ctx->decrypt_expkey[0]);
        while ((nbytes = walk.nbytes)) {
                unsigned int block_len = nbytes & DES_BLOCK_MASK;

                if (likely(block_len)) {
                        des_sparc64_cbc_decrypt((const u64 *)walk.src.virt.addr,
                                                (u64 *) walk.dst.virt.addr,
                                                block_len, (u64 *) walk.iv);
                }
                nbytes &= DES_BLOCK_SIZE - 1;
                err = blkcipher_walk_done(desc, &walk, nbytes);
        }
        fprs_write(0);
        return err;
}
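/* 3DES-EDE key schedule layout: the encrypt schedule below is built as
 * E(k1), D(k2), E(k3) and the decrypt schedule as D(k3), E(k2), D(k1), so a
 * single concatenated schedule drives all three DES passes in order.
 */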
static int des3_ede_set_key(struct crypto_tfm *tfm, const u8 *key,
                            unsigned int keylen)
{
        struct des3_ede_sparc64_ctx *dctx = crypto_tfm_ctx(tfm);
        const u32 *K = (const u32 *)key;
        u32 *flags = &tfm->crt_flags;
        u64 k1[DES_EXPKEY_WORDS / 2];
        u64 k2[DES_EXPKEY_WORDS / 2];
        u64 k3[DES_EXPKEY_WORDS / 2];

        if (unlikely(!((K[0] ^ K[2]) | (K[1] ^ K[3])) ||
                     !((K[2] ^ K[4]) | (K[3] ^ K[5]))) &&
            (*flags & CRYPTO_TFM_REQ_WEAK_KEY)) {
                *flags |= CRYPTO_TFM_RES_WEAK_KEY;
                return -EINVAL;
        }

        des_sparc64_key_expand((const u32 *)key, k1);
        key += DES_KEY_SIZE;
        des_sparc64_key_expand((const u32 *)key, k2);
        key += DES_KEY_SIZE;
        des_sparc64_key_expand((const u32 *)key, k3);

        memcpy(&dctx->encrypt_expkey[0], &k1[0], sizeof(k1));
        encrypt_to_decrypt(&dctx->encrypt_expkey[DES_EXPKEY_WORDS / 2], &k2[0]);
        memcpy(&dctx->encrypt_expkey[(DES_EXPKEY_WORDS / 2) * 2],
               &k3[0], sizeof(k3));

        encrypt_to_decrypt(&dctx->decrypt_expkey[0], &k3[0]);
        memcpy(&dctx->decrypt_expkey[DES_EXPKEY_WORDS / 2],
               &k2[0], sizeof(k2));
        encrypt_to_decrypt(&dctx->decrypt_expkey[(DES_EXPKEY_WORDS / 2) * 2],
                           &k1[0]);

        return 0;
}
extern void des3_ede_sparc64_crypt(const u64 *key, const u64 *input,
                                   u64 *output);

static void des3_ede_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
        struct des3_ede_sparc64_ctx *ctx = crypto_tfm_ctx(tfm);
        const u64 *K = ctx->encrypt_expkey;

        des3_ede_sparc64_crypt(K, (const u64 *) src, (u64 *) dst);
}

static void des3_ede_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
        struct des3_ede_sparc64_ctx *ctx = crypto_tfm_ctx(tfm);
        const u64 *K = ctx->decrypt_expkey;

        des3_ede_sparc64_crypt(K, (const u64 *) src, (u64 *) dst);
}
extern void des3_ede_sparc64_load_keys(const u64 *key);

extern void des3_ede_sparc64_ecb_crypt(const u64 *expkey, const u64 *input,
                                       u64 *output, unsigned int len);
static int __ecb3_crypt(struct blkcipher_desc *desc,
                        struct scatterlist *dst, struct scatterlist *src,
                        unsigned int nbytes, bool encrypt)
{
        struct des3_ede_sparc64_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk walk;
        const u64 *K;
        int err;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        err = blkcipher_walk_virt(desc, &walk);
        desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

        if (encrypt)
                K = &ctx->encrypt_expkey[0];
        else
                K = &ctx->decrypt_expkey[0];
        des3_ede_sparc64_load_keys(K);
        while ((nbytes = walk.nbytes)) {
                unsigned int block_len = nbytes & DES_BLOCK_MASK;

                if (likely(block_len)) {
                        const u64 *src64 = (const u64 *)walk.src.virt.addr;
                        des3_ede_sparc64_ecb_crypt(K, src64,
                                                   (u64 *) walk.dst.virt.addr,
                                                   block_len);
                }
                nbytes &= DES_BLOCK_SIZE - 1;
                err = blkcipher_walk_done(desc, &walk, nbytes);
        }
        fprs_write(0);
        return err;
}
static int ecb3_encrypt(struct blkcipher_desc *desc,
                        struct scatterlist *dst, struct scatterlist *src,
                        unsigned int nbytes)
{
        return __ecb3_crypt(desc, dst, src, nbytes, true);
}

static int ecb3_decrypt(struct blkcipher_desc *desc,
                        struct scatterlist *dst, struct scatterlist *src,
                        unsigned int nbytes)
{
        return __ecb3_crypt(desc, dst, src, nbytes, false);
}
extern void des3_ede_sparc64_cbc_encrypt(const u64 *expkey, const u64 *input,
                                         u64 *output, unsigned int len,
                                         u64 *iv);

static int cbc3_encrypt(struct blkcipher_desc *desc,
                        struct scatterlist *dst, struct scatterlist *src,
                        unsigned int nbytes)
{
        struct des3_ede_sparc64_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk walk;
        const u64 *K;
        int err;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        err = blkcipher_walk_virt(desc, &walk);
        desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

        K = &ctx->encrypt_expkey[0];
        des3_ede_sparc64_load_keys(K);
        while ((nbytes = walk.nbytes)) {
                unsigned int block_len = nbytes & DES_BLOCK_MASK;

                if (likely(block_len)) {
                        const u64 *src64 = (const u64 *)walk.src.virt.addr;
                        des3_ede_sparc64_cbc_encrypt(K, src64,
                                                     (u64 *) walk.dst.virt.addr,
                                                     block_len,
                                                     (u64 *) walk.iv);
                }
                nbytes &= DES_BLOCK_SIZE - 1;
                err = blkcipher_walk_done(desc, &walk, nbytes);
        }
        fprs_write(0);
        return err;
}
extern void des3_ede_sparc64_cbc_decrypt(const u64 *expkey, const u64 *input,
                                         u64 *output, unsigned int len,
                                         u64 *iv);

static int cbc3_decrypt(struct blkcipher_desc *desc,
                        struct scatterlist *dst, struct scatterlist *src,
                        unsigned int nbytes)
{
        struct des3_ede_sparc64_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk walk;
        const u64 *K;
        int err;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        err = blkcipher_walk_virt(desc, &walk);
        desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

        K = &ctx->decrypt_expkey[0];
        des3_ede_sparc64_load_keys(K);
        while ((nbytes = walk.nbytes)) {
                unsigned int block_len = nbytes & DES_BLOCK_MASK;

                if (likely(block_len)) {
                        const u64 *src64 = (const u64 *)walk.src.virt.addr;
                        des3_ede_sparc64_cbc_decrypt(K, src64,
                                                     (u64 *) walk.dst.virt.addr,
                                                     block_len,
                                                     (u64 *) walk.iv);
                }
                nbytes &= DES_BLOCK_SIZE - 1;
                err = blkcipher_walk_done(desc, &walk, nbytes);
        }
        fprs_write(0);
        return err;
}
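/* One crypto_alg entry per exported algorithm: the bare "des" and "des3_ede"
 * single-block ciphers plus their ecb/cbc blkcipher wrappers.  All register
 * at SPARC_CR_OPCODE_PRIORITY so they are preferred over the generic C
 * implementations when the opcodes are available.
 */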
static struct crypto_alg algs[] = { {
        .cra_name               = "des",
        .cra_driver_name        = "des-sparc64",
        .cra_priority           = SPARC_CR_OPCODE_PRIORITY,
        .cra_flags              = CRYPTO_ALG_TYPE_CIPHER,
        .cra_blocksize          = DES_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct des_sparc64_ctx),
        .cra_alignmask          = 7,
        .cra_module             = THIS_MODULE,
        .cra_u  = {
                .cipher = {
                        .cia_min_keysize        = DES_KEY_SIZE,
                        .cia_max_keysize        = DES_KEY_SIZE,
                        .cia_setkey             = des_set_key,
                        .cia_encrypt            = des_encrypt,
                        .cia_decrypt            = des_decrypt
                }
        }
}, {
        .cra_name               = "ecb(des)",
        .cra_driver_name        = "ecb-des-sparc64",
        .cra_priority           = SPARC_CR_OPCODE_PRIORITY,
        .cra_flags              = CRYPTO_ALG_TYPE_BLKCIPHER,
        .cra_blocksize          = DES_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct des_sparc64_ctx),
        .cra_alignmask          = 7,
        .cra_type               = &crypto_blkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_u = {
                .blkcipher = {
                        .min_keysize    = DES_KEY_SIZE,
                        .max_keysize    = DES_KEY_SIZE,
                        .setkey         = des_set_key,
                        .encrypt        = ecb_encrypt,
                        .decrypt        = ecb_decrypt,
                },
        },
}, {
        .cra_name               = "cbc(des)",
        .cra_driver_name        = "cbc-des-sparc64",
        .cra_priority           = SPARC_CR_OPCODE_PRIORITY,
        .cra_flags              = CRYPTO_ALG_TYPE_BLKCIPHER,
        .cra_blocksize          = DES_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct des_sparc64_ctx),
        .cra_alignmask          = 7,
        .cra_type               = &crypto_blkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_u = {
                .blkcipher = {
                        .min_keysize    = DES_KEY_SIZE,
                        .max_keysize    = DES_KEY_SIZE,
                        .setkey         = des_set_key,
                        .encrypt        = cbc_encrypt,
                        .decrypt        = cbc_decrypt,
                },
        },
}, {
        .cra_name               = "des3_ede",
        .cra_driver_name        = "des3_ede-sparc64",
        .cra_priority           = SPARC_CR_OPCODE_PRIORITY,
        .cra_flags              = CRYPTO_ALG_TYPE_CIPHER,
        .cra_blocksize          = DES3_EDE_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct des3_ede_sparc64_ctx),
        .cra_alignmask          = 7,
        .cra_module             = THIS_MODULE,
        .cra_u  = {
                .cipher = {
                        .cia_min_keysize        = DES3_EDE_KEY_SIZE,
                        .cia_max_keysize        = DES3_EDE_KEY_SIZE,
                        .cia_setkey             = des3_ede_set_key,
                        .cia_encrypt            = des3_ede_encrypt,
                        .cia_decrypt            = des3_ede_decrypt
                }
        }
}, {
        .cra_name               = "ecb(des3_ede)",
        .cra_driver_name        = "ecb-des3_ede-sparc64",
        .cra_priority           = SPARC_CR_OPCODE_PRIORITY,
        .cra_flags              = CRYPTO_ALG_TYPE_BLKCIPHER,
        .cra_blocksize          = DES3_EDE_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct des3_ede_sparc64_ctx),
        .cra_alignmask          = 7,
        .cra_type               = &crypto_blkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_u = {
                .blkcipher = {
                        .min_keysize    = DES3_EDE_KEY_SIZE,
                        .max_keysize    = DES3_EDE_KEY_SIZE,
                        .setkey         = des3_ede_set_key,
                        .encrypt        = ecb3_encrypt,
                        .decrypt        = ecb3_decrypt,
                },
        },
}, {
        .cra_name               = "cbc(des3_ede)",
        .cra_driver_name        = "cbc-des3_ede-sparc64",
        .cra_priority           = SPARC_CR_OPCODE_PRIORITY,
        .cra_flags              = CRYPTO_ALG_TYPE_BLKCIPHER,
        .cra_blocksize          = DES3_EDE_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct des3_ede_sparc64_ctx),
        .cra_alignmask          = 7,
        .cra_type               = &crypto_blkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_u = {
                .blkcipher = {
                        .min_keysize    = DES3_EDE_KEY_SIZE,
                        .max_keysize    = DES3_EDE_KEY_SIZE,
                        .setkey         = des3_ede_set_key,
                        .encrypt        = cbc3_encrypt,
                        .decrypt        = cbc3_decrypt,
                },
        },
} };
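/* Runtime probe: the accelerated algorithms are only registered when the CPU
 * advertises HWCAP_SPARC_CRYPTO in its ELF hwcaps and the CFR_DES bit is set
 * in the feature register read via %asr26.
 */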
static bool __init sparc64_has_des_opcode(void)
{
        unsigned long cfr;

        if (!(sparc64_elf_hwcap & HWCAP_SPARC_CRYPTO))
                return false;

        __asm__ __volatile__("rd %%asr26, %0" : "=r" (cfr));
        if (!(cfr & CFR_DES))
                return false;

        return true;
}
static int __init des_sparc64_mod_init(void)
{
        int i;

        for (i = 0; i < ARRAY_SIZE(algs); i++)
                INIT_LIST_HEAD(&algs[i].cra_list);

        if (sparc64_has_des_opcode()) {
                pr_info("Using sparc64 des opcodes optimized DES implementation\n");
                return crypto_register_algs(algs, ARRAY_SIZE(algs));
        }
        pr_info("sparc64 des opcodes not available.\n");
        return -ENODEV;
}
static void __exit des_sparc64_mod_fini(void)
{
        crypto_unregister_algs(algs, ARRAY_SIZE(algs));
}

module_init(des_sparc64_mod_init);
module_exit(des_sparc64_mod_fini);
MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("DES & Triple DES EDE Cipher Algorithms, sparc64 des opcode accelerated");

MODULE_ALIAS_CRYPTO("des");
MODULE_ALIAS_CRYPTO("des3_ede");
#include "crop_devid.c"