/*
 * s390 implementation of the DES Cipher Algorithm.
 *
 * Copyright IBM Corp. 2003,2011
 * Author(s): Thomas Spatzier
 *	      Jan Glauber (jan.glauber@de.ibm.com)
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 */
#include <linux/init.h>
#include <linux/module.h>
#include <linux/crypto.h>
#include <crypto/algapi.h>
#include <crypto/des.h>

#include "crypt_s390.h"
#define DES3_KEY_SIZE	(3 * DES_KEY_SIZE)
static u8 *ctrblk;	/* counter block buffer shared by the CTR modes */

struct s390_des_ctx {
	u8 iv[DES_BLOCK_SIZE];
	u8 key[DES3_KEY_SIZE];
};
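
/*
 * DES key setup: des_ekey() from the generic DES code is used only to
 * detect weak keys; the hardware KM instruction consumes the raw key,
 * so no expanded key schedule is stored in the context.
 */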
static int des_setkey(struct crypto_tfm *tfm, const u8 *key,
		      unsigned int key_len)
{
	struct s390_des_ctx *ctx = crypto_tfm_ctx(tfm);
	u32 *flags = &tfm->crt_flags;
	u32 tmp[DES_EXPKEY_WORDS];

	/* check for weak keys */
	if (!des_ekey(tmp, key) && (*flags & CRYPTO_TFM_REQ_WEAK_KEY)) {
		*flags |= CRYPTO_TFM_RES_WEAK_KEY;
		return -EINVAL;
	}

	memcpy(ctx->key, key, key_len);
	return 0;
}
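
/*
 * Single-block DES encryption/decryption. Both helpers hand the work to
 * the CPACF cipher message (KM) instruction via crypt_s390_km().
 */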
static void des_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
{
	struct s390_des_ctx *ctx = crypto_tfm_ctx(tfm);

	crypt_s390_km(KM_DEA_ENCRYPT, ctx->key, out, in, DES_BLOCK_SIZE);
}
static void des_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
{
	struct s390_des_ctx *ctx = crypto_tfm_ctx(tfm);

	crypt_s390_km(KM_DEA_DECRYPT, ctx->key, out, in, DES_BLOCK_SIZE);
}
static struct crypto_alg des_alg = {
	.cra_name = "des",
	.cra_driver_name = "des-s390",
	.cra_priority = CRYPT_S390_PRIORITY,
	.cra_flags = CRYPTO_ALG_TYPE_CIPHER,
	.cra_blocksize = DES_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct s390_des_ctx),
	.cra_module = THIS_MODULE,
	.cra_list = LIST_HEAD_INIT(des_alg.cra_list),
	.cra_u = {
		.cipher = {
			.cia_min_keysize = DES_KEY_SIZE,
			.cia_max_keysize = DES_KEY_SIZE,
			.cia_setkey = des_setkey,
			.cia_encrypt = des_encrypt,
			.cia_decrypt = des_decrypt,
		}
	}
};
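
/*
 * ECB helper shared by the DES and DES3 blkcipher modes: walk the
 * scatterlists and pass each run of complete blocks to the KM
 * instruction; a trailing partial block is returned to the walk code.
 */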
static int ecb_desall_crypt(struct blkcipher_desc *desc, long func,
			    u8 *key, struct blkcipher_walk *walk)
{
	int ret = blkcipher_walk_virt(desc, walk);
	unsigned int nbytes;

	while ((nbytes = walk->nbytes)) {
		/* only use complete blocks */
		unsigned int n = nbytes & ~(DES_BLOCK_SIZE - 1);
		u8 *out = walk->dst.virt.addr;
		u8 *in = walk->src.virt.addr;

		ret = crypt_s390_km(func, key, out, in, n);
		BUG_ON((ret < 0) || (ret != n));

		nbytes &= DES_BLOCK_SIZE - 1;
		ret = blkcipher_walk_done(desc, walk, nbytes);
	}

	return ret;
}
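
/*
 * CBC helper: the chaining value is kept in a local copy of the IV so
 * that the KMC instruction can update it in place; the final value is
 * copied back to walk->iv when the walk completes.
 */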
static int cbc_desall_crypt(struct blkcipher_desc *desc, long func,
			    u8 *iv, struct blkcipher_walk *walk)
{
	int ret = blkcipher_walk_virt(desc, walk);
	unsigned int nbytes = walk->nbytes;

	if (!nbytes)
		goto out;

	memcpy(iv, walk->iv, DES_BLOCK_SIZE);
	do {
		/* only use complete blocks */
		unsigned int n = nbytes & ~(DES_BLOCK_SIZE - 1);
		u8 *out = walk->dst.virt.addr;
		u8 *in = walk->src.virt.addr;

		ret = crypt_s390_kmc(func, iv, out, in, n);
		BUG_ON((ret < 0) || (ret != n));

		nbytes &= DES_BLOCK_SIZE - 1;
		ret = blkcipher_walk_done(desc, walk, nbytes);
	} while ((nbytes = walk->nbytes));
	memcpy(walk->iv, iv, DES_BLOCK_SIZE);

out:
	return ret;
}
static int ecb_des_encrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_des_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ecb_desall_crypt(desc, KM_DEA_ENCRYPT, ctx->key, &walk);
}
static int ecb_des_decrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_des_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ecb_desall_crypt(desc, KM_DEA_DECRYPT, ctx->key, &walk);
}
static struct crypto_alg ecb_des_alg = {
	.cra_name = "ecb(des)",
	.cra_driver_name = "ecb-des-s390",
	.cra_priority = CRYPT_S390_COMPOSITE_PRIORITY,
	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize = DES_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct s390_des_ctx),
	.cra_type = &crypto_blkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_list = LIST_HEAD_INIT(ecb_des_alg.cra_list),
	.cra_u = {
		.blkcipher = {
			.min_keysize = DES_KEY_SIZE,
			.max_keysize = DES_KEY_SIZE,
			.setkey = des_setkey,
			.encrypt = ecb_des_encrypt,
			.decrypt = ecb_des_decrypt,
		}
	}
};
static int cbc_des_encrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_des_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return cbc_desall_crypt(desc, KMC_DEA_ENCRYPT, ctx->iv, &walk);
}
static int cbc_des_decrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_des_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return cbc_desall_crypt(desc, KMC_DEA_DECRYPT, ctx->iv, &walk);
}
static struct crypto_alg cbc_des_alg = {
	.cra_name = "cbc(des)",
	.cra_driver_name = "cbc-des-s390",
	.cra_priority = CRYPT_S390_COMPOSITE_PRIORITY,
	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize = DES_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct s390_des_ctx),
	.cra_type = &crypto_blkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_list = LIST_HEAD_INIT(cbc_des_alg.cra_list),
	.cra_u = {
		.blkcipher = {
			.min_keysize = DES_KEY_SIZE,
			.max_keysize = DES_KEY_SIZE,
			.ivsize = DES_BLOCK_SIZE,
			.setkey = des_setkey,
			.encrypt = cbc_des_encrypt,
			.decrypt = cbc_des_decrypt,
		}
	}
};
/*
 * For DES-EDE3, there is no known need to reject weak or
 * complementation keys.  Any weakness is obviated by the use of
 * multiple keys.
 *
 * However, if the first two or last two independent 64-bit keys are
 * equal (k1 == k2 or k2 == k3), then the DES3 operation is simply the
 * same as DES.  Implementers MUST reject keys that exhibit this
 * property.
 */
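
/*
 * Worked example of the degenerate case described above: with EDE,
 * C = E_k3(D_k2(E_k1(P))). If k1 == k2 the inner pair cancels and
 * C = E_k3(P), i.e. plain single DES under k3; likewise k2 == k3
 * collapses to E_k1(P). des3_setkey() therefore rejects such keys
 * when the user requested weak-key checking.
 */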
static int des3_setkey(struct crypto_tfm *tfm, const u8 *key,
		       unsigned int key_len)
{
	struct s390_des_ctx *ctx = crypto_tfm_ctx(tfm);
	u32 *flags = &tfm->crt_flags;

	if (!(memcmp(key, &key[DES_KEY_SIZE], DES_KEY_SIZE) &&
	    memcmp(&key[DES_KEY_SIZE], &key[DES_KEY_SIZE * 2],
		   DES_KEY_SIZE)) &&
	    (*flags & CRYPTO_TFM_REQ_WEAK_KEY)) {
		*flags |= CRYPTO_TFM_RES_WEAK_KEY;
		return -EINVAL;
	}
	memcpy(ctx->key, key, key_len);
	return 0;
}
static void des3_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct s390_des_ctx *ctx = crypto_tfm_ctx(tfm);

	crypt_s390_km(KM_TDEA_192_ENCRYPT, ctx->key, dst, src, DES_BLOCK_SIZE);
}
static void des3_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct s390_des_ctx *ctx = crypto_tfm_ctx(tfm);

	crypt_s390_km(KM_TDEA_192_DECRYPT, ctx->key, dst, src, DES_BLOCK_SIZE);
}
static struct crypto_alg des3_alg = {
	.cra_name = "des3_ede",
	.cra_driver_name = "des3_ede-s390",
	.cra_priority = CRYPT_S390_PRIORITY,
	.cra_flags = CRYPTO_ALG_TYPE_CIPHER,
	.cra_blocksize = DES_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct s390_des_ctx),
	.cra_module = THIS_MODULE,
	.cra_list = LIST_HEAD_INIT(des3_alg.cra_list),
	.cra_u = {
		.cipher = {
			.cia_min_keysize = DES3_KEY_SIZE,
			.cia_max_keysize = DES3_KEY_SIZE,
			.cia_setkey = des3_setkey,
			.cia_encrypt = des3_encrypt,
			.cia_decrypt = des3_decrypt,
		}
	}
};
static int ecb_des3_encrypt(struct blkcipher_desc *desc,
			    struct scatterlist *dst, struct scatterlist *src,
			    unsigned int nbytes)
{
	struct s390_des_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ecb_desall_crypt(desc, KM_TDEA_192_ENCRYPT, ctx->key, &walk);
}
static int ecb_des3_decrypt(struct blkcipher_desc *desc,
			    struct scatterlist *dst, struct scatterlist *src,
			    unsigned int nbytes)
{
	struct s390_des_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ecb_desall_crypt(desc, KM_TDEA_192_DECRYPT, ctx->key, &walk);
}
static struct crypto_alg ecb_des3_alg = {
	.cra_name = "ecb(des3_ede)",
	.cra_driver_name = "ecb-des3_ede-s390",
	.cra_priority = CRYPT_S390_COMPOSITE_PRIORITY,
	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize = DES_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct s390_des_ctx),
	.cra_type = &crypto_blkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_list = LIST_HEAD_INIT(ecb_des3_alg.cra_list),
	.cra_u = {
		.blkcipher = {
			.min_keysize = DES3_KEY_SIZE,
			.max_keysize = DES3_KEY_SIZE,
			.setkey = des3_setkey,
			.encrypt = ecb_des3_encrypt,
			.decrypt = ecb_des3_decrypt,
		}
	}
};
static int cbc_des3_encrypt(struct blkcipher_desc *desc,
			    struct scatterlist *dst, struct scatterlist *src,
			    unsigned int nbytes)
{
	struct s390_des_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return cbc_desall_crypt(desc, KMC_TDEA_192_ENCRYPT, ctx->iv, &walk);
}
static int cbc_des3_decrypt(struct blkcipher_desc *desc,
			    struct scatterlist *dst, struct scatterlist *src,
			    unsigned int nbytes)
{
	struct s390_des_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return cbc_desall_crypt(desc, KMC_TDEA_192_DECRYPT, ctx->iv, &walk);
}
static struct crypto_alg cbc_des3_alg = {
	.cra_name = "cbc(des3_ede)",
	.cra_driver_name = "cbc-des3_ede-s390",
	.cra_priority = CRYPT_S390_COMPOSITE_PRIORITY,
	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize = DES_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct s390_des_ctx),
	.cra_type = &crypto_blkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_list = LIST_HEAD_INIT(cbc_des3_alg.cra_list),
	.cra_u = {
		.blkcipher = {
			.min_keysize = DES3_KEY_SIZE,
			.max_keysize = DES3_KEY_SIZE,
			.ivsize = DES_BLOCK_SIZE,
			.setkey = des3_setkey,
			.encrypt = cbc_des3_encrypt,
			.decrypt = cbc_des3_decrypt,
		}
	}
};
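
/*
 * CTR helper shared by the DES and DES3 counter modes: successive
 * counter values are pre-formatted into the shared ctrblk page so that
 * one KMCTR call can process up to PAGE_SIZE bytes; a final partial
 * block is handled through a stack buffer.
 */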
static int ctr_desall_crypt(struct blkcipher_desc *desc, long func,
			    struct s390_des_ctx *ctx, struct blkcipher_walk *walk)
{
	int ret = blkcipher_walk_virt_block(desc, walk, DES_BLOCK_SIZE);
	unsigned int i, n, nbytes;
	u8 buf[DES_BLOCK_SIZE];
	u8 *out, *in;

	memcpy(ctrblk, walk->iv, DES_BLOCK_SIZE);
	while ((nbytes = walk->nbytes) >= DES_BLOCK_SIZE) {
		out = walk->dst.virt.addr;
		in = walk->src.virt.addr;
		while (nbytes >= DES_BLOCK_SIZE) {
			/* align to block size, max. PAGE_SIZE */
			n = (nbytes > PAGE_SIZE) ? PAGE_SIZE :
				nbytes & ~(DES_BLOCK_SIZE - 1);
			for (i = DES_BLOCK_SIZE; i < n; i += DES_BLOCK_SIZE) {
				memcpy(ctrblk + i, ctrblk + i - DES_BLOCK_SIZE,
				       DES_BLOCK_SIZE);
				crypto_inc(ctrblk + i, DES_BLOCK_SIZE);
			}
			ret = crypt_s390_kmctr(func, ctx->key, out, in, n, ctrblk);
			BUG_ON((ret < 0) || (ret != n));
			if (n > DES_BLOCK_SIZE)
				memcpy(ctrblk, ctrblk + n - DES_BLOCK_SIZE,
				       DES_BLOCK_SIZE);
			crypto_inc(ctrblk, DES_BLOCK_SIZE);
			out += n;
			in += n;
			nbytes -= n;
		}
		ret = blkcipher_walk_done(desc, walk, nbytes);
	}

	/* final block may be < DES_BLOCK_SIZE, copy only nbytes */
	if (nbytes) {
		out = walk->dst.virt.addr;
		in = walk->src.virt.addr;
		ret = crypt_s390_kmctr(func, ctx->key, buf, in,
				       DES_BLOCK_SIZE, ctrblk);
		BUG_ON(ret < 0 || ret != DES_BLOCK_SIZE);
		memcpy(out, buf, nbytes);
		crypto_inc(ctrblk, DES_BLOCK_SIZE);
		ret = blkcipher_walk_done(desc, walk, 0);
	}
	memcpy(walk->iv, ctrblk, DES_BLOCK_SIZE);
	return ret;
}
static int ctr_des_encrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_des_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ctr_desall_crypt(desc, KMCTR_DEA_ENCRYPT, ctx, &walk);
}
static int ctr_des_decrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_des_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ctr_desall_crypt(desc, KMCTR_DEA_DECRYPT, ctx, &walk);
}
static struct crypto_alg ctr_des_alg = {
	.cra_name = "ctr(des)",
	.cra_driver_name = "ctr-des-s390",
	.cra_priority = CRYPT_S390_COMPOSITE_PRIORITY,
	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize = 1,
	.cra_ctxsize = sizeof(struct s390_des_ctx),
	.cra_type = &crypto_blkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_list = LIST_HEAD_INIT(ctr_des_alg.cra_list),
	.cra_u = {
		.blkcipher = {
			.min_keysize = DES_KEY_SIZE,
			.max_keysize = DES_KEY_SIZE,
			.ivsize = DES_BLOCK_SIZE,
			.setkey = des_setkey,
			.encrypt = ctr_des_encrypt,
			.decrypt = ctr_des_decrypt,
		}
	}
};
static int ctr_des3_encrypt(struct blkcipher_desc *desc,
			    struct scatterlist *dst, struct scatterlist *src,
			    unsigned int nbytes)
{
	struct s390_des_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ctr_desall_crypt(desc, KMCTR_TDEA_192_ENCRYPT, ctx, &walk);
}
static int ctr_des3_decrypt(struct blkcipher_desc *desc,
			    struct scatterlist *dst, struct scatterlist *src,
			    unsigned int nbytes)
{
	struct s390_des_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ctr_desall_crypt(desc, KMCTR_TDEA_192_DECRYPT, ctx, &walk);
}
static struct crypto_alg ctr_des3_alg = {
	.cra_name = "ctr(des3_ede)",
	.cra_driver_name = "ctr-des3_ede-s390",
	.cra_priority = CRYPT_S390_COMPOSITE_PRIORITY,
	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize = 1,
	.cra_ctxsize = sizeof(struct s390_des_ctx),
	.cra_type = &crypto_blkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_list = LIST_HEAD_INIT(ctr_des3_alg.cra_list),
	.cra_u = {
		.blkcipher = {
			.min_keysize = DES3_KEY_SIZE,
			.max_keysize = DES3_KEY_SIZE,
			.ivsize = DES_BLOCK_SIZE,
			.setkey = des3_setkey,
			.encrypt = ctr_des3_encrypt,
			.decrypt = ctr_des3_decrypt,
		}
	}
};
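
/*
 * Module init: the KM/KMC based algorithms require the message security
 * assist (MSA); the KMCTR based counter modes are registered only when
 * the hardware also provides them, together with one page for the
 * counter blocks. Registration is unwound in reverse order on error.
 */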
static int __init des_s390_init(void)
{
	int ret;

	if (!crypt_s390_func_available(KM_DEA_ENCRYPT, CRYPT_S390_MSA) ||
	    !crypt_s390_func_available(KM_TDEA_192_ENCRYPT, CRYPT_S390_MSA))
		return -EOPNOTSUPP;

	ret = crypto_register_alg(&des_alg);
	if (ret)
		goto des_err;
	ret = crypto_register_alg(&ecb_des_alg);
	if (ret)
		goto ecb_des_err;
	ret = crypto_register_alg(&cbc_des_alg);
	if (ret)
		goto cbc_des_err;
	ret = crypto_register_alg(&des3_alg);
	if (ret)
		goto des3_err;
	ret = crypto_register_alg(&ecb_des3_alg);
	if (ret)
		goto ecb_des3_err;
	ret = crypto_register_alg(&cbc_des3_alg);
	if (ret)
		goto cbc_des3_err;

	if (crypt_s390_func_available(KMCTR_DEA_ENCRYPT,
				      CRYPT_S390_MSA | CRYPT_S390_MSA4) &&
	    crypt_s390_func_available(KMCTR_TDEA_192_ENCRYPT,
				      CRYPT_S390_MSA | CRYPT_S390_MSA4)) {
		ret = crypto_register_alg(&ctr_des_alg);
		if (ret)
			goto ctr_des_err;
		ret = crypto_register_alg(&ctr_des3_alg);
		if (ret)
			goto ctr_des3_err;
		ctrblk = (u8 *) __get_free_page(GFP_KERNEL);
		if (!ctrblk) {
			ret = -ENOMEM;
			goto ctr_mem_err;
		}
	}
out:
	return ret;

ctr_mem_err:
	crypto_unregister_alg(&ctr_des3_alg);
ctr_des3_err:
	crypto_unregister_alg(&ctr_des_alg);
ctr_des_err:
	crypto_unregister_alg(&cbc_des3_alg);
cbc_des3_err:
	crypto_unregister_alg(&ecb_des3_alg);
ecb_des3_err:
	crypto_unregister_alg(&des3_alg);
des3_err:
	crypto_unregister_alg(&cbc_des_alg);
cbc_des_err:
	crypto_unregister_alg(&ecb_des_alg);
ecb_des_err:
	crypto_unregister_alg(&des_alg);
des_err:
	goto out;
}
static void __exit des_s390_exit(void)
{
	if (ctrblk) {
		crypto_unregister_alg(&ctr_des_alg);
		crypto_unregister_alg(&ctr_des3_alg);
		free_page((unsigned long) ctrblk);
	}
	crypto_unregister_alg(&cbc_des3_alg);
	crypto_unregister_alg(&ecb_des3_alg);
	crypto_unregister_alg(&des3_alg);
	crypto_unregister_alg(&cbc_des_alg);
	crypto_unregister_alg(&ecb_des_alg);
	crypto_unregister_alg(&des_alg);
}
module_init(des_s390_init);
module_exit(des_s390_exit);

MODULE_ALIAS("des3_ede");

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("DES & Triple DES EDE Cipher Algorithms");
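
/*
 * Illustrative sketch of how a kernel user of this era could reach the
 * "cbc(des3_ede)" implementation registered above through the generic
 * blkcipher API; the local names (key, iv, buf, buf_len) are placeholders
 * and error handling is abbreviated.
 *
 *	struct crypto_blkcipher *tfm;
 *	struct blkcipher_desc desc;
 *	struct scatterlist sg;
 *
 *	tfm = crypto_alloc_blkcipher("cbc(des3_ede)", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	crypto_blkcipher_setkey(tfm, key, DES3_KEY_SIZE);
 *	crypto_blkcipher_set_iv(tfm, iv, DES_BLOCK_SIZE);
 *	desc.tfm = tfm;
 *	desc.flags = 0;
 *	sg_init_one(&sg, buf, buf_len);
 *	crypto_blkcipher_encrypt(&desc, &sg, &sg, buf_len);
 *	crypto_free_blkcipher(tfm);
 */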