// SPDX-License-Identifier: GPL-2.0-only
/*
 * Intel IXP4xx NPE-C crypto driver
 *
 * Copyright (C) 2008 Christian Hohnstaedt <chohnstaedt@innominate.com>
 */

#include <linux/platform_device.h>
#include <linux/dma-mapping.h>
#include <linux/dmapool.h>
#include <linux/crypto.h>
#include <linux/kernel.h>
#include <linux/rtnetlink.h>
#include <linux/interrupt.h>
#include <linux/spinlock.h>
#include <linux/gfp.h>
#include <linux/module.h>

#include <crypto/ctr.h>
#include <crypto/internal/des.h>
#include <crypto/aes.h>
#include <crypto/hmac.h>
#include <crypto/sha.h>
#include <crypto/algapi.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/skcipher.h>
#include <crypto/authenc.h>
#include <crypto/scatterwalk.h>

#include <linux/soc/ixp4xx/npe.h>
#include <linux/soc/ixp4xx/qmgr.h>

#define MAX_KEYLEN 32

/* hash: cfgword + 2 * digestlen; crypt: keylen + cfgword */
#define NPE_CTX_LEN 80
#define AES_BLOCK128 16

#define NPE_OP_HASH_VERIFY   0x01
#define NPE_OP_CCM_ENABLE    0x04
#define NPE_OP_CRYPT_ENABLE  0x08
#define NPE_OP_HASH_ENABLE   0x10
#define NPE_OP_NOT_IN_PLACE  0x20
#define NPE_OP_HMAC_DISABLE  0x40
#define NPE_OP_CRYPT_ENCRYPT 0x80

#define NPE_OP_CCM_GEN_MIC   0xcc
#define NPE_OP_HASH_GEN_ICV  0x50
#define NPE_OP_ENC_GEN_KEY   0xc9

#define MOD_ECB     0x0000
#define MOD_CTR     0x1000
#define MOD_CBC_ENC 0x2000
#define MOD_CBC_DEC 0x3000
#define MOD_CCM_ENC 0x4000
#define MOD_CCM_DEC 0x5000

#define KEYLEN_128  4
#define KEYLEN_192  6
#define KEYLEN_256  8

#define CIPH_DECR   0x0000
#define CIPH_ENCR   0x0400

#define MOD_DES     0x0000
#define MOD_TDEA2   0x0100
#define MOD_3DES    0x0200
#define MOD_AES     0x0800
#define MOD_AES128  (0x0800 | KEYLEN_128)
#define MOD_AES192  (0x0900 | KEYLEN_192)
#define MOD_AES256  (0x0a00 | KEYLEN_256)

#define MAX_IVLEN   16
#define NPE_ID      2  /* NPE C */
#define NPE_QLEN    16
/* Space for registering when the first
 * NPE_QLEN crypt_ctl are busy */
#define NPE_QLEN_TOTAL 64

#define SEND_QID    29
#define RECV_QID    30

#define CTL_FLAG_UNUSED		0x0000
#define CTL_FLAG_USED		0x1000
#define CTL_FLAG_PERFORM_ABLK	0x0001
#define CTL_FLAG_GEN_ICV	0x0002
#define CTL_FLAG_GEN_REVAES	0x0004
#define CTL_FLAG_PERFORM_AEAD	0x0008
#define CTL_FLAG_MASK		0x000f

#define HMAC_PAD_BLOCKLEN SHA1_BLOCK_SIZE

#define MD5_DIGEST_SIZE   16

struct buffer_desc {
	u32 phys_next;
#ifdef __ARMEB__
	u16 buf_len;
	u16 pkt_len;
#else
	u16 pkt_len;
	u16 buf_len;
#endif
	dma_addr_t phys_addr;
	u32 __reserved[4];
	struct buffer_desc *next;
	enum dma_data_direction dir;
};

struct crypt_ctl {
#ifdef __ARMEB__
	u8 mode;		/* NPE_OP_* operation mode */
	u8 init_len;
	u16 reserved;
#else
	u16 reserved;
	u8 init_len;
	u8 mode;		/* NPE_OP_* operation mode */
#endif
	u8 iv[MAX_IVLEN];	/* IV for CBC mode or CTR IV for CTR mode */
	dma_addr_t icv_rev_aes;	/* icv or rev aes */
	u32 src_buf;
	u32 dst_buf;
#ifdef __ARMEB__
	u16 auth_offs;		/* Authentication start offset */
	u16 auth_len;		/* Authentication data length */
	u16 crypt_offs;		/* Cryption start offset */
	u16 crypt_len;		/* Cryption data length */
#else
	u16 auth_len;		/* Authentication data length */
	u16 auth_offs;		/* Authentication start offset */
	u16 crypt_len;		/* Cryption data length */
	u16 crypt_offs;		/* Cryption start offset */
#endif
	u32 aadAddr;		/* Additional Auth Data Addr for CCM mode */
	u32 crypto_ctx;		/* NPE Crypto Param structure address */

	/* Used by Host: 4*4 bytes */
	unsigned int ctl_flags;
	union {
		struct skcipher_request *ablk_req;
		struct aead_request *aead_req;
		struct crypto_tfm *tfm;
	} data;
	struct buffer_desc *regist_buf;
	u8 *regist_ptr;
};

struct ablk_ctx {
	struct buffer_desc *src;
	struct buffer_desc *dst;
};

struct aead_ctx {
	struct buffer_desc *src;
	struct buffer_desc *dst;
	struct scatterlist ivlist;
	/* used when the hmac is not on one sg entry */
	u8 *hmac_virt;
	int encrypt;
};

struct ix_hash_algo {
	u32 cfgword;
	unsigned char *icv;
};

struct ix_sa_dir {
	unsigned char *npe_ctx;
	dma_addr_t npe_ctx_phys;
	int npe_ctx_idx;
	u8 npe_mode;
};

struct ixp_ctx {
	struct ix_sa_dir encrypt;
	struct ix_sa_dir decrypt;
	int authkey_len;
	u8 authkey[MAX_KEYLEN];
	int enckey_len;
	u8 enckey[MAX_KEYLEN];
	u8 nonce[CTR_RFC3686_NONCE_SIZE];
	atomic_t configuring;
	struct completion completion;
};

struct ixp_alg {
	struct skcipher_alg crypto;
	const struct ix_hash_algo *hash;
	u32 cfg_enc;
	u32 cfg_dec;

	int registered;
};

struct ixp_aead_alg {
	struct aead_alg crypto;
	const struct ix_hash_algo *hash;
	u32 cfg_enc;
	u32 cfg_dec;

	int registered;
};

static const struct ix_hash_algo hash_alg_md5 = {
	.cfgword	= 0xAA010004,
	.icv		= "\x01\x23\x45\x67\x89\xAB\xCD\xEF"
			  "\xFE\xDC\xBA\x98\x76\x54\x32\x10",
};

static const struct ix_hash_algo hash_alg_sha1 = {
	.cfgword	= 0x00000005,
	.icv		= "\x67\x45\x23\x01\xEF\xCD\xAB\x89\x98\xBA"
			  "\xDC\xFE\x10\x32\x54\x76\xC3\xD2\xE1\xF0",
};

static struct npe *npe_c;
static struct dma_pool *buffer_pool;
static struct dma_pool *ctx_pool;

static struct crypt_ctl *crypt_virt;
static dma_addr_t crypt_phys;

static int support_aes = 1;

#define DRIVER_NAME "ixp4xx_crypto"

static struct platform_device *pdev;

static inline dma_addr_t crypt_virt2phys(struct crypt_ctl *virt)
{
	return crypt_phys + (virt - crypt_virt) * sizeof(struct crypt_ctl);
}

static inline struct crypt_ctl *crypt_phys2virt(dma_addr_t phys)
{
	return crypt_virt + (phys - crypt_phys) / sizeof(struct crypt_ctl);
}
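
/* Descriptors travel through the queue manager as physical addresses.
 * The NPE flags a failed operation by setting bit 0 of the address it
 * returns (see one_packet() below), so these two helpers rely on the
 * fixed 64-byte descriptor size to translate both directions losslessly.
 */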

static inline u32 cipher_cfg_enc(struct crypto_tfm *tfm)
{
	return container_of(tfm->__crt_alg, struct ixp_alg, crypto.base)->cfg_enc;
}

static inline u32 cipher_cfg_dec(struct crypto_tfm *tfm)
{
	return container_of(tfm->__crt_alg, struct ixp_alg, crypto.base)->cfg_dec;
}

static inline const struct ix_hash_algo *ix_hash(struct crypto_tfm *tfm)
{
	return container_of(tfm->__crt_alg, struct ixp_alg, crypto.base)->hash;
}

static int setup_crypt_desc(void)
{
	struct device *dev = &pdev->dev;

	BUILD_BUG_ON(sizeof(struct crypt_ctl) != 64);
	crypt_virt = dma_alloc_coherent(dev,
					NPE_QLEN * sizeof(struct crypt_ctl),
					&crypt_phys, GFP_ATOMIC);
	if (!crypt_virt)
		return -ENOMEM;
	return 0;
}

static spinlock_t desc_lock;
static struct crypt_ctl *get_crypt_desc(void)
{
	int i;
	static int idx;
	unsigned long flags;

	spin_lock_irqsave(&desc_lock, flags);

	if (unlikely(!crypt_virt))
		setup_crypt_desc();
	if (unlikely(!crypt_virt)) {
		spin_unlock_irqrestore(&desc_lock, flags);
		return NULL;
	}
	i = idx;
	if (crypt_virt[i].ctl_flags == CTL_FLAG_UNUSED) {
		if (++idx >= NPE_QLEN)
			idx = 0;
		crypt_virt[i].ctl_flags = CTL_FLAG_USED;
		spin_unlock_irqrestore(&desc_lock, flags);
		return crypt_virt + i;
	} else {
		spin_unlock_irqrestore(&desc_lock, flags);
		return NULL;
	}
}

static spinlock_t emerg_lock;
static struct crypt_ctl *get_crypt_desc_emerg(void)
{
	int i;
	static int idx = NPE_QLEN;
	struct crypt_ctl *desc;
	unsigned long flags;

	desc = get_crypt_desc();
	if (desc)
		return desc;
	if (unlikely(!crypt_virt))
		return NULL;

	spin_lock_irqsave(&emerg_lock, flags);
	i = idx;
	if (crypt_virt[i].ctl_flags == CTL_FLAG_UNUSED) {
		if (++idx >= NPE_QLEN_TOTAL)
			idx = NPE_QLEN;
		crypt_virt[i].ctl_flags = CTL_FLAG_USED;
		spin_unlock_irqrestore(&emerg_lock, flags);
		return crypt_virt + i;
	} else {
		spin_unlock_irqrestore(&emerg_lock, flags);
		return NULL;
	}
}

static void free_buf_chain(struct device *dev, struct buffer_desc *buf,
			   dma_addr_t phys)
{
	while (buf) {
		struct buffer_desc *buf1;
		u32 phys1;

		buf1 = buf->next;
		phys1 = buf->phys_next;
		/* unmap the data buffer, not the link to the next descriptor */
		dma_unmap_single(dev, buf->phys_addr, buf->buf_len, buf->dir);
		dma_pool_free(buffer_pool, buf, phys);
		buf = buf1;
		phys = phys1;
	}
}

static struct tasklet_struct crypto_done_tasklet;

static void finish_scattered_hmac(struct crypt_ctl *crypt)
{
	struct aead_request *req = crypt->data.aead_req;
	struct aead_ctx *req_ctx = aead_request_ctx(req);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	int authsize = crypto_aead_authsize(tfm);
	int decryptlen = req->assoclen + req->cryptlen - authsize;

	if (req_ctx->encrypt) {
		scatterwalk_map_and_copy(req_ctx->hmac_virt,
					 req->dst, decryptlen, authsize, 1);
	}
	dma_pool_free(buffer_pool, req_ctx->hmac_virt, crypt->icv_rev_aes);
}

static void one_packet(dma_addr_t phys)
{
	struct device *dev = &pdev->dev;
	struct crypt_ctl *crypt;
	struct ixp_ctx *ctx;
	int failed;

	failed = phys & 0x1 ? -EBADMSG : 0;
	phys &= ~0x3;
	crypt = crypt_phys2virt(phys);

	switch (crypt->ctl_flags & CTL_FLAG_MASK) {
	case CTL_FLAG_PERFORM_AEAD: {
		struct aead_request *req = crypt->data.aead_req;
		struct aead_ctx *req_ctx = aead_request_ctx(req);

		free_buf_chain(dev, req_ctx->src, crypt->src_buf);
		free_buf_chain(dev, req_ctx->dst, crypt->dst_buf);
		if (req_ctx->hmac_virt) {
			finish_scattered_hmac(crypt);
		}
		req->base.complete(&req->base, failed);
		break;
	}
	case CTL_FLAG_PERFORM_ABLK: {
		struct skcipher_request *req = crypt->data.ablk_req;
		struct ablk_ctx *req_ctx = skcipher_request_ctx(req);

		if (req_ctx->dst) {
			free_buf_chain(dev, req_ctx->dst, crypt->dst_buf);
		}
		free_buf_chain(dev, req_ctx->src, crypt->src_buf);
		req->base.complete(&req->base, failed);
		break;
	}
	case CTL_FLAG_GEN_ICV:
		ctx = crypto_tfm_ctx(crypt->data.tfm);
		dma_pool_free(ctx_pool, crypt->regist_ptr,
			      crypt->regist_buf->phys_addr);
		dma_pool_free(buffer_pool, crypt->regist_buf, crypt->src_buf);
		if (atomic_dec_and_test(&ctx->configuring))
			complete(&ctx->completion);
		break;
	case CTL_FLAG_GEN_REVAES:
		ctx = crypto_tfm_ctx(crypt->data.tfm);
		*(u32 *)ctx->decrypt.npe_ctx &= cpu_to_be32(~CIPH_ENCR);
		if (atomic_dec_and_test(&ctx->configuring))
			complete(&ctx->completion);
		break;
	default:
		BUG();
	}
	crypt->ctl_flags = CTL_FLAG_UNUSED;
}
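
/* The queue IRQ merely schedules the tasklet; crypto_done_action() then
 * drains up to four completions per run and reschedules itself, which
 * bounds the time spent per softirq pass while the queue stays non-empty.
 */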

static void irqhandler(void *_unused)
{
	tasklet_schedule(&crypto_done_tasklet);
}

static void crypto_done_action(unsigned long arg)
{
	int i;

	for (i = 0; i < 4; i++) {
		dma_addr_t phys = qmgr_get_entry(RECV_QID);
		if (!phys)
			return;
		one_packet(phys);
	}
	tasklet_schedule(&crypto_done_tasklet);
}

static int init_ixp_crypto(struct device *dev)
{
	int ret = -ENODEV;
	u32 msg[2] = { 0, 0 };

	if (!(~(*IXP4XX_EXP_CFG2) & (IXP4XX_FEATURE_HASH |
				     IXP4XX_FEATURE_AES | IXP4XX_FEATURE_DES))) {
		printk(KERN_ERR "ixp_crypto: No HW crypto available\n");
		return ret;
	}
	npe_c = npe_request(NPE_ID);
	if (!npe_c)
		return ret;

	if (!npe_running(npe_c)) {
		ret = npe_load_firmware(npe_c, npe_name(npe_c), dev);
		if (ret)
			goto npe_release;
		if (npe_recv_message(npe_c, msg, "STATUS_MSG"))
			goto npe_error;
	} else {
		if (npe_send_message(npe_c, msg, "STATUS_MSG"))
			goto npe_error;

		if (npe_recv_message(npe_c, msg, "STATUS_MSG"))
			goto npe_error;
	}

	switch ((msg[1] >> 16) & 0xff) {
	case 3:
		printk(KERN_WARNING "Firmware of %s lacks AES support\n",
		       npe_name(npe_c));
		support_aes = 0;
		break;
	case 4:
	case 5:
		break;
	default:
		printk(KERN_ERR "Firmware of %s lacks crypto support\n",
		       npe_name(npe_c));
		ret = -ENODEV;
		goto npe_release;
	}
	/* buffer_pool will also be used to sometimes store the hmac,
	 * so assure it is large enough
	 */
	BUILD_BUG_ON(SHA1_DIGEST_SIZE > sizeof(struct buffer_desc));
	buffer_pool = dma_pool_create("buffer", dev,
				      sizeof(struct buffer_desc), 32, 0);
	ret = -ENOMEM;
	if (!buffer_pool) {
		goto err;
	}
	ctx_pool = dma_pool_create("context", dev,
				   NPE_CTX_LEN, 16, 0);
	if (!ctx_pool) {
		goto err;
	}
	ret = qmgr_request_queue(SEND_QID, NPE_QLEN_TOTAL, 0, 0,
				 "ixp_crypto:out", NULL);
	if (ret)
		goto err;
	ret = qmgr_request_queue(RECV_QID, NPE_QLEN, 0, 0,
				 "ixp_crypto:in", NULL);
	if (ret) {
		qmgr_release_queue(SEND_QID);
		goto err;
	}
	qmgr_set_irq(RECV_QID, QUEUE_IRQ_SRC_NOT_EMPTY, irqhandler, NULL);
	tasklet_init(&crypto_done_tasklet, crypto_done_action, 0);

	qmgr_enable_irq(RECV_QID);
	return 0;

npe_error:
	printk(KERN_ERR "%s not responding\n", npe_name(npe_c));
	ret = -EIO;
err:
	dma_pool_destroy(ctx_pool);
	dma_pool_destroy(buffer_pool);
npe_release:
	npe_release(npe_c);
	return ret;
}

static void release_ixp_crypto(struct device *dev)
{
	qmgr_disable_irq(RECV_QID);
	tasklet_kill(&crypto_done_tasklet);

	qmgr_release_queue(SEND_QID);
	qmgr_release_queue(RECV_QID);

	dma_pool_destroy(ctx_pool);
	dma_pool_destroy(buffer_pool);

	npe_release(npe_c);

	if (crypt_virt) {
		/* free with the same size the ring was allocated with */
		dma_free_coherent(dev,
				  NPE_QLEN * sizeof(struct crypt_ctl),
				  crypt_virt, crypt_phys);
	}
}

static void reset_sa_dir(struct ix_sa_dir *dir)
{
	memset(dir->npe_ctx, 0, NPE_CTX_LEN);
	dir->npe_ctx_idx = 0;
	dir->npe_mode = 0;
}

static int init_sa_dir(struct ix_sa_dir *dir)
{
	dir->npe_ctx = dma_pool_alloc(ctx_pool, GFP_KERNEL, &dir->npe_ctx_phys);
	if (!dir->npe_ctx) {
		return -ENOMEM;
	}
	reset_sa_dir(dir);
	return 0;
}

static void free_sa_dir(struct ix_sa_dir *dir)
{
	memset(dir->npe_ctx, 0, NPE_CTX_LEN);
	dma_pool_free(ctx_pool, dir->npe_ctx, dir->npe_ctx_phys);
}

static int init_tfm(struct crypto_tfm *tfm)
{
	struct ixp_ctx *ctx = crypto_tfm_ctx(tfm);
	int ret;

	atomic_set(&ctx->configuring, 0);
	ret = init_sa_dir(&ctx->encrypt);
	if (ret)
		return ret;
	ret = init_sa_dir(&ctx->decrypt);
	if (ret) {
		free_sa_dir(&ctx->encrypt);
	}
	return ret;
}

static int init_tfm_ablk(struct crypto_skcipher *tfm)
{
	crypto_skcipher_set_reqsize(tfm, sizeof(struct ablk_ctx));
	return init_tfm(crypto_skcipher_tfm(tfm));
}

static int init_tfm_aead(struct crypto_aead *tfm)
{
	crypto_aead_set_reqsize(tfm, sizeof(struct aead_ctx));
	return init_tfm(crypto_aead_tfm(tfm));
}

static void exit_tfm(struct crypto_tfm *tfm)
{
	struct ixp_ctx *ctx = crypto_tfm_ctx(tfm);

	free_sa_dir(&ctx->encrypt);
	free_sa_dir(&ctx->decrypt);
}

static void exit_tfm_ablk(struct crypto_skcipher *tfm)
{
	exit_tfm(crypto_skcipher_tfm(tfm));
}

static void exit_tfm_aead(struct crypto_aead *tfm)
{
	exit_tfm(crypto_aead_tfm(tfm));
}

static int register_chain_var(struct crypto_tfm *tfm, u8 xpad, u32 target,
			      int init_len, u32 ctx_addr, const u8 *key,
			      int key_len)
{
	struct ixp_ctx *ctx = crypto_tfm_ctx(tfm);
	struct crypt_ctl *crypt;
	struct buffer_desc *buf;
	int i;
	u8 *pad;
	dma_addr_t pad_phys, buf_phys;

	BUILD_BUG_ON(NPE_CTX_LEN < HMAC_PAD_BLOCKLEN);
	pad = dma_pool_alloc(ctx_pool, GFP_KERNEL, &pad_phys);
	if (!pad)
		return -ENOMEM;
	buf = dma_pool_alloc(buffer_pool, GFP_KERNEL, &buf_phys);
	if (!buf) {
		dma_pool_free(ctx_pool, pad, pad_phys);
		return -ENOMEM;
	}
	crypt = get_crypt_desc_emerg();
	if (!crypt) {
		dma_pool_free(ctx_pool, pad, pad_phys);
		dma_pool_free(buffer_pool, buf, buf_phys);
		return -EAGAIN;
	}

	memcpy(pad, key, key_len);
	memset(pad + key_len, 0, HMAC_PAD_BLOCKLEN - key_len);
	for (i = 0; i < HMAC_PAD_BLOCKLEN; i++) {
		pad[i] ^= xpad;
	}

	crypt->data.tfm = tfm;
	crypt->regist_ptr = pad;
	crypt->regist_buf = buf;

	crypt->auth_offs = 0;
	crypt->auth_len = HMAC_PAD_BLOCKLEN;
	crypt->crypto_ctx = ctx_addr;
	crypt->src_buf = buf_phys;
	crypt->icv_rev_aes = target;
	crypt->mode = NPE_OP_HASH_GEN_ICV;
	crypt->init_len = init_len;
	crypt->ctl_flags |= CTL_FLAG_GEN_ICV;

	buf->next = NULL;
	buf->buf_len = HMAC_PAD_BLOCKLEN;
	buf->pkt_len = 0;
	buf->phys_addr = pad_phys;

	atomic_inc(&ctx->configuring);
	qmgr_put_entry(SEND_QID, crypt_virt2phys(crypt));
	BUG_ON(qmgr_stat_overflow(SEND_QID));
	return 0;
}
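
/* setup_auth() below writes the HMAC config word and the algorithm's
 * initial chaining values into the per-direction NPE context, then queues
 * two register_chain_var() jobs so the NPE itself hashes the ipad- and
 * opad-XORed key blocks into the inner and outer digest slots.
 */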

static int setup_auth(struct crypto_tfm *tfm, int encrypt,
		      unsigned int authsize, const u8 *key, int key_len,
		      unsigned int digest_len)
{
	u32 itarget, otarget, npe_ctx_addr;
	unsigned char *cinfo;
	int init_len, ret = 0;
	u32 cfgword;
	struct ix_sa_dir *dir;
	struct ixp_ctx *ctx = crypto_tfm_ctx(tfm);
	const struct ix_hash_algo *algo;

	dir = encrypt ? &ctx->encrypt : &ctx->decrypt;
	cinfo = dir->npe_ctx + dir->npe_ctx_idx;
	algo = ix_hash(tfm);

	/* write cfg word to cryptinfo */
	cfgword = algo->cfgword | (authsize << 6); /* (authsize/4) << 8 */
#ifndef __ARMEB__
	cfgword ^= 0xAA000000; /* change the "byte swap" flags */
#endif
	*(u32 *)cinfo = cpu_to_be32(cfgword);
	cinfo += sizeof(cfgword);

	/* write ICV to cryptinfo */
	memcpy(cinfo, algo->icv, digest_len);
	cinfo += digest_len;

	itarget = dir->npe_ctx_phys + dir->npe_ctx_idx
		  + sizeof(algo->cfgword);
	otarget = itarget + digest_len;
	init_len = cinfo - (dir->npe_ctx + dir->npe_ctx_idx);
	npe_ctx_addr = dir->npe_ctx_phys + dir->npe_ctx_idx;

	dir->npe_ctx_idx += init_len;
	dir->npe_mode |= NPE_OP_HASH_ENABLE;

	if (!encrypt)
		dir->npe_mode |= NPE_OP_HASH_VERIFY;

	ret = register_chain_var(tfm, HMAC_OPAD_VALUE, otarget,
				 init_len, npe_ctx_addr, key, key_len);
	if (ret)
		return ret;
	return register_chain_var(tfm, HMAC_IPAD_VALUE, itarget,
				  init_len, npe_ctx_addr, key, key_len);
}

static int gen_rev_aes_key(struct crypto_tfm *tfm)
{
	struct crypt_ctl *crypt;
	struct ixp_ctx *ctx = crypto_tfm_ctx(tfm);
	struct ix_sa_dir *dir = &ctx->decrypt;

	crypt = get_crypt_desc_emerg();
	if (!crypt) {
		return -EAGAIN;
	}
	*(u32 *)dir->npe_ctx |= cpu_to_be32(CIPH_ENCR);

	crypt->data.tfm = tfm;
	crypt->crypt_offs = 0;
	crypt->crypt_len = AES_BLOCK128;
	crypt->src_buf = 0;
	crypt->crypto_ctx = dir->npe_ctx_phys;
	crypt->icv_rev_aes = dir->npe_ctx_phys + sizeof(u32);
	crypt->mode = NPE_OP_ENC_GEN_KEY;
	crypt->init_len = dir->npe_ctx_idx;
	crypt->ctl_flags |= CTL_FLAG_GEN_REVAES;

	atomic_inc(&ctx->configuring);
	qmgr_put_entry(SEND_QID, crypt_virt2phys(crypt));
	BUG_ON(qmgr_stat_overflow(SEND_QID));
	return 0;
}
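
/* For AES decryption the NPE needs the reverse (decryption) key schedule.
 * gen_rev_aes_key() above queues an NPE_OP_ENC_GEN_KEY operation that
 * derives it from the encryption key and stores it right behind the config
 * word of the decrypt context; setup_cipher() triggers this for every AES
 * decrypt direction.
 */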

static int setup_cipher(struct crypto_tfm *tfm, int encrypt,
			const u8 *key, int key_len)
{
	u8 *cinfo;
	u32 cipher_cfg;
	u32 keylen_cfg = 0;
	struct ix_sa_dir *dir;
	struct ixp_ctx *ctx = crypto_tfm_ctx(tfm);
	u32 *flags = &tfm->crt_flags;

	dir = encrypt ? &ctx->encrypt : &ctx->decrypt;
	cinfo = dir->npe_ctx;

	if (encrypt) {
		cipher_cfg = cipher_cfg_enc(tfm);
		dir->npe_mode |= NPE_OP_CRYPT_ENCRYPT;
	} else {
		cipher_cfg = cipher_cfg_dec(tfm);
	}
	if (cipher_cfg & MOD_AES) {
		switch (key_len) {
		case 16: keylen_cfg = MOD_AES128; break;
		case 24: keylen_cfg = MOD_AES192; break;
		case 32: keylen_cfg = MOD_AES256; break;
		default:
			*flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
			return -EINVAL;
		}
		cipher_cfg |= keylen_cfg;
	} else {
		crypto_des_verify_key(tfm, key);
	}
	/* write cfg word to cryptinfo */
	*(u32 *)cinfo = cpu_to_be32(cipher_cfg);
	cinfo += sizeof(cipher_cfg);

	/* write cipher key to cryptinfo */
	memcpy(cinfo, key, key_len);
	/* NPE wants keylen set to DES3_EDE_KEY_SIZE even for single DES */
	if (key_len < DES3_EDE_KEY_SIZE && !(cipher_cfg & MOD_AES)) {
		memset(cinfo + key_len, 0, DES3_EDE_KEY_SIZE - key_len);
		key_len = DES3_EDE_KEY_SIZE;
	}
	dir->npe_ctx_idx = sizeof(cipher_cfg) + key_len;
	dir->npe_mode |= NPE_OP_CRYPT_ENABLE;
	if ((cipher_cfg & MOD_AES) && !encrypt) {
		return gen_rev_aes_key(tfm);
	}
	return 0;
}

static struct buffer_desc *chainup_buffers(struct device *dev,
					   struct scatterlist *sg,
					   unsigned int nbytes,
					   struct buffer_desc *buf,
					   gfp_t flags,
					   enum dma_data_direction dir)
{
	for (; nbytes > 0; sg = sg_next(sg)) {
		unsigned int len = min(nbytes, sg->length);
		struct buffer_desc *next_buf;
		dma_addr_t next_buf_phys;
		void *ptr;

		nbytes -= len;
		ptr = sg_virt(sg);
		next_buf = dma_pool_alloc(buffer_pool, flags, &next_buf_phys);
		if (!next_buf) {
			buf = NULL;
			break;
		}
		sg_dma_address(sg) = dma_map_single(dev, ptr, len, dir);
		buf->next = next_buf;
		buf->phys_next = next_buf_phys;
		buf = next_buf;

		buf->phys_addr = sg_dma_address(sg);
		buf->buf_len = len;
		buf->dir = dir;
	}
	/* don't dereference buf if the pool allocation above failed */
	if (buf) {
		buf->next = NULL;
		buf->phys_next = 0;
	}
	return buf;
}
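
/* chainup_buffers() allocates one buffer_desc per scatterlist entry from
 * buffer_pool and DMA-maps each entry; callers pass a stack "hook"
 * descriptor whose next/phys_next fields end up pointing at the head of
 * the chain handed to the NPE, and get the last descriptor (or NULL on
 * allocation failure) back.
 */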

static int ablk_setkey(struct crypto_skcipher *tfm, const u8 *key,
		       unsigned int key_len)
{
	struct ixp_ctx *ctx = crypto_skcipher_ctx(tfm);
	u32 *flags = &tfm->base.crt_flags;
	int ret;

	init_completion(&ctx->completion);
	atomic_inc(&ctx->configuring);

	reset_sa_dir(&ctx->encrypt);
	reset_sa_dir(&ctx->decrypt);

	ctx->encrypt.npe_mode = NPE_OP_HMAC_DISABLE;
	ctx->decrypt.npe_mode = NPE_OP_HMAC_DISABLE;

	ret = setup_cipher(&tfm->base, 0, key, key_len);
	if (ret)
		goto out;
	ret = setup_cipher(&tfm->base, 1, key, key_len);
	if (ret)
		goto out;

	if (*flags & CRYPTO_TFM_RES_WEAK_KEY) {
		if (*flags & CRYPTO_TFM_REQ_FORBID_WEAK_KEYS) {
			ret = -EINVAL;
		} else {
			*flags &= ~CRYPTO_TFM_RES_WEAK_KEY;
		}
	}
out:
	if (!atomic_dec_and_test(&ctx->configuring))
		wait_for_completion(&ctx->completion);
	return ret;
}

static int ablk_des3_setkey(struct crypto_skcipher *tfm, const u8 *key,
			    unsigned int key_len)
{
	return verify_skcipher_des3_key(tfm, key) ?:
	       ablk_setkey(tfm, key, key_len);
}

static int ablk_rfc3686_setkey(struct crypto_skcipher *tfm, const u8 *key,
			       unsigned int key_len)
{
	struct ixp_ctx *ctx = crypto_skcipher_ctx(tfm);

	/* the nonce is stored in bytes at end of key */
	if (key_len < CTR_RFC3686_NONCE_SIZE)
		return -EINVAL;

	memcpy(ctx->nonce, key + (key_len - CTR_RFC3686_NONCE_SIZE),
	       CTR_RFC3686_NONCE_SIZE);

	key_len -= CTR_RFC3686_NONCE_SIZE;
	return ablk_setkey(tfm, key, key_len);
}

static int ablk_perform(struct skcipher_request *req, int encrypt)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct ixp_ctx *ctx = crypto_skcipher_ctx(tfm);
	unsigned int ivsize = crypto_skcipher_ivsize(tfm);
	struct ix_sa_dir *dir;
	struct crypt_ctl *crypt;
	unsigned int nbytes = req->cryptlen;
	enum dma_data_direction src_direction = DMA_BIDIRECTIONAL;
	struct ablk_ctx *req_ctx = skcipher_request_ctx(req);
	struct buffer_desc src_hook;
	struct device *dev = &pdev->dev;
	gfp_t flags = req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP ?
			GFP_KERNEL : GFP_ATOMIC;

	if (qmgr_stat_full(SEND_QID))
		return -EAGAIN;
	if (atomic_read(&ctx->configuring))
		return -EAGAIN;

	dir = encrypt ? &ctx->encrypt : &ctx->decrypt;

	crypt = get_crypt_desc();
	if (!crypt)
		return -ENOMEM;

	crypt->data.ablk_req = req;
	crypt->crypto_ctx = dir->npe_ctx_phys;
	crypt->mode = dir->npe_mode;
	crypt->init_len = dir->npe_ctx_idx;

	crypt->crypt_offs = 0;
	crypt->crypt_len = nbytes;

	BUG_ON(ivsize && !req->iv);
	memcpy(crypt->iv, req->iv, ivsize);
	if (req->src != req->dst) {
		struct buffer_desc dst_hook;

		crypt->mode |= NPE_OP_NOT_IN_PLACE;
		/* This was never tested by Intel
		 * for more than one dst buffer, I think. */
		req_ctx->dst = NULL;
		if (!chainup_buffers(dev, req->dst, nbytes, &dst_hook,
				     flags, DMA_FROM_DEVICE))
			goto free_buf_dest;
		src_direction = DMA_TO_DEVICE;
		req_ctx->dst = dst_hook.next;
		crypt->dst_buf = dst_hook.phys_next;
	} else {
		req_ctx->dst = NULL;
	}
	req_ctx->src = NULL;
	if (!chainup_buffers(dev, req->src, nbytes, &src_hook,
			     flags, src_direction))
		goto free_buf_src;

	req_ctx->src = src_hook.next;
	crypt->src_buf = src_hook.phys_next;
	crypt->ctl_flags |= CTL_FLAG_PERFORM_ABLK;
	qmgr_put_entry(SEND_QID, crypt_virt2phys(crypt));
	BUG_ON(qmgr_stat_overflow(SEND_QID));
	return -EINPROGRESS;

free_buf_src:
	free_buf_chain(dev, req_ctx->src, crypt->src_buf);
free_buf_dest:
	if (req->src != req->dst) {
		free_buf_chain(dev, req_ctx->dst, crypt->dst_buf);
	}
	crypt->ctl_flags = CTL_FLAG_UNUSED;
	return -ENOMEM;
}

static int ablk_encrypt(struct skcipher_request *req)
{
	return ablk_perform(req, 1);
}

static int ablk_decrypt(struct skcipher_request *req)
{
	return ablk_perform(req, 0);
}
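
/* RFC 3686 CTR mode: build the 16-byte counter block from the 4-byte
 * nonce saved at setkey time, the 8-byte per-request IV and a 4-byte
 * block counter starting at 1, then run it as a plain CTR request.
 */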

static int ablk_rfc3686_crypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct ixp_ctx *ctx = crypto_skcipher_ctx(tfm);
	u8 iv[CTR_RFC3686_BLOCK_SIZE];
	u8 *info = req->iv;
	int ret;

	/* set up counter block */
	memcpy(iv, ctx->nonce, CTR_RFC3686_NONCE_SIZE);
	memcpy(iv + CTR_RFC3686_NONCE_SIZE, info, CTR_RFC3686_IV_SIZE);

	/* initialize counter portion of counter block */
	*(__be32 *)(iv + CTR_RFC3686_NONCE_SIZE + CTR_RFC3686_IV_SIZE) =
		cpu_to_be32(1);

	req->iv = iv;
	ret = ablk_perform(req, 1);
	req->iv = info;
	return ret;
}
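
/* aead_perform() programs a single NPE operation covering both passes:
 * the hash spans assoclen + cryptlen bytes from offset 0, while the
 * cipher starts at the caller-supplied offset. On decryption,
 * req->cryptlen still includes the ICV, so authsize is subtracted first.
 */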

static int aead_perform(struct aead_request *req, int encrypt,
			int cryptoffset, int eff_cryptlen, u8 *iv)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct ixp_ctx *ctx = crypto_aead_ctx(tfm);
	unsigned int ivsize = crypto_aead_ivsize(tfm);
	unsigned int authsize = crypto_aead_authsize(tfm);
	struct ix_sa_dir *dir;
	struct crypt_ctl *crypt;
	unsigned int cryptlen;
	struct buffer_desc *buf, src_hook;
	struct aead_ctx *req_ctx = aead_request_ctx(req);
	struct device *dev = &pdev->dev;
	gfp_t flags = req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP ?
			GFP_KERNEL : GFP_ATOMIC;
	enum dma_data_direction src_direction = DMA_BIDIRECTIONAL;
	unsigned int lastlen;

	if (qmgr_stat_full(SEND_QID))
		return -EAGAIN;
	if (atomic_read(&ctx->configuring))
		return -EAGAIN;

	if (encrypt) {
		dir = &ctx->encrypt;
		cryptlen = req->cryptlen;
	} else {
		dir = &ctx->decrypt;
		/* req->cryptlen includes the authsize when decrypting */
		cryptlen = req->cryptlen - authsize;
		eff_cryptlen -= authsize;
	}
	crypt = get_crypt_desc();
	if (!crypt)
		return -ENOMEM;

	crypt->data.aead_req = req;
	crypt->crypto_ctx = dir->npe_ctx_phys;
	crypt->mode = dir->npe_mode;
	crypt->init_len = dir->npe_ctx_idx;

	crypt->crypt_offs = cryptoffset;
	crypt->crypt_len = eff_cryptlen;

	crypt->auth_offs = 0;
	crypt->auth_len = req->assoclen + cryptlen;
	BUG_ON(ivsize && !req->iv);
	memcpy(crypt->iv, req->iv, ivsize);

	buf = chainup_buffers(dev, req->src, crypt->auth_len,
			      &src_hook, flags, src_direction);
	req_ctx->src = src_hook.next;
	crypt->src_buf = src_hook.phys_next;
	if (!buf)
		goto free_buf_src;

	lastlen = buf->buf_len;
	if (lastlen >= authsize)
		crypt->icv_rev_aes = buf->phys_addr +
				     buf->buf_len - authsize;

	req_ctx->dst = NULL;

	if (req->src != req->dst) {
		struct buffer_desc dst_hook;

		crypt->mode |= NPE_OP_NOT_IN_PLACE;
		src_direction = DMA_TO_DEVICE;

		buf = chainup_buffers(dev, req->dst, crypt->auth_len,
				      &dst_hook, flags, DMA_FROM_DEVICE);
		req_ctx->dst = dst_hook.next;
		crypt->dst_buf = dst_hook.phys_next;

		if (!buf)
			goto free_buf_dst;

		if (encrypt) {
			lastlen = buf->buf_len;
			if (lastlen >= authsize)
				crypt->icv_rev_aes = buf->phys_addr +
						     buf->buf_len - authsize;
		}
	}

	if (unlikely(lastlen < authsize)) {
		/* The 12 hmac bytes are scattered,
		 * we need to copy them into a safe buffer */
		req_ctx->hmac_virt = dma_pool_alloc(buffer_pool, flags,
						    &crypt->icv_rev_aes);
		if (unlikely(!req_ctx->hmac_virt))
			goto free_buf_dst;
		if (!encrypt) {
			scatterwalk_map_and_copy(req_ctx->hmac_virt,
						 req->src, cryptlen,
						 authsize, 0);
		}
		req_ctx->encrypt = encrypt;
	} else {
		req_ctx->hmac_virt = NULL;
	}

	crypt->ctl_flags |= CTL_FLAG_PERFORM_AEAD;
	qmgr_put_entry(SEND_QID, crypt_virt2phys(crypt));
	BUG_ON(qmgr_stat_overflow(SEND_QID));
	return -EINPROGRESS;

free_buf_dst:
	free_buf_chain(dev, req_ctx->dst, crypt->dst_buf);
free_buf_src:
	free_buf_chain(dev, req_ctx->src, crypt->src_buf);
	crypt->ctl_flags = CTL_FLAG_UNUSED;
	return -ENOMEM;
}
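
/* aead_setup() rebuilds both SA directions whenever the key or authsize
 * changes: the cipher part of each context is written first, then the
 * HMAC precomputation is queued per direction, and the caller sleeps on
 * ctx->completion until all outstanding NPE configuration jobs are done.
 */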

static int aead_setup(struct crypto_aead *tfm, unsigned int authsize)
{
	struct ixp_ctx *ctx = crypto_aead_ctx(tfm);
	u32 *flags = &tfm->base.crt_flags;
	unsigned int digest_len = crypto_aead_maxauthsize(tfm);
	int ret;

	if (!ctx->enckey_len && !ctx->authkey_len)
		return 0;
	init_completion(&ctx->completion);
	atomic_inc(&ctx->configuring);

	reset_sa_dir(&ctx->encrypt);
	reset_sa_dir(&ctx->decrypt);

	ret = setup_cipher(&tfm->base, 0, ctx->enckey, ctx->enckey_len);
	if (ret)
		goto out;
	ret = setup_cipher(&tfm->base, 1, ctx->enckey, ctx->enckey_len);
	if (ret)
		goto out;
	ret = setup_auth(&tfm->base, 0, authsize, ctx->authkey,
			 ctx->authkey_len, digest_len);
	if (ret)
		goto out;
	ret = setup_auth(&tfm->base, 1, authsize, ctx->authkey,
			 ctx->authkey_len, digest_len);
	if (ret)
		goto out;

	if (*flags & CRYPTO_TFM_RES_WEAK_KEY) {
		if (*flags & CRYPTO_TFM_REQ_FORBID_WEAK_KEYS) {
			ret = -EINVAL;
			goto out;
		} else {
			*flags &= ~CRYPTO_TFM_RES_WEAK_KEY;
		}
	}
out:
	if (!atomic_dec_and_test(&ctx->configuring))
		wait_for_completion(&ctx->completion);
	return ret;
}

static int aead_setauthsize(struct crypto_aead *tfm, unsigned int authsize)
{
	int max = crypto_aead_maxauthsize(tfm) >> 2;

	if ((authsize >> 2) < 1 || (authsize >> 2) > max || (authsize & 3))
		return -EINVAL;
	return aead_setup(tfm, authsize);
}

static int aead_setkey(struct crypto_aead *tfm, const u8 *key,
		       unsigned int keylen)
{
	struct ixp_ctx *ctx = crypto_aead_ctx(tfm);
	struct crypto_authenc_keys keys;

	if (crypto_authenc_extractkeys(&keys, key, keylen) != 0)
		goto badkey;

	if (keys.authkeylen > sizeof(ctx->authkey))
		goto badkey;

	if (keys.enckeylen > sizeof(ctx->enckey))
		goto badkey;

	memcpy(ctx->authkey, keys.authkey, keys.authkeylen);
	memcpy(ctx->enckey, keys.enckey, keys.enckeylen);
	ctx->authkey_len = keys.authkeylen;
	ctx->enckey_len = keys.enckeylen;

	memzero_explicit(&keys, sizeof(keys));
	return aead_setup(tfm, crypto_aead_authsize(tfm));
badkey:
	crypto_aead_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
	memzero_explicit(&keys, sizeof(keys));
	return -EINVAL;
}

static int des3_aead_setkey(struct crypto_aead *tfm, const u8 *key,
			    unsigned int keylen)
{
	struct ixp_ctx *ctx = crypto_aead_ctx(tfm);
	struct crypto_authenc_keys keys;
	int err;

	err = crypto_authenc_extractkeys(&keys, key, keylen);
	if (unlikely(err))
		goto badkey;

	err = -EINVAL;
	if (keys.authkeylen > sizeof(ctx->authkey))
		goto badkey;

	err = verify_aead_des3_key(tfm, keys.enckey, keys.enckeylen);
	if (err)
		goto badkey;

	memcpy(ctx->authkey, keys.authkey, keys.authkeylen);
	memcpy(ctx->enckey, keys.enckey, keys.enckeylen);
	ctx->authkey_len = keys.authkeylen;
	ctx->enckey_len = keys.enckeylen;

	memzero_explicit(&keys, sizeof(keys));
	return aead_setup(tfm, crypto_aead_authsize(tfm));
badkey:
	memzero_explicit(&keys, sizeof(keys));
	return err;
}

static int aead_encrypt(struct aead_request *req)
{
	return aead_perform(req, 1, req->assoclen, req->cryptlen, req->iv);
}

static int aead_decrypt(struct aead_request *req)
{
	return aead_perform(req, 0, req->assoclen, req->cryptlen, req->iv);
}
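
/* Algorithm templates: only the NPE config words, hash parameters and any
 * algorithm-specific callbacks are given here; the common callbacks,
 * context size, priority etc. are patched into each entry by
 * ixp_module_init() before registration.
 */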

static struct ixp_alg ixp4xx_algos[] = {
{
	.crypto	= {
		.base.cra_name		= "cbc(des)",
		.base.cra_blocksize	= DES_BLOCK_SIZE,

		.min_keysize		= DES_KEY_SIZE,
		.max_keysize		= DES_KEY_SIZE,
		.ivsize			= DES_BLOCK_SIZE,
	},
	.cfg_enc = CIPH_ENCR | MOD_DES | MOD_CBC_ENC | KEYLEN_192,
	.cfg_dec = CIPH_DECR | MOD_DES | MOD_CBC_DEC | KEYLEN_192,
}, {
	.crypto	= {
		.base.cra_name		= "ecb(des)",
		.base.cra_blocksize	= DES_BLOCK_SIZE,

		.min_keysize		= DES_KEY_SIZE,
		.max_keysize		= DES_KEY_SIZE,
	},
	.cfg_enc = CIPH_ENCR | MOD_DES | MOD_ECB | KEYLEN_192,
	.cfg_dec = CIPH_DECR | MOD_DES | MOD_ECB | KEYLEN_192,
}, {
	.crypto	= {
		.base.cra_name		= "cbc(des3_ede)",
		.base.cra_blocksize	= DES3_EDE_BLOCK_SIZE,

		.min_keysize		= DES3_EDE_KEY_SIZE,
		.max_keysize		= DES3_EDE_KEY_SIZE,
		.ivsize			= DES3_EDE_BLOCK_SIZE,
		.setkey			= ablk_des3_setkey,
	},
	.cfg_enc = CIPH_ENCR | MOD_3DES | MOD_CBC_ENC | KEYLEN_192,
	.cfg_dec = CIPH_DECR | MOD_3DES | MOD_CBC_DEC | KEYLEN_192,
}, {
	.crypto	= {
		.base.cra_name		= "ecb(des3_ede)",
		.base.cra_blocksize	= DES3_EDE_BLOCK_SIZE,

		.min_keysize		= DES3_EDE_KEY_SIZE,
		.max_keysize		= DES3_EDE_KEY_SIZE,
		.setkey			= ablk_des3_setkey,
	},
	.cfg_enc = CIPH_ENCR | MOD_3DES | MOD_ECB | KEYLEN_192,
	.cfg_dec = CIPH_DECR | MOD_3DES | MOD_ECB | KEYLEN_192,
}, {
	.crypto	= {
		.base.cra_name		= "cbc(aes)",
		.base.cra_blocksize	= AES_BLOCK_SIZE,

		.min_keysize		= AES_MIN_KEY_SIZE,
		.max_keysize		= AES_MAX_KEY_SIZE,
		.ivsize			= AES_BLOCK_SIZE,
	},
	.cfg_enc = CIPH_ENCR | MOD_AES | MOD_CBC_ENC,
	.cfg_dec = CIPH_DECR | MOD_AES | MOD_CBC_DEC,
}, {
	.crypto	= {
		.base.cra_name		= "ecb(aes)",
		.base.cra_blocksize	= AES_BLOCK_SIZE,

		.min_keysize		= AES_MIN_KEY_SIZE,
		.max_keysize		= AES_MAX_KEY_SIZE,
	},
	.cfg_enc = CIPH_ENCR | MOD_AES | MOD_ECB,
	.cfg_dec = CIPH_DECR | MOD_AES | MOD_ECB,
}, {
	.crypto	= {
		.base.cra_name		= "ctr(aes)",
		.base.cra_blocksize	= 1,

		.min_keysize		= AES_MIN_KEY_SIZE,
		.max_keysize		= AES_MAX_KEY_SIZE,
		.ivsize			= AES_BLOCK_SIZE,
	},
	/* CTR decryption is the same keystream operation as encryption */
	.cfg_enc = CIPH_ENCR | MOD_AES | MOD_CTR,
	.cfg_dec = CIPH_ENCR | MOD_AES | MOD_CTR,
}, {
	.crypto	= {
		.base.cra_name		= "rfc3686(ctr(aes))",
		.base.cra_blocksize	= 1,

		.min_keysize		= AES_MIN_KEY_SIZE,
		.max_keysize		= AES_MAX_KEY_SIZE,
		.ivsize			= AES_BLOCK_SIZE,
		.setkey			= ablk_rfc3686_setkey,
		.encrypt		= ablk_rfc3686_crypt,
		.decrypt		= ablk_rfc3686_crypt,
	},
	.cfg_enc = CIPH_ENCR | MOD_AES | MOD_CTR,
	.cfg_dec = CIPH_ENCR | MOD_AES | MOD_CTR,
} };

static struct ixp_aead_alg ixp4xx_aeads[] = {
{
	.crypto	= {
		.base = {
			.cra_name	= "authenc(hmac(md5),cbc(des))",
			.cra_blocksize	= DES_BLOCK_SIZE,
		},
		.ivsize		= DES_BLOCK_SIZE,
		.maxauthsize	= MD5_DIGEST_SIZE,
	},
	.hash = &hash_alg_md5,
	.cfg_enc = CIPH_ENCR | MOD_DES | MOD_CBC_ENC | KEYLEN_192,
	.cfg_dec = CIPH_DECR | MOD_DES | MOD_CBC_DEC | KEYLEN_192,
}, {
	.crypto	= {
		.base = {
			.cra_name	= "authenc(hmac(md5),cbc(des3_ede))",
			.cra_blocksize	= DES3_EDE_BLOCK_SIZE,
		},
		.ivsize		= DES3_EDE_BLOCK_SIZE,
		.maxauthsize	= MD5_DIGEST_SIZE,
		.setkey		= des3_aead_setkey,
	},
	.hash = &hash_alg_md5,
	.cfg_enc = CIPH_ENCR | MOD_3DES | MOD_CBC_ENC | KEYLEN_192,
	.cfg_dec = CIPH_DECR | MOD_3DES | MOD_CBC_DEC | KEYLEN_192,
}, {
	.crypto	= {
		.base = {
			.cra_name	= "authenc(hmac(sha1),cbc(des))",
			.cra_blocksize	= DES_BLOCK_SIZE,
		},
		.ivsize		= DES_BLOCK_SIZE,
		.maxauthsize	= SHA1_DIGEST_SIZE,
	},
	.hash = &hash_alg_sha1,
	.cfg_enc = CIPH_ENCR | MOD_DES | MOD_CBC_ENC | KEYLEN_192,
	.cfg_dec = CIPH_DECR | MOD_DES | MOD_CBC_DEC | KEYLEN_192,
}, {
	.crypto	= {
		.base = {
			.cra_name	= "authenc(hmac(sha1),cbc(des3_ede))",
			.cra_blocksize	= DES3_EDE_BLOCK_SIZE,
		},
		.ivsize		= DES3_EDE_BLOCK_SIZE,
		.maxauthsize	= SHA1_DIGEST_SIZE,
		.setkey		= des3_aead_setkey,
	},
	.hash = &hash_alg_sha1,
	.cfg_enc = CIPH_ENCR | MOD_3DES | MOD_CBC_ENC | KEYLEN_192,
	.cfg_dec = CIPH_DECR | MOD_3DES | MOD_CBC_DEC | KEYLEN_192,
}, {
	.crypto	= {
		.base = {
			.cra_name	= "authenc(hmac(md5),cbc(aes))",
			.cra_blocksize	= AES_BLOCK_SIZE,
		},
		.ivsize		= AES_BLOCK_SIZE,
		.maxauthsize	= MD5_DIGEST_SIZE,
	},
	.hash = &hash_alg_md5,
	.cfg_enc = CIPH_ENCR | MOD_AES | MOD_CBC_ENC,
	.cfg_dec = CIPH_DECR | MOD_AES | MOD_CBC_DEC,
}, {
	.crypto	= {
		.base = {
			.cra_name	= "authenc(hmac(sha1),cbc(aes))",
			.cra_blocksize	= AES_BLOCK_SIZE,
		},
		.ivsize		= AES_BLOCK_SIZE,
		.maxauthsize	= SHA1_DIGEST_SIZE,
	},
	.hash = &hash_alg_sha1,
	.cfg_enc = CIPH_ENCR | MOD_AES | MOD_CBC_ENC,
	.cfg_dec = CIPH_DECR | MOD_AES | MOD_CBC_DEC,
} };

#define IXP_POSTFIX "-ixp4xx"

static const struct platform_device_info ixp_dev_info __initdata = {
	.name		= DRIVER_NAME,
	.id		= 0,
	.dma_mask	= DMA_BIT_MASK(32),
};

static int __init ixp_module_init(void)
{
	int num = ARRAY_SIZE(ixp4xx_algos);
	int i, err;

	pdev = platform_device_register_full(&ixp_dev_info);
	if (IS_ERR(pdev))
		return PTR_ERR(pdev);

	spin_lock_init(&desc_lock);
	spin_lock_init(&emerg_lock);

	err = init_ixp_crypto(&pdev->dev);
	if (err) {
		platform_device_unregister(pdev);
		return err;
	}
	for (i = 0; i < num; i++) {
		struct skcipher_alg *cra = &ixp4xx_algos[i].crypto;

		if (snprintf(cra->base.cra_driver_name, CRYPTO_MAX_ALG_NAME,
			     "%s"IXP_POSTFIX, cra->base.cra_name) >=
		    CRYPTO_MAX_ALG_NAME)
			continue;
		if (!support_aes && (ixp4xx_algos[i].cfg_enc & MOD_AES))
			continue;

		/* block ciphers */
		cra->base.cra_flags = CRYPTO_ALG_KERN_DRIVER_ONLY |
				      CRYPTO_ALG_ASYNC;
		if (!cra->setkey)
			cra->setkey = ablk_setkey;
		if (!cra->encrypt)
			cra->encrypt = ablk_encrypt;
		if (!cra->decrypt)
			cra->decrypt = ablk_decrypt;
		cra->init = init_tfm_ablk;
		cra->exit = exit_tfm_ablk;

		cra->base.cra_ctxsize = sizeof(struct ixp_ctx);
		cra->base.cra_module = THIS_MODULE;
		cra->base.cra_alignmask = 3;
		cra->base.cra_priority = 300;
		if (crypto_register_skcipher(cra))
			printk(KERN_ERR "Failed to register '%s'\n",
			       cra->base.cra_name);
		else
			ixp4xx_algos[i].registered = 1;
	}

	for (i = 0; i < ARRAY_SIZE(ixp4xx_aeads); i++) {
		struct aead_alg *cra = &ixp4xx_aeads[i].crypto;

		if (snprintf(cra->base.cra_driver_name, CRYPTO_MAX_ALG_NAME,
			     "%s"IXP_POSTFIX, cra->base.cra_name) >=
		    CRYPTO_MAX_ALG_NAME)
			continue;
		/* skip AES-based entries when the firmware lacks AES */
		if (!support_aes && (ixp4xx_aeads[i].cfg_enc & MOD_AES))
			continue;

		/* authenc */
		cra->base.cra_flags = CRYPTO_ALG_KERN_DRIVER_ONLY |
				      CRYPTO_ALG_ASYNC;
		cra->setkey = cra->setkey ?: aead_setkey;
		cra->setauthsize = aead_setauthsize;
		cra->encrypt = aead_encrypt;
		cra->decrypt = aead_decrypt;
		cra->init = init_tfm_aead;
		cra->exit = exit_tfm_aead;

		cra->base.cra_ctxsize = sizeof(struct ixp_ctx);
		cra->base.cra_module = THIS_MODULE;
		cra->base.cra_alignmask = 3;
		cra->base.cra_priority = 300;

		if (crypto_register_aead(cra))
			printk(KERN_ERR "Failed to register '%s'\n",
			       cra->base.cra_driver_name);
		else
			ixp4xx_aeads[i].registered = 1;
	}
	return 0;
}

static void __exit ixp_module_exit(void)
{
	int num = ARRAY_SIZE(ixp4xx_algos);
	int i;

	for (i = 0; i < ARRAY_SIZE(ixp4xx_aeads); i++) {
		if (ixp4xx_aeads[i].registered)
			crypto_unregister_aead(&ixp4xx_aeads[i].crypto);
	}

	for (i = 0; i < num; i++) {
		if (ixp4xx_algos[i].registered)
			crypto_unregister_skcipher(&ixp4xx_algos[i].crypto);
	}
	release_ixp_crypto(&pdev->dev);
	platform_device_unregister(pdev);
}

module_init(ixp_module_init);
module_exit(ixp_module_exit);

MODULE_LICENSE("GPL");
MODULE_AUTHOR("Christian Hohnstaedt <chohnstaedt@innominate.com>");
MODULE_DESCRIPTION("IXP4xx hardware crypto");