/*
 * Support for ATMEL AES HW acceleration.
 *
 * Copyright (c) 2012 Eukréa Electromatique - ATMEL
 * Author: Nicolas Royer <nicolas@eukrea.com>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as published
 * by the Free Software Foundation.
 *
 * Some ideas are from omap-aes.c driver.
 */
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/slab.h>
#include <linux/err.h>
#include <linux/clk.h>
#include <linux/io.h>
#include <linux/hw_random.h>
#include <linux/platform_device.h>

#include <linux/device.h>
#include <linux/init.h>
#include <linux/errno.h>
#include <linux/interrupt.h>
#include <linux/irq.h>
#include <linux/scatterlist.h>
#include <linux/dma-mapping.h>
#include <linux/delay.h>
#include <linux/crypto.h>
#include <linux/cryptohash.h>
#include <crypto/scatterwalk.h>
#include <crypto/algapi.h>
#include <crypto/aes.h>
#include <crypto/hash.h>
#include <crypto/internal/hash.h>
#include <linux/platform_data/atmel-aes.h>
#include "atmel-aes-regs.h"
#define CFB8_BLOCK_SIZE		1
#define CFB16_BLOCK_SIZE	2
#define CFB32_BLOCK_SIZE	4
#define CFB64_BLOCK_SIZE	8

#define AES_FLAGS_MODE_MASK	0x01ff
#define AES_FLAGS_ENCRYPT	BIT(0)
#define AES_FLAGS_CBC		BIT(1)
#define AES_FLAGS_CFB		BIT(2)
#define AES_FLAGS_CFB8		BIT(3)
#define AES_FLAGS_CFB16		BIT(4)
#define AES_FLAGS_CFB32		BIT(5)
#define AES_FLAGS_CFB64		BIT(6)
#define AES_FLAGS_OFB		BIT(7)
#define AES_FLAGS_CTR		BIT(8)

#define AES_FLAGS_INIT		BIT(16)
#define AES_FLAGS_DMA		BIT(17)
#define AES_FLAGS_BUSY		BIT(18)

#define AES_FLAGS_DUALBUFF	BIT(24)

#define ATMEL_AES_QUEUE_LENGTH	1
#define ATMEL_AES_CACHE_SIZE	0

#define ATMEL_AES_DMA_THRESHOLD	16
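
/*
 * Requests larger than ATMEL_AES_DMA_THRESHOLD bytes are fed to the engine
 * through the two DMA channels; smaller requests are copied into buf_in /
 * buf_out and handled over PIO (see atmel_aes_handle_queue()).
 */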
struct atmel_aes_ctx {
	struct atmel_aes_dev	*dd;

	int			keylen;
	u32			key[AES_KEYSIZE_256 / sizeof(u32)];
};

struct atmel_aes_reqctx {
	unsigned long		mode;
};

struct atmel_aes_dma {
	struct dma_chan		*chan;
	struct dma_slave_config	dma_conf;
};

struct atmel_aes_dev {
	struct list_head	list;
	unsigned long		phys_base;
	void __iomem		*io_base;

	struct atmel_aes_ctx	*ctx;
	struct device		*dev;
	struct clk		*iclk;
	int			irq;

	unsigned long		flags;
	int			err;

	spinlock_t		lock;
	struct crypto_queue	queue;

	struct tasklet_struct	done_task;
	struct tasklet_struct	queue_task;

	struct ablkcipher_request	*req;
	size_t			total;

	struct scatterlist	*in_sg;
	unsigned int		nb_in_sg;

	struct scatterlist	*out_sg;
	unsigned int		nb_out_sg;

	size_t			bufcnt;

	u8	buf_in[ATMEL_AES_DMA_THRESHOLD] __aligned(sizeof(u32));
	struct atmel_aes_dma	dma_lch_in;

	u8	buf_out[ATMEL_AES_DMA_THRESHOLD] __aligned(sizeof(u32));
	struct atmel_aes_dma	dma_lch_out;

	u32			hw_version;
};
struct atmel_aes_drv {
	struct list_head	dev_list;
	spinlock_t		lock;
};

static struct atmel_aes_drv atmel_aes = {
	.dev_list = LIST_HEAD_INIT(atmel_aes.dev_list),
	.lock = __SPIN_LOCK_UNLOCKED(atmel_aes.lock),
};
/* Count the scatterlist entries needed to cover req->nbytes. */
static int atmel_aes_sg_length(struct ablkcipher_request *req,
			struct scatterlist *sg)
{
	unsigned int total = req->nbytes;
	unsigned int len;
	int sg_nb = 0;
	struct scatterlist *sg_list = sg;

	while (total) {
		len = min(sg_list->length, total);

		sg_nb++;
		total -= len;

		sg_list = sg_next(sg_list);
		if (!sg_list)
			total = 0;
	}

	return sg_nb;
}
static inline u32 atmel_aes_read(struct atmel_aes_dev *dd, u32 offset)
{
	return readl_relaxed(dd->io_base + offset);
}

static inline void atmel_aes_write(struct atmel_aes_dev *dd,
					u32 offset, u32 value)
{
	writel_relaxed(value, dd->io_base + offset);
}

static void atmel_aes_read_n(struct atmel_aes_dev *dd, u32 offset,
					u32 *value, int count)
{
	for (; count--; value++, offset += 4)
		*value = atmel_aes_read(dd, offset);
}

static void atmel_aes_write_n(struct atmel_aes_dev *dd, u32 offset,
					u32 *value, int count)
{
	for (; count--; value++, offset += 4)
		atmel_aes_write(dd, offset, *value);
}
static void atmel_aes_dualbuff_test(struct atmel_aes_dev *dd)
{
	atmel_aes_write(dd, AES_MR, AES_MR_DUALBUFF);

	if (atmel_aes_read(dd, AES_MR) & AES_MR_DUALBUFF)
		dd->flags |= AES_FLAGS_DUALBUFF;
}
static struct atmel_aes_dev *atmel_aes_find_dev(struct atmel_aes_ctx *ctx)
{
	struct atmel_aes_dev *aes_dd = NULL;
	struct atmel_aes_dev *tmp;

	spin_lock_bh(&atmel_aes.lock);
	if (!ctx->dd) {
		list_for_each_entry(tmp, &atmel_aes.dev_list, list) {
			aes_dd = tmp;
			break;
		}
		ctx->dd = aes_dd;
	} else {
		aes_dd = ctx->dd;
	}
	spin_unlock_bh(&atmel_aes.lock);

	return aes_dd;
}
static int atmel_aes_hw_init(struct atmel_aes_dev *dd)
{
	clk_prepare_enable(dd->iclk);

	if (!(dd->flags & AES_FLAGS_INIT)) {
		atmel_aes_write(dd, AES_CR, AES_CR_SWRST);
		atmel_aes_dualbuff_test(dd);
		dd->flags |= AES_FLAGS_INIT;
		dd->err = 0;
	}

	return 0;
}
static void atmel_aes_hw_version_init(struct atmel_aes_dev *dd)
{
	atmel_aes_hw_init(dd);

	dd->hw_version = atmel_aes_read(dd, AES_HW_VERSION);

	clk_disable_unprepare(dd->iclk);
}
static void atmel_aes_finish_req(struct atmel_aes_dev *dd, int err)
{
	struct ablkcipher_request *req = dd->req;

	clk_disable_unprepare(dd->iclk);
	dd->flags &= ~AES_FLAGS_BUSY;

	req->base.complete(&req->base, err);
}
static void atmel_aes_dma_callback(void *data)
{
	struct atmel_aes_dev *dd = data;

	/* dma_lch_out - completed */
	tasklet_schedule(&dd->done_task);
}
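
/*
 * DMA path: a memory-to-device transfer feeds the input scatterlist into the
 * engine while a device-to-memory transfer drains the result into the output
 * scatterlist; completion is reported by the output channel callback above,
 * which schedules done_task.
 */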
static int atmel_aes_crypt_dma(struct atmel_aes_dev *dd)
{
	struct dma_async_tx_descriptor	*in_desc, *out_desc;
	int nb_dma_sg_in, nb_dma_sg_out;

	dd->nb_in_sg = atmel_aes_sg_length(dd->req, dd->in_sg);
	if (!dd->nb_in_sg)
		goto exit_err;

	nb_dma_sg_in = dma_map_sg(dd->dev, dd->in_sg, dd->nb_in_sg,
			DMA_TO_DEVICE);
	if (!nb_dma_sg_in)
		goto exit_err;

	in_desc = dmaengine_prep_slave_sg(dd->dma_lch_in.chan, dd->in_sg,
				nb_dma_sg_in, DMA_MEM_TO_DEV,
				DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
	if (!in_desc)
		goto unmap_in;

	/* callback not needed */

	dd->nb_out_sg = atmel_aes_sg_length(dd->req, dd->out_sg);
	if (!dd->nb_out_sg)
		goto unmap_in;

	nb_dma_sg_out = dma_map_sg(dd->dev, dd->out_sg, dd->nb_out_sg,
			DMA_FROM_DEVICE);
	if (!nb_dma_sg_out)
		goto unmap_out;

	out_desc = dmaengine_prep_slave_sg(dd->dma_lch_out.chan, dd->out_sg,
				nb_dma_sg_out, DMA_DEV_TO_MEM,
				DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
	if (!out_desc)
		goto unmap_out;

	out_desc->callback = atmel_aes_dma_callback;
	out_desc->callback_param = dd;

	dd->total -= dd->req->nbytes;

	dmaengine_submit(out_desc);
	dma_async_issue_pending(dd->dma_lch_out.chan);

	dmaengine_submit(in_desc);
	dma_async_issue_pending(dd->dma_lch_in.chan);

	return 0;

unmap_out:
	dma_unmap_sg(dd->dev, dd->out_sg, dd->nb_out_sg,
		DMA_FROM_DEVICE);
unmap_in:
	dma_unmap_sg(dd->dev, dd->in_sg, dd->nb_in_sg,
		DMA_TO_DEVICE);
exit_err:
	return -EINVAL;
}
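
/*
 * PIO path: the input scatterlist is copied into buf_in and written to the
 * input data registers; the DATARDY interrupt then drives completion through
 * done_task, which copies the result back out of buf_out.
 */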
static int atmel_aes_crypt_cpu_start(struct atmel_aes_dev *dd)
{
	dd->flags &= ~AES_FLAGS_DMA;

	/* use cache buffers */
	dd->nb_in_sg = atmel_aes_sg_length(dd->req, dd->in_sg);
	if (!dd->nb_in_sg)
		return -EINVAL;

	dd->nb_out_sg = atmel_aes_sg_length(dd->req, dd->out_sg);
	if (!dd->nb_out_sg)
		return -EINVAL;

	dd->bufcnt = sg_copy_to_buffer(dd->in_sg, dd->nb_in_sg,
					dd->buf_in, dd->total);
	if (!dd->bufcnt)
		return -EINVAL;

	dd->total -= dd->bufcnt;

	atmel_aes_write(dd, AES_IER, AES_INT_DATARDY);
	atmel_aes_write_n(dd, AES_IDATAR(0), (u32 *) dd->buf_in,
				dd->bufcnt >> 2);

	return 0;
}
static int atmel_aes_crypt_dma_start(struct atmel_aes_dev *dd)
{
	int err;

	if (dd->flags & AES_FLAGS_CFB8) {
		dd->dma_lch_in.dma_conf.dst_addr_width =
			DMA_SLAVE_BUSWIDTH_1_BYTE;
		dd->dma_lch_out.dma_conf.src_addr_width =
			DMA_SLAVE_BUSWIDTH_1_BYTE;
	} else if (dd->flags & AES_FLAGS_CFB16) {
		dd->dma_lch_in.dma_conf.dst_addr_width =
			DMA_SLAVE_BUSWIDTH_2_BYTES;
		dd->dma_lch_out.dma_conf.src_addr_width =
			DMA_SLAVE_BUSWIDTH_2_BYTES;
	} else {
		dd->dma_lch_in.dma_conf.dst_addr_width =
			DMA_SLAVE_BUSWIDTH_4_BYTES;
		dd->dma_lch_out.dma_conf.src_addr_width =
			DMA_SLAVE_BUSWIDTH_4_BYTES;
	}

	dmaengine_slave_config(dd->dma_lch_in.chan, &dd->dma_lch_in.dma_conf);
	dmaengine_slave_config(dd->dma_lch_out.chan, &dd->dma_lch_out.dma_conf);

	dd->flags |= AES_FLAGS_DMA;
	err = atmel_aes_crypt_dma(dd);

	return err;
}
static int atmel_aes_write_ctrl(struct atmel_aes_dev *dd)
{
	int err;
	u32 valcr = 0, valmr = 0;

	err = atmel_aes_hw_init(dd);
	if (err)
		return err;

	/* MR register must be set before IV registers */
	if (dd->ctx->keylen == AES_KEYSIZE_128)
		valmr |= AES_MR_KEYSIZE_128;
	else if (dd->ctx->keylen == AES_KEYSIZE_192)
		valmr |= AES_MR_KEYSIZE_192;
	else
		valmr |= AES_MR_KEYSIZE_256;

	if (dd->flags & AES_FLAGS_CBC) {
		valmr |= AES_MR_OPMOD_CBC;
	} else if (dd->flags & AES_FLAGS_CFB) {
		valmr |= AES_MR_OPMOD_CFB;
		if (dd->flags & AES_FLAGS_CFB8)
			valmr |= AES_MR_CFBS_8b;
		else if (dd->flags & AES_FLAGS_CFB16)
			valmr |= AES_MR_CFBS_16b;
		else if (dd->flags & AES_FLAGS_CFB32)
			valmr |= AES_MR_CFBS_32b;
		else if (dd->flags & AES_FLAGS_CFB64)
			valmr |= AES_MR_CFBS_64b;
	} else if (dd->flags & AES_FLAGS_OFB) {
		valmr |= AES_MR_OPMOD_OFB;
	} else if (dd->flags & AES_FLAGS_CTR) {
		valmr |= AES_MR_OPMOD_CTR;
	} else {
		valmr |= AES_MR_OPMOD_ECB;
	}

	if (dd->flags & AES_FLAGS_ENCRYPT)
		valmr |= AES_MR_CYPHER_ENC;

	if (dd->total > ATMEL_AES_DMA_THRESHOLD) {
		valmr |= AES_MR_SMOD_IDATAR0;
		if (dd->flags & AES_FLAGS_DUALBUFF)
			valmr |= AES_MR_DUALBUFF;
	} else {
		valmr |= AES_MR_SMOD_AUTO;
	}

	atmel_aes_write(dd, AES_CR, valcr);
	atmel_aes_write(dd, AES_MR, valmr);

	atmel_aes_write_n(dd, AES_KEYWR(0), dd->ctx->key,
						dd->ctx->keylen >> 2);

	if (((dd->flags & AES_FLAGS_CBC) || (dd->flags & AES_FLAGS_CFB) ||
	   (dd->flags & AES_FLAGS_OFB) || (dd->flags & AES_FLAGS_CTR)) &&
	   dd->req->info) {
		atmel_aes_write_n(dd, AES_IVR(0), dd->req->info, 4);
	}

	return 0;
}
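
/*
 * Requests are queued under dd->lock; AES_FLAGS_BUSY guarantees that only one
 * request is programmed into the hardware at a time. done_task and queue_task
 * re-enter this function with req == NULL to pick up the next queued request.
 */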
static int atmel_aes_handle_queue(struct atmel_aes_dev *dd,
			       struct ablkcipher_request *req)
{
	struct crypto_async_request *async_req, *backlog;
	struct atmel_aes_ctx *ctx;
	struct atmel_aes_reqctx *rctx;
	unsigned long flags;
	int err, ret = 0;

	spin_lock_irqsave(&dd->lock, flags);
	if (req)
		ret = ablkcipher_enqueue_request(&dd->queue, req);
	if (dd->flags & AES_FLAGS_BUSY) {
		spin_unlock_irqrestore(&dd->lock, flags);
		return ret;
	}
	backlog = crypto_get_backlog(&dd->queue);
	async_req = crypto_dequeue_request(&dd->queue);
	if (async_req)
		dd->flags |= AES_FLAGS_BUSY;
	spin_unlock_irqrestore(&dd->lock, flags);

	if (!async_req)
		return ret;

	if (backlog)
		backlog->complete(backlog, -EINPROGRESS);

	req = ablkcipher_request_cast(async_req);

	/* assign new request to device */
	dd->req = req;
	dd->total = req->nbytes;
	dd->in_sg = req->src;
	dd->out_sg = req->dst;

	rctx = ablkcipher_request_ctx(req);
	ctx = crypto_ablkcipher_ctx(crypto_ablkcipher_reqtfm(req));
	rctx->mode &= AES_FLAGS_MODE_MASK;
	dd->flags = (dd->flags & ~AES_FLAGS_MODE_MASK) | rctx->mode;
	dd->ctx = ctx;
	ctx->dd = dd;

	err = atmel_aes_write_ctrl(dd);
	if (!err) {
		if (dd->total > ATMEL_AES_DMA_THRESHOLD)
			err = atmel_aes_crypt_dma_start(dd);
		else
			err = atmel_aes_crypt_cpu_start(dd);
	}
	if (err) {
		/* aes_task will not finish it, so do it here */
		atmel_aes_finish_req(dd, err);
		tasklet_schedule(&dd->queue_task);
	}

	return ret;
}
static int atmel_aes_crypt_dma_stop(struct atmel_aes_dev *dd)
{
	int err = 0;

	if (dd->flags & AES_FLAGS_DMA) {
		dma_unmap_sg(dd->dev, dd->out_sg,
			dd->nb_out_sg, DMA_FROM_DEVICE);
		dma_unmap_sg(dd->dev, dd->in_sg,
			dd->nb_in_sg, DMA_TO_DEVICE);
	}

	return err;
}
static int atmel_aes_crypt(struct ablkcipher_request *req, unsigned long mode)
{
	struct atmel_aes_ctx *ctx = crypto_ablkcipher_ctx(
			crypto_ablkcipher_reqtfm(req));
	struct atmel_aes_reqctx *rctx = ablkcipher_request_ctx(req);
	struct atmel_aes_dev *dd;

	if (!IS_ALIGNED(req->nbytes, AES_BLOCK_SIZE)) {
		pr_err("request size is not exact amount of AES blocks\n");
		return -EINVAL;
	}

	dd = atmel_aes_find_dev(ctx);
	if (!dd)
		return -ENODEV;

	rctx->mode = mode;

	return atmel_aes_handle_queue(dd, req);
}
static bool atmel_aes_filter(struct dma_chan *chan, void *slave)
{
	struct at_dma_slave	*sl = slave;

	if (sl && sl->dma_dev == chan->device->dev) {
		chan->private = sl;
		return true;
	} else {
		return false;
	}
}
static int atmel_aes_dma_init(struct atmel_aes_dev *dd)
{
	int err = -ENOMEM;
	struct aes_platform_data	*pdata;
	dma_cap_mask_t mask_in, mask_out;

	pdata = dd->dev->platform_data;

	if (pdata && pdata->dma_slave->txdata.dma_dev &&
	    pdata->dma_slave->rxdata.dma_dev) {

		/* Try to grab 2 DMA channels */
		dma_cap_zero(mask_in);
		dma_cap_set(DMA_SLAVE, mask_in);

		dd->dma_lch_in.chan = dma_request_channel(mask_in,
				atmel_aes_filter, &pdata->dma_slave->rxdata);
		if (!dd->dma_lch_in.chan)
			goto err_dma_in;

		dd->dma_lch_in.dma_conf.direction = DMA_MEM_TO_DEV;
		dd->dma_lch_in.dma_conf.dst_addr = dd->phys_base +
			AES_IDATAR(0);
		dd->dma_lch_in.dma_conf.src_maxburst = 1;
		dd->dma_lch_in.dma_conf.dst_maxburst = 1;
		dd->dma_lch_in.dma_conf.device_fc = false;

		dma_cap_zero(mask_out);
		dma_cap_set(DMA_SLAVE, mask_out);
		dd->dma_lch_out.chan = dma_request_channel(mask_out,
				atmel_aes_filter, &pdata->dma_slave->txdata);
		if (!dd->dma_lch_out.chan)
			goto err_dma_out;

		dd->dma_lch_out.dma_conf.direction = DMA_DEV_TO_MEM;
		dd->dma_lch_out.dma_conf.src_addr = dd->phys_base +
			AES_ODATAR(0);
		dd->dma_lch_out.dma_conf.src_maxburst = 1;
		dd->dma_lch_out.dma_conf.dst_maxburst = 1;
		dd->dma_lch_out.dma_conf.device_fc = false;

		return 0;
	} else {
		return -ENODEV;
	}

err_dma_out:
	dma_release_channel(dd->dma_lch_in.chan);
err_dma_in:
	return err;
}
static void atmel_aes_dma_cleanup(struct atmel_aes_dev *dd)
{
	dma_release_channel(dd->dma_lch_in.chan);
	dma_release_channel(dd->dma_lch_out.chan);
}
static int atmel_aes_setkey(struct crypto_ablkcipher *tfm, const u8 *key,
			   unsigned int keylen)
{
	struct atmel_aes_ctx *ctx = crypto_ablkcipher_ctx(tfm);

	if (keylen != AES_KEYSIZE_128 && keylen != AES_KEYSIZE_192 &&
		   keylen != AES_KEYSIZE_256) {
		crypto_ablkcipher_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}

	memcpy(ctx->key, key, keylen);
	ctx->keylen = keylen;

	return 0;
}
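
/*
 * Each mode-specific entry point below only tags the request with the
 * matching AES_FLAGS_* bits and defers to atmel_aes_crypt(); decrypt
 * variants simply omit AES_FLAGS_ENCRYPT.
 */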
static int atmel_aes_ecb_encrypt(struct ablkcipher_request *req)
{
	return atmel_aes_crypt(req,
		AES_FLAGS_ENCRYPT);
}

static int atmel_aes_ecb_decrypt(struct ablkcipher_request *req)
{
	return atmel_aes_crypt(req,
		0);
}

static int atmel_aes_cbc_encrypt(struct ablkcipher_request *req)
{
	return atmel_aes_crypt(req,
		AES_FLAGS_ENCRYPT | AES_FLAGS_CBC);
}

static int atmel_aes_cbc_decrypt(struct ablkcipher_request *req)
{
	return atmel_aes_crypt(req,
		AES_FLAGS_CBC);
}

static int atmel_aes_ofb_encrypt(struct ablkcipher_request *req)
{
	return atmel_aes_crypt(req,
		AES_FLAGS_ENCRYPT | AES_FLAGS_OFB);
}

static int atmel_aes_ofb_decrypt(struct ablkcipher_request *req)
{
	return atmel_aes_crypt(req,
		AES_FLAGS_OFB);
}

static int atmel_aes_cfb_encrypt(struct ablkcipher_request *req)
{
	return atmel_aes_crypt(req,
		AES_FLAGS_ENCRYPT | AES_FLAGS_CFB);
}

static int atmel_aes_cfb_decrypt(struct ablkcipher_request *req)
{
	return atmel_aes_crypt(req,
		AES_FLAGS_CFB);
}

static int atmel_aes_cfb64_encrypt(struct ablkcipher_request *req)
{
	return atmel_aes_crypt(req,
		AES_FLAGS_ENCRYPT | AES_FLAGS_CFB | AES_FLAGS_CFB64);
}

static int atmel_aes_cfb64_decrypt(struct ablkcipher_request *req)
{
	return atmel_aes_crypt(req,
		AES_FLAGS_CFB | AES_FLAGS_CFB64);
}

static int atmel_aes_cfb32_encrypt(struct ablkcipher_request *req)
{
	return atmel_aes_crypt(req,
		AES_FLAGS_ENCRYPT | AES_FLAGS_CFB | AES_FLAGS_CFB32);
}

static int atmel_aes_cfb32_decrypt(struct ablkcipher_request *req)
{
	return atmel_aes_crypt(req,
		AES_FLAGS_CFB | AES_FLAGS_CFB32);
}

static int atmel_aes_cfb16_encrypt(struct ablkcipher_request *req)
{
	return atmel_aes_crypt(req,
		AES_FLAGS_ENCRYPT | AES_FLAGS_CFB | AES_FLAGS_CFB16);
}

static int atmel_aes_cfb16_decrypt(struct ablkcipher_request *req)
{
	return atmel_aes_crypt(req,
		AES_FLAGS_CFB | AES_FLAGS_CFB16);
}

static int atmel_aes_cfb8_encrypt(struct ablkcipher_request *req)
{
	return atmel_aes_crypt(req,
		AES_FLAGS_ENCRYPT | AES_FLAGS_CFB | AES_FLAGS_CFB8);
}

static int atmel_aes_cfb8_decrypt(struct ablkcipher_request *req)
{
	return atmel_aes_crypt(req,
		AES_FLAGS_CFB | AES_FLAGS_CFB8);
}

static int atmel_aes_ctr_encrypt(struct ablkcipher_request *req)
{
	return atmel_aes_crypt(req,
		AES_FLAGS_ENCRYPT | AES_FLAGS_CTR);
}

static int atmel_aes_ctr_decrypt(struct ablkcipher_request *req)
{
	return atmel_aes_crypt(req,
		AES_FLAGS_CTR);
}
static int atmel_aes_cra_init(struct crypto_tfm *tfm)
{
	tfm->crt_ablkcipher.reqsize = sizeof(struct atmel_aes_reqctx);

	return 0;
}

static void atmel_aes_cra_exit(struct crypto_tfm *tfm)
{
}
static struct crypto_alg aes_algs[] = {
{
	.cra_name		= "ecb(aes)",
	.cra_driver_name	= "atmel-ecb-aes",
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct atmel_aes_ctx),
	.cra_alignmask		= 0x0,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= atmel_aes_cra_init,
	.cra_exit		= atmel_aes_cra_exit,
	.cra_u.ablkcipher = {
		.min_keysize	= AES_MIN_KEY_SIZE,
		.max_keysize	= AES_MAX_KEY_SIZE,
		.setkey		= atmel_aes_setkey,
		.encrypt	= atmel_aes_ecb_encrypt,
		.decrypt	= atmel_aes_ecb_decrypt,
	}
},
{
	.cra_name		= "cbc(aes)",
	.cra_driver_name	= "atmel-cbc-aes",
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct atmel_aes_ctx),
	.cra_alignmask		= 0x0,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= atmel_aes_cra_init,
	.cra_exit		= atmel_aes_cra_exit,
	.cra_u.ablkcipher = {
		.min_keysize	= AES_MIN_KEY_SIZE,
		.max_keysize	= AES_MAX_KEY_SIZE,
		.ivsize		= AES_BLOCK_SIZE,
		.setkey		= atmel_aes_setkey,
		.encrypt	= atmel_aes_cbc_encrypt,
		.decrypt	= atmel_aes_cbc_decrypt,
	}
},
{
	.cra_name		= "ofb(aes)",
	.cra_driver_name	= "atmel-ofb-aes",
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct atmel_aes_ctx),
	.cra_alignmask		= 0x0,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= atmel_aes_cra_init,
	.cra_exit		= atmel_aes_cra_exit,
	.cra_u.ablkcipher = {
		.min_keysize	= AES_MIN_KEY_SIZE,
		.max_keysize	= AES_MAX_KEY_SIZE,
		.ivsize		= AES_BLOCK_SIZE,
		.setkey		= atmel_aes_setkey,
		.encrypt	= atmel_aes_ofb_encrypt,
		.decrypt	= atmel_aes_ofb_decrypt,
	}
},
{
	.cra_name		= "cfb(aes)",
	.cra_driver_name	= "atmel-cfb-aes",
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct atmel_aes_ctx),
	.cra_alignmask		= 0x0,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= atmel_aes_cra_init,
	.cra_exit		= atmel_aes_cra_exit,
	.cra_u.ablkcipher = {
		.min_keysize	= AES_MIN_KEY_SIZE,
		.max_keysize	= AES_MAX_KEY_SIZE,
		.ivsize		= AES_BLOCK_SIZE,
		.setkey		= atmel_aes_setkey,
		.encrypt	= atmel_aes_cfb_encrypt,
		.decrypt	= atmel_aes_cfb_decrypt,
	}
},
{
	.cra_name		= "cfb32(aes)",
	.cra_driver_name	= "atmel-cfb32-aes",
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= CFB32_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct atmel_aes_ctx),
	.cra_alignmask		= 0x0,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= atmel_aes_cra_init,
	.cra_exit		= atmel_aes_cra_exit,
	.cra_u.ablkcipher = {
		.min_keysize	= AES_MIN_KEY_SIZE,
		.max_keysize	= AES_MAX_KEY_SIZE,
		.ivsize		= AES_BLOCK_SIZE,
		.setkey		= atmel_aes_setkey,
		.encrypt	= atmel_aes_cfb32_encrypt,
		.decrypt	= atmel_aes_cfb32_decrypt,
	}
},
{
	.cra_name		= "cfb16(aes)",
	.cra_driver_name	= "atmel-cfb16-aes",
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= CFB16_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct atmel_aes_ctx),
	.cra_alignmask		= 0x0,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= atmel_aes_cra_init,
	.cra_exit		= atmel_aes_cra_exit,
	.cra_u.ablkcipher = {
		.min_keysize	= AES_MIN_KEY_SIZE,
		.max_keysize	= AES_MAX_KEY_SIZE,
		.ivsize		= AES_BLOCK_SIZE,
		.setkey		= atmel_aes_setkey,
		.encrypt	= atmel_aes_cfb16_encrypt,
		.decrypt	= atmel_aes_cfb16_decrypt,
	}
},
{
	.cra_name		= "cfb8(aes)",
	.cra_driver_name	= "atmel-cfb8-aes",
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	/* CFB8 operates on single bytes, so use CFB8_BLOCK_SIZE here. */
	.cra_blocksize		= CFB8_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct atmel_aes_ctx),
	.cra_alignmask		= 0x0,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= atmel_aes_cra_init,
	.cra_exit		= atmel_aes_cra_exit,
	.cra_u.ablkcipher = {
		.min_keysize	= AES_MIN_KEY_SIZE,
		.max_keysize	= AES_MAX_KEY_SIZE,
		.ivsize		= AES_BLOCK_SIZE,
		.setkey		= atmel_aes_setkey,
		.encrypt	= atmel_aes_cfb8_encrypt,
		.decrypt	= atmel_aes_cfb8_decrypt,
	}
},
{
	.cra_name		= "ctr(aes)",
	.cra_driver_name	= "atmel-ctr-aes",
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct atmel_aes_ctx),
	.cra_alignmask		= 0x0,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= atmel_aes_cra_init,
	.cra_exit		= atmel_aes_cra_exit,
	.cra_u.ablkcipher = {
		.min_keysize	= AES_MIN_KEY_SIZE,
		.max_keysize	= AES_MAX_KEY_SIZE,
		.ivsize		= AES_BLOCK_SIZE,
		.setkey		= atmel_aes_setkey,
		.encrypt	= atmel_aes_ctr_encrypt,
		.decrypt	= atmel_aes_ctr_decrypt,
	}
},
};
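
/*
 * CFB64 is only available on IP revisions >= 0x130, so it is kept in a
 * separate array and registered conditionally (see atmel_aes_register_algs()).
 */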
static struct crypto_alg aes_cfb64_alg[] = {
{
	.cra_name		= "cfb64(aes)",
	.cra_driver_name	= "atmel-cfb64-aes",
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= CFB64_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct atmel_aes_ctx),
	.cra_alignmask		= 0x0,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= atmel_aes_cra_init,
	.cra_exit		= atmel_aes_cra_exit,
	.cra_u.ablkcipher = {
		.min_keysize	= AES_MIN_KEY_SIZE,
		.max_keysize	= AES_MAX_KEY_SIZE,
		.ivsize		= AES_BLOCK_SIZE,
		.setkey		= atmel_aes_setkey,
		.encrypt	= atmel_aes_cfb64_encrypt,
		.decrypt	= atmel_aes_cfb64_decrypt,
	}
},
};
static void atmel_aes_queue_task(unsigned long data)
{
	struct atmel_aes_dev *dd = (struct atmel_aes_dev *)data;

	atmel_aes_handle_queue(dd, NULL);
}
static void atmel_aes_done_task(unsigned long data)
{
	struct atmel_aes_dev *dd = (struct atmel_aes_dev *) data;
	int err;

	if (!(dd->flags & AES_FLAGS_DMA)) {
		/* PIO mode: read the result back and copy it to the request */
		atmel_aes_read_n(dd, AES_ODATAR(0), (u32 *) dd->buf_out,
				dd->bufcnt >> 2);

		if (sg_copy_from_buffer(dd->out_sg, dd->nb_out_sg,
				dd->buf_out, dd->bufcnt))
			err = 0;
		else
			err = -EINVAL;

		goto cpu_end;
	}

	err = atmel_aes_crypt_dma_stop(dd);

	err = dd->err ? : err;

	if (dd->total && !err) {
		err = atmel_aes_crypt_dma_start(dd);
		if (!err)
			return; /* DMA started. Not finishing. */
	}

cpu_end:
	atmel_aes_finish_req(dd, err);
	atmel_aes_handle_queue(dd, NULL);
}
static irqreturn_t atmel_aes_irq(int irq, void *dev_id)
{
	struct atmel_aes_dev *aes_dd = dev_id;
	u32 reg;

	reg = atmel_aes_read(aes_dd, AES_ISR);
	if (reg & atmel_aes_read(aes_dd, AES_IMR)) {
		atmel_aes_write(aes_dd, AES_IDR, reg);
		if (AES_FLAGS_BUSY & aes_dd->flags)
			tasklet_schedule(&aes_dd->done_task);
		else
			dev_warn(aes_dd->dev, "AES interrupt when no active requests.\n");
		return IRQ_HANDLED;
	}

	return IRQ_NONE;
}
static void atmel_aes_unregister_algs(struct atmel_aes_dev *dd)
{
	int i;

	for (i = 0; i < ARRAY_SIZE(aes_algs); i++)
		crypto_unregister_alg(&aes_algs[i]);
	if (dd->hw_version >= 0x130)
		crypto_unregister_alg(&aes_cfb64_alg[0]);
}
static int atmel_aes_register_algs(struct atmel_aes_dev *dd)
{
	int err, i, j;

	for (i = 0; i < ARRAY_SIZE(aes_algs); i++) {
		INIT_LIST_HEAD(&aes_algs[i].cra_list);
		err = crypto_register_alg(&aes_algs[i]);
		if (err)
			goto err_aes_algs;
	}

	atmel_aes_hw_version_init(dd);

	if (dd->hw_version >= 0x130) {
		INIT_LIST_HEAD(&aes_cfb64_alg[0].cra_list);
		err = crypto_register_alg(&aes_cfb64_alg[0]);
		if (err)
			goto err_aes_cfb64_alg;
	}

	return 0;

err_aes_cfb64_alg:
	i = ARRAY_SIZE(aes_algs);
err_aes_algs:
	for (j = 0; j < i; j++)
		crypto_unregister_alg(&aes_algs[j]);

	return err;
}
static int __devinit atmel_aes_probe(struct platform_device *pdev)
{
	struct atmel_aes_dev *aes_dd;
	struct aes_platform_data *pdata;
	struct device *dev = &pdev->dev;
	struct resource *aes_res;
	unsigned long aes_phys_size;
	int err;

	pdata = pdev->dev.platform_data;
	if (!pdata) {
		err = -ENXIO;
		goto aes_dd_err;
	}

	aes_dd = kzalloc(sizeof(struct atmel_aes_dev), GFP_KERNEL);
	if (aes_dd == NULL) {
		dev_err(dev, "unable to alloc data struct.\n");
		err = -ENOMEM;
		goto aes_dd_err;
	}

	aes_dd->dev = dev;

	platform_set_drvdata(pdev, aes_dd);

	INIT_LIST_HEAD(&aes_dd->list);

	tasklet_init(&aes_dd->done_task, atmel_aes_done_task,
					(unsigned long)aes_dd);
	tasklet_init(&aes_dd->queue_task, atmel_aes_queue_task,
					(unsigned long)aes_dd);

	crypto_init_queue(&aes_dd->queue, ATMEL_AES_QUEUE_LENGTH);

	aes_dd->irq = -1;

	/* Get the base address */
	aes_res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
	if (!aes_res) {
		dev_err(dev, "no MEM resource info\n");
		err = -ENODEV;
		goto res_err;
	}
	aes_dd->phys_base = aes_res->start;
	aes_phys_size = resource_size(aes_res);

	/* Get the IRQ */
	aes_dd->irq = platform_get_irq(pdev, 0);
	if (aes_dd->irq < 0) {
		dev_err(dev, "no IRQ resource info\n");
		err = aes_dd->irq;
		goto aes_irq_err;
	}

	err = request_irq(aes_dd->irq, atmel_aes_irq, IRQF_SHARED, "atmel-aes",
						aes_dd);
	if (err) {
		dev_err(dev, "unable to request aes irq.\n");
		goto aes_irq_err;
	}

	/* Initializing the clock */
	aes_dd->iclk = clk_get(&pdev->dev, NULL);
	if (IS_ERR(aes_dd->iclk)) {
		dev_err(dev, "clock initialization failed.\n");
		err = PTR_ERR(aes_dd->iclk);
		goto clk_err;
	}

	aes_dd->io_base = ioremap(aes_dd->phys_base, aes_phys_size);
	if (!aes_dd->io_base) {
		dev_err(dev, "can't ioremap\n");
		err = -ENOMEM;
		goto aes_io_err;
	}

	err = atmel_aes_dma_init(aes_dd);
	if (err)
		goto err_aes_dma;

	spin_lock(&atmel_aes.lock);
	list_add_tail(&aes_dd->list, &atmel_aes.dev_list);
	spin_unlock(&atmel_aes.lock);

	err = atmel_aes_register_algs(aes_dd);
	if (err)
		goto err_algs;

	dev_info(dev, "Atmel AES\n");

	return 0;

err_algs:
	spin_lock(&atmel_aes.lock);
	list_del(&aes_dd->list);
	spin_unlock(&atmel_aes.lock);
	atmel_aes_dma_cleanup(aes_dd);
err_aes_dma:
	iounmap(aes_dd->io_base);
aes_io_err:
	clk_put(aes_dd->iclk);
clk_err:
	free_irq(aes_dd->irq, aes_dd);
aes_irq_err:
res_err:
	tasklet_kill(&aes_dd->done_task);
	tasklet_kill(&aes_dd->queue_task);
	kfree(aes_dd);
	aes_dd = NULL;
aes_dd_err:
	dev_err(dev, "initialization failed.\n");

	return err;
}
static int __devexit atmel_aes_remove(struct platform_device *pdev)
{
	static struct atmel_aes_dev *aes_dd;

	aes_dd = platform_get_drvdata(pdev);
	if (!aes_dd)
		return -ENODEV;

	spin_lock(&atmel_aes.lock);
	list_del(&aes_dd->list);
	spin_unlock(&atmel_aes.lock);

	atmel_aes_unregister_algs(aes_dd);

	tasklet_kill(&aes_dd->done_task);
	tasklet_kill(&aes_dd->queue_task);

	atmel_aes_dma_cleanup(aes_dd);

	iounmap(aes_dd->io_base);

	clk_put(aes_dd->iclk);

	if (aes_dd->irq > 0)
		free_irq(aes_dd->irq, aes_dd);

	kfree(aes_dd);
	aes_dd = NULL;

	return 0;
}
static struct platform_driver atmel_aes_driver = {
	.probe		= atmel_aes_probe,
	.remove		= __devexit_p(atmel_aes_remove),
	.driver		= {
		.name	= "atmel_aes",
		.owner	= THIS_MODULE,
	},
};

module_platform_driver(atmel_aes_driver);

MODULE_DESCRIPTION("Atmel AES hw acceleration support.");
MODULE_LICENSE("GPL v2");
MODULE_AUTHOR("Nicolas Royer - Eukréa Electromatique");