// SPDX-License-Identifier: GPL-2.0
/*
 * Support for ATMEL AES HW acceleration.
 *
 * Copyright (c) 2012 Eukréa Electromatique - ATMEL
 * Author: Nicolas Royer <nicolas@eukrea.com>
 *
 * Some ideas are from omap-aes.c driver.
 */

#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/slab.h>
#include <linux/err.h>
#include <linux/clk.h>
#include <linux/hw_random.h>
#include <linux/platform_device.h>
#include <linux/device.h>
#include <linux/init.h>
#include <linux/errno.h>
#include <linux/interrupt.h>
#include <linux/irq.h>
#include <linux/scatterlist.h>
#include <linux/dma-mapping.h>
#include <linux/of_device.h>
#include <linux/delay.h>
#include <linux/crypto.h>
#include <crypto/scatterwalk.h>
#include <crypto/algapi.h>
#include <crypto/aes.h>
#include <crypto/gcm.h>
#include <crypto/xts.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/skcipher.h>
#include <linux/platform_data/crypto-atmel.h>
#include <dt-bindings/dma/at91.h>
#include "atmel-aes-regs.h"
#include "atmel-authenc.h"
#define ATMEL_AES_PRIORITY	300

#define ATMEL_AES_BUFFER_ORDER	2
#define ATMEL_AES_BUFFER_SIZE	(PAGE_SIZE << ATMEL_AES_BUFFER_ORDER)

#define CFB8_BLOCK_SIZE		1
#define CFB16_BLOCK_SIZE	2
#define CFB32_BLOCK_SIZE	4
#define CFB64_BLOCK_SIZE	8

#define SIZE_IN_WORDS(x)	((x) >> 2)
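
/*
 * Worked example (added note): SIZE_IN_WORDS() converts a byte count into
 * a count of 32-bit words, so SIZE_IN_WORDS(AES_BLOCK_SIZE) = 16 >> 2 = 4,
 * i.e. one AES block spans four consecutive IDATAR/ODATAR/KEYWR registers.
 */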
/* AES flags */
/* Reserve bits [18:16] [14:12] [1:0] for mode (same as for AES_MR) */
#define AES_FLAGS_ENCRYPT	AES_MR_CYPHER_ENC
#define AES_FLAGS_GTAGEN	AES_MR_GTAGEN
#define AES_FLAGS_OPMODE_MASK	(AES_MR_OPMOD_MASK | AES_MR_CFBS_MASK)
#define AES_FLAGS_ECB		AES_MR_OPMOD_ECB
#define AES_FLAGS_CBC		AES_MR_OPMOD_CBC
#define AES_FLAGS_OFB		AES_MR_OPMOD_OFB
#define AES_FLAGS_CFB128	(AES_MR_OPMOD_CFB | AES_MR_CFBS_128b)
#define AES_FLAGS_CFB64		(AES_MR_OPMOD_CFB | AES_MR_CFBS_64b)
#define AES_FLAGS_CFB32		(AES_MR_OPMOD_CFB | AES_MR_CFBS_32b)
#define AES_FLAGS_CFB16		(AES_MR_OPMOD_CFB | AES_MR_CFBS_16b)
#define AES_FLAGS_CFB8		(AES_MR_OPMOD_CFB | AES_MR_CFBS_8b)
#define AES_FLAGS_CTR		AES_MR_OPMOD_CTR
#define AES_FLAGS_GCM		AES_MR_OPMOD_GCM
#define AES_FLAGS_XTS		AES_MR_OPMOD_XTS

#define AES_FLAGS_MODE_MASK	(AES_FLAGS_OPMODE_MASK |	\
				 AES_FLAGS_ENCRYPT |		\
				 AES_FLAGS_GTAGEN)

#define AES_FLAGS_BUSY		BIT(3)
#define AES_FLAGS_DUMP_REG	BIT(4)
#define AES_FLAGS_OWN_SHA	BIT(5)

#define AES_FLAGS_PERSISTENT	AES_FLAGS_BUSY

#define ATMEL_AES_QUEUE_LENGTH	50

#define ATMEL_AES_DMA_THRESHOLD		256
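
/*
 * Illustrative example (added note, not part of the original driver):
 * because the mode flags reuse the AES_MR bit layout, a CBC-decrypt
 * request simply stores rctx->mode = AES_FLAGS_CBC, and CBC-encrypt
 * stores rctx->mode = AES_FLAGS_CBC | AES_FLAGS_ENCRYPT. Later,
 * atmel_aes_write_ctrl_key() can OR (dd->flags & AES_FLAGS_MODE_MASK)
 * straight into the Mode Register without any translation step.
 */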

struct atmel_aes_caps {
	bool	has_dualbuff;
	bool	has_cfb64;
	bool	has_ctr32;
	bool	has_gcm;
	bool	has_xts;
	bool	has_authenc;
	u32	max_burst_size;
};

struct atmel_aes_dev;

typedef int (*atmel_aes_fn_t)(struct atmel_aes_dev *);

struct atmel_aes_base_ctx {
	struct atmel_aes_dev	*dd;
	atmel_aes_fn_t		start;
	int			keylen;
	u32			key[AES_KEYSIZE_256 / sizeof(u32)];
	u16			block_size;
	bool			is_aead;
};

struct atmel_aes_ctx {
	struct atmel_aes_base_ctx	base;
};

struct atmel_aes_ctr_ctx {
	struct atmel_aes_base_ctx	base;

	__be32			iv[AES_BLOCK_SIZE / sizeof(u32)];
	size_t			offset;
	struct scatterlist	src[2];
	struct scatterlist	dst[2];
};

struct atmel_aes_gcm_ctx {
	struct atmel_aes_base_ctx	base;

	struct scatterlist	src[2];
	struct scatterlist	dst[2];

	__be32			j0[AES_BLOCK_SIZE / sizeof(u32)];
	u32			tag[AES_BLOCK_SIZE / sizeof(u32)];
	__be32			ghash[AES_BLOCK_SIZE / sizeof(u32)];

	size_t			textlen;

	const __be32		*ghash_in;
	__be32			*ghash_out;
	atmel_aes_fn_t		ghash_resume;
};

struct atmel_aes_xts_ctx {
	struct atmel_aes_base_ctx	base;

	u32			key2[AES_KEYSIZE_256 / sizeof(u32)];
};

#if IS_ENABLED(CONFIG_CRYPTO_DEV_ATMEL_AUTHENC)
struct atmel_aes_authenc_ctx {
	struct atmel_aes_base_ctx	base;
	struct atmel_sha_authenc_ctx	*auth;
};
#endif

struct atmel_aes_reqctx {
	unsigned long		mode;
	u8			lastc[AES_BLOCK_SIZE];
};

#if IS_ENABLED(CONFIG_CRYPTO_DEV_ATMEL_AUTHENC)
struct atmel_aes_authenc_reqctx {
	struct atmel_aes_reqctx	base;

	struct scatterlist	src[2];
	struct scatterlist	dst[2];
	size_t			textlen;
	u32			digest[SHA512_DIGEST_SIZE / sizeof(u32)];

	/* auth_req MUST be placed last. */
	struct ahash_request	auth_req;
};
#endif

struct atmel_aes_dma {
	struct dma_chan		*chan;
	struct scatterlist	*sg;
	int			nents;
	unsigned int		remainder;
	unsigned int		sg_len;
};

struct atmel_aes_dev {
	struct list_head	list;
	unsigned long		phys_base;
	void __iomem		*io_base;

	struct crypto_async_request	*areq;
	struct atmel_aes_base_ctx	*ctx;

	bool			is_async;
	atmel_aes_fn_t		resume;
	atmel_aes_fn_t		cpu_transfer_complete;

	struct device		*dev;
	struct clk		*iclk;
	int			irq;

	unsigned long		flags;

	spinlock_t		lock;
	struct crypto_queue	queue;

	struct tasklet_struct	done_task;
	struct tasklet_struct	queue_task;

	size_t			total;
	size_t			datalen;
	u32			*data;

	struct atmel_aes_dma	src;
	struct atmel_aes_dma	dst;

	size_t			buflen;
	void			*buf;
	struct scatterlist	aligned_sg;
	struct scatterlist	*real_dst;

	struct atmel_aes_caps	caps;

	u32			hw_version;
};

struct atmel_aes_drv {
	struct list_head	dev_list;
	spinlock_t		lock;
};

static struct atmel_aes_drv atmel_aes = {
	.dev_list = LIST_HEAD_INIT(atmel_aes.dev_list),
	.lock = __SPIN_LOCK_UNLOCKED(atmel_aes.lock),
};

#ifdef VERBOSE_DEBUG
static const char *atmel_aes_reg_name(u32 offset, char *tmp, size_t sz)
{
	switch (offset) {
	case AES_CR:
		return "CR";

	case AES_MR:
		return "MR";

	case AES_ISR:
		return "ISR";

	case AES_IMR:
		return "IMR";

	case AES_IER:
		return "IER";

	case AES_IDR:
		return "IDR";

	case AES_KEYWR(0) ... AES_KEYWR(7):
		snprintf(tmp, sz, "KEYWR[%u]", (offset - AES_KEYWR(0)) >> 2);
		break;

	case AES_IDATAR(0) ... AES_IDATAR(3):
		snprintf(tmp, sz, "IDATAR[%u]", (offset - AES_IDATAR(0)) >> 2);
		break;

	case AES_ODATAR(0) ... AES_ODATAR(3):
		snprintf(tmp, sz, "ODATAR[%u]", (offset - AES_ODATAR(0)) >> 2);
		break;

	case AES_IVR(0) ... AES_IVR(3):
		snprintf(tmp, sz, "IVR[%u]", (offset - AES_IVR(0)) >> 2);
		break;

	case AES_AADLENR:
		return "AADLENR";

	case AES_CLENR:
		return "CLENR";

	case AES_GHASHR(0) ... AES_GHASHR(3):
		snprintf(tmp, sz, "GHASHR[%u]", (offset - AES_GHASHR(0)) >> 2);
		break;

	case AES_TAGR(0) ... AES_TAGR(3):
		snprintf(tmp, sz, "TAGR[%u]", (offset - AES_TAGR(0)) >> 2);
		break;

	case AES_CTRR:
		return "CTRR";

	case AES_GCMHR(0) ... AES_GCMHR(3):
		snprintf(tmp, sz, "GCMHR[%u]", (offset - AES_GCMHR(0)) >> 2);
		break;

	case AES_EMR:
		return "EMR";

	case AES_TWR(0) ... AES_TWR(3):
		snprintf(tmp, sz, "TWR[%u]", (offset - AES_TWR(0)) >> 2);
		break;

	case AES_ALPHAR(0) ... AES_ALPHAR(3):
		snprintf(tmp, sz, "ALPHAR[%u]", (offset - AES_ALPHAR(0)) >> 2);
		break;

	case AES_HW_VERSION:
		return "HW_VERSION";

	default:
		snprintf(tmp, sz, "0x%02x", offset);
		break;
	}

	return tmp;
}
#endif /* VERBOSE_DEBUG */

/* Shared functions */

static inline u32 atmel_aes_read(struct atmel_aes_dev *dd, u32 offset)
{
	u32 value = readl_relaxed(dd->io_base + offset);

#ifdef VERBOSE_DEBUG
	if (dd->flags & AES_FLAGS_DUMP_REG) {
		char tmp[16];

		dev_vdbg(dd->dev, "read 0x%08x from %s\n", value,
			 atmel_aes_reg_name(offset, tmp, sizeof(tmp)));
	}
#endif /* VERBOSE_DEBUG */

	return value;
}

static inline void atmel_aes_write(struct atmel_aes_dev *dd,
					u32 offset, u32 value)
{
#ifdef VERBOSE_DEBUG
	if (dd->flags & AES_FLAGS_DUMP_REG) {
		char tmp[16];

		dev_vdbg(dd->dev, "write 0x%08x into %s\n", value,
			 atmel_aes_reg_name(offset, tmp, sizeof(tmp)));
	}
#endif /* VERBOSE_DEBUG */

	writel_relaxed(value, dd->io_base + offset);
}

static void atmel_aes_read_n(struct atmel_aes_dev *dd, u32 offset,
			     u32 *value, int count)
{
	for (; count--; value++, offset += 4)
		*value = atmel_aes_read(dd, offset);
}

static void atmel_aes_write_n(struct atmel_aes_dev *dd, u32 offset,
			      const u32 *value, int count)
{
	for (; count--; value++, offset += 4)
		atmel_aes_write(dd, offset, *value);
}

static inline void atmel_aes_read_block(struct atmel_aes_dev *dd, u32 offset,
					void *value)
{
	atmel_aes_read_n(dd, offset, value, SIZE_IN_WORDS(AES_BLOCK_SIZE));
}

static inline void atmel_aes_write_block(struct atmel_aes_dev *dd, u32 offset,
					 const void *value)
{
	atmel_aes_write_n(dd, offset, value, SIZE_IN_WORDS(AES_BLOCK_SIZE));
}

static inline int atmel_aes_wait_for_data_ready(struct atmel_aes_dev *dd,
						atmel_aes_fn_t resume)
{
	u32 isr = atmel_aes_read(dd, AES_ISR);

	if (unlikely(isr & AES_INT_DATARDY))
		return resume(dd);

	dd->resume = resume;
	atmel_aes_write(dd, AES_IER, AES_INT_DATARDY);
	return -EINPROGRESS;
}

static inline size_t atmel_aes_padlen(size_t len, size_t block_size)
{
	len &= block_size - 1;
	return len ? block_size - len : 0;
}
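
/*
 * Worked example (added note): atmel_aes_padlen(20, 16) masks len down to
 * 20 & 15 = 4 and returns 16 - 4 = 12, the number of padding bytes needed
 * to round a 20-byte request up to a whole AES block; a block-aligned
 * length (e.g. 32) returns 0.
 */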

static struct atmel_aes_dev *atmel_aes_find_dev(struct atmel_aes_base_ctx *ctx)
{
	struct atmel_aes_dev *aes_dd = NULL;
	struct atmel_aes_dev *tmp;

	spin_lock_bh(&atmel_aes.lock);
	if (!ctx->dd) {
		list_for_each_entry(tmp, &atmel_aes.dev_list, list) {
			aes_dd = tmp;
			break;
		}
		ctx->dd = aes_dd;
	} else {
		aes_dd = ctx->dd;
	}
	spin_unlock_bh(&atmel_aes.lock);

	return aes_dd;
}

static int atmel_aes_hw_init(struct atmel_aes_dev *dd)
{
	int err;

	err = clk_enable(dd->iclk);
	if (err)
		return err;

	atmel_aes_write(dd, AES_CR, AES_CR_SWRST);
	atmel_aes_write(dd, AES_MR, 0xE << AES_MR_CKEY_OFFSET);

	return 0;
}

static inline unsigned int atmel_aes_get_version(struct atmel_aes_dev *dd)
{
	return atmel_aes_read(dd, AES_HW_VERSION) & 0x00000fff;
}

static int atmel_aes_hw_version_init(struct atmel_aes_dev *dd)
{
	int err;

	err = atmel_aes_hw_init(dd);
	if (err)
		return err;

	dd->hw_version = atmel_aes_get_version(dd);

	dev_info(dd->dev, "version: 0x%x\n", dd->hw_version);

	clk_disable(dd->iclk);
	return 0;
}

static inline void atmel_aes_set_mode(struct atmel_aes_dev *dd,
				      const struct atmel_aes_reqctx *rctx)
{
	/* Clear all but persistent flags and set request flags. */
	dd->flags = (dd->flags & AES_FLAGS_PERSISTENT) | rctx->mode;
}

static inline bool atmel_aes_is_encrypt(const struct atmel_aes_dev *dd)
{
	return (dd->flags & AES_FLAGS_ENCRYPT);
}

#if IS_ENABLED(CONFIG_CRYPTO_DEV_ATMEL_AUTHENC)
static void atmel_aes_authenc_complete(struct atmel_aes_dev *dd, int err);
#endif

static void atmel_aes_set_iv_as_last_ciphertext_block(struct atmel_aes_dev *dd)
{
	struct skcipher_request *req = skcipher_request_cast(dd->areq);
	struct atmel_aes_reqctx *rctx = skcipher_request_ctx(req);
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
	unsigned int ivsize = crypto_skcipher_ivsize(skcipher);

	if (req->cryptlen < ivsize)
		return;

	if (rctx->mode & AES_FLAGS_ENCRYPT) {
		scatterwalk_map_and_copy(req->iv, req->dst,
					 req->cryptlen - ivsize, ivsize, 0);
	} else {
		if (req->src == req->dst)
			memcpy(req->iv, rctx->lastc, ivsize);
		else
			scatterwalk_map_and_copy(req->iv, req->src,
						 req->cryptlen - ivsize,
						 ivsize, 0);
	}
}

static inline int atmel_aes_complete(struct atmel_aes_dev *dd, int err)
{
#if IS_ENABLED(CONFIG_CRYPTO_DEV_ATMEL_AUTHENC)
	if (dd->ctx->is_aead)
		atmel_aes_authenc_complete(dd, err);
#endif

	clk_disable(dd->iclk);
	dd->flags &= ~AES_FLAGS_BUSY;

	if (!dd->ctx->is_aead)
		atmel_aes_set_iv_as_last_ciphertext_block(dd);

	if (dd->is_async)
		dd->areq->complete(dd->areq, err);

	tasklet_schedule(&dd->queue_task);

	return err;
}

static void atmel_aes_write_ctrl_key(struct atmel_aes_dev *dd, bool use_dma,
				     const __be32 *iv, const u32 *key, int keylen)
{
	u32 valmr = 0;

	/* MR register must be set before IV registers */
	if (keylen == AES_KEYSIZE_128)
		valmr |= AES_MR_KEYSIZE_128;
	else if (keylen == AES_KEYSIZE_192)
		valmr |= AES_MR_KEYSIZE_192;
	else
		valmr |= AES_MR_KEYSIZE_256;

	valmr |= dd->flags & AES_FLAGS_MODE_MASK;

	if (use_dma) {
		valmr |= AES_MR_SMOD_IDATAR0;
		if (dd->caps.has_dualbuff)
			valmr |= AES_MR_DUALBUFF;
	} else {
		valmr |= AES_MR_SMOD_AUTO;
	}

	atmel_aes_write(dd, AES_MR, valmr);

	atmel_aes_write_n(dd, AES_KEYWR(0), key, SIZE_IN_WORDS(keylen));

	if (iv && (valmr & AES_MR_OPMOD_MASK) != AES_MR_OPMOD_ECB)
		atmel_aes_write_block(dd, AES_IVR(0), iv);
}

static inline void atmel_aes_write_ctrl(struct atmel_aes_dev *dd, bool use_dma,
					const __be32 *iv)
{
	atmel_aes_write_ctrl_key(dd, use_dma, iv,
				 dd->ctx->key, dd->ctx->keylen);
}

static int atmel_aes_cpu_transfer(struct atmel_aes_dev *dd)
{
	int err = 0;
	u32 isr;

	for (;;) {
		atmel_aes_read_block(dd, AES_ODATAR(0), dd->data);
		dd->data += 4;
		dd->datalen -= AES_BLOCK_SIZE;

		if (dd->datalen < AES_BLOCK_SIZE)
			break;

		atmel_aes_write_block(dd, AES_IDATAR(0), dd->data);

		isr = atmel_aes_read(dd, AES_ISR);
		if (!(isr & AES_INT_DATARDY)) {
			dd->resume = atmel_aes_cpu_transfer;
			atmel_aes_write(dd, AES_IER, AES_INT_DATARDY);
			return -EINPROGRESS;
		}
	}

	if (!sg_copy_from_buffer(dd->real_dst, sg_nents(dd->real_dst),
				 dd->buf, dd->total))
		err = -EINVAL;

	if (err)
		return atmel_aes_complete(dd, err);

	return dd->cpu_transfer_complete(dd);
}

static int atmel_aes_cpu_start(struct atmel_aes_dev *dd,
			       struct scatterlist *src,
			       struct scatterlist *dst,
			       size_t len,
			       atmel_aes_fn_t resume)
{
	size_t padlen = atmel_aes_padlen(len, AES_BLOCK_SIZE);

	if (unlikely(len == 0))
		return -EINVAL;

	sg_copy_to_buffer(src, sg_nents(src), dd->buf, len);

	dd->total = len;
	dd->real_dst = dst;
	dd->cpu_transfer_complete = resume;
	dd->datalen = len + padlen;
	dd->data = (u32 *)dd->buf;
	atmel_aes_write_block(dd, AES_IDATAR(0), dd->data);
	return atmel_aes_wait_for_data_ready(dd, atmel_aes_cpu_transfer);
}

static void atmel_aes_dma_callback(void *data);

static bool atmel_aes_check_aligned(struct atmel_aes_dev *dd,
				    struct scatterlist *sg,
				    size_t len,
				    struct atmel_aes_dma *dma)
{
	int nents;

	if (!IS_ALIGNED(len, dd->ctx->block_size))
		return false;

	for (nents = 0; sg; sg = sg_next(sg), ++nents) {
		if (!IS_ALIGNED(sg->offset, sizeof(u32)))
			return false;

		if (len <= sg->length) {
			if (!IS_ALIGNED(len, dd->ctx->block_size))
				return false;

			dma->nents = nents+1;
			dma->remainder = sg->length - len;
			sg->length = len;
			return true;
		}

		if (!IS_ALIGNED(sg->length, dd->ctx->block_size))
			return false;

		len -= sg->length;
	}

	return false;
}

static inline void atmel_aes_restore_sg(const struct atmel_aes_dma *dma)
{
	struct scatterlist *sg = dma->sg;
	int nents = dma->nents;

	if (!dma->remainder)
		return;

	while (--nents > 0 && sg)
		sg = sg_next(sg);

	if (!sg)
		return;

	sg->length += dma->remainder;
}
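
/*
 * Worked example (added note): with a single 256-byte scatterlist entry
 * and len = 192, atmel_aes_check_aligned() trims sg->length to 192 and
 * records dma->remainder = 64; once the DMA transfer completes,
 * atmel_aes_restore_sg() adds the 64 bytes back so the caller's
 * scatterlist is returned unmodified.
 */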

static int atmel_aes_map(struct atmel_aes_dev *dd,
			 struct scatterlist *src,
			 struct scatterlist *dst,
			 size_t len)
{
	bool src_aligned, dst_aligned;
	size_t padlen;

	dd->total = len;
	dd->src.sg = src;
	dd->dst.sg = dst;
	dd->real_dst = dst;

	src_aligned = atmel_aes_check_aligned(dd, src, len, &dd->src);
	if (src == dst)
		dst_aligned = src_aligned;
	else
		dst_aligned = atmel_aes_check_aligned(dd, dst, len, &dd->dst);
	if (!src_aligned || !dst_aligned) {
		padlen = atmel_aes_padlen(len, dd->ctx->block_size);

		if (dd->buflen < len + padlen)
			return -ENOMEM;

		if (!src_aligned) {
			sg_copy_to_buffer(src, sg_nents(src), dd->buf, len);
			dd->src.sg = &dd->aligned_sg;
			dd->src.nents = 1;
			dd->src.remainder = 0;
		}

		if (!dst_aligned) {
			dd->dst.sg = &dd->aligned_sg;
			dd->dst.nents = 1;
			dd->dst.remainder = 0;
		}

		sg_init_table(&dd->aligned_sg, 1);
		sg_set_buf(&dd->aligned_sg, dd->buf, len + padlen);
	}

	if (dd->src.sg == dd->dst.sg) {
		dd->src.sg_len = dma_map_sg(dd->dev, dd->src.sg, dd->src.nents,
					    DMA_BIDIRECTIONAL);
		dd->dst.sg_len = dd->src.sg_len;
		if (!dd->src.sg_len)
			return -EFAULT;
	} else {
		dd->src.sg_len = dma_map_sg(dd->dev, dd->src.sg, dd->src.nents,
					    DMA_TO_DEVICE);
		if (!dd->src.sg_len)
			return -EFAULT;

		dd->dst.sg_len = dma_map_sg(dd->dev, dd->dst.sg, dd->dst.nents,
					    DMA_FROM_DEVICE);
		if (!dd->dst.sg_len) {
			dma_unmap_sg(dd->dev, dd->src.sg, dd->src.nents,
				     DMA_TO_DEVICE);
			return -EFAULT;
		}
	}

	return 0;
}

static void atmel_aes_unmap(struct atmel_aes_dev *dd)
{
	if (dd->src.sg == dd->dst.sg) {
		dma_unmap_sg(dd->dev, dd->src.sg, dd->src.nents,
			     DMA_BIDIRECTIONAL);

		if (dd->src.sg != &dd->aligned_sg)
			atmel_aes_restore_sg(&dd->src);
	} else {
		dma_unmap_sg(dd->dev, dd->dst.sg, dd->dst.nents,
			     DMA_FROM_DEVICE);

		if (dd->dst.sg != &dd->aligned_sg)
			atmel_aes_restore_sg(&dd->dst);

		dma_unmap_sg(dd->dev, dd->src.sg, dd->src.nents,
			     DMA_TO_DEVICE);

		if (dd->src.sg != &dd->aligned_sg)
			atmel_aes_restore_sg(&dd->src);
	}

	if (dd->dst.sg == &dd->aligned_sg)
		sg_copy_from_buffer(dd->real_dst, sg_nents(dd->real_dst),
				    dd->buf, dd->total);
}

static int atmel_aes_dma_transfer_start(struct atmel_aes_dev *dd,
					enum dma_slave_buswidth addr_width,
					enum dma_transfer_direction dir,
					u32 maxburst)
{
	struct dma_async_tx_descriptor *desc;
	struct dma_slave_config config;
	dma_async_tx_callback callback;
	struct atmel_aes_dma *dma;
	int err;

	memset(&config, 0, sizeof(config));
	config.direction = dir;
	config.src_addr_width = addr_width;
	config.dst_addr_width = addr_width;
	config.src_maxburst = maxburst;
	config.dst_maxburst = maxburst;

	switch (dir) {
	case DMA_MEM_TO_DEV:
		dma = &dd->src;
		callback = NULL;
		config.dst_addr = dd->phys_base + AES_IDATAR(0);
		break;

	case DMA_DEV_TO_MEM:
		dma = &dd->dst;
		callback = atmel_aes_dma_callback;
		config.src_addr = dd->phys_base + AES_ODATAR(0);
		break;

	default:
		return -EINVAL;
	}

	err = dmaengine_slave_config(dma->chan, &config);
	if (err)
		return err;

	desc = dmaengine_prep_slave_sg(dma->chan, dma->sg, dma->sg_len, dir,
				       DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
	if (!desc)
		return -ENOMEM;

	desc->callback = callback;
	desc->callback_param = dd;
	dmaengine_submit(desc);
	dma_async_issue_pending(dma->chan);

	return 0;
}
*dd
,
834 enum dma_transfer_direction dir
)
836 struct atmel_aes_dma
*dma
;
851 dmaengine_terminate_all(dma
->chan
);

static int atmel_aes_dma_start(struct atmel_aes_dev *dd,
			       struct scatterlist *src,
			       struct scatterlist *dst,
			       size_t len,
			       atmel_aes_fn_t resume)
{
	enum dma_slave_buswidth addr_width;
	u32 maxburst;
	int err;

	switch (dd->ctx->block_size) {
	case CFB8_BLOCK_SIZE:
		addr_width = DMA_SLAVE_BUSWIDTH_1_BYTE;
		maxburst = 1;
		break;

	case CFB16_BLOCK_SIZE:
		addr_width = DMA_SLAVE_BUSWIDTH_2_BYTES;
		maxburst = 1;
		break;

	case CFB32_BLOCK_SIZE:
	case CFB64_BLOCK_SIZE:
		addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES;
		maxburst = 1;
		break;

	case AES_BLOCK_SIZE:
		addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES;
		maxburst = dd->caps.max_burst_size;
		break;

	default:
		err = -EINVAL;
		goto exit;
	}

	err = atmel_aes_map(dd, src, dst, len);
	if (err)
		goto exit;

	dd->resume = resume;

	/* Set output DMA transfer first */
	err = atmel_aes_dma_transfer_start(dd, addr_width, DMA_DEV_TO_MEM,
					   maxburst);
	if (err)
		goto unmap;

	/* Then set input DMA transfer */
	err = atmel_aes_dma_transfer_start(dd, addr_width, DMA_MEM_TO_DEV,
					   maxburst);
	if (err)
		goto output_transfer_stop;

	return -EINPROGRESS;

output_transfer_stop:
	atmel_aes_dma_transfer_stop(dd, DMA_DEV_TO_MEM);
unmap:
	atmel_aes_unmap(dd);
exit:
	return atmel_aes_complete(dd, err);
}

static void atmel_aes_dma_stop(struct atmel_aes_dev *dd)
{
	atmel_aes_dma_transfer_stop(dd, DMA_MEM_TO_DEV);
	atmel_aes_dma_transfer_stop(dd, DMA_DEV_TO_MEM);
	atmel_aes_unmap(dd);
}

static void atmel_aes_dma_callback(void *data)
{
	struct atmel_aes_dev *dd = data;

	atmel_aes_dma_stop(dd);
	dd->is_async = true;
	(void)dd->resume(dd);
}

static int atmel_aes_handle_queue(struct atmel_aes_dev *dd,
				  struct crypto_async_request *new_areq)
{
	struct crypto_async_request *areq, *backlog;
	struct atmel_aes_base_ctx *ctx;
	unsigned long flags;
	bool start_async;
	int err, ret = 0;

	spin_lock_irqsave(&dd->lock, flags);
	if (new_areq)
		ret = crypto_enqueue_request(&dd->queue, new_areq);
	if (dd->flags & AES_FLAGS_BUSY) {
		spin_unlock_irqrestore(&dd->lock, flags);
		return ret;
	}
	backlog = crypto_get_backlog(&dd->queue);
	areq = crypto_dequeue_request(&dd->queue);
	if (areq)
		dd->flags |= AES_FLAGS_BUSY;
	spin_unlock_irqrestore(&dd->lock, flags);

	if (!areq)
		return ret;

	if (backlog)
		backlog->complete(backlog, -EINPROGRESS);

	ctx = crypto_tfm_ctx(areq->tfm);

	dd->areq = areq;
	dd->ctx = ctx;
	start_async = (areq != new_areq);
	dd->is_async = start_async;

	/* WARNING: ctx->start() MAY change dd->is_async. */
	err = ctx->start(dd);
	return (start_async) ? ret : err;
}
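
/*
 * Illustrative note on the return contract (wording is mine, behavior is
 * the code above): when the engine is idle, the new request is dequeued
 * immediately, areq == new_areq, start_async is false, and the caller
 * receives ctx->start()'s return value, typically -EINPROGRESS. When the
 * engine is busy, the request merely sits in the queue and the caller
 * receives the crypto_enqueue_request() status instead; the queue_task
 * tasklet later re-enters this function with new_areq == NULL.
 */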

/* AES async block ciphers */

static int atmel_aes_transfer_complete(struct atmel_aes_dev *dd)
{
	return atmel_aes_complete(dd, 0);
}

static int atmel_aes_start(struct atmel_aes_dev *dd)
{
	struct skcipher_request *req = skcipher_request_cast(dd->areq);
	struct atmel_aes_reqctx *rctx = skcipher_request_ctx(req);
	bool use_dma = (req->cryptlen >= ATMEL_AES_DMA_THRESHOLD ||
			dd->ctx->block_size != AES_BLOCK_SIZE);
	int err;

	atmel_aes_set_mode(dd, rctx);

	err = atmel_aes_hw_init(dd);
	if (err)
		return atmel_aes_complete(dd, err);

	atmel_aes_write_ctrl(dd, use_dma, (void *)req->iv);
	if (use_dma)
		return atmel_aes_dma_start(dd, req->src, req->dst,
					   req->cryptlen,
					   atmel_aes_transfer_complete);

	return atmel_aes_cpu_start(dd, req->src, req->dst, req->cryptlen,
				   atmel_aes_transfer_complete);
}

static inline struct atmel_aes_ctr_ctx *
atmel_aes_ctr_ctx_cast(struct atmel_aes_base_ctx *ctx)
{
	return container_of(ctx, struct atmel_aes_ctr_ctx, base);
}

static int atmel_aes_ctr_transfer(struct atmel_aes_dev *dd)
{
	struct atmel_aes_ctr_ctx *ctx = atmel_aes_ctr_ctx_cast(dd->ctx);
	struct skcipher_request *req = skcipher_request_cast(dd->areq);
	struct scatterlist *src, *dst;
	size_t datalen;
	u32 ctr, blocks;
	bool use_dma, fragmented = false;

	/* Check for transfer completion. */
	ctx->offset += dd->total;
	if (ctx->offset >= req->cryptlen)
		return atmel_aes_transfer_complete(dd);

	/* Compute data length. */
	datalen = req->cryptlen - ctx->offset;
	blocks = DIV_ROUND_UP(datalen, AES_BLOCK_SIZE);
	ctr = be32_to_cpu(ctx->iv[3]);
	if (dd->caps.has_ctr32) {
		/* Check 32bit counter overflow. */
		u32 start = ctr;
		u32 end = start + blocks - 1;

		if (end < start) {
			ctr |= 0xffffffff;
			datalen = AES_BLOCK_SIZE * -start;
			fragmented = true;
		}
	} else {
		/* Check 16bit counter overflow. */
		u16 start = ctr & 0xffff;
		u16 end = start + (u16)blocks - 1;

		if (blocks >> 16 || end < start) {
			ctr |= 0xffff;
			datalen = AES_BLOCK_SIZE * (0x10000 - start);
			fragmented = true;
		}
	}
	use_dma = (datalen >= ATMEL_AES_DMA_THRESHOLD);

	/* Jump to offset. */
	src = scatterwalk_ffwd(ctx->src, req->src, ctx->offset);
	dst = ((req->src == req->dst) ? src :
	       scatterwalk_ffwd(ctx->dst, req->dst, ctx->offset));

	/* Configure hardware. */
	atmel_aes_write_ctrl(dd, use_dma, ctx->iv);
	if (unlikely(fragmented)) {
		/*
		 * Increment the counter manually to cope with the hardware
		 * counter overflow.
		 */
		ctx->iv[3] = cpu_to_be32(ctr);
		crypto_inc((u8 *)ctx->iv, AES_BLOCK_SIZE);
	}

	if (use_dma)
		return atmel_aes_dma_start(dd, src, dst, datalen,
					   atmel_aes_ctr_transfer);

	return atmel_aes_cpu_start(dd, src, dst, datalen,
				   atmel_aes_ctr_transfer);
}
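
/*
 * Worked example (added note, 16bit counter case): if ctx->iv[3] ends in
 * 0xfffe and 4 blocks remain, end = 0xfffe + 4 - 1 wraps around, so
 * datalen is clamped to AES_BLOCK_SIZE * (0x10000 - 0xfffe), i.e. 2
 * blocks, and fragmented is set. The next atmel_aes_ctr_transfer() round
 * then carries the increment into ctx->iv[2] via crypto_inc() and
 * processes the remaining 2 blocks.
 */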

static int atmel_aes_ctr_start(struct atmel_aes_dev *dd)
{
	struct atmel_aes_ctr_ctx *ctx = atmel_aes_ctr_ctx_cast(dd->ctx);
	struct skcipher_request *req = skcipher_request_cast(dd->areq);
	struct atmel_aes_reqctx *rctx = skcipher_request_ctx(req);
	int err;

	atmel_aes_set_mode(dd, rctx);

	err = atmel_aes_hw_init(dd);
	if (err)
		return atmel_aes_complete(dd, err);

	memcpy(ctx->iv, req->iv, AES_BLOCK_SIZE);
	ctx->offset = 0;
	dd->total = 0;
	return atmel_aes_ctr_transfer(dd);
}

static int atmel_aes_crypt(struct skcipher_request *req, unsigned long mode)
{
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
	struct atmel_aes_base_ctx *ctx = crypto_skcipher_ctx(skcipher);
	struct atmel_aes_reqctx *rctx;
	struct atmel_aes_dev *dd;

	switch (mode & AES_FLAGS_OPMODE_MASK) {
	case AES_FLAGS_CFB8:
		ctx->block_size = CFB8_BLOCK_SIZE;
		break;

	case AES_FLAGS_CFB16:
		ctx->block_size = CFB16_BLOCK_SIZE;
		break;

	case AES_FLAGS_CFB32:
		ctx->block_size = CFB32_BLOCK_SIZE;
		break;

	case AES_FLAGS_CFB64:
		ctx->block_size = CFB64_BLOCK_SIZE;
		break;

	default:
		ctx->block_size = AES_BLOCK_SIZE;
		break;
	}
	ctx->is_aead = false;

	dd = atmel_aes_find_dev(ctx);
	if (!dd)
		return -ENODEV;

	rctx = skcipher_request_ctx(req);
	rctx->mode = mode;

	if (!(mode & AES_FLAGS_ENCRYPT) && (req->src == req->dst)) {
		unsigned int ivsize = crypto_skcipher_ivsize(skcipher);

		if (req->cryptlen >= ivsize)
			scatterwalk_map_and_copy(rctx->lastc, req->src,
						 req->cryptlen - ivsize,
						 ivsize, 0);
	}

	return atmel_aes_handle_queue(dd, &req->base);
}

static int atmel_aes_setkey(struct crypto_skcipher *tfm, const u8 *key,
			   unsigned int keylen)
{
	struct atmel_aes_base_ctx *ctx = crypto_skcipher_ctx(tfm);

	if (keylen != AES_KEYSIZE_128 &&
	    keylen != AES_KEYSIZE_192 &&
	    keylen != AES_KEYSIZE_256) {
		crypto_skcipher_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}

	memcpy(ctx->key, key, keylen);
	ctx->keylen = keylen;

	return 0;
}

static int atmel_aes_ecb_encrypt(struct skcipher_request *req)
{
	return atmel_aes_crypt(req, AES_FLAGS_ECB | AES_FLAGS_ENCRYPT);
}

static int atmel_aes_ecb_decrypt(struct skcipher_request *req)
{
	return atmel_aes_crypt(req, AES_FLAGS_ECB);
}

static int atmel_aes_cbc_encrypt(struct skcipher_request *req)
{
	return atmel_aes_crypt(req, AES_FLAGS_CBC | AES_FLAGS_ENCRYPT);
}

static int atmel_aes_cbc_decrypt(struct skcipher_request *req)
{
	return atmel_aes_crypt(req, AES_FLAGS_CBC);
}

static int atmel_aes_ofb_encrypt(struct skcipher_request *req)
{
	return atmel_aes_crypt(req, AES_FLAGS_OFB | AES_FLAGS_ENCRYPT);
}

static int atmel_aes_ofb_decrypt(struct skcipher_request *req)
{
	return atmel_aes_crypt(req, AES_FLAGS_OFB);
}

static int atmel_aes_cfb_encrypt(struct skcipher_request *req)
{
	return atmel_aes_crypt(req, AES_FLAGS_CFB128 | AES_FLAGS_ENCRYPT);
}

static int atmel_aes_cfb_decrypt(struct skcipher_request *req)
{
	return atmel_aes_crypt(req, AES_FLAGS_CFB128);
}

static int atmel_aes_cfb64_encrypt(struct skcipher_request *req)
{
	return atmel_aes_crypt(req, AES_FLAGS_CFB64 | AES_FLAGS_ENCRYPT);
}

static int atmel_aes_cfb64_decrypt(struct skcipher_request *req)
{
	return atmel_aes_crypt(req, AES_FLAGS_CFB64);
}

static int atmel_aes_cfb32_encrypt(struct skcipher_request *req)
{
	return atmel_aes_crypt(req, AES_FLAGS_CFB32 | AES_FLAGS_ENCRYPT);
}

static int atmel_aes_cfb32_decrypt(struct skcipher_request *req)
{
	return atmel_aes_crypt(req, AES_FLAGS_CFB32);
}

static int atmel_aes_cfb16_encrypt(struct skcipher_request *req)
{
	return atmel_aes_crypt(req, AES_FLAGS_CFB16 | AES_FLAGS_ENCRYPT);
}

static int atmel_aes_cfb16_decrypt(struct skcipher_request *req)
{
	return atmel_aes_crypt(req, AES_FLAGS_CFB16);
}

static int atmel_aes_cfb8_encrypt(struct skcipher_request *req)
{
	return atmel_aes_crypt(req, AES_FLAGS_CFB8 | AES_FLAGS_ENCRYPT);
}

static int atmel_aes_cfb8_decrypt(struct skcipher_request *req)
{
	return atmel_aes_crypt(req, AES_FLAGS_CFB8);
}

static int atmel_aes_ctr_encrypt(struct skcipher_request *req)
{
	return atmel_aes_crypt(req, AES_FLAGS_CTR | AES_FLAGS_ENCRYPT);
}

static int atmel_aes_ctr_decrypt(struct skcipher_request *req)
{
	return atmel_aes_crypt(req, AES_FLAGS_CTR);
}

static int atmel_aes_init_tfm(struct crypto_skcipher *tfm)
{
	struct atmel_aes_ctx *ctx = crypto_skcipher_ctx(tfm);

	crypto_skcipher_set_reqsize(tfm, sizeof(struct atmel_aes_reqctx));
	ctx->base.start = atmel_aes_start;

	return 0;
}

static int atmel_aes_ctr_init_tfm(struct crypto_skcipher *tfm)
{
	struct atmel_aes_ctx *ctx = crypto_skcipher_ctx(tfm);

	crypto_skcipher_set_reqsize(tfm, sizeof(struct atmel_aes_reqctx));
	ctx->base.start = atmel_aes_ctr_start;

	return 0;
}

static struct skcipher_alg aes_algs[] = {
{
	.base.cra_name		= "ecb(aes)",
	.base.cra_driver_name	= "atmel-ecb-aes",
	.base.cra_priority	= ATMEL_AES_PRIORITY,
	.base.cra_flags		= CRYPTO_ALG_ASYNC,
	.base.cra_blocksize	= AES_BLOCK_SIZE,
	.base.cra_ctxsize	= sizeof(struct atmel_aes_ctx),
	.base.cra_alignmask	= 0xf,
	.base.cra_module	= THIS_MODULE,

	.init			= atmel_aes_init_tfm,
	.min_keysize		= AES_MIN_KEY_SIZE,
	.max_keysize		= AES_MAX_KEY_SIZE,
	.setkey			= atmel_aes_setkey,
	.encrypt		= atmel_aes_ecb_encrypt,
	.decrypt		= atmel_aes_ecb_decrypt,
},
{
	.base.cra_name		= "cbc(aes)",
	.base.cra_driver_name	= "atmel-cbc-aes",
	.base.cra_priority	= ATMEL_AES_PRIORITY,
	.base.cra_flags		= CRYPTO_ALG_ASYNC,
	.base.cra_blocksize	= AES_BLOCK_SIZE,
	.base.cra_ctxsize	= sizeof(struct atmel_aes_ctx),
	.base.cra_alignmask	= 0xf,
	.base.cra_module	= THIS_MODULE,

	.init			= atmel_aes_init_tfm,
	.min_keysize		= AES_MIN_KEY_SIZE,
	.max_keysize		= AES_MAX_KEY_SIZE,
	.setkey			= atmel_aes_setkey,
	.encrypt		= atmel_aes_cbc_encrypt,
	.decrypt		= atmel_aes_cbc_decrypt,
	.ivsize			= AES_BLOCK_SIZE,
},
{
	.base.cra_name		= "ofb(aes)",
	.base.cra_driver_name	= "atmel-ofb-aes",
	.base.cra_priority	= ATMEL_AES_PRIORITY,
	.base.cra_flags		= CRYPTO_ALG_ASYNC,
	.base.cra_blocksize	= AES_BLOCK_SIZE,
	.base.cra_ctxsize	= sizeof(struct atmel_aes_ctx),
	.base.cra_alignmask	= 0xf,
	.base.cra_module	= THIS_MODULE,

	.init			= atmel_aes_init_tfm,
	.min_keysize		= AES_MIN_KEY_SIZE,
	.max_keysize		= AES_MAX_KEY_SIZE,
	.setkey			= atmel_aes_setkey,
	.encrypt		= atmel_aes_ofb_encrypt,
	.decrypt		= atmel_aes_ofb_decrypt,
	.ivsize			= AES_BLOCK_SIZE,
},
{
	.base.cra_name		= "cfb(aes)",
	.base.cra_driver_name	= "atmel-cfb-aes",
	.base.cra_priority	= ATMEL_AES_PRIORITY,
	.base.cra_flags		= CRYPTO_ALG_ASYNC,
	.base.cra_blocksize	= AES_BLOCK_SIZE,
	.base.cra_ctxsize	= sizeof(struct atmel_aes_ctx),
	.base.cra_alignmask	= 0xf,
	.base.cra_module	= THIS_MODULE,

	.init			= atmel_aes_init_tfm,
	.min_keysize		= AES_MIN_KEY_SIZE,
	.max_keysize		= AES_MAX_KEY_SIZE,
	.setkey			= atmel_aes_setkey,
	.encrypt		= atmel_aes_cfb_encrypt,
	.decrypt		= atmel_aes_cfb_decrypt,
	.ivsize			= AES_BLOCK_SIZE,
},
{
	.base.cra_name		= "cfb32(aes)",
	.base.cra_driver_name	= "atmel-cfb32-aes",
	.base.cra_priority	= ATMEL_AES_PRIORITY,
	.base.cra_flags		= CRYPTO_ALG_ASYNC,
	.base.cra_blocksize	= CFB32_BLOCK_SIZE,
	.base.cra_ctxsize	= sizeof(struct atmel_aes_ctx),
	.base.cra_alignmask	= 0xf,
	.base.cra_module	= THIS_MODULE,

	.init			= atmel_aes_init_tfm,
	.min_keysize		= AES_MIN_KEY_SIZE,
	.max_keysize		= AES_MAX_KEY_SIZE,
	.setkey			= atmel_aes_setkey,
	.encrypt		= atmel_aes_cfb32_encrypt,
	.decrypt		= atmel_aes_cfb32_decrypt,
	.ivsize			= AES_BLOCK_SIZE,
},
{
	.base.cra_name		= "cfb16(aes)",
	.base.cra_driver_name	= "atmel-cfb16-aes",
	.base.cra_priority	= ATMEL_AES_PRIORITY,
	.base.cra_flags		= CRYPTO_ALG_ASYNC,
	.base.cra_blocksize	= CFB16_BLOCK_SIZE,
	.base.cra_ctxsize	= sizeof(struct atmel_aes_ctx),
	.base.cra_alignmask	= 0xf,
	.base.cra_module	= THIS_MODULE,

	.init			= atmel_aes_init_tfm,
	.min_keysize		= AES_MIN_KEY_SIZE,
	.max_keysize		= AES_MAX_KEY_SIZE,
	.setkey			= atmel_aes_setkey,
	.encrypt		= atmel_aes_cfb16_encrypt,
	.decrypt		= atmel_aes_cfb16_decrypt,
	.ivsize			= AES_BLOCK_SIZE,
},
{
	.base.cra_name		= "cfb8(aes)",
	.base.cra_driver_name	= "atmel-cfb8-aes",
	.base.cra_priority	= ATMEL_AES_PRIORITY,
	.base.cra_flags		= CRYPTO_ALG_ASYNC,
	.base.cra_blocksize	= CFB8_BLOCK_SIZE,
	.base.cra_ctxsize	= sizeof(struct atmel_aes_ctx),
	.base.cra_alignmask	= 0xf,
	.base.cra_module	= THIS_MODULE,

	.init			= atmel_aes_init_tfm,
	.min_keysize		= AES_MIN_KEY_SIZE,
	.max_keysize		= AES_MAX_KEY_SIZE,
	.setkey			= atmel_aes_setkey,
	.encrypt		= atmel_aes_cfb8_encrypt,
	.decrypt		= atmel_aes_cfb8_decrypt,
	.ivsize			= AES_BLOCK_SIZE,
},
{
	.base.cra_name		= "ctr(aes)",
	.base.cra_driver_name	= "atmel-ctr-aes",
	.base.cra_priority	= ATMEL_AES_PRIORITY,
	.base.cra_flags		= CRYPTO_ALG_ASYNC,
	.base.cra_blocksize	= 1,
	.base.cra_ctxsize	= sizeof(struct atmel_aes_ctr_ctx),
	.base.cra_alignmask	= 0xf,
	.base.cra_module	= THIS_MODULE,

	.init			= atmel_aes_ctr_init_tfm,
	.min_keysize		= AES_MIN_KEY_SIZE,
	.max_keysize		= AES_MAX_KEY_SIZE,
	.setkey			= atmel_aes_setkey,
	.encrypt		= atmel_aes_ctr_encrypt,
	.decrypt		= atmel_aes_ctr_decrypt,
	.ivsize			= AES_BLOCK_SIZE,
},
};

static struct skcipher_alg aes_cfb64_alg = {
	.base.cra_name		= "cfb64(aes)",
	.base.cra_driver_name	= "atmel-cfb64-aes",
	.base.cra_priority	= ATMEL_AES_PRIORITY,
	.base.cra_flags		= CRYPTO_ALG_ASYNC,
	.base.cra_blocksize	= CFB64_BLOCK_SIZE,
	.base.cra_ctxsize	= sizeof(struct atmel_aes_ctx),
	.base.cra_alignmask	= 0xf,
	.base.cra_module	= THIS_MODULE,

	.init			= atmel_aes_init_tfm,
	.min_keysize		= AES_MIN_KEY_SIZE,
	.max_keysize		= AES_MAX_KEY_SIZE,
	.setkey			= atmel_aes_setkey,
	.encrypt		= atmel_aes_cfb64_encrypt,
	.decrypt		= atmel_aes_cfb64_decrypt,
	.ivsize			= AES_BLOCK_SIZE,
};
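
#if 0
/*
 * Usage sketch (illustrative only, not part of this driver): once the
 * skcipher algorithms above are registered, any kernel user can reach
 * them through the generic crypto API. This sketch assumes
 * <crypto/skcipher.h> is available and the scatterlist is provided by
 * the caller; the function name is hypothetical.
 */
static int example_cbc_aes_encrypt(const u8 *key, unsigned int keylen,
				   u8 *iv, struct scatterlist *sg,
				   unsigned int len)
{
	struct crypto_skcipher *tfm;
	struct skcipher_request *req;
	DECLARE_CRYPTO_WAIT(wait);
	int err;

	/* "cbc(aes)" resolves to "atmel-cbc-aes" when this driver has the
	 * highest priority among the registered implementations. */
	tfm = crypto_alloc_skcipher("cbc(aes)", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	err = crypto_skcipher_setkey(tfm, key, keylen);
	if (err)
		goto free_tfm;

	req = skcipher_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		err = -ENOMEM;
		goto free_tfm;
	}

	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG |
				      CRYPTO_TFM_REQ_MAY_SLEEP,
				      crypto_req_done, &wait);
	skcipher_request_set_crypt(req, sg, sg, len, iv);

	/* The driver completes asynchronously (-EINPROGRESS);
	 * crypto_wait_req() sleeps until the done_task fires. */
	err = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);

	skcipher_request_free(req);
free_tfm:
	crypto_free_skcipher(tfm);
	return err;
}
#endif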

/* gcm aead functions */

static int atmel_aes_gcm_ghash(struct atmel_aes_dev *dd,
			       const u32 *data, size_t datalen,
			       const __be32 *ghash_in, __be32 *ghash_out,
			       atmel_aes_fn_t resume);
static int atmel_aes_gcm_ghash_init(struct atmel_aes_dev *dd);
static int atmel_aes_gcm_ghash_finalize(struct atmel_aes_dev *dd);

static int atmel_aes_gcm_start(struct atmel_aes_dev *dd);
static int atmel_aes_gcm_process(struct atmel_aes_dev *dd);
static int atmel_aes_gcm_length(struct atmel_aes_dev *dd);
static int atmel_aes_gcm_data(struct atmel_aes_dev *dd);
static int atmel_aes_gcm_tag_init(struct atmel_aes_dev *dd);
static int atmel_aes_gcm_tag(struct atmel_aes_dev *dd);
static int atmel_aes_gcm_finalize(struct atmel_aes_dev *dd);

static inline struct atmel_aes_gcm_ctx *
atmel_aes_gcm_ctx_cast(struct atmel_aes_base_ctx *ctx)
{
	return container_of(ctx, struct atmel_aes_gcm_ctx, base);
}

static int atmel_aes_gcm_ghash(struct atmel_aes_dev *dd,
			       const u32 *data, size_t datalen,
			       const __be32 *ghash_in, __be32 *ghash_out,
			       atmel_aes_fn_t resume)
{
	struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx);

	dd->data = (u32 *)data;
	dd->datalen = datalen;
	ctx->ghash_in = ghash_in;
	ctx->ghash_out = ghash_out;
	ctx->ghash_resume = resume;

	atmel_aes_write_ctrl(dd, false, NULL);
	return atmel_aes_wait_for_data_ready(dd, atmel_aes_gcm_ghash_init);
}

static int atmel_aes_gcm_ghash_init(struct atmel_aes_dev *dd)
{
	struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx);

	/* Set the data length. */
	atmel_aes_write(dd, AES_AADLENR, dd->total);
	atmel_aes_write(dd, AES_CLENR, 0);

	/* If needed, overwrite the GCM Intermediate Hash Word Registers */
	if (ctx->ghash_in)
		atmel_aes_write_block(dd, AES_GHASHR(0), ctx->ghash_in);

	return atmel_aes_gcm_ghash_finalize(dd);
}

static int atmel_aes_gcm_ghash_finalize(struct atmel_aes_dev *dd)
{
	struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx);
	u32 isr;

	/* Write data into the Input Data Registers. */
	while (dd->datalen > 0) {
		atmel_aes_write_block(dd, AES_IDATAR(0), dd->data);
		dd->data += 4;
		dd->datalen -= AES_BLOCK_SIZE;

		isr = atmel_aes_read(dd, AES_ISR);
		if (!(isr & AES_INT_DATARDY)) {
			dd->resume = atmel_aes_gcm_ghash_finalize;
			atmel_aes_write(dd, AES_IER, AES_INT_DATARDY);
			return -EINPROGRESS;
		}
	}

	/* Read the computed hash from GHASHRx. */
	atmel_aes_read_block(dd, AES_GHASHR(0), ctx->ghash_out);

	return ctx->ghash_resume(dd);
}

static int atmel_aes_gcm_start(struct atmel_aes_dev *dd)
{
	struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx);
	struct aead_request *req = aead_request_cast(dd->areq);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct atmel_aes_reqctx *rctx = aead_request_ctx(req);
	size_t ivsize = crypto_aead_ivsize(tfm);
	size_t datalen, padlen;
	const void *iv = req->iv;
	u8 *data = dd->buf;
	int err;

	atmel_aes_set_mode(dd, rctx);

	err = atmel_aes_hw_init(dd);
	if (err)
		return atmel_aes_complete(dd, err);

	if (likely(ivsize == GCM_AES_IV_SIZE)) {
		memcpy(ctx->j0, iv, ivsize);
		ctx->j0[3] = cpu_to_be32(1);
		return atmel_aes_gcm_process(dd);
	}

	padlen = atmel_aes_padlen(ivsize, AES_BLOCK_SIZE);
	datalen = ivsize + padlen + AES_BLOCK_SIZE;
	if (datalen > dd->buflen)
		return atmel_aes_complete(dd, -EINVAL);

	memcpy(data, iv, ivsize);
	memset(data + ivsize, 0, padlen + sizeof(u64));
	((__be64 *)(data + datalen))[-1] = cpu_to_be64(ivsize * 8);

	return atmel_aes_gcm_ghash(dd, (const u32 *)data, datalen,
				   NULL, ctx->j0, atmel_aes_gcm_process);
}
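
/*
 * Worked example (added note): for the common 12-byte GCM IV, J0 is built
 * exactly as NIST SP 800-38D specifies, J0 = IV || 0^31 || 1, which is
 * the memcpy() plus ctx->j0[3] = cpu_to_be32(1) fast path above. Any
 * other IV length takes the slow path and derives
 * J0 = GHASH(IV || padding || [len(IV)]64) through atmel_aes_gcm_ghash().
 */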

static int atmel_aes_gcm_process(struct atmel_aes_dev *dd)
{
	struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx);
	struct aead_request *req = aead_request_cast(dd->areq);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	bool enc = atmel_aes_is_encrypt(dd);
	u32 authsize;

	/* Compute text length. */
	authsize = crypto_aead_authsize(tfm);
	ctx->textlen = req->cryptlen - (enc ? 0 : authsize);

	/*
	 * According to tcrypt test suite, the GCM Automatic Tag Generation
	 * fails when both the message and its associated data are empty.
	 */
	if (likely(req->assoclen != 0 || ctx->textlen != 0))
		dd->flags |= AES_FLAGS_GTAGEN;

	atmel_aes_write_ctrl(dd, false, NULL);
	return atmel_aes_wait_for_data_ready(dd, atmel_aes_gcm_length);
}

static int atmel_aes_gcm_length(struct atmel_aes_dev *dd)
{
	struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx);
	struct aead_request *req = aead_request_cast(dd->areq);
	__be32 j0_lsw, *j0 = ctx->j0;
	size_t padlen;

	/* Write incr32(J0) into IV. */
	j0_lsw = j0[3];
	j0[3] = cpu_to_be32(be32_to_cpu(j0[3]) + 1);
	atmel_aes_write_block(dd, AES_IVR(0), j0);
	j0[3] = j0_lsw;

	/* Set aad and text lengths. */
	atmel_aes_write(dd, AES_AADLENR, req->assoclen);
	atmel_aes_write(dd, AES_CLENR, ctx->textlen);

	/* Check whether AAD are present. */
	if (unlikely(req->assoclen == 0)) {
		dd->datalen = 0;
		return atmel_aes_gcm_data(dd);
	}

	/* Copy assoc data and add padding. */
	padlen = atmel_aes_padlen(req->assoclen, AES_BLOCK_SIZE);
	if (unlikely(req->assoclen + padlen > dd->buflen))
		return atmel_aes_complete(dd, -EINVAL);
	sg_copy_to_buffer(req->src, sg_nents(req->src), dd->buf, req->assoclen);

	/* Write assoc data into the Input Data register. */
	dd->data = (u32 *)dd->buf;
	dd->datalen = req->assoclen + padlen;
	return atmel_aes_gcm_data(dd);
}

static int atmel_aes_gcm_data(struct atmel_aes_dev *dd)
{
	struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx);
	struct aead_request *req = aead_request_cast(dd->areq);
	bool use_dma = (ctx->textlen >= ATMEL_AES_DMA_THRESHOLD);
	struct scatterlist *src, *dst;
	u32 isr, mr;

	/* Write AAD first. */
	while (dd->datalen > 0) {
		atmel_aes_write_block(dd, AES_IDATAR(0), dd->data);
		dd->data += 4;
		dd->datalen -= AES_BLOCK_SIZE;

		isr = atmel_aes_read(dd, AES_ISR);
		if (!(isr & AES_INT_DATARDY)) {
			dd->resume = atmel_aes_gcm_data;
			atmel_aes_write(dd, AES_IER, AES_INT_DATARDY);
			return -EINPROGRESS;
		}
	}

	/* GMAC only. */
	if (unlikely(ctx->textlen == 0))
		return atmel_aes_gcm_tag_init(dd);

	/* Prepare src and dst scatter lists to transfer cipher/plain texts */
	src = scatterwalk_ffwd(ctx->src, req->src, req->assoclen);
	dst = ((req->src == req->dst) ? src :
	       scatterwalk_ffwd(ctx->dst, req->dst, req->assoclen));

	if (use_dma) {
		/* Update the Mode Register for DMA transfers. */
		mr = atmel_aes_read(dd, AES_MR);
		mr &= ~(AES_MR_SMOD_MASK | AES_MR_DUALBUFF);
		mr |= AES_MR_SMOD_IDATAR0;
		if (dd->caps.has_dualbuff)
			mr |= AES_MR_DUALBUFF;
		atmel_aes_write(dd, AES_MR, mr);

		return atmel_aes_dma_start(dd, src, dst, ctx->textlen,
					   atmel_aes_gcm_tag_init);
	}

	return atmel_aes_cpu_start(dd, src, dst, ctx->textlen,
				   atmel_aes_gcm_tag_init);
}

static int atmel_aes_gcm_tag_init(struct atmel_aes_dev *dd)
{
	struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx);
	struct aead_request *req = aead_request_cast(dd->areq);
	__be64 *data = dd->buf;

	if (likely(dd->flags & AES_FLAGS_GTAGEN)) {
		if (!(atmel_aes_read(dd, AES_ISR) & AES_INT_TAGRDY)) {
			dd->resume = atmel_aes_gcm_tag_init;
			atmel_aes_write(dd, AES_IER, AES_INT_TAGRDY);
			return -EINPROGRESS;
		}

		return atmel_aes_gcm_finalize(dd);
	}

	/* Read the GCM Intermediate Hash Word Registers. */
	atmel_aes_read_block(dd, AES_GHASHR(0), ctx->ghash);

	data[0] = cpu_to_be64(req->assoclen * 8);
	data[1] = cpu_to_be64(ctx->textlen * 8);

	return atmel_aes_gcm_ghash(dd, (const u32 *)data, AES_BLOCK_SIZE,
				   ctx->ghash, ctx->ghash, atmel_aes_gcm_tag);
}

static int atmel_aes_gcm_tag(struct atmel_aes_dev *dd)
{
	struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx);
	unsigned long flags;

	/*
	 * Change mode to CTR to complete the tag generation.
	 * Use J0 as Initialization Vector.
	 */
	flags = dd->flags;
	dd->flags &= ~(AES_FLAGS_OPMODE_MASK | AES_FLAGS_GTAGEN);
	dd->flags |= AES_FLAGS_CTR;
	atmel_aes_write_ctrl(dd, false, ctx->j0);
	dd->flags = flags;

	atmel_aes_write_block(dd, AES_IDATAR(0), ctx->ghash);
	return atmel_aes_wait_for_data_ready(dd, atmel_aes_gcm_finalize);
}

static int atmel_aes_gcm_finalize(struct atmel_aes_dev *dd)
{
	struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx);
	struct aead_request *req = aead_request_cast(dd->areq);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	bool enc = atmel_aes_is_encrypt(dd);
	u32 offset, authsize, itag[4], *otag = ctx->tag;
	int err;

	/* Read the computed tag. */
	if (likely(dd->flags & AES_FLAGS_GTAGEN))
		atmel_aes_read_block(dd, AES_TAGR(0), ctx->tag);
	else
		atmel_aes_read_block(dd, AES_ODATAR(0), ctx->tag);

	offset = req->assoclen + ctx->textlen;
	authsize = crypto_aead_authsize(tfm);
	if (enc) {
		scatterwalk_map_and_copy(otag, req->dst, offset, authsize, 1);
		err = 0;
	} else {
		scatterwalk_map_and_copy(itag, req->src, offset, authsize, 0);
		err = crypto_memneq(itag, otag, authsize) ? -EBADMSG : 0;
	}

	return atmel_aes_complete(dd, err);
}

static int atmel_aes_gcm_crypt(struct aead_request *req,
			       unsigned long mode)
{
	struct atmel_aes_base_ctx *ctx;
	struct atmel_aes_reqctx *rctx;
	struct atmel_aes_dev *dd;

	ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
	ctx->block_size = AES_BLOCK_SIZE;
	ctx->is_aead = true;

	dd = atmel_aes_find_dev(ctx);
	if (!dd)
		return -ENODEV;

	rctx = aead_request_ctx(req);
	rctx->mode = AES_FLAGS_GCM | mode;

	return atmel_aes_handle_queue(dd, &req->base);
}

static int atmel_aes_gcm_setkey(struct crypto_aead *tfm, const u8 *key,
				unsigned int keylen)
{
	struct atmel_aes_base_ctx *ctx = crypto_aead_ctx(tfm);

	if (keylen != AES_KEYSIZE_256 &&
	    keylen != AES_KEYSIZE_192 &&
	    keylen != AES_KEYSIZE_128) {
		crypto_aead_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}

	memcpy(ctx->key, key, keylen);
	ctx->keylen = keylen;

	return 0;
}

static int atmel_aes_gcm_setauthsize(struct crypto_aead *tfm,
				     unsigned int authsize)
{
	/* Same as crypto_gcm_authsize() from crypto/gcm.c */
	switch (authsize) {
	case 4:
	case 8:
	case 12:
	case 13:
	case 14:
	case 15:
	case 16:
		break;
	default:
		return -EINVAL;
	}

	return 0;
}

static int atmel_aes_gcm_encrypt(struct aead_request *req)
{
	return atmel_aes_gcm_crypt(req, AES_FLAGS_ENCRYPT);
}

static int atmel_aes_gcm_decrypt(struct aead_request *req)
{
	return atmel_aes_gcm_crypt(req, 0);
}

static int atmel_aes_gcm_init(struct crypto_aead *tfm)
{
	struct atmel_aes_gcm_ctx *ctx = crypto_aead_ctx(tfm);

	crypto_aead_set_reqsize(tfm, sizeof(struct atmel_aes_reqctx));
	ctx->base.start = atmel_aes_gcm_start;

	return 0;
}

static struct aead_alg aes_gcm_alg = {
	.setkey		= atmel_aes_gcm_setkey,
	.setauthsize	= atmel_aes_gcm_setauthsize,
	.encrypt	= atmel_aes_gcm_encrypt,
	.decrypt	= atmel_aes_gcm_decrypt,
	.init		= atmel_aes_gcm_init,
	.ivsize		= GCM_AES_IV_SIZE,
	.maxauthsize	= AES_BLOCK_SIZE,

	.base = {
		.cra_name		= "gcm(aes)",
		.cra_driver_name	= "atmel-gcm-aes",
		.cra_priority		= ATMEL_AES_PRIORITY,
		.cra_flags		= CRYPTO_ALG_ASYNC,
		.cra_blocksize		= 1,
		.cra_ctxsize		= sizeof(struct atmel_aes_gcm_ctx),
		.cra_alignmask		= 0xf,
		.cra_module		= THIS_MODULE,
	},
};

/* xts functions */

static inline struct atmel_aes_xts_ctx *
atmel_aes_xts_ctx_cast(struct atmel_aes_base_ctx *ctx)
{
	return container_of(ctx, struct atmel_aes_xts_ctx, base);
}

static int atmel_aes_xts_process_data(struct atmel_aes_dev *dd);

static int atmel_aes_xts_start(struct atmel_aes_dev *dd)
{
	struct atmel_aes_xts_ctx *ctx = atmel_aes_xts_ctx_cast(dd->ctx);
	struct skcipher_request *req = skcipher_request_cast(dd->areq);
	struct atmel_aes_reqctx *rctx = skcipher_request_ctx(req);
	unsigned long flags;
	int err;

	atmel_aes_set_mode(dd, rctx);

	err = atmel_aes_hw_init(dd);
	if (err)
		return atmel_aes_complete(dd, err);

	/* Compute the tweak value from req->iv with ecb(aes). */
	flags = dd->flags;
	dd->flags &= ~AES_FLAGS_MODE_MASK;
	dd->flags |= (AES_FLAGS_ECB | AES_FLAGS_ENCRYPT);
	atmel_aes_write_ctrl_key(dd, false, NULL,
				 ctx->key2, ctx->base.keylen);
	dd->flags = flags;

	atmel_aes_write_block(dd, AES_IDATAR(0), req->iv);
	return atmel_aes_wait_for_data_ready(dd, atmel_aes_xts_process_data);
}

static int atmel_aes_xts_process_data(struct atmel_aes_dev *dd)
{
	struct skcipher_request *req = skcipher_request_cast(dd->areq);
	bool use_dma = (req->cryptlen >= ATMEL_AES_DMA_THRESHOLD);
	u32 tweak[AES_BLOCK_SIZE / sizeof(u32)];
	static const __le32 one[AES_BLOCK_SIZE / sizeof(u32)] = {cpu_to_le32(1), };
	u8 *tweak_bytes = (u8 *)tweak;
	int i;

	/* Read the computed ciphered tweak value. */
	atmel_aes_read_block(dd, AES_ODATAR(0), tweak);
	/*
	 * Hardware quirk:
	 * the order of the ciphered tweak bytes needs to be reversed before
	 * writing them into the ODATARx registers.
	 */
	for (i = 0; i < AES_BLOCK_SIZE/2; ++i) {
		u8 tmp = tweak_bytes[AES_BLOCK_SIZE - 1 - i];

		tweak_bytes[AES_BLOCK_SIZE - 1 - i] = tweak_bytes[i];
		tweak_bytes[i] = tmp;
	}

	/* Process the data. */
	atmel_aes_write_ctrl(dd, use_dma, NULL);
	atmel_aes_write_block(dd, AES_TWR(0), tweak);
	atmel_aes_write_block(dd, AES_ALPHAR(0), one);
	if (use_dma)
		return atmel_aes_dma_start(dd, req->src, req->dst,
					   req->cryptlen,
					   atmel_aes_transfer_complete);

	return atmel_aes_cpu_start(dd, req->src, req->dst, req->cryptlen,
				   atmel_aes_transfer_complete);
}

static int atmel_aes_xts_setkey(struct crypto_skcipher *tfm, const u8 *key,
				unsigned int keylen)
{
	struct atmel_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err;

	err = xts_check_key(crypto_skcipher_tfm(tfm), key, keylen);
	if (err)
		return err;

	memcpy(ctx->base.key, key, keylen/2);
	memcpy(ctx->key2, key + keylen/2, keylen/2);
	ctx->base.keylen = keylen/2;

	return 0;
}
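
/*
 * Worked example (added note): an xts(aes) key blob carries both subkeys
 * back to back, so a 64-byte key yields two AES-256 keys: bytes 0..31
 * become the data key (ctx->base.key) and bytes 32..63 the tweak key
 * (ctx->key2), with ctx->base.keylen = 32. xts_check_key() rejects
 * odd-length blobs and, in FIPS mode, blobs whose two halves are
 * identical.
 */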

static int atmel_aes_xts_encrypt(struct skcipher_request *req)
{
	return atmel_aes_crypt(req, AES_FLAGS_XTS | AES_FLAGS_ENCRYPT);
}

static int atmel_aes_xts_decrypt(struct skcipher_request *req)
{
	return atmel_aes_crypt(req, AES_FLAGS_XTS);
}

static int atmel_aes_xts_init_tfm(struct crypto_skcipher *tfm)
{
	struct atmel_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm);

	crypto_skcipher_set_reqsize(tfm, sizeof(struct atmel_aes_reqctx));
	ctx->base.start = atmel_aes_xts_start;

	return 0;
}

static struct skcipher_alg aes_xts_alg = {
	.base.cra_name		= "xts(aes)",
	.base.cra_driver_name	= "atmel-xts-aes",
	.base.cra_priority	= ATMEL_AES_PRIORITY,
	.base.cra_flags		= CRYPTO_ALG_ASYNC,
	.base.cra_blocksize	= AES_BLOCK_SIZE,
	.base.cra_ctxsize	= sizeof(struct atmel_aes_xts_ctx),
	.base.cra_alignmask	= 0xf,
	.base.cra_module	= THIS_MODULE,

	.min_keysize		= 2 * AES_MIN_KEY_SIZE,
	.max_keysize		= 2 * AES_MAX_KEY_SIZE,
	.ivsize			= AES_BLOCK_SIZE,
	.setkey			= atmel_aes_xts_setkey,
	.encrypt		= atmel_aes_xts_encrypt,
	.decrypt		= atmel_aes_xts_decrypt,
	.init			= atmel_aes_xts_init_tfm,
};

#if IS_ENABLED(CONFIG_CRYPTO_DEV_ATMEL_AUTHENC)
/* authenc aead functions */

static int atmel_aes_authenc_start(struct atmel_aes_dev *dd);
static int atmel_aes_authenc_init(struct atmel_aes_dev *dd, int err,
				  bool is_async);
static int atmel_aes_authenc_transfer(struct atmel_aes_dev *dd, int err,
				      bool is_async);
static int atmel_aes_authenc_digest(struct atmel_aes_dev *dd);
static int atmel_aes_authenc_final(struct atmel_aes_dev *dd, int err,
				   bool is_async);

static void atmel_aes_authenc_complete(struct atmel_aes_dev *dd, int err)
{
	struct aead_request *req = aead_request_cast(dd->areq);
	struct atmel_aes_authenc_reqctx *rctx = aead_request_ctx(req);

	if (err && (dd->flags & AES_FLAGS_OWN_SHA))
		atmel_sha_authenc_abort(&rctx->auth_req);
	dd->flags &= ~AES_FLAGS_OWN_SHA;
}

static int atmel_aes_authenc_start(struct atmel_aes_dev *dd)
{
	struct aead_request *req = aead_request_cast(dd->areq);
	struct atmel_aes_authenc_reqctx *rctx = aead_request_ctx(req);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct atmel_aes_authenc_ctx *ctx = crypto_aead_ctx(tfm);
	int err;

	atmel_aes_set_mode(dd, &rctx->base);

	err = atmel_aes_hw_init(dd);
	if (err)
		return atmel_aes_complete(dd, err);

	return atmel_sha_authenc_schedule(&rctx->auth_req, ctx->auth,
					  atmel_aes_authenc_init, dd);
}

static int atmel_aes_authenc_init(struct atmel_aes_dev *dd, int err,
				  bool is_async)
{
	struct aead_request *req = aead_request_cast(dd->areq);
	struct atmel_aes_authenc_reqctx *rctx = aead_request_ctx(req);

	if (is_async)
		dd->is_async = true;
	if (err)
		return atmel_aes_complete(dd, err);

	/* If here, we've got the ownership of the SHA device. */
	dd->flags |= AES_FLAGS_OWN_SHA;

	/* Configure the SHA device. */
	return atmel_sha_authenc_init(&rctx->auth_req,
				      req->src, req->assoclen,
				      rctx->textlen,
				      atmel_aes_authenc_transfer, dd);
}

static int atmel_aes_authenc_transfer(struct atmel_aes_dev *dd, int err,
				      bool is_async)
{
	struct aead_request *req = aead_request_cast(dd->areq);
	struct atmel_aes_authenc_reqctx *rctx = aead_request_ctx(req);
	bool enc = atmel_aes_is_encrypt(dd);
	struct scatterlist *src, *dst;
	__be32 iv[AES_BLOCK_SIZE / sizeof(u32)];
	u32 emr;

	if (is_async)
		dd->is_async = true;
	if (err)
		return atmel_aes_complete(dd, err);

	/* Prepare src and dst scatter-lists to transfer cipher/plain texts. */
	src = scatterwalk_ffwd(rctx->src, req->src, req->assoclen);
	dst = src;

	if (req->src != req->dst)
		dst = scatterwalk_ffwd(rctx->dst, req->dst, req->assoclen);

	/* Configure the AES device. */
	memcpy(iv, req->iv, sizeof(iv));

	/*
	 * Here we always set the 2nd parameter of atmel_aes_write_ctrl() to
	 * 'true' even if the data transfer is actually performed by the CPU (so
	 * not by the DMA) because we must force the AES_MR_SMOD bitfield to the
	 * value AES_MR_SMOD_IDATAR0. Indeed, both AES_MR_SMOD and SHA_MR_SMOD
	 * must be set to *_MR_SMOD_IDATAR0.
	 */
	atmel_aes_write_ctrl(dd, true, iv);
	emr = AES_EMR_PLIPEN;
	if (!enc)
		emr |= AES_EMR_PLIPD;
	atmel_aes_write(dd, AES_EMR, emr);

	/* Transfer data. */
	return atmel_aes_dma_start(dd, src, dst, rctx->textlen,
				   atmel_aes_authenc_digest);
}

static int atmel_aes_authenc_digest(struct atmel_aes_dev *dd)
{
	struct aead_request *req = aead_request_cast(dd->areq);
	struct atmel_aes_authenc_reqctx *rctx = aead_request_ctx(req);

	/* atmel_sha_authenc_final() releases the SHA device. */
	dd->flags &= ~AES_FLAGS_OWN_SHA;
	return atmel_sha_authenc_final(&rctx->auth_req,
				       rctx->digest, sizeof(rctx->digest),
				       atmel_aes_authenc_final, dd);
}

static int atmel_aes_authenc_final(struct atmel_aes_dev *dd, int err,
				   bool is_async)
{
	struct aead_request *req = aead_request_cast(dd->areq);
	struct atmel_aes_authenc_reqctx *rctx = aead_request_ctx(req);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	bool enc = atmel_aes_is_encrypt(dd);
	u32 idigest[SHA512_DIGEST_SIZE / sizeof(u32)], *odigest = rctx->digest;
	u32 offs, authsize;

	if (is_async)
		dd->is_async = true;
	if (err)
		goto complete;

	offs = req->assoclen + rctx->textlen;
	authsize = crypto_aead_authsize(tfm);
	if (enc) {
		scatterwalk_map_and_copy(odigest, req->dst, offs, authsize, 1);
	} else {
		scatterwalk_map_and_copy(idigest, req->src, offs, authsize, 0);
		if (crypto_memneq(idigest, odigest, authsize))
			err = -EBADMSG;
	}

complete:
	return atmel_aes_complete(dd, err);
}

static int atmel_aes_authenc_setkey(struct crypto_aead *tfm, const u8 *key,
				    unsigned int keylen)
{
	struct atmel_aes_authenc_ctx *ctx = crypto_aead_ctx(tfm);
	struct crypto_authenc_keys keys;
	u32 flags;
	int err;

	if (crypto_authenc_extractkeys(&keys, key, keylen) != 0)
		goto badkey;

	if (keys.enckeylen > sizeof(ctx->base.key))
		goto badkey;

	/* Save auth key. */
	flags = crypto_aead_get_flags(tfm);
	err = atmel_sha_authenc_setkey(ctx->auth,
				       keys.authkey, keys.authkeylen,
				       &flags);
	crypto_aead_set_flags(tfm, flags & CRYPTO_TFM_RES_MASK);
	if (err) {
		memzero_explicit(&keys, sizeof(keys));
		return err;
	}

	/* Save enc key. */
	ctx->base.keylen = keys.enckeylen;
	memcpy(ctx->base.key, keys.enckey, keys.enckeylen);

	memzero_explicit(&keys, sizeof(keys));
	return 0;

badkey:
	crypto_aead_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
	memzero_explicit(&keys, sizeof(keys));
	return -EINVAL;
}

static int atmel_aes_authenc_init_tfm(struct crypto_aead *tfm,
				      unsigned long auth_mode)
{
	struct atmel_aes_authenc_ctx *ctx = crypto_aead_ctx(tfm);
	unsigned int auth_reqsize = atmel_sha_authenc_get_reqsize();

	ctx->auth = atmel_sha_authenc_spawn(auth_mode);
	if (IS_ERR(ctx->auth))
		return PTR_ERR(ctx->auth);

	crypto_aead_set_reqsize(tfm, (sizeof(struct atmel_aes_authenc_reqctx) +
				      auth_reqsize));
	ctx->base.start = atmel_aes_authenc_start;

	return 0;
}

static int atmel_aes_authenc_hmac_sha1_init_tfm(struct crypto_aead *tfm)
{
	return atmel_aes_authenc_init_tfm(tfm, SHA_FLAGS_HMAC_SHA1);
}

static int atmel_aes_authenc_hmac_sha224_init_tfm(struct crypto_aead *tfm)
{
	return atmel_aes_authenc_init_tfm(tfm, SHA_FLAGS_HMAC_SHA224);
}

static int atmel_aes_authenc_hmac_sha256_init_tfm(struct crypto_aead *tfm)
{
	return atmel_aes_authenc_init_tfm(tfm, SHA_FLAGS_HMAC_SHA256);
}

static int atmel_aes_authenc_hmac_sha384_init_tfm(struct crypto_aead *tfm)
{
	return atmel_aes_authenc_init_tfm(tfm, SHA_FLAGS_HMAC_SHA384);
}

static int atmel_aes_authenc_hmac_sha512_init_tfm(struct crypto_aead *tfm)
{
	return atmel_aes_authenc_init_tfm(tfm, SHA_FLAGS_HMAC_SHA512);
}

static void atmel_aes_authenc_exit_tfm(struct crypto_aead *tfm)
{
	struct atmel_aes_authenc_ctx *ctx = crypto_aead_ctx(tfm);

	atmel_sha_authenc_free(ctx->auth);
}

static int atmel_aes_authenc_crypt(struct aead_request *req,
				   unsigned long mode)
{
	struct atmel_aes_authenc_reqctx *rctx = aead_request_ctx(req);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct atmel_aes_base_ctx *ctx = crypto_aead_ctx(tfm);
	u32 authsize = crypto_aead_authsize(tfm);
	bool enc = (mode & AES_FLAGS_ENCRYPT);
	struct atmel_aes_dev *dd;

	/* Compute text length. */
	if (!enc && req->cryptlen < authsize)
		return -EINVAL;
	rctx->textlen = req->cryptlen - (enc ? 0 : authsize);

	/*
	 * Currently, empty messages are not supported yet:
	 * the SHA auto-padding can be used only on non-empty messages.
	 * Hence a special case needs to be implemented for empty message.
	 */
	if (!rctx->textlen && !req->assoclen)
		return -EINVAL;

	rctx->base.mode = mode;
	ctx->block_size = AES_BLOCK_SIZE;
	ctx->is_aead = true;

	dd = atmel_aes_find_dev(ctx);
	if (!dd)
		return -ENODEV;

	return atmel_aes_handle_queue(dd, &req->base);
}

static int atmel_aes_authenc_cbc_aes_encrypt(struct aead_request *req)
{
	return atmel_aes_authenc_crypt(req, AES_FLAGS_CBC | AES_FLAGS_ENCRYPT);
}

static int atmel_aes_authenc_cbc_aes_decrypt(struct aead_request *req)
{
	return atmel_aes_authenc_crypt(req, AES_FLAGS_CBC);
}

static struct aead_alg aes_authenc_algs[] = {
{
	.setkey		= atmel_aes_authenc_setkey,
	.encrypt	= atmel_aes_authenc_cbc_aes_encrypt,
	.decrypt	= atmel_aes_authenc_cbc_aes_decrypt,
	.init		= atmel_aes_authenc_hmac_sha1_init_tfm,
	.exit		= atmel_aes_authenc_exit_tfm,
	.ivsize		= AES_BLOCK_SIZE,
	.maxauthsize	= SHA1_DIGEST_SIZE,

	.base = {
		.cra_name		= "authenc(hmac(sha1),cbc(aes))",
		.cra_driver_name	= "atmel-authenc-hmac-sha1-cbc-aes",
		.cra_priority		= ATMEL_AES_PRIORITY,
		.cra_flags		= CRYPTO_ALG_ASYNC,
		.cra_blocksize		= AES_BLOCK_SIZE,
		.cra_ctxsize		= sizeof(struct atmel_aes_authenc_ctx),
		.cra_alignmask		= 0xf,
		.cra_module		= THIS_MODULE,
	},
},
{
	.setkey		= atmel_aes_authenc_setkey,
	.encrypt	= atmel_aes_authenc_cbc_aes_encrypt,
	.decrypt	= atmel_aes_authenc_cbc_aes_decrypt,
	.init		= atmel_aes_authenc_hmac_sha224_init_tfm,
	.exit		= atmel_aes_authenc_exit_tfm,
	.ivsize		= AES_BLOCK_SIZE,
	.maxauthsize	= SHA224_DIGEST_SIZE,

	.base = {
		.cra_name		= "authenc(hmac(sha224),cbc(aes))",
		.cra_driver_name	= "atmel-authenc-hmac-sha224-cbc-aes",
		.cra_priority		= ATMEL_AES_PRIORITY,
		.cra_flags		= CRYPTO_ALG_ASYNC,
		.cra_blocksize		= AES_BLOCK_SIZE,
		.cra_ctxsize		= sizeof(struct atmel_aes_authenc_ctx),
		.cra_alignmask		= 0xf,
		.cra_module		= THIS_MODULE,
	},
},
{
	.setkey		= atmel_aes_authenc_setkey,
	.encrypt	= atmel_aes_authenc_cbc_aes_encrypt,
	.decrypt	= atmel_aes_authenc_cbc_aes_decrypt,
	.init		= atmel_aes_authenc_hmac_sha256_init_tfm,
	.exit		= atmel_aes_authenc_exit_tfm,
	.ivsize		= AES_BLOCK_SIZE,
	.maxauthsize	= SHA256_DIGEST_SIZE,

	.base = {
		.cra_name		= "authenc(hmac(sha256),cbc(aes))",
		.cra_driver_name	= "atmel-authenc-hmac-sha256-cbc-aes",
		.cra_priority		= ATMEL_AES_PRIORITY,
		.cra_flags		= CRYPTO_ALG_ASYNC,
		.cra_blocksize		= AES_BLOCK_SIZE,
		.cra_ctxsize		= sizeof(struct atmel_aes_authenc_ctx),
		.cra_alignmask		= 0xf,
		.cra_module		= THIS_MODULE,
	},
},
{
	.setkey		= atmel_aes_authenc_setkey,
	.encrypt	= atmel_aes_authenc_cbc_aes_encrypt,
	.decrypt	= atmel_aes_authenc_cbc_aes_decrypt,
	.init		= atmel_aes_authenc_hmac_sha384_init_tfm,
	.exit		= atmel_aes_authenc_exit_tfm,
	.ivsize		= AES_BLOCK_SIZE,
	.maxauthsize	= SHA384_DIGEST_SIZE,

	.base = {
		.cra_name		= "authenc(hmac(sha384),cbc(aes))",
		.cra_driver_name	= "atmel-authenc-hmac-sha384-cbc-aes",
		.cra_priority		= ATMEL_AES_PRIORITY,
		.cra_flags		= CRYPTO_ALG_ASYNC,
		.cra_blocksize		= AES_BLOCK_SIZE,
		.cra_ctxsize		= sizeof(struct atmel_aes_authenc_ctx),
		.cra_alignmask		= 0xf,
		.cra_module		= THIS_MODULE,
	},
},
{
	.setkey		= atmel_aes_authenc_setkey,
	.encrypt	= atmel_aes_authenc_cbc_aes_encrypt,
	.decrypt	= atmel_aes_authenc_cbc_aes_decrypt,
	.init		= atmel_aes_authenc_hmac_sha512_init_tfm,
	.exit		= atmel_aes_authenc_exit_tfm,
	.ivsize		= AES_BLOCK_SIZE,
	.maxauthsize	= SHA512_DIGEST_SIZE,

	.base = {
		.cra_name		= "authenc(hmac(sha512),cbc(aes))",
		.cra_driver_name	= "atmel-authenc-hmac-sha512-cbc-aes",
		.cra_priority		= ATMEL_AES_PRIORITY,
		.cra_flags		= CRYPTO_ALG_ASYNC,
		.cra_blocksize		= AES_BLOCK_SIZE,
		.cra_ctxsize		= sizeof(struct atmel_aes_authenc_ctx),
		.cra_alignmask		= 0xf,
		.cra_module		= THIS_MODULE,
	},
},
};
#endif /* CONFIG_CRYPTO_DEV_ATMEL_AUTHENC */
/* Probe functions */
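
/*
 * Allocate the driver's bounce buffer, used to linearize scatterlist
 * data that is not suitable for DMA.  ATMEL_AES_BUFFER_ORDER is 2, so
 * this grabs four contiguous pages (16 KB with 4 KB pages); the usable
 * length is rounded down to a whole number of AES blocks.
 */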
static int atmel_aes_buff_init(struct atmel_aes_dev *dd)
{
	dd->buf = (void *)__get_free_pages(GFP_KERNEL, ATMEL_AES_BUFFER_ORDER);
	dd->buflen = ATMEL_AES_BUFFER_SIZE;
	dd->buflen &= ~(AES_BLOCK_SIZE - 1);

	if (!dd->buf) {
		dev_err(dd->dev, "unable to alloc pages.\n");
		return -ENOMEM;
	}

	return 0;
}

static void atmel_aes_buff_cleanup(struct atmel_aes_dev *dd)
{
	/*
	 * The buffer was allocated with __get_free_pages(), so it must be
	 * released with the matching order: a plain free_page() here would
	 * leak the three trailing pages of the order-2 allocation.
	 */
	free_pages((unsigned long)dd->buf, ATMEL_AES_BUFFER_ORDER);
}
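
/*
 * Filter callback for dma_request_slave_channel_compat(): accept only a
 * channel whose DMA controller matches the one named in the board's
 * at_dma_slave data, and hand that slave configuration to the channel.
 */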
static bool atmel_aes_filter(struct dma_chan *chan, void *slave)
{
	struct at_dma_slave *sl = slave;

	if (sl && sl->dma_dev == chan->device->dev) {
		chan->private = sl;
		return true;
	} else {
		return false;
	}
}
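
/*
 * Request the two slave channels used for the AES FIFOs: "tx" feeds
 * input data into the IP (dd->src) and "rx" drains the results
 * (dd->dst).  On DT platforms the names resolve through the
 * dmas/dma-names properties; otherwise atmel_aes_filter() matches
 * against the at_dma_slave platform data.
 */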
static int atmel_aes_dma_init(struct atmel_aes_dev *dd,
			      struct crypto_platform_data *pdata)
{
	struct at_dma_slave *slave;
	dma_cap_mask_t mask;

	dma_cap_zero(mask);
	dma_cap_set(DMA_SLAVE, mask);

	/* Try to grab 2 DMA channels */
	slave = &pdata->dma_slave->rxdata;
	dd->src.chan = dma_request_slave_channel_compat(mask, atmel_aes_filter,
							slave, dd->dev, "tx");
	if (!dd->src.chan)
		goto err_dma_in;

	slave = &pdata->dma_slave->txdata;
	dd->dst.chan = dma_request_slave_channel_compat(mask, atmel_aes_filter,
							slave, dd->dev, "rx");
	if (!dd->dst.chan)
		goto err_dma_out;

	return 0;

err_dma_out:
	dma_release_channel(dd->src.chan);
err_dma_in:
	dev_warn(dd->dev, "no DMA channel available\n");
	return -ENODEV;
}

static void atmel_aes_dma_cleanup(struct atmel_aes_dev *dd)
{
	dma_release_channel(dd->dst.chan);
	dma_release_channel(dd->src.chan);
}
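
/*
 * Bottom halves: the queue tasklet pulls the next request off the
 * crypto queue once the engine is idle, while the done tasklet resumes
 * the state machine of the request that just completed (dd->resume is
 * set by whichever step armed the interrupt or DMA transfer).
 */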
static void atmel_aes_queue_task(unsigned long data)
{
	struct atmel_aes_dev *dd = (struct atmel_aes_dev *)data;

	atmel_aes_handle_queue(dd, NULL);
}

static void atmel_aes_done_task(unsigned long data)
{
	struct atmel_aes_dev *dd = (struct atmel_aes_dev *)data;

	dd->is_async = true;
	(void)dd->resume(dd);
}
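
/*
 * Interrupt handler: acknowledge the pending sources by masking them in
 * AES_IDR, then defer the actual completion work to the done tasklet.
 * An interrupt with no request marked busy is only reported.
 */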
static irqreturn_t atmel_aes_irq(int irq, void *dev_id)
{
	struct atmel_aes_dev *aes_dd = dev_id;
	u32 reg;

	reg = atmel_aes_read(aes_dd, AES_ISR);
	if (reg & atmel_aes_read(aes_dd, AES_IMR)) {
		atmel_aes_write(aes_dd, AES_IDR, reg);
		if (AES_FLAGS_BUSY & aes_dd->flags)
			tasklet_schedule(&aes_dd->done_task);
		else
			dev_warn(aes_dd->dev, "AES interrupt when no active requests.\n");
		return IRQ_HANDLED;
	}

	return IRQ_NONE;
}
static void atmel_aes_unregister_algs(struct atmel_aes_dev *dd)
{
	int i;

#if IS_ENABLED(CONFIG_CRYPTO_DEV_ATMEL_AUTHENC)
	if (dd->caps.has_authenc)
		for (i = 0; i < ARRAY_SIZE(aes_authenc_algs); i++)
			crypto_unregister_aead(&aes_authenc_algs[i]);
#endif

	if (dd->caps.has_xts)
		crypto_unregister_skcipher(&aes_xts_alg);

	if (dd->caps.has_gcm)
		crypto_unregister_aead(&aes_gcm_alg);

	if (dd->caps.has_cfb64)
		crypto_unregister_skcipher(&aes_cfb64_alg);

	for (i = 0; i < ARRAY_SIZE(aes_algs); i++)
		crypto_unregister_skcipher(&aes_algs[i]);
}
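
/*
 * Register the base skcipher algorithms first, then the optional ones
 * reported by the capability bits.  The error labels unwind in exact
 * reverse order: each label unregisters whatever was registered after
 * the point it names, then falls through to the next label.
 */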
static int atmel_aes_register_algs(struct atmel_aes_dev *dd)
{
	int err, i, j;

	for (i = 0; i < ARRAY_SIZE(aes_algs); i++) {
		err = crypto_register_skcipher(&aes_algs[i]);
		if (err)
			goto err_aes_algs;
	}

	if (dd->caps.has_cfb64) {
		err = crypto_register_skcipher(&aes_cfb64_alg);
		if (err)
			goto err_aes_cfb64_alg;
	}

	if (dd->caps.has_gcm) {
		err = crypto_register_aead(&aes_gcm_alg);
		if (err)
			goto err_aes_gcm_alg;
	}

	if (dd->caps.has_xts) {
		err = crypto_register_skcipher(&aes_xts_alg);
		if (err)
			goto err_aes_xts_alg;
	}

#if IS_ENABLED(CONFIG_CRYPTO_DEV_ATMEL_AUTHENC)
	if (dd->caps.has_authenc) {
		for (i = 0; i < ARRAY_SIZE(aes_authenc_algs); i++) {
			err = crypto_register_aead(&aes_authenc_algs[i]);
			if (err)
				goto err_aes_authenc_alg;
		}
	}
#endif

	return 0;

#if IS_ENABLED(CONFIG_CRYPTO_DEV_ATMEL_AUTHENC)
	/* i = ARRAY_SIZE(aes_authenc_algs); */
err_aes_authenc_alg:
	for (j = 0; j < i; j++)
		crypto_unregister_aead(&aes_authenc_algs[j]);
	crypto_unregister_skcipher(&aes_xts_alg);
#endif
err_aes_xts_alg:
	crypto_unregister_aead(&aes_gcm_alg);
err_aes_gcm_alg:
	crypto_unregister_skcipher(&aes_cfb64_alg);
err_aes_cfb64_alg:
	i = ARRAY_SIZE(aes_algs);
err_aes_algs:
	for (j = 0; j < i; j++)
		crypto_unregister_skcipher(&aes_algs[j]);

	return err;
}
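
/*
 * Decode hardware capabilities from the major IP version: 0x500 adds
 * XTS and the combined AES+SHA (authenc) path, 0x200 adds GCM and
 * 32-bit counter increments for CTR mode, 0x130 adds the dual input
 * buffer and CFB64, and 0x120 provides only the baseline feature set.
 */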
static void atmel_aes_get_cap(struct atmel_aes_dev *dd)
{
	dd->caps.has_dualbuff = 0;
	dd->caps.has_cfb64 = 0;
	dd->caps.has_ctr32 = 0;
	dd->caps.has_gcm = 0;
	dd->caps.has_xts = 0;
	dd->caps.has_authenc = 0;
	dd->caps.max_burst_size = 1;

	/* keep only major version number */
	switch (dd->hw_version & 0xff0) {
	case 0x500:
		dd->caps.has_dualbuff = 1;
		dd->caps.has_cfb64 = 1;
		dd->caps.has_ctr32 = 1;
		dd->caps.has_gcm = 1;
		dd->caps.has_xts = 1;
		dd->caps.has_authenc = 1;
		dd->caps.max_burst_size = 4;
		break;
	case 0x200:
		dd->caps.has_dualbuff = 1;
		dd->caps.has_cfb64 = 1;
		dd->caps.has_ctr32 = 1;
		dd->caps.has_gcm = 1;
		dd->caps.max_burst_size = 4;
		break;
	case 0x130:
		dd->caps.has_dualbuff = 1;
		dd->caps.has_cfb64 = 1;
		dd->caps.max_burst_size = 4;
		break;
	case 0x120:
		break;
	default:
		dev_warn(dd->dev,
			 "Unmanaged aes version, set minimum capabilities\n");
		break;
	}
}
#if defined(CONFIG_OF)
static const struct of_device_id atmel_aes_dt_ids[] = {
	{ .compatible = "atmel,at91sam9g46-aes" },
	{ /* sentinel */ }
};
MODULE_DEVICE_TABLE(of, atmel_aes_dt_ids);

static struct crypto_platform_data *atmel_aes_of_init(struct platform_device *pdev)
{
	struct device_node *np = pdev->dev.of_node;
	struct crypto_platform_data *pdata;

	if (!np) {
		dev_err(&pdev->dev, "device node not found\n");
		return ERR_PTR(-EINVAL);
	}

	pdata = devm_kzalloc(&pdev->dev, sizeof(*pdata), GFP_KERNEL);
	if (!pdata)
		return ERR_PTR(-ENOMEM);

	pdata->dma_slave = devm_kzalloc(&pdev->dev,
					sizeof(*(pdata->dma_slave)),
					GFP_KERNEL);
	if (!pdata->dma_slave) {
		devm_kfree(&pdev->dev, pdata);
		return ERR_PTR(-ENOMEM);
	}

	return pdata;
}
#else
static inline struct crypto_platform_data *atmel_aes_of_init(struct platform_device *pdev)
{
	return ERR_PTR(-EINVAL);
}
#endif
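
/*
 * Probe: gather platform data (from DT if necessary), map resources,
 * request the IRQ, prepare the clock, read the IP version to derive the
 * capabilities, then set up the bounce buffer and DMA channels.  The
 * algorithms are registered last, so no request can reach the device
 * before it is fully ready.
 */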
static int atmel_aes_probe(struct platform_device *pdev)
{
	struct atmel_aes_dev *aes_dd;
	struct crypto_platform_data *pdata;
	struct device *dev = &pdev->dev;
	struct resource *aes_res;
	int err;

	pdata = pdev->dev.platform_data;
	if (!pdata) {
		pdata = atmel_aes_of_init(pdev);
		if (IS_ERR(pdata)) {
			err = PTR_ERR(pdata);
			goto aes_dd_err;
		}
	}

	if (!pdata->dma_slave) {
		err = -ENXIO;
		goto aes_dd_err;
	}

	aes_dd = devm_kzalloc(&pdev->dev, sizeof(*aes_dd), GFP_KERNEL);
	if (aes_dd == NULL) {
		err = -ENOMEM;
		goto aes_dd_err;
	}

	aes_dd->dev = dev;

	platform_set_drvdata(pdev, aes_dd);

	INIT_LIST_HEAD(&aes_dd->list);
	spin_lock_init(&aes_dd->lock);

	tasklet_init(&aes_dd->done_task, atmel_aes_done_task,
		     (unsigned long)aes_dd);
	tasklet_init(&aes_dd->queue_task, atmel_aes_queue_task,
		     (unsigned long)aes_dd);

	crypto_init_queue(&aes_dd->queue, ATMEL_AES_QUEUE_LENGTH);

	/* Get the base address */
	aes_res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
	if (!aes_res) {
		dev_err(dev, "no MEM resource info\n");
		err = -ENODEV;
		goto res_err;
	}
	aes_dd->phys_base = aes_res->start;

	/* Get the IRQ */
	aes_dd->irq = platform_get_irq(pdev, 0);
	if (aes_dd->irq < 0) {
		err = aes_dd->irq;
		goto res_err;
	}

	err = devm_request_irq(&pdev->dev, aes_dd->irq, atmel_aes_irq,
			       IRQF_SHARED, "atmel-aes", aes_dd);
	if (err) {
		dev_err(dev, "unable to request aes irq.\n");
		goto res_err;
	}

	/* Initializing the clock */
	aes_dd->iclk = devm_clk_get(&pdev->dev, "aes_clk");
	if (IS_ERR(aes_dd->iclk)) {
		dev_err(dev, "clock initialization failed.\n");
		err = PTR_ERR(aes_dd->iclk);
		goto res_err;
	}

	aes_dd->io_base = devm_ioremap_resource(&pdev->dev, aes_res);
	if (IS_ERR(aes_dd->io_base)) {
		dev_err(dev, "can't ioremap\n");
		err = PTR_ERR(aes_dd->io_base);
		goto res_err;
	}

	err = clk_prepare(aes_dd->iclk);
	if (err)
		goto res_err;

	err = atmel_aes_hw_version_init(aes_dd);
	if (err)
		goto iclk_unprepare;

	atmel_aes_get_cap(aes_dd);

#if IS_ENABLED(CONFIG_CRYPTO_DEV_ATMEL_AUTHENC)
	if (aes_dd->caps.has_authenc && !atmel_sha_authenc_is_ready()) {
		err = -EPROBE_DEFER;
		goto iclk_unprepare;
	}
#endif

	err = atmel_aes_buff_init(aes_dd);
	if (err)
		goto err_aes_buff;

	err = atmel_aes_dma_init(aes_dd, pdata);
	if (err)
		goto err_aes_dma;

	spin_lock(&atmel_aes.lock);
	list_add_tail(&aes_dd->list, &atmel_aes.dev_list);
	spin_unlock(&atmel_aes.lock);

	err = atmel_aes_register_algs(aes_dd);
	if (err)
		goto err_algs;

	dev_info(dev, "Atmel AES - Using %s, %s for DMA transfers\n",
		 dma_chan_name(aes_dd->src.chan),
		 dma_chan_name(aes_dd->dst.chan));

	return 0;

err_algs:
	spin_lock(&atmel_aes.lock);
	list_del(&aes_dd->list);
	spin_unlock(&atmel_aes.lock);
	atmel_aes_dma_cleanup(aes_dd);
err_aes_dma:
	atmel_aes_buff_cleanup(aes_dd);
err_aes_buff:
iclk_unprepare:
	clk_unprepare(aes_dd->iclk);
res_err:
	tasklet_kill(&aes_dd->done_task);
	tasklet_kill(&aes_dd->queue_task);
aes_dd_err:
	if (err != -EPROBE_DEFER)
		dev_err(dev, "initialization failed.\n");

	return err;
}
static int atmel_aes_remove(struct platform_device *pdev)
{
	struct atmel_aes_dev *aes_dd;

	aes_dd = platform_get_drvdata(pdev);
	if (!aes_dd)
		return -ENODEV;

	spin_lock(&atmel_aes.lock);
	list_del(&aes_dd->list);
	spin_unlock(&atmel_aes.lock);

	atmel_aes_unregister_algs(aes_dd);

	tasklet_kill(&aes_dd->done_task);
	tasklet_kill(&aes_dd->queue_task);

	atmel_aes_dma_cleanup(aes_dd);
	atmel_aes_buff_cleanup(aes_dd);

	clk_unprepare(aes_dd->iclk);

	return 0;
}

static struct platform_driver atmel_aes_driver = {
	.probe		= atmel_aes_probe,
	.remove		= atmel_aes_remove,
	.driver		= {
		.name		= "atmel_aes",
		.of_match_table = of_match_ptr(atmel_aes_dt_ids),
	},
};

module_platform_driver(atmel_aes_driver);

MODULE_DESCRIPTION("Atmel AES hw acceleration support.");
MODULE_LICENSE("GPL v2");
MODULE_AUTHOR("Nicolas Royer - Eukréa Electromatique");