// SPDX-License-Identifier: GPL-2.0-only
/*
 * Support for OMAP AES GCM HW acceleration.
 *
 * Copyright (c) 2016 Texas Instruments Incorporated
 */

#include <linux/errno.h>
#include <linux/scatterlist.h>
#include <linux/dma-mapping.h>
#include <linux/dmaengine.h>
#include <linux/omap-dma.h>
#include <linux/interrupt.h>
#include <crypto/aes.h>
#include <crypto/gcm.h>
#include <crypto/scatterwalk.h>
#include <crypto/skcipher.h>
#include <crypto/internal/aead.h>

#include "omap-crypto.h"
#include "omap-aes.h"

static int omap_aes_gcm_handle_queue(struct omap_aes_dev *dd,
				     struct aead_request *req);
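
/*
 * Hand the finished AEAD request back to its completion callback and
 * release the FLAGS_BUSY claim on the hardware.
 */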
static void omap_aes_gcm_finish_req(struct omap_aes_dev *dd, int ret)
{
	struct aead_request *req = dd->aead_req;

	dd->flags &= ~FLAGS_BUSY;
	dd->in_sg = NULL;
	dd->out_sg = NULL;

	req->base.complete(&req->base, ret);
}
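
/*
 * DMA completion path: sync and unmap the DMA scatterlists, release any
 * bounce buffers set up by omap_crypto_align_sg(), copy the computed tag
 * out for encryption, and for decryption verify that the folded tag bytes
 * are all zero (see omap_aes_gcm_dma_out_callback() below).
 */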
static void omap_aes_gcm_done_task(struct omap_aes_dev *dd)
{
	u8 *tag;
	int alen, clen, i, ret = 0, nsg;
	struct omap_aes_reqctx *rctx;

	alen = ALIGN(dd->assoc_len, AES_BLOCK_SIZE);
	clen = ALIGN(dd->total, AES_BLOCK_SIZE);
	rctx = aead_request_ctx(dd->aead_req);

	nsg = !!(dd->assoc_len && dd->total);

	dma_sync_sg_for_device(dd->dev, dd->out_sg, dd->out_sg_len,
			       DMA_FROM_DEVICE);
	dma_unmap_sg(dd->dev, dd->in_sg, dd->in_sg_len, DMA_TO_DEVICE);
	dma_unmap_sg(dd->dev, dd->out_sg, dd->out_sg_len, DMA_FROM_DEVICE);
	omap_aes_crypt_dma_stop(dd);

	omap_crypto_cleanup(dd->out_sg, dd->orig_out,
			    dd->aead_req->assoclen, dd->total,
			    FLAGS_OUT_DATA_ST_SHIFT, dd->flags);

	if (dd->flags & FLAGS_ENCRYPT)
		scatterwalk_map_and_copy(rctx->auth_tag,
					 dd->aead_req->dst,
					 dd->total + dd->aead_req->assoclen,
					 dd->authsize, 1);

	omap_crypto_cleanup(&dd->in_sgl[0], NULL, 0, alen,
			    FLAGS_ASSOC_DATA_ST_SHIFT, dd->flags);

	omap_crypto_cleanup(&dd->in_sgl[nsg], NULL, 0, clen,
			    FLAGS_IN_DATA_ST_SHIFT, dd->flags);

	if (!(dd->flags & FLAGS_ENCRYPT)) {
		tag = (u8 *)rctx->auth_tag;
		for (i = 0; i < dd->authsize; i++) {
			if (tag[i]) {
				dev_err(dd->dev, "GCM decryption: Tag Message is wrong\n");
				ret = -EBADMSG;
			}
		}
	}

	omap_aes_gcm_finish_req(dd, ret);
	omap_aes_gcm_handle_queue(dd, NULL);
}
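
/*
 * Prepare the request for the hardware: copy the associated data and the
 * payload into AES-block-aligned scatterlists (dd->in_sgl) as needed, and
 * set up an aligned output scatterlist, forcing a copy when the request is
 * in-place or the destination had to be fast-forwarded past the AAD.
 */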
static int omap_aes_gcm_copy_buffers(struct omap_aes_dev *dd,
				     struct aead_request *req)
{
	int alen, clen, cryptlen, assoclen, ret;
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	unsigned int authlen = crypto_aead_authsize(aead);
	struct scatterlist *tmp, sg_arr[2];
	int nsg;
	u16 flags;

	assoclen = req->assoclen;
	cryptlen = req->cryptlen;

	if (dd->flags & FLAGS_RFC4106_GCM)
		assoclen -= 8;

	if (!(dd->flags & FLAGS_ENCRYPT))
		cryptlen -= authlen;

	alen = ALIGN(assoclen, AES_BLOCK_SIZE);
	clen = ALIGN(cryptlen, AES_BLOCK_SIZE);

	nsg = !!(assoclen && cryptlen);

	omap_aes_clear_copy_flags(dd);

	sg_init_table(dd->in_sgl, nsg + 1);
	if (assoclen) {
		tmp = req->src;
		ret = omap_crypto_align_sg(&tmp, assoclen,
					   AES_BLOCK_SIZE, dd->in_sgl,
					   OMAP_CRYPTO_COPY_DATA |
					   OMAP_CRYPTO_ZERO_BUF |
					   OMAP_CRYPTO_FORCE_SINGLE_ENTRY,
					   FLAGS_ASSOC_DATA_ST_SHIFT,
					   &dd->flags);
		if (ret)
			return ret;
	}

	if (cryptlen) {
		tmp = scatterwalk_ffwd(sg_arr, req->src, req->assoclen);

		ret = omap_crypto_align_sg(&tmp, cryptlen,
					   AES_BLOCK_SIZE, &dd->in_sgl[nsg],
					   OMAP_CRYPTO_COPY_DATA |
					   OMAP_CRYPTO_ZERO_BUF |
					   OMAP_CRYPTO_FORCE_SINGLE_ENTRY,
					   FLAGS_IN_DATA_ST_SHIFT,
					   &dd->flags);
		if (ret)
			return ret;
	}

	dd->in_sg = dd->in_sgl;
	dd->total = cryptlen;
	dd->assoc_len = assoclen;
	dd->authsize = authlen;

	dd->out_sg = req->dst;
	dd->orig_out = req->dst;

	dd->out_sg = scatterwalk_ffwd(sg_arr, req->dst, assoclen);

	flags = 0;
	if (req->src == req->dst || dd->out_sg == sg_arr)
		flags |= OMAP_CRYPTO_FORCE_COPY;

	ret = omap_crypto_align_sg(&dd->out_sg, cryptlen,
				   AES_BLOCK_SIZE, &dd->out_sgl,
				   flags,
				   FLAGS_OUT_DATA_ST_SHIFT, &dd->flags);
	if (ret)
		return ret;

	dd->in_sg_len = sg_nents_for_len(dd->in_sg, alen + clen);
	dd->out_sg_len = sg_nents_for_len(dd->out_sg, clen);

	return 0;
}
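
/* Completion callback for the fallback skcipher (ctx->ctr) used by do_encrypt_iv(). */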
static void omap_aes_gcm_complete(struct crypto_async_request *req, int err)
{
	struct omap_aes_gcm_result *res = req->data;

	if (err == -EINPROGRESS)
		return;

	res->err = err;
	complete(&res->completion);
}
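
/*
 * Encrypt the initial counter block (IV || counter = 1) with the fallback
 * skcipher; the result is stored in rctx->auth_tag and later XORed with the
 * tag value read back from the hardware.
 */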
static int do_encrypt_iv(struct aead_request *req, u32 *tag, u32 *iv)
{
	struct scatterlist iv_sg, tag_sg;
	struct skcipher_request *sk_req;
	struct omap_aes_gcm_result result;
	struct omap_aes_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
	int ret = 0;

	sk_req = skcipher_request_alloc(ctx->ctr, GFP_KERNEL);
	if (!sk_req) {
		pr_err("skcipher: Failed to allocate request\n");
		return -ENOMEM;
	}

	init_completion(&result.completion);

	sg_init_one(&iv_sg, iv, AES_BLOCK_SIZE);
	sg_init_one(&tag_sg, tag, AES_BLOCK_SIZE);
	skcipher_request_set_callback(sk_req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				      omap_aes_gcm_complete, &result);
	ret = crypto_skcipher_setkey(ctx->ctr, (u8 *)ctx->key, ctx->keylen);
	skcipher_request_set_crypt(sk_req, &iv_sg, &tag_sg, AES_BLOCK_SIZE,
				   NULL);
	ret = crypto_skcipher_encrypt(sk_req);
	switch (ret) {
	case 0:
		break;
	case -EINPROGRESS:
	case -EBUSY:
		ret = wait_for_completion_interruptible(&result.completion);
		if (!ret) {
			ret = result.err;
			if (!ret) {
				reinit_completion(&result.completion);
				break;
			}
		}
		/* fall through */
	default:
		pr_err("Encryption of IV failed for GCM mode\n");
		break;
	}

	skcipher_request_free(sk_req);
	return ret;
}
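
/*
 * DMA-out callback: fold the hardware tag registers into rctx->auth_tag.
 * For decryption the tag carried in the source buffer is XORed in as well,
 * so a matching tag leaves rctx->auth_tag all-zero for
 * omap_aes_gcm_done_task() to check.
 */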
void omap_aes_gcm_dma_out_callback(void *data)
{
	struct omap_aes_dev *dd = data;
	struct omap_aes_reqctx *rctx;
	int i, val;
	u32 *auth_tag, tag[4];

	if (!(dd->flags & FLAGS_ENCRYPT))
		scatterwalk_map_and_copy(tag, dd->aead_req->src,
					 dd->total + dd->aead_req->assoclen,
					 dd->authsize, 0);

	rctx = aead_request_ctx(dd->aead_req);
	auth_tag = (u32 *)rctx->auth_tag;
	for (i = 0; i < 4; i++) {
		val = omap_aes_read(dd, AES_REG_TAG_N(dd, i));
		auth_tag[i] = val ^ auth_tag[i];
		if (!(dd->flags & FLAGS_ENCRYPT))
			auth_tag[i] = auth_tag[i] ^ tag[i];
	}

	omap_aes_gcm_done_task(dd);
}
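
/*
 * Queue handling is serialized by dd->lock: new requests are enqueued on
 * dd->aead_queue, and only one request owns the hardware at a time
 * (FLAGS_BUSY). Any backlogged request is notified with -EINPROGRESS before
 * the dequeued request is programmed and DMA is started.
 */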
static int omap_aes_gcm_handle_queue(struct omap_aes_dev *dd,
				     struct aead_request *req)
{
	struct omap_aes_ctx *ctx;
	struct aead_request *backlog;
	struct omap_aes_reqctx *rctx;
	unsigned long flags;
	int err, ret = 0;

	spin_lock_irqsave(&dd->lock, flags);
	if (req)
		ret = aead_enqueue_request(&dd->aead_queue, req);
	if (dd->flags & FLAGS_BUSY) {
		spin_unlock_irqrestore(&dd->lock, flags);
		return ret;
	}

	backlog = aead_get_backlog(&dd->aead_queue);
	req = aead_dequeue_request(&dd->aead_queue);
	if (req)
		dd->flags |= FLAGS_BUSY;
	spin_unlock_irqrestore(&dd->lock, flags);

	if (!req)
		return ret;

	if (backlog)
		backlog->base.complete(&backlog->base, -EINPROGRESS);

	ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
	rctx = aead_request_ctx(req);

	dd->ctx = ctx;
	rctx->dd = dd;
	dd->aead_req = req;

	rctx->mode &= FLAGS_MODE_MASK;
	dd->flags = (dd->flags & ~FLAGS_MODE_MASK) | rctx->mode;

	err = omap_aes_gcm_copy_buffers(dd, req);
	if (err)
		return err;

	err = omap_aes_write_ctrl(dd);
	if (!err)
		err = omap_aes_crypt_dma_start(dd);

	if (err) {
		omap_aes_gcm_finish_req(dd, err);
		omap_aes_gcm_handle_queue(dd, NULL);
	}

	return ret;
}
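
/*
 * Common entry point for all GCM variants: pre-encrypt the counter block,
 * and if there is neither AAD nor payload the encrypted counter block is the
 * final tag, so it is written straight to the destination without touching
 * the hardware. Otherwise the request is queued for the accelerator.
 */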
static int omap_aes_gcm_crypt(struct aead_request *req, unsigned long mode)
{
	struct omap_aes_reqctx *rctx = aead_request_ctx(req);
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	unsigned int authlen = crypto_aead_authsize(aead);
	struct omap_aes_dev *dd;
	__be32 counter = cpu_to_be32(1);
	int err, assoclen;

	memset(rctx->auth_tag, 0, sizeof(rctx->auth_tag));
	memcpy(rctx->iv + GCM_AES_IV_SIZE, &counter, 4);

	err = do_encrypt_iv(req, (u32 *)rctx->auth_tag, (u32 *)rctx->iv);
	if (err)
		return err;

	if (mode & FLAGS_RFC4106_GCM)
		assoclen = req->assoclen - 8;
	else
		assoclen = req->assoclen;
	if (assoclen + req->cryptlen == 0) {
		scatterwalk_map_and_copy(rctx->auth_tag, req->dst, 0, authlen,
					 1);
		return 0;
	}

	dd = omap_aes_find_dev(rctx);
	if (!dd)
		return -ENODEV;
	rctx->mode = mode;

	return omap_aes_gcm_handle_queue(dd, req);
}
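
/* Plain GCM entry points: the 12-byte IV comes from the request. */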
int omap_aes_gcm_encrypt(struct aead_request *req)
{
	struct omap_aes_reqctx *rctx = aead_request_ctx(req);

	memcpy(rctx->iv, req->iv, GCM_AES_IV_SIZE);
	return omap_aes_gcm_crypt(req, FLAGS_ENCRYPT | FLAGS_GCM);
}

int omap_aes_gcm_decrypt(struct aead_request *req)
{
	struct omap_aes_reqctx *rctx = aead_request_ctx(req);

	memcpy(rctx->iv, req->iv, GCM_AES_IV_SIZE);
	return omap_aes_gcm_crypt(req, FLAGS_GCM);
}
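
/*
 * RFC 4106 GCM entry points: the IV is the 4-byte nonce stored at setkey
 * time followed by the 8-byte per-request IV.
 */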
int omap_aes_4106gcm_encrypt(struct aead_request *req)
{
	struct omap_aes_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
	struct omap_aes_reqctx *rctx = aead_request_ctx(req);

	memcpy(rctx->iv, ctx->nonce, 4);
	memcpy(rctx->iv + 4, req->iv, 8);
	return omap_aes_gcm_crypt(req, FLAGS_ENCRYPT | FLAGS_GCM |
				  FLAGS_RFC4106_GCM);
}

int omap_aes_4106gcm_decrypt(struct aead_request *req)
{
	struct omap_aes_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
	struct omap_aes_reqctx *rctx = aead_request_ctx(req);

	memcpy(rctx->iv, ctx->nonce, 4);
	memcpy(rctx->iv + 4, req->iv, 8);
	return omap_aes_gcm_crypt(req, FLAGS_GCM | FLAGS_RFC4106_GCM);
}
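
/* Only standard AES key sizes (128/192/256 bits) are accepted. */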
int omap_aes_gcm_setkey(struct crypto_aead *tfm, const u8 *key,
			unsigned int keylen)
{
	struct omap_aes_ctx *ctx = crypto_aead_ctx(tfm);

	if (keylen != AES_KEYSIZE_128 && keylen != AES_KEYSIZE_192 &&
	    keylen != AES_KEYSIZE_256)
		return -EINVAL;

	memcpy(ctx->key, key, keylen);
	ctx->keylen = keylen;

	return 0;
}

int omap_aes_4106gcm_setkey(struct crypto_aead *tfm, const u8 *key,
			    unsigned int keylen)
{
	struct omap_aes_ctx *ctx = crypto_aead_ctx(tfm);

	if (keylen < 4)
		return -EINVAL;

	keylen -= 4;
	if (keylen != AES_KEYSIZE_128 && keylen != AES_KEYSIZE_192 &&
	    keylen != AES_KEYSIZE_256)
		return -EINVAL;

	memcpy(ctx->key, key, keylen);
	memcpy(ctx->nonce, key + keylen, 4);
	ctx->keylen = keylen;

	return 0;
}