/*
 * Accelerated GHASH implementation with Intel PCLMULQDQ-NI
 * instructions. This file contains glue code.
 *
 * Copyright (c) 2009 Intel Corp.
 *   Author: Huang Ying <ying.huang@intel.com>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 as published
 * by the Free Software Foundation.
 */

#include <linux/err.h>
#include <linux/module.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/crypto.h>
#include <crypto/algapi.h>
#include <crypto/cryptd.h>
#include <crypto/gf128mul.h>
#include <crypto/internal/hash.h>
#include <asm/i387.h>	/* kernel_fpu_begin()/kernel_fpu_end(), irq_fpu_usable() */

#define GHASH_BLOCK_SIZE	16
#define GHASH_DIGEST_SIZE	16

void clmul_ghash_mul(char *dst, const be128 *shash);

void clmul_ghash_update(char *dst, const char *src, unsigned int srclen,
			const be128 *shash);

void clmul_ghash_setkey(be128 *shash, const u8 *key);

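/*
 * The three clmul_ghash_* routines above are implemented in assembly and
 * perform the carry-less multiplications in GF(2^128) using the PCLMULQDQ
 * instruction; this file only provides the crypto API glue around them.
 */
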
struct ghash_async_ctx {
	struct cryptd_ahash *cryptd_tfm;
};

struct ghash_ctx {
	be128 shash;
};

struct ghash_desc_ctx {
	u8 buffer[GHASH_BLOCK_SIZE];
	u32 bytes;
};

static int ghash_init(struct shash_desc *desc)
{
	struct ghash_desc_ctx *dctx = shash_desc_ctx(desc);

	memset(dctx, 0, sizeof(*dctx));

	return 0;
}

static int ghash_setkey(struct crypto_shash *tfm,
			const u8 *key, unsigned int keylen)
{
	struct ghash_ctx *ctx = crypto_shash_ctx(tfm);

	if (keylen != GHASH_BLOCK_SIZE) {
		crypto_shash_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}

	clmul_ghash_setkey(&ctx->shash, key);

	return 0;
}

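/*
 * GHASH operates on 16-byte blocks.  Input that does not end on a block
 * boundary is buffered in dctx->buffer; full blocks are folded into the
 * digest by the PCLMULQDQ helpers, which must run between
 * kernel_fpu_begin() and kernel_fpu_end() because they use XMM registers.
 */
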
static int ghash_update(struct shash_desc *desc,
			const u8 *src, unsigned int srclen)
{
	struct ghash_desc_ctx *dctx = shash_desc_ctx(desc);
	struct ghash_ctx *ctx = crypto_shash_ctx(desc->tfm);
	u8 *dst = dctx->buffer;

	kernel_fpu_begin();
	if (dctx->bytes) {
		/* Fill up the partially buffered block first. */
		int n = min(srclen, dctx->bytes);
		u8 *pos = dst + (GHASH_BLOCK_SIZE - dctx->bytes);

		dctx->bytes -= n;
		srclen -= n;

		while (n--)
			*pos++ ^= *src++;

		if (!dctx->bytes)
			clmul_ghash_mul(dst, &ctx->shash);
	}

	clmul_ghash_update(dst, src, srclen, &ctx->shash);
	kernel_fpu_end();

	if (srclen & 0xf) {
		/* Buffer the trailing partial block for the next call. */
		src += srclen - (srclen & 0xf);
		srclen &= 0xf;
		dctx->bytes = GHASH_BLOCK_SIZE - srclen;
		memcpy(dst, src, srclen);
	}

	return 0;
}

static void ghash_flush(struct ghash_ctx *ctx, struct ghash_desc_ctx *dctx)
{
	u8 *dst = dctx->buffer;

	kernel_fpu_begin();
	if (dctx->bytes) {
		/* Zero-pad the buffered partial block and fold it in. */
		u8 *tmp = dst + (GHASH_BLOCK_SIZE - dctx->bytes);

		while (dctx->bytes--)
			*tmp++ ^= 0;

		clmul_ghash_mul(dst, &ctx->shash);
	}
	kernel_fpu_end();

	dctx->bytes = 0;
}

static int ghash_final(struct shash_desc *desc, u8 *dst)
{
	struct ghash_desc_ctx *dctx = shash_desc_ctx(desc);
	struct ghash_ctx *ctx = crypto_shash_ctx(desc->tfm);
	u8 *buf = dctx->buffer;

	ghash_flush(ctx, dctx);
	memcpy(dst, buf, GHASH_BLOCK_SIZE);

	return 0;
}

static struct shash_alg ghash_alg = {
	.digestsize	= GHASH_DIGEST_SIZE,
	.init		= ghash_init,
	.update		= ghash_update,
	.final		= ghash_final,
	.setkey		= ghash_setkey,
	.descsize	= sizeof(struct ghash_desc_ctx),
	.base		= {
		.cra_name		= "__ghash",
		.cra_driver_name	= "__ghash-pclmulqdqni",
		.cra_priority		= 0,
		.cra_flags		= CRYPTO_ALG_TYPE_SHASH,
		.cra_blocksize		= GHASH_BLOCK_SIZE,
		.cra_ctxsize		= sizeof(struct ghash_ctx),
		.cra_module		= THIS_MODULE,
		.cra_list		= LIST_HEAD_INIT(ghash_alg.base.cra_list),
	},
};

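/*
 * The asynchronous "ghash" wrappers below dispatch directly to the
 * synchronous "__ghash" implementation when the FPU may be used in the
 * current context, and defer the request to cryptd (a process-context
 * worker) when irq_fpu_usable() reports that it may not.
 */
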
static int ghash_async_init(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
	struct ahash_request *cryptd_req = ahash_request_ctx(req);
	struct cryptd_ahash *cryptd_tfm = ctx->cryptd_tfm;

	if (!irq_fpu_usable()) {
		memcpy(cryptd_req, req, sizeof(*req));
		ahash_request_set_tfm(cryptd_req, &cryptd_tfm->base);
		return crypto_ahash_init(cryptd_req);
	} else {
		struct shash_desc *desc = cryptd_shash_desc(cryptd_req);
		struct crypto_shash *child = cryptd_ahash_child(cryptd_tfm);

		desc->tfm = child;
		desc->flags = req->base.flags;
		return crypto_shash_init(desc);
	}
}

static int ghash_async_update(struct ahash_request *req)
{
	struct ahash_request *cryptd_req = ahash_request_ctx(req);

	if (!irq_fpu_usable()) {
		struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
		struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
		struct cryptd_ahash *cryptd_tfm = ctx->cryptd_tfm;

		memcpy(cryptd_req, req, sizeof(*req));
		ahash_request_set_tfm(cryptd_req, &cryptd_tfm->base);
		return crypto_ahash_update(cryptd_req);
	} else {
		struct shash_desc *desc = cryptd_shash_desc(cryptd_req);
		return shash_ahash_update(req, desc);
	}
}

static int ghash_async_final(struct ahash_request *req)
{
	struct ahash_request *cryptd_req = ahash_request_ctx(req);

	if (!irq_fpu_usable()) {
		struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
		struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
		struct cryptd_ahash *cryptd_tfm = ctx->cryptd_tfm;

		memcpy(cryptd_req, req, sizeof(*req));
		ahash_request_set_tfm(cryptd_req, &cryptd_tfm->base);
		return crypto_ahash_final(cryptd_req);
	} else {
		struct shash_desc *desc = cryptd_shash_desc(cryptd_req);
		return crypto_shash_final(desc, req->result);
	}
}

static int ghash_async_digest(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
	struct ahash_request *cryptd_req = ahash_request_ctx(req);
	struct cryptd_ahash *cryptd_tfm = ctx->cryptd_tfm;

	if (!irq_fpu_usable()) {
		memcpy(cryptd_req, req, sizeof(*req));
		ahash_request_set_tfm(cryptd_req, &cryptd_tfm->base);
		return crypto_ahash_digest(cryptd_req);
	} else {
		struct shash_desc *desc = cryptd_shash_desc(cryptd_req);
		struct crypto_shash *child = cryptd_ahash_child(cryptd_tfm);

		desc->tfm = child;
		desc->flags = req->base.flags;
		return shash_ahash_digest(req, desc);
	}
}

static int ghash_async_setkey(struct crypto_ahash *tfm, const u8 *key,
			      unsigned int keylen)
{
	struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
	struct crypto_ahash *child = &ctx->cryptd_tfm->base;
	int err;

	crypto_ahash_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_ahash_set_flags(child, crypto_ahash_get_flags(tfm)
			       & CRYPTO_TFM_REQ_MASK);
	err = crypto_ahash_setkey(child, key, keylen);
	crypto_ahash_set_flags(tfm, crypto_ahash_get_flags(child)
			       & CRYPTO_TFM_RES_MASK);

	return err;
}

static int ghash_async_init_tfm(struct crypto_tfm *tfm)
{
	struct cryptd_ahash *cryptd_tfm;
	struct ghash_async_ctx *ctx = crypto_tfm_ctx(tfm);

	cryptd_tfm = cryptd_alloc_ahash("__ghash-pclmulqdqni", 0, 0);
	if (IS_ERR(cryptd_tfm))
		return PTR_ERR(cryptd_tfm);
	ctx->cryptd_tfm = cryptd_tfm;
	crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
				 sizeof(struct ahash_request) +
				 crypto_ahash_reqsize(&cryptd_tfm->base));

	return 0;
}

static void ghash_async_exit_tfm(struct crypto_tfm *tfm)
{
	struct ghash_async_ctx *ctx = crypto_tfm_ctx(tfm);

	cryptd_free_ahash(ctx->cryptd_tfm);
}

static struct ahash_alg ghash_async_alg = {
	.init		= ghash_async_init,
	.update		= ghash_async_update,
	.final		= ghash_async_final,
	.setkey		= ghash_async_setkey,
	.digest		= ghash_async_digest,
	.halg = {
		.digestsize	= GHASH_DIGEST_SIZE,
		.base = {
			.cra_name		= "ghash",
			.cra_driver_name	= "ghash-clmulni",
			.cra_priority		= 400,
			.cra_flags		= CRYPTO_ALG_TYPE_AHASH | CRYPTO_ALG_ASYNC,
			.cra_blocksize		= GHASH_BLOCK_SIZE,
			.cra_type		= &crypto_ahash_type,
			.cra_module		= THIS_MODULE,
			.cra_list		= LIST_HEAD_INIT(ghash_async_alg.halg.base.cra_list),
			.cra_init		= ghash_async_init_tfm,
			.cra_exit		= ghash_async_exit_tfm,
		},
	},
};

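/*
 * Usage sketch (illustrative, not part of this driver): a consumer obtains
 * this implementation through the generic crypto API by name, e.g.
 * crypto_alloc_ahash("ghash", 0, 0); the crypto core then selects
 * "ghash-clmulni" over the generic C driver because of its higher
 * cra_priority.
 */
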
static int __init ghash_pclmulqdqni_mod_init(void)
{
	int err;

	if (!cpu_has_pclmulqdq) {
		printk(KERN_INFO "Intel PCLMULQDQ-NI instructions are not"
		       " detected.\n");
		return -ENODEV;
	}

	err = crypto_register_shash(&ghash_alg);
	if (err)
		goto err_out;
	err = crypto_register_ahash(&ghash_async_alg);
	if (err)
		goto err_shash;

	return 0;

err_shash:
	crypto_unregister_shash(&ghash_alg);
err_out:
	return err;
}

static void __exit ghash_pclmulqdqni_mod_exit(void)
{
	crypto_unregister_ahash(&ghash_async_alg);
	crypto_unregister_shash(&ghash_alg);
}

module_init(ghash_pclmulqdqni_mod_init);
module_exit(ghash_pclmulqdqni_mod_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("GHASH Message Digest Algorithm, "
		   "accelerated by PCLMULQDQ-NI");
MODULE_ALIAS("ghash");