/*
 * Asynchronous Cryptographic Hash operations.
 *
 * This is the asynchronous version of hash.c with notification of
 * completion via a callback.
 *
 * Copyright (c) 2008 Loc Ho <lho@amcc.com>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#include <crypto/internal/hash.h>
#include <crypto/scatterwalk.h>
#include <linux/err.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/sched.h>
#include <linux/slab.h>
#include <linux/seq_file.h>

#include "internal.h"
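/*
 * Consumer-side usage, as a rough sketch only (the "sha1" name, my_done_cb,
 * my_ctx, sg, digest and nbytes below are placeholders, not part of this
 * file):
 *
 *	struct crypto_ahash *tfm = crypto_alloc_ahash("sha1", 0, 0);
 *	struct ahash_request *req = ahash_request_alloc(tfm, GFP_KERNEL);
 *
 *	ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
 *				   my_done_cb, my_ctx);
 *	ahash_request_set_crypt(req, sg, digest, nbytes);
 *
 *	err = crypto_ahash_digest(req);    (may complete asynchronously
 *					    with -EINPROGRESS)
 *
 *	ahash_request_free(req);
 *	crypto_free_ahash(tfm);
 */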
struct ahash_request_priv {
	crypto_completion_t complete;
	void *data;
	u8 *result;
	void *ubuf[] CRYPTO_MINALIGN_ATTR;
};
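/*
 * ahash_request_priv (above) stashes the caller's completion callback,
 * callback data and result pointer while the core temporarily redirects
 * req->result into the aligned bounce buffer ubuf[]; see
 * ahash_op_unaligned() and ahash_def_finup() below.
 */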
static inline struct ahash_alg *crypto_ahash_alg(struct crypto_ahash *hash)
{
	return container_of(crypto_hash_alg_common(hash), struct ahash_alg,
			    halg);
}
static int hash_walk_next(struct crypto_hash_walk *walk)
{
	unsigned int alignmask = walk->alignmask;
	unsigned int offset = walk->offset;
	unsigned int nbytes = min(walk->entrylen,
				  ((unsigned int)(PAGE_SIZE)) - offset);

	walk->data = crypto_kmap(walk->pg, 0);
	walk->data += offset;

	if (offset & alignmask)
		nbytes = alignmask + 1 - (offset & alignmask);

	walk->entrylen -= nbytes;
	return nbytes;
}
static int hash_walk_new_entry(struct crypto_hash_walk *walk)
{
	struct scatterlist *sg;

	sg = walk->sg;
	walk->pg = sg_page(sg);
	walk->offset = sg->offset;
	walk->entrylen = sg->length;

	if (walk->entrylen > walk->total)
		walk->entrylen = walk->total;
	walk->total -= walk->entrylen;

	return hash_walk_next(walk);
}
int crypto_hash_walk_done(struct crypto_hash_walk *walk, int err)
{
	unsigned int alignmask = walk->alignmask;
	unsigned int nbytes = walk->entrylen;

	walk->data -= walk->offset;

	if (nbytes && walk->offset & alignmask && !err) {
		walk->offset += alignmask - 1;
		walk->offset = ALIGN(walk->offset, alignmask + 1);
		walk->data += walk->offset;

		nbytes = min(nbytes,
			     ((unsigned int)(PAGE_SIZE)) - walk->offset);
		walk->entrylen -= nbytes;

		return nbytes;
	}

	crypto_kunmap(walk->data, 0);
	crypto_yield(walk->flags);

	if (err)
		return err;

	if (nbytes) {
		walk->offset = 0;
		walk->pg++;
		return hash_walk_next(walk);
	}

	if (!walk->total)
		return 0;

	walk->sg = scatterwalk_sg_next(walk->sg);

	return hash_walk_new_entry(walk);
}
EXPORT_SYMBOL_GPL(crypto_hash_walk_done);
int crypto_hash_walk_first(struct ahash_request *req,
			   struct crypto_hash_walk *walk)
{
	walk->total = req->nbytes;

	if (!walk->total)
		return 0;

	walk->alignmask = crypto_ahash_alignmask(crypto_ahash_reqtfm(req));
	walk->sg = req->src;
	walk->flags = req->base.flags;

	return hash_walk_new_entry(walk);
}
EXPORT_SYMBOL_GPL(crypto_hash_walk_first);
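/*
 * Illustrative walk loop as used by hash implementations (sketch only;
 * process_block() is a hypothetical helper, and a real caller feeds the
 * error from each step back into crypto_hash_walk_done()):
 *
 *	for (nbytes = crypto_hash_walk_first(req, &walk); nbytes > 0;
 *	     nbytes = crypto_hash_walk_done(&walk, err))
 *		err = process_block(ctx, walk.data, nbytes);
 */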
int crypto_hash_walk_first_compat(struct hash_desc *hdesc,
				  struct crypto_hash_walk *walk,
				  struct scatterlist *sg, unsigned int len)
{
	walk->total = len;

	if (!walk->total)
		return 0;

	walk->alignmask = crypto_hash_alignmask(hdesc->tfm);
	walk->sg = sg;
	walk->flags = hdesc->flags;

	return hash_walk_new_entry(walk);
}
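/*
 * Key material handed to crypto_ahash_setkey() may violate the algorithm's
 * alignment mask; in that case it is copied into a kmalloc'd buffer, aligned
 * up, and passed to the transform's ->setkey() from there.
 */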
static int ahash_setkey_unaligned(struct crypto_ahash *tfm, const u8 *key,
				  unsigned int keylen)
{
	unsigned long alignmask = crypto_ahash_alignmask(tfm);
	int ret;
	u8 *buffer, *alignbuffer;
	unsigned long absize;

	absize = keylen + alignmask;
	buffer = kmalloc(absize, GFP_KERNEL);
	if (!buffer)
		return -ENOMEM;

	alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
	memcpy(alignbuffer, key, keylen);
	ret = tfm->setkey(tfm, alignbuffer, keylen);
	kzfree(buffer);
	return ret;
}
int crypto_ahash_setkey(struct crypto_ahash *tfm, const u8 *key,
			unsigned int keylen)
{
	unsigned long alignmask = crypto_ahash_alignmask(tfm);

	if ((unsigned long)key & alignmask)
		return ahash_setkey_unaligned(tfm, key, keylen);

	return tfm->setkey(tfm, key, keylen);
}
EXPORT_SYMBOL_GPL(crypto_ahash_setkey);
static int ahash_nosetkey(struct crypto_ahash *tfm, const u8 *key,
			  unsigned int keylen)
{
	return -ENOSYS;
}
static inline unsigned int ahash_align_buffer_size(unsigned len,
						   unsigned long mask)
{
	return len + (mask & ~(crypto_tfm_ctx_alignment() - 1));
}
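/*
 * For final/finup/digest the caller's result buffer may also be misaligned.
 * ahash_op_unaligned() saves the original completion, data and result in an
 * ahash_request_priv, points req->result at the aligned ubuf[] bounce buffer,
 * runs the operation, and copies the digest back in the finish/done handlers
 * (the done handler covers asynchronous completion).
 */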
static void ahash_op_unaligned_finish(struct ahash_request *req, int err)
{
	struct ahash_request_priv *priv = req->priv;

	if (err == -EINPROGRESS)
		return;

	if (!err)
		memcpy(priv->result, req->result,
		       crypto_ahash_digestsize(crypto_ahash_reqtfm(req)));

	kzfree(priv);
}
static void ahash_op_unaligned_done(struct crypto_async_request *req, int err)
{
	struct ahash_request *areq = req->data;
	struct ahash_request_priv *priv = areq->priv;
	crypto_completion_t complete = priv->complete;
	void *data = priv->data;

	ahash_op_unaligned_finish(areq, err);

	complete(data, err);
}
static int ahash_op_unaligned(struct ahash_request *req,
			      int (*op)(struct ahash_request *))
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	unsigned long alignmask = crypto_ahash_alignmask(tfm);
	unsigned int ds = crypto_ahash_digestsize(tfm);
	struct ahash_request_priv *priv;
	int err;

	priv = kmalloc(sizeof(*priv) + ahash_align_buffer_size(ds, alignmask),
		       (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
		       GFP_KERNEL : GFP_ATOMIC);
	if (!priv)
		return -ENOMEM;

	priv->result = req->result;
	priv->complete = req->base.complete;
	priv->data = req->base.data;

	req->result = PTR_ALIGN((u8 *)priv->ubuf, alignmask + 1);
	req->base.complete = ahash_op_unaligned_done;
	req->base.data = req;
	req->priv = priv;

	err = op(req);
	ahash_op_unaligned_finish(req, err);

	return err;
}
static int crypto_ahash_op(struct ahash_request *req,
			   int (*op)(struct ahash_request *))
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	unsigned long alignmask = crypto_ahash_alignmask(tfm);

	if ((unsigned long)req->result & alignmask)
		return ahash_op_unaligned(req, op);

	return op(req);
}
int crypto_ahash_final(struct ahash_request *req)
{
	return crypto_ahash_op(req, crypto_ahash_reqtfm(req)->final);
}
EXPORT_SYMBOL_GPL(crypto_ahash_final);

int crypto_ahash_finup(struct ahash_request *req)
{
	return crypto_ahash_op(req, crypto_ahash_reqtfm(req)->finup);
}
EXPORT_SYMBOL_GPL(crypto_ahash_finup);

int crypto_ahash_digest(struct ahash_request *req)
{
	return crypto_ahash_op(req, crypto_ahash_reqtfm(req)->digest);
}
EXPORT_SYMBOL_GPL(crypto_ahash_digest);
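/*
 * Default finup: for algorithms that provide no ->finup(), chain ->update()
 * and ->final() using the same bounce-buffer mechanism.  done1/finish1 run
 * after the update step, done2/finish2 after the final step; the saved
 * completion is invoked only once the digest has been copied back.
 */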
static void ahash_def_finup_finish2(struct ahash_request *req, int err)
{
	struct ahash_request_priv *priv = req->priv;

	if (err == -EINPROGRESS)
		return;

	if (!err)
		memcpy(priv->result, req->result,
		       crypto_ahash_digestsize(crypto_ahash_reqtfm(req)));

	kzfree(priv);
}
static void ahash_def_finup_done2(struct crypto_async_request *req, int err)
{
	struct ahash_request *areq = req->data;
	struct ahash_request_priv *priv = areq->priv;
	crypto_completion_t complete = priv->complete;
	void *data = priv->data;

	ahash_def_finup_finish2(areq, err);

	complete(data, err);
}
static int ahash_def_finup_finish1(struct ahash_request *req, int err)
{
	if (err)
		goto out;

	req->base.complete = ahash_def_finup_done2;
	req->base.flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
	err = crypto_ahash_reqtfm(req)->final(req);

out:
	ahash_def_finup_finish2(req, err);
	return err;
}
static void ahash_def_finup_done1(struct crypto_async_request *req, int err)
{
	struct ahash_request *areq = req->data;
	struct ahash_request_priv *priv = areq->priv;
	crypto_completion_t complete = priv->complete;
	void *data = priv->data;

	err = ahash_def_finup_finish1(areq, err);

	complete(data, err);
}
static int ahash_def_finup(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	unsigned long alignmask = crypto_ahash_alignmask(tfm);
	unsigned int ds = crypto_ahash_digestsize(tfm);
	struct ahash_request_priv *priv;

	priv = kmalloc(sizeof(*priv) + ahash_align_buffer_size(ds, alignmask),
		       (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
		       GFP_KERNEL : GFP_ATOMIC);
	if (!priv)
		return -ENOMEM;

	priv->result = req->result;
	priv->complete = req->base.complete;
	priv->data = req->base.data;

	req->result = PTR_ALIGN((u8 *)priv->ubuf, alignmask + 1);
	req->base.complete = ahash_def_finup_done1;
	req->base.data = req;
	req->priv = priv;

	return ahash_def_finup_finish1(req, tfm->update(req));
}
static int ahash_no_export(struct ahash_request *req, void *out)
{
	return -ENOSYS;
}
static int ahash_no_import(struct ahash_request *req, const void *in)
{
	return -ENOSYS;
}
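/*
 * Transform initialisation: start with stub setkey/export/import, fall back
 * to the shash wrappers for algorithms that are not native ahash, otherwise
 * wire the ops straight from the ahash_alg (with ahash_def_finup standing in
 * for a missing ->finup()).
 */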
static int crypto_ahash_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_ahash *hash = __crypto_ahash_cast(tfm);
	struct ahash_alg *alg = crypto_ahash_alg(hash);

	hash->setkey = ahash_nosetkey;
	hash->export = ahash_no_export;
	hash->import = ahash_no_import;

	if (tfm->__crt_alg->cra_type != &crypto_ahash_type)
		return crypto_init_shash_ops_async(tfm);

	hash->init = alg->init;
	hash->update = alg->update;
	hash->final = alg->final;
	hash->finup = alg->finup ?: ahash_def_finup;
	hash->digest = alg->digest;

	if (alg->setkey)
		hash->setkey = alg->setkey;
	if (alg->export)
		hash->export = alg->export;
	if (alg->import)
		hash->import = alg->import;

	return 0;
}
static unsigned int crypto_ahash_extsize(struct crypto_alg *alg)
{
	if (alg->cra_type == &crypto_ahash_type)
		return alg->cra_ctxsize;

	return sizeof(struct crypto_shash *);
}
static void crypto_ahash_show(struct seq_file *m, struct crypto_alg *alg)
	__attribute__ ((unused));
static void crypto_ahash_show(struct seq_file *m, struct crypto_alg *alg)
{
	seq_printf(m, "type         : ahash\n");
	seq_printf(m, "async        : %s\n", alg->cra_flags & CRYPTO_ALG_ASYNC ?
					     "yes" : "no");
	seq_printf(m, "blocksize    : %u\n", alg->cra_blocksize);
	seq_printf(m, "digestsize   : %u\n",
		   __crypto_hash_alg_common(alg)->digestsize);
}
const struct crypto_type crypto_ahash_type = {
	.extsize = crypto_ahash_extsize,
	.init_tfm = crypto_ahash_init_tfm,
#ifdef CONFIG_PROC_FS
	.show = crypto_ahash_show,
#endif
	.maskclear = ~CRYPTO_ALG_TYPE_MASK,
	.maskset = CRYPTO_ALG_TYPE_AHASH_MASK,
	.type = CRYPTO_ALG_TYPE_AHASH,
	.tfmsize = offsetof(struct crypto_ahash, base),
};
EXPORT_SYMBOL_GPL(crypto_ahash_type);
struct crypto_ahash *crypto_alloc_ahash(const char *alg_name, u32 type,
					u32 mask)
{
	return crypto_alloc_tfm(alg_name, &crypto_ahash_type, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_alloc_ahash);
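/*
 * Registration: ahash_prepare_alg() sanity-checks digestsize/statesize
 * against PAGE_SIZE / 8 and stamps the algorithm as CRYPTO_ALG_TYPE_AHASH
 * before it is handed to the generic registration code.  It is shared by
 * crypto_register_ahash() and ahash_register_instance().
 */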
static int ahash_prepare_alg(struct ahash_alg *alg)
{
	struct crypto_alg *base = &alg->halg.base;

	if (alg->halg.digestsize > PAGE_SIZE / 8 ||
	    alg->halg.statesize > PAGE_SIZE / 8)
		return -EINVAL;

	base->cra_type = &crypto_ahash_type;
	base->cra_flags &= ~CRYPTO_ALG_TYPE_MASK;
	base->cra_flags |= CRYPTO_ALG_TYPE_AHASH;

	return 0;
}
int crypto_register_ahash(struct ahash_alg *alg)
{
	struct crypto_alg *base = &alg->halg.base;
	int err;

	err = ahash_prepare_alg(alg);
	if (err)
		return err;

	return crypto_register_alg(base);
}
EXPORT_SYMBOL_GPL(crypto_register_ahash);
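/*
 * Driver-side registration, as a hedged sketch (all "mydrv" names and the
 * context structure are placeholders, not defined in this file):
 *
 *	static struct ahash_alg mydrv_sha1 = {
 *		.init	= mydrv_init,
 *		.update	= mydrv_update,
 *		.final	= mydrv_final,
 *		.digest	= mydrv_digest,
 *		.halg	= {
 *			.digestsize = SHA1_DIGEST_SIZE,
 *			.base	= {
 *				.cra_name	 = "sha1",
 *				.cra_driver_name = "sha1-mydrv",
 *				.cra_priority	 = 300,
 *				.cra_flags	 = CRYPTO_ALG_TYPE_AHASH |
 *						   CRYPTO_ALG_ASYNC,
 *				.cra_blocksize	 = SHA1_BLOCK_SIZE,
 *				.cra_ctxsize	 = sizeof(struct mydrv_ctx),
 *				.cra_module	 = THIS_MODULE,
 *			},
 *		},
 *	};
 *
 *	err = crypto_register_ahash(&mydrv_sha1);
 */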
int crypto_unregister_ahash(struct ahash_alg *alg)
{
	return crypto_unregister_alg(&alg->halg.base);
}
EXPORT_SYMBOL_GPL(crypto_unregister_ahash);
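/*
 * Template/instance support: templates building asynchronous hash instances
 * reuse ahash_prepare_alg() via ahash_register_instance();
 * crypto_init_ahash_spawn() ties an instance to its underlying algorithm,
 * and ahash_attr_alg() resolves the algorithm named in the template
 * parameters.
 */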
int ahash_register_instance(struct crypto_template *tmpl,
			    struct ahash_instance *inst)
{
	int err;

	err = ahash_prepare_alg(&inst->alg);
	if (err)
		return err;

	return crypto_register_instance(tmpl, ahash_crypto_instance(inst));
}
EXPORT_SYMBOL_GPL(ahash_register_instance);
void ahash_free_instance(struct crypto_instance *inst)
{
	crypto_drop_spawn(crypto_instance_ctx(inst));
	kfree(ahash_instance(inst));
}
EXPORT_SYMBOL_GPL(ahash_free_instance);
int crypto_init_ahash_spawn(struct crypto_ahash_spawn *spawn,
			    struct hash_alg_common *alg,
			    struct crypto_instance *inst)
{
	return crypto_init_spawn2(&spawn->base, &alg->base, inst,
				  &crypto_ahash_type);
}
EXPORT_SYMBOL_GPL(crypto_init_ahash_spawn);
struct hash_alg_common *ahash_attr_alg(struct rtattr *rta, u32 type, u32 mask)
{
	struct crypto_alg *alg;

	alg = crypto_attr_alg2(rta, &crypto_ahash_type, type, mask);
	return IS_ERR(alg) ? ERR_CAST(alg) : __crypto_hash_alg_common(alg);
}
EXPORT_SYMBOL_GPL(ahash_attr_alg);
MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Asynchronous cryptographic hash type");