/*
 * Synchronous Cryptographic Hash operations.
 *
 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#include <crypto/scatterwalk.h>
#include <crypto/internal/hash.h>
#include <linux/err.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/slab.h>
#include <linux/seq_file.h>
#include <linux/cryptouser.h>
#include <net/netlink.h>
#include <linux/compiler.h>

#include "internal.h"
static const struct crypto_type crypto_shash_type;
int shash_no_setkey(struct crypto_shash *tfm, const u8 *key,
		    unsigned int keylen)
{
	return -ENOSYS;
}
EXPORT_SYMBOL_GPL(shash_no_setkey);
static int shash_setkey_unaligned(struct crypto_shash *tfm, const u8 *key,
				  unsigned int keylen)
{
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	unsigned long absize;
	u8 *buffer, *alignbuffer;
	int err;

	absize = keylen + (alignmask & ~(crypto_tfm_ctx_alignment() - 1));
	buffer = kmalloc(absize, GFP_ATOMIC);
	if (!buffer)
		return -ENOMEM;

	alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
	memcpy(alignbuffer, key, keylen);
	err = shash->setkey(tfm, alignbuffer, keylen);
	kzfree(buffer);
	return err;
}
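
/*
 * Set the key for a keyed shash.  Keys that do not satisfy the
 * algorithm's alignmask are bounced through a temporary aligned buffer
 * first; on success the CRYPTO_TFM_NEED_KEY flag is cleared so the tfm
 * may be used.
 */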
int crypto_shash_setkey(struct crypto_shash *tfm, const u8 *key,
			unsigned int keylen)
{
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	int err;

	if ((unsigned long)key & alignmask)
		err = shash_setkey_unaligned(tfm, key, keylen);
	else
		err = shash->setkey(tfm, key, keylen);

	if (err)
		return err;

	crypto_shash_clear_flags(tfm, CRYPTO_TFM_NEED_KEY);
	return 0;
}
EXPORT_SYMBOL_GPL(crypto_shash_setkey);
static inline unsigned int shash_align_buffer_size(unsigned len,
						   unsigned long mask)
{
	typedef u8 __aligned_largest u8_aligned;
	return len + (mask & ~(__alignof__(u8_aligned) - 1));
}
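
/*
 * The *_unaligned helpers below handle callers whose data or output
 * pointers do not satisfy the algorithm's alignmask: the misaligned
 * prefix (or the digest) is staged through an on-stack buffer sized by
 * shash_align_buffer_size() and aligned with PTR_ALIGN().
 */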
static int shash_update_unaligned(struct shash_desc *desc, const u8 *data,
				  unsigned int len)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	unsigned int unaligned_len = alignmask + 1 -
				     ((unsigned long)data & alignmask);
	u8 ubuf[shash_align_buffer_size(unaligned_len, alignmask)]
		__aligned_largest;
	u8 *buf = PTR_ALIGN(&ubuf[0], alignmask + 1);
	int err;

	if (unaligned_len > len)
		unaligned_len = len;

	memcpy(buf, data, unaligned_len);
	err = shash->update(desc, buf, unaligned_len);
	memset(buf, 0, unaligned_len);

	return err ?:
	       shash->update(desc, data + unaligned_len, len - unaligned_len);
}
int crypto_shash_update(struct shash_desc *desc, const u8 *data,
			unsigned int len)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if ((unsigned long)data & alignmask)
		return shash_update_unaligned(desc, data, len);

	return shash->update(desc, data, len);
}
EXPORT_SYMBOL_GPL(crypto_shash_update);
static int shash_final_unaligned(struct shash_desc *desc, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned int ds = crypto_shash_digestsize(tfm);
	u8 ubuf[shash_align_buffer_size(ds, alignmask)]
		__aligned_largest;
	u8 *buf = PTR_ALIGN(&ubuf[0], alignmask + 1);
	int err;

	err = shash->final(desc, buf);
	if (err)
		goto out;

	memcpy(out, buf, ds);

out:
	memset(buf, 0, ds);
	return err;
}
int crypto_shash_final(struct shash_desc *desc, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if ((unsigned long)out & alignmask)
		return shash_final_unaligned(desc, out);

	return shash->final(desc, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_final);
static int shash_finup_unaligned(struct shash_desc *desc, const u8 *data,
				 unsigned int len, u8 *out)
{
	return crypto_shash_update(desc, data, len) ?:
	       crypto_shash_final(desc, out);
}

int crypto_shash_finup(struct shash_desc *desc, const u8 *data,
		       unsigned int len, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if (((unsigned long)data | (unsigned long)out) & alignmask)
		return shash_finup_unaligned(desc, data, len, out);

	return shash->finup(desc, data, len, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_finup);
static int shash_digest_unaligned(struct shash_desc *desc, const u8 *data,
				  unsigned int len, u8 *out)
{
	return crypto_shash_init(desc) ?:
	       crypto_shash_finup(desc, data, len, out);
}

int crypto_shash_digest(struct shash_desc *desc, const u8 *data,
			unsigned int len, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if (crypto_shash_get_flags(tfm) & CRYPTO_TFM_NEED_KEY)
		return -ENOKEY;

	if (((unsigned long)data | (unsigned long)out) & alignmask)
		return shash_digest_unaligned(desc, data, len, out);

	return shash->digest(desc, data, len, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_digest);
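
/*
 * The default export/import handlers simply copy the raw descriptor
 * state; shash_prepare_alg() installs them for algorithms that do not
 * provide their own.
 */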
static int shash_default_export(struct shash_desc *desc, void *out)
{
	memcpy(out, shash_desc_ctx(desc), crypto_shash_descsize(desc->tfm));
	return 0;
}

static int shash_default_import(struct shash_desc *desc, const void *in)
{
	memcpy(shash_desc_ctx(desc), in, crypto_shash_descsize(desc->tfm));
	return 0;
}
static int shash_async_setkey(struct crypto_ahash *tfm, const u8 *key,
			      unsigned int keylen)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(tfm);

	return crypto_shash_setkey(*ctx, key, keylen);
}

static int shash_async_init(struct ahash_request *req)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;
	desc->flags = req->base.flags;

	return crypto_shash_init(desc);
}
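
/*
 * The shash_ahash_* helpers let a synchronous shash algorithm service
 * the scatterlist-based ahash interface: crypto_hash_walk maps each
 * scatterlist segment in turn and feeds it to the shash operations.
 */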
int shash_ahash_update(struct ahash_request *req, struct shash_desc *desc)
{
	struct crypto_hash_walk walk;
	int nbytes;

	for (nbytes = crypto_hash_walk_first(req, &walk); nbytes > 0;
	     nbytes = crypto_hash_walk_done(&walk, nbytes))
		nbytes = crypto_shash_update(desc, walk.data, nbytes);

	return nbytes;
}
EXPORT_SYMBOL_GPL(shash_ahash_update);

static int shash_async_update(struct ahash_request *req)
{
	return shash_ahash_update(req, ahash_request_ctx(req));
}

static int shash_async_final(struct ahash_request *req)
{
	return crypto_shash_final(ahash_request_ctx(req), req->result);
}
int shash_ahash_finup(struct ahash_request *req, struct shash_desc *desc)
{
	struct crypto_hash_walk walk;
	int nbytes;

	nbytes = crypto_hash_walk_first(req, &walk);
	if (!nbytes)
		return crypto_shash_final(desc, req->result);

	do {
		nbytes = crypto_hash_walk_last(&walk) ?
			 crypto_shash_finup(desc, walk.data, nbytes,
					    req->result) :
			 crypto_shash_update(desc, walk.data, nbytes);
		nbytes = crypto_hash_walk_done(&walk, nbytes);
	} while (nbytes > 0);

	return nbytes;
}
EXPORT_SYMBOL_GPL(shash_ahash_finup);

static int shash_async_finup(struct ahash_request *req)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;
	desc->flags = req->base.flags;

	return shash_ahash_finup(req, desc);
}
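
/*
 * If the whole request fits within a single page, hash it directly from
 * an atomic kmap; otherwise fall back to init + finup over the hash walk.
 */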
int shash_ahash_digest(struct ahash_request *req, struct shash_desc *desc)
{
	unsigned int nbytes = req->nbytes;
	struct scatterlist *sg;
	unsigned int offset;
	int err;

	if (nbytes &&
	    (sg = req->src, offset = sg->offset,
	     nbytes < min(sg->length, ((unsigned int)(PAGE_SIZE)) - offset))) {
		void *data;

		data = kmap_atomic(sg_page(sg));
		err = crypto_shash_digest(desc, data + offset, nbytes,
					  req->result);
		kunmap_atomic(data);
		crypto_yield(desc->flags);
	} else
		err = crypto_shash_init(desc) ?:
		      shash_ahash_finup(req, desc);

	return err;
}
EXPORT_SYMBOL_GPL(shash_ahash_digest);

static int shash_async_digest(struct ahash_request *req)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;
	desc->flags = req->base.flags;

	return shash_ahash_digest(req, desc);
}
static int shash_async_export(struct ahash_request *req, void *out)
{
	return crypto_shash_export(ahash_request_ctx(req), out);
}

static int shash_async_import(struct ahash_request *req, const void *in)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;
	desc->flags = req->base.flags;

	return crypto_shash_import(desc, in);
}
static void crypto_exit_shash_ops_async(struct crypto_tfm *tfm)
{
	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);

	crypto_free_shash(*ctx);
}
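
/*
 * Instantiate an shash algorithm behind an ahash tfm: allocate the
 * underlying shash transform and wire the async entry points to the
 * shash_async_* wrappers above.
 */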
int crypto_init_shash_ops_async(struct crypto_tfm *tfm)
{
	struct crypto_alg *calg = tfm->__crt_alg;
	struct shash_alg *alg = __crypto_shash_alg(calg);
	struct crypto_ahash *crt = __crypto_ahash_cast(tfm);
	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);
	struct crypto_shash *shash;

	if (!crypto_mod_get(calg))
		return -EAGAIN;

	shash = crypto_create_tfm(calg, &crypto_shash_type);
	if (IS_ERR(shash)) {
		crypto_mod_put(calg);
		return PTR_ERR(shash);
	}

	*ctx = shash;
	tfm->exit = crypto_exit_shash_ops_async;

	crt->init = shash_async_init;
	crt->update = shash_async_update;
	crt->final = shash_async_final;
	crt->finup = shash_async_finup;
	crt->digest = shash_async_digest;
	crt->setkey = shash_async_setkey;

	crypto_ahash_set_flags(crt, crypto_shash_get_flags(shash) &
				    CRYPTO_TFM_NEED_KEY);

	if (alg->export)
		crt->export = shash_async_export;
	if (alg->import)
		crt->import = shash_async_import;

	crt->reqsize = sizeof(struct shash_desc) + crypto_shash_descsize(shash);

	return 0;
}
static int crypto_shash_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_shash *hash = __crypto_shash_cast(tfm);
	struct shash_alg *alg = crypto_shash_alg(hash);

	hash->descsize = alg->descsize;

	if (crypto_shash_alg_has_setkey(alg) &&
	    !(alg->base.cra_flags & CRYPTO_ALG_OPTIONAL_KEY))
		crypto_shash_set_flags(hash, CRYPTO_TFM_NEED_KEY);

	return 0;
}
#ifdef CONFIG_NET
static int crypto_shash_report(struct sk_buff *skb, struct crypto_alg *alg)
{
	struct crypto_report_hash rhash;
	struct shash_alg *salg = __crypto_shash_alg(alg);

	strncpy(rhash.type, "shash", sizeof(rhash.type));

	rhash.blocksize = alg->cra_blocksize;
	rhash.digestsize = salg->digestsize;

	if (nla_put(skb, CRYPTOCFGA_REPORT_HASH,
		    sizeof(struct crypto_report_hash), &rhash))
		goto nla_put_failure;
	return 0;

nla_put_failure:
	return -EMSGSIZE;
}
#else
static int crypto_shash_report(struct sk_buff *skb, struct crypto_alg *alg)
{
	return -ENOSYS;
}
#endif

static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
	__maybe_unused;
static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
{
	struct shash_alg *salg = __crypto_shash_alg(alg);

	seq_printf(m, "type         : shash\n");
	seq_printf(m, "blocksize    : %u\n", alg->cra_blocksize);
	seq_printf(m, "digestsize   : %u\n", salg->digestsize);
}
static const struct crypto_type crypto_shash_type = {
	.extsize = crypto_alg_extsize,
	.init_tfm = crypto_shash_init_tfm,
#ifdef CONFIG_PROC_FS
	.show = crypto_shash_show,
#endif
	.report = crypto_shash_report,
	.maskclear = ~CRYPTO_ALG_TYPE_MASK,
	.maskset = CRYPTO_ALG_TYPE_MASK,
	.type = CRYPTO_ALG_TYPE_SHASH,
	.tfmsize = offsetof(struct crypto_shash, base),
};
struct crypto_shash *crypto_alloc_shash(const char *alg_name, u32 type,
					u32 mask)
{
	return crypto_alloc_tfm(alg_name, &crypto_shash_type, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_alloc_shash);
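
/*
 * Typical caller usage (illustrative sketch only; "sha256" is just an
 * example algorithm name):
 *
 *	struct crypto_shash *tfm = crypto_alloc_shash("sha256", 0, 0);
 *
 *	if (!IS_ERR(tfm)) {
 *		SHASH_DESC_ON_STACK(desc, tfm);
 *
 *		desc->tfm = tfm;
 *		desc->flags = 0;
 *		crypto_shash_digest(desc, data, len, out);
 *		crypto_free_shash(tfm);
 *	}
 */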
static int shash_prepare_alg(struct shash_alg *alg)
{
	struct crypto_alg *base = &alg->base;

	if (alg->digestsize > PAGE_SIZE / 8 ||
	    alg->descsize > PAGE_SIZE / 8 ||
	    alg->statesize > PAGE_SIZE / 8)
		return -EINVAL;

	base->cra_type = &crypto_shash_type;
	base->cra_flags &= ~CRYPTO_ALG_TYPE_MASK;
	base->cra_flags |= CRYPTO_ALG_TYPE_SHASH;

	if (!alg->finup)
		alg->finup = shash_finup_unaligned;
	if (!alg->digest)
		alg->digest = shash_digest_unaligned;
	if (!alg->export) {
		alg->export = shash_default_export;
		alg->import = shash_default_import;
		alg->statesize = alg->descsize;
	}
	if (!alg->setkey)
		alg->setkey = shash_no_setkey;

	return 0;
}
int crypto_register_shash(struct shash_alg *alg)
{
	struct crypto_alg *base = &alg->base;
	int err;

	err = shash_prepare_alg(alg);
	if (err)
		return err;

	return crypto_register_alg(base);
}
EXPORT_SYMBOL_GPL(crypto_register_shash);

int crypto_unregister_shash(struct shash_alg *alg)
{
	return crypto_unregister_alg(&alg->base);
}
EXPORT_SYMBOL_GPL(crypto_unregister_shash);
int crypto_register_shashes(struct shash_alg *algs, int count)
{
	int i, ret;

	for (i = 0; i < count; i++) {
		ret = crypto_register_shash(&algs[i]);
		if (ret)
			goto err;
	}

	return 0;

err:
	for (--i; i >= 0; --i)
		crypto_unregister_shash(&algs[i]);

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_register_shashes);

int crypto_unregister_shashes(struct shash_alg *algs, int count)
{
	int i, ret;

	for (i = count - 1; i >= 0; --i) {
		ret = crypto_unregister_shash(&algs[i]);
		if (ret)
			pr_err("Failed to unregister %s %s: %d\n",
			       algs[i].base.cra_driver_name,
			       algs[i].base.cra_name, ret);
	}

	return 0;
}
EXPORT_SYMBOL_GPL(crypto_unregister_shashes);
int shash_register_instance(struct crypto_template *tmpl,
			    struct shash_instance *inst)
{
	int err;

	err = shash_prepare_alg(&inst->alg);
	if (err)
		return err;

	return crypto_register_instance(tmpl, shash_crypto_instance(inst));
}
EXPORT_SYMBOL_GPL(shash_register_instance);

void shash_free_instance(struct crypto_instance *inst)
{
	crypto_drop_spawn(crypto_instance_ctx(inst));
	kfree(shash_instance(inst));
}
EXPORT_SYMBOL_GPL(shash_free_instance);
int crypto_init_shash_spawn(struct crypto_shash_spawn *spawn,
			    struct shash_alg *alg,
			    struct crypto_instance *inst)
{
	return crypto_init_spawn2(&spawn->base, &alg->base, inst,
				  &crypto_shash_type);
}
EXPORT_SYMBOL_GPL(crypto_init_shash_spawn);

struct shash_alg *shash_attr_alg(struct rtattr *rta, u32 type, u32 mask)
{
	struct crypto_alg *alg;

	alg = crypto_attr_alg2(rta, &crypto_shash_type, type, mask);
	return IS_ERR(alg) ? ERR_CAST(alg) :
	       container_of(alg, struct shash_alg, base);
}
EXPORT_SYMBOL_GPL(shash_attr_alg);
MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Synchronous cryptographic hash type");