/*
 * Synchronous Cryptographic Hash operations.
 *
 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#include <crypto/scatterwalk.h>
#include <crypto/internal/hash.h>
#include <linux/err.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/slab.h>
#include <linux/seq_file.h>
static const struct crypto_type crypto_shash_type;
static inline struct crypto_shash *__crypto_shash_cast(struct crypto_tfm *tfm)
{
	return container_of(tfm, struct crypto_shash, base);
}
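
/*
 * An shash algorithm may declare an alignment mask for its key and data
 * pointers.  When a caller passes a misaligned key, it is copied into a
 * freshly allocated buffer that satisfies the mask before the algorithm's
 * ->setkey() is invoked, and the copy is wiped afterwards.
 */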
static int shash_setkey_unaligned(struct crypto_shash *tfm, const u8 *key,
				  unsigned int keylen)
{
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	unsigned long absize;
	u8 *buffer, *alignbuffer;
	int err;

	absize = keylen + (alignmask & ~(CRYPTO_MINALIGN - 1));
	buffer = kmalloc(absize, GFP_KERNEL);
	if (!buffer)
		return -ENOMEM;

	alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
	memcpy(alignbuffer, key, keylen);
	err = shash->setkey(tfm, alignbuffer, keylen);
	memset(alignbuffer, 0, keylen);
	kfree(buffer);
	return err;
}
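
/*
 * Fast path: if the key already satisfies the algorithm's alignment mask,
 * hand it to ->setkey() directly; otherwise bounce it through the aligned
 * copy above.
 */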
int crypto_shash_setkey(struct crypto_shash *tfm, const u8 *key,
			unsigned int keylen)
{
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if (!shash->setkey)
		return -ENOSYS;

	if ((unsigned long)key & alignmask)
		return shash_setkey_unaligned(tfm, key, keylen);

	return shash->setkey(tfm, key, keylen);
}
EXPORT_SYMBOL_GPL(crypto_shash_setkey);
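
/*
 * Size an on-stack bounce buffer so that, after rounding its start
 * address up to the alignment mask, at least @len usable bytes remain.
 */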
static inline unsigned int shash_align_buffer_size(unsigned len,
						   unsigned long mask)
{
	return len + (mask & ~(__alignof__(u8 __attribute__ ((aligned))) - 1));
}
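
/*
 * Hash the misaligned head of @data out of an aligned stack buffer, then
 * pass the now-aligned remainder straight to ->update().
 */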
static int shash_update_unaligned(struct shash_desc *desc, const u8 *data,
				  unsigned int len)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	unsigned int unaligned_len = alignmask + 1 -
				     ((unsigned long)data & alignmask);
	u8 buf[shash_align_buffer_size(unaligned_len, alignmask)]
		__attribute__ ((aligned));

	if (unaligned_len > len)
		unaligned_len = len;

	memcpy(buf, data, unaligned_len);

	return shash->update(desc, buf, unaligned_len) ?:
	       shash->update(desc, data + unaligned_len, len - unaligned_len);
}
int crypto_shash_update(struct shash_desc *desc, const u8 *data,
			unsigned int len)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if ((unsigned long)data & alignmask)
		return shash_update_unaligned(desc, data, len);

	return shash->update(desc, data, len);
}
EXPORT_SYMBOL_GPL(crypto_shash_update);
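
/*
 * Produce the digest into an aligned stack buffer, then copy it to the
 * caller's (misaligned) destination.
 */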
static int shash_final_unaligned(struct shash_desc *desc, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned int ds = crypto_shash_digestsize(tfm);
	u8 buf[shash_align_buffer_size(ds, alignmask)]
		__attribute__ ((aligned));
	int err;

	err = shash->final(desc, buf);
	memcpy(out, buf, ds);
	return err;
}
int crypto_shash_final(struct shash_desc *desc, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if ((unsigned long)out & alignmask)
		return shash_final_unaligned(desc, out);

	return shash->final(desc, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_final);
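
/*
 * finup is optional for an algorithm; fall back to a plain update + final
 * pair when it is absent or the pointers are misaligned.
 */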
static int shash_finup_unaligned(struct shash_desc *desc, const u8 *data,
				 unsigned int len, u8 *out)
{
	return crypto_shash_update(desc, data, len) ?:
	       crypto_shash_final(desc, out);
}
int crypto_shash_finup(struct shash_desc *desc, const u8 *data,
		       unsigned int len, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if (((unsigned long)data | (unsigned long)out) & alignmask ||
	    !shash->finup)
		return shash_finup_unaligned(desc, data, len, out);

	return shash->finup(desc, data, len, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_finup);
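
/*
 * Likewise for digest: algorithms need not provide ->digest(), and
 * misaligned buffers are routed through init/update/final, which do their
 * own alignment fixups.
 */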
static int shash_digest_unaligned(struct shash_desc *desc, const u8 *data,
				  unsigned int len, u8 *out)
{
	return crypto_shash_init(desc) ?:
	       crypto_shash_update(desc, data, len) ?:
	       crypto_shash_final(desc, out);
}
int crypto_shash_digest(struct shash_desc *desc, const u8 *data,
			unsigned int len, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if (((unsigned long)data | (unsigned long)out) & alignmask ||
	    !shash->digest)
		return shash_digest_unaligned(desc, data, len, out);

	return shash->digest(desc, data, len, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_digest);
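
/*
 * Import restores a previously saved descriptor context by copying it back
 * in; algorithms that keep derived state may supply ->reinit() to
 * recompute it afterwards.
 */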
int crypto_shash_import(struct shash_desc *desc, const u8 *in)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *alg = crypto_shash_alg(tfm);

	memcpy(shash_desc_ctx(desc), in, crypto_shash_descsize(tfm));

	if (alg->reinit)
		alg->reinit(desc);

	return 0;
}
EXPORT_SYMBOL_GPL(crypto_shash_import);
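
/*
 * The wrappers below expose a synchronous shash implementation through the
 * asynchronous ahash interface: the ahash context merely holds a pointer
 * to an underlying crypto_shash, and each request is serviced inline.
 */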
static int shash_async_setkey(struct crypto_ahash *tfm, const u8 *key,
			      unsigned int keylen)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(tfm);

	return crypto_shash_setkey(*ctx, key, keylen);
}
static int shash_async_init(struct ahash_request *req)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;
	desc->flags = req->base.flags;

	return crypto_shash_init(desc);
}
static int shash_async_update(struct ahash_request *req)
{
	struct shash_desc *desc = ahash_request_ctx(req);
	struct crypto_hash_walk walk;
	int nbytes;

	for (nbytes = crypto_hash_walk_first(req, &walk); nbytes > 0;
	     nbytes = crypto_hash_walk_done(&walk, nbytes))
		nbytes = crypto_shash_update(desc, walk.data, nbytes);

	return nbytes;
}
static int shash_async_final(struct ahash_request *req)
{
	return crypto_shash_final(ahash_request_ctx(req), req->result);
}
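
/*
 * If the whole request lies within a single page, map it and hash it
 * directly; otherwise fall back to a full init/update/final walk of the
 * scatterlist.
 */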
static int shash_async_digest(struct ahash_request *req)
{
	struct scatterlist *sg = req->src;
	unsigned int offset = sg->offset;
	unsigned int nbytes = req->nbytes;
	int err;

	if (nbytes < min(sg->length, ((unsigned int)(PAGE_SIZE)) - offset)) {
		struct crypto_shash **ctx =
			crypto_ahash_ctx(crypto_ahash_reqtfm(req));
		struct shash_desc *desc = ahash_request_ctx(req);
		void *data;

		desc->tfm = *ctx;
		desc->flags = req->base.flags;

		data = crypto_kmap(sg_page(sg), 0);
		err = crypto_shash_digest(desc, data + offset, nbytes,
					  req->result);
		crypto_kunmap(data, 0);
		crypto_yield(desc->flags);
		goto out;
	}

	err = shash_async_init(req);
	if (err)
		goto out;

	err = shash_async_update(req);
	if (err)
		goto out;

	err = shash_async_final(req);

out:
	return err;
}
static void crypto_exit_shash_ops_async(struct crypto_tfm *tfm)
{
	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);

	crypto_free_shash(*ctx);
}
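
/*
 * Instantiate the underlying shash transform and wire the ahash operation
 * table up to the wrappers above.
 */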
static int crypto_init_shash_ops_async(struct crypto_tfm *tfm)
{
	struct crypto_alg *calg = tfm->__crt_alg;
	struct shash_alg *alg = __crypto_shash_alg(calg);
	struct ahash_tfm *crt = &tfm->crt_ahash;
	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);
	struct crypto_shash *shash;

	if (!crypto_mod_get(calg))
		return -EAGAIN;

	shash = __crypto_shash_cast(crypto_create_tfm(
		calg, &crypto_shash_type));
	if (IS_ERR(shash)) {
		crypto_mod_put(calg);
		return PTR_ERR(shash);
	}

	*ctx = shash;
	tfm->exit = crypto_exit_shash_ops_async;

	crt->init = shash_async_init;
	crt->update = shash_async_update;
	crt->final = shash_async_final;
	crt->digest = shash_async_digest;
	crt->setkey = shash_async_setkey;

	crt->digestsize = alg->digestsize;
	crt->reqsize = sizeof(struct shash_desc) + crypto_shash_descsize(shash);

	return 0;
}
static int shash_compat_setkey(struct crypto_hash *tfm, const u8 *key,
			       unsigned int keylen)
{
	struct shash_desc *desc = crypto_hash_ctx(tfm);

	return crypto_shash_setkey(desc->tfm, key, keylen);
}
static int shash_compat_init(struct hash_desc *hdesc)
{
	struct shash_desc *desc = crypto_hash_ctx(hdesc->tfm);

	desc->flags = hdesc->flags;

	return crypto_shash_init(desc);
}
static int shash_compat_update(struct hash_desc *hdesc, struct scatterlist *sg,
			       unsigned int len)
{
	struct shash_desc *desc = crypto_hash_ctx(hdesc->tfm);
	struct crypto_hash_walk walk;
	int nbytes;

	for (nbytes = crypto_hash_walk_first_compat(hdesc, &walk, sg, len);
	     nbytes > 0; nbytes = crypto_hash_walk_done(&walk, nbytes))
		nbytes = crypto_shash_update(desc, walk.data, nbytes);

	return nbytes;
}
static int shash_compat_final(struct hash_desc *hdesc, u8 *out)
{
	return crypto_shash_final(crypto_hash_ctx(hdesc->tfm), out);
}
static int shash_compat_digest(struct hash_desc *hdesc, struct scatterlist *sg,
			       unsigned int nbytes, u8 *out)
{
	unsigned int offset = sg->offset;
	int err;

	if (nbytes < min(sg->length, ((unsigned int)(PAGE_SIZE)) - offset)) {
		struct shash_desc *desc = crypto_hash_ctx(hdesc->tfm);
		void *data;

		desc->flags = hdesc->flags;

		data = crypto_kmap(sg_page(sg), 0);
		err = crypto_shash_digest(desc, data + offset, nbytes, out);
		crypto_kunmap(data, 0);
		crypto_yield(desc->flags);
		goto out;
	}

	err = shash_compat_init(hdesc);
	if (err)
		goto out;

	err = shash_compat_update(hdesc, sg, nbytes);
	if (err)
		goto out;

	err = shash_compat_final(hdesc, out);

out:
	return err;
}
static void crypto_exit_shash_ops_compat(struct crypto_tfm *tfm)
{
	struct shash_desc *desc = crypto_tfm_ctx(tfm);

	crypto_free_shash(desc->tfm);
}
static int crypto_init_shash_ops_compat(struct crypto_tfm *tfm)
{
	struct hash_tfm *crt = &tfm->crt_hash;
	struct crypto_alg *calg = tfm->__crt_alg;
	struct shash_alg *alg = __crypto_shash_alg(calg);
	struct shash_desc *desc = crypto_tfm_ctx(tfm);
	struct crypto_shash *shash;

	if (!crypto_mod_get(calg))
		return -EAGAIN;

	shash = __crypto_shash_cast(crypto_create_tfm(
		calg, &crypto_shash_type));
	if (IS_ERR(shash)) {
		crypto_mod_put(calg);
		return PTR_ERR(shash);
	}

	desc->tfm = shash;
	tfm->exit = crypto_exit_shash_ops_compat;

	crt->init = shash_compat_init;
	crt->update = shash_compat_update;
	crt->final = shash_compat_final;
	crt->digest = shash_compat_digest;
	crt->setkey = shash_compat_setkey;

	crt->digestsize = alg->digestsize;

	return 0;
}
static int crypto_init_shash_ops(struct crypto_tfm *tfm, u32 type, u32 mask)
{
	switch (mask & CRYPTO_ALG_TYPE_MASK) {
	case CRYPTO_ALG_TYPE_HASH_MASK:
		return crypto_init_shash_ops_compat(tfm);
	case CRYPTO_ALG_TYPE_AHASH_MASK:
		return crypto_init_shash_ops_async(tfm);
	}

	return -EINVAL;
}
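
/*
 * The context size depends on which frontend requests the transform: the
 * compat layer embeds a whole shash_desc, while the ahash layer stores
 * only a pointer to the underlying shash.
 */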
static unsigned int crypto_shash_ctxsize(struct crypto_alg *alg, u32 type,
					 u32 mask)
{
	struct shash_alg *salg = __crypto_shash_alg(alg);

	switch (mask & CRYPTO_ALG_TYPE_MASK) {
	case CRYPTO_ALG_TYPE_HASH_MASK:
		return sizeof(struct shash_desc) + salg->descsize;
	case CRYPTO_ALG_TYPE_AHASH_MASK:
		return sizeof(struct crypto_shash *);
	}

	return 0;
}
static int crypto_shash_init_tfm(struct crypto_tfm *tfm,
				 const struct crypto_type *frontend)
{
	if (frontend->type != CRYPTO_ALG_TYPE_SHASH)
		return -EINVAL;
	return 0;
}
static unsigned int crypto_shash_extsize(struct crypto_alg *alg,
					 const struct crypto_type *frontend)
{
	return alg->cra_ctxsize;
}
static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
	__attribute__ ((unused));
static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
{
	struct shash_alg *salg = __crypto_shash_alg(alg);

	seq_printf(m, "type         : shash\n");
	seq_printf(m, "blocksize    : %u\n", alg->cra_blocksize);
	seq_printf(m, "digestsize   : %u\n", salg->digestsize);
	seq_printf(m, "descsize     : %u\n", salg->descsize);
}
static const struct crypto_type crypto_shash_type = {
	.ctxsize = crypto_shash_ctxsize,
	.extsize = crypto_shash_extsize,
	.init = crypto_init_shash_ops,
	.init_tfm = crypto_shash_init_tfm,
#ifdef CONFIG_PROC_FS
	.show = crypto_shash_show,
#endif
	.maskclear = ~CRYPTO_ALG_TYPE_MASK,
	.maskset = CRYPTO_ALG_TYPE_MASK,
	.type = CRYPTO_ALG_TYPE_SHASH,
	.tfmsize = offsetof(struct crypto_shash, base),
};
struct crypto_shash *crypto_alloc_shash(const char *alg_name, u32 type,
					u32 mask)
{
	return __crypto_shash_cast(
		crypto_alloc_tfm(alg_name, &crypto_shash_type, type, mask));
}
EXPORT_SYMBOL_GPL(crypto_alloc_shash);
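
/*
 * Illustrative sketch of typical caller usage (not part of this file);
 * assumes an shash implementation such as "sha256" is registered, and
 * omits error handling (IS_ERR() on the tfm, return codes) for brevity:
 *
 *	struct crypto_shash *tfm = crypto_alloc_shash("sha256", 0, 0);
 *	struct shash_desc *desc;
 *	u8 digest[32];
 *
 *	desc = kmalloc(sizeof(*desc) + crypto_shash_descsize(tfm),
 *		       GFP_KERNEL);
 *	desc->tfm = tfm;
 *	desc->flags = 0;
 *	crypto_shash_digest(desc, data, len, digest);
 *	kfree(desc);
 *	crypto_free_shash(tfm);
 */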
int crypto_register_shash(struct shash_alg *alg)
{
	struct crypto_alg *base = &alg->base;

	if (alg->digestsize > PAGE_SIZE / 8 ||
	    alg->descsize > PAGE_SIZE / 8)
		return -EINVAL;

	base->cra_type = &crypto_shash_type;
	base->cra_flags &= ~CRYPTO_ALG_TYPE_MASK;
	base->cra_flags |= CRYPTO_ALG_TYPE_SHASH;

	return crypto_register_alg(base);
}
EXPORT_SYMBOL_GPL(crypto_register_shash);
int crypto_unregister_shash(struct shash_alg *alg)
{
	return crypto_unregister_alg(&alg->base);
}
EXPORT_SYMBOL_GPL(crypto_unregister_shash);
MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Synchronous cryptographic hash type");