/*
 * Scatterlist Cryptographic API.
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2002 David S. Miller (davem@redhat.com)
 * Copyright (c) 2005 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * Portions derived from Cryptoapi, by Alexander Kjeldaas <astor@fast.no>
 * and Nettle, by Niels Möller.
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 */
#include <linux/err.h>
#include <linux/errno.h>
#include <linux/kernel.h>
#include <linux/kmod.h>
#include <linux/module.h>
#include <linux/param.h>
#include <linux/sched.h>
#include <linux/slab.h>
#include <linux/string.h>
#include "internal.h"
LIST_HEAD(crypto_alg_list);
EXPORT_SYMBOL_GPL(crypto_alg_list);
DECLARE_RWSEM(crypto_alg_sem);
EXPORT_SYMBOL_GPL(crypto_alg_sem);

BLOCKING_NOTIFIER_HEAD(crypto_chain);
EXPORT_SYMBOL_GPL(crypto_chain);
static inline struct crypto_alg *crypto_alg_get(struct crypto_alg *alg)
{
	atomic_inc(&alg->cra_refcnt);
	return alg;
}

struct crypto_alg *crypto_mod_get(struct crypto_alg *alg)
{
	return try_module_get(alg->cra_module) ? crypto_alg_get(alg) : NULL;
}
EXPORT_SYMBOL_GPL(crypto_mod_get);
void crypto_mod_put(struct crypto_alg *alg)
{
	struct module *module = alg->cra_module;

	/* Dropping the last algorithm reference may free it, so grab the
	 * owning module first and release it afterwards. */
	crypto_alg_put(alg);
	module_put(module);
}
EXPORT_SYMBOL_GPL(crypto_mod_put);
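/*
 * Usage sketch (illustrative, not part of the original file):
 * crypto_mod_get() pins both the algorithm refcount and its owning
 * module, so every successful get must be paired with crypto_mod_put()
 * once the caller is done with the algorithm:
 *
 *	struct crypto_alg *alg = crypto_mod_get(candidate);
 *
 *	if (!alg)
 *		return -ENOENT;
 *	... use alg while the reference is held ...
 *	crypto_mod_put(alg);
 */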
struct crypto_alg *__crypto_alg_lookup(const char *name, u32 type, u32 mask)
{
	struct crypto_alg *q, *alg = NULL;
	int best = -2;

	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		int exact, fuzzy;

		if (crypto_is_moribund(q))
			continue;

		if ((q->cra_flags ^ type) & mask)
			continue;

		if (crypto_is_larval(q) &&
		    ((struct crypto_larval *)q)->mask != mask)
			continue;

		exact = !strcmp(q->cra_driver_name, name);
		fuzzy = !strcmp(q->cra_name, name);
		if (!exact && !(fuzzy && q->cra_priority > best))
			continue;

		if (unlikely(!crypto_mod_get(q)))
			continue;

		best = q->cra_priority;
		if (alg)
			crypto_mod_put(alg);
		alg = q;

		if (exact)
			break;
	}

	return alg;
}
EXPORT_SYMBOL_GPL(__crypto_alg_lookup);
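/*
 * Illustrative note (not part of the original file): an "exact" hit above
 * compares against cra_driver_name, a "fuzzy" hit against the generic
 * cra_name.  If two implementations both register the generic name "aes",
 * a lookup by "aes" returns the one with the highest cra_priority, while
 * a lookup by a specific cra_driver_name selects that implementation
 * unconditionally.
 */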
static void crypto_larval_destroy(struct crypto_alg *alg)
{
	struct crypto_larval *larval = (void *)alg;

	BUG_ON(!crypto_is_larval(alg));
	if (larval->adult)
		crypto_mod_put(larval->adult);
	kfree(larval);
}
static struct crypto_alg *crypto_larval_alloc(const char *name, u32 type,
					      u32 mask)
{
	struct crypto_alg *alg;
	struct crypto_larval *larval;

	larval = kzalloc(sizeof(*larval), GFP_KERNEL);
	if (!larval)
		return ERR_PTR(-ENOMEM);

	larval->mask = mask;
	larval->alg.cra_flags = CRYPTO_ALG_LARVAL | type;
	larval->alg.cra_priority = -1;
	larval->alg.cra_destroy = crypto_larval_destroy;

	atomic_set(&larval->alg.cra_refcnt, 2);
	strlcpy(larval->alg.cra_name, name, CRYPTO_MAX_ALG_NAME);
	init_completion(&larval->completion);

	down_write(&crypto_alg_sem);
	alg = __crypto_alg_lookup(name, type, mask);
	if (!alg) {
		alg = &larval->alg;
		list_add(&alg->cra_list, &crypto_alg_list);
	}
	up_write(&crypto_alg_sem);

	if (alg != &larval->alg)
		kfree(larval);

	return alg;
}
static void crypto_larval_kill(struct crypto_alg *alg)
{
	struct crypto_larval *larval = (void *)alg;

	down_write(&crypto_alg_sem);
	list_del(&alg->cra_list);
	up_write(&crypto_alg_sem);
	complete(&larval->completion);
	crypto_alg_put(alg);
}
static struct crypto_alg *crypto_larval_wait(struct crypto_alg *alg)
{
	struct crypto_larval *larval = (void *)alg;

	wait_for_completion_interruptible_timeout(&larval->completion, 60 * HZ);
	alg = larval->adult;
	if (alg) {
		if (!crypto_mod_get(alg))
			alg = ERR_PTR(-EAGAIN);
	} else
		alg = ERR_PTR(-ENOENT);
	crypto_mod_put(&larval->alg);

	return alg;
}
static struct crypto_alg *crypto_alg_lookup(const char *name, u32 type,
					    u32 mask)
{
	struct crypto_alg *alg;

	down_read(&crypto_alg_sem);
	alg = __crypto_alg_lookup(name, type, mask);
	up_read(&crypto_alg_sem);

	return alg;
}
struct crypto_alg *crypto_alg_mod_lookup(const char *name, u32 type, u32 mask)
{
	struct crypto_alg *alg;
	struct crypto_alg *larval;
	int ok;

	if (!name)
		return ERR_PTR(-ENOENT);

	mask &= ~(CRYPTO_ALG_LARVAL | CRYPTO_ALG_DEAD);
	type &= mask;

	alg = try_then_request_module(crypto_alg_lookup(name, type, mask),
				      name);
	if (alg)
		return crypto_is_larval(alg) ? crypto_larval_wait(alg) : alg;

	larval = crypto_larval_alloc(name, type, mask);
	if (IS_ERR(larval) || !crypto_is_larval(larval))
		return larval;

	ok = crypto_notify(CRYPTO_MSG_ALG_REQUEST, larval);
	if (ok == NOTIFY_DONE) {
		request_module("cryptomgr");
		ok = crypto_notify(CRYPTO_MSG_ALG_REQUEST, larval);
	}

	if (ok == NOTIFY_STOP)
		alg = crypto_larval_wait(larval);
	else {
		crypto_mod_put(larval);
		alg = ERR_PTR(-ENOENT);
	}
	crypto_larval_kill(larval);
	return alg;
}
EXPORT_SYMBOL_GPL(crypto_alg_mod_lookup);
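/*
 * Usage sketch (illustrative, not part of the original file): callers
 * receive a reference on the returned algorithm and must drop it with
 * crypto_mod_put() once they are done with it, e.g.:
 *
 *	struct crypto_alg *alg;
 *
 *	alg = crypto_alg_mod_lookup("aes", 0, CRYPTO_ALG_ASYNC);
 *	if (IS_ERR(alg))
 *		return PTR_ERR(alg);
 *	... inspect or instantiate alg ...
 *	crypto_mod_put(alg);
 */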
static int crypto_init_flags(struct crypto_tfm *tfm, u32 flags)
{
	tfm->crt_flags = flags & CRYPTO_TFM_REQ_MASK;
	flags &= ~CRYPTO_TFM_REQ_MASK;

	switch (crypto_tfm_alg_type(tfm)) {
	case CRYPTO_ALG_TYPE_CIPHER:
		return crypto_init_cipher_flags(tfm, flags);

	case CRYPTO_ALG_TYPE_DIGEST:
		return crypto_init_digest_flags(tfm, flags);

	case CRYPTO_ALG_TYPE_COMPRESS:
		return crypto_init_compress_flags(tfm, flags);
	}

	return 0;
}
static int crypto_init_ops(struct crypto_tfm *tfm)
{
	const struct crypto_type *type = tfm->__crt_alg->cra_type;

	if (type)
		return type->init(tfm);

	switch (crypto_tfm_alg_type(tfm)) {
	case CRYPTO_ALG_TYPE_CIPHER:
		return crypto_init_cipher_ops(tfm);

	case CRYPTO_ALG_TYPE_DIGEST:
		return crypto_init_digest_ops(tfm);

	case CRYPTO_ALG_TYPE_COMPRESS:
		return crypto_init_compress_ops(tfm);

	default:
		break;
	}

	BUG();
	return -EINVAL;
}
static void crypto_exit_ops(struct crypto_tfm *tfm)
{
	const struct crypto_type *type = tfm->__crt_alg->cra_type;

	if (type) {
		if (type->exit)
			type->exit(tfm);
		return;
	}

	switch (crypto_tfm_alg_type(tfm)) {
	case CRYPTO_ALG_TYPE_CIPHER:
		crypto_exit_cipher_ops(tfm);
		break;

	case CRYPTO_ALG_TYPE_DIGEST:
		crypto_exit_digest_ops(tfm);
		break;

	case CRYPTO_ALG_TYPE_COMPRESS:
		crypto_exit_compress_ops(tfm);
		break;

	default:
		BUG();
	}
}
static unsigned int crypto_ctxsize(struct crypto_alg *alg, int flags)
{
	const struct crypto_type *type = alg->cra_type;
	unsigned int len;

	/* Reserve enough slack to realign the context to cra_alignmask + 1. */
	len = alg->cra_alignmask & ~(crypto_tfm_ctx_alignment() - 1);
	if (type)
		return len + type->ctxsize(alg);

	switch (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) {
	default:
		BUG();

	case CRYPTO_ALG_TYPE_CIPHER:
		len += crypto_cipher_ctxsize(alg, flags);
		break;

	case CRYPTO_ALG_TYPE_DIGEST:
		len += crypto_digest_ctxsize(alg, flags);
		break;

	case CRYPTO_ALG_TYPE_COMPRESS:
		len += crypto_compress_ctxsize(alg, flags);
		break;
	}

	return len;
}
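/*
 * Worked example (illustrative, not from the original file): with an
 * 8-byte context alignment from crypto_tfm_ctx_alignment() and
 * cra_alignmask == 15 (the algorithm wants 16-byte aligned state), the
 * computation above reserves 15 & ~7 == 8 extra bytes, enough to round
 * the start of the context area up to the next 16-byte boundary.
 */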
void crypto_shoot_alg(struct crypto_alg *alg)
{
	down_write(&crypto_alg_sem);
	alg->cra_flags |= CRYPTO_ALG_DYING;
	up_write(&crypto_alg_sem);
}
EXPORT_SYMBOL_GPL(crypto_shoot_alg);
struct crypto_tfm *__crypto_alloc_tfm(struct crypto_alg *alg, u32 flags)
{
	struct crypto_tfm *tfm = NULL;
	unsigned int tfm_size;
	int err = -ENOMEM;

	tfm_size = sizeof(*tfm) + crypto_ctxsize(alg, flags);
	tfm = kzalloc(tfm_size, GFP_KERNEL);
	if (tfm == NULL)
		goto out_err;

	tfm->__crt_alg = alg;

	err = crypto_init_flags(tfm, flags);
	if (err)
		goto out_free_tfm;

	err = crypto_init_ops(tfm);
	if (err)
		goto out_free_tfm;

	if (alg->cra_init && (err = alg->cra_init(tfm))) {
		if (err == -EAGAIN)
			crypto_shoot_alg(alg);
		goto cra_init_failed;
	}

	goto out;

cra_init_failed:
	crypto_exit_ops(tfm);
out_free_tfm:
	kfree(tfm);
out_err:
	tfm = ERR_PTR(err);
out:
	return tfm;
}
EXPORT_SYMBOL_GPL(__crypto_alloc_tfm);
struct crypto_tfm *crypto_alloc_tfm(const char *name, u32 flags)
{
	struct crypto_tfm *tfm = NULL;
	int err;

	do {
		struct crypto_alg *alg;

		alg = crypto_alg_mod_lookup(name, 0, CRYPTO_ALG_ASYNC);
		err = PTR_ERR(alg);
		if (IS_ERR(alg))
			continue;

		tfm = __crypto_alloc_tfm(alg, flags);
		err = 0;
		if (IS_ERR(tfm)) {
			crypto_mod_put(alg);
			err = PTR_ERR(tfm);
			tfm = NULL;
		}
	} while (err == -EAGAIN && !signal_pending(current));

	return tfm;
}
/*
 *	crypto_alloc_base - Locate algorithm and allocate transform
 *	@alg_name: Name of algorithm
 *	@type: Type of algorithm
 *	@mask: Mask for type comparison
 *
 *	crypto_alloc_base() will first attempt to locate an already loaded
 *	algorithm.  If that fails and the kernel supports dynamically loadable
 *	modules, it will then attempt to load a module of the same name or
 *	alias.  If that fails it will send a query to any loaded crypto manager
 *	to construct an algorithm on the fly.  A refcount is grabbed on the
 *	algorithm which is then associated with the new transform.
 *
 *	The returned transform is of a non-determinate type.  Most people
 *	should use one of the more specific allocation functions such as
 *	crypto_alloc_blkcipher.
 *
 *	In case of error the return value is an error pointer.
 */
struct crypto_tfm *crypto_alloc_base(const char *alg_name, u32 type, u32 mask)
{
	struct crypto_tfm *tfm;
	int err;

	for (;;) {
		struct crypto_alg *alg;

		alg = crypto_alg_mod_lookup(alg_name, type, mask);
		err = PTR_ERR(alg);
		tfm = ERR_PTR(err);
		if (IS_ERR(alg))
			goto err;

		tfm = __crypto_alloc_tfm(alg, 0);
		if (!IS_ERR(tfm))
			break;

		crypto_mod_put(alg);
		err = PTR_ERR(tfm);

err:
		if (err != -EAGAIN)
			break;
		if (signal_pending(current)) {
			err = -EINTR;
			break;
		}
	}

	return tfm;
}
EXPORT_SYMBOL_GPL(crypto_alloc_base);
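/*
 * Usage sketch (illustrative, not part of the original file): a typical
 * caller checks the error pointer and releases the transform with
 * crypto_free_tfm() when finished; "sha1" is only an example name.
 *
 *	struct crypto_tfm *tfm = crypto_alloc_base("sha1", 0, 0);
 *
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	... cast to the specific transform type and use it ...
 *	crypto_free_tfm(tfm);
 */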
/*
 *	crypto_free_tfm - Free crypto transform
 *	@tfm: Transform to free
 *
 *	crypto_free_tfm() frees up the transform and any associated resources,
 *	then drops the refcount on the associated algorithm.
 */
void crypto_free_tfm(struct crypto_tfm *tfm)
{
	struct crypto_alg *alg;
	int size;

	if (unlikely(!tfm))
		return;

	alg = tfm->__crt_alg;
	size = sizeof(*tfm) + alg->cra_ctxsize;

	if (alg->cra_exit)
		alg->cra_exit(tfm);
	crypto_exit_ops(tfm);
	crypto_mod_put(alg);
	memset(tfm, 0, size);
	kfree(tfm);
}

EXPORT_SYMBOL_GPL(crypto_alloc_tfm);
EXPORT_SYMBOL_GPL(crypto_free_tfm);
int crypto_has_alg(const char *name, u32 type, u32 mask)
{
	int ret = 0;
	struct crypto_alg *alg = crypto_alg_mod_lookup(name, type, mask);

	if (!IS_ERR(alg)) {
		crypto_mod_put(alg);
		ret = 1;
	}

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_has_alg);
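/*
 * Usage sketch (illustrative, not part of the original file):
 * crypto_has_alg() only reports whether an algorithm can be found or
 * loaded; the reference it takes is dropped internally.
 *
 *	if (!crypto_has_alg("sha1", 0, 0))
 *		printk(KERN_INFO "sha1 not available\n");
 */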