/*
 * Scatterlist Cryptographic API.
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2002 David S. Miller (davem@redhat.com)
 * Copyright (c) 2005 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * Portions derived from Cryptoapi, by Alexander Kjeldaas <astor@fast.no>
 * and Nettle, by Niels Möller.
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#include <linux/err.h>
#include <linux/errno.h>
#include <linux/kernel.h>
#include <linux/kmod.h>
#include <linux/module.h>
#include <linux/param.h>
#include <linux/sched.h>
#include <linux/slab.h>
#include <linux/string.h>
#include "internal.h"

LIST_HEAD(crypto_alg_list);
EXPORT_SYMBOL_GPL(crypto_alg_list);
DECLARE_RWSEM(crypto_alg_sem);
EXPORT_SYMBOL_GPL(crypto_alg_sem);

BLOCKING_NOTIFIER_HEAD(crypto_chain);
EXPORT_SYMBOL_GPL(crypto_chain);

static inline struct crypto_alg *crypto_alg_get(struct crypto_alg *alg)
{
	atomic_inc(&alg->cra_refcnt);
	return alg;
}

struct crypto_alg *crypto_mod_get(struct crypto_alg *alg)
{
	return try_module_get(alg->cra_module) ? crypto_alg_get(alg) : NULL;
}
EXPORT_SYMBOL_GPL(crypto_mod_get);

void crypto_mod_put(struct crypto_alg *alg)
{
	struct module *module = alg->cra_module;

	/* Fetch the module pointer first; crypto_alg_put() may free @alg. */
	crypto_alg_put(alg);
	module_put(module);
}
EXPORT_SYMBOL_GPL(crypto_mod_put);

/*
 * Find the best registered algorithm for @name: an exact match on the
 * driver name wins outright, otherwise the highest-priority algorithm
 * whose generic name matches is chosen.
 */
struct crypto_alg *__crypto_alg_lookup(const char *name, u32 type, u32 mask)
{
	struct crypto_alg *q, *alg = NULL;
	int best = -2;

	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		int exact, fuzzy;

		if (crypto_is_moribund(q))
			continue;

		if ((q->cra_flags ^ type) & mask)
			continue;

		if (crypto_is_larval(q) &&
		    ((struct crypto_larval *)q)->mask != mask)
			continue;

		exact = !strcmp(q->cra_driver_name, name);
		fuzzy = !strcmp(q->cra_name, name);
		if (!exact && !(fuzzy && q->cra_priority > best))
			continue;

		if (unlikely(!crypto_mod_get(q)))
			continue;

		best = q->cra_priority;
		if (alg)
			crypto_mod_put(alg);
		alg = q;

		if (exact)
			break;
	}

	return alg;
}
EXPORT_SYMBOL_GPL(__crypto_alg_lookup);

static void crypto_larval_destroy(struct crypto_alg *alg)
{
	struct crypto_larval *larval = (void *)alg;

	BUG_ON(!crypto_is_larval(alg));
	if (larval->adult)
		crypto_mod_put(larval->adult);
	kfree(larval);
}

static struct crypto_alg *crypto_larval_alloc(const char *name, u32 type,
					      u32 mask)
{
	struct crypto_alg *alg;
	struct crypto_larval *larval;

	larval = kzalloc(sizeof(*larval), GFP_KERNEL);
	if (!larval)
		return ERR_PTR(-ENOMEM);

	larval->mask = mask;
	larval->alg.cra_flags = CRYPTO_ALG_LARVAL | type;
	larval->alg.cra_priority = -1;
	larval->alg.cra_destroy = crypto_larval_destroy;

	/* One reference for the list, one for the caller. */
	atomic_set(&larval->alg.cra_refcnt, 2);
	strlcpy(larval->alg.cra_name, name, CRYPTO_MAX_ALG_NAME);
	init_completion(&larval->completion);

	down_write(&crypto_alg_sem);
	alg = __crypto_alg_lookup(name, type, mask);
	if (!alg) {
		alg = &larval->alg;
		list_add(&alg->cra_list, &crypto_alg_list);
	}
	up_write(&crypto_alg_sem);

	/* Someone else registered the name first; discard our larval. */
	if (alg != &larval->alg)
		kfree(larval);

	return alg;
}

static void crypto_larval_kill(struct crypto_alg *alg)
{
	struct crypto_larval *larval = (void *)alg;

	down_write(&crypto_alg_sem);
	list_del(&alg->cra_list);
	up_write(&crypto_alg_sem);
	complete(&larval->completion);
	crypto_alg_put(alg);
}

static struct crypto_alg *crypto_larval_wait(struct crypto_alg *alg)
{
	struct crypto_larval *larval = (void *)alg;

	/* Give the instantiating module up to 60 seconds to register the
	 * real algorithm before giving up. */
	wait_for_completion_interruptible_timeout(&larval->completion, 60 * HZ);
	alg = larval->adult;
	if (alg) {
		if (!crypto_mod_get(alg))
			alg = ERR_PTR(-EAGAIN);
	} else
		alg = ERR_PTR(-ENOENT);
	crypto_mod_put(&larval->alg);

	return alg;
}

static struct crypto_alg *crypto_alg_lookup(const char *name, u32 type,
					    u32 mask)
{
	struct crypto_alg *alg;

	down_read(&crypto_alg_sem);
	alg = __crypto_alg_lookup(name, type, mask);
	up_read(&crypto_alg_sem);

	return alg;
}

struct crypto_alg *crypto_alg_mod_lookup(const char *name, u32 type, u32 mask)
{
	struct crypto_alg *alg;
	struct crypto_alg *larval;
	int ok;

	if (!name)
		return ERR_PTR(-ENOENT);

	mask &= ~(CRYPTO_ALG_LARVAL | CRYPTO_ALG_DEAD);
	type &= mask;

	/* Fast path: the algorithm is already registered, or a module of
	 * the same name provides it. */
	alg = try_then_request_module(crypto_alg_lookup(name, type, mask),
				      name);
	if (alg)
		return crypto_is_larval(alg) ? crypto_larval_wait(alg) : alg;

	larval = crypto_larval_alloc(name, type, mask);
	if (IS_ERR(larval) || !crypto_is_larval(larval))
		return larval;

	/* Ask any registered instantiator to construct the algorithm,
	 * loading the crypto manager on demand if nobody answered. */
	ok = crypto_notify(CRYPTO_MSG_ALG_REQUEST, larval);
	if (ok == NOTIFY_DONE) {
		request_module("cryptomgr");
		ok = crypto_notify(CRYPTO_MSG_ALG_REQUEST, larval);
	}

	if (ok == NOTIFY_STOP)
		alg = crypto_larval_wait(larval);
	else {
		crypto_mod_put(larval);
		alg = ERR_PTR(-ENOENT);
	}
	crypto_larval_kill(larval);
	return alg;
}
EXPORT_SYMBOL_GPL(crypto_alg_mod_lookup);

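/*
 * Note on the larval mechanism used above: a larval is a placeholder that
 * occupies the algorithm's slot on crypto_alg_list while a module or the
 * crypto manager ("cryptomgr") instantiates the real, "adult" algorithm.
 * Concurrent lookups for the same name find the larval and sleep on its
 * completion rather than racing to load the module twice.
 */
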
static int crypto_init_ops(struct crypto_tfm *tfm, u32 type, u32 mask)
{
	const struct crypto_type *type_obj = tfm->__crt_alg->cra_type;

	if (type_obj)
		return type_obj->init(tfm, type, mask);

	switch (crypto_tfm_alg_type(tfm)) {
	case CRYPTO_ALG_TYPE_CIPHER:
		return crypto_init_cipher_ops(tfm);

	case CRYPTO_ALG_TYPE_DIGEST:
		return crypto_init_digest_ops(tfm);

	case CRYPTO_ALG_TYPE_COMPRESS:
		return crypto_init_compress_ops(tfm);

	default:
		break;
	}

	BUG();
	return -EINVAL;
}

static void crypto_exit_ops(struct crypto_tfm *tfm)
{
	const struct crypto_type *type = tfm->__crt_alg->cra_type;

	if (type) {
		if (type->exit)
			type->exit(tfm);
		return;
	}

	switch (crypto_tfm_alg_type(tfm)) {
	case CRYPTO_ALG_TYPE_CIPHER:
		crypto_exit_cipher_ops(tfm);
		break;

	case CRYPTO_ALG_TYPE_DIGEST:
		crypto_exit_digest_ops(tfm);
		break;

	case CRYPTO_ALG_TYPE_COMPRESS:
		crypto_exit_compress_ops(tfm);
		break;

	default:
		BUG();
	}
}

static unsigned int crypto_ctxsize(struct crypto_alg *alg, u32 type, u32 mask)
{
	const struct crypto_type *type_obj = alg->cra_type;
	unsigned int len;

	/* Extra room needed to align the context beyond what kmalloc
	 * already guarantees. */
	len = alg->cra_alignmask & ~(crypto_tfm_ctx_alignment() - 1);
	if (type_obj)
		return len + type_obj->ctxsize(alg, type, mask);

	switch (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) {
	default:
		BUG();

	case CRYPTO_ALG_TYPE_CIPHER:
		len += crypto_cipher_ctxsize(alg);
		break;

	case CRYPTO_ALG_TYPE_DIGEST:
		len += crypto_digest_ctxsize(alg);
		break;

	case CRYPTO_ALG_TYPE_COMPRESS:
		len += crypto_compress_ctxsize(alg);
		break;
	}

	return len;
}

void crypto_shoot_alg(struct crypto_alg *alg)
{
	down_write(&crypto_alg_sem);
	alg->cra_flags |= CRYPTO_ALG_DYING;
	up_write(&crypto_alg_sem);
}
EXPORT_SYMBOL_GPL(crypto_shoot_alg);

struct crypto_tfm *__crypto_alloc_tfm(struct crypto_alg *alg, u32 type,
				      u32 mask)
{
	struct crypto_tfm *tfm = NULL;
	unsigned int tfm_size;
	int err = -ENOMEM;

	tfm_size = sizeof(*tfm) + crypto_ctxsize(alg, type, mask);
	tfm = kzalloc(tfm_size, GFP_KERNEL);
	if (tfm == NULL)
		goto out_err;

	tfm->__crt_alg = alg;

	err = crypto_init_ops(tfm, type, mask);
	if (err)
		goto out_free_tfm;

	if (alg->cra_init && (err = alg->cra_init(tfm))) {
		if (err == -EAGAIN)
			crypto_shoot_alg(alg);
		goto cra_init_failed;
	}

	goto out;

cra_init_failed:
	crypto_exit_ops(tfm);
out_free_tfm:
	kfree(tfm);
out_err:
	tfm = ERR_PTR(err);
out:
	return tfm;
}
EXPORT_SYMBOL_GPL(__crypto_alloc_tfm);

/*
 * crypto_alloc_base - Locate algorithm and allocate transform
 * @alg_name: Name of algorithm
 * @type: Type of algorithm
 * @mask: Mask for type comparison
 *
 * crypto_alloc_base() will first attempt to locate an already loaded
 * algorithm.  If that fails and the kernel supports dynamically loadable
 * modules, it will then attempt to load a module of the same name or
 * alias.  If that fails it will send a query to any loaded crypto manager
 * to construct an algorithm on the fly.  A refcount is grabbed on the
 * algorithm which is then associated with the new transform.
 *
 * The returned transform is of a non-determinate type.  Most people
 * should use one of the more specific allocation functions such as
 * crypto_alloc_blkcipher.
 *
 * In case of error the return value is an error pointer.
 */
struct crypto_tfm *crypto_alloc_base(const char *alg_name, u32 type, u32 mask)
{
	struct crypto_tfm *tfm;
	int err;

	for (;;) {
		struct crypto_alg *alg;

		alg = crypto_alg_mod_lookup(alg_name, type, mask);
		if (IS_ERR(alg)) {
			err = PTR_ERR(alg);
			goto err;
		}

		tfm = __crypto_alloc_tfm(alg, type, mask);
		if (!IS_ERR(tfm))
			return tfm;

		crypto_mod_put(alg);
		err = PTR_ERR(tfm);

err:
		if (err != -EAGAIN)
			break;
		if (signal_pending(current)) {
			err = -EINTR;
			break;
		}
	}

	return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_alloc_base);

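/*
 * A minimal usage sketch (not part of this file): allocating a transform
 * by name and handling the error pointer.  The helper name
 * demo_alloc_cipher and the choice of "aes" are hypothetical; the flag
 * constants are the usual ones from linux/crypto.h.
 */
#if 0
static int demo_alloc_cipher(void)
{
	struct crypto_tfm *tfm;

	/* Look up "aes", insisting the result is a plain cipher. */
	tfm = crypto_alloc_base("aes", CRYPTO_ALG_TYPE_CIPHER,
				CRYPTO_ALG_TYPE_MASK);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);	/* e.g. -ENOENT if no provider loads */

	/* ... use the transform ... */

	crypto_free_tfm(tfm);
	return 0;
}
#endif
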
/*
 * crypto_free_tfm - Free crypto transform
 * @tfm: Transform to free
 *
 * crypto_free_tfm() frees up the transform and any associated resources,
 * then drops the refcount on the associated algorithm.
 */
void crypto_free_tfm(struct crypto_tfm *tfm)
{
	struct crypto_alg *alg;
	int size;

	if (unlikely(!tfm))
		return;

	alg = tfm->__crt_alg;
	size = sizeof(*tfm) + alg->cra_ctxsize;

	if (alg->cra_exit)
		alg->cra_exit(tfm);
	crypto_exit_ops(tfm);
	crypto_mod_put(alg);
	memset(tfm, 0, size);
	kfree(tfm);
}
EXPORT_SYMBOL_GPL(crypto_free_tfm);

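/*
 * Sketch: because crypto_free_tfm() tolerates NULL (see the check above),
 * a teardown helper can free unconditionally, mirroring kfree().  The
 * demo_ctx structure and demo_ctx_free helper are hypothetical.
 */
#if 0
struct demo_ctx {
	struct crypto_tfm *tfm;	/* may still be NULL after early failure */
};

static void demo_ctx_free(struct demo_ctx *ctx)
{
	crypto_free_tfm(ctx->tfm);	/* no NULL check needed */
	kfree(ctx);
}
#endif
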
/*
 * crypto_has_alg - Test whether an algorithm matching @name, @type and
 * @mask is available, loading modules as needed.
 */
int crypto_has_alg(const char *name, u32 type, u32 mask)
{
	int ret = 0;
	struct crypto_alg *alg = crypto_alg_mod_lookup(name, type, mask);

	if (!IS_ERR(alg)) {
		crypto_mod_put(alg);
		ret = 1;
	}

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_has_alg);
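
/*
 * Sketch: probing availability before committing to a code path.  Note
 * that crypto_has_alg() goes through crypto_alg_mod_lookup() and so may
 * load modules and sleep, just like an allocation.  demo_pick_digest and
 * the algorithm names are illustrative.
 */
#if 0
static const char *demo_pick_digest(void)
{
	/* Prefer sha256 when a provider is available; else fall back. */
	return crypto_has_alg("sha256", 0, 0) ? "sha256" : "sha1";
}
#endif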