/*
 * Cryptographic API for algorithms (i.e., low-level API).
 *
 * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 */

#ifndef _CRYPTO_ALGAPI_H
#define _CRYPTO_ALGAPI_H

#include <linux/crypto.h>
#include <linux/list.h>
#include <linux/kernel.h>
#include <linux/kthread.h>
#include <linux/skbuff.h>
struct crypto_aead;
struct crypto_instance;
struct module;
struct rtattr;
struct seq_file;

struct crypto_type {
	unsigned int (*ctxsize)(struct crypto_alg *alg, u32 type, u32 mask);
	unsigned int (*extsize)(struct crypto_alg *alg);
	int (*init)(struct crypto_tfm *tfm, u32 type, u32 mask);
	int (*init_tfm)(struct crypto_tfm *tfm);
	void (*show)(struct seq_file *m, struct crypto_alg *alg);
	int (*report)(struct sk_buff *skb, struct crypto_alg *alg);
	struct crypto_alg *(*lookup)(const char *name, u32 type, u32 mask);
	void (*free)(struct crypto_instance *inst);

	unsigned int type;
	unsigned int maskclear;
	unsigned int maskset;
	unsigned int tfmsize;
};

struct crypto_instance {
	struct crypto_alg alg;

	struct crypto_template *tmpl;
	struct hlist_node list;

	void *__ctx[] CRYPTO_MINALIGN_ATTR;
};

struct crypto_template {
	struct list_head list;
	struct hlist_head instances;
	struct module *module;

	struct crypto_instance *(*alloc)(struct rtattr **tb);
	void (*free)(struct crypto_instance *inst);
	int (*create)(struct crypto_template *tmpl, struct rtattr **tb);

	char name[CRYPTO_MAX_ALG_NAME];
};

struct crypto_spawn {
	struct list_head list;
	struct crypto_alg *alg;
	struct crypto_instance *inst;
	const struct crypto_type *frontend;
	u32 mask;
};

struct crypto_queue {
	struct list_head list;
	struct list_head *backlog;

	unsigned int qlen;
	unsigned int max_qlen;
};

struct scatter_walk {
	struct scatterlist *sg;
	unsigned int offset;
};

struct blkcipher_walk {
	union {
		struct {
			struct page *page;
			unsigned long offset;
		} phys;

		struct {
			u8 *page;
			u8 *addr;
		} virt;
	} src, dst;

	struct scatter_walk in;
	unsigned int nbytes;

	struct scatter_walk out;
	unsigned int total;

	void *page;
	u8 *buffer;
	u8 *iv;
	unsigned int ivsize;

	int flags;
	unsigned int walk_blocksize;
	unsigned int cipher_blocksize;
	unsigned int alignmask;
};

struct ablkcipher_walk {
	struct {
		struct page *page;
		unsigned int offset;
	} src, dst;

	struct scatter_walk in;
	unsigned int nbytes;
	struct scatter_walk out;
	unsigned int total;
	struct list_head buffers;
	u8 *iv_buffer;
	u8 *iv;
	int flags;
	unsigned int blocksize;
};

#define ENGINE_NAME_LEN	30
/**
 * struct crypto_engine - crypto hardware engine
 * @name: the engine name
 * @idling: the engine is entering an idle state
 * @busy: request pump is busy
 * @running: the engine is running
 * @cur_req_prepared: the current request has been prepared
 * @list: link with the global crypto engine list
 * @queue_lock: spinlock to synchronise access to the request queue
 * @queue: the crypto queue of the engine
 * @rt: whether this queue is set to run as a realtime task
 * @prepare_crypt_hardware: a request will soon arrive from the queue
 * so the subsystem requests the driver to prepare the hardware
 * by issuing this call
 * @unprepare_crypt_hardware: there are currently no more requests on the
 * queue so the subsystem notifies the driver that it may relax the
 * hardware by issuing this call
 * @prepare_request: do any preparation needed before handling the
 * current request
 * @unprepare_request: undo any work done by prepare_request()
 * @crypt_one_request: perform the encryption for the current request
 * @kworker: thread struct for the request pump
 * @kworker_task: pointer to the task for the request pump kworker thread
 * @pump_requests: work struct for scheduling work to the request pump
 * @priv_data: the engine private data
 * @cur_req: the current request being processed
 */
struct crypto_engine {
	char name[ENGINE_NAME_LEN];
	bool idling;
	bool busy;
	bool running;
	bool cur_req_prepared;

	struct list_head list;
	spinlock_t queue_lock;
	struct crypto_queue queue;

	bool rt;

	int (*prepare_crypt_hardware)(struct crypto_engine *engine);
	int (*unprepare_crypt_hardware)(struct crypto_engine *engine);

	int (*prepare_request)(struct crypto_engine *engine,
			       struct ablkcipher_request *req);
	int (*unprepare_request)(struct crypto_engine *engine,
				 struct ablkcipher_request *req);
	int (*crypt_one_request)(struct crypto_engine *engine,
				 struct ablkcipher_request *req);

	struct kthread_worker kworker;
	struct task_struct *kworker_task;
	struct kthread_work pump_requests;

	void *priv_data;
	struct ablkcipher_request *cur_req;
};
int crypto_transfer_request(struct crypto_engine *engine,
			    struct ablkcipher_request *req, bool need_pump);
int crypto_transfer_request_to_engine(struct crypto_engine *engine,
				      struct ablkcipher_request *req);
void crypto_finalize_request(struct crypto_engine *engine,
			     struct ablkcipher_request *req, int err);
int crypto_engine_start(struct crypto_engine *engine);
int crypto_engine_stop(struct crypto_engine *engine);
struct crypto_engine *crypto_engine_alloc_init(struct device *dev, bool rt);
int crypto_engine_exit(struct crypto_engine *engine);
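
/*
 * Typical driver-side lifecycle of a crypto engine, as a minimal sketch
 * using only the functions declared above. The device pointer "dev", the
 * request "req" and the error-handling policy are illustrative
 * assumptions, not part of this header:
 *
 *	struct crypto_engine *engine;
 *	int err;
 *
 *	engine = crypto_engine_alloc_init(dev, true);
 *	if (!engine)
 *		return -ENOMEM;
 *	err = crypto_engine_start(engine);
 *	if (err)
 *		goto out_exit;
 *
 *	// from the driver's encrypt/decrypt entry points: hand the
 *	// request to the engine's pump
 *	err = crypto_transfer_request_to_engine(engine, req);
 *
 *	// from the completion path, once crypt_one_request() is done
 *	crypto_finalize_request(engine, req, 0);
 *
 *	// teardown
 *	crypto_engine_stop(engine);
 * out_exit:
 *	crypto_engine_exit(engine);
 */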
extern const struct crypto_type crypto_ablkcipher_type;
extern const struct crypto_type crypto_blkcipher_type;

void crypto_mod_put(struct crypto_alg *alg);

int crypto_register_template(struct crypto_template *tmpl);
void crypto_unregister_template(struct crypto_template *tmpl);
struct crypto_template *crypto_lookup_template(const char *name);

int crypto_register_instance(struct crypto_template *tmpl,
			     struct crypto_instance *inst);
int crypto_unregister_instance(struct crypto_instance *inst);

int crypto_init_spawn(struct crypto_spawn *spawn, struct crypto_alg *alg,
		      struct crypto_instance *inst, u32 mask);
int crypto_init_spawn2(struct crypto_spawn *spawn, struct crypto_alg *alg,
		       struct crypto_instance *inst,
		       const struct crypto_type *frontend);
int crypto_grab_spawn(struct crypto_spawn *spawn, const char *name,
		      u32 type, u32 mask);

void crypto_drop_spawn(struct crypto_spawn *spawn);
struct crypto_tfm *crypto_spawn_tfm(struct crypto_spawn *spawn, u32 type,
				    u32 mask);
void *crypto_spawn_tfm2(struct crypto_spawn *spawn);
static inline void crypto_set_spawn(struct crypto_spawn *spawn,
				    struct crypto_instance *inst)
{
	spawn->inst = inst;
}
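
/*
 * Minimal sketch of how a template typically uses a spawn: grab a
 * reference to an underlying algorithm when the instance is created,
 * materialise a tfm from it when the instance's tfm is initialised, and
 * drop the reference when the instance is freed. The context struct and
 * its name are illustrative assumptions:
 *
 *	struct my_instance_ctx {
 *		struct crypto_spawn spawn;
 *	};
 *
 *	// in the template's ->create()/->alloc() path
 *	struct my_instance_ctx *ctx = crypto_instance_ctx(inst);
 *	err = crypto_init_spawn(&ctx->spawn, alg, inst,
 *				CRYPTO_ALG_TYPE_MASK);
 *
 *	// in the instance tfm's init path
 *	struct crypto_cipher *cipher = crypto_spawn_cipher(&ctx->spawn);
 *
 *	// in the template's ->free() path
 *	crypto_drop_spawn(&ctx->spawn);
 */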
struct crypto_attr_type *crypto_get_attr_type(struct rtattr **tb);
int crypto_check_attr_type(struct rtattr **tb, u32 type);
const char *crypto_attr_alg_name(struct rtattr *rta);
struct crypto_alg *crypto_attr_alg2(struct rtattr *rta,
				    const struct crypto_type *frontend,
				    u32 type, u32 mask);

static inline struct crypto_alg *crypto_attr_alg(struct rtattr *rta,
						 u32 type, u32 mask)
{
	return crypto_attr_alg2(rta, NULL, type, mask);
}

int crypto_attr_u32(struct rtattr *rta, u32 *num);
int crypto_inst_setname(struct crypto_instance *inst, const char *name,
			struct crypto_alg *alg);
void *crypto_alloc_instance2(const char *name, struct crypto_alg *alg,
			     unsigned int head);
struct crypto_instance *crypto_alloc_instance(const char *name,
					      struct crypto_alg *alg);
void crypto_init_queue(struct crypto_queue *queue, unsigned int max_qlen);
int crypto_enqueue_request(struct crypto_queue *queue,
			   struct crypto_async_request *request);
struct crypto_async_request *crypto_dequeue_request(struct crypto_queue *queue);
int crypto_tfm_in_queue(struct crypto_queue *queue, struct crypto_tfm *tfm);

static inline unsigned int crypto_queue_len(struct crypto_queue *queue)
{
	return queue->qlen;
}
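
/*
 * Minimal sketch of the request-queue pattern used by asynchronous
 * drivers: a producer enqueues requests (possibly as backlog once
 * max_qlen is hit), and a consumer completes any backlogged request with
 * -EINPROGRESS before processing the next one. The queue depth of 32 is
 * an illustrative assumption:
 *
 *	struct crypto_queue queue;
 *	struct crypto_async_request *async_req, *backlog;
 *
 *	crypto_init_queue(&queue, 32);
 *
 *	// producer side
 *	err = crypto_enqueue_request(&queue, &req->base);
 *
 *	// consumer side (crypto_get_backlog() is defined later in this
 *	// header)
 *	backlog = crypto_get_backlog(&queue);
 *	async_req = crypto_dequeue_request(&queue);
 *	if (backlog)
 *		backlog->complete(backlog, -EINPROGRESS);
 */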
/* These functions require the input/output to be aligned as u32. */
void crypto_inc(u8 *a, unsigned int size);
void crypto_xor(u8 *dst, const u8 *src, unsigned int size);
int blkcipher_walk_done(struct blkcipher_desc *desc,
			struct blkcipher_walk *walk, int err);
int blkcipher_walk_virt(struct blkcipher_desc *desc,
			struct blkcipher_walk *walk);
int blkcipher_walk_phys(struct blkcipher_desc *desc,
			struct blkcipher_walk *walk);
int blkcipher_walk_virt_block(struct blkcipher_desc *desc,
			      struct blkcipher_walk *walk,
			      unsigned int blocksize);
int blkcipher_aead_walk_virt_block(struct blkcipher_desc *desc,
				   struct blkcipher_walk *walk,
				   struct crypto_aead *tfm,
				   unsigned int blocksize);
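
/*
 * The walk helpers above are normally driven in a loop from a blkcipher
 * ->encrypt()/->decrypt() implementation; a minimal sketch, where the
 * block size "bsize" and the per-chunk processing are illustrative
 * assumptions (blkcipher_walk_init() is defined later in this header):
 *
 *	struct blkcipher_walk walk;
 *	unsigned int n;
 *
 *	blkcipher_walk_init(&walk, dst, src, nbytes);
 *	err = blkcipher_walk_virt(desc, &walk);
 *	while ((n = walk.nbytes)) {
 *		// process n - (n % bsize) bytes from walk.src.virt.addr
 *		// into walk.dst.virt.addr, a cipher block at a time
 *		err = blkcipher_walk_done(desc, &walk, n % bsize);
 *	}
 *	return err;
 */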
int ablkcipher_walk_done(struct ablkcipher_request *req,
			 struct ablkcipher_walk *walk, int err);
int ablkcipher_walk_phys(struct ablkcipher_request *req,
			 struct ablkcipher_walk *walk);
void __ablkcipher_walk_complete(struct ablkcipher_walk *walk);
static inline void *crypto_tfm_ctx_aligned(struct crypto_tfm *tfm)
{
	return PTR_ALIGN(crypto_tfm_ctx(tfm),
			 crypto_tfm_alg_alignmask(tfm) + 1);
}

static inline struct crypto_instance *crypto_tfm_alg_instance(
	struct crypto_tfm *tfm)
{
	return container_of(tfm->__crt_alg, struct crypto_instance, alg);
}

static inline void *crypto_instance_ctx(struct crypto_instance *inst)
{
	return inst->__ctx;
}

static inline struct ablkcipher_alg *crypto_ablkcipher_alg(
	struct crypto_ablkcipher *tfm)
{
	return &crypto_ablkcipher_tfm(tfm)->__crt_alg->cra_ablkcipher;
}

static inline void *crypto_ablkcipher_ctx(struct crypto_ablkcipher *tfm)
{
	return crypto_tfm_ctx(&tfm->base);
}

static inline void *crypto_ablkcipher_ctx_aligned(struct crypto_ablkcipher *tfm)
{
	return crypto_tfm_ctx_aligned(&tfm->base);
}

static inline struct crypto_blkcipher *crypto_spawn_blkcipher(
	struct crypto_spawn *spawn)
{
	u32 type = CRYPTO_ALG_TYPE_BLKCIPHER;
	u32 mask = CRYPTO_ALG_TYPE_MASK;

	return __crypto_blkcipher_cast(crypto_spawn_tfm(spawn, type, mask));
}

static inline void *crypto_blkcipher_ctx(struct crypto_blkcipher *tfm)
{
	return crypto_tfm_ctx(&tfm->base);
}

static inline void *crypto_blkcipher_ctx_aligned(struct crypto_blkcipher *tfm)
{
	return crypto_tfm_ctx_aligned(&tfm->base);
}

static inline struct crypto_cipher *crypto_spawn_cipher(
	struct crypto_spawn *spawn)
{
	u32 type = CRYPTO_ALG_TYPE_CIPHER;
	u32 mask = CRYPTO_ALG_TYPE_MASK;

	return __crypto_cipher_cast(crypto_spawn_tfm(spawn, type, mask));
}

static inline struct cipher_alg *crypto_cipher_alg(struct crypto_cipher *tfm)
{
	return &crypto_cipher_tfm(tfm)->__crt_alg->cra_cipher;
}

static inline void blkcipher_walk_init(struct blkcipher_walk *walk,
				       struct scatterlist *dst,
				       struct scatterlist *src,
				       unsigned int nbytes)
{
	walk->in.sg = src;
	walk->out.sg = dst;
	walk->total = nbytes;
}

static inline void ablkcipher_walk_init(struct ablkcipher_walk *walk,
					struct scatterlist *dst,
					struct scatterlist *src,
					unsigned int nbytes)
{
	walk->in.sg = src;
	walk->out.sg = dst;
	walk->total = nbytes;
	INIT_LIST_HEAD(&walk->buffers);
}

static inline void ablkcipher_walk_complete(struct ablkcipher_walk *walk)
{
	if (unlikely(!list_empty(&walk->buffers)))
		__ablkcipher_walk_complete(walk);
}

static inline struct crypto_async_request *crypto_get_backlog(
	struct crypto_queue *queue)
{
	return queue->backlog == &queue->list ? NULL :
	       container_of(queue->backlog, struct crypto_async_request, list);
}

static inline int ablkcipher_enqueue_request(struct crypto_queue *queue,
					     struct ablkcipher_request *request)
{
	return crypto_enqueue_request(queue, &request->base);
}

static inline struct ablkcipher_request *ablkcipher_dequeue_request(
	struct crypto_queue *queue)
{
	return ablkcipher_request_cast(crypto_dequeue_request(queue));
}

static inline void *ablkcipher_request_ctx(struct ablkcipher_request *req)
{
	return req->__ctx;
}

static inline int ablkcipher_tfm_in_queue(struct crypto_queue *queue,
					  struct crypto_ablkcipher *tfm)
{
	return crypto_tfm_in_queue(queue, crypto_ablkcipher_tfm(tfm));
}

static inline struct crypto_alg *crypto_get_attr_alg(struct rtattr **tb,
						     u32 type, u32 mask)
{
	return crypto_attr_alg(tb[1], type, mask);
}
/*
 * Returns CRYPTO_ALG_ASYNC if type/mask requires the use of sync algorithms.
 * Otherwise returns zero.
 */
static inline int crypto_requires_sync(u32 type, u32 mask)
{
	return (type ^ CRYPTO_ALG_ASYNC) & mask & CRYPTO_ALG_ASYNC;
}
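
/*
 * Worked instance of the bit logic above: a caller passing type = 0 and
 * mask = CRYPTO_ALG_ASYNC is asking for a synchronous implementation, so
 * (0 ^ CRYPTO_ALG_ASYNC) & CRYPTO_ALG_ASYNC & CRYPTO_ALG_ASYNC yields
 * CRYPTO_ALG_ASYNC (non-zero). If the mask leaves CRYPTO_ALG_ASYNC
 * clear, the caller does not care and the result is zero.
 */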
noinline unsigned long __crypto_memneq(const void *a, const void *b, size_t size);

/**
 * crypto_memneq - Compare two areas of memory without leaking
 *		   timing information.
 *
 * @a: One area of memory
 * @b: Another area of memory
 * @size: The size of the area.
 *
 * Returns 0 when data is equal, 1 otherwise.
 */
static inline int crypto_memneq(const void *a, const void *b, size_t size)
{
	return __crypto_memneq(a, b, size) != 0UL ? 1 : 0;
}
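
/*
 * Minimal sketch of the intended use: comparing authentication tags,
 * where a variable-time memcmp() could leak how many leading bytes
 * matched. The buffer names and "authsize" are illustrative
 * assumptions:
 *
 *	if (crypto_memneq(computed_tag, received_tag, authsize))
 *		return -EBADMSG;
 */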
static inline void crypto_yield(u32 flags)
{
#if !defined(CONFIG_PREEMPT) || defined(CONFIG_PREEMPT_VOLUNTARY)
	if (flags & CRYPTO_TFM_REQ_MAY_SLEEP)
		cond_resched();
#endif
}

#endif	/* _CRYPTO_ALGAPI_H */