/*
 * Cryptographic API for algorithms (i.e., low-level API).
 *
 * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 */
#ifndef _CRYPTO_ALGAPI_H
#define _CRYPTO_ALGAPI_H

#include <linux/crypto.h>
#include <linux/list.h>
#include <linux/kernel.h>
/* Per-type operations used by the crypto core to set up and report transforms. */
struct crypto_type {
	unsigned int (*ctxsize)(struct crypto_alg *alg, u32 type, u32 mask);
	unsigned int (*extsize)(struct crypto_alg *alg,
				const struct crypto_type *frontend);
	int (*init)(struct crypto_tfm *tfm, u32 type, u32 mask);
	int (*init_tfm)(struct crypto_tfm *tfm,
			const struct crypto_type *frontend);
	void (*show)(struct seq_file *m, struct crypto_alg *alg);
	struct crypto_alg *(*lookup)(const char *name, u32 type, u32 mask);

	unsigned int maskclear;
};
struct crypto_instance {
	struct crypto_alg alg;

	struct crypto_template *tmpl;
	struct hlist_node list;

	void *__ctx[] CRYPTO_MINALIGN_ATTR;
};
struct crypto_template {
	struct list_head list;
	struct hlist_head instances;
	struct module *module;

	struct crypto_instance *(*alloc)(struct rtattr **tb);
	void (*free)(struct crypto_instance *inst);
	int (*create)(struct crypto_template *tmpl, struct rtattr **tb);

	char name[CRYPTO_MAX_ALG_NAME];
};
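/*
 * Usage sketch (illustrative only, not part of the original header): a
 * template is normally defined statically and registered from module init.
 * The "example" name and the example_alloc()/example_free() callbacks are
 * hypothetical placeholders.
 *
 *	static struct crypto_template example_tmpl = {
 *		.name = "example",
 *		.alloc = example_alloc,
 *		.free = example_free,
 *		.module = THIS_MODULE,
 *	};
 *
 *	static int __init example_module_init(void)
 *	{
 *		return crypto_register_template(&example_tmpl);
 *	}
 *
 *	static void __exit example_module_exit(void)
 *	{
 *		crypto_unregister_template(&example_tmpl);
 *	}
 */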
struct crypto_spawn {
	struct list_head list;
	struct crypto_alg *alg;
	struct crypto_instance *inst;
	const struct crypto_type *frontend;
};
struct crypto_queue {
	struct list_head list;
	struct list_head *backlog;

	unsigned int max_qlen;
};
struct scatter_walk {
	struct scatterlist *sg;
};
struct blkcipher_walk {
	struct scatter_walk in;
	struct scatter_walk out;

	unsigned int total;
	unsigned int blocksize;
};
extern const struct crypto_type crypto_ablkcipher_type;
extern const struct crypto_type crypto_aead_type;
extern const struct crypto_type crypto_blkcipher_type;
extern const struct crypto_type crypto_hash_type;

void crypto_mod_put(struct crypto_alg *alg);

int crypto_register_template(struct crypto_template *tmpl);
void crypto_unregister_template(struct crypto_template *tmpl);
struct crypto_template *crypto_lookup_template(const char *name);
int crypto_init_spawn(struct crypto_spawn *spawn, struct crypto_alg *alg,
		      struct crypto_instance *inst, u32 mask);
int crypto_init_spawn2(struct crypto_spawn *spawn, struct crypto_alg *alg,
		       struct crypto_instance *inst,
		       const struct crypto_type *frontend);

void crypto_drop_spawn(struct crypto_spawn *spawn);
struct crypto_tfm *crypto_spawn_tfm(struct crypto_spawn *spawn, u32 type,
				    u32 mask);
void *crypto_spawn_tfm2(struct crypto_spawn *spawn);
static inline void crypto_set_spawn(struct crypto_spawn *spawn,
				    struct crypto_instance *inst)
{
	spawn->inst = inst;
}
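/*
 * Usage sketch (illustrative only): a template instance usually embeds a
 * crypto_spawn in its instance context, binds it to the underlying
 * algorithm while the instance is built, and drops it when the instance is
 * freed.  "inst" and "alg" below are hypothetical locals.
 *
 *	struct crypto_spawn *spawn = crypto_instance_ctx(inst);
 *	int err;
 *
 *	err = crypto_init_spawn(spawn, alg, inst, CRYPTO_ALG_TYPE_MASK);
 *	if (err)
 *		goto out_free_inst;
 *	...
 *	crypto_drop_spawn(spawn);
 */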
struct crypto_attr_type *crypto_get_attr_type(struct rtattr **tb);
int crypto_check_attr_type(struct rtattr **tb, u32 type);
const char *crypto_attr_alg_name(struct rtattr *rta);
struct crypto_alg *crypto_attr_alg(struct rtattr *rta, u32 type, u32 mask);
int crypto_attr_u32(struct rtattr *rta, u32 *num);
void *crypto_alloc_instance2(const char *name, struct crypto_alg *alg,
			     unsigned int head);
struct crypto_instance *crypto_alloc_instance(const char *name,
					      struct crypto_alg *alg);
void crypto_init_queue(struct crypto_queue *queue, unsigned int max_qlen);
int crypto_enqueue_request(struct crypto_queue *queue,
			   struct crypto_async_request *request);
struct crypto_async_request *crypto_dequeue_request(struct crypto_queue *queue);
int crypto_tfm_in_queue(struct crypto_queue *queue, struct crypto_tfm *tfm);
/* These functions require the input/output to be aligned as u32. */
void crypto_inc(u8 *a, unsigned int size);
void crypto_xor(u8 *dst, const u8 *src, unsigned int size);
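/*
 * Example (illustrative only): a CTR-style mode might XOR one keystream
 * block into the data and advance a big-endian counter block.  "dst",
 * "keystream", "ctrblk" and "bsize" are hypothetical.
 *
 *	crypto_xor(dst, keystream, bsize);
 *	crypto_inc(ctrblk, bsize);
 */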
int blkcipher_walk_done(struct blkcipher_desc *desc,
			struct blkcipher_walk *walk, int err);
int blkcipher_walk_virt(struct blkcipher_desc *desc,
			struct blkcipher_walk *walk);
int blkcipher_walk_phys(struct blkcipher_desc *desc,
			struct blkcipher_walk *walk);
int blkcipher_walk_virt_block(struct blkcipher_desc *desc,
			      struct blkcipher_walk *walk,
			      unsigned int blocksize);
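/*
 * Usage sketch (illustrative only): the typical encrypt path of a block
 * cipher mode walks the scatterlists in mappable chunks.  The segment
 * helper my_crypt_segment(), which returns the number of bytes it left
 * unprocessed, is hypothetical.
 *
 *	struct blkcipher_walk walk;
 *	int err;
 *
 *	blkcipher_walk_init(&walk, dst, src, nbytes);
 *	err = blkcipher_walk_virt(desc, &walk);
 *
 *	while (walk.nbytes) {
 *		unsigned int left = my_crypt_segment(desc, &walk);
 *		err = blkcipher_walk_done(desc, &walk, left);
 *	}
 *	return err;
 */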
static inline void *crypto_tfm_ctx_aligned(struct crypto_tfm *tfm)
{
	unsigned long addr = (unsigned long)crypto_tfm_ctx(tfm);
	unsigned long align = crypto_tfm_alg_alignmask(tfm);

	if (align <= crypto_tfm_ctx_alignment())
		align = 1;

	return (void *)ALIGN(addr, align);
}
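/*
 * Example (illustrative only): an implementation whose context holds data
 * with stricter alignment needs (cra_alignmask set accordingly) fetches it
 * via the aligned accessor.  "struct my_ctx" is hypothetical.
 *
 *	struct my_ctx *ctx = crypto_tfm_ctx_aligned(tfm);
 */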
static inline struct crypto_instance *crypto_tfm_alg_instance(
	struct crypto_tfm *tfm)
{
	return container_of(tfm->__crt_alg, struct crypto_instance, alg);
}
static inline void *crypto_instance_ctx(struct crypto_instance *inst)
{
	return inst->__ctx;
}
static inline struct ablkcipher_alg *crypto_ablkcipher_alg(
	struct crypto_ablkcipher *tfm)
{
	return &crypto_ablkcipher_tfm(tfm)->__crt_alg->cra_ablkcipher;
}
static inline void *crypto_ablkcipher_ctx(struct crypto_ablkcipher *tfm)
{
	return crypto_tfm_ctx(&tfm->base);
}

static inline void *crypto_ablkcipher_ctx_aligned(struct crypto_ablkcipher *tfm)
{
	return crypto_tfm_ctx_aligned(&tfm->base);
}
static inline struct aead_alg *crypto_aead_alg(struct crypto_aead *tfm)
{
	return &crypto_aead_tfm(tfm)->__crt_alg->cra_aead;
}

static inline void *crypto_aead_ctx(struct crypto_aead *tfm)
{
	return crypto_tfm_ctx(&tfm->base);
}
static inline struct crypto_instance *crypto_aead_alg_instance(
	struct crypto_aead *aead)
{
	return crypto_tfm_alg_instance(&aead->base);
}
static inline struct crypto_blkcipher *crypto_spawn_blkcipher(
	struct crypto_spawn *spawn)
{
	u32 type = CRYPTO_ALG_TYPE_BLKCIPHER;
	u32 mask = CRYPTO_ALG_TYPE_MASK;

	return __crypto_blkcipher_cast(crypto_spawn_tfm(spawn, type, mask));
}
static inline void *crypto_blkcipher_ctx(struct crypto_blkcipher *tfm)
{
	return crypto_tfm_ctx(&tfm->base);
}

static inline void *crypto_blkcipher_ctx_aligned(struct crypto_blkcipher *tfm)
{
	return crypto_tfm_ctx_aligned(&tfm->base);
}
static inline struct crypto_cipher *crypto_spawn_cipher(
	struct crypto_spawn *spawn)
{
	u32 type = CRYPTO_ALG_TYPE_CIPHER;
	u32 mask = CRYPTO_ALG_TYPE_MASK;

	return __crypto_cipher_cast(crypto_spawn_tfm(spawn, type, mask));
}
static inline struct cipher_alg *crypto_cipher_alg(struct crypto_cipher *tfm)
{
	return &crypto_cipher_tfm(tfm)->__crt_alg->cra_cipher;
}
static inline struct crypto_hash *crypto_spawn_hash(struct crypto_spawn *spawn)
{
	u32 type = CRYPTO_ALG_TYPE_HASH;
	u32 mask = CRYPTO_ALG_TYPE_HASH_MASK;

	return __crypto_hash_cast(crypto_spawn_tfm(spawn, type, mask));
}
static inline void *crypto_hash_ctx(struct crypto_hash *tfm)
{
	return crypto_tfm_ctx(&tfm->base);
}

static inline void *crypto_hash_ctx_aligned(struct crypto_hash *tfm)
{
	return crypto_tfm_ctx_aligned(&tfm->base);
}
static inline void blkcipher_walk_init(struct blkcipher_walk *walk,
				       struct scatterlist *dst,
				       struct scatterlist *src,
				       unsigned int nbytes)
{
	walk->in.sg = src;
	walk->out.sg = dst;
	walk->total = nbytes;
}
static inline struct crypto_async_request *crypto_get_backlog(
	struct crypto_queue *queue)
{
	return queue->backlog == &queue->list ? NULL :
	       container_of(queue->backlog, struct crypto_async_request, list);
}
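/*
 * Usage sketch (illustrative only): the common pattern for draining a
 * crypto_queue from a worker thread.  "my_queue", "my_lock" and
 * my_process_req() are hypothetical, as is the locking policy.
 *
 *	struct crypto_async_request *req, *backlog;
 *
 *	spin_lock_bh(&my_lock);
 *	backlog = crypto_get_backlog(&my_queue);
 *	req = crypto_dequeue_request(&my_queue);
 *	spin_unlock_bh(&my_lock);
 *
 *	if (backlog)
 *		backlog->complete(backlog, -EINPROGRESS);
 *	if (req)
 *		my_process_req(req);
 */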
static inline int ablkcipher_enqueue_request(struct crypto_queue *queue,
					     struct ablkcipher_request *request)
{
	return crypto_enqueue_request(queue, &request->base);
}
static inline struct ablkcipher_request *ablkcipher_dequeue_request(
	struct crypto_queue *queue)
{
	return ablkcipher_request_cast(crypto_dequeue_request(queue));
}
static inline void *ablkcipher_request_ctx(struct ablkcipher_request *req)
{
	return req->__ctx;
}
static inline int ablkcipher_tfm_in_queue(struct crypto_queue *queue,
					  struct crypto_ablkcipher *tfm)
{
	return crypto_tfm_in_queue(queue, crypto_ablkcipher_tfm(tfm));
}
static inline void *aead_request_ctx(struct aead_request *req)
{
	return req->__ctx;
}
static inline void aead_request_complete(struct aead_request *req, int err)
{
	req->base.complete(&req->base, err);
}
static inline u32 aead_request_flags(struct aead_request *req)
{
	return req->base.flags;
}
static inline struct crypto_alg *crypto_get_attr_alg(struct rtattr **tb,
						     u32 type, u32 mask)
{
	return crypto_attr_alg(tb[1], type, mask);
}
/*
 * Returns CRYPTO_ALG_ASYNC if type/mask requires the use of sync algorithms.
 * Otherwise returns zero.
 */
static inline int crypto_requires_sync(u32 type, u32 mask)
{
	return (type ^ CRYPTO_ALG_ASYNC) & mask & CRYPTO_ALG_ASYNC;
}
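/*
 * Example (illustrative only): a template that must not transparently wrap
 * an async implementation can fold the caller's requirement into its
 * lookup mask.  "algt" is a hypothetical struct crypto_attr_type pointer.
 *
 *	alg = crypto_get_attr_alg(tb, CRYPTO_ALG_TYPE_CIPHER,
 *				  CRYPTO_ALG_TYPE_MASK |
 *				  crypto_requires_sync(algt->type,
 *						       algt->mask));
 */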
#endif	/* _CRYPTO_ALGAPI_H */