/*
 * Cryptographic API for algorithms (i.e., low-level API).
 *
 * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 */
#ifndef _CRYPTO_ALGAPI_H
#define _CRYPTO_ALGAPI_H

#include <linux/crypto.h>
#include <linux/list.h>
#include <linux/kernel.h>
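
/*
 * The low-level API revolves around three objects: a crypto_type
 * describes a class of transforms, a crypto_template constructs
 * algorithms at runtime, and a crypto_instance is an algorithm built
 * by such a template.
 */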
struct module;
struct rtattr;
struct seq_file;

struct crypto_type {
	unsigned int (*ctxsize)(struct crypto_alg *alg, u32 type, u32 mask);
	int (*init)(struct crypto_tfm *tfm, u32 type, u32 mask);
	void (*exit)(struct crypto_tfm *tfm);
	void (*show)(struct seq_file *m, struct crypto_alg *alg);
};

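/*
 * An instance is an algorithm created from a template, e.g. "cbc(aes)"
 * built by the "cbc" template; __ctx holds instance-private data such
 * as its spawns.
 */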
struct crypto_instance {
	struct crypto_alg alg;

	struct crypto_template *tmpl;
	struct hlist_node list;

	void *__ctx[] CRYPTO_MINALIGN_ATTR;
};

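/*
 * A registered template is looked up by name and asked to build an
 * instance from rtattr parameters parsed out of the requested
 * algorithm name ("cbc(aes)" invokes the "cbc" template).
 */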
struct crypto_template {
	struct list_head list;
	struct hlist_head instances;
	struct module *module;

	struct crypto_instance *(*alloc)(struct rtattr **tb);
	void (*free)(struct crypto_instance *inst);

	char name[CRYPTO_MAX_ALG_NAME];
};

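/*
 * A spawn is an instance's reference to its underlying algorithm;
 * it keeps that algorithm pinned for the lifetime of the instance.
 */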
struct crypto_spawn {
	struct list_head list;
	struct crypto_alg *alg;
	struct crypto_instance *inst;
	u32 mask;
};

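/*
 * Request queue for asynchronous implementations: once qlen reaches
 * max_qlen, new requests are only accepted onto the backlog, and only
 * when the submitter set CRYPTO_TFM_REQ_MAY_BACKLOG.
 */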
struct crypto_queue {
	struct list_head list;
	struct list_head *backlog;

	unsigned int qlen;
	unsigned int max_qlen;
};

struct scatter_walk {
	struct scatterlist *sg;
	unsigned int offset;
};

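/*
 * Walk state for processing a block cipher request chunk by chunk:
 * src/dst describe the currently mapped chunk, while in/out track
 * progress through the source and destination scatterlists.
 */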
struct blkcipher_walk {
	union {
		struct {
			struct page *page;
			unsigned long offset;
		} phys;

		struct {
			u8 *page;
			u8 *addr;
		} virt;
	} src, dst;

	struct scatter_walk in;
	unsigned int nbytes;

	struct scatter_walk out;
	unsigned int total;

	void *page;
	u8 *buffer;
	u8 *iv;

	int flags;
	unsigned int blocksize;
};

extern const struct crypto_type crypto_ablkcipher_type;
extern const struct crypto_type crypto_aead_type;
extern const struct crypto_type crypto_blkcipher_type;
extern const struct crypto_type crypto_hash_type;

void crypto_mod_put(struct crypto_alg *alg);

int crypto_register_template(struct crypto_template *tmpl);
void crypto_unregister_template(struct crypto_template *tmpl);
struct crypto_template *crypto_lookup_template(const char *name);

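/*
 * Typical registration of a template from its module (a minimal
 * sketch; "mytmpl", my_alloc and my_free are illustrative names, not
 * part of this API):
 *
 *	static struct crypto_template mytmpl = {
 *		.name = "mytmpl",
 *		.alloc = my_alloc,
 *		.free = my_free,
 *		.module = THIS_MODULE,
 *	};
 *
 *	static int __init mytmpl_init(void)
 *	{
 *		return crypto_register_template(&mytmpl);
 *	}
 *
 *	static void __exit mytmpl_exit(void)
 *	{
 *		crypto_unregister_template(&mytmpl);
 *	}
 */
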
int crypto_init_spawn(struct crypto_spawn *spawn, struct crypto_alg *alg,
		      struct crypto_instance *inst, u32 mask);
void crypto_drop_spawn(struct crypto_spawn *spawn);
struct crypto_tfm *crypto_spawn_tfm(struct crypto_spawn *spawn, u32 type,
				    u32 mask);

static inline void crypto_set_spawn(struct crypto_spawn *spawn,
				    struct crypto_instance *inst)
{
	spawn->inst = inst;
}

struct crypto_attr_type *crypto_get_attr_type(struct rtattr **tb);
int crypto_check_attr_type(struct rtattr **tb, u32 type);
const char *crypto_attr_alg_name(struct rtattr *rta);
struct crypto_alg *crypto_attr_alg(struct rtattr *rta, u32 type, u32 mask);
int crypto_attr_u32(struct rtattr *rta, u32 *num);
struct crypto_instance *crypto_alloc_instance(const char *name,
					      struct crypto_alg *alg);

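/*
 * Sketch of a template ->alloc built from the attribute helpers above
 * (my_alloc and "mytmpl" are illustrative; error handling is
 * abbreviated). crypto_alloc_instance() names the instance
 * "mytmpl(alg)" and initialises its spawn on alg:
 *
 *	static struct crypto_instance *my_alloc(struct rtattr **tb)
 *	{
 *		struct crypto_alg *alg;
 *		struct crypto_instance *inst;
 *
 *		alg = crypto_get_attr_alg(tb, CRYPTO_ALG_TYPE_CIPHER,
 *					  CRYPTO_ALG_TYPE_MASK);
 *		if (IS_ERR(alg))
 *			return ERR_PTR(PTR_ERR(alg));
 *
 *		inst = crypto_alloc_instance("mytmpl", alg);
 *		crypto_mod_put(alg);
 *		return inst;
 *	}
 */
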
void crypto_init_queue(struct crypto_queue *queue, unsigned int max_qlen);
int crypto_enqueue_request(struct crypto_queue *queue,
			   struct crypto_async_request *request);
struct crypto_async_request *crypto_dequeue_request(struct crypto_queue *queue);
int crypto_tfm_in_queue(struct crypto_queue *queue, struct crypto_tfm *tfm);

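/*
 * crypto_inc() steps a big-endian counter (e.g. the IV in counter
 * mode); crypto_xor() XORs src into dst, the chaining primitive of
 * modes such as CBC.
 */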
/* These functions require the input/output to be aligned as u32. */
void crypto_inc(u8 *a, unsigned int size);
void crypto_xor(u8 *dst, const u8 *src, unsigned int size);

int blkcipher_walk_done(struct blkcipher_desc *desc,
			struct blkcipher_walk *walk, int err);
int blkcipher_walk_virt(struct blkcipher_desc *desc,
			struct blkcipher_walk *walk);
int blkcipher_walk_phys(struct blkcipher_desc *desc,
			struct blkcipher_walk *walk);
int blkcipher_walk_virt_block(struct blkcipher_desc *desc,
			      struct blkcipher_walk *walk,
			      unsigned int blocksize);

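/*
 * Canonical walk loop for a mode implementation (a minimal sketch;
 * my_crypt_chunk is an illustrative name for the per-chunk cipher
 * work, and the sketch assumes every mapped chunk is consumed in
 * full, hence the 0 passed to blkcipher_walk_done()):
 *
 *	struct blkcipher_walk walk;
 *	int err;
 *
 *	blkcipher_walk_init(&walk, dst, src, nbytes);
 *	err = blkcipher_walk_virt(desc, &walk);
 *
 *	while (walk.nbytes) {
 *		my_crypt_chunk(walk.src.virt.addr, walk.dst.virt.addr,
 *			       walk.nbytes);
 *		err = blkcipher_walk_done(desc, &walk, 0);
 *	}
 *	return err;
 */
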
static inline void *crypto_tfm_ctx_aligned(struct crypto_tfm *tfm)
{
	unsigned long addr = (unsigned long)crypto_tfm_ctx(tfm);
	unsigned long align = crypto_tfm_alg_alignmask(tfm);

	/* alignmask no stricter than the default ctx alignment: no-op */
	if (align <= crypto_tfm_ctx_alignment())
		align = 1;

	return (void *)ALIGN(addr, align);
}

static inline struct crypto_instance *crypto_tfm_alg_instance(
	struct crypto_tfm *tfm)
{
	return container_of(tfm->__crt_alg, struct crypto_instance, alg);
}

static inline void *crypto_instance_ctx(struct crypto_instance *inst)
{
	return inst->__ctx;
}

static inline struct ablkcipher_alg *crypto_ablkcipher_alg(
	struct crypto_ablkcipher *tfm)
{
	return &crypto_ablkcipher_tfm(tfm)->__crt_alg->cra_ablkcipher;
}

static inline void *crypto_ablkcipher_ctx(struct crypto_ablkcipher *tfm)
{
	return crypto_tfm_ctx(&tfm->base);
}

static inline void *crypto_ablkcipher_ctx_aligned(struct crypto_ablkcipher *tfm)
{
	return crypto_tfm_ctx_aligned(&tfm->base);
}

static inline struct aead_alg *crypto_aead_alg(struct crypto_aead *tfm)
{
	return &crypto_aead_tfm(tfm)->__crt_alg->cra_aead;
}

static inline void *crypto_aead_ctx(struct crypto_aead *tfm)
{
	return crypto_tfm_ctx(&tfm->base);
}

static inline struct crypto_instance *crypto_aead_alg_instance(
	struct crypto_aead *aead)
{
	return crypto_tfm_alg_instance(&aead->base);
}

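/*
 * The crypto_spawn_*() helpers below instantiate a spawn's underlying
 * algorithm as a transform of the expected type, typically from an
 * instance's cra_init hook.
 */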
static inline struct crypto_blkcipher *crypto_spawn_blkcipher(
	struct crypto_spawn *spawn)
{
	u32 type = CRYPTO_ALG_TYPE_BLKCIPHER;
	u32 mask = CRYPTO_ALG_TYPE_MASK;

	return __crypto_blkcipher_cast(crypto_spawn_tfm(spawn, type, mask));
}

static inline void *crypto_blkcipher_ctx(struct crypto_blkcipher *tfm)
{
	return crypto_tfm_ctx(&tfm->base);
}

static inline void *crypto_blkcipher_ctx_aligned(struct crypto_blkcipher *tfm)
{
	return crypto_tfm_ctx_aligned(&tfm->base);
}

static inline struct crypto_cipher *crypto_spawn_cipher(
	struct crypto_spawn *spawn)
{
	u32 type = CRYPTO_ALG_TYPE_CIPHER;
	u32 mask = CRYPTO_ALG_TYPE_MASK;

	return __crypto_cipher_cast(crypto_spawn_tfm(spawn, type, mask));
}

static inline struct cipher_alg *crypto_cipher_alg(struct crypto_cipher *tfm)
{
	return &crypto_cipher_tfm(tfm)->__crt_alg->cra_cipher;
}

static inline struct crypto_hash *crypto_spawn_hash(struct crypto_spawn *spawn)
{
	u32 type = CRYPTO_ALG_TYPE_HASH;
	u32 mask = CRYPTO_ALG_TYPE_HASH_MASK;

	return __crypto_hash_cast(crypto_spawn_tfm(spawn, type, mask));
}

static inline void *crypto_hash_ctx_aligned(struct crypto_hash *tfm)
{
	return crypto_tfm_ctx_aligned(&tfm->base);
}

static inline void blkcipher_walk_init(struct blkcipher_walk *walk,
				       struct scatterlist *dst,
				       struct scatterlist *src,
				       unsigned int nbytes)
{
	walk->in.sg = src;
	walk->out.sg = dst;
	walk->total = nbytes;
}

static inline struct crypto_async_request *crypto_get_backlog(
	struct crypto_queue *queue)
{
	return queue->backlog == &queue->list ? NULL :
	       container_of(queue->backlog, struct crypto_async_request, list);
}

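/*
 * Typical consumer loop for a queue-backed driver (a minimal sketch;
 * my_process is an illustrative name, and queue access is normally
 * serialized by a driver lock, elided here). Dequeue the next request,
 * tell any backlogged submitter that its request has moved onto the
 * queue proper, then process:
 *
 *	backlog = crypto_get_backlog(&queue);
 *	req = crypto_dequeue_request(&queue);
 *
 *	if (backlog)
 *		backlog->complete(backlog, -EINPROGRESS);
 *	if (req)
 *		my_process(req);
 */
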
static inline int ablkcipher_enqueue_request(struct crypto_queue *queue,
					     struct ablkcipher_request *request)
{
	return crypto_enqueue_request(queue, &request->base);
}

static inline struct ablkcipher_request *ablkcipher_dequeue_request(
	struct crypto_queue *queue)
{
	return ablkcipher_request_cast(crypto_dequeue_request(queue));
}

static inline void *ablkcipher_request_ctx(struct ablkcipher_request *req)
{
	return req->__ctx;
}

static inline int ablkcipher_tfm_in_queue(struct crypto_queue *queue,
					  struct crypto_ablkcipher *tfm)
{
	return crypto_tfm_in_queue(queue, crypto_ablkcipher_tfm(tfm));
}

static inline void *aead_request_ctx(struct aead_request *req)
{
	return req->__ctx;
}

static inline void aead_request_complete(struct aead_request *req, int err)
{
	req->base.complete(&req->base, err);
}

static inline u32 aead_request_flags(struct aead_request *req)
{
	return req->base.flags;
}

static inline struct crypto_alg *crypto_get_attr_alg(struct rtattr **tb,
						     u32 type, u32 mask)
{
	return crypto_attr_alg(tb[1], type, mask);
}

/*
 * Returns CRYPTO_ALG_ASYNC if type/mask requires the use of sync algorithms.
 * Otherwise returns zero.
 */
static inline int crypto_requires_sync(u32 type, u32 mask)
{
	return (type ^ CRYPTO_ALG_ASYNC) & mask & CRYPTO_ALG_ASYNC;
}

#endif	/* _CRYPTO_ALGAPI_H */