/*
 * Cryptographic API for algorithms (i.e., low-level API).
 *
 * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 */
#ifndef _CRYPTO_ALGAPI_H
#define _CRYPTO_ALGAPI_H

#include <linux/crypto.h>
#include <linux/list.h>
#include <linux/kernel.h>

struct module;
struct rtattr;
struct seq_file;

struct crypto_type {
	unsigned int (*ctxsize)(struct crypto_alg *alg, u32 type, u32 mask);
	unsigned int (*extsize)(struct crypto_alg *alg,
				const struct crypto_type *frontend);
	int (*init)(struct crypto_tfm *tfm, u32 type, u32 mask);
	int (*init_tfm)(struct crypto_tfm *tfm,
			const struct crypto_type *frontend);
	void (*show)(struct seq_file *m, struct crypto_alg *alg);
	struct crypto_alg *(*lookup)(const char *name, u32 type, u32 mask);

	unsigned int type;
	unsigned int maskclear;
	unsigned int maskset;
	unsigned int tfmsize;
};

struct crypto_instance {
	struct crypto_alg alg;

	struct crypto_template *tmpl;
	struct hlist_node list;

	void *__ctx[] CRYPTO_MINALIGN_ATTR;
};

struct crypto_template {
	struct list_head list;
	struct hlist_head instances;
	struct module *module;

	struct crypto_instance *(*alloc)(struct rtattr **tb);
	void (*free)(struct crypto_instance *inst);

	char name[CRYPTO_MAX_ALG_NAME];
};

struct crypto_spawn {
	struct list_head list;
	struct crypto_alg *alg;
	struct crypto_instance *inst;
	u32 mask;
};

struct crypto_queue {
	struct list_head list;
	struct list_head *backlog;

	unsigned int qlen;
	unsigned int max_qlen;
};

struct scatter_walk {
	struct scatterlist *sg;
	unsigned int offset;
};

struct blkcipher_walk {
	union {
		struct {
			struct page *page;
			unsigned long offset;
		} phys;

		struct {
			u8 *page;
			u8 *addr;
		} virt;
	} src, dst;

	struct scatter_walk in;
	unsigned int nbytes;

	struct scatter_walk out;
	unsigned int total;

	void *page;
	u8 *buffer;
	u8 *iv;

	int flags;
	unsigned int blocksize;
};

extern const struct crypto_type crypto_ablkcipher_type;
extern const struct crypto_type crypto_aead_type;
extern const struct crypto_type crypto_blkcipher_type;
extern const struct crypto_type crypto_hash_type;

void crypto_mod_put(struct crypto_alg *alg);

int crypto_register_template(struct crypto_template *tmpl);
void crypto_unregister_template(struct crypto_template *tmpl);
struct crypto_template *crypto_lookup_template(const char *name);

int crypto_init_spawn(struct crypto_spawn *spawn, struct crypto_alg *alg,
		      struct crypto_instance *inst, u32 mask);
void crypto_drop_spawn(struct crypto_spawn *spawn);
struct crypto_tfm *crypto_spawn_tfm(struct crypto_spawn *spawn, u32 type,
				    u32 mask);

static inline void crypto_set_spawn(struct crypto_spawn *spawn,
				    struct crypto_instance *inst)
{
	spawn->inst = inst;
}

struct crypto_attr_type *crypto_get_attr_type(struct rtattr **tb);
int crypto_check_attr_type(struct rtattr **tb, u32 type);
const char *crypto_attr_alg_name(struct rtattr *rta);
struct crypto_alg *crypto_attr_alg(struct rtattr *rta, u32 type, u32 mask);
int crypto_attr_u32(struct rtattr *rta, u32 *num);
struct crypto_instance *crypto_alloc_instance(const char *name,
					      struct crypto_alg *alg);

void crypto_init_queue(struct crypto_queue *queue, unsigned int max_qlen);
int crypto_enqueue_request(struct crypto_queue *queue,
			   struct crypto_async_request *request);
void *__crypto_dequeue_request(struct crypto_queue *queue, unsigned int offset);
struct crypto_async_request *crypto_dequeue_request(struct crypto_queue *queue);
int crypto_tfm_in_queue(struct crypto_queue *queue, struct crypto_tfm *tfm);
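
/*
 * Example (illustrative sketch): a driver typically owns one crypto_queue,
 * initialises it with a fixed depth, enqueues incoming asynchronous
 * requests and processes them from a worker.  A request taken off the
 * backlog must first be completed with -EINPROGRESS.  All names other than
 * the queue API itself are hypothetical.
 *
 *	static struct crypto_queue sample_queue;
 *
 *	static void sample_queue_setup(void)
 *	{
 *		crypto_init_queue(&sample_queue, 100);
 *	}
 *
 *	static void sample_process_one(void)
 *	{
 *		struct crypto_async_request *req, *backlog;
 *
 *		backlog = crypto_get_backlog(&sample_queue);
 *		req = crypto_dequeue_request(&sample_queue);
 *		if (!req)
 *			return;
 *
 *		if (backlog)
 *			backlog->complete(backlog, -EINPROGRESS);
 *		req->complete(req, 0);
 *	}
 */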

/* These functions require the input/output to be aligned as u32. */
void crypto_inc(u8 *a, unsigned int size);
void crypto_xor(u8 *dst, const u8 *src, unsigned int size);
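
/*
 * Example (illustrative sketch): CTR-like modes use crypto_inc() to step a
 * big-endian block counter and crypto_xor() to combine the keystream with
 * the data in place.  The helper and buffer names are hypothetical and, per
 * the comment above, the buffers must be u32-aligned.
 *
 *	static void sample_xor_block(u8 *data, const u8 *keystream,
 *				     u8 *counter, unsigned int bsize)
 *	{
 *		crypto_xor(data, keystream, bsize);
 *		crypto_inc(counter, bsize);
 *	}
 */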

int blkcipher_walk_done(struct blkcipher_desc *desc,
			struct blkcipher_walk *walk, int err);
int blkcipher_walk_virt(struct blkcipher_desc *desc,
			struct blkcipher_walk *walk);
int blkcipher_walk_phys(struct blkcipher_desc *desc,
			struct blkcipher_walk *walk);
int blkcipher_walk_virt_block(struct blkcipher_desc *desc,
			      struct blkcipher_walk *walk,
			      unsigned int blocksize);
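
/*
 * Example (illustrative sketch): a blkcipher ->encrypt() handler walks the
 * source/destination scatterlists block by block.  "sample_encrypt" and
 * "sample_crypt_blocks" (which would process the mapped data and return the
 * number of bytes left over) are hypothetical.
 *
 *	static int sample_encrypt(struct blkcipher_desc *desc,
 *				  struct scatterlist *dst,
 *				  struct scatterlist *src,
 *				  unsigned int nbytes)
 *	{
 *		struct blkcipher_walk walk;
 *		int err;
 *
 *		blkcipher_walk_init(&walk, dst, src, nbytes);
 *		err = blkcipher_walk_virt(desc, &walk);
 *
 *		while ((nbytes = walk.nbytes)) {
 *			nbytes = sample_crypt_blocks(desc, &walk, nbytes);
 *			err = blkcipher_walk_done(desc, &walk, nbytes);
 *		}
 *
 *		return err;
 *	}
 */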

static inline void *crypto_tfm_ctx_aligned(struct crypto_tfm *tfm)
{
	unsigned long addr = (unsigned long)crypto_tfm_ctx(tfm);
	unsigned long align = crypto_tfm_alg_alignmask(tfm);

	if (align <= crypto_tfm_ctx_alignment())
		align = 1;
	return (void *)ALIGN(addr, align);
}
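
/*
 * Example (illustrative sketch): an algorithm that sets a non-zero
 * cra_alignmask can use crypto_tfm_ctx_aligned() so that its private
 * context starts on the alignment it requested.  "sample_ctx",
 * "sample_setkey" and "sample_expand_key" are hypothetical.
 *
 *	struct sample_ctx {
 *		u32 round_keys[60];
 *	};
 *
 *	static int sample_setkey(struct crypto_tfm *tfm, const u8 *key,
 *				 unsigned int keylen)
 *	{
 *		struct sample_ctx *ctx = crypto_tfm_ctx_aligned(tfm);
 *
 *		return sample_expand_key(ctx->round_keys, key, keylen);
 *	}
 */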

static inline struct crypto_instance *crypto_tfm_alg_instance(
	struct crypto_tfm *tfm)
{
	return container_of(tfm->__crt_alg, struct crypto_instance, alg);
}

static inline void *crypto_instance_ctx(struct crypto_instance *inst)
{
	return inst->__ctx;
}

static inline struct ablkcipher_alg *crypto_ablkcipher_alg(
	struct crypto_ablkcipher *tfm)
{
	return &crypto_ablkcipher_tfm(tfm)->__crt_alg->cra_ablkcipher;
}

static inline void *crypto_ablkcipher_ctx(struct crypto_ablkcipher *tfm)
{
	return crypto_tfm_ctx(&tfm->base);
}

static inline void *crypto_ablkcipher_ctx_aligned(struct crypto_ablkcipher *tfm)
{
	return crypto_tfm_ctx_aligned(&tfm->base);
}

static inline struct aead_alg *crypto_aead_alg(struct crypto_aead *tfm)
{
	return &crypto_aead_tfm(tfm)->__crt_alg->cra_aead;
}

static inline void *crypto_aead_ctx(struct crypto_aead *tfm)
{
	return crypto_tfm_ctx(&tfm->base);
}

static inline struct crypto_instance *crypto_aead_alg_instance(
	struct crypto_aead *aead)
{
	return crypto_tfm_alg_instance(&aead->base);
}

static inline struct crypto_blkcipher *crypto_spawn_blkcipher(
	struct crypto_spawn *spawn)
{
	u32 type = CRYPTO_ALG_TYPE_BLKCIPHER;
	u32 mask = CRYPTO_ALG_TYPE_MASK;

	return __crypto_blkcipher_cast(crypto_spawn_tfm(spawn, type, mask));
}

static inline void *crypto_blkcipher_ctx(struct crypto_blkcipher *tfm)
{
	return crypto_tfm_ctx(&tfm->base);
}

static inline void *crypto_blkcipher_ctx_aligned(struct crypto_blkcipher *tfm)
{
	return crypto_tfm_ctx_aligned(&tfm->base);
}

static inline struct crypto_cipher *crypto_spawn_cipher(
	struct crypto_spawn *spawn)
{
	u32 type = CRYPTO_ALG_TYPE_CIPHER;
	u32 mask = CRYPTO_ALG_TYPE_MASK;

	return __crypto_cipher_cast(crypto_spawn_tfm(spawn, type, mask));
}

static inline struct cipher_alg *crypto_cipher_alg(struct crypto_cipher *tfm)
{
	return &crypto_cipher_tfm(tfm)->__crt_alg->cra_cipher;
}

static inline struct crypto_hash *crypto_spawn_hash(struct crypto_spawn *spawn)
{
	u32 type = CRYPTO_ALG_TYPE_HASH;
	u32 mask = CRYPTO_ALG_TYPE_HASH_MASK;

	return __crypto_hash_cast(crypto_spawn_tfm(spawn, type, mask));
}

static inline void *crypto_hash_ctx(struct crypto_hash *tfm)
{
	return crypto_tfm_ctx(&tfm->base);
}

static inline void *crypto_hash_ctx_aligned(struct crypto_hash *tfm)
{
	return crypto_tfm_ctx_aligned(&tfm->base);
}

static inline void blkcipher_walk_init(struct blkcipher_walk *walk,
				       struct scatterlist *dst,
				       struct scatterlist *src,
				       unsigned int nbytes)
{
	walk->in.sg = src;
	walk->out.sg = dst;
	walk->total = nbytes;
}

static inline struct crypto_async_request *crypto_get_backlog(
	struct crypto_queue *queue)
{
	return queue->backlog == &queue->list ? NULL :
	       container_of(queue->backlog, struct crypto_async_request, list);
}

static inline int ablkcipher_enqueue_request(struct crypto_queue *queue,
					     struct ablkcipher_request *request)
{
	return crypto_enqueue_request(queue, &request->base);
}

static inline struct ablkcipher_request *ablkcipher_dequeue_request(
	struct crypto_queue *queue)
{
	return ablkcipher_request_cast(crypto_dequeue_request(queue));
}

static inline void *ablkcipher_request_ctx(struct ablkcipher_request *req)
{
	return req->__ctx;
}

static inline int ablkcipher_tfm_in_queue(struct crypto_queue *queue,
					  struct crypto_ablkcipher *tfm)
{
	return crypto_tfm_in_queue(queue, crypto_ablkcipher_tfm(tfm));
}

static inline void *aead_request_ctx(struct aead_request *req)
{
	return req->__ctx;
}

static inline void aead_request_complete(struct aead_request *req, int err)
{
	req->base.complete(&req->base, err);
}

static inline u32 aead_request_flags(struct aead_request *req)
{
	return req->base.flags;
}

static inline struct crypto_alg *crypto_get_attr_alg(struct rtattr **tb,
						     u32 type, u32 mask)
{
	return crypto_attr_alg(tb[1], type, mask);
}

/*
 * Returns CRYPTO_ALG_ASYNC if type/mask requires the use of sync algorithms.
 * Otherwise returns zero.
 */
static inline int crypto_requires_sync(u32 type, u32 mask)
{
	return (type ^ CRYPTO_ALG_ASYNC) & mask & CRYPTO_ALG_ASYNC;
}
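
/*
 * Example (illustrative sketch): a template that must stay synchronous ORs
 * the result of crypto_requires_sync() into the mask it uses to look up the
 * underlying algorithm, so an async implementation is never selected when
 * the caller asked for a sync one.  "algt" is assumed to come from
 * crypto_get_attr_type(); error handling is omitted.
 *
 *	struct crypto_attr_type *algt = crypto_get_attr_type(tb);
 *	struct crypto_alg *alg;
 *
 *	alg = crypto_get_attr_alg(tb, CRYPTO_ALG_TYPE_BLKCIPHER,
 *				  CRYPTO_ALG_TYPE_MASK |
 *				  crypto_requires_sync(algt->type,
 *						       algt->mask));
 */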

#endif	/* _CRYPTO_ALGAPI_H */