/*
 * seqiv: Sequence Number IV Generator
 *
 * This generator generates an IV based on a sequence number by xoring it
 * with a salt.  This algorithm is mainly useful for CTR and similar modes.
 *
 * Copyright (c) 2007 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */
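
/*
 * Usage: instances of this template carry names of the form
 * "seqiv(<algorithm>)" (for example "seqiv(rfc3686(ctr(aes)))"; the inner
 * algorithm here is illustrative) and are normally instantiated by the
 * crypto API when an algorithm names "seqiv" as its default IV generator.
 * Deriving the IV from a monotonically increasing sequence number provides
 * the uniqueness that CTR-like modes require without needing per-request
 * randomness.
 */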

#include <crypto/internal/aead.h>
#include <crypto/internal/skcipher.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/random.h>
#include <linux/spinlock.h>
#include <linux/string.h>

struct seqiv_ctx {
	spinlock_t lock;
	u8 salt[] __attribute__ ((aligned(__alignof__(u32))));
};

/*
 * Completion handler for the case where the IV had to be bounced into a
 * temporary buffer: copy the generated IV back into the original request
 * and free the buffer.
 */
static void seqiv_complete2(struct skcipher_givcrypt_request *req, int err)
{
	struct ablkcipher_request *subreq = skcipher_givcrypt_reqctx(req);
	struct crypto_ablkcipher *geniv;

	if (err == -EINPROGRESS)
		return;

	if (err)
		goto out;

	geniv = skcipher_givcrypt_reqtfm(req);
	memcpy(req->creq.info, subreq->info, crypto_ablkcipher_ivsize(geniv));

out:
	kfree(subreq->info);
}

static void seqiv_complete(struct crypto_async_request *base, int err)
{
	struct skcipher_givcrypt_request *req = base->data;

	seqiv_complete2(req, err);
	skcipher_givcrypt_complete(req, err);
}

/* AEAD counterpart of seqiv_complete2(). */
static void seqiv_aead_complete2(struct aead_givcrypt_request *req, int err)
{
	struct aead_request *subreq = aead_givcrypt_reqctx(req);
	struct crypto_aead *geniv;

	if (err == -EINPROGRESS)
		return;

	if (err)
		goto out;

	geniv = aead_givcrypt_reqtfm(req);
	memcpy(req->areq.iv, subreq->iv, crypto_aead_ivsize(geniv));

out:
	kfree(subreq->iv);
}

static void seqiv_aead_complete(struct crypto_async_request *base, int err)
{
	struct aead_givcrypt_request *req = base->data;

	seqiv_aead_complete2(req, err);
	aead_givcrypt_complete(req, err);
}

/*
 * Build the IV: store the sequence number big-endian in the last (at most)
 * eight bytes of the IV, zero any leading bytes, then XOR the whole IV
 * with the salt.
 */
static void seqiv_geniv(struct seqiv_ctx *ctx, u8 *info, u64 seq,
			unsigned int ivsize)
{
	unsigned int len = ivsize;

	if (ivsize > sizeof(u64)) {
		memset(info, 0, ivsize - sizeof(u64));
		len = sizeof(u64);
	}

	seq = cpu_to_be64(seq);
	memcpy(info + ivsize - len, &seq, len);
	crypto_xor(info, ctx->salt, ivsize);
}
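
/*
 * Worked example (illustrative values): with ivsize == 8,
 * salt == 01 02 03 04 05 06 07 08 and seq == 5, the sequence number is
 * laid out big-endian as 00 00 00 00 00 00 00 05, so the generated IV
 * is 01 02 03 04 05 06 07 0d.  For ivsize > 8 the sequence number
 * occupies the trailing eight bytes and the leading bytes are zeroed
 * before the XOR, i.e. they are taken verbatim from the salt.
 */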

static int seqiv_givencrypt(struct skcipher_givcrypt_request *req)
{
	struct crypto_ablkcipher *geniv = skcipher_givcrypt_reqtfm(req);
	struct seqiv_ctx *ctx = crypto_ablkcipher_ctx(geniv);
	struct ablkcipher_request *subreq = skcipher_givcrypt_reqctx(req);
	crypto_completion_t complete;
	void *data;
	u8 *info;
	unsigned int ivsize;
	int err;

	ablkcipher_request_set_tfm(subreq, skcipher_geniv_cipher(geniv));

	complete = req->creq.base.complete;
	data = req->creq.base.data;
	info = req->creq.info;

	ivsize = crypto_ablkcipher_ivsize(geniv);

	/*
	 * If the caller's IV buffer is not aligned for the underlying
	 * cipher, bounce it into a temporary buffer and let the completion
	 * handler copy the result back.
	 */
	if (unlikely(!IS_ALIGNED((unsigned long)info,
				 crypto_ablkcipher_alignmask(geniv) + 1))) {
		info = kmalloc(ivsize, req->creq.base.flags &
			       CRYPTO_TFM_REQ_MAY_SLEEP ?
			       GFP_KERNEL : GFP_ATOMIC);
		if (!info)
			return -ENOMEM;

		complete = seqiv_complete;
		data = req;
	}

	ablkcipher_request_set_callback(subreq, req->creq.base.flags, complete,
					data);
	ablkcipher_request_set_crypt(subreq, req->creq.src, req->creq.dst,
				     req->creq.nbytes, info);

	seqiv_geniv(ctx, info, req->seq, ivsize);
	memcpy(req->giv, info, ivsize);

	err = crypto_ablkcipher_encrypt(subreq);
	if (unlikely(info != req->creq.info))
		seqiv_complete2(req, err);

	return err;
}
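
/*
 * Note: when the IV was bounced into a temporary buffer above, the
 * completion callback was redirected to seqiv_complete(), which copies
 * the generated IV back into the caller's request and frees the buffer;
 * for a synchronous return the explicit seqiv_complete2() call above
 * performs the same cleanup.  seqiv_aead_givencrypt() below follows the
 * same pattern for AEAD requests.
 */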

static int seqiv_aead_givencrypt(struct aead_givcrypt_request *req)
{
	struct crypto_aead *geniv = aead_givcrypt_reqtfm(req);
	struct seqiv_ctx *ctx = crypto_aead_ctx(geniv);
	struct aead_request *areq = &req->areq;
	struct aead_request *subreq = aead_givcrypt_reqctx(req);
	crypto_completion_t complete;
	void *data;
	u8 *info;
	unsigned int ivsize;
	int err;

	aead_request_set_tfm(subreq, aead_geniv_base(geniv));

	complete = areq->base.complete;
	data = areq->base.data;
	info = areq->iv;

	ivsize = crypto_aead_ivsize(geniv);

	if (unlikely(!IS_ALIGNED((unsigned long)info,
				 crypto_aead_alignmask(geniv) + 1))) {
		info = kmalloc(ivsize, areq->base.flags &
			       CRYPTO_TFM_REQ_MAY_SLEEP ?
			       GFP_KERNEL : GFP_ATOMIC);
		if (!info)
			return -ENOMEM;

		complete = seqiv_aead_complete;
		data = req;
	}

	aead_request_set_callback(subreq, areq->base.flags, complete, data);
	aead_request_set_crypt(subreq, areq->src, areq->dst, areq->cryptlen,
			       info);
	aead_request_set_assoc(subreq, areq->assoc, areq->assoclen);

	seqiv_geniv(ctx, info, req->seq, ivsize);
	memcpy(req->giv, info, ivsize);

	err = crypto_aead_encrypt(subreq);
	if (unlikely(info != areq->iv))
		seqiv_aead_complete2(req, err);

	return err;
}

/*
 * First-use entry point: generate the random salt once, then switch the
 * givencrypt hook over to seqiv_givencrypt() so that subsequent requests
 * take the lockless fast path.  The pointer recheck under the lock keeps
 * concurrent first requests from generating the salt twice.
 */
static int seqiv_givencrypt_first(struct skcipher_givcrypt_request *req)
{
	struct crypto_ablkcipher *geniv = skcipher_givcrypt_reqtfm(req);
	struct seqiv_ctx *ctx = crypto_ablkcipher_ctx(geniv);

	spin_lock_bh(&ctx->lock);
	if (crypto_ablkcipher_crt(geniv)->givencrypt != seqiv_givencrypt_first)
		goto unlock;

	crypto_ablkcipher_crt(geniv)->givencrypt = seqiv_givencrypt;
	get_random_bytes(ctx->salt, crypto_ablkcipher_ivsize(geniv));

unlock:
	spin_unlock_bh(&ctx->lock);

	return seqiv_givencrypt(req);
}

/* AEAD counterpart of seqiv_givencrypt_first(). */
static int seqiv_aead_givencrypt_first(struct aead_givcrypt_request *req)
{
	struct crypto_aead *geniv = aead_givcrypt_reqtfm(req);
	struct seqiv_ctx *ctx = crypto_aead_ctx(geniv);

	spin_lock_bh(&ctx->lock);
	if (crypto_aead_crt(geniv)->givencrypt != seqiv_aead_givencrypt_first)
		goto unlock;

	crypto_aead_crt(geniv)->givencrypt = seqiv_aead_givencrypt;
	get_random_bytes(ctx->salt, crypto_aead_ivsize(geniv));

unlock:
	spin_unlock_bh(&ctx->lock);

	return seqiv_aead_givencrypt(req);
}

static int seqiv_init(struct crypto_tfm *tfm)
{
	struct crypto_ablkcipher *geniv = __crypto_ablkcipher_cast(tfm);
	struct seqiv_ctx *ctx = crypto_ablkcipher_ctx(geniv);

	spin_lock_init(&ctx->lock);

	tfm->crt_ablkcipher.reqsize = sizeof(struct ablkcipher_request);

	return skcipher_geniv_init(tfm);
}

static int seqiv_aead_init(struct crypto_tfm *tfm)
{
	struct crypto_aead *geniv = __crypto_aead_cast(tfm);
	struct seqiv_ctx *ctx = crypto_aead_ctx(geniv);

	spin_lock_init(&ctx->lock);

	tfm->crt_aead.reqsize = sizeof(struct aead_request);

	return aead_geniv_init(tfm);
}

static struct crypto_template seqiv_tmpl;

static struct crypto_instance *seqiv_ablkcipher_alloc(struct rtattr **tb)
{
	struct crypto_instance *inst;

	inst = skcipher_geniv_alloc(&seqiv_tmpl, tb, 0, 0);

	if (IS_ERR(inst))
		goto out;

	inst->alg.cra_ablkcipher.givencrypt = seqiv_givencrypt_first;

	inst->alg.cra_init = seqiv_init;
	inst->alg.cra_exit = skcipher_geniv_exit;

	inst->alg.cra_ctxsize += inst->alg.cra_ablkcipher.ivsize;

out:
	return inst;
}

static struct crypto_instance *seqiv_aead_alloc(struct rtattr **tb)
{
	struct crypto_instance *inst;

	inst = aead_geniv_alloc(&seqiv_tmpl, tb, 0, 0);

	if (IS_ERR(inst))
		goto out;

	inst->alg.cra_aead.givencrypt = seqiv_aead_givencrypt_first;

	inst->alg.cra_init = seqiv_aead_init;
	inst->alg.cra_exit = aead_geniv_exit;

	inst->alg.cra_ctxsize = inst->alg.cra_aead.ivsize;

out:
	return inst;
}

static struct crypto_instance *seqiv_alloc(struct rtattr **tb)
{
	struct crypto_attr_type *algt;
	struct crypto_instance *inst;
	int err;

	algt = crypto_get_attr_type(tb);
	err = PTR_ERR(algt);
	if (IS_ERR(algt))
		return ERR_PTR(err);

	/* Dispatch on the requested type: ablkcipher vs. AEAD. */
	if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & CRYPTO_ALG_TYPE_MASK)
		inst = seqiv_ablkcipher_alloc(tb);
	else
		inst = seqiv_aead_alloc(tb);

	if (IS_ERR(inst))
		goto out;

	inst->alg.cra_alignmask |= __alignof__(u32) - 1;
	inst->alg.cra_ctxsize += sizeof(struct seqiv_ctx);

out:
	return inst;
}

static void seqiv_free(struct crypto_instance *inst)
{
	if ((inst->alg.cra_flags ^ CRYPTO_ALG_TYPE_AEAD) & CRYPTO_ALG_TYPE_MASK)
		skcipher_geniv_free(inst);
	else
		aead_geniv_free(inst);
}

static struct crypto_template seqiv_tmpl = {
	.name = "seqiv",
	.alloc = seqiv_alloc,
	.free = seqiv_free,
	.module = THIS_MODULE,
};

static int __init seqiv_module_init(void)
{
	return crypto_register_template(&seqiv_tmpl);
}

static void __exit seqiv_module_exit(void)
{
	crypto_unregister_template(&seqiv_tmpl);
}

module_init(seqiv_module_init);
module_exit(seqiv_module_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Sequence Number IV Generator");