/*
 * eseqiv: Encrypted Sequence Number IV Generator
 *
 * This generator generates an IV based on a sequence number by xoring it
 * with a salt and then encrypting it with the same key as used to encrypt
 * the plain text.  This algorithm requires that the block size be equal
 * to the IV size.  It is mainly useful for CBC.
 *
 * Copyright (c) 2007 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 */
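/*
 * Illustrative note, assuming a CBC-mode inner cipher (the main intended
 * use): the generated IV is conceptually
 *
 *	IV = E_K(salt XOR pad(seq))
 *
 * where E_K is the underlying cipher keyed with the data key, salt is a
 * random per-tfm value of IV size, and pad() left-pads the 64-bit
 * big-endian sequence number with zeroes up to the IV size.  The code
 * below realises the XOR implicitly: the salt is used as the chaining IV
 * of a sub-request whose first block is the padded sequence number, so
 * the first ciphertext block equals E_K(salt XOR pad(seq)) and doubles as
 * the IV for the rest of the message.
 */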
#include <crypto/internal/skcipher.h>
#include <crypto/scatterwalk.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/mm.h>
#include <linux/module.h>
#include <linux/random.h>
#include <linux/scatterlist.h>
#include <linux/spinlock.h>
#include <linux/string.h>
struct eseqiv_request_ctx {
	struct scatterlist src[2];
	struct scatterlist dst[2];
	char tail[];		/* aligned IV buffer followed by the sub-request */
};

struct eseqiv_ctx {
	spinlock_t lock;	/* protects one-time salt initialisation */
	unsigned int reqoff;	/* offset of the sub-request within tail[] */
	char salt[];		/* random salt, one IV size in length */
};
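/*
 * Rough sketch of the request layout set up in eseqiv_init() below: the
 * request context starts with struct eseqiv_request_ctx; tail[] then holds
 * an IV-sized buffer aligned to the cipher's alignment mask (used when the
 * caller's buffers cannot receive the IV directly), and at ctx->reqoff the
 * ablkcipher sub-request that is handed to the underlying cipher.
 */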
/* Copy the generated IV out of the internal aligned buffer. */
static void eseqiv_complete2(struct skcipher_givcrypt_request *req)
{
	struct crypto_ablkcipher *geniv = skcipher_givcrypt_reqtfm(req);
	struct eseqiv_request_ctx *reqctx = skcipher_givcrypt_reqctx(req);

	memcpy(req->giv, PTR_ALIGN((u8 *)reqctx->tail,
		       crypto_ablkcipher_alignmask(geniv) + 1),
	       crypto_ablkcipher_ivsize(geniv));
}
static void eseqiv_complete(struct crypto_async_request *base, int err)
{
	struct skcipher_givcrypt_request *req = base->data;

	if (err)
		goto out;

	eseqiv_complete2(req);

out:
	skcipher_givcrypt_complete(req, err);
}
/* Link the caller's scatterlist sg behind the IV entry in head.  If chain
 * is set, the IV buffer is contiguous with the data, so the first entry
 * of sg is merged into head instead. */
static void eseqiv_chain(struct scatterlist *head, struct scatterlist *sg,
			 int chain)
{
	if (chain) {
		head->length += sg->length;
		sg = scatterwalk_sg_next(sg);
	}

	if (sg)
		scatterwalk_sg_chain(head, 2, sg);
	else
		sg_mark_end(head);
}
static int eseqiv_givencrypt(struct skcipher_givcrypt_request *req)
{
	struct crypto_ablkcipher *geniv = skcipher_givcrypt_reqtfm(req);
	struct eseqiv_ctx *ctx = crypto_ablkcipher_ctx(geniv);
	struct eseqiv_request_ctx *reqctx = skcipher_givcrypt_reqctx(req);
	struct ablkcipher_request *subreq;
	crypto_completion_t complete;
	void *data;
	struct scatterlist *osrc, *odst;
	struct scatterlist *dst;
	struct page *srcp;
	struct page *dstp;
	u8 *giv;
	u8 *vsrc;
	u8 *vdst;
	__be64 seq;
	unsigned int ivsize;
	unsigned int len;
	int err;

	subreq = (void *)(reqctx->tail + ctx->reqoff);
	ablkcipher_request_set_tfm(subreq, skcipher_geniv_cipher(geniv));

	giv = req->giv;
	complete = req->creq.base.complete;
	data = req->creq.base.data;

	osrc = req->creq.src;
	odst = req->creq.dst;
	srcp = sg_page(osrc);
	dstp = sg_page(odst);
	vsrc = PageHighMem(srcp) ? NULL : page_address(srcp) + osrc->offset;
	vdst = PageHighMem(dstp) ? NULL : page_address(dstp) + odst->offset;

	ivsize = crypto_ablkcipher_ivsize(geniv);

	/*
	 * If the caller's buffers do not leave room for the IV directly in
	 * front of the data, generate it into the aligned internal buffer
	 * and copy it out on completion.
	 */
	if (vsrc != giv + ivsize && vdst != giv + ivsize) {
		giv = PTR_ALIGN((u8 *)reqctx->tail,
				crypto_ablkcipher_alignmask(geniv) + 1);
		complete = eseqiv_complete;
		data = req;
	}

	ablkcipher_request_set_callback(subreq, req->creq.base.flags, complete,
					data);

	/* Prepend the IV block to the source (and destination) lists. */
	sg_init_table(reqctx->src, 2);
	sg_set_buf(reqctx->src, giv, ivsize);
	eseqiv_chain(reqctx->src, osrc, vsrc == giv + ivsize);

	dst = reqctx->src;
	if (osrc != odst) {
		sg_init_table(reqctx->dst, 2);
		sg_set_buf(reqctx->dst, giv, ivsize);
		eseqiv_chain(reqctx->dst, odst, vdst == giv + ivsize);

		dst = reqctx->dst;
	}

	ablkcipher_request_set_crypt(subreq, reqctx->src, dst,
				     req->creq.nbytes + ivsize,
				     req->creq.info);

	/* The salt becomes the chaining IV of the sub-request. */
	memcpy(req->creq.info, ctx->salt, ivsize);

	/* Write the big-endian sequence number, zero-padded to IV size. */
	len = ivsize;
	if (ivsize > sizeof(u64)) {
		memset(req->giv, 0, ivsize - sizeof(u64));
		len = sizeof(u64);
	}

	seq = cpu_to_be64(req->seq);
	memcpy(req->giv + ivsize - len, &seq, len);

	err = crypto_ablkcipher_encrypt(subreq);
	if (err)
		goto out;

	if (giv != req->giv)
		eseqiv_complete2(req);

out:
	return err;
}
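/*
 * Caller-side sketch, for illustration only (variable names are made up;
 * it assumes the givcrypt helpers from <crypto/skcipher.h> of the same
 * API generation):
 *
 *	struct skcipher_givcrypt_request *greq;
 *
 *	greq = skcipher_givcrypt_alloc(tfm, GFP_KERNEL);
 *	if (!greq)
 *		return -ENOMEM;
 *
 *	skcipher_givcrypt_set_callback(greq, 0, my_done, my_data);
 *	skcipher_givcrypt_set_crypt(greq, src, dst, nbytes, iv);
 *	skcipher_givcrypt_set_giv(greq, iv, seqno);
 *
 *	err = crypto_skcipher_givencrypt(greq);
 *
 * On success the generated IV is left in the buffer passed as giv.
 */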
/* Generate the salt on first use, then switch to eseqiv_givencrypt. */
static int eseqiv_givencrypt_first(struct skcipher_givcrypt_request *req)
{
	struct crypto_ablkcipher *geniv = skcipher_givcrypt_reqtfm(req);
	struct eseqiv_ctx *ctx = crypto_ablkcipher_ctx(geniv);

	spin_lock_bh(&ctx->lock);
	if (crypto_ablkcipher_crt(geniv)->givencrypt != eseqiv_givencrypt_first)
		goto unlock;

	crypto_ablkcipher_crt(geniv)->givencrypt = eseqiv_givencrypt;
	get_random_bytes(ctx->salt, crypto_ablkcipher_ivsize(geniv));

unlock:
	spin_unlock_bh(&ctx->lock);

	return eseqiv_givencrypt(req);
}
static int eseqiv_init(struct crypto_tfm *tfm)
{
	struct crypto_ablkcipher *geniv = __crypto_ablkcipher_cast(tfm);
	struct eseqiv_ctx *ctx = crypto_ablkcipher_ctx(geniv);
	unsigned long alignmask;
	unsigned int reqsize;

	spin_lock_init(&ctx->lock);

	alignmask = crypto_tfm_ctx_alignment() - 1;
	reqsize = sizeof(struct eseqiv_request_ctx);

	if (alignmask & reqsize) {
		alignmask &= reqsize;
		alignmask--;
	}

	alignmask = ~alignmask;
	alignmask &= crypto_ablkcipher_alignmask(geniv);

	/* Reserve room in tail[] for an IV buffer aligned to the cipher's
	 * alignment mask, followed by the sub-request at ctx->reqoff. */
	reqsize += alignmask;
	reqsize += crypto_ablkcipher_ivsize(geniv);
	reqsize = ALIGN(reqsize, crypto_tfm_ctx_alignment());

	ctx->reqoff = reqsize - sizeof(struct eseqiv_request_ctx);

	tfm->crt_ablkcipher.reqsize = reqsize +
				      sizeof(struct ablkcipher_request);

	return skcipher_geniv_init(tfm);
}
static struct crypto_template eseqiv_tmpl;
static struct crypto_instance *eseqiv_alloc(struct rtattr **tb)
{
	struct crypto_instance *inst;
	int err;

	inst = skcipher_geniv_alloc(&eseqiv_tmpl, tb, 0, 0);
	if (IS_ERR(inst))
		goto out;

	/* The generated IV is one ciphertext block, so the IV size of the
	 * underlying cipher must equal its block size. */
	err = -EINVAL;
	if (inst->alg.cra_ablkcipher.ivsize != inst->alg.cra_blocksize)
		goto free_inst;

	inst->alg.cra_ablkcipher.givencrypt = eseqiv_givencrypt_first;

	inst->alg.cra_init = eseqiv_init;
	inst->alg.cra_exit = skcipher_geniv_exit;

	inst->alg.cra_ctxsize = sizeof(struct eseqiv_ctx);
	inst->alg.cra_ctxsize += inst->alg.cra_ablkcipher.ivsize;

out:
	return inst;

free_inst:
	skcipher_geniv_free(inst);
	inst = ERR_PTR(err);
	goto out;
}
static struct crypto_template eseqiv_tmpl = {
	.name = "eseqiv",
	.alloc = eseqiv_alloc,
	.free = skcipher_geniv_free,
	.module = THIS_MODULE,
};
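/*
 * Instantiation sketch, for illustration only (the inner algorithm name is
 * just an example): a user can request an eseqiv-wrapped cipher explicitly
 * by template name,
 *
 *	struct crypto_ablkcipher *tfm;
 *
 *	tfm = crypto_alloc_ablkcipher("eseqiv(cbc(aes))", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	...
 *	crypto_free_ablkcipher(tfm);
 *
 * or rely on the crypto layer's default IV-generator selection when plain
 * "cbc(aes)" is requested.
 */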
static int __init eseqiv_module_init(void)
{
	return crypto_register_template(&eseqiv_tmpl);
}

static void __exit eseqiv_module_exit(void)
{
	crypto_unregister_template(&eseqiv_tmpl);
}

module_init(eseqiv_module_init);
module_exit(eseqiv_module_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Encrypted Sequence Number IV Generator");