/*
 * Key Wrapping: RFC3394 / NIST SP800-38F
 *
 * Copyright (C) 2015, Stephan Mueller <smueller@chronox.de>
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, and the entire permission notice in its entirety,
 *    including the disclaimer of warranties.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. The name of the author may not be used to endorse or promote
 *    products derived from this software without specific prior
 *    written permission.
 *
 * ALTERNATIVELY, this product may be distributed under the terms of
 * the GNU General Public License, in which case the provisions of the GPL2
 * are required INSTEAD OF the above restrictions.  (This clause is
 * necessary due to a potential bad interaction between the GPL and
 * the restrictions contained in a BSD-style copyright.)
 *
 * THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESS OR IMPLIED
 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE, ALL OF
 * WHICH ARE HEREBY DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR BE
 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
 * OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
 * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
 * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
 * USE OF THIS SOFTWARE, EVEN IF NOT ADVISED OF THE POSSIBILITY OF SUCH
 * DAMAGE.
 */

/*
 * Note for using key wrapping:
 *
 *	* The result of the encryption operation is the ciphertext starting
 *	  with the 2nd semiblock. The first semiblock is provided as the IV.
 *	  The IV used to start the encryption operation is the default IV.
 *
 *	* The input for the decryption is the first semiblock handed in as an
 *	  IV. The ciphertext is the data starting with the 2nd semiblock. The
 *	  return code of the decryption operation will be EBADMSG in case an
 *	  integrity error occurs.
 *
 * To obtain the full result of an encryption as expected by SP800-38F, the
 * caller must allocate a buffer of plaintext + 8 bytes:
 *
 *	unsigned int datalen = ptlen + crypto_skcipher_ivsize(tfm);
 *	u8 data[datalen];
 *	u8 *iv = data;
 *	u8 *pt = data + crypto_skcipher_ivsize(tfm);
 *		<ensure that pt contains the plaintext of size ptlen>
 *	sg_init_one(&sg, pt, ptlen);
 *	skcipher_request_set_crypt(req, &sg, &sg, ptlen, iv);
 *
 *	==> After encryption, data now contains the full KW result as per
 *	    SP800-38F.
 *
 * In case of decryption, the ciphertext already has the expected length
 * and must be segmented appropriately:
 *
 *	unsigned int datalen = CTLEN;
 *	u8 data[datalen];
 *		<ensure that data contains the full ciphertext>
 *	u8 *iv = data;
 *	u8 *ct = data + crypto_skcipher_ivsize(tfm);
 *	unsigned int ctlen = datalen - crypto_skcipher_ivsize(tfm);
 *	sg_init_one(&sg, ct, ctlen);
 *	skcipher_request_set_crypt(req, &sg, &sg, ctlen, iv);
 *
 *	==> After decryption (which hopefully does not return EBADMSG), the ct
 *	    pointer now points to the plaintext of size ctlen.
 *
 * Note 2: KWP is not implemented as this would defy in-place operation.
 *	   To wrap data that is not aligned to the semiblock size, simply pad
 *	   the input with zeros up to the next 8 byte boundary.
 */

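/*
 * For illustration only -- not part of this module: a minimal sketch of how
 * a caller might drive the "kw(aes)" template through the skcipher API used
 * in the snippets above. All names (example_kw_wrap and its parameters) are
 * hypothetical, error unwinding is abbreviated, and ptlen is assumed to be a
 * multiple of 8 (zero-padded per Note 2). data points to a buffer of
 * ptlen + 8 bytes laid out as IV || plaintext.
 *
 *	static int example_kw_wrap(const u8 *key, unsigned int keylen,
 *				   u8 *data, unsigned int ptlen)
 *	{
 *		struct crypto_skcipher *tfm;
 *		struct skcipher_request *req;
 *		struct scatterlist sg;
 *		u8 *iv = data;
 *		u8 *pt = data + 8;
 *		int ret;
 *
 *		tfm = crypto_alloc_skcipher("kw(aes)", 0, 0);
 *		if (IS_ERR(tfm))
 *			return PTR_ERR(tfm);
 *		ret = crypto_skcipher_setkey(tfm, key, keylen);
 *		if (ret)
 *			goto out;
 *		req = skcipher_request_alloc(tfm, GFP_KERNEL);
 *		if (!req) {
 *			ret = -ENOMEM;
 *			goto out;
 *		}
 *		sg_init_one(&sg, pt, ptlen);
 *		skcipher_request_set_crypt(req, &sg, &sg, ptlen, iv);
 *		ret = crypto_skcipher_encrypt(req);
 *		// on success, data holds the full SP800-38F result
 *		skcipher_request_free(req);
 *	out:
 *		crypto_free_skcipher(tfm);
 *		return ret;
 *	}
 */
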
#include <linux/module.h>
#include <linux/crypto.h>
#include <linux/scatterlist.h>
#include <crypto/scatterwalk.h>
#include <crypto/internal/skcipher.h>

struct crypto_kw_ctx {
	struct crypto_cipher *child;
};

/*
 * The wrapping block processed by the underlying cipher: the 64-bit
 * integrity check register A followed by one 64-bit data semiblock R.
 */
struct crypto_kw_block {
#define SEMIBSIZE 8
	u8 A[SEMIBSIZE];
	u8 R[SEMIBSIZE];
};

/* convert 64 bit integer into its string representation */
static inline void crypto_kw_cpu_to_be64(u64 val, u8 *buf)
{
	__be64 *a = (__be64 *)buf;

	*a = cpu_to_be64(val);
}

/*
 * Fast forward the SGL to the "end" length minus SEMIBSIZE.
 * The start in the SGL defined by the fast-forward is returned with
 * the walk variable.
 */
static void crypto_kw_scatterlist_ff(struct scatter_walk *walk,
				     struct scatterlist *sg,
				     unsigned int end)
{
	unsigned int skip = 0;

	/* The caller should only operate on full SEMIBLOCKs. */
	BUG_ON(end < SEMIBSIZE);

	skip = end - SEMIBSIZE;
	while (sg) {
		if (sg->length > skip) {
			scatterwalk_start(walk, sg);
			scatterwalk_advance(walk, skip);
			break;
		}

		skip -= sg->length;
		sg = sg_next(sg);
	}
}

static int crypto_kw_decrypt(struct blkcipher_desc *desc,
			     struct scatterlist *dst, struct scatterlist *src,
			     unsigned int nbytes)
{
	struct crypto_blkcipher *tfm = desc->tfm;
	struct crypto_kw_ctx *ctx = crypto_blkcipher_ctx(tfm);
	struct crypto_cipher *child = ctx->child;

	unsigned long alignmask = max_t(unsigned long, SEMIBSIZE,
					crypto_cipher_alignmask(child));
	unsigned int i;

	u8 blockbuf[sizeof(struct crypto_kw_block) + alignmask];
	struct crypto_kw_block *block = (struct crypto_kw_block *)
					PTR_ALIGN(blockbuf + 0, alignmask + 1);

	u64 t = 6 * ((nbytes) >> 3);
	struct scatterlist *lsrc, *ldst;
	int ret = 0;

	/*
	 * Require at least 2 semiblocks (note: the 3rd semiblock required
	 * by SP800-38F is the IV).
	 */
	if (nbytes < (2 * SEMIBSIZE) || nbytes % SEMIBSIZE)
		return -EINVAL;

	/* Place the IV into block A */
	memcpy(block->A, desc->info, SEMIBSIZE);

	/*
	 * The src scatterlist is read-only, the dst scatterlist is r/w.
	 * During the first loop, lsrc points to src and ldst to dst. For any
	 * subsequent round, the code operates on dst only.
	 */
	lsrc = src;
	ldst = dst;

	for (i = 0; i < 6; i++) {
		u8 tbe_buffer[SEMIBSIZE + alignmask];
		/* alignment for the crypto_xor and the _to_be64 operation */
		u8 *tbe = PTR_ALIGN(tbe_buffer + 0, alignmask + 1);
		unsigned int tmp_nbytes = nbytes;
		struct scatter_walk src_walk, dst_walk;

		while (tmp_nbytes) {
			/* move pointer by tmp_nbytes in the SGL */
			crypto_kw_scatterlist_ff(&src_walk, lsrc, tmp_nbytes);
			/* get the source block */
			scatterwalk_copychunks(block->R, &src_walk, SEMIBSIZE,
					       false);

			/* perform KW operation: get counter as byte string */
			crypto_kw_cpu_to_be64(t, tbe);
			/* perform KW operation: modify IV with counter */
			crypto_xor(block->A, tbe, SEMIBSIZE);
			t--;
			/* perform KW operation: decrypt block */
			crypto_cipher_decrypt_one(child, (u8 *)block,
						  (u8 *)block);

			/* move pointer by tmp_nbytes in the SGL */
			crypto_kw_scatterlist_ff(&dst_walk, ldst, tmp_nbytes);
			/* Copy block->R into place */
			scatterwalk_copychunks(block->R, &dst_walk, SEMIBSIZE,
					       true);

			tmp_nbytes -= SEMIBSIZE;
		}

		/* we now start to operate on the dst SGL only */
		lsrc = dst;
		ldst = dst;
	}

	/* Perform authentication check */
	if (crypto_memneq("\xA6\xA6\xA6\xA6\xA6\xA6\xA6\xA6", block->A,
			  SEMIBSIZE))
		ret = -EBADMSG;

	memzero_explicit(block, sizeof(struct crypto_kw_block));

	return ret;
}

static int crypto_kw_encrypt(struct blkcipher_desc *desc,
			     struct scatterlist *dst, struct scatterlist *src,
			     unsigned int nbytes)
{
	struct crypto_blkcipher *tfm = desc->tfm;
	struct crypto_kw_ctx *ctx = crypto_blkcipher_ctx(tfm);
	struct crypto_cipher *child = ctx->child;

	unsigned long alignmask = max_t(unsigned long, SEMIBSIZE,
					crypto_cipher_alignmask(child));
	unsigned int i;

	u8 blockbuf[sizeof(struct crypto_kw_block) + alignmask];
	struct crypto_kw_block *block = (struct crypto_kw_block *)
					PTR_ALIGN(blockbuf + 0, alignmask + 1);

	u64 t = 1;
	struct scatterlist *lsrc, *ldst;

	/*
	 * Require at least 2 semiblocks (note: the 3rd semiblock required by
	 * SP800-38F is the IV that occupies the first semiblock). This means
	 * that the dst memory must be one semiblock larger than src. Also
	 * ensure that the given data is aligned to the semiblock size.
	 */
	if (nbytes < (2 * SEMIBSIZE) || nbytes % SEMIBSIZE)
		return -EINVAL;

	/*
	 * Place the predefined IV into block A -- for encrypt, the caller
	 * does not need to provide an IV, but must fetch the final IV.
	 */
	memcpy(block->A, "\xA6\xA6\xA6\xA6\xA6\xA6\xA6\xA6", SEMIBSIZE);

	/*
	 * The src scatterlist is read-only, the dst scatterlist is r/w.
	 * During the first loop, lsrc points to src and ldst to dst. For any
	 * subsequent round, the code operates on dst only.
	 */
	lsrc = src;
	ldst = dst;

	for (i = 0; i < 6; i++) {
		u8 tbe_buffer[SEMIBSIZE + alignmask];
		u8 *tbe = PTR_ALIGN(tbe_buffer + 0, alignmask + 1);
		unsigned int tmp_nbytes = nbytes;
		struct scatter_walk src_walk, dst_walk;

		scatterwalk_start(&src_walk, lsrc);
		scatterwalk_start(&dst_walk, ldst);

		while (tmp_nbytes) {
			/* get the source block */
			scatterwalk_copychunks(block->R, &src_walk, SEMIBSIZE,
					       false);

			/* perform KW operation: encrypt block */
			crypto_cipher_encrypt_one(child, (u8 *)block,
						  (u8 *)block);
			/* perform KW operation: get counter as byte string */
			crypto_kw_cpu_to_be64(t, tbe);
			/* perform KW operation: modify IV with counter */
			crypto_xor(block->A, tbe, SEMIBSIZE);
			t++;

			/* Copy block->R into place */
			scatterwalk_copychunks(block->R, &dst_walk, SEMIBSIZE,
					       true);

			tmp_nbytes -= SEMIBSIZE;
		}

		/* we now start to operate on the dst SGL only */
		lsrc = dst;
		ldst = dst;
	}

	/* establish the IV for the caller to pick up */
	memcpy(desc->info, block->A, SEMIBSIZE);

	memzero_explicit(block, sizeof(struct crypto_kw_block));

	return 0;
}

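/*
 * Sanity reference, not from this file: RFC 3394 section 4.1 publishes a
 * known-answer test for wrapping 128 bits of key data with a 128-bit KEK.
 * With KEK 000102030405060708090A0B0C0D0E0F and plaintext
 * 00112233445566778899AABBCCDDEEFF, the wrapped result is
 * 1FA68B0A8112B447AEF34BD8FB5A7B829D3E862371D2CFE5. Mapped onto this
 * implementation's convention, the first semiblock 1FA68B0A8112B447 would be
 * returned through desc->info as the IV, and the remaining 16 bytes as the
 * ciphertext in dst.
 */
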
static int crypto_kw_setkey(struct crypto_tfm *parent, const u8 *key,
			    unsigned int keylen)
{
	struct crypto_kw_ctx *ctx = crypto_tfm_ctx(parent);
	struct crypto_cipher *child = ctx->child;
	int err;

	crypto_cipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_cipher_set_flags(child, crypto_tfm_get_flags(parent) &
				       CRYPTO_TFM_REQ_MASK);
	err = crypto_cipher_setkey(child, key, keylen);
	crypto_tfm_set_flags(parent, crypto_cipher_get_flags(child) &
				     CRYPTO_TFM_RES_MASK);
	return err;
}

static int crypto_kw_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
	struct crypto_spawn *spawn = crypto_instance_ctx(inst);
	struct crypto_kw_ctx *ctx = crypto_tfm_ctx(tfm);
	struct crypto_cipher *cipher;

	cipher = crypto_spawn_cipher(spawn);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->child = cipher;
	return 0;
}

static void crypto_kw_exit_tfm(struct crypto_tfm *tfm)
{
	struct crypto_kw_ctx *ctx = crypto_tfm_ctx(tfm);

	crypto_free_cipher(ctx->child);
}

static struct crypto_instance *crypto_kw_alloc(struct rtattr **tb)
{
	struct crypto_instance *inst = NULL;
	struct crypto_alg *alg = NULL;
	int err;

	err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_BLKCIPHER);
	if (err)
		return ERR_PTR(err);

	alg = crypto_get_attr_alg(tb, CRYPTO_ALG_TYPE_CIPHER,
				  CRYPTO_ALG_TYPE_MASK);
	if (IS_ERR(alg))
		return ERR_CAST(alg);

	inst = ERR_PTR(-EINVAL);
	/* Section 5.1 requirement for KW */
	if (alg->cra_blocksize != sizeof(struct crypto_kw_block))
		goto err;

	inst = crypto_alloc_instance("kw", alg);
	if (IS_ERR(inst))
		goto err;

	inst->alg.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER;
	inst->alg.cra_priority = alg->cra_priority;
	inst->alg.cra_blocksize = SEMIBSIZE;
	inst->alg.cra_alignmask = 0;
	inst->alg.cra_type = &crypto_blkcipher_type;
	inst->alg.cra_blkcipher.ivsize = SEMIBSIZE;
	inst->alg.cra_blkcipher.min_keysize = alg->cra_cipher.cia_min_keysize;
	inst->alg.cra_blkcipher.max_keysize = alg->cra_cipher.cia_max_keysize;

	inst->alg.cra_ctxsize = sizeof(struct crypto_kw_ctx);

	inst->alg.cra_init = crypto_kw_init_tfm;
	inst->alg.cra_exit = crypto_kw_exit_tfm;

	inst->alg.cra_blkcipher.setkey = crypto_kw_setkey;
	inst->alg.cra_blkcipher.encrypt = crypto_kw_encrypt;
	inst->alg.cra_blkcipher.decrypt = crypto_kw_decrypt;

err:
	crypto_mod_put(alg);
	return inst;
}

static void crypto_kw_free(struct crypto_instance *inst)
{
	crypto_drop_spawn(crypto_instance_ctx(inst));
	kfree(inst);
}

static struct crypto_template crypto_kw_tmpl = {
	.name = "kw",
	.alloc = crypto_kw_alloc,
	.free = crypto_kw_free,
	.module = THIS_MODULE,
};

static int __init crypto_kw_init(void)
{
	return crypto_register_template(&crypto_kw_tmpl);
}

static void __exit crypto_kw_exit(void)
{
	crypto_unregister_template(&crypto_kw_tmpl);
}

module_init(crypto_kw_init);
module_exit(crypto_kw_exit);

MODULE_LICENSE("Dual BSD/GPL");
MODULE_AUTHOR("Stephan Mueller <smueller@chronox.de>");
MODULE_DESCRIPTION("Key Wrapping (RFC3394 / NIST SP800-38F)");
MODULE_ALIAS_CRYPTO("kw");