/*
 * Support for VIA PadLock hardware crypto engine.
 *
 * Copyright (c) 2004  Michal Ludvig <michal@logix.cz>
 */

#include <crypto/algapi.h>
#include <crypto/aes.h>
#include <linux/module.h>
#include <linux/init.h>
#include <linux/types.h>
#include <linux/errno.h>
#include <linux/interrupt.h>
#include <linux/kernel.h>
#include <asm/byteorder.h>
#include "padlock.h"

/* Control word. */
struct cword {
	unsigned int __attribute__ ((__packed__))
		rounds:4,
		algo:3,
		keygen:1,
		interm:1,
		encdec:1,
		ksize:2;
} __attribute__ ((__aligned__(PADLOCK_ALIGNMENT)));

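/*
 * The control word is what the xcrypt instructions consume through EDX
 * (see padlock_xcrypt() below): rounds and ksize describe the key length,
 * encdec selects decryption, and keygen asks the engine to expand the key
 * itself.  aes_set_key() fills these fields in.
 */
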
/* Whenever making any changes to the following
 * structure *make sure* you keep E, d_data
 * and cword aligned on 16-byte boundaries, and that
 * the hardware can access 16 * 16 bytes of E and d_data
 * (only the first 15 * 16 bytes matter, but the HW reads
 * the full 16 * 16 bytes).
 */
struct aes_ctx {
	u32 E[AES_MAX_KEYLENGTH_U32]
		__attribute__ ((__aligned__(PADLOCK_ALIGNMENT)));
	u32 d_data[AES_MAX_KEYLENGTH_U32]
		__attribute__ ((__aligned__(PADLOCK_ALIGNMENT)));
	struct {
		struct cword encrypt;
		struct cword decrypt;
	} cword;
	u32 *D;
};

/* Tells whether the ACE is capable of generating
   the extended key for a given key_len. */
static inline int
aes_hw_extkey_available(uint8_t key_len)
{
	/* TODO: We should check the actual CPU model/stepping
	         as it's possible that the capability will be
	         added in the next CPU revisions. */
	if (key_len == 16)
		return 1;
	return 0;
}

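/*
 * In practice this means only 128-bit keys are expanded by the engine
 * itself; 192- and 256-bit keys take the software key-expansion path in
 * aes_set_key() below.
 */
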
static inline struct aes_ctx *aes_ctx_common(void *ctx)
{
	unsigned long addr = (unsigned long)ctx;
	unsigned long align = PADLOCK_ALIGNMENT;

	if (align <= crypto_tfm_ctx_alignment())
		align = 1;
	return (struct aes_ctx *)ALIGN(addr, align);
}

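/*
 * The crypto API only guarantees crypto_tfm_ctx_alignment() for the tfm
 * context.  When that guarantee already covers PADLOCK_ALIGNMENT the
 * pointer is used as-is (align = 1); otherwise it is rounded up so that E,
 * d_data and the control words land on the 16-byte boundaries the xcrypt
 * instructions expect.
 */
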
static inline struct aes_ctx *aes_ctx(struct crypto_tfm *tfm)
{
	return aes_ctx_common(crypto_tfm_ctx(tfm));
}

static inline struct aes_ctx *blk_aes_ctx(struct crypto_blkcipher *tfm)
{
	return aes_ctx_common(crypto_blkcipher_ctx(tfm));
}

static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
		       unsigned int key_len)
{
	struct aes_ctx *ctx = aes_ctx(tfm);
	const __le32 *key = (const __le32 *)in_key;
	u32 *flags = &tfm->crt_flags;
	struct crypto_aes_ctx gen_aes;

	if (key_len % 8) {
		*flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}

	/*
	 * If the hardware is capable of generating the extended key
	 * itself we must supply the plain key for both encryption
	 * and decryption.
	 */
	ctx->D = ctx->E;

	ctx->E[0] = le32_to_cpu(key[0]);
	ctx->E[1] = le32_to_cpu(key[1]);
	ctx->E[2] = le32_to_cpu(key[2]);
	ctx->E[3] = le32_to_cpu(key[3]);

	/* Prepare control words. */
	memset(&ctx->cword, 0, sizeof(ctx->cword));

	ctx->cword.decrypt.encdec = 1;
	ctx->cword.encrypt.rounds = 10 + (key_len - 16) / 4;
	ctx->cword.decrypt.rounds = ctx->cword.encrypt.rounds;
	ctx->cword.encrypt.ksize = (key_len - 16) / 8;
	ctx->cword.decrypt.ksize = ctx->cword.encrypt.ksize;

	/* Don't generate extended keys if the hardware can do it. */
	if (aes_hw_extkey_available(key_len))
		return 0;

	ctx->D = ctx->d_data;
	ctx->cword.encrypt.keygen = 1;
	ctx->cword.decrypt.keygen = 1;

	if (crypto_aes_expand_key(&gen_aes, in_key, key_len)) {
		*flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}

	memcpy(ctx->E, gen_aes.key_enc, AES_MAX_KEYLENGTH);
	memcpy(ctx->D, gen_aes.key_dec, AES_MAX_KEYLENGTH);
	return 0;
}

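/*
 * Control-word arithmetic above, worked out: a 16-byte key gives
 * rounds = 10 and ksize = 0, a 24-byte key gives rounds = 12 and ksize = 1,
 * and a 32-byte key gives rounds = 14 and ksize = 2.  For 128-bit keys the
 * engine expands the key itself (keygen stays 0 and D aliases E); for the
 * larger sizes crypto_aes_expand_key() fills E and d_data with the
 * software-generated encryption and decryption schedules.
 */
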
/* ====== Encryption/decryption routines ====== */

/* These are the real calls to PadLock. */
static inline void padlock_reset_key(void)
{
	asm volatile ("pushfl; popfl");
}

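/*
 * The engine caches key material between xcrypt operations; touching
 * EFLAGS with pushfl/popfl is the conventional way to make it reload the
 * key and control word on the next xcrypt, so state left behind by another
 * user of the unit is not reused.
 */
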
static inline void padlock_xcrypt(const u8 *input, u8 *output, void *key,
				  void *control_word)
{
	asm volatile (".byte 0xf3,0x0f,0xa7,0xc8"	/* rep xcryptecb */
		      : "+S"(input), "+D"(output)
		      : "d"(control_word), "b"(key), "c"(1));
}

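/*
 * The .byte sequence is REP XCRYPTECB, spelled out as raw bytes rather than
 * relying on assembler support for the mnemonic.  Register usage follows
 * the constraints above: ESI = source, EDI = destination, EBX = key
 * schedule, EDX = control word, ECX = number of 16-byte blocks (a single
 * block here).
 */
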
static void aes_crypt_copy(const u8 *in, u8 *out, u32 *key, struct cword *cword)
{
	u8 buf[AES_BLOCK_SIZE * 2 + PADLOCK_ALIGNMENT - 1];
	u8 *tmp = PTR_ALIGN(&buf[0], PADLOCK_ALIGNMENT);

	memcpy(tmp, in, AES_BLOCK_SIZE);
	padlock_xcrypt(tmp, out, key, cword);
}

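/*
 * Bounce buffer for the corner case handled in aes_crypt() below: the block
 * is first copied into an aligned stack buffer two blocks long, so the
 * engine can safely read past the single block it actually processes.
 */
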
static inline void aes_crypt(const u8 *in, u8 *out, u32 *key,
			     struct cword *cword)
{
	/* padlock_xcrypt requires at least two blocks of data. */
	if (unlikely(!(((unsigned long)in ^ (PAGE_SIZE - AES_BLOCK_SIZE)) &
		       (PAGE_SIZE - 1)))) {
		aes_crypt_copy(in, out, key, cword);
		return;
	}

	padlock_xcrypt(in, out, key, cword);
}

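/*
 * The test above is true exactly when 'in' starts at the last
 * AES_BLOCK_SIZE bytes of a page: XORing the address with
 * (PAGE_SIZE - AES_BLOCK_SIZE) and masking with (PAGE_SIZE - 1) yields zero
 * only at page offset PAGE_SIZE - 16 (offset 4080 with 4 KiB pages).  In
 * that case reading a second block could cross into an unmapped page, so
 * the copy path is used instead.
 */
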
static inline void padlock_xcrypt_ecb(const u8 *input, u8 *output, void *key,
				      void *control_word, u32 count)
{
	if (count == 1) {
		aes_crypt(input, output, key, control_word);
		return;
	}

	asm volatile ("test $1, %%cl;"
		      "je 1f;"
		      "lea -1(%%ecx), %%eax;"
		      "mov $1, %%ecx;"
		      ".byte 0xf3,0x0f,0xa7,0xc8;"	/* rep xcryptecb */
		      "mov %%eax, %%ecx;"
		      "1:"
		      ".byte 0xf3,0x0f,0xa7,0xc8"	/* rep xcryptecb */
		      : "+S"(input), "+D"(output)
		      : "d"(control_word), "b"(key), "c"(count)
		      : "ax");
}

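/*
 * For multi-block ECB requests the hardware is fed an even number of
 * blocks: if the count is odd, one block is processed first with ECX
 * temporarily set to 1, then the remaining even count is run (EAX holds the
 * saved count, hence the "ax" clobber).  Single-block requests go through
 * aes_crypt() and its page-boundary handling instead.
 */
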
static inline u8 *padlock_xcrypt_cbc(const u8 *input, u8 *output, void *key,
				     u8 *iv, void *control_word, u32 count)
{
	asm volatile (".byte 0xf3,0x0f,0xa7,0xd0"	/* rep xcryptcbc */
		      : "+S" (input), "+D" (output), "+a" (iv)
		      : "d" (control_word), "b" (key), "c" (count));
	return iv;
}

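/*
 * REP XCRYPTCBC takes the IV pointer in EAX and updates it as it chains
 * blocks, so on return 'iv' points at the last ciphertext block written.
 * Callers that walk a request in chunks (cbc_aes_encrypt below) copy that
 * block back into walk.iv to seed the next chunk.
 */
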
static void aes_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
{
	struct aes_ctx *ctx = aes_ctx(tfm);
	padlock_reset_key();
	aes_crypt(in, out, ctx->E, &ctx->cword.encrypt);
}

static void aes_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
{
	struct aes_ctx *ctx = aes_ctx(tfm);
	padlock_reset_key();
	aes_crypt(in, out, ctx->D, &ctx->cword.decrypt);
}

static struct crypto_alg aes_alg = {
	.cra_name		= "aes",
	.cra_driver_name	= "aes-padlock",
	.cra_priority		= PADLOCK_CRA_PRIORITY,
	.cra_flags		= CRYPTO_ALG_TYPE_CIPHER,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct aes_ctx),
	.cra_alignmask		= PADLOCK_ALIGNMENT - 1,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(aes_alg.cra_list),
	.cra_u			= {
		.cipher = {
			.cia_min_keysize	= AES_MIN_KEY_SIZE,
			.cia_max_keysize	= AES_MAX_KEY_SIZE,
			.cia_setkey		= aes_set_key,
			.cia_encrypt		= aes_encrypt,
			.cia_decrypt		= aes_decrypt,
		}
	}
};

static int ecb_aes_encrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct aes_ctx *ctx = blk_aes_ctx(desc->tfm);
	struct blkcipher_walk walk;
	int err;

	padlock_reset_key();

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);

	while ((nbytes = walk.nbytes)) {
		padlock_xcrypt_ecb(walk.src.virt.addr, walk.dst.virt.addr,
				   ctx->E, &ctx->cword.encrypt,
				   nbytes / AES_BLOCK_SIZE);
		nbytes &= AES_BLOCK_SIZE - 1;
		err = blkcipher_walk_done(desc, &walk, nbytes);
	}

	return err;
}

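/*
 * Pattern used by all four blkcipher handlers: blkcipher_walk_virt() maps
 * the scatterlists chunk by chunk, each chunk is processed in whole
 * AES_BLOCK_SIZE units, and the sub-block remainder
 * (nbytes & (AES_BLOCK_SIZE - 1)) is handed back to blkcipher_walk_done(),
 * which carries it over until nothing is left.
 */
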
static int ecb_aes_decrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct aes_ctx *ctx = blk_aes_ctx(desc->tfm);
	struct blkcipher_walk walk;
	int err;

	padlock_reset_key();

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);

	while ((nbytes = walk.nbytes)) {
		padlock_xcrypt_ecb(walk.src.virt.addr, walk.dst.virt.addr,
				   ctx->D, &ctx->cword.decrypt,
				   nbytes / AES_BLOCK_SIZE);
		nbytes &= AES_BLOCK_SIZE - 1;
		err = blkcipher_walk_done(desc, &walk, nbytes);
	}

	return err;
}

static struct crypto_alg ecb_aes_alg = {
	.cra_name		= "ecb(aes)",
	.cra_driver_name	= "ecb-aes-padlock",
	.cra_priority		= PADLOCK_COMPOSITE_PRIORITY,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct aes_ctx),
	.cra_alignmask		= PADLOCK_ALIGNMENT - 1,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(ecb_aes_alg.cra_list),
	.cra_u			= {
		.blkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.setkey		= aes_set_key,
			.encrypt	= ecb_aes_encrypt,
			.decrypt	= ecb_aes_decrypt,
		}
	}
};

static int cbc_aes_encrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct aes_ctx *ctx = blk_aes_ctx(desc->tfm);
	struct blkcipher_walk walk;
	int err;

	padlock_reset_key();

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);

	while ((nbytes = walk.nbytes)) {
		u8 *iv = padlock_xcrypt_cbc(walk.src.virt.addr,
					    walk.dst.virt.addr, ctx->E,
					    walk.iv, &ctx->cword.encrypt,
					    nbytes / AES_BLOCK_SIZE);
		memcpy(walk.iv, iv, AES_BLOCK_SIZE);
		nbytes &= AES_BLOCK_SIZE - 1;
		err = blkcipher_walk_done(desc, &walk, nbytes);
	}

	return err;
}

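/*
 * CBC encryption is inherently serial, so each chunk runs as one xcryptcbc
 * and the returned pointer to the last ciphertext block is copied into
 * walk.iv, chaining the next chunk and leaving the final IV behind for the
 * caller.
 */
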
static int cbc_aes_decrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct aes_ctx *ctx = blk_aes_ctx(desc->tfm);
	struct blkcipher_walk walk;
	int err;

	padlock_reset_key();

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);

	while ((nbytes = walk.nbytes)) {
		padlock_xcrypt_cbc(walk.src.virt.addr, walk.dst.virt.addr,
				   ctx->D, walk.iv, &ctx->cword.decrypt,
				   nbytes / AES_BLOCK_SIZE);
		nbytes &= AES_BLOCK_SIZE - 1;
		err = blkcipher_walk_done(desc, &walk, nbytes);
	}

	return err;
}

static struct crypto_alg cbc_aes_alg = {
	.cra_name		= "cbc(aes)",
	.cra_driver_name	= "cbc-aes-padlock",
	.cra_priority		= PADLOCK_COMPOSITE_PRIORITY,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct aes_ctx),
	.cra_alignmask		= PADLOCK_ALIGNMENT - 1,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(cbc_aes_alg.cra_list),
	.cra_u			= {
		.blkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.ivsize		= AES_BLOCK_SIZE,
			.setkey		= aes_set_key,
			.encrypt	= cbc_aes_encrypt,
			.decrypt	= cbc_aes_decrypt,
		}
	}
};

static int __init padlock_init(void)
{
	int ret;

	if (!cpu_has_xcrypt) {
		printk(KERN_NOTICE PFX "VIA PadLock not detected.\n");
		return -ENODEV;
	}

	if (!cpu_has_xcrypt_enabled) {
		printk(KERN_NOTICE PFX "VIA PadLock detected, but not enabled. Hmm, strange...\n");
		return -ENODEV;
	}

	if ((ret = crypto_register_alg(&aes_alg)))
		goto aes_err;

	if ((ret = crypto_register_alg(&ecb_aes_alg)))
		goto ecb_aes_err;

	if ((ret = crypto_register_alg(&cbc_aes_alg)))
		goto cbc_aes_err;

	printk(KERN_NOTICE PFX "Using VIA PadLock ACE for AES algorithm.\n");

out:
	return ret;

cbc_aes_err:
	crypto_unregister_alg(&ecb_aes_alg);
ecb_aes_err:
	crypto_unregister_alg(&aes_alg);
aes_err:
	printk(KERN_ERR PFX "VIA PadLock AES initialization failed.\n");
	goto out;
}

static void __exit padlock_fini(void)
{
	crypto_unregister_alg(&cbc_aes_alg);
	crypto_unregister_alg(&ecb_aes_alg);
	crypto_unregister_alg(&aes_alg);
}

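/*
 * Registration order matters for the error path in padlock_init(): the goto
 * chain unwinds in reverse, so a failure to register cbc unregisters ecb,
 * and a failure to register ecb unregisters the plain cipher; padlock_fini()
 * tears everything down in the same reverse order.  The PadLock priorities
 * (defined in padlock.h) are set above those of the generic software
 * implementations, so once registered these algorithms are preferred.
 */
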
module_init(padlock_init);
module_exit(padlock_fini);

MODULE_DESCRIPTION("VIA PadLock AES algorithm support");
MODULE_LICENSE("GPL");
MODULE_AUTHOR("Michal Ludvig");