/* Copyright (C) 2004-2006, Advanced Micro Devices, Inc.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 */
#include <linux/module.h>
#include <linux/kernel.h>
#include <linux/pci.h>
#include <linux/pci_ids.h>
#include <linux/crypto.h>
#include <linux/spinlock.h>
#include <crypto/algapi.h>

#include <asm/io.h>
#include <asm/delay.h>

#include "geode-aes.h"
/* Register definitions */

#define AES_CTRLA_REG		0x0000

/* Bits of the control register (AES_CTRLA_REG) */
#define AES_CTRL_START		0x01	/* kick off the operation */
#define AES_CTRL_DECRYPT	0x00
#define AES_CTRL_ENCRYPT	0x02
#define AES_CTRL_WRKEY		0x04	/* use the software-written key */
#define AES_CTRL_DCA		0x08	/* destination coherency */
#define AES_CTRL_SCA		0x10	/* source coherency */
#define AES_CTRL_CBC		0x20	/* CBC mode (ECB when clear) */

#define AES_INTR_REG		0x0008

#define AES_INTRA_PENDING	(1 << 16)
#define AES_INTRB_PENDING	(1 << 17)

#define AES_INTR_PENDING	(AES_INTRA_PENDING | AES_INTRB_PENDING)
#define AES_INTR_MASK		0x07

#define AES_SOURCEA_REG		0x0010
#define AES_DSTA_REG		0x0014
#define AES_LENA_REG		0x0018
#define AES_WRITEKEY0_REG	0x0030
#define AES_WRITEIV0_REG	0x0040

/* A very large counter that is used to gracefully bail out of an
 * operation in case of trouble
 */
#define AES_OP_TIMEOUT		0x50000
54 /* Static structures */
56 static void __iomem
* _iobase
;
57 static spinlock_t lock
;
59 /* Write a 128 bit field (either a writable key or IV) */
61 _writefield(u32 offset
, void *value
)
64 for(i
= 0; i
< 4; i
++)
65 iowrite32(((u32
*) value
)[i
], _iobase
+ offset
+ (i
* 4));
68 /* Read a 128 bit field (either a writable key or IV) */
70 _readfield(u32 offset
, void *value
)
73 for(i
= 0; i
< 4; i
++)
74 ((u32
*) value
)[i
] = ioread32(_iobase
+ offset
+ (i
* 4));
78 do_crypt(void *src
, void *dst
, int len
, u32 flags
)
81 u32 counter
= AES_OP_TIMEOUT
;
83 iowrite32(virt_to_phys(src
), _iobase
+ AES_SOURCEA_REG
);
84 iowrite32(virt_to_phys(dst
), _iobase
+ AES_DSTA_REG
);
85 iowrite32(len
, _iobase
+ AES_LENA_REG
);
87 /* Start the operation */
88 iowrite32(AES_CTRL_START
| flags
, _iobase
+ AES_CTRLA_REG
);
91 status
= ioread32(_iobase
+ AES_INTR_REG
);
92 while(!(status
& AES_INTRA_PENDING
) && --counter
);
95 iowrite32((status
& 0xFF) | AES_INTRA_PENDING
, _iobase
+ AES_INTR_REG
);
96 return counter
? 0 : 1;
100 geode_aes_crypt(struct geode_aes_op
*op
)
103 unsigned long iflags
;
108 /* If the source and destination is the same, then
109 * we need to turn on the coherent flags, otherwise
110 * we don't need to worry
113 flags
|= (AES_CTRL_DCA
| AES_CTRL_SCA
);
115 if (op
->dir
== AES_DIR_ENCRYPT
)
116 flags
|= AES_CTRL_ENCRYPT
;
118 /* Start the critical section */
120 spin_lock_irqsave(&lock
, iflags
);
122 if (op
->mode
== AES_MODE_CBC
) {
123 flags
|= AES_CTRL_CBC
;
124 _writefield(AES_WRITEIV0_REG
, op
->iv
);
127 if (!(op
->flags
& AES_FLAGS_HIDDENKEY
)) {
128 flags
|= AES_CTRL_WRKEY
;
129 _writefield(AES_WRITEKEY0_REG
, op
->key
);
132 do_crypt(op
->src
, op
->dst
, op
->len
, flags
);
134 if (op
->mode
== AES_MODE_CBC
)
135 _readfield(AES_WRITEIV0_REG
, op
->iv
);
137 spin_unlock_irqrestore(&lock
, iflags
);
142 /* CRYPTO-API Functions */
145 geode_setkey(struct crypto_tfm
*tfm
, const u8
*key
, unsigned int len
)
147 struct geode_aes_op
*op
= crypto_tfm_ctx(tfm
);
149 if (len
!= AES_KEY_LENGTH
) {
150 tfm
->crt_flags
|= CRYPTO_TFM_RES_BAD_KEY_LEN
;
154 memcpy(op
->key
, key
, len
);
159 geode_encrypt(struct crypto_tfm
*tfm
, u8
*out
, const u8
*in
)
161 struct geode_aes_op
*op
= crypto_tfm_ctx(tfm
);
163 if ((out
== NULL
) || (in
== NULL
))
166 op
->src
= (void *) in
;
167 op
->dst
= (void *) out
;
168 op
->mode
= AES_MODE_ECB
;
170 op
->len
= AES_MIN_BLOCK_SIZE
;
171 op
->dir
= AES_DIR_ENCRYPT
;
178 geode_decrypt(struct crypto_tfm
*tfm
, u8
*out
, const u8
*in
)
180 struct geode_aes_op
*op
= crypto_tfm_ctx(tfm
);
182 if ((out
== NULL
) || (in
== NULL
))
185 op
->src
= (void *) in
;
186 op
->dst
= (void *) out
;
187 op
->mode
= AES_MODE_ECB
;
189 op
->len
= AES_MIN_BLOCK_SIZE
;
190 op
->dir
= AES_DIR_DECRYPT
;
196 static struct crypto_alg geode_alg
= {
198 .cra_driver_name
= "geode-aes-128",
201 .cra_flags
= CRYPTO_ALG_TYPE_CIPHER
,
202 .cra_blocksize
= AES_MIN_BLOCK_SIZE
,
203 .cra_ctxsize
= sizeof(struct geode_aes_op
),
204 .cra_module
= THIS_MODULE
,
205 .cra_list
= LIST_HEAD_INIT(geode_alg
.cra_list
),
208 .cia_min_keysize
= AES_KEY_LENGTH
,
209 .cia_max_keysize
= AES_KEY_LENGTH
,
210 .cia_setkey
= geode_setkey
,
211 .cia_encrypt
= geode_encrypt
,
212 .cia_decrypt
= geode_decrypt
218 geode_cbc_decrypt(struct blkcipher_desc
*desc
,
219 struct scatterlist
*dst
, struct scatterlist
*src
,
222 struct geode_aes_op
*op
= crypto_blkcipher_ctx(desc
->tfm
);
223 struct blkcipher_walk walk
;
226 blkcipher_walk_init(&walk
, dst
, src
, nbytes
);
227 err
= blkcipher_walk_virt(desc
, &walk
);
229 while((nbytes
= walk
.nbytes
)) {
230 op
->src
= walk
.src
.virt
.addr
,
231 op
->dst
= walk
.dst
.virt
.addr
;
232 op
->mode
= AES_MODE_CBC
;
233 op
->len
= nbytes
- (nbytes
% AES_MIN_BLOCK_SIZE
);
234 op
->dir
= AES_DIR_DECRYPT
;
236 memcpy(op
->iv
, walk
.iv
, AES_IV_LENGTH
);
238 ret
= geode_aes_crypt(op
);
240 memcpy(walk
.iv
, op
->iv
, AES_IV_LENGTH
);
243 err
= blkcipher_walk_done(desc
, &walk
, nbytes
);
250 geode_cbc_encrypt(struct blkcipher_desc
*desc
,
251 struct scatterlist
*dst
, struct scatterlist
*src
,
254 struct geode_aes_op
*op
= crypto_blkcipher_ctx(desc
->tfm
);
255 struct blkcipher_walk walk
;
258 blkcipher_walk_init(&walk
, dst
, src
, nbytes
);
259 err
= blkcipher_walk_virt(desc
, &walk
);
261 while((nbytes
= walk
.nbytes
)) {
262 op
->src
= walk
.src
.virt
.addr
,
263 op
->dst
= walk
.dst
.virt
.addr
;
264 op
->mode
= AES_MODE_CBC
;
265 op
->len
= nbytes
- (nbytes
% AES_MIN_BLOCK_SIZE
);
266 op
->dir
= AES_DIR_ENCRYPT
;
268 memcpy(op
->iv
, walk
.iv
, AES_IV_LENGTH
);
270 ret
= geode_aes_crypt(op
);
272 err
= blkcipher_walk_done(desc
, &walk
, nbytes
);
278 static struct crypto_alg geode_cbc_alg
= {
279 .cra_name
= "cbc(aes)",
280 .cra_driver_name
= "cbc-aes-geode-128",
282 .cra_flags
= CRYPTO_ALG_TYPE_BLKCIPHER
,
283 .cra_blocksize
= AES_MIN_BLOCK_SIZE
,
284 .cra_ctxsize
= sizeof(struct geode_aes_op
),
286 .cra_type
= &crypto_blkcipher_type
,
287 .cra_module
= THIS_MODULE
,
288 .cra_list
= LIST_HEAD_INIT(geode_cbc_alg
.cra_list
),
291 .min_keysize
= AES_KEY_LENGTH
,
292 .max_keysize
= AES_KEY_LENGTH
,
293 .setkey
= geode_setkey
,
294 .encrypt
= geode_cbc_encrypt
,
295 .decrypt
= geode_cbc_decrypt
,
296 .ivsize
= AES_IV_LENGTH
,
302 geode_ecb_decrypt(struct blkcipher_desc
*desc
,
303 struct scatterlist
*dst
, struct scatterlist
*src
,
306 struct geode_aes_op
*op
= crypto_blkcipher_ctx(desc
->tfm
);
307 struct blkcipher_walk walk
;
310 blkcipher_walk_init(&walk
, dst
, src
, nbytes
);
311 err
= blkcipher_walk_virt(desc
, &walk
);
313 while((nbytes
= walk
.nbytes
)) {
314 op
->src
= walk
.src
.virt
.addr
,
315 op
->dst
= walk
.dst
.virt
.addr
;
316 op
->mode
= AES_MODE_ECB
;
317 op
->len
= nbytes
- (nbytes
% AES_MIN_BLOCK_SIZE
);
318 op
->dir
= AES_DIR_DECRYPT
;
320 ret
= geode_aes_crypt(op
);
322 err
= blkcipher_walk_done(desc
, &walk
, nbytes
);
329 geode_ecb_encrypt(struct blkcipher_desc
*desc
,
330 struct scatterlist
*dst
, struct scatterlist
*src
,
333 struct geode_aes_op
*op
= crypto_blkcipher_ctx(desc
->tfm
);
334 struct blkcipher_walk walk
;
337 blkcipher_walk_init(&walk
, dst
, src
, nbytes
);
338 err
= blkcipher_walk_virt(desc
, &walk
);
340 while((nbytes
= walk
.nbytes
)) {
341 op
->src
= walk
.src
.virt
.addr
,
342 op
->dst
= walk
.dst
.virt
.addr
;
343 op
->mode
= AES_MODE_ECB
;
344 op
->len
= nbytes
- (nbytes
% AES_MIN_BLOCK_SIZE
);
345 op
->dir
= AES_DIR_ENCRYPT
;
347 ret
= geode_aes_crypt(op
);
349 ret
= blkcipher_walk_done(desc
, &walk
, nbytes
);
355 static struct crypto_alg geode_ecb_alg
= {
356 .cra_name
= "ecb(aes)",
357 .cra_driver_name
= "ecb-aes-geode-128",
359 .cra_flags
= CRYPTO_ALG_TYPE_BLKCIPHER
,
360 .cra_blocksize
= AES_MIN_BLOCK_SIZE
,
361 .cra_ctxsize
= sizeof(struct geode_aes_op
),
363 .cra_type
= &crypto_blkcipher_type
,
364 .cra_module
= THIS_MODULE
,
365 .cra_list
= LIST_HEAD_INIT(geode_ecb_alg
.cra_list
),
368 .min_keysize
= AES_KEY_LENGTH
,
369 .max_keysize
= AES_KEY_LENGTH
,
370 .setkey
= geode_setkey
,
371 .encrypt
= geode_ecb_encrypt
,
372 .decrypt
= geode_ecb_decrypt
,
378 geode_aes_remove(struct pci_dev
*dev
)
380 crypto_unregister_alg(&geode_alg
);
381 crypto_unregister_alg(&geode_ecb_alg
);
382 crypto_unregister_alg(&geode_cbc_alg
);
384 pci_iounmap(dev
, _iobase
);
387 pci_release_regions(dev
);
388 pci_disable_device(dev
);
393 geode_aes_probe(struct pci_dev
*dev
, const struct pci_device_id
*id
)
397 if ((ret
= pci_enable_device(dev
)))
400 if ((ret
= pci_request_regions(dev
, "geode-aes-128")))
403 _iobase
= pci_iomap(dev
, 0, 0);
405 if (_iobase
== NULL
) {
410 spin_lock_init(&lock
);
412 /* Clear any pending activity */
413 iowrite32(AES_INTR_PENDING
| AES_INTR_MASK
, _iobase
+ AES_INTR_REG
);
415 if ((ret
= crypto_register_alg(&geode_alg
)))
418 if ((ret
= crypto_register_alg(&geode_ecb_alg
)))
421 if ((ret
= crypto_register_alg(&geode_cbc_alg
)))
424 printk(KERN_NOTICE
"geode-aes: GEODE AES engine enabled.\n");
428 crypto_unregister_alg(&geode_ecb_alg
);
431 crypto_unregister_alg(&geode_alg
);
434 pci_iounmap(dev
, _iobase
);
437 pci_release_regions(dev
);
440 pci_disable_device(dev
);
442 printk(KERN_ERR
"geode-aes: GEODE AES initialization failed.\n");
446 static struct pci_device_id geode_aes_tbl
[] = {
447 { PCI_VENDOR_ID_AMD
, PCI_DEVICE_ID_AMD_LX_AES
, PCI_ANY_ID
, PCI_ANY_ID
} ,
451 MODULE_DEVICE_TABLE(pci
, geode_aes_tbl
);
453 static struct pci_driver geode_aes_driver
= {
454 .name
= "Geode LX AES",
455 .id_table
= geode_aes_tbl
,
456 .probe
= geode_aes_probe
,
457 .remove
= __devexit_p(geode_aes_remove
)
463 return pci_register_driver(&geode_aes_driver
);
469 pci_unregister_driver(&geode_aes_driver
);
472 MODULE_AUTHOR("Advanced Micro Devices, Inc.");
473 MODULE_DESCRIPTION("Geode LX Hardware AES driver");
474 MODULE_LICENSE("GPL");
477 module_init(geode_aes_init
);
478 module_exit(geode_aes_exit
);