/*
 * Support for VIA PadLock hardware crypto engine.
 *
 * Copyright (c) 2006  Michal Ludvig <michal@logix.cz>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 */
#include <crypto/algapi.h>
#include <crypto/sha.h>
#include <linux/err.h>
#include <linux/module.h>
#include <linux/init.h>
#include <linux/errno.h>
#include <linux/cryptohash.h>
#include <linux/interrupt.h>
#include <linux/kernel.h>
#include <linux/scatterlist.h>
#include "padlock.h"	/* PFX, PADLOCK_ALIGNMENT, PADLOCK_CRA_PRIORITY */
#define SHA1_DEFAULT_FALLBACK	"sha1-generic"
#define SHA256_DEFAULT_FALLBACK	"sha256-generic"
struct padlock_sha_ctx {
	char		*data;
	size_t		used;
	int		bypass;
	void (*f_sha_padlock)(const char *in, char *out, int count);
	struct hash_desc fallback;
};
static inline struct padlock_sha_ctx *ctx(struct crypto_tfm *tfm)
{
	return crypto_tfm_ctx(tfm);
}
/* We'll need aligned address on the stack */
#define NEAREST_ALIGNED(ptr) \
	((void *)ALIGN((size_t)(ptr), PADLOCK_ALIGNMENT))
static struct crypto_alg sha1_alg, sha256_alg;
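
/*
 * The PadLock xsha instructions hash a complete message in one pass, so
 * incoming data is buffered (one page at most) and only hashed in ->final().
 * Once a request outgrows that buffer, the driver "bypasses" the hardware
 * and hands all data, including what was already buffered, to the software
 * fallback transform instead.
 */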
static void padlock_sha_bypass(struct crypto_tfm *tfm)
{
	if (ctx(tfm)->bypass)
		return;

	crypto_hash_init(&ctx(tfm)->fallback);
	if (ctx(tfm)->data && ctx(tfm)->used) {
		struct scatterlist sg;

		sg_init_one(&sg, ctx(tfm)->data, ctx(tfm)->used);
		crypto_hash_update(&ctx(tfm)->fallback, &sg, sg.length);
	}

	ctx(tfm)->used = 0;
	ctx(tfm)->bypass = 1;
}
static void padlock_sha_init(struct crypto_tfm *tfm)
{
	ctx(tfm)->used = 0;
	ctx(tfm)->bypass = 0;
}
static void padlock_sha_update(struct crypto_tfm *tfm,
			const uint8_t *data, unsigned int length)
{
	/* Our buffer is always one page. */
	if (unlikely(!ctx(tfm)->bypass &&
			(ctx(tfm)->used + length > PAGE_SIZE)))
		padlock_sha_bypass(tfm);

	if (unlikely(ctx(tfm)->bypass)) {
		struct scatterlist sg;

		sg_init_one(&sg, (uint8_t *)data, length);
		crypto_hash_update(&ctx(tfm)->fallback, &sg, length);
		return;
	}

	memcpy(ctx(tfm)->data + ctx(tfm)->used, data, length);
	ctx(tfm)->used += length;
}
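
/*
 * The hardware leaves the hash state as native (little-endian) 32-bit words;
 * swab32() converts each word to the big-endian byte order that the SHA-1 and
 * SHA-256 digest formats require.
 */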
static inline void padlock_output_block(uint32_t *src,
			uint32_t *dst, size_t count)
{
	while (count--)
		*dst++ = swab32(*src++);
}
static void padlock_do_sha1(const char *in, char *out, int count)
{
	/* We can't store directly to *out as it may be unaligned. */
	/* BTW Don't reduce the buffer size below 128 Bytes!
	 *     PadLock microcode needs it that big. */
	char buf[128 + 16];
	char *result = NEAREST_ALIGNED(buf);

	((uint32_t *)result)[0] = SHA1_H0;
	((uint32_t *)result)[1] = SHA1_H1;
	((uint32_t *)result)[2] = SHA1_H2;
	((uint32_t *)result)[3] = SHA1_H3;
	((uint32_t *)result)[4] = SHA1_H4;

	/* %esi = input, %edi = state/digest buffer, %ecx = byte count, %eax = 0 */
	asm volatile (".byte 0xf3,0x0f,0xa6,0xc8" /* rep xsha1 */
		      : "+S"(in), "+D"(result)
		      : "c"(count), "a"(0));

	padlock_output_block((uint32_t *)result, (uint32_t *)out, 5);
}
static void padlock_do_sha256(const char *in, char *out, int count)
{
	/* We can't store directly to *out as it may be unaligned. */
	/* BTW Don't reduce the buffer size below 128 Bytes!
	 *     PadLock microcode needs it that big. */
	char buf[128 + 16];
	char *result = NEAREST_ALIGNED(buf);

	((uint32_t *)result)[0] = SHA256_H0;
	((uint32_t *)result)[1] = SHA256_H1;
	((uint32_t *)result)[2] = SHA256_H2;
	((uint32_t *)result)[3] = SHA256_H3;
	((uint32_t *)result)[4] = SHA256_H4;
	((uint32_t *)result)[5] = SHA256_H5;
	((uint32_t *)result)[6] = SHA256_H6;
	((uint32_t *)result)[7] = SHA256_H7;

	/* %esi = input, %edi = state/digest buffer, %ecx = byte count, %eax = 0 */
	asm volatile (".byte 0xf3,0x0f,0xa6,0xd0" /* rep xsha256 */
		      : "+S"(in), "+D"(result)
		      : "c"(count), "a"(0));

	padlock_output_block((uint32_t *)result, (uint32_t *)out, 8);
}
static void padlock_sha_final(struct crypto_tfm *tfm, uint8_t *out)
{
	if (unlikely(ctx(tfm)->bypass)) {
		crypto_hash_final(&ctx(tfm)->fallback, out);
		ctx(tfm)->bypass = 0;
		return;
	}

	/* Pass the input buffer to PadLock microcode... */
	ctx(tfm)->f_sha_padlock(ctx(tfm)->data, out, ctx(tfm)->used);

	ctx(tfm)->used = 0;
}
static int padlock_cra_init(struct crypto_tfm *tfm)
{
	const char *fallback_driver_name = tfm->__crt_alg->cra_name;
	struct crypto_hash *fallback_tfm;

	/* For now we'll allocate one page. This
	 * could eventually be configurable one day. */
	ctx(tfm)->data = (char *)__get_free_page(GFP_KERNEL);
	if (!ctx(tfm)->data)
		return -ENOMEM;

	/* Allocate a fallback and abort if it failed. */
	fallback_tfm = crypto_alloc_hash(fallback_driver_name, 0,
					 CRYPTO_ALG_ASYNC |
					 CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(fallback_tfm)) {
		printk(KERN_WARNING PFX "Fallback driver '%s' could not be loaded!\n",
		       fallback_driver_name);
		free_page((unsigned long)(ctx(tfm)->data));
		return PTR_ERR(fallback_tfm);
	}

	ctx(tfm)->fallback.tfm = fallback_tfm;
	return 0;
}
static int padlock_sha1_cra_init(struct crypto_tfm *tfm)
{
	ctx(tfm)->f_sha_padlock = padlock_do_sha1;

	return padlock_cra_init(tfm);
}
static int padlock_sha256_cra_init(struct crypto_tfm *tfm)
{
	ctx(tfm)->f_sha_padlock = padlock_do_sha256;

	return padlock_cra_init(tfm);
}
static void padlock_cra_exit(struct crypto_tfm *tfm)
{
	if (ctx(tfm)->data) {
		free_page((unsigned long)(ctx(tfm)->data));
		ctx(tfm)->data = NULL;
	}

	crypto_free_hash(ctx(tfm)->fallback.tfm);
	ctx(tfm)->fallback.tfm = NULL;
}
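
/*
 * Registering with PADLOCK_CRA_PRIORITY makes the crypto API prefer these
 * implementations over the generic C ones.  CRYPTO_ALG_NEED_FALLBACK marks
 * them as requiring a software fallback, which also keeps padlock_cra_init()
 * from picking another fallback-needing implementation (such as itself) when
 * it allocates the fallback transform.
 */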
static struct crypto_alg sha1_alg = {
	.cra_name		=	"sha1",
	.cra_driver_name	=	"sha1-padlock",
	.cra_priority		=	PADLOCK_CRA_PRIORITY,
	.cra_flags		=	CRYPTO_ALG_TYPE_DIGEST |
					CRYPTO_ALG_NEED_FALLBACK,
	.cra_blocksize		=	SHA1_BLOCK_SIZE,
	.cra_ctxsize		=	sizeof(struct padlock_sha_ctx),
	.cra_module		=	THIS_MODULE,
	.cra_list		=	LIST_HEAD_INIT(sha1_alg.cra_list),
	.cra_init		=	padlock_sha1_cra_init,
	.cra_exit		=	padlock_cra_exit,
	.cra_u			=	{
		.digest = {
			.dia_digestsize	=	SHA1_DIGEST_SIZE,
			.dia_init	=	padlock_sha_init,
			.dia_update	=	padlock_sha_update,
			.dia_final	=	padlock_sha_final,
		}
	}
};
static struct crypto_alg sha256_alg = {
	.cra_name		=	"sha256",
	.cra_driver_name	=	"sha256-padlock",
	.cra_priority		=	PADLOCK_CRA_PRIORITY,
	.cra_flags		=	CRYPTO_ALG_TYPE_DIGEST |
					CRYPTO_ALG_NEED_FALLBACK,
	.cra_blocksize		=	SHA256_BLOCK_SIZE,
	.cra_ctxsize		=	sizeof(struct padlock_sha_ctx),
	.cra_module		=	THIS_MODULE,
	.cra_list		=	LIST_HEAD_INIT(sha256_alg.cra_list),
	.cra_init		=	padlock_sha256_cra_init,
	.cra_exit		=	padlock_cra_exit,
	.cra_u			=	{
		.digest = {
			.dia_digestsize	=	SHA256_DIGEST_SIZE,
			.dia_init	=	padlock_sha_init,
			.dia_update	=	padlock_sha_update,
			.dia_final	=	padlock_sha_final,
		}
	}
};
static int __init padlock_init(void)
{
	int rc = -ENODEV;

	if (!cpu_has_phe) {
		printk(KERN_ERR PFX "VIA PadLock Hash Engine not detected.\n");
		return -ENODEV;
	}

	if (!cpu_has_phe_enabled) {
		printk(KERN_ERR PFX "VIA PadLock detected, but not enabled. Hmm, strange...\n");
		return -ENODEV;
	}

	rc = crypto_register_alg(&sha1_alg);
	if (rc)
		goto out;

	rc = crypto_register_alg(&sha256_alg);
	if (rc)
		goto out_unreg1;

	printk(KERN_NOTICE PFX "Using VIA PadLock ACE for SHA1/SHA256 algorithms.\n");

	return 0;

out_unreg1:
	crypto_unregister_alg(&sha1_alg);
out:
	printk(KERN_ERR PFX "VIA PadLock SHA1/SHA256 initialization failed.\n");
	return rc;
}
static void __exit padlock_fini(void)
{
	crypto_unregister_alg(&sha1_alg);
	crypto_unregister_alg(&sha256_alg);
}
module_init(padlock_init);
module_exit(padlock_fini);
MODULE_DESCRIPTION("VIA PadLock SHA1/SHA256 algorithms support.");
MODULE_LICENSE("GPL");
MODULE_AUTHOR("Michal Ludvig");

MODULE_ALIAS("sha1");
MODULE_ALIAS("sha256");
MODULE_ALIAS("sha1-padlock");
MODULE_ALIAS("sha256-padlock");