/*
 * Source: linux-stable.git — arch/x86/include/asm/crypto/glue_helper.h
 * Commit: "crypto: glue_helper - Add skcipher xts helpers"
 * Blob: 29e53ea7d76467cdc5d01da55872fc9885feb02b
 */
1 /*
2 * Shared glue code for 128bit block ciphers
3 */
5 #ifndef _CRYPTO_GLUE_HELPER_H
6 #define _CRYPTO_GLUE_HELPER_H
8 #include <crypto/internal/skcipher.h>
9 #include <linux/kernel.h>
10 #include <asm/fpu/api.h>
11 #include <crypto/b128ops.h>
13 typedef void (*common_glue_func_t)(void *ctx, u8 *dst, const u8 *src);
14 typedef void (*common_glue_cbc_func_t)(void *ctx, u128 *dst, const u128 *src);
15 typedef void (*common_glue_ctr_func_t)(void *ctx, u128 *dst, const u128 *src,
16 le128 *iv);
17 typedef void (*common_glue_xts_func_t)(void *ctx, u128 *dst, const u128 *src,
18 le128 *iv);
20 #define GLUE_FUNC_CAST(fn) ((common_glue_func_t)(fn))
21 #define GLUE_CBC_FUNC_CAST(fn) ((common_glue_cbc_func_t)(fn))
22 #define GLUE_CTR_FUNC_CAST(fn) ((common_glue_ctr_func_t)(fn))
23 #define GLUE_XTS_FUNC_CAST(fn) ((common_glue_xts_func_t)(fn))
25 struct common_glue_func_entry {
26 unsigned int num_blocks; /* number of blocks that @fn will process */
27 union {
28 common_glue_func_t ecb;
29 common_glue_cbc_func_t cbc;
30 common_glue_ctr_func_t ctr;
31 common_glue_xts_func_t xts;
32 } fn_u;
35 struct common_glue_ctx {
36 unsigned int num_funcs;
37 int fpu_blocks_limit; /* -1 means fpu not needed at all */
40 * First funcs entry must have largest num_blocks and last funcs entry
41 * must have num_blocks == 1!
43 struct common_glue_func_entry funcs[];
46 static inline bool glue_fpu_begin(unsigned int bsize, int fpu_blocks_limit,
47 struct blkcipher_desc *desc,
48 bool fpu_enabled, unsigned int nbytes)
50 if (likely(fpu_blocks_limit < 0))
51 return false;
53 if (fpu_enabled)
54 return true;
57 * Vector-registers are only used when chunk to be processed is large
58 * enough, so do not enable FPU until it is necessary.
60 if (nbytes < bsize * (unsigned int)fpu_blocks_limit)
61 return false;
63 if (desc) {
64 /* prevent sleeping if FPU is in use */
65 desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
68 kernel_fpu_begin();
69 return true;
72 static inline bool glue_skwalk_fpu_begin(unsigned int bsize,
73 int fpu_blocks_limit,
74 struct skcipher_walk *walk,
75 bool fpu_enabled, unsigned int nbytes)
77 if (likely(fpu_blocks_limit < 0))
78 return false;
80 if (fpu_enabled)
81 return true;
84 * Vector-registers are only used when chunk to be processed is large
85 * enough, so do not enable FPU until it is necessary.
87 if (nbytes < bsize * (unsigned int)fpu_blocks_limit)
88 return false;
90 /* prevent sleeping if FPU is in use */
91 skcipher_walk_atomise(walk);
93 kernel_fpu_begin();
94 return true;
97 static inline void glue_fpu_end(bool fpu_enabled)
99 if (fpu_enabled)
100 kernel_fpu_end();
103 static inline void le128_to_be128(be128 *dst, const le128 *src)
105 dst->a = cpu_to_be64(le64_to_cpu(src->a));
106 dst->b = cpu_to_be64(le64_to_cpu(src->b));
109 static inline void be128_to_le128(le128 *dst, const be128 *src)
111 dst->a = cpu_to_le64(be64_to_cpu(src->a));
112 dst->b = cpu_to_le64(be64_to_cpu(src->b));
115 static inline void le128_inc(le128 *i)
117 u64 a = le64_to_cpu(i->a);
118 u64 b = le64_to_cpu(i->b);
120 b++;
121 if (!b)
122 a++;
124 i->a = cpu_to_le64(a);
125 i->b = cpu_to_le64(b);
128 static inline void le128_gf128mul_x_ble(le128 *dst, const le128 *src)
130 u64 a = le64_to_cpu(src->a);
131 u64 b = le64_to_cpu(src->b);
132 u64 _tt = ((s64)a >> 63) & 0x87;
134 dst->a = cpu_to_le64((a << 1) ^ (b >> 63));
135 dst->b = cpu_to_le64((b << 1) ^ _tt);
138 extern int glue_ecb_crypt_128bit(const struct common_glue_ctx *gctx,
139 struct blkcipher_desc *desc,
140 struct scatterlist *dst,
141 struct scatterlist *src, unsigned int nbytes);
143 extern int glue_cbc_encrypt_128bit(const common_glue_func_t fn,
144 struct blkcipher_desc *desc,
145 struct scatterlist *dst,
146 struct scatterlist *src,
147 unsigned int nbytes);
149 extern int glue_cbc_decrypt_128bit(const struct common_glue_ctx *gctx,
150 struct blkcipher_desc *desc,
151 struct scatterlist *dst,
152 struct scatterlist *src,
153 unsigned int nbytes);
155 extern int glue_ctr_crypt_128bit(const struct common_glue_ctx *gctx,
156 struct blkcipher_desc *desc,
157 struct scatterlist *dst,
158 struct scatterlist *src, unsigned int nbytes);
160 extern int glue_xts_crypt_128bit(const struct common_glue_ctx *gctx,
161 struct blkcipher_desc *desc,
162 struct scatterlist *dst,
163 struct scatterlist *src, unsigned int nbytes,
164 common_glue_func_t tweak_fn, void *tweak_ctx,
165 void *crypt_ctx);
167 extern int glue_xts_crypt_128bit(const struct common_glue_ctx *gctx,
168 struct blkcipher_desc *desc,
169 struct scatterlist *dst,
170 struct scatterlist *src, unsigned int nbytes,
171 common_glue_func_t tweak_fn, void *tweak_ctx,
172 void *crypt_ctx);
174 extern int glue_xts_req_128bit(const struct common_glue_ctx *gctx,
175 struct skcipher_request *req,
176 common_glue_func_t tweak_fn, void *tweak_ctx,
177 void *crypt_ctx);
179 extern void glue_xts_crypt_128bit_one(void *ctx, u128 *dst, const u128 *src,
180 le128 *iv, common_glue_func_t fn);
182 #endif /* _CRYPTO_GLUE_HELPER_H */