include/crypto/lrw.h
#ifndef _CRYPTO_LRW_H
#define _CRYPTO_LRW_H

#include <crypto/b128ops.h>

struct scatterlist;
struct gf128mul_64k;
struct blkcipher_desc;

#define LRW_BLOCK_SIZE 16
struct lrw_table_ctx {
	/* Optimizes multiplying a random (non-incrementing, as at the
	 * start of a new sector) value with key2. We could also have
	 * used 4k optimization tables or no optimization at all; in
	 * the latter case we would have to store key2 here. */
	struct gf128mul_64k *table;
	/* stores:
	 *  key2*{ 0,0,...0,0,0,0,1 }, key2*{ 0,0,...0,0,0,1,1 },
	 *  key2*{ 0,0,...0,0,1,1,1 }, key2*{ 0,0,...0,1,1,1,1 },
	 *  key2*{ 0,0,...1,1,1,1,1 }, etc.
	 * needed for optimized multiplication of incrementing values
	 * with key2 */
	be128 mulinc[128];
};
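/*
 * Why mulinc[] is enough: multiplication in GF(2^128) distributes over
 * XOR, so with T(i) = key2 * i the next tweak is
 * T(i + 1) = T(i) ^ key2 * (i ^ (i + 1)), and i ^ (i + 1) is always a
 * run of low one-bits (e.g. 3 ^ 4 == 7 == 0b111). Stepping to the next
 * block therefore costs one mulinc[] lookup and one XOR instead of a
 * full 128-bit multiplication.
 */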
/* Precompute the tables above; tweak is key2, the second half of the
 * LRW key. */
int lrw_init_table(struct lrw_table_ctx *ctx, const u8 *tweak);
void lrw_free_table(struct lrw_table_ctx *ctx);
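/*
 * A sketch (not the verbatim crypto/lrw.c code) of what lrw_init_table()
 * does with the gf128mul helpers from <crypto/gf128mul.h>.
 * gf128mul_init_64k_bbe(), gf128mul_64k_bbe() and gf128mul_free_64k()
 * are real helpers; the open-coded bit indexing below is an assumption
 * that approximates the static setbit128_bbe() helper in crypto/lrw.c
 * on little-endian builds:
 *
 *	int sketch_init_table(struct lrw_table_ctx *ctx, const u8 *tweak)
 *	{
 *		be128 tmp = { 0 };
 *		int i;
 *
 *		// 64k table for multiplying arbitrary values with key2
 *		ctx->table = gf128mul_init_64k_bbe((be128 *)tweak);
 *		if (!ctx->table)
 *			return -ENOMEM;
 *
 *		// tmp accumulates low one-bits, so mulinc[i] becomes
 *		// key2 * { 0,...,0,1,1,...,1 } with i + 1 trailing ones
 *		for (i = 0; i < 128; i++) {
 *			((u8 *)&tmp)[15 - (i >> 3)] |= 1 << (i & 7);
 *			ctx->mulinc[i] = tmp;
 *			gf128mul_64k_bbe(&ctx->mulinc[i], ctx->table);
 *		}
 *		return 0;
 *	}
 *
 * lrw_free_table() correspondingly releases the table, e.g. via
 * gf128mul_free_64k(ctx->table).
 */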
struct lrw_crypt_req {
	be128 *tbuf;		/* caller-supplied buffer for staging tweak values */
	unsigned int tbuflen;	/* size of tbuf in bytes */

	struct lrw_table_ctx *table_ctx;
	void *crypt_ctx;	/* opaque context handed back to crypt_fn */
	/* bulk ECB-style encrypt/decrypt of the blocks at blks, in place */
	void (*crypt_fn)(void *ctx, u8 *blks, unsigned int nbytes);
};
int lrw_crypt(struct blkcipher_desc *desc, struct scatterlist *dst,
	      struct scatterlist *src, unsigned int nbytes,
	      struct lrw_crypt_req *req);
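/*
 * A sketch of how glue code might drive lrw_crypt(); the shape follows
 * the x86 cipher glue drivers, but encrypt_callback(), struct my_ctx
 * and the buffer size are illustrative names, not part of this API:
 *
 *	static void encrypt_callback(void *ctx, u8 *blks, unsigned int nbytes)
 *	{
 *		// encrypt nbytes of 16-byte blocks at blks in place,
 *		// ECB-style; lrw_crypt() XORs the tweaks in before and
 *		// after this callback runs
 *	}
 *
 *	static int lrw_encrypt(struct blkcipher_desc *desc,
 *			       struct scatterlist *dst,
 *			       struct scatterlist *src, unsigned int nbytes)
 *	{
 *		struct my_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
 *		be128 buf[8];
 *		struct lrw_crypt_req req = {
 *			.tbuf = buf,
 *			.tbuflen = sizeof(buf),
 *			.table_ctx = &ctx->lrw_table,
 *			.crypt_ctx = ctx,
 *			.crypt_fn = encrypt_callback,
 *		};
 *
 *		return lrw_crypt(desc, dst, src, nbytes, &req);
 *	}
 */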
#endif /* _CRYPTO_LRW_H */