/*
 * QEMU Crypto XTS cipher mode
 *
 * Copyright (c) 2015-2016 Red Hat, Inc.
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
 *
 * This code is originally derived from public domain / WTFPL code in
 * the LibTomCrypt cryptographic library http://libtom.org. The XTS code
 * was donated by Elliptic Semiconductor Inc (www.ellipticsemi.com)
 * to the LibTom Projects.
 */
#include "qemu/osdep.h"
#include "qemu/bswap.h"
#include "crypto/xts.h"
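
/*
 * A single XTS block, viewable either as 16 raw bytes or as two
 * 64-bit words for the word-wide XOR and tweak arithmetic below.
 */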
typedef union {
    uint8_t b[XTS_BLOCK_SIZE];
    uint64_t u[2];
} xts_uint128;
static inline void xts_uint128_xor(xts_uint128 *D,
                                   const xts_uint128 *S1,
                                   const xts_uint128 *S2)
{
    D->u[0] = S1->u[0] ^ S2->u[0];
    D->u[1] = S1->u[1] ^ S2->u[1];
}

static inline void xts_uint128_cpu_to_les(xts_uint128 *v)
{
    cpu_to_le64s(&v->u[0]);
    cpu_to_le64s(&v->u[1]);
}

static inline void xts_uint128_le_to_cpus(xts_uint128 *v)
{
    le64_to_cpus(&v->u[0]);
    le64_to_cpus(&v->u[1]);
}
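
/*
 * Multiply the tweak by x (alpha) in GF(2^128): shift the 128-bit value
 * left by one bit and, on carry out of the top bit, reduce by the XTS
 * polynomial x^128 + x^7 + x^2 + x + 1 (the 0x87 constant below).
 * The tweak is stored little endian, so it is converted to CPU order
 * around the shift.
 */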
static void xts_mult_x(xts_uint128 *I)
{
    uint64_t tt;

    xts_uint128_le_to_cpus(I);

    tt = I->u[0] >> 63;
    I->u[0] <<= 1;

    if (I->u[1] >> 63) {
        I->u[0] ^= 0x87;
    }
    I->u[1] <<= 1;
    I->u[1] |= tt;

    xts_uint128_cpu_to_les(I);
}

/**
 * xts_tweak_encdec:
 * @ctx: the cipher context
 * @func: the cipher function
 * @src: buffer providing the input text of XTS_BLOCK_SIZE bytes
 * @dst: buffer receiving the output text of XTS_BLOCK_SIZE bytes
 * @iv: the initialization vector tweak of XTS_BLOCK_SIZE bytes
 *
 * Encrypt/decrypt one block of data with the tweak, then advance
 * the tweak for the next block.
 */
static inline void xts_tweak_encdec(const void *ctx,
                                    xts_cipher_func *func,
                                    const xts_uint128 *src,
                                    xts_uint128 *dst,
                                    xts_uint128 *iv)
{
    /* tweak encrypt block i */
    xts_uint128_xor(dst, src, iv);

    func(ctx, XTS_BLOCK_SIZE, dst->b, dst->b);

    xts_uint128_xor(dst, dst, iv);

    /* LFSR the tweak */
    xts_mult_x(iv);
}
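
/*
 * Decrypt a run of whole blocks with the evolving tweak, then handle a
 * trailing partial block with ciphertext stealing.  Note that encfunc is
 * still required on the decrypt path: the initial tweak is always derived
 * by encrypting the iv with the tweak key.
 */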
void xts_decrypt(const void *datactx,
                 const void *tweakctx,
                 xts_cipher_func *encfunc,
                 xts_cipher_func *decfunc,
                 uint8_t *iv,
                 size_t length,
                 uint8_t *dst,
                 const uint8_t *src)
{
    xts_uint128 PP, CC, T;
    unsigned long i, m, mo, lim;

    /* get number of blocks */
    m = length >> 4;
    mo = length & 15;

    /* must have at least one full block */
    g_assert(m != 0);
    if (mo == 0) {
        lim = m;
    } else {
        lim = m - 1;
    }

    /* encrypt the iv */
    encfunc(tweakctx, XTS_BLOCK_SIZE, T.b, iv);
    if (QEMU_PTR_IS_ALIGNED(src, sizeof(uint64_t)) &&
        QEMU_PTR_IS_ALIGNED(dst, sizeof(uint64_t))) {
        xts_uint128 *S = (xts_uint128 *)src;
        xts_uint128 *D = (xts_uint128 *)dst;
        for (i = 0; i < lim; i++, S++, D++) {
            xts_tweak_encdec(datactx, decfunc, S, D, &T);
        }
    } else {
        xts_uint128 D;

        for (i = 0; i < lim; i++) {
            memcpy(&D, src, XTS_BLOCK_SIZE);
            xts_tweak_encdec(datactx, decfunc, &D, &D, &T);
            memcpy(dst, &D, XTS_BLOCK_SIZE);
            src += XTS_BLOCK_SIZE;
            dst += XTS_BLOCK_SIZE;
        }
    }
    /* if length is not a multiple of XTS_BLOCK_SIZE, steal ciphertext
     * to handle the final partial block */
    if (mo > 0) {
        xts_uint128 S, D;
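        /*
         * Ciphertext stealing decrypts the last two blocks out of order:
         * the final full ciphertext block uses the *next* tweak value CC
         * (T multiplied by x), while the reassembled stolen block below
         * is decrypted with the current tweak T.
         */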
        memcpy(&CC, &T, XTS_BLOCK_SIZE);
        xts_mult_x(&CC);

        /* PP = tweak decrypt block m-1 */
        memcpy(&S, src, XTS_BLOCK_SIZE);
        xts_tweak_encdec(datactx, decfunc, &S, &PP, &CC);

        /* Pm = first length % XTS_BLOCK_SIZE bytes of PP */
        for (i = 0; i < mo; i++) {
            CC.b[i] = src[XTS_BLOCK_SIZE + i];
            dst[XTS_BLOCK_SIZE + i] = PP.b[i];
        }
        for (; i < XTS_BLOCK_SIZE; i++) {
            CC.b[i] = PP.b[i];
        }

        /* Pm-1 = tweak decrypt CC */
        xts_tweak_encdec(datactx, decfunc, &CC, &D, &T);
        memcpy(dst, &D, XTS_BLOCK_SIZE);
    }
    /* Decrypt the iv back */
    decfunc(tweakctx, XTS_BLOCK_SIZE, iv, T.b);
}
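
/*
 * Minimal usage sketch (hypothetical my_data_ctx/my_tweak_ctx contexts and
 * my_enc/my_dec xts_cipher_func wrappers supplied by the caller; not part
 * of this file).  length must be at least XTS_BLOCK_SIZE, and the iv is
 * restored on return so the same buffer can be reused:
 *
 *   uint8_t iv[XTS_BLOCK_SIZE] = { 0 };   // e.g. sector number
 *   xts_encrypt(my_data_ctx, my_tweak_ctx, my_enc, my_dec,
 *               iv, length, ciphertext, plaintext);
 *   xts_decrypt(my_data_ctx, my_tweak_ctx, my_enc, my_dec,
 *               iv, length, plaintext, ciphertext);
 */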
void xts_encrypt(const void *datactx,
                 const void *tweakctx,
                 xts_cipher_func *encfunc,
                 xts_cipher_func *decfunc,
                 uint8_t *iv,
                 size_t length,
                 uint8_t *dst,
                 const uint8_t *src)
{
    xts_uint128 PP, CC, T;
    unsigned long i, m, mo, lim;

    /* get number of blocks */
    m = length >> 4;
    mo = length & 15;

    /* must have at least one full block */
    g_assert(m != 0);
    if (mo == 0) {
        lim = m;
    } else {
        lim = m - 1;
    }

    /* encrypt the iv */
    encfunc(tweakctx, XTS_BLOCK_SIZE, T.b, iv);
    if (QEMU_PTR_IS_ALIGNED(src, sizeof(uint64_t)) &&
        QEMU_PTR_IS_ALIGNED(dst, sizeof(uint64_t))) {
        xts_uint128 *S = (xts_uint128 *)src;
        xts_uint128 *D = (xts_uint128 *)dst;
        for (i = 0; i < lim; i++, S++, D++) {
            xts_tweak_encdec(datactx, encfunc, S, D, &T);
        }
    } else {
        xts_uint128 D;

        for (i = 0; i < lim; i++) {
            memcpy(&D, src, XTS_BLOCK_SIZE);
            xts_tweak_encdec(datactx, encfunc, &D, &D, &T);
            memcpy(dst, &D, XTS_BLOCK_SIZE);

            dst += XTS_BLOCK_SIZE;
            src += XTS_BLOCK_SIZE;
        }
    }
    /* if length is not a multiple of XTS_BLOCK_SIZE, steal ciphertext
     * to handle the final partial block */
    if (mo > 0) {
        xts_uint128 S, D;
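        /*
         * Ciphertext stealing: encrypt the final full plaintext block to
         * CC, emit its first mo bytes as the short final ciphertext block,
         * then pad the mo trailing plaintext bytes with the remainder of
         * CC and encrypt that as ciphertext block m-1.
         */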
        /* CC = tweak encrypt block m-1 */
        memcpy(&S, src, XTS_BLOCK_SIZE);
        xts_tweak_encdec(datactx, encfunc, &S, &CC, &T);

        /* Cm = first length % XTS_BLOCK_SIZE bytes of CC */
        for (i = 0; i < mo; i++) {
            PP.b[i] = src[XTS_BLOCK_SIZE + i];
            dst[XTS_BLOCK_SIZE + i] = CC.b[i];
        }

        for (; i < XTS_BLOCK_SIZE; i++) {
            PP.b[i] = CC.b[i];
        }

        /* Cm-1 = Tweak encrypt PP */
        xts_tweak_encdec(datactx, encfunc, &PP, &D, &T);
        memcpy(dst, &D, XTS_BLOCK_SIZE);
    }
    /* Decrypt the iv back */
    decfunc(tweakctx, XTS_BLOCK_SIZE, iv, T.b);
}