s3:selftest: run smb2.notify with --signing=required
[Samba.git] / lib / crypto / aes_gcm_128.c
blob f59d65949edcb39621f593e60eac991e34996e0e
/*
   AES-GCM-128

   Copyright (C) Stefan Metzmacher 2014

   This program is free software; you can redistribute it and/or modify
   it under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3 of the License, or
   (at your option) any later version.

   This program is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
   GNU General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with this program.  If not, see <http://www.gnu.org/licenses/>.
*/
20 #include "replace.h"
21 #include "../lib/crypto/crypto.h"
22 #include "lib/util/byteorder.h"
24 static inline void aes_gcm_128_inc32(uint8_t inout[AES_BLOCK_SIZE])
26 uint32_t v;
28 v = RIVAL(inout, AES_BLOCK_SIZE - 4);
29 v += 1;
30 RSIVAL(inout, AES_BLOCK_SIZE - 4, v);
33 static inline void aes_gcm_128_xor(const uint8_t in1[AES_BLOCK_SIZE],
34 const uint8_t in2[AES_BLOCK_SIZE],
35 uint8_t out[AES_BLOCK_SIZE])
37 uint8_t i;
39 for (i = 0; i < AES_BLOCK_SIZE; i++) {
40 out[i] = in1[i] ^ in2[i];
44 static inline void aes_gcm_128_rightshift(uint8_t x[AES_BLOCK_SIZE])
46 int8_t i;
48 for (i = AES_BLOCK_SIZE - 1; i >=0; i--) {
49 x[i] >>= 1;
50 if (i > 0) {
51 x[i] |= (x[i-1] & 1) << 7;
56 static inline void aes_gcm_128_mul(const uint8_t x[AES_BLOCK_SIZE],
57 const uint8_t y[AES_BLOCK_SIZE],
58 uint8_t z[AES_BLOCK_SIZE])
60 uint8_t i;
61 uint8_t v[AES_BLOCK_SIZE];
62 /* 11100001 || 0^120 */
63 static const uint8_t r[AES_BLOCK_SIZE] = {
64 0xE1, 0x00, 0x00, 0x00,
65 0x00, 0x00, 0x00, 0x00,
66 0x00, 0x00, 0x00, 0x00,
67 0x00, 0x00, 0x00, 0x00,
70 memset(z, 0, AES_BLOCK_SIZE);
71 memcpy(v, y, AES_BLOCK_SIZE);
73 for (i = 0; i < AES_BLOCK_SIZE; i++) {
74 uint8_t mask;
75 for (mask = 0x80; mask != 0 ; mask >>= 1) {
76 uint8_t v_lsb = v[AES_BLOCK_SIZE-1] & 1;
77 if (x[i] & mask) {
78 aes_gcm_128_xor(z, v, z);
81 aes_gcm_128_rightshift(v);
82 if (v_lsb != 0) {
83 aes_gcm_128_xor(v, r, v);
89 static inline void aes_gcm_128_ghash_block(struct aes_gcm_128_context *ctx,
90 const uint8_t in[AES_BLOCK_SIZE])
92 aes_gcm_128_xor(ctx->Y, in, ctx->y.block);
93 aes_gcm_128_mul(ctx->y.block, ctx->H, ctx->Y);
96 void aes_gcm_128_init(struct aes_gcm_128_context *ctx,
97 const uint8_t K[AES_BLOCK_SIZE],
98 const uint8_t IV[AES_GCM_128_IV_SIZE])
100 ZERO_STRUCTP(ctx);
102 AES_set_encrypt_key(K, 128, &ctx->aes_key);
105 * Step 1: generate H (ctx->Y is the zero block here)
107 AES_encrypt(ctx->Y, ctx->H, &ctx->aes_key);
110 * Step 2: generate J0
112 memcpy(ctx->J0, IV, AES_GCM_128_IV_SIZE);
113 aes_gcm_128_inc32(ctx->J0);
116 * We need to prepare CB with J0.
118 memcpy(ctx->CB, ctx->J0, AES_BLOCK_SIZE);
119 ctx->c.ofs = AES_BLOCK_SIZE;
122 static inline void aes_gcm_128_update_tmp(struct aes_gcm_128_context *ctx,
123 struct aes_gcm_128_tmp *tmp,
124 const uint8_t *v, size_t v_len)
126 tmp->total += v_len;
128 if (tmp->ofs > 0) {
129 size_t copy = MIN(AES_BLOCK_SIZE - tmp->ofs, v_len);
131 memcpy(tmp->block + tmp->ofs, v, copy);
132 tmp->ofs += copy;
133 v += copy;
134 v_len -= copy;
137 if (tmp->ofs == AES_BLOCK_SIZE) {
138 aes_gcm_128_ghash_block(ctx, tmp->block);
139 tmp->ofs = 0;
142 while (v_len >= AES_BLOCK_SIZE) {
143 aes_gcm_128_ghash_block(ctx, v);
144 v += AES_BLOCK_SIZE;
145 v_len -= AES_BLOCK_SIZE;
148 if (v_len == 0) {
149 return;
152 ZERO_STRUCT(tmp->block);
153 memcpy(tmp->block, v, v_len);
154 tmp->ofs = v_len;
157 void aes_gcm_128_updateA(struct aes_gcm_128_context *ctx,
158 const uint8_t *a, size_t a_len)
160 aes_gcm_128_update_tmp(ctx, &ctx->A, a, a_len);
163 void aes_gcm_128_updateC(struct aes_gcm_128_context *ctx,
164 const uint8_t *c, size_t c_len)
166 if (ctx->A.ofs > 0) {
167 aes_gcm_128_ghash_block(ctx, ctx->A.block);
168 ctx->A.ofs = 0;
171 aes_gcm_128_update_tmp(ctx, &ctx->C, c, c_len);
174 static inline void aes_gcm_128_crypt_tmp(struct aes_gcm_128_context *ctx,
175 struct aes_gcm_128_tmp *tmp,
176 uint8_t *m, size_t m_len)
178 tmp->total += m_len;
180 while (m_len > 0) {
181 if (tmp->ofs == AES_BLOCK_SIZE) {
182 aes_gcm_128_inc32(ctx->CB);
183 AES_encrypt(ctx->CB, tmp->block, &ctx->aes_key);
184 tmp->ofs = 0;
187 m[0] ^= tmp->block[tmp->ofs];
188 m += 1;
189 m_len -= 1;
190 tmp->ofs += 1;
194 void aes_gcm_128_crypt(struct aes_gcm_128_context *ctx,
195 uint8_t *m, size_t m_len)
197 aes_gcm_128_crypt_tmp(ctx, &ctx->c, m, m_len);
200 void aes_gcm_128_digest(struct aes_gcm_128_context *ctx,
201 uint8_t T[AES_BLOCK_SIZE])
203 if (ctx->A.ofs > 0) {
204 aes_gcm_128_ghash_block(ctx, ctx->A.block);
205 ctx->A.ofs = 0;
208 if (ctx->C.ofs > 0) {
209 aes_gcm_128_ghash_block(ctx, ctx->C.block);
210 ctx->C.ofs = 0;
213 RSBVAL(ctx->AC, 0, ctx->A.total * 8);
214 RSBVAL(ctx->AC, 8, ctx->C.total * 8);
215 aes_gcm_128_ghash_block(ctx, ctx->AC);
217 AES_encrypt(ctx->J0, ctx->c.block, &ctx->aes_key);
218 aes_gcm_128_xor(ctx->c.block, ctx->Y, T);
220 ZERO_STRUCTP(ctx);