1 /* $OpenBSD: e_aes.c,v 1.33 2017/01/31 13:17:21 inoguchi Exp $ */
2 /* ====================================================================
3 * Copyright (c) 2001-2011 The OpenSSL Project. All rights reserved.
5 * Redistribution and use in source and binary forms, with or without
6 * modification, are permitted provided that the following conditions
9 * 1. Redistributions of source code must retain the above copyright
10 * notice, this list of conditions and the following disclaimer.
12 * 2. Redistributions in binary form must reproduce the above copyright
13 * notice, this list of conditions and the following disclaimer in
14 * the documentation and/or other materials provided with the
17 * 3. All advertising materials mentioning features or use of this
18 * software must display the following acknowledgment:
19 * "This product includes software developed by the OpenSSL Project
20 * for use in the OpenSSL Toolkit. (http://www.openssl.org/)"
22 * 4. The names "OpenSSL Toolkit" and "OpenSSL Project" must not be used to
23 * endorse or promote products derived from this software without
24 * prior written permission. For written permission, please contact
25 * openssl-core@openssl.org.
27 * 5. Products derived from this software may not be called "OpenSSL"
28 * nor may "OpenSSL" appear in their names without prior written
29 * permission of the OpenSSL Project.
31 * 6. Redistributions of any form whatsoever must retain the following
33 * "This product includes software developed by the OpenSSL Project
34 * for use in the OpenSSL Toolkit (http://www.openssl.org/)"
36 * THIS SOFTWARE IS PROVIDED BY THE OpenSSL PROJECT ``AS IS'' AND ANY
37 * EXPRESSED OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
38 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
39 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE OpenSSL PROJECT OR
40 * ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
41 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
42 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
43 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
44 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
45 * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
46 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
47 * OF THE POSSIBILITY OF SUCH DAMAGE.
48 * ====================================================================
55 #include <openssl/opensslconf.h>
57 #ifndef OPENSSL_NO_AES
58 #include <openssl/aes.h>
59 #include <openssl/err.h>
60 #include <openssl/evp.h>
63 #include "modes_lcl.h"
75 AES_KEY ks
; /* AES key schedule to use */
76 int key_set
; /* Set if key initialised */
77 int iv_set
; /* Set if an iv is set */
79 unsigned char *iv
; /* Temporary IV store */
80 int ivlen
; /* IV length */
82 int iv_gen
; /* It is OK to generate IVs */
83 int tls_aad_len
; /* TLS AAD length */
88 AES_KEY ks1
, ks2
; /* AES key schedules to use */
90 void (*stream
)(const unsigned char *in
, unsigned char *out
,
91 size_t length
, const AES_KEY
*key1
, const AES_KEY
*key2
,
92 const unsigned char iv
[16]);
96 AES_KEY ks
; /* AES key schedule to use */
97 int key_set
; /* Set if key initialised */
98 int iv_set
; /* Set if an iv is set */
99 int tag_set
; /* Set if tag is valid */
100 int len_set
; /* Set if message length set */
101 int L
, M
; /* L and M parameters from RFC3610 */
/* Upper bound (in bytes) per CFB1 iteration: 1/16th of the size_t range,
 * so the chunk's byte count can be multiplied by 8 (bits) without
 * overflowing size_t — see the CFB1 loop that passes MAXBITCHUNK*8. */
106 #define MAXBITCHUNK ((size_t)1<<(sizeof(size_t)*8-4))
109 int vpaes_set_encrypt_key(const unsigned char *userKey
, int bits
,
111 int vpaes_set_decrypt_key(const unsigned char *userKey
, int bits
,
114 void vpaes_encrypt(const unsigned char *in
, unsigned char *out
,
116 void vpaes_decrypt(const unsigned char *in
, unsigned char *out
,
119 void vpaes_cbc_encrypt(const unsigned char *in
, unsigned char *out
,
120 size_t length
, const AES_KEY
*key
, unsigned char *ivec
, int enc
);
123 void bsaes_cbc_encrypt(const unsigned char *in
, unsigned char *out
,
124 size_t length
, const AES_KEY
*key
, unsigned char ivec
[16], int enc
);
125 void bsaes_ctr32_encrypt_blocks(const unsigned char *in
, unsigned char *out
,
126 size_t len
, const AES_KEY
*key
, const unsigned char ivec
[16]);
127 void bsaes_xts_encrypt(const unsigned char *inp
, unsigned char *out
,
128 size_t len
, const AES_KEY
*key1
, const AES_KEY
*key2
,
129 const unsigned char iv
[16]);
130 void bsaes_xts_decrypt(const unsigned char *inp
, unsigned char *out
,
131 size_t len
, const AES_KEY
*key1
, const AES_KEY
*key2
,
132 const unsigned char iv
[16]);
135 void AES_ctr32_encrypt(const unsigned char *in
, unsigned char *out
,
136 size_t blocks
, const AES_KEY
*key
,
137 const unsigned char ivec
[AES_BLOCK_SIZE
]);
140 void AES_xts_encrypt(const char *inp
, char *out
, size_t len
,
141 const AES_KEY
*key1
, const AES_KEY
*key2
, const unsigned char iv
[16]);
142 void AES_xts_decrypt(const char *inp
, char *out
, size_t len
,
143 const AES_KEY
*key1
, const AES_KEY
*key2
, const unsigned char iv
[16]);
146 #if defined(AES_ASM) && ( \
147 ((defined(__i386) || defined(__i386__) || \
148 defined(_M_IX86)) && defined(OPENSSL_IA32_SSE2))|| \
149 defined(__x86_64) || defined(__x86_64__) || \
150 defined(_M_AMD64) || defined(_M_X64) || \
153 #include "x86_arch.h"
/* SSSE3 present: the constant-time vector-permute (vpaes) implementation
 * can be used. */
156 #define VPAES_CAPABLE (OPENSSL_cpu_caps() & CPUCAP_MASK_SSSE3)
/* The bit-sliced implementation is gated on the same SSSE3 capability. */
159 #define BSAES_CAPABLE VPAES_CAPABLE
/* Hardware AES (AES-NI) instruction support. */
164 #define AESNI_CAPABLE (OPENSSL_cpu_caps() & CPUCAP_MASK_AESNI)
166 int aesni_set_encrypt_key(const unsigned char *userKey
, int bits
,
168 int aesni_set_decrypt_key(const unsigned char *userKey
, int bits
,
171 void aesni_encrypt(const unsigned char *in
, unsigned char *out
,
173 void aesni_decrypt(const unsigned char *in
, unsigned char *out
,
176 void aesni_ecb_encrypt(const unsigned char *in
, unsigned char *out
,
177 size_t length
, const AES_KEY
*key
, int enc
);
178 void aesni_cbc_encrypt(const unsigned char *in
, unsigned char *out
,
179 size_t length
, const AES_KEY
*key
, unsigned char *ivec
, int enc
);
181 void aesni_ctr32_encrypt_blocks(const unsigned char *in
, unsigned char *out
,
182 size_t blocks
, const void *key
, const unsigned char *ivec
);
184 void aesni_xts_encrypt(const unsigned char *in
, unsigned char *out
,
185 size_t length
, const AES_KEY
*key1
, const AES_KEY
*key2
,
186 const unsigned char iv
[16]);
188 void aesni_xts_decrypt(const unsigned char *in
, unsigned char *out
,
189 size_t length
, const AES_KEY
*key1
, const AES_KEY
*key2
,
190 const unsigned char iv
[16]);
192 void aesni_ccm64_encrypt_blocks (const unsigned char *in
, unsigned char *out
,
193 size_t blocks
, const void *key
, const unsigned char ivec
[16],
194 unsigned char cmac
[16]);
196 void aesni_ccm64_decrypt_blocks (const unsigned char *in
, unsigned char *out
,
197 size_t blocks
, const void *key
, const unsigned char ivec
[16],
198 unsigned char cmac
[16]);
201 aesni_init_key(EVP_CIPHER_CTX
*ctx
, const unsigned char *key
,
202 const unsigned char *iv
, int enc
)
205 EVP_AES_KEY
*dat
= (EVP_AES_KEY
*)ctx
->cipher_data
;
207 mode
= ctx
->cipher
->flags
& EVP_CIPH_MODE
;
208 if ((mode
== EVP_CIPH_ECB_MODE
|| mode
== EVP_CIPH_CBC_MODE
) &&
210 ret
= aesni_set_decrypt_key(key
, ctx
->key_len
* 8,
212 dat
->block
= (block128_f
)aesni_decrypt
;
213 dat
->stream
.cbc
= mode
== EVP_CIPH_CBC_MODE
?
214 (cbc128_f
)aesni_cbc_encrypt
: NULL
;
216 ret
= aesni_set_encrypt_key(key
, ctx
->key_len
* 8,
218 dat
->block
= (block128_f
)aesni_encrypt
;
219 if (mode
== EVP_CIPH_CBC_MODE
)
220 dat
->stream
.cbc
= (cbc128_f
)aesni_cbc_encrypt
;
221 else if (mode
== EVP_CIPH_CTR_MODE
)
222 dat
->stream
.ctr
= (ctr128_f
)aesni_ctr32_encrypt_blocks
;
224 dat
->stream
.cbc
= NULL
;
228 EVPerror(EVP_R_AES_KEY_SETUP_FAILED
);
236 aesni_cbc_cipher(EVP_CIPHER_CTX
*ctx
, unsigned char *out
,
237 const unsigned char *in
, size_t len
)
239 aesni_cbc_encrypt(in
, out
, len
, ctx
->cipher_data
, ctx
->iv
,
246 aesni_ecb_cipher(EVP_CIPHER_CTX
*ctx
, unsigned char *out
,
247 const unsigned char *in
, size_t len
)
249 size_t bl
= ctx
->cipher
->block_size
;
254 aesni_ecb_encrypt(in
, out
, len
, ctx
->cipher_data
, ctx
->encrypt
);
/* OFB has no dedicated AES-NI routine: the generic implementation is reused,
 * and presumably hardware AES still applies via the block-function pointer
 * installed by aesni_init_key — TODO confirm against the generic path. */
259 #define aesni_ofb_cipher aes_ofb_cipher
260 static int aesni_ofb_cipher(EVP_CIPHER_CTX
*ctx
, unsigned char *out
,
261 const unsigned char *in
, size_t len
);
/* CFB128: no AES-NI-specific routine; alias to the generic implementation. */
263 #define aesni_cfb_cipher aes_cfb_cipher
264 static int aesni_cfb_cipher(EVP_CIPHER_CTX
*ctx
, unsigned char *out
,
265 const unsigned char *in
, size_t len
);
/* CFB8: no AES-NI-specific routine; alias to the generic implementation. */
267 #define aesni_cfb8_cipher aes_cfb8_cipher
268 static int aesni_cfb8_cipher(EVP_CIPHER_CTX
*ctx
, unsigned char *out
,
269 const unsigned char *in
, size_t len
);
/* CFB1: no AES-NI-specific routine; alias to the generic implementation. */
271 #define aesni_cfb1_cipher aes_cfb1_cipher
272 static int aesni_cfb1_cipher(EVP_CIPHER_CTX
*ctx
, unsigned char *out
,
273 const unsigned char *in
, size_t len
);
/* CTR: reuses the generic cipher entry; the AES-NI ctr32 stream function is
 * installed separately by aesni_init_key (dat->stream.ctr). */
275 #define aesni_ctr_cipher aes_ctr_cipher
276 static int aesni_ctr_cipher(EVP_CIPHER_CTX
*ctx
, unsigned char *out
,
277 const unsigned char *in
, size_t len
);
280 aesni_gcm_init_key(EVP_CIPHER_CTX
*ctx
, const unsigned char *key
,
281 const unsigned char *iv
, int enc
)
283 EVP_AES_GCM_CTX
*gctx
= ctx
->cipher_data
;
288 aesni_set_encrypt_key(key
, ctx
->key_len
* 8, &gctx
->ks
);
289 CRYPTO_gcm128_init(&gctx
->gcm
, &gctx
->ks
,
290 (block128_f
)aesni_encrypt
);
291 gctx
->ctr
= (ctr128_f
)aesni_ctr32_encrypt_blocks
;
292 /* If we have an iv can set it directly, otherwise use
295 if (iv
== NULL
&& gctx
->iv_set
)
298 CRYPTO_gcm128_setiv(&gctx
->gcm
, iv
, gctx
->ivlen
);
303 /* If key set use IV, otherwise copy */
305 CRYPTO_gcm128_setiv(&gctx
->gcm
, iv
, gctx
->ivlen
);
307 memcpy(gctx
->iv
, iv
, gctx
->ivlen
);
/* GCM bulk processing is shared with the generic path; only key setup
 * (aesni_gcm_init_key above) differs for AES-NI. */
314 #define aesni_gcm_cipher aes_gcm_cipher
315 static int aesni_gcm_cipher(EVP_CIPHER_CTX
*ctx
, unsigned char *out
,
316 const unsigned char *in
, size_t len
);
319 aesni_xts_init_key(EVP_CIPHER_CTX
*ctx
, const unsigned char *key
,
320 const unsigned char *iv
, int enc
)
322 EVP_AES_XTS_CTX
*xctx
= ctx
->cipher_data
;
328 /* key_len is two AES keys */
330 aesni_set_encrypt_key(key
, ctx
->key_len
* 4,
332 xctx
->xts
.block1
= (block128_f
)aesni_encrypt
;
333 xctx
->stream
= aesni_xts_encrypt
;
335 aesni_set_decrypt_key(key
, ctx
->key_len
* 4,
337 xctx
->xts
.block1
= (block128_f
)aesni_decrypt
;
338 xctx
->stream
= aesni_xts_decrypt
;
341 aesni_set_encrypt_key(key
+ ctx
->key_len
/ 2,
342 ctx
->key_len
* 4, &xctx
->ks2
);
343 xctx
->xts
.block2
= (block128_f
)aesni_encrypt
;
345 xctx
->xts
.key1
= &xctx
->ks1
;
349 xctx
->xts
.key2
= &xctx
->ks2
;
350 memcpy(ctx
->iv
, iv
, 16);
/* XTS bulk processing is shared with the generic path; only key setup
 * (aesni_xts_init_key above) differs for AES-NI. */
356 #define aesni_xts_cipher aes_xts_cipher
357 static int aesni_xts_cipher(EVP_CIPHER_CTX
*ctx
, unsigned char *out
,
358 const unsigned char *in
, size_t len
);
361 aesni_ccm_init_key(EVP_CIPHER_CTX
*ctx
, const unsigned char *key
,
362 const unsigned char *iv
, int enc
)
364 EVP_AES_CCM_CTX
*cctx
= ctx
->cipher_data
;
369 aesni_set_encrypt_key(key
, ctx
->key_len
* 8, &cctx
->ks
);
370 CRYPTO_ccm128_init(&cctx
->ccm
, cctx
->M
, cctx
->L
,
371 &cctx
->ks
, (block128_f
)aesni_encrypt
);
372 cctx
->str
= enc
? (ccm128_f
)aesni_ccm64_encrypt_blocks
:
373 (ccm128_f
)aesni_ccm64_decrypt_blocks
;
377 memcpy(ctx
->iv
, iv
, 15 - cctx
->L
);
/* CCM bulk processing is shared with the generic path; only key setup
 * (aesni_ccm_init_key above) differs for AES-NI. */
383 #define aesni_ccm_cipher aes_ccm_cipher
384 static int aesni_ccm_cipher(EVP_CIPHER_CTX
*ctx
, unsigned char *out
,
385 const unsigned char *in
, size_t len
);
387 #define BLOCK_CIPHER_generic(n,keylen,blocksize,ivlen,nmode,mode,MODE,fl) \
388 static const EVP_CIPHER aesni_##keylen##_##mode = { \
389 .nid = n##_##keylen##_##nmode, \
390 .block_size = blocksize, \
391 .key_len = keylen / 8, \
393 .flags = fl | EVP_CIPH_##MODE##_MODE, \
394 .init = aesni_init_key, \
395 .do_cipher = aesni_##mode##_cipher, \
396 .ctx_size = sizeof(EVP_AES_KEY) \
398 static const EVP_CIPHER aes_##keylen##_##mode = { \
399 .nid = n##_##keylen##_##nmode, \
400 .block_size = blocksize, \
401 .key_len = keylen / 8, \
403 .flags = fl | EVP_CIPH_##MODE##_MODE, \
404 .init = aes_init_key, \
405 .do_cipher = aes_##mode##_cipher, \
406 .ctx_size = sizeof(EVP_AES_KEY) \
409 EVP_aes_##keylen##_##mode(void) \
411 return AESNI_CAPABLE ? \
412 &aesni_##keylen##_##mode : &aes_##keylen##_##mode; \
415 #define BLOCK_CIPHER_custom(n,keylen,blocksize,ivlen,mode,MODE,fl) \
416 static const EVP_CIPHER aesni_##keylen##_##mode = { \
417 .nid = n##_##keylen##_##mode, \
418 .block_size = blocksize, \
420 (EVP_CIPH_##MODE##_MODE == EVP_CIPH_XTS_MODE ? 2 : 1) * \
423 .flags = fl | EVP_CIPH_##MODE##_MODE, \
424 .init = aesni_##mode##_init_key, \
425 .do_cipher = aesni_##mode##_cipher, \
426 .cleanup = aes_##mode##_cleanup, \
427 .ctx_size = sizeof(EVP_AES_##MODE##_CTX), \
428 .ctrl = aes_##mode##_ctrl \
430 static const EVP_CIPHER aes_##keylen##_##mode = { \
431 .nid = n##_##keylen##_##mode, \
432 .block_size = blocksize, \
434 (EVP_CIPH_##MODE##_MODE == EVP_CIPH_XTS_MODE ? 2 : 1) * \
437 .flags = fl | EVP_CIPH_##MODE##_MODE, \
438 .init = aes_##mode##_init_key, \
439 .do_cipher = aes_##mode##_cipher, \
440 .cleanup = aes_##mode##_cleanup, \
441 .ctx_size = sizeof(EVP_AES_##MODE##_CTX), \
442 .ctrl = aes_##mode##_ctrl \
445 EVP_aes_##keylen##_##mode(void) \
447 return AESNI_CAPABLE ? \
448 &aesni_##keylen##_##mode : &aes_##keylen##_##mode; \
453 #define BLOCK_CIPHER_generic(n,keylen,blocksize,ivlen,nmode,mode,MODE,fl) \
454 static const EVP_CIPHER aes_##keylen##_##mode = { \
455 .nid = n##_##keylen##_##nmode, \
456 .block_size = blocksize, \
457 .key_len = keylen / 8, \
459 .flags = fl | EVP_CIPH_##MODE##_MODE, \
460 .init = aes_init_key, \
461 .do_cipher = aes_##mode##_cipher, \
462 .ctx_size = sizeof(EVP_AES_KEY) \
465 EVP_aes_##keylen##_##mode(void) \
467 return &aes_##keylen##_##mode; \
470 #define BLOCK_CIPHER_custom(n,keylen,blocksize,ivlen,mode,MODE,fl) \
471 static const EVP_CIPHER aes_##keylen##_##mode = { \
472 .nid = n##_##keylen##_##mode, \
473 .block_size = blocksize, \
475 (EVP_CIPH_##MODE##_MODE == EVP_CIPH_XTS_MODE ? 2 : 1) * \
478 .flags = fl | EVP_CIPH_##MODE##_MODE, \
479 .init = aes_##mode##_init_key, \
480 .do_cipher = aes_##mode##_cipher, \
481 .cleanup = aes_##mode##_cleanup, \
482 .ctx_size = sizeof(EVP_AES_##MODE##_CTX), \
483 .ctrl = aes_##mode##_ctrl \
486 EVP_aes_##keylen##_##mode(void) \
488 return &aes_##keylen##_##mode; \
493 #define BLOCK_CIPHER_generic_pack(nid,keylen,flags) \
494 BLOCK_CIPHER_generic(nid,keylen,16,16,cbc,cbc,CBC,flags|EVP_CIPH_FLAG_DEFAULT_ASN1) \
495 BLOCK_CIPHER_generic(nid,keylen,16,0,ecb,ecb,ECB,flags|EVP_CIPH_FLAG_DEFAULT_ASN1) \
496 BLOCK_CIPHER_generic(nid,keylen,1,16,ofb128,ofb,OFB,flags|EVP_CIPH_FLAG_DEFAULT_ASN1) \
497 BLOCK_CIPHER_generic(nid,keylen,1,16,cfb128,cfb,CFB,flags|EVP_CIPH_FLAG_DEFAULT_ASN1) \
498 BLOCK_CIPHER_generic(nid,keylen,1,16,cfb1,cfb1,CFB,flags) \
499 BLOCK_CIPHER_generic(nid,keylen,1,16,cfb8,cfb8,CFB,flags) \
500 BLOCK_CIPHER_generic(nid,keylen,1,16,ctr,ctr,CTR,flags)
503 aes_init_key(EVP_CIPHER_CTX
*ctx
, const unsigned char *key
,
504 const unsigned char *iv
, int enc
)
507 EVP_AES_KEY
*dat
= (EVP_AES_KEY
*)ctx
->cipher_data
;
509 mode
= ctx
->cipher
->flags
& EVP_CIPH_MODE
;
510 if ((mode
== EVP_CIPH_ECB_MODE
|| mode
== EVP_CIPH_CBC_MODE
) &&
513 if (BSAES_CAPABLE
&& mode
== EVP_CIPH_CBC_MODE
) {
514 ret
= AES_set_decrypt_key(key
, ctx
->key_len
* 8,
516 dat
->block
= (block128_f
)AES_decrypt
;
517 dat
->stream
.cbc
= (cbc128_f
)bsaes_cbc_encrypt
;
522 ret
= vpaes_set_decrypt_key(key
, ctx
->key_len
* 8,
524 dat
->block
= (block128_f
)vpaes_decrypt
;
525 dat
->stream
.cbc
= mode
== EVP_CIPH_CBC_MODE
?
526 (cbc128_f
)vpaes_cbc_encrypt
: NULL
;
530 ret
= AES_set_decrypt_key(key
, ctx
->key_len
* 8,
532 dat
->block
= (block128_f
)AES_decrypt
;
533 dat
->stream
.cbc
= mode
== EVP_CIPH_CBC_MODE
?
534 (cbc128_f
)AES_cbc_encrypt
: NULL
;
537 if (BSAES_CAPABLE
&& mode
== EVP_CIPH_CTR_MODE
) {
538 ret
= AES_set_encrypt_key(key
, ctx
->key_len
* 8,
540 dat
->block
= (block128_f
)AES_encrypt
;
541 dat
->stream
.ctr
= (ctr128_f
)bsaes_ctr32_encrypt_blocks
;
546 ret
= vpaes_set_encrypt_key(key
, ctx
->key_len
* 8,
548 dat
->block
= (block128_f
)vpaes_encrypt
;
549 dat
->stream
.cbc
= mode
== EVP_CIPH_CBC_MODE
?
550 (cbc128_f
)vpaes_cbc_encrypt
: NULL
;
554 ret
= AES_set_encrypt_key(key
, ctx
->key_len
* 8,
556 dat
->block
= (block128_f
)AES_encrypt
;
557 dat
->stream
.cbc
= mode
== EVP_CIPH_CBC_MODE
?
558 (cbc128_f
)AES_cbc_encrypt
: NULL
;
560 if (mode
== EVP_CIPH_CTR_MODE
)
561 dat
->stream
.ctr
= (ctr128_f
)AES_ctr32_encrypt
;
566 EVPerror(EVP_R_AES_KEY_SETUP_FAILED
);
574 aes_cbc_cipher(EVP_CIPHER_CTX
*ctx
, unsigned char *out
,
575 const unsigned char *in
, size_t len
)
577 EVP_AES_KEY
*dat
= (EVP_AES_KEY
*)ctx
->cipher_data
;
580 (*dat
->stream
.cbc
)(in
, out
, len
, &dat
->ks
, ctx
->iv
,
582 else if (ctx
->encrypt
)
583 CRYPTO_cbc128_encrypt(in
, out
, len
, &dat
->ks
, ctx
->iv
,
586 CRYPTO_cbc128_decrypt(in
, out
, len
, &dat
->ks
, ctx
->iv
,
593 aes_ecb_cipher(EVP_CIPHER_CTX
*ctx
, unsigned char *out
,
594 const unsigned char *in
, size_t len
)
596 size_t bl
= ctx
->cipher
->block_size
;
598 EVP_AES_KEY
*dat
= (EVP_AES_KEY
*)ctx
->cipher_data
;
603 for (i
= 0, len
-= bl
; i
<= len
; i
+= bl
)
604 (*dat
->block
)(in
+ i
, out
+ i
, &dat
->ks
);
610 aes_ofb_cipher(EVP_CIPHER_CTX
*ctx
, unsigned char *out
,
611 const unsigned char *in
, size_t len
)
613 EVP_AES_KEY
*dat
= (EVP_AES_KEY
*)ctx
->cipher_data
;
615 CRYPTO_ofb128_encrypt(in
, out
, len
, &dat
->ks
, ctx
->iv
, &ctx
->num
,
621 aes_cfb_cipher(EVP_CIPHER_CTX
*ctx
, unsigned char *out
,
622 const unsigned char *in
, size_t len
)
624 EVP_AES_KEY
*dat
= (EVP_AES_KEY
*)ctx
->cipher_data
;
626 CRYPTO_cfb128_encrypt(in
, out
, len
, &dat
->ks
, ctx
->iv
, &ctx
->num
,
627 ctx
->encrypt
, dat
->block
);
632 aes_cfb8_cipher(EVP_CIPHER_CTX
*ctx
, unsigned char *out
,
633 const unsigned char *in
, size_t len
)
635 EVP_AES_KEY
*dat
= (EVP_AES_KEY
*)ctx
->cipher_data
;
637 CRYPTO_cfb128_8_encrypt(in
, out
, len
, &dat
->ks
, ctx
->iv
, &ctx
->num
,
638 ctx
->encrypt
, dat
->block
);
643 aes_cfb1_cipher(EVP_CIPHER_CTX
*ctx
, unsigned char *out
,
644 const unsigned char *in
, size_t len
)
646 EVP_AES_KEY
*dat
= (EVP_AES_KEY
*)ctx
->cipher_data
;
648 if (ctx
->flags
&EVP_CIPH_FLAG_LENGTH_BITS
) {
649 CRYPTO_cfb128_1_encrypt(in
, out
, len
, &dat
->ks
, ctx
->iv
,
650 &ctx
->num
, ctx
->encrypt
, dat
->block
);
654 while (len
>= MAXBITCHUNK
) {
655 CRYPTO_cfb128_1_encrypt(in
, out
, MAXBITCHUNK
*8, &dat
->ks
,
656 ctx
->iv
, &ctx
->num
, ctx
->encrypt
, dat
->block
);
660 CRYPTO_cfb128_1_encrypt(in
, out
, len
*8, &dat
->ks
,
661 ctx
->iv
, &ctx
->num
, ctx
->encrypt
, dat
->block
);
666 static int aes_ctr_cipher (EVP_CIPHER_CTX
*ctx
, unsigned char *out
,
667 const unsigned char *in
, size_t len
)
669 unsigned int num
= ctx
->num
;
670 EVP_AES_KEY
*dat
= (EVP_AES_KEY
*)ctx
->cipher_data
;
673 CRYPTO_ctr128_encrypt_ctr32(in
, out
, len
, &dat
->ks
,
674 ctx
->iv
, ctx
->buf
, &num
, dat
->stream
.ctr
);
676 CRYPTO_ctr128_encrypt(in
, out
, len
, &dat
->ks
,
677 ctx
->iv
, ctx
->buf
, &num
, dat
->block
);
678 ctx
->num
= (size_t)num
;
682 BLOCK_CIPHER_generic_pack(NID_aes
, 128, EVP_CIPH_FLAG_FIPS
)
683 BLOCK_CIPHER_generic_pack(NID_aes
, 192, EVP_CIPH_FLAG_FIPS
)
684 BLOCK_CIPHER_generic_pack(NID_aes
, 256, EVP_CIPH_FLAG_FIPS
)
687 aes_gcm_cleanup(EVP_CIPHER_CTX
*c
)
689 EVP_AES_GCM_CTX
*gctx
= c
->cipher_data
;
691 if (gctx
->iv
!= c
->iv
)
693 explicit_bzero(gctx
, sizeof(*gctx
));
697 /* increment counter (64-bit int) by 1 */
699 ctr64_inc(unsigned char *counter
)
715 aes_gcm_ctrl(EVP_CIPHER_CTX
*c
, int type
, int arg
, void *ptr
)
717 EVP_AES_GCM_CTX
*gctx
= c
->cipher_data
;
723 gctx
->ivlen
= c
->cipher
->iv_len
;
727 gctx
->tls_aad_len
= -1;
730 case EVP_CTRL_GCM_SET_IVLEN
:
733 /* Allocate memory for IV if needed */
734 if ((arg
> EVP_MAX_IV_LENGTH
) && (arg
> gctx
->ivlen
)) {
735 if (gctx
->iv
!= c
->iv
)
737 gctx
->iv
= malloc(arg
);
744 case EVP_CTRL_GCM_SET_TAG
:
745 if (arg
<= 0 || arg
> 16 || c
->encrypt
)
747 memcpy(c
->buf
, ptr
, arg
);
751 case EVP_CTRL_GCM_GET_TAG
:
752 if (arg
<= 0 || arg
> 16 || !c
->encrypt
|| gctx
->taglen
< 0)
754 memcpy(ptr
, c
->buf
, arg
);
757 case EVP_CTRL_GCM_SET_IV_FIXED
:
758 /* Special case: -1 length restores whole IV */
760 memcpy(gctx
->iv
, ptr
, gctx
->ivlen
);
764 /* Fixed field must be at least 4 bytes and invocation field
767 if ((arg
< 4) || (gctx
->ivlen
- arg
) < 8)
770 memcpy(gctx
->iv
, ptr
, arg
);
772 arc4random_buf(gctx
->iv
+ arg
, gctx
->ivlen
- arg
);
776 case EVP_CTRL_GCM_IV_GEN
:
777 if (gctx
->iv_gen
== 0 || gctx
->key_set
== 0)
779 CRYPTO_gcm128_setiv(&gctx
->gcm
, gctx
->iv
, gctx
->ivlen
);
780 if (arg
<= 0 || arg
> gctx
->ivlen
)
782 memcpy(ptr
, gctx
->iv
+ gctx
->ivlen
- arg
, arg
);
783 /* Invocation field will be at least 8 bytes in size and
784 * so no need to check wrap around or increment more than
787 ctr64_inc(gctx
->iv
+ gctx
->ivlen
- 8);
791 case EVP_CTRL_GCM_SET_IV_INV
:
792 if (gctx
->iv_gen
== 0 || gctx
->key_set
== 0 || c
->encrypt
)
794 memcpy(gctx
->iv
+ gctx
->ivlen
- arg
, ptr
, arg
);
795 CRYPTO_gcm128_setiv(&gctx
->gcm
, gctx
->iv
, gctx
->ivlen
);
799 case EVP_CTRL_AEAD_TLS1_AAD
:
800 /* Save the AAD for later use */
803 memcpy(c
->buf
, ptr
, arg
);
804 gctx
->tls_aad_len
= arg
;
806 unsigned int len
= c
->buf
[arg
- 2] << 8 |
809 /* Correct length for explicit IV */
810 if (len
< EVP_GCM_TLS_EXPLICIT_IV_LEN
)
812 len
-= EVP_GCM_TLS_EXPLICIT_IV_LEN
;
814 /* If decrypting correct for tag too */
816 if (len
< EVP_GCM_TLS_TAG_LEN
)
818 len
-= EVP_GCM_TLS_TAG_LEN
;
820 c
->buf
[arg
- 2] = len
>> 8;
821 c
->buf
[arg
- 1] = len
& 0xff;
823 /* Extra padding: tag appended to record */
824 return EVP_GCM_TLS_TAG_LEN
;
828 EVP_CIPHER_CTX
*out
= ptr
;
829 EVP_AES_GCM_CTX
*gctx_out
= out
->cipher_data
;
832 if (gctx
->gcm
.key
!= &gctx
->ks
)
834 gctx_out
->gcm
.key
= &gctx_out
->ks
;
836 if (gctx
->iv
== c
->iv
)
837 gctx_out
->iv
= out
->iv
;
839 gctx_out
->iv
= malloc(gctx
->ivlen
);
842 memcpy(gctx_out
->iv
, gctx
->iv
, gctx
->ivlen
);
854 aes_gcm_set_key(AES_KEY
*aes_key
, GCM128_CONTEXT
*gcm_ctx
,
855 const unsigned char *key
, size_t key_len
)
859 AES_set_encrypt_key(key
, key_len
* 8, aes_key
);
860 CRYPTO_gcm128_init(gcm_ctx
, aes_key
, (block128_f
)AES_encrypt
);
861 return (ctr128_f
)bsaes_ctr32_encrypt_blocks
;
866 vpaes_set_encrypt_key(key
, key_len
* 8, aes_key
);
867 CRYPTO_gcm128_init(gcm_ctx
, aes_key
, (block128_f
)vpaes_encrypt
);
871 (void)0; /* terminate potentially open 'else' */
873 AES_set_encrypt_key(key
, key_len
* 8, aes_key
);
874 CRYPTO_gcm128_init(gcm_ctx
, aes_key
, (block128_f
)AES_encrypt
);
876 return (ctr128_f
)AES_ctr32_encrypt
;
883 aes_gcm_init_key(EVP_CIPHER_CTX
*ctx
, const unsigned char *key
,
884 const unsigned char *iv
, int enc
)
886 EVP_AES_GCM_CTX
*gctx
= ctx
->cipher_data
;
891 gctx
->ctr
= aes_gcm_set_key(&gctx
->ks
, &gctx
->gcm
,
894 /* If we have an iv can set it directly, otherwise use
897 if (iv
== NULL
&& gctx
->iv_set
)
900 CRYPTO_gcm128_setiv(&gctx
->gcm
, iv
, gctx
->ivlen
);
905 /* If key set use IV, otherwise copy */
907 CRYPTO_gcm128_setiv(&gctx
->gcm
, iv
, gctx
->ivlen
);
909 memcpy(gctx
->iv
, iv
, gctx
->ivlen
);
916 /* Handle TLS GCM packet format. This consists of the last portion of the IV
917 * followed by the payload and finally the tag. On encrypt generate IV,
918 * encrypt payload and write the tag. On verify retrieve IV, decrypt payload
923 aes_gcm_tls_cipher(EVP_CIPHER_CTX
*ctx
, unsigned char *out
,
924 const unsigned char *in
, size_t len
)
926 EVP_AES_GCM_CTX
*gctx
= ctx
->cipher_data
;
929 /* Encrypt/decrypt must be performed in place */
931 len
< (EVP_GCM_TLS_EXPLICIT_IV_LEN
+ EVP_GCM_TLS_TAG_LEN
))
934 /* Set IV from start of buffer or generate IV and write to start
937 if (EVP_CIPHER_CTX_ctrl(ctx
, ctx
->encrypt
?
938 EVP_CTRL_GCM_IV_GEN
: EVP_CTRL_GCM_SET_IV_INV
,
939 EVP_GCM_TLS_EXPLICIT_IV_LEN
, out
) <= 0)
943 if (CRYPTO_gcm128_aad(&gctx
->gcm
, ctx
->buf
, gctx
->tls_aad_len
))
946 /* Fix buffer and length to point to payload */
947 in
+= EVP_GCM_TLS_EXPLICIT_IV_LEN
;
948 out
+= EVP_GCM_TLS_EXPLICIT_IV_LEN
;
949 len
-= EVP_GCM_TLS_EXPLICIT_IV_LEN
+ EVP_GCM_TLS_TAG_LEN
;
951 /* Encrypt payload */
953 if (CRYPTO_gcm128_encrypt_ctr32(&gctx
->gcm
, in
, out
,
957 if (CRYPTO_gcm128_encrypt(&gctx
->gcm
, in
, out
, len
))
962 /* Finally write tag */
963 CRYPTO_gcm128_tag(&gctx
->gcm
, out
, EVP_GCM_TLS_TAG_LEN
);
964 rv
= len
+ EVP_GCM_TLS_EXPLICIT_IV_LEN
+ EVP_GCM_TLS_TAG_LEN
;
968 if (CRYPTO_gcm128_decrypt_ctr32(&gctx
->gcm
, in
, out
,
972 if (CRYPTO_gcm128_decrypt(&gctx
->gcm
, in
, out
, len
))
976 CRYPTO_gcm128_tag(&gctx
->gcm
, ctx
->buf
, EVP_GCM_TLS_TAG_LEN
);
978 /* If tag mismatch wipe buffer */
979 if (memcmp(ctx
->buf
, in
+ len
, EVP_GCM_TLS_TAG_LEN
)) {
980 explicit_bzero(out
, len
);
988 gctx
->tls_aad_len
= -1;
993 aes_gcm_cipher(EVP_CIPHER_CTX
*ctx
, unsigned char *out
,
994 const unsigned char *in
, size_t len
)
996 EVP_AES_GCM_CTX
*gctx
= ctx
->cipher_data
;
998 /* If not set up, return error */
1002 if (gctx
->tls_aad_len
>= 0)
1003 return aes_gcm_tls_cipher(ctx
, out
, in
, len
);
1010 if (CRYPTO_gcm128_aad(&gctx
->gcm
, in
, len
))
1012 } else if (ctx
->encrypt
) {
1014 if (CRYPTO_gcm128_encrypt_ctr32(&gctx
->gcm
,
1015 in
, out
, len
, gctx
->ctr
))
1018 if (CRYPTO_gcm128_encrypt(&gctx
->gcm
,
1024 if (CRYPTO_gcm128_decrypt_ctr32(&gctx
->gcm
,
1025 in
, out
, len
, gctx
->ctr
))
1028 if (CRYPTO_gcm128_decrypt(&gctx
->gcm
,
1035 if (!ctx
->encrypt
) {
1036 if (gctx
->taglen
< 0)
1038 if (CRYPTO_gcm128_finish(&gctx
->gcm
, ctx
->buf
,
1044 CRYPTO_gcm128_tag(&gctx
->gcm
, ctx
->buf
, 16);
1047 /* Don't reuse the IV */
/* EVP flags common to the AEAD-style modes (GCM/CCM): the mode manages its
 * own IV, drives the cipher through a custom do_cipher, must be re-inited on
 * every call, takes ctrl() calls before init, and needs a custom copy hook
 * because its context holds pointers into itself. */
1054 #define CUSTOM_FLAGS \
1055 ( EVP_CIPH_FLAG_DEFAULT_ASN1 | EVP_CIPH_CUSTOM_IV | \
1056 EVP_CIPH_FLAG_CUSTOM_CIPHER | EVP_CIPH_ALWAYS_CALL_INIT | \
1057 EVP_CIPH_CTRL_INIT | EVP_CIPH_CUSTOM_COPY )
1059 BLOCK_CIPHER_custom(NID_aes
, 128, 1, 12, gcm
, GCM
,
1060 EVP_CIPH_FLAG_FIPS
|EVP_CIPH_FLAG_AEAD_CIPHER
|CUSTOM_FLAGS
)
1061 BLOCK_CIPHER_custom(NID_aes
, 192, 1, 12, gcm
, GCM
,
1062 EVP_CIPH_FLAG_FIPS
|EVP_CIPH_FLAG_AEAD_CIPHER
|CUSTOM_FLAGS
)
1063 BLOCK_CIPHER_custom(NID_aes
, 256, 1, 12, gcm
, GCM
,
1064 EVP_CIPH_FLAG_FIPS
|EVP_CIPH_FLAG_AEAD_CIPHER
|CUSTOM_FLAGS
)
1067 aes_xts_ctrl(EVP_CIPHER_CTX
*c
, int type
, int arg
, void *ptr
)
1069 EVP_AES_XTS_CTX
*xctx
= c
->cipher_data
;
1074 * key1 and key2 are used as an indicator both key and IV
1077 xctx
->xts
.key1
= NULL
;
1078 xctx
->xts
.key2
= NULL
;
1083 EVP_CIPHER_CTX
*out
= ptr
;
1084 EVP_AES_XTS_CTX
*xctx_out
= out
->cipher_data
;
1086 if (xctx
->xts
.key1
) {
1087 if (xctx
->xts
.key1
!= &xctx
->ks1
)
1089 xctx_out
->xts
.key1
= &xctx_out
->ks1
;
1091 if (xctx
->xts
.key2
) {
1092 if (xctx
->xts
.key2
!= &xctx
->ks2
)
1094 xctx_out
->xts
.key2
= &xctx_out
->ks2
;
1103 aes_xts_init_key(EVP_CIPHER_CTX
*ctx
, const unsigned char *key
,
1104 const unsigned char *iv
, int enc
)
1106 EVP_AES_XTS_CTX
*xctx
= ctx
->cipher_data
;
1113 xctx
->stream
= enc
? AES_xts_encrypt
: AES_xts_decrypt
;
1115 xctx
->stream
= NULL
;
1117 /* key_len is two AES keys */
1118 #ifdef BSAES_CAPABLE
1120 xctx
->stream
= enc
? bsaes_xts_encrypt
:
1124 #ifdef VPAES_CAPABLE
1125 if (VPAES_CAPABLE
) {
1127 vpaes_set_encrypt_key(key
, ctx
->key_len
* 4,
1129 xctx
->xts
.block1
= (block128_f
)vpaes_encrypt
;
1131 vpaes_set_decrypt_key(key
, ctx
->key_len
* 4,
1133 xctx
->xts
.block1
= (block128_f
)vpaes_decrypt
;
1136 vpaes_set_encrypt_key(key
+ ctx
->key_len
/ 2,
1137 ctx
->key_len
* 4, &xctx
->ks2
);
1138 xctx
->xts
.block2
= (block128_f
)vpaes_encrypt
;
1140 xctx
->xts
.key1
= &xctx
->ks1
;
1144 (void)0; /* terminate potentially open 'else' */
1147 AES_set_encrypt_key(key
, ctx
->key_len
* 4, &xctx
->ks1
);
1148 xctx
->xts
.block1
= (block128_f
)AES_encrypt
;
1150 AES_set_decrypt_key(key
, ctx
->key_len
* 4, &xctx
->ks1
);
1151 xctx
->xts
.block1
= (block128_f
)AES_decrypt
;
1154 AES_set_encrypt_key(key
+ ctx
->key_len
/ 2,
1155 ctx
->key_len
* 4, &xctx
->ks2
);
1156 xctx
->xts
.block2
= (block128_f
)AES_encrypt
;
1158 xctx
->xts
.key1
= &xctx
->ks1
;
1162 xctx
->xts
.key2
= &xctx
->ks2
;
1163 memcpy(ctx
->iv
, iv
, 16);
1170 aes_xts_cipher(EVP_CIPHER_CTX
*ctx
, unsigned char *out
,
1171 const unsigned char *in
, size_t len
)
1173 EVP_AES_XTS_CTX
*xctx
= ctx
->cipher_data
;
1175 if (!xctx
->xts
.key1
|| !xctx
->xts
.key2
)
1177 if (!out
|| !in
|| len
< AES_BLOCK_SIZE
)
1181 (*xctx
->stream
)(in
, out
, len
, xctx
->xts
.key1
, xctx
->xts
.key2
,
1183 else if (CRYPTO_xts128_encrypt(&xctx
->xts
, ctx
->iv
, in
, out
, len
,
/* No cleanup hook for XTS — presumably the XTS context holds no separately
 * allocated state (key schedules are embedded); verify against the ctx. */
1189 #define aes_xts_cleanup NULL
1192 ( EVP_CIPH_FLAG_DEFAULT_ASN1 | EVP_CIPH_CUSTOM_IV | \
1193 EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT | EVP_CIPH_CUSTOM_COPY )
1195 BLOCK_CIPHER_custom(NID_aes
, 128, 1, 16, xts
, XTS
, EVP_CIPH_FLAG_FIPS
|XTS_FLAGS
)
1196 BLOCK_CIPHER_custom(NID_aes
, 256, 1, 16, xts
, XTS
, EVP_CIPH_FLAG_FIPS
|XTS_FLAGS
)
1199 aes_ccm_ctrl(EVP_CIPHER_CTX
*c
, int type
, int arg
, void *ptr
)
1201 EVP_AES_CCM_CTX
*cctx
= c
->cipher_data
;
1213 case EVP_CTRL_CCM_SET_IVLEN
:
1216 case EVP_CTRL_CCM_SET_L
:
1217 if (arg
< 2 || arg
> 8)
1222 case EVP_CTRL_CCM_SET_TAG
:
1223 if ((arg
& 1) || arg
< 4 || arg
> 16)
1225 if ((c
->encrypt
&& ptr
) || (!c
->encrypt
&& !ptr
))
1229 memcpy(c
->buf
, ptr
, arg
);
1234 case EVP_CTRL_CCM_GET_TAG
:
1235 if (!c
->encrypt
|| !cctx
->tag_set
)
1237 if (!CRYPTO_ccm128_tag(&cctx
->ccm
, ptr
, (size_t)arg
))
1246 EVP_CIPHER_CTX
*out
= ptr
;
1247 EVP_AES_CCM_CTX
*cctx_out
= out
->cipher_data
;
1249 if (cctx
->ccm
.key
) {
1250 if (cctx
->ccm
.key
!= &cctx
->ks
)
1252 cctx_out
->ccm
.key
= &cctx_out
->ks
;
1263 aes_ccm_init_key(EVP_CIPHER_CTX
*ctx
, const unsigned char *key
,
1264 const unsigned char *iv
, int enc
)
1266 EVP_AES_CCM_CTX
*cctx
= ctx
->cipher_data
;
1271 #ifdef VPAES_CAPABLE
1272 if (VPAES_CAPABLE
) {
1273 vpaes_set_encrypt_key(key
, ctx
->key_len
*8, &cctx
->ks
);
1274 CRYPTO_ccm128_init(&cctx
->ccm
, cctx
->M
, cctx
->L
,
1275 &cctx
->ks
, (block128_f
)vpaes_encrypt
);
1281 AES_set_encrypt_key(key
, ctx
->key_len
* 8, &cctx
->ks
);
1282 CRYPTO_ccm128_init(&cctx
->ccm
, cctx
->M
, cctx
->L
,
1283 &cctx
->ks
, (block128_f
)AES_encrypt
);
1288 memcpy(ctx
->iv
, iv
, 15 - cctx
->L
);
1295 aes_ccm_cipher(EVP_CIPHER_CTX
*ctx
, unsigned char *out
,
1296 const unsigned char *in
, size_t len
)
1298 EVP_AES_CCM_CTX
*cctx
= ctx
->cipher_data
;
1299 CCM128_CONTEXT
*ccm
= &cctx
->ccm
;
1301 /* If not set up, return error */
1302 if (!cctx
->iv_set
&& !cctx
->key_set
)
1304 if (!ctx
->encrypt
&& !cctx
->tag_set
)
1309 if (CRYPTO_ccm128_setiv(ccm
, ctx
->iv
, 15 - cctx
->L
,
1315 /* If have AAD need message length */
1316 if (!cctx
->len_set
&& len
)
1318 CRYPTO_ccm128_aad(ccm
, in
, len
);
1321 /* EVP_*Final() doesn't return any data */
1324 /* If not set length yet do it */
1325 if (!cctx
->len_set
) {
1326 if (CRYPTO_ccm128_setiv(ccm
, ctx
->iv
, 15 - cctx
->L
, len
))
1331 if (cctx
->str
? CRYPTO_ccm128_encrypt_ccm64(ccm
, in
, out
, len
,
1332 cctx
->str
) : CRYPTO_ccm128_encrypt(ccm
, in
, out
, len
))
1338 if (cctx
->str
? !CRYPTO_ccm128_decrypt_ccm64(ccm
, in
, out
, len
,
1339 cctx
->str
) : !CRYPTO_ccm128_decrypt(ccm
, in
, out
, len
)) {
1340 unsigned char tag
[16];
1341 if (CRYPTO_ccm128_tag(ccm
, tag
, cctx
->M
)) {
1342 if (!memcmp(tag
, ctx
->buf
, cctx
->M
))
1347 explicit_bzero(out
, len
);
/* No cleanup hook for CCM — presumably the CCM context holds no separately
 * allocated state (key schedule is embedded); verify against the ctx. */
1356 #define aes_ccm_cleanup NULL
1358 BLOCK_CIPHER_custom(NID_aes
, 128, 1, 12, ccm
, CCM
,
1359 EVP_CIPH_FLAG_FIPS
|CUSTOM_FLAGS
)
1360 BLOCK_CIPHER_custom(NID_aes
, 192, 1, 12, ccm
, CCM
,
1361 EVP_CIPH_FLAG_FIPS
|CUSTOM_FLAGS
)
1362 BLOCK_CIPHER_custom(NID_aes
, 256, 1, 12, ccm
, CCM
,
1363 EVP_CIPH_FLAG_FIPS
|CUSTOM_FLAGS
)
/* Full GCM authentication tag length in bytes (also the per-seal overhead
 * and maximum tag length advertised by the AEADs below). */
1365 #define EVP_AEAD_AES_GCM_TAG_LEN 16
1367 struct aead_aes_gcm_ctx
{
1374 unsigned char tag_len
;
1378 aead_aes_gcm_init(EVP_AEAD_CTX
*ctx
, const unsigned char *key
, size_t key_len
,
1381 struct aead_aes_gcm_ctx
*gcm_ctx
;
1382 const size_t key_bits
= key_len
* 8;
1384 /* EVP_AEAD_CTX_init should catch this. */
1385 if (key_bits
!= 128 && key_bits
!= 256) {
1386 EVPerror(EVP_R_BAD_KEY_LENGTH
);
1390 if (tag_len
== EVP_AEAD_DEFAULT_TAG_LENGTH
)
1391 tag_len
= EVP_AEAD_AES_GCM_TAG_LEN
;
1393 if (tag_len
> EVP_AEAD_AES_GCM_TAG_LEN
) {
1394 EVPerror(EVP_R_TAG_TOO_LARGE
);
1398 gcm_ctx
= malloc(sizeof(struct aead_aes_gcm_ctx
));
1399 if (gcm_ctx
== NULL
)
1402 #ifdef AESNI_CAPABLE
1403 if (AESNI_CAPABLE
) {
1404 aesni_set_encrypt_key(key
, key_bits
, &gcm_ctx
->ks
.ks
);
1405 CRYPTO_gcm128_init(&gcm_ctx
->gcm
, &gcm_ctx
->ks
.ks
,
1406 (block128_f
)aesni_encrypt
);
1407 gcm_ctx
->ctr
= (ctr128_f
) aesni_ctr32_encrypt_blocks
;
1411 gcm_ctx
->ctr
= aes_gcm_set_key(&gcm_ctx
->ks
.ks
, &gcm_ctx
->gcm
,
1414 gcm_ctx
->tag_len
= tag_len
;
1415 ctx
->aead_state
= gcm_ctx
;
1421 aead_aes_gcm_cleanup(EVP_AEAD_CTX
*ctx
)
1423 struct aead_aes_gcm_ctx
*gcm_ctx
= ctx
->aead_state
;
1425 explicit_bzero(gcm_ctx
, sizeof(*gcm_ctx
));
1430 aead_aes_gcm_seal(const EVP_AEAD_CTX
*ctx
, unsigned char *out
, size_t *out_len
,
1431 size_t max_out_len
, const unsigned char *nonce
, size_t nonce_len
,
1432 const unsigned char *in
, size_t in_len
, const unsigned char *ad
,
1435 const struct aead_aes_gcm_ctx
*gcm_ctx
= ctx
->aead_state
;
1439 if (max_out_len
< in_len
+ gcm_ctx
->tag_len
) {
1440 EVPerror(EVP_R_BUFFER_TOO_SMALL
);
1444 memcpy(&gcm
, &gcm_ctx
->gcm
, sizeof(gcm
));
1445 CRYPTO_gcm128_setiv(&gcm
, nonce
, nonce_len
);
1447 if (ad_len
> 0 && CRYPTO_gcm128_aad(&gcm
, ad
, ad_len
))
1451 if (CRYPTO_gcm128_encrypt_ctr32(&gcm
, in
+ bulk
, out
+ bulk
,
1452 in_len
- bulk
, gcm_ctx
->ctr
))
1455 if (CRYPTO_gcm128_encrypt(&gcm
, in
+ bulk
, out
+ bulk
,
1460 CRYPTO_gcm128_tag(&gcm
, out
+ in_len
, gcm_ctx
->tag_len
);
1461 *out_len
= in_len
+ gcm_ctx
->tag_len
;
1467 aead_aes_gcm_open(const EVP_AEAD_CTX
*ctx
, unsigned char *out
, size_t *out_len
,
1468 size_t max_out_len
, const unsigned char *nonce
, size_t nonce_len
,
1469 const unsigned char *in
, size_t in_len
, const unsigned char *ad
,
1472 const struct aead_aes_gcm_ctx
*gcm_ctx
= ctx
->aead_state
;
1473 unsigned char tag
[EVP_AEAD_AES_GCM_TAG_LEN
];
1475 size_t plaintext_len
;
1478 if (in_len
< gcm_ctx
->tag_len
) {
1479 EVPerror(EVP_R_BAD_DECRYPT
);
1483 plaintext_len
= in_len
- gcm_ctx
->tag_len
;
1485 if (max_out_len
< plaintext_len
) {
1486 EVPerror(EVP_R_BUFFER_TOO_SMALL
);
1490 memcpy(&gcm
, &gcm_ctx
->gcm
, sizeof(gcm
));
1491 CRYPTO_gcm128_setiv(&gcm
, nonce
, nonce_len
);
1493 if (CRYPTO_gcm128_aad(&gcm
, ad
, ad_len
))
1497 if (CRYPTO_gcm128_decrypt_ctr32(&gcm
, in
+ bulk
, out
+ bulk
,
1498 in_len
- bulk
- gcm_ctx
->tag_len
, gcm_ctx
->ctr
))
1501 if (CRYPTO_gcm128_decrypt(&gcm
, in
+ bulk
, out
+ bulk
,
1502 in_len
- bulk
- gcm_ctx
->tag_len
))
1506 CRYPTO_gcm128_tag(&gcm
, tag
, gcm_ctx
->tag_len
);
1507 if (timingsafe_memcmp(tag
, in
+ plaintext_len
, gcm_ctx
->tag_len
) != 0) {
1508 EVPerror(EVP_R_BAD_DECRYPT
);
1512 *out_len
= plaintext_len
;
1517 static const EVP_AEAD aead_aes_128_gcm
= {
1520 .overhead
= EVP_AEAD_AES_GCM_TAG_LEN
,
1521 .max_tag_len
= EVP_AEAD_AES_GCM_TAG_LEN
,
1523 .init
= aead_aes_gcm_init
,
1524 .cleanup
= aead_aes_gcm_cleanup
,
1525 .seal
= aead_aes_gcm_seal
,
1526 .open
= aead_aes_gcm_open
,
1529 static const EVP_AEAD aead_aes_256_gcm
= {
1532 .overhead
= EVP_AEAD_AES_GCM_TAG_LEN
,
1533 .max_tag_len
= EVP_AEAD_AES_GCM_TAG_LEN
,
1535 .init
= aead_aes_gcm_init
,
1536 .cleanup
= aead_aes_gcm_cleanup
,
1537 .seal
= aead_aes_gcm_seal
,
1538 .open
= aead_aes_gcm_open
,
1542 EVP_aead_aes_128_gcm(void)
1544 return &aead_aes_128_gcm
;
1548 EVP_aead_aes_256_gcm(void)
1550 return &aead_aes_256_gcm
;