/* ====================================================================
 * Copyright (c) 2011-2013 The OpenSSL Project.  All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 *
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in
 *    the documentation and/or other materials provided with the
 *    distribution.
 *
 * 3. All advertising materials mentioning features or use of this
 *    software must display the following acknowledgment:
 *    "This product includes software developed by the OpenSSL Project
 *    for use in the OpenSSL Toolkit. (http://www.OpenSSL.org/)"
 *
 * 4. The names "OpenSSL Toolkit" and "OpenSSL Project" must not be used to
 *    endorse or promote products derived from this software without
 *    prior written permission. For written permission, please contact
 *    licensing@OpenSSL.org.
 *
 * 5. Products derived from this software may not be called "OpenSSL"
 *    nor may "OpenSSL" appear in their names without prior written
 *    permission of the OpenSSL Project.
 *
 * 6. Redistributions of any form whatsoever must retain the following
 *    acknowledgment:
 *    "This product includes software developed by the OpenSSL Project
 *    for use in the OpenSSL Toolkit (http://www.OpenSSL.org/)"
 *
 * THIS SOFTWARE IS PROVIDED BY THE OpenSSL PROJECT ``AS IS'' AND ANY
 * EXPRESSED OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE OpenSSL PROJECT OR
 * ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
 * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
 * OF THE POSSIBILITY OF SUCH DAMAGE.
 * ====================================================================
 */
#include <openssl/opensslconf.h>

#include <stdio.h>
#include <string.h>

#if !defined(OPENSSL_NO_AES) && !defined(OPENSSL_NO_SHA256)

# include <openssl/evp.h>
# include <openssl/objects.h>
# include <openssl/aes.h>
# include <openssl/sha.h>
# include <openssl/rand.h>
# include "modes_lcl.h"

# ifndef EVP_CIPH_FLAG_AEAD_CIPHER
#  define EVP_CIPH_FLAG_AEAD_CIPHER     0x200000
#  define EVP_CTRL_AEAD_TLS1_AAD        0x16
#  define EVP_CTRL_AEAD_SET_MAC_KEY     0x17
# endif

# if !defined(EVP_CIPH_FLAG_DEFAULT_ASN1)
#  define EVP_CIPH_FLAG_DEFAULT_ASN1 0
# endif

# if !defined(EVP_CIPH_FLAG_TLS1_1_MULTIBLOCK)
#  define EVP_CIPH_FLAG_TLS1_1_MULTIBLOCK 0
# endif

# define TLS1_1_VERSION 0x0302
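
/*
 * 0x0302 is the TLS 1.1 protocol version number. From TLS 1.1 on,
 * every CBC record starts with an explicit per-record IV, which is
 * why the code below skips AES_BLOCK_SIZE payload bytes whenever the
 * negotiated version is >= TLS1_1_VERSION.
 */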
typedef struct {
    AES_KEY ks;
    SHA256_CTX head, tail, md;
    size_t payload_length;      /* AAD length in decrypt case */
    union {
        unsigned int tls_ver;
        unsigned char tls_aad[16]; /* 13 used */
    } aux;
} EVP_AES_HMAC_SHA256;
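
/*
 * A note on the SHA256_CTX fields above: head caches the hash state
 * after absorbing the 64-byte ipad-XORed HMAC key and tail the state
 * after the opad-XORed key, so per-record HMAC computation skips the
 * key block entirely; md is the running inner-hash state of the
 * record currently being processed.
 */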
# define NO_PAYLOAD_LENGTH       ((size_t)-1)
# if defined(AES_ASM) && ( \
        defined(__x86_64) || defined(__x86_64__) || \
        defined(_M_AMD64) || defined(_M_X64) || \
        defined(__INTEL__) )

extern unsigned int OPENSSL_ia32cap_P[];
#  define AESNI_CAPABLE   (1<<(57-32))

int aesni_set_encrypt_key(const unsigned char *userKey, int bits,
                          AES_KEY *key);
int aesni_set_decrypt_key(const unsigned char *userKey, int bits,
                          AES_KEY *key);

void aesni_cbc_encrypt(const unsigned char *in,
                       unsigned char *out,
                       size_t length,
                       const AES_KEY *key, unsigned char *ivec, int enc);

int aesni_cbc_sha256_enc(const void *inp, void *out, size_t blocks,
                         const AES_KEY *key, unsigned char iv[16],
                         SHA256_CTX *ctx, const void *in0);

#  define data(ctx) ((EVP_AES_HMAC_SHA256 *)(ctx)->cipher_data)
static int aesni_cbc_hmac_sha256_init_key(EVP_CIPHER_CTX *ctx,
                                          const unsigned char *inkey,
                                          const unsigned char *iv, int enc)
{
    EVP_AES_HMAC_SHA256 *key = data(ctx);
    int ret;

    if (enc)
        memset(&key->ks, 0, sizeof(key->ks.rd_key)),
            ret = aesni_set_encrypt_key(inkey, ctx->key_len * 8, &key->ks);
    else
        ret = aesni_set_decrypt_key(inkey, ctx->key_len * 8, &key->ks);

    SHA256_Init(&key->head);    /* handy when benchmarking */
    key->tail = key->head;
    key->md = key->head;

    key->payload_length = NO_PAYLOAD_LENGTH;

    return ret < 0 ? 0 : 1;
}
#  define STITCHED_CALL

#  if !defined(STITCHED_CALL)
#   define aes_off 0
#  endif

void sha256_block_data_order(void *c, const void *p, size_t len);
static void sha256_update(SHA256_CTX *c, const void *data, size_t len)
{
    const unsigned char *ptr = data;
    size_t res;

    /* flush any buffered partial block first */
    if ((res = c->num)) {
        res = SHA256_CBLOCK - res;
        if (len < res)
            res = len;
        SHA256_Update(c, ptr, res);
        ptr += res;
        len -= res;
    }

    res = len % SHA256_CBLOCK;
    len -= res;

    if (len) {
        sha256_block_data_order(c, ptr, len / SHA256_CBLOCK);

        ptr += len;
        c->Nh += len >> 29;
        c->Nl += len <<= 3;
        if (c->Nl < (unsigned int)len)
            c->Nh++;
    }

    if (res)
        SHA256_Update(c, ptr, res);
}

#  ifdef SHA256_Update
#   undef SHA256_Update
#  endif
#  define SHA256_Update sha256_update
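
/*
 * sha256_update() stands in for SHA256_Update() from here on: it
 * feeds whole 64-byte blocks straight to the compression function and
 * lets the real SHA256_Update() buffer only the sub-block head and
 * tail, which keeps the bit counters (Nl/Nh) correct for the final
 * padding block while avoiding a second pass over the bulk data.
 */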
#  if !defined(OPENSSL_NO_MULTIBLOCK) && EVP_CIPH_FLAG_TLS1_1_MULTIBLOCK

typedef struct {
    unsigned int A[8], B[8], C[8], D[8], E[8], F[8], G[8], H[8];
} SHA256_MB_CTX;

typedef struct {
    const unsigned char *ptr;
    int blocks;
} HASH_DESC;

void sha256_multi_block(SHA256_MB_CTX *, const HASH_DESC *, int);

typedef struct {
    const unsigned char *inp;
    unsigned char *out;
    int blocks;
    u64 iv[2];
} CIPH_DESC;

void aesni_multi_cbc_encrypt(CIPH_DESC *, void *, int);
static size_t tls1_1_multi_block_encrypt(EVP_AES_HMAC_SHA256 *key,
                                         unsigned char *out,
                                         const unsigned char *inp,
                                         size_t inp_len, int n4x)
{                               /* n4x is 1 or 2 */
    HASH_DESC hash_d[8], edges[8];
    CIPH_DESC ciph_d[8];
    unsigned char storage[sizeof(SHA256_MB_CTX) + 32];
    union {
        u64 q[16];
        u32 d[32];
        u8 c[128];
    } blocks[8];
    SHA256_MB_CTX *ctx;
    unsigned int frag, last, packlen, i, x4 = 4 * n4x, minblocks,
        processed = 0;
    size_t ret = 0;
    u8 *IVs;
#   if defined(BSWAP8)
    u64 seqnum;
#   endif

    /* ask for IVs in bulk */
    if (RAND_bytes((IVs = blocks[0].c), 16 * x4) <= 0)
        return 0;

    /* align SHA256_MB_CTX on a 32-byte boundary */
    ctx = (SHA256_MB_CTX *) (storage + 32 - ((size_t)storage % 32));
    frag = (unsigned int)inp_len >> (1 + n4x);
    last = (unsigned int)inp_len + frag - (frag << (1 + n4x));
    if (last > frag && ((last + 13 + 9) % 64) < (x4 - 1)) {
        frag++;
        last -= x4 - 1;
    }

    packlen = 5 + 16 + ((frag + 32 + 16) & -16);

    /* populate descriptors with pointers and IVs */
    hash_d[0].ptr = inp;
    ciph_d[0].inp = inp;
    /* 5+16 is place for header and explicit IV */
    ciph_d[0].out = out + 5 + 16;
    memcpy(ciph_d[0].out - 16, IVs, 16);
    memcpy(ciph_d[0].iv, IVs, 16);
    IVs += 16;

    for (i = 1; i < x4; i++) {
        ciph_d[i].inp = hash_d[i].ptr = hash_d[i - 1].ptr + frag;
        ciph_d[i].out = ciph_d[i - 1].out + packlen;
        memcpy(ciph_d[i].out - 16, IVs, 16);
        memcpy(ciph_d[i].iv, IVs, 16);
        IVs += 16;
    }

#   if defined(BSWAP8)
    memcpy(blocks[0].c, key->md.data, 8);
    seqnum = BSWAP8(blocks[0].q[0]);
#   endif
    for (i = 0; i < x4; i++) {
        unsigned int len = (i == (x4 - 1) ? last : frag);
#   if !defined(BSWAP8)
        unsigned int carry, j;
#   endif

        ctx->A[i] = key->md.h[0];
        ctx->B[i] = key->md.h[1];
        ctx->C[i] = key->md.h[2];
        ctx->D[i] = key->md.h[3];
        ctx->E[i] = key->md.h[4];
        ctx->F[i] = key->md.h[5];
        ctx->G[i] = key->md.h[6];
        ctx->H[i] = key->md.h[7];

        /* fix seqnum */
#   if defined(BSWAP8)
        blocks[i].q[0] = BSWAP8(seqnum + i);
#   else
        for (carry = i, j = 8; j--;) {
            blocks[i].c[j] = ((u8 *)key->md.data)[j] + carry;
            carry = (blocks[i].c[j] - carry) >> (sizeof(carry) * 8 - 1);
        }
#   endif
        blocks[i].c[8] = ((u8 *)key->md.data)[8];
        blocks[i].c[9] = ((u8 *)key->md.data)[9];
        blocks[i].c[10] = ((u8 *)key->md.data)[10];
        /* fix length */
        blocks[i].c[11] = (u8)(len >> 8);
        blocks[i].c[12] = (u8)(len);

        memcpy(blocks[i].c + 13, hash_d[i].ptr, 64 - 13);
        hash_d[i].ptr += 64 - 13;
        hash_d[i].blocks = (len - (64 - 13)) / 64;

        edges[i].ptr = blocks[i].c;
        edges[i].blocks = 1;
    }
    /* hash 13-byte headers and first 64-13 bytes of inputs */
    sha256_multi_block(ctx, edges, n4x);
    /* hash bulk inputs */
#   define MAXCHUNKSIZE    2048
#   if     MAXCHUNKSIZE % 64
#    error  "MAXCHUNKSIZE is not divisible by 64"
#   elif   MAXCHUNKSIZE
    /*
     * goal is to minimize pressure on L1 cache by moving in shorter steps,
     * so that hashed data is still in the cache by the time we encrypt it
     */
    minblocks = ((frag <= last ? frag : last) - (64 - 13)) / 64;
    if (minblocks > MAXCHUNKSIZE / 64) {
        for (i = 0; i < x4; i++) {
            edges[i].ptr = hash_d[i].ptr;
            edges[i].blocks = MAXCHUNKSIZE / 64;
            ciph_d[i].blocks = MAXCHUNKSIZE / 16;
        }
        do {
            sha256_multi_block(ctx, edges, n4x);
            aesni_multi_cbc_encrypt(ciph_d, &key->ks, n4x);

            for (i = 0; i < x4; i++) {
                edges[i].ptr = hash_d[i].ptr += MAXCHUNKSIZE;
                hash_d[i].blocks -= MAXCHUNKSIZE / 64;
                edges[i].blocks = MAXCHUNKSIZE / 64;
                ciph_d[i].inp += MAXCHUNKSIZE;
                ciph_d[i].out += MAXCHUNKSIZE;
                ciph_d[i].blocks = MAXCHUNKSIZE / 16;
                memcpy(ciph_d[i].iv, ciph_d[i].out - 16, 16);
            }
            processed += MAXCHUNKSIZE;
            minblocks -= MAXCHUNKSIZE / 64;
        } while (minblocks > MAXCHUNKSIZE / 64);
    }
#   endif
#   undef  MAXCHUNKSIZE
    sha256_multi_block(ctx, hash_d, n4x);
    memset(blocks, 0, sizeof(blocks));
    for (i = 0; i < x4; i++) {
        unsigned int len = (i == (x4 - 1) ? last : frag),
            off = hash_d[i].blocks * 64;
        const unsigned char *ptr = hash_d[i].ptr + off;

        off = (len - processed) - (64 - 13) - off; /* remainder actually */
        memcpy(blocks[i].c, ptr, off);
        blocks[i].c[off] = 0x80;
        len += 64 + 13;         /* 64 is HMAC header */
        len *= 8;               /* convert to bits */
        if (off < (64 - 8)) {
#   ifdef BSWAP4
            blocks[i].d[15] = BSWAP4(len);
#   else
            PUTU32(blocks[i].c + 60, len);
#   endif
            edges[i].blocks = 1;
        } else {
#   ifdef BSWAP4
            blocks[i].d[31] = BSWAP4(len);
#   else
            PUTU32(blocks[i].c + 124, len);
#   endif
            edges[i].blocks = 2;
        }
        edges[i].ptr = blocks[i].c;
    }
    /* hash input tails and finalize */
    sha256_multi_block(ctx, edges, n4x);
    memset(blocks, 0, sizeof(blocks));
    for (i = 0; i < x4; i++) {
#   ifdef BSWAP4
        blocks[i].d[0] = BSWAP4(ctx->A[i]);
        ctx->A[i] = key->tail.h[0];
        blocks[i].d[1] = BSWAP4(ctx->B[i]);
        ctx->B[i] = key->tail.h[1];
        blocks[i].d[2] = BSWAP4(ctx->C[i]);
        ctx->C[i] = key->tail.h[2];
        blocks[i].d[3] = BSWAP4(ctx->D[i]);
        ctx->D[i] = key->tail.h[3];
        blocks[i].d[4] = BSWAP4(ctx->E[i]);
        ctx->E[i] = key->tail.h[4];
        blocks[i].d[5] = BSWAP4(ctx->F[i]);
        ctx->F[i] = key->tail.h[5];
        blocks[i].d[6] = BSWAP4(ctx->G[i]);
        ctx->G[i] = key->tail.h[6];
        blocks[i].d[7] = BSWAP4(ctx->H[i]);
        ctx->H[i] = key->tail.h[7];
        blocks[i].c[32] = 0x80;
        blocks[i].d[15] = BSWAP4((64 + 32) * 8);
#   else
        PUTU32(blocks[i].c + 0, ctx->A[i]);
        ctx->A[i] = key->tail.h[0];
        PUTU32(blocks[i].c + 4, ctx->B[i]);
        ctx->B[i] = key->tail.h[1];
        PUTU32(blocks[i].c + 8, ctx->C[i]);
        ctx->C[i] = key->tail.h[2];
        PUTU32(blocks[i].c + 12, ctx->D[i]);
        ctx->D[i] = key->tail.h[3];
        PUTU32(blocks[i].c + 16, ctx->E[i]);
        ctx->E[i] = key->tail.h[4];
        PUTU32(blocks[i].c + 20, ctx->F[i]);
        ctx->F[i] = key->tail.h[5];
        PUTU32(blocks[i].c + 24, ctx->G[i]);
        ctx->G[i] = key->tail.h[6];
        PUTU32(blocks[i].c + 28, ctx->H[i]);
        ctx->H[i] = key->tail.h[7];
        blocks[i].c[32] = 0x80;
        PUTU32(blocks[i].c + 60, (64 + 32) * 8);
#   endif
        edges[i].ptr = blocks[i].c;
        edges[i].blocks = 1;
    }

    /* finalize MACs */
    sha256_multi_block(ctx, edges, n4x);
    for (i = 0; i < x4; i++) {
        unsigned int len = (i == (x4 - 1) ? last : frag), pad, j;
        unsigned char *out0 = out;

        memcpy(ciph_d[i].out, ciph_d[i].inp, len - processed);
        ciph_d[i].inp = ciph_d[i].out;

        out += 5 + 16 + len;

        /* write MAC */
        PUTU32(out + 0, ctx->A[i]);
        PUTU32(out + 4, ctx->B[i]);
        PUTU32(out + 8, ctx->C[i]);
        PUTU32(out + 12, ctx->D[i]);
        PUTU32(out + 16, ctx->E[i]);
        PUTU32(out + 20, ctx->F[i]);
        PUTU32(out + 24, ctx->G[i]);
        PUTU32(out + 28, ctx->H[i]);
        out += 32;
        len += 32;

        /* pad */
        pad = 15 - len % 16;
        for (j = 0; j <= pad; j++)
            *(out++) = pad;
        len += pad + 1;

        ciph_d[i].blocks = (len - processed) / 16;
        len += 16;              /* account for explicit iv */

        /* arrange header */
        out0[0] = ((u8 *)key->md.data)[8];
        out0[1] = ((u8 *)key->md.data)[9];
        out0[2] = ((u8 *)key->md.data)[10];
        out0[3] = (u8)(len >> 8);
        out0[4] = (u8)(len);

        ret += len + 5;
        inp += frag;
    }

    aesni_multi_cbc_encrypt(ciph_d, &key->ks, n4x);

    OPENSSL_cleanse(blocks, sizeof(blocks));
    OPENSSL_cleanse(ctx, sizeof(*ctx));

    return ret;
}
#  endif
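
/*
 * Layout of each of the x4 records assembled above (a sketch derived
 * from the code itself, not from any external document):
 *
 *   5-byte TLS header | 16-byte explicit IV | frag (or last) payload
 *   bytes | 32-byte HMAC-SHA256 | 1-16 bytes of CBC padding
 *
 * packlen = 5 + 16 + ((frag + 32 + 16) & -16) rounds payload plus MAC
 * up to the AES block size while reserving at least one padding byte;
 * e.g. frag = 1000 gives 5 + 16 + (1048 & -16) = 5 + 16 + 1040 = 1061
 * bytes per record.
 */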
static int aesni_cbc_hmac_sha256_cipher(EVP_CIPHER_CTX *ctx,
                                        unsigned char *out,
                                        const unsigned char *in, size_t len)
{
    EVP_AES_HMAC_SHA256 *key = data(ctx);
    unsigned int l;
    size_t plen = key->payload_length, iv = 0, /* explicit IV in TLS 1.1 and
                                                * later */
        sha_off = 0;
#  if defined(STITCHED_CALL)
    size_t aes_off = 0, blocks;

    sha_off = SHA256_CBLOCK - key->md.num;
#  endif

    key->payload_length = NO_PAYLOAD_LENGTH;

    if (len % AES_BLOCK_SIZE)
        return 0;

    if (ctx->encrypt) {
        if (plen == NO_PAYLOAD_LENGTH)
            plen = len;
        else if (len !=
                 ((plen + SHA256_DIGEST_LENGTH +
                   AES_BLOCK_SIZE) & -AES_BLOCK_SIZE))
            return 0;
        else if (key->aux.tls_ver >= TLS1_1_VERSION)
            iv = AES_BLOCK_SIZE;
#  if defined(STITCHED_CALL)
        if (OPENSSL_ia32cap_P[1] & (1 << (60 - 32)) && /* AVX? */
            plen > (sha_off + iv) &&
            (blocks = (plen - (sha_off + iv)) / SHA256_CBLOCK)) {
            SHA256_Update(&key->md, in + iv, sha_off);

            (void)aesni_cbc_sha256_enc(in, out, blocks, &key->ks,
                                       ctx->iv, &key->md, in + iv + sha_off);
            blocks *= SHA256_CBLOCK;
            aes_off += blocks;
            sha_off += blocks;
            key->md.Nh += blocks >> 29;
            key->md.Nl += blocks <<= 3;
            if (key->md.Nl < (unsigned int)blocks)
                key->md.Nh++;
        } else {
            sha_off = 0;
        }
#  endif
        sha_off += iv;
        SHA256_Update(&key->md, in + sha_off, plen - sha_off);
        if (plen != len) {      /* "TLS" mode of operation */
            if (in != out)
                memcpy(out + aes_off, in + aes_off, plen - aes_off);

            /* calculate HMAC and append it to payload */
            SHA256_Final(out + plen, &key->md);
            key->md = key->tail;
            SHA256_Update(&key->md, out + plen, SHA256_DIGEST_LENGTH);
            SHA256_Final(out + plen, &key->md);

            /* pad the payload|hmac */
            plen += SHA256_DIGEST_LENGTH;
            for (l = len - plen - 1; plen < len; plen++)
                out[plen] = l;
            /* encrypt HMAC|padding at once */
            aesni_cbc_encrypt(out + aes_off, out + aes_off, len - aes_off,
                              &key->ks, ctx->iv, 1);
        } else {
            aesni_cbc_encrypt(in + aes_off, out + aes_off, len - aes_off,
                              &key->ks, ctx->iv, 1);
        }
    } else {
        union {
            unsigned int u[SHA256_DIGEST_LENGTH / sizeof(unsigned int)];
            unsigned char c[64 + SHA256_DIGEST_LENGTH];
        } mac, *pmac;

        /* arrange cache line alignment */
        pmac = (void *)(((size_t)mac.c + 63) & ((size_t)0 - 64));

        /* decrypt HMAC|padding at once */
        aesni_cbc_encrypt(in, out, len, &key->ks, ctx->iv, 0);
        if (plen != NO_PAYLOAD_LENGTH) { /* "TLS" mode of operation */
            size_t inp_len, mask, j, i;
            unsigned int res, maxpad, pad, bitlen;
            int ret = 1;
            union {
                unsigned int u[SHA_LBLOCK];
                unsigned char c[SHA256_CBLOCK];
            } *data = (void *)key->md.data;

            if ((key->aux.tls_aad[plen - 4] << 8 | key->aux.tls_aad[plen - 3])
                >= TLS1_1_VERSION)
                iv = AES_BLOCK_SIZE;

            if (len < (iv + SHA256_DIGEST_LENGTH + 1))
                return 0;

            /* omit explicit iv */
            out += iv;
            len -= iv;

            /* figure out payload length */
            pad = out[len - 1];
            maxpad = len - (SHA256_DIGEST_LENGTH + 1);
            maxpad |= (255 - maxpad) >> (sizeof(maxpad) * 8 - 8);
            maxpad &= 255;

            inp_len = len - (SHA256_DIGEST_LENGTH + pad + 1);
            mask = (0 - ((inp_len - len) >> (sizeof(inp_len) * 8 - 1)));
            inp_len &= mask;
            ret &= (int)mask;

            key->aux.tls_aad[plen - 2] = inp_len >> 8;
            key->aux.tls_aad[plen - 1] = inp_len;

            /* calculate HMAC */
            key->md = key->head;
            SHA256_Update(&key->md, key->aux.tls_aad, plen);
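
            /*
             * Everything below is constant-time: every ciphertext byte
             * is fed through the compression function, and all-ones or
             * all-zero masks select which intermediate hash state is
             * the real MAC, so the amount of work never depends on the
             * secret padding length (Lucky Thirteen mitigation).
             */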
#  if 1
            len -= SHA256_DIGEST_LENGTH; /* amend mac */
            if (len >= (256 + SHA256_CBLOCK)) {
                j = (len - (256 + SHA256_CBLOCK)) & (0 - SHA256_CBLOCK);
                j += SHA256_CBLOCK - key->md.num;
                SHA256_Update(&key->md, out, j);
                out += j;
                len -= j;
            }

            /* but pretend as if we hashed padded payload */
            bitlen = key->md.Nl + (inp_len << 3); /* at most 18 bits */
#   ifdef BSWAP4
            bitlen = BSWAP4(bitlen);
#   else
            mac.c[0] = 0;
            mac.c[1] = (unsigned char)(bitlen >> 16);
            mac.c[2] = (unsigned char)(bitlen >> 8);
            mac.c[3] = (unsigned char)bitlen;
            bitlen = mac.u[0];
#   endif

            pmac->u[0] = 0;
            pmac->u[1] = 0;
            pmac->u[2] = 0;
            pmac->u[3] = 0;
            pmac->u[4] = 0;
            pmac->u[5] = 0;
            pmac->u[6] = 0;
            pmac->u[7] = 0;
            for (res = key->md.num, j = 0; j < len; j++) {
                size_t c = out[j];
                mask = (j - inp_len) >> (sizeof(j) * 8 - 8);
                c &= mask;
                c |= 0x80 & ~mask & ~((inp_len - j) >> (sizeof(j) * 8 - 8));
                data->c[res++] = (unsigned char)c;

                if (res != SHA256_CBLOCK)
                    continue;

                /* j is not incremented yet */
                mask = 0 - ((inp_len + 7 - j) >> (sizeof(j) * 8 - 1));
                data->u[SHA_LBLOCK - 1] |= bitlen & mask;
                sha256_block_data_order(&key->md, data, 1);
                mask &= 0 - ((j - inp_len - 72) >> (sizeof(j) * 8 - 1));
                pmac->u[0] |= key->md.h[0] & mask;
                pmac->u[1] |= key->md.h[1] & mask;
                pmac->u[2] |= key->md.h[2] & mask;
                pmac->u[3] |= key->md.h[3] & mask;
                pmac->u[4] |= key->md.h[4] & mask;
                pmac->u[5] |= key->md.h[5] & mask;
                pmac->u[6] |= key->md.h[6] & mask;
                pmac->u[7] |= key->md.h[7] & mask;
                res = 0;
            }
            for (i = res; i < SHA256_CBLOCK; i++, j++)
                data->c[i] = 0;
            if (res > SHA256_CBLOCK - 8) {
                mask = 0 - ((inp_len + 8 - j) >> (sizeof(j) * 8 - 1));
                data->u[SHA_LBLOCK - 1] |= bitlen & mask;
                sha256_block_data_order(&key->md, data, 1);
                mask &= 0 - ((j - inp_len - 73) >> (sizeof(j) * 8 - 1));
                pmac->u[0] |= key->md.h[0] & mask;
                pmac->u[1] |= key->md.h[1] & mask;
                pmac->u[2] |= key->md.h[2] & mask;
                pmac->u[3] |= key->md.h[3] & mask;
                pmac->u[4] |= key->md.h[4] & mask;
                pmac->u[5] |= key->md.h[5] & mask;
                pmac->u[6] |= key->md.h[6] & mask;
                pmac->u[7] |= key->md.h[7] & mask;

                memset(data, 0, SHA256_CBLOCK);
                j += 64;
            }
            data->u[SHA_LBLOCK - 1] = bitlen;
            sha256_block_data_order(&key->md, data, 1);
            mask = 0 - ((j - inp_len - 73) >> (sizeof(j) * 8 - 1));
            pmac->u[0] |= key->md.h[0] & mask;
            pmac->u[1] |= key->md.h[1] & mask;
            pmac->u[2] |= key->md.h[2] & mask;
            pmac->u[3] |= key->md.h[3] & mask;
            pmac->u[4] |= key->md.h[4] & mask;
            pmac->u[5] |= key->md.h[5] & mask;
            pmac->u[6] |= key->md.h[6] & mask;
            pmac->u[7] |= key->md.h[7] & mask;
#   ifdef BSWAP4
            pmac->u[0] = BSWAP4(pmac->u[0]);
            pmac->u[1] = BSWAP4(pmac->u[1]);
            pmac->u[2] = BSWAP4(pmac->u[2]);
            pmac->u[3] = BSWAP4(pmac->u[3]);
            pmac->u[4] = BSWAP4(pmac->u[4]);
            pmac->u[5] = BSWAP4(pmac->u[5]);
            pmac->u[6] = BSWAP4(pmac->u[6]);
            pmac->u[7] = BSWAP4(pmac->u[7]);
#   else
            for (i = 0; i < 8; i++) {
                res = pmac->u[i];
                pmac->c[4 * i + 0] = (unsigned char)(res >> 24);
                pmac->c[4 * i + 1] = (unsigned char)(res >> 16);
                pmac->c[4 * i + 2] = (unsigned char)(res >> 8);
                pmac->c[4 * i + 3] = (unsigned char)res;
            }
#   endif
            len += SHA256_DIGEST_LENGTH;
#  else
            SHA256_Update(&key->md, out, inp_len);
            res = key->md.num;
            SHA256_Final(pmac->c, &key->md);

            {
                unsigned int inp_blocks, pad_blocks;

                /* but pretend as if we hashed padded payload */
                inp_blocks =
                    1 + ((SHA256_CBLOCK - 9 - res) >> (sizeof(res) * 8 - 1));
                res += (unsigned int)(len - inp_len);
                pad_blocks = res / SHA256_CBLOCK;
                res %= SHA256_CBLOCK;
                pad_blocks +=
                    1 + ((SHA256_CBLOCK - 9 - res) >> (sizeof(res) * 8 - 1));
                for (; inp_blocks < pad_blocks; inp_blocks++)
                    sha256_block_data_order(&key->md, data, 1);
            }
#  endif
            key->md = key->tail;
            SHA256_Update(&key->md, pmac->c, SHA256_DIGEST_LENGTH);
            SHA256_Final(pmac->c, &key->md);

            /* verify HMAC */
            out += inp_len;
            len -= inp_len;
#  if 1
            {
                unsigned char *p =
                    out + len - 1 - maxpad - SHA256_DIGEST_LENGTH;
                size_t off = out - p;
                unsigned int c, cmask;

                maxpad += SHA256_DIGEST_LENGTH;
                for (res = 0, i = 0, j = 0; j < maxpad; j++) {
                    c = p[j];
                    cmask =
                        ((int)(j - off - SHA256_DIGEST_LENGTH)) >>
                        (sizeof(int) * 8 - 1);
                    res |= (c ^ pad) & ~cmask; /* ... and padding */
                    cmask &= ((int)(off - 1 - j)) >> (sizeof(int) * 8 - 1);
                    res |= (c ^ pmac->c[i]) & cmask;
                    i += 1 & cmask;
                }
                maxpad -= SHA256_DIGEST_LENGTH;

                res = 0 - ((0 - res) >> (sizeof(res) * 8 - 1));
                ret &= (int)~res;
            }
#  else
            for (res = 0, i = 0; i < SHA256_DIGEST_LENGTH; i++)
                res |= out[i] ^ pmac->c[i];
            res = 0 - ((0 - res) >> (sizeof(res) * 8 - 1));
            ret &= (int)~res;

            /* verify padding */
            pad = (pad & ~res) | (maxpad & res);
            out = out + len - 1 - pad;
            for (res = 0, i = 0; i < pad; i++)
                res |= out[i] ^ pad;

            res = (0 - res) >> (sizeof(res) * 8 - 1);
            ret &= (int)~res;
#  endif
            return ret;
        } else {
            SHA256_Update(&key->md, out, len);
        }
    }

    return 1;
}
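
/*
 * The decrypt path above leans on a branch-free select idiom used
 * throughout this file; a minimal sketch of the trick (names here are
 * illustrative only):
 *
 *     mask = 0 - ((a - b) >> (sizeof(a) * 8 - 1));  // a < b ? all-ones : 0
 *     r = (x & mask) | (y & ~mask);                 // select without a branch
 *
 * The sign bit of an unsigned subtraction is smeared into a full-width
 * mask, so comparisons against secret values never change the
 * instruction stream.
 */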
static int aesni_cbc_hmac_sha256_ctrl(EVP_CIPHER_CTX *ctx, int type, int arg,
                                      void *ptr)
{
    EVP_AES_HMAC_SHA256 *key = data(ctx);

    switch (type) {
    case EVP_CTRL_AEAD_SET_MAC_KEY:
        {
            unsigned int i;
            unsigned char hmac_key[64];

            memset(hmac_key, 0, sizeof(hmac_key));

            if (arg > (int)sizeof(hmac_key)) {
                SHA256_Init(&key->head);
                SHA256_Update(&key->head, ptr, arg);
                SHA256_Final(hmac_key, &key->head);
            } else {
                memcpy(hmac_key, ptr, arg);
            }

            for (i = 0; i < sizeof(hmac_key); i++)
                hmac_key[i] ^= 0x36; /* ipad */
            SHA256_Init(&key->head);
            SHA256_Update(&key->head, hmac_key, sizeof(hmac_key));

            for (i = 0; i < sizeof(hmac_key); i++)
                hmac_key[i] ^= 0x36 ^ 0x5c; /* opad */
            SHA256_Init(&key->tail);
            SHA256_Update(&key->tail, hmac_key, sizeof(hmac_key));

            OPENSSL_cleanse(hmac_key, sizeof(hmac_key));

            return 1;
        }
    case EVP_CTRL_AEAD_TLS1_AAD:
        {
            unsigned char *p = ptr;
            unsigned int len;

            if (arg != EVP_AEAD_TLS1_AAD_LEN)
                return -1;

            len = p[arg - 2] << 8 | p[arg - 1];

            if (ctx->encrypt) {
                key->payload_length = len;
                if ((key->aux.tls_ver =
                     p[arg - 4] << 8 | p[arg - 3]) >= TLS1_1_VERSION) {
                    len -= AES_BLOCK_SIZE;
                    p[arg - 2] = len >> 8;
                    p[arg - 1] = len;
                }
                key->md = key->head;
                SHA256_Update(&key->md, p, arg);

                return (int)(((len + SHA256_DIGEST_LENGTH +
                               AES_BLOCK_SIZE) & -AES_BLOCK_SIZE)
                             - len);
            } else {
                memcpy(key->aux.tls_aad, ptr, arg);
                key->payload_length = arg;

                return SHA256_DIGEST_LENGTH;
            }
        }
#  if !defined(OPENSSL_NO_MULTIBLOCK) && EVP_CIPH_FLAG_TLS1_1_MULTIBLOCK
    case EVP_CTRL_TLS1_1_MULTIBLOCK_MAX_BUFSIZE:
        return (int)(5 + 16 + ((arg + 32 + 16) & -16));
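    /*
     * Worked example (illustrative): for a maximal arg of 16384 this
     * yields 5 + 16 + ((16384 + 32 + 16) & -16) = 5 + 16 + 16432 =
     * 16453 bytes per fragment: header, explicit IV, and payload+MAC
     * rounded up to the AES block size.
     */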
    case EVP_CTRL_TLS1_1_MULTIBLOCK_AAD:
        {
            EVP_CTRL_TLS1_1_MULTIBLOCK_PARAM *param =
                (EVP_CTRL_TLS1_1_MULTIBLOCK_PARAM *) ptr;
            unsigned int n4x = 1, x4;
            unsigned int frag, last, packlen, inp_len;

            if (arg < (int)sizeof(EVP_CTRL_TLS1_1_MULTIBLOCK_PARAM))
                return -1;

            inp_len = param->inp[11] << 8 | param->inp[12];

            if (ctx->encrypt) {
                if ((param->inp[9] << 8 | param->inp[10]) < TLS1_1_VERSION)
                    return -1;

                if (inp_len) {
                    if (inp_len < 4096)
                        return 0; /* too short */

                    if (inp_len >= 8192 && OPENSSL_ia32cap_P[2] & (1 << 5))
                        n4x = 2; /* AVX2 */
                } else if ((n4x = param->interleave / 4) && n4x <= 2)
                    inp_len = param->len;
                else
                    return -1;

                key->md = key->head;
                SHA256_Update(&key->md, param->inp, 13);

                x4 = 4 * n4x;
                n4x += 1;

                frag = inp_len >> n4x;
                last = inp_len + frag - (frag << n4x);
                if (last > frag && ((last + 13 + 9) % 64 < (x4 - 1))) {
                    frag++;
                    last -= x4 - 1;
                }

                packlen = 5 + 16 + ((frag + 32 + 16) & -16);
                packlen = (packlen << n4x) - packlen;
                packlen += 5 + 16 + ((last + 32 + 16) & -16);

                param->interleave = x4;

                return (int)packlen;
            } else
                return -1;      /* not yet */
        }
    case EVP_CTRL_TLS1_1_MULTIBLOCK_ENCRYPT:
        {
            EVP_CTRL_TLS1_1_MULTIBLOCK_PARAM *param =
                (EVP_CTRL_TLS1_1_MULTIBLOCK_PARAM *) ptr;

            return (int)tls1_1_multi_block_encrypt(key, param->out,
                                                   param->inp, param->len,
                                                   param->interleave / 4);
        }
    case EVP_CTRL_TLS1_1_MULTIBLOCK_DECRYPT:
#  endif
    default:
        return -1;
    }
}
static EVP_CIPHER aesni_128_cbc_hmac_sha256_cipher = {
#  ifdef NID_aes_128_cbc_hmac_sha256
    NID_aes_128_cbc_hmac_sha256,
#  else
    NID_undef,
#  endif
    16, 16, 16,
    EVP_CIPH_CBC_MODE | EVP_CIPH_FLAG_DEFAULT_ASN1 |
        EVP_CIPH_FLAG_AEAD_CIPHER | EVP_CIPH_FLAG_TLS1_1_MULTIBLOCK,
    aesni_cbc_hmac_sha256_init_key,
    aesni_cbc_hmac_sha256_cipher,
    NULL,
    sizeof(EVP_AES_HMAC_SHA256),
    EVP_CIPH_FLAG_DEFAULT_ASN1 ? NULL : EVP_CIPHER_set_asn1_iv,
    EVP_CIPH_FLAG_DEFAULT_ASN1 ? NULL : EVP_CIPHER_get_asn1_iv,
    aesni_cbc_hmac_sha256_ctrl,
    NULL
};
static EVP_CIPHER aesni_256_cbc_hmac_sha256_cipher = {
#  ifdef NID_aes_256_cbc_hmac_sha256
    NID_aes_256_cbc_hmac_sha256,
#  else
    NID_undef,
#  endif
    16, 32, 16,
    EVP_CIPH_CBC_MODE | EVP_CIPH_FLAG_DEFAULT_ASN1 |
        EVP_CIPH_FLAG_AEAD_CIPHER | EVP_CIPH_FLAG_TLS1_1_MULTIBLOCK,
    aesni_cbc_hmac_sha256_init_key,
    aesni_cbc_hmac_sha256_cipher,
    NULL,
    sizeof(EVP_AES_HMAC_SHA256),
    EVP_CIPH_FLAG_DEFAULT_ASN1 ? NULL : EVP_CIPHER_set_asn1_iv,
    EVP_CIPH_FLAG_DEFAULT_ASN1 ? NULL : EVP_CIPHER_get_asn1_iv,
    aesni_cbc_hmac_sha256_ctrl,
    NULL
};
const EVP_CIPHER *EVP_aes_128_cbc_hmac_sha256(void)
{
    return ((OPENSSL_ia32cap_P[1] & AESNI_CAPABLE) &&
            aesni_cbc_sha256_enc(NULL, NULL, 0, NULL, NULL, NULL, NULL) ?
            &aesni_128_cbc_hmac_sha256_cipher : NULL);
}

const EVP_CIPHER *EVP_aes_256_cbc_hmac_sha256(void)
{
    return ((OPENSSL_ia32cap_P[1] & AESNI_CAPABLE) &&
            aesni_cbc_sha256_enc(NULL, NULL, 0, NULL, NULL, NULL, NULL) ?
            &aesni_256_cbc_hmac_sha256_cipher : NULL);
}
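
/*
 * Minimal usage sketch (TLS-style caller assumed; the 13-byte AAD
 * layout -- 8-byte sequence number, type, version, length -- and all
 * key/buffer variables below are the caller's responsibility and are
 * illustrative only):
 *
 *     EVP_CIPHER_CTX ctx;
 *     int pad;
 *     EVP_CIPHER_CTX_init(&ctx);
 *     EVP_EncryptInit_ex(&ctx, EVP_aes_128_cbc_hmac_sha256(), NULL,
 *                        enc_key, iv);
 *     EVP_CIPHER_CTX_ctrl(&ctx, EVP_CTRL_AEAD_SET_MAC_KEY,
 *                         mac_key_len, mac_key);
 *     pad = EVP_CIPHER_CTX_ctrl(&ctx, EVP_CTRL_AEAD_TLS1_AAD, 13, aad);
 *     EVP_Cipher(&ctx, out, in, plen + pad);  // appends MAC and padding
 *
 * Both getters return NULL when AES-NI or the stitched SHA-256 code
 * is unavailable, so callers must check the result before use.
 */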
# else
const EVP_CIPHER *EVP_aes_128_cbc_hmac_sha256(void)
{
    return NULL;
}

const EVP_CIPHER *EVP_aes_256_cbc_hmac_sha256(void)
{
    return NULL;
}
# endif
#endif