4 * The contents of this file are subject to the terms of the
5 * Common Development and Distribution License (the "License").
6 * You may not use this file except in compliance with the License.
8 * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
9 * or http://www.opensolaris.org/os/licensing.
10 * See the License for the specific language governing permissions
11 * and limitations under the License.
13 * When distributing Covered Code, include this CDDL HEADER in each
14 * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
15 * If applicable, add the following below this CDDL HEADER, with the
16 * fields enclosed by brackets "[]" replaced with your own identifying
17 * information: Portions Copyright [yyyy] [name of copyright owner]
22 * Copyright (c) 2003, 2010, Oracle and/or its affiliates. All rights reserved.
23 * Copyright 2017 Nexenta Systems, Inc. All rights reserved.
27 * AES provider for the Kernel Cryptographic Framework (KCF)
30 #include <sys/types.h>
31 #include <sys/systm.h>
32 #include <sys/modctl.h>
33 #include <sys/cmn_err.h>
35 #include <sys/crypto/common.h>
36 #include <sys/crypto/impl.h>
37 #include <sys/crypto/spi.h>
38 #include <sys/sysmacros.h>
39 #include <sys/strsun.h>
40 #include <modes/modes.h>
42 #include <aes/aes_impl.h>
/* Crypto module-class operations vector, supplied by the KCF framework. */
extern struct mod_ops mod_cryptoops;
47 * Module linkage information for the kernel.
49 static struct modlcrypto modlcrypto
= {
51 "AES Kernel SW Provider"
54 static struct modlinkage modlinkage
= {
61 * Mechanism info structure passed to KCF during registration.
63 static crypto_mech_info_t aes_mech_info_tab
[] = {
65 {SUN_CKM_AES_ECB
, AES_ECB_MECH_INFO_TYPE
,
66 CRYPTO_FG_ENCRYPT
| CRYPTO_FG_ENCRYPT_ATOMIC
|
67 CRYPTO_FG_DECRYPT
| CRYPTO_FG_DECRYPT_ATOMIC
,
68 AES_MIN_KEY_BYTES
, AES_MAX_KEY_BYTES
, CRYPTO_KEYSIZE_UNIT_IN_BYTES
},
70 {SUN_CKM_AES_CBC
, AES_CBC_MECH_INFO_TYPE
,
71 CRYPTO_FG_ENCRYPT
| CRYPTO_FG_ENCRYPT_ATOMIC
|
72 CRYPTO_FG_DECRYPT
| CRYPTO_FG_DECRYPT_ATOMIC
,
73 AES_MIN_KEY_BYTES
, AES_MAX_KEY_BYTES
, CRYPTO_KEYSIZE_UNIT_IN_BYTES
},
75 {SUN_CKM_AES_CMAC
, AES_CMAC_MECH_INFO_TYPE
,
76 CRYPTO_FG_ENCRYPT
| CRYPTO_FG_ENCRYPT_ATOMIC
|
77 CRYPTO_FG_MAC
| CRYPTO_FG_MAC_ATOMIC
|
78 AES_MIN_KEY_BYTES
, AES_MAX_KEY_BYTES
, CRYPTO_KEYSIZE_UNIT_IN_BYTES
},
80 {SUN_CKM_AES_CTR
, AES_CTR_MECH_INFO_TYPE
,
81 CRYPTO_FG_ENCRYPT
| CRYPTO_FG_ENCRYPT_ATOMIC
|
82 CRYPTO_FG_DECRYPT
| CRYPTO_FG_DECRYPT_ATOMIC
,
83 AES_MIN_KEY_BYTES
, AES_MAX_KEY_BYTES
, CRYPTO_KEYSIZE_UNIT_IN_BYTES
},
85 {SUN_CKM_AES_CCM
, AES_CCM_MECH_INFO_TYPE
,
86 CRYPTO_FG_ENCRYPT
| CRYPTO_FG_ENCRYPT_ATOMIC
|
87 CRYPTO_FG_DECRYPT
| CRYPTO_FG_DECRYPT_ATOMIC
,
88 AES_MIN_KEY_BYTES
, AES_MAX_KEY_BYTES
, CRYPTO_KEYSIZE_UNIT_IN_BYTES
},
90 {SUN_CKM_AES_GCM
, AES_GCM_MECH_INFO_TYPE
,
91 CRYPTO_FG_ENCRYPT
| CRYPTO_FG_ENCRYPT_ATOMIC
|
92 CRYPTO_FG_DECRYPT
| CRYPTO_FG_DECRYPT_ATOMIC
,
93 AES_MIN_KEY_BYTES
, AES_MAX_KEY_BYTES
, CRYPTO_KEYSIZE_UNIT_IN_BYTES
},
95 {SUN_CKM_AES_GMAC
, AES_GMAC_MECH_INFO_TYPE
,
96 CRYPTO_FG_ENCRYPT
| CRYPTO_FG_ENCRYPT_ATOMIC
|
97 CRYPTO_FG_DECRYPT
| CRYPTO_FG_DECRYPT_ATOMIC
|
98 CRYPTO_FG_MAC
| CRYPTO_FG_MAC_ATOMIC
|
99 CRYPTO_FG_SIGN
| CRYPTO_FG_SIGN_ATOMIC
|
100 CRYPTO_FG_VERIFY
| CRYPTO_FG_VERIFY_ATOMIC
,
101 AES_MIN_KEY_BYTES
, AES_MAX_KEY_BYTES
, CRYPTO_KEYSIZE_UNIT_IN_BYTES
}
/*
 * Operations are in-place if the output buffer is NULL: point the output
 * argument at the input so the caller's buffer is used for both.
 * Wrapped in do/while (0) so the macro expands to a single statement and
 * cannot capture a following "else" (dangling-else hazard of a bare "if").
 */
#define	AES_ARG_INPLACE(input, output)				\
	do {							\
		if ((output) == NULL)				\
			(output) = (input);			\
	} while (0)
109 static void aes_provider_status(crypto_provider_handle_t
, uint_t
*);
111 static crypto_control_ops_t aes_control_ops
= {
115 static int aes_encrypt_init(crypto_ctx_t
*, crypto_mechanism_t
*,
116 crypto_key_t
*, crypto_spi_ctx_template_t
, crypto_req_handle_t
);
117 static int aes_decrypt_init(crypto_ctx_t
*, crypto_mechanism_t
*,
118 crypto_key_t
*, crypto_spi_ctx_template_t
, crypto_req_handle_t
);
119 static int aes_common_init(crypto_ctx_t
*, crypto_mechanism_t
*,
120 crypto_key_t
*, crypto_spi_ctx_template_t
, crypto_req_handle_t
, boolean_t
);
121 static int aes_common_init_ctx(aes_ctx_t
*, crypto_spi_ctx_template_t
*,
122 crypto_mechanism_t
*, crypto_key_t
*, int, boolean_t
);
123 static int aes_encrypt_final(crypto_ctx_t
*, crypto_data_t
*,
124 crypto_req_handle_t
);
125 static int aes_decrypt_final(crypto_ctx_t
*, crypto_data_t
*,
126 crypto_req_handle_t
);
128 static int aes_encrypt(crypto_ctx_t
*, crypto_data_t
*, crypto_data_t
*,
129 crypto_req_handle_t
);
130 static int aes_encrypt_update(crypto_ctx_t
*, crypto_data_t
*,
131 crypto_data_t
*, crypto_req_handle_t
);
132 static int aes_encrypt_atomic(crypto_provider_handle_t
, crypto_session_id_t
,
133 crypto_mechanism_t
*, crypto_key_t
*, crypto_data_t
*,
134 crypto_data_t
*, crypto_spi_ctx_template_t
, crypto_req_handle_t
);
136 static int aes_decrypt(crypto_ctx_t
*, crypto_data_t
*, crypto_data_t
*,
137 crypto_req_handle_t
);
138 static int aes_decrypt_update(crypto_ctx_t
*, crypto_data_t
*,
139 crypto_data_t
*, crypto_req_handle_t
);
140 static int aes_decrypt_atomic(crypto_provider_handle_t
, crypto_session_id_t
,
141 crypto_mechanism_t
*, crypto_key_t
*, crypto_data_t
*,
142 crypto_data_t
*, crypto_spi_ctx_template_t
, crypto_req_handle_t
);
144 static crypto_cipher_ops_t aes_cipher_ops
= {
157 static int aes_mac_init(crypto_ctx_t
*, crypto_mechanism_t
*,
158 crypto_key_t
*, crypto_spi_ctx_template_t
, crypto_req_handle_t
);
159 static int aes_mac(crypto_ctx_t
*, crypto_data_t
*, crypto_data_t
*,
160 crypto_req_handle_t
);
161 static int aes_mac_update(crypto_ctx_t
*, crypto_data_t
*,
162 crypto_req_handle_t
);
163 static int aes_mac_final(crypto_ctx_t
*, crypto_data_t
*,
164 crypto_req_handle_t
);
165 static int aes_mac_atomic(crypto_provider_handle_t
, crypto_session_id_t
,
166 crypto_mechanism_t
*, crypto_key_t
*, crypto_data_t
*, crypto_data_t
*,
167 crypto_spi_ctx_template_t
, crypto_req_handle_t
);
168 static int aes_mac_verify_atomic(crypto_provider_handle_t
, crypto_session_id_t
,
169 crypto_mechanism_t
*, crypto_key_t
*, crypto_data_t
*, crypto_data_t
*,
170 crypto_spi_ctx_template_t
, crypto_req_handle_t
);
172 static crypto_mac_ops_t aes_mac_ops
= {
178 aes_mac_verify_atomic
181 static int aes_create_ctx_template(crypto_provider_handle_t
,
182 crypto_mechanism_t
*, crypto_key_t
*, crypto_spi_ctx_template_t
*,
183 size_t *, crypto_req_handle_t
);
184 static int aes_free_context(crypto_ctx_t
*);
186 static crypto_ctx_ops_t aes_ctx_ops
= {
187 aes_create_ctx_template
,
191 static crypto_ops_t aes_crypto_ops
= {
211 static crypto_provider_info_t aes_prov_info
= {
212 CRYPTO_SPI_VERSION_4
,
213 "AES Software Provider",
218 sizeof (aes_mech_info_tab
)/sizeof (crypto_mech_info_t
),
222 static crypto_kcf_provider_handle_t aes_prov_handle
= NULL
;
223 static crypto_data_t null_crypto_data
= { CRYPTO_DATA_RAW
};
230 if ((ret
= mod_install(&modlinkage
)) != 0)
233 /* Register with KCF. If the registration fails, remove the module. */
234 if (crypto_register_provider(&aes_prov_info
, &aes_prov_handle
)) {
235 (void) mod_remove(&modlinkage
);
245 /* Unregister from KCF if module is registered */
246 if (aes_prov_handle
!= NULL
) {
247 if (crypto_unregister_provider(aes_prov_handle
))
250 aes_prov_handle
= NULL
;
253 return (mod_remove(&modlinkage
));
257 _info(struct modinfo
*modinfop
)
259 return (mod_info(&modlinkage
, modinfop
));
264 aes_check_mech_param(crypto_mechanism_t
*mechanism
, aes_ctx_t
**ctx
, int kmflag
)
267 boolean_t param_required
= B_TRUE
;
269 void *(*alloc_fun
)(int);
270 int rv
= CRYPTO_SUCCESS
;
272 switch (mechanism
->cm_type
) {
273 case AES_ECB_MECH_INFO_TYPE
:
274 param_required
= B_FALSE
;
275 alloc_fun
= ecb_alloc_ctx
;
277 case AES_CBC_MECH_INFO_TYPE
:
278 param_len
= AES_BLOCK_LEN
;
279 alloc_fun
= cbc_alloc_ctx
;
281 case AES_CMAC_MECH_INFO_TYPE
:
282 param_required
= B_FALSE
;
283 alloc_fun
= cmac_alloc_ctx
;
285 case AES_CTR_MECH_INFO_TYPE
:
286 param_len
= sizeof (CK_AES_CTR_PARAMS
);
287 alloc_fun
= ctr_alloc_ctx
;
289 case AES_CCM_MECH_INFO_TYPE
:
290 param_len
= sizeof (CK_AES_CCM_PARAMS
);
291 alloc_fun
= ccm_alloc_ctx
;
293 case AES_GCM_MECH_INFO_TYPE
:
294 param_len
= sizeof (CK_AES_GCM_PARAMS
);
295 alloc_fun
= gcm_alloc_ctx
;
297 case AES_GMAC_MECH_INFO_TYPE
:
298 param_len
= sizeof (CK_AES_GMAC_PARAMS
);
299 alloc_fun
= gmac_alloc_ctx
;
302 rv
= CRYPTO_MECHANISM_INVALID
;
305 if (param_required
&& mechanism
->cm_param
!= NULL
&&
306 mechanism
->cm_param_len
!= param_len
) {
307 rv
= CRYPTO_MECHANISM_PARAM_INVALID
;
310 p
= (alloc_fun
)(kmflag
);
317 * Initialize key schedules for AES
320 init_keysched(crypto_key_t
*key
, void *newbie
)
323 * Only keys by value are supported by this module.
325 switch (key
->ck_format
) {
327 if (key
->ck_length
< AES_MINBITS
||
328 key
->ck_length
> AES_MAXBITS
) {
329 return (CRYPTO_KEY_SIZE_RANGE
);
332 /* key length must be either 128, 192, or 256 */
333 if ((key
->ck_length
& 63) != 0)
334 return (CRYPTO_KEY_SIZE_RANGE
);
337 return (CRYPTO_KEY_TYPE_INCONSISTENT
);
340 aes_init_keysched(key
->ck_data
, key
->ck_length
, newbie
);
341 return (CRYPTO_SUCCESS
);
345 * KCF software provider control entry points.
349 aes_provider_status(crypto_provider_handle_t provider
, uint_t
*status
)
351 *status
= CRYPTO_PROVIDER_READY
;
355 aes_encrypt_init(crypto_ctx_t
*ctx
, crypto_mechanism_t
*mechanism
,
356 crypto_key_t
*key
, crypto_spi_ctx_template_t
template,
357 crypto_req_handle_t req
)
359 return (aes_common_init(ctx
, mechanism
, key
, template, req
, B_TRUE
));
363 aes_decrypt_init(crypto_ctx_t
*ctx
, crypto_mechanism_t
*mechanism
,
364 crypto_key_t
*key
, crypto_spi_ctx_template_t
template,
365 crypto_req_handle_t req
)
367 return (aes_common_init(ctx
, mechanism
, key
, template, req
, B_FALSE
));
373 * KCF software provider encrypt entry points.
376 aes_common_init(crypto_ctx_t
*ctx
, crypto_mechanism_t
*mechanism
,
377 crypto_key_t
*key
, crypto_spi_ctx_template_t
template,
378 crypto_req_handle_t req
, boolean_t is_encrypt_init
)
385 * Only keys by value are supported by this module.
387 if (key
->ck_format
!= CRYPTO_KEY_RAW
) {
388 return (CRYPTO_KEY_TYPE_INCONSISTENT
);
391 kmflag
= crypto_kmflag(req
);
392 if ((rv
= aes_check_mech_param(mechanism
, &aes_ctx
, kmflag
))
396 rv
= aes_common_init_ctx(aes_ctx
, template, mechanism
, key
, kmflag
,
398 if (rv
!= CRYPTO_SUCCESS
) {
399 crypto_free_mode_ctx(aes_ctx
);
403 ctx
->cc_provider_private
= aes_ctx
;
405 return (CRYPTO_SUCCESS
);
409 aes_copy_block64(uint8_t *in
, uint64_t *out
)
411 if (IS_P2ALIGNED(in
, sizeof (uint64_t))) {
412 /* LINTED: pointer alignment */
413 out
[0] = *(uint64_t *)&in
[0];
414 /* LINTED: pointer alignment */
415 out
[1] = *(uint64_t *)&in
[8];
417 uint8_t *iv8
= (uint8_t *)&out
[0];
419 AES_COPY_BLOCK(in
, iv8
);
425 aes_encrypt(crypto_ctx_t
*ctx
, crypto_data_t
*plaintext
,
426 crypto_data_t
*ciphertext
, crypto_req_handle_t req
)
428 int ret
= CRYPTO_FAILED
;
431 size_t saved_length
, saved_offset
, length_needed
;
433 ASSERT(ctx
->cc_provider_private
!= NULL
);
434 aes_ctx
= ctx
->cc_provider_private
;
437 * For block ciphers, plaintext must be a multiple of AES block size.
438 * This test is only valid for ciphers whose blocksize is a power of 2.
440 if (((aes_ctx
->ac_flags
& (CMAC_MODE
|CTR_MODE
|CCM_MODE
|
441 GCM_MODE
|GMAC_MODE
)) == 0) &&
442 (plaintext
->cd_length
& (AES_BLOCK_LEN
- 1)) != 0)
443 return (CRYPTO_DATA_LEN_RANGE
);
445 AES_ARG_INPLACE(plaintext
, ciphertext
);
448 * We need to just return the length needed to store the output.
449 * We should not destroy the context for the following case.
451 switch (aes_ctx
->ac_flags
& (CMAC_MODE
|CCM_MODE
|GCM_MODE
|GMAC_MODE
)) {
453 length_needed
= plaintext
->cd_length
+ aes_ctx
->ac_mac_len
;
456 length_needed
= plaintext
->cd_length
+ aes_ctx
->ac_tag_len
;
459 length_needed
= AES_BLOCK_LEN
;
462 if (plaintext
->cd_length
!= 0)
463 return (CRYPTO_ARGUMENTS_BAD
);
465 length_needed
= aes_ctx
->ac_tag_len
;
468 length_needed
= plaintext
->cd_length
;
471 if (ciphertext
->cd_length
< length_needed
) {
472 ciphertext
->cd_length
= length_needed
;
473 return (CRYPTO_BUFFER_TOO_SMALL
);
476 saved_length
= ciphertext
->cd_length
;
477 saved_offset
= ciphertext
->cd_offset
;
480 * Do an update on the specified input data.
482 ret
= aes_encrypt_update(ctx
, plaintext
, ciphertext
, req
);
483 if (ret
!= CRYPTO_SUCCESS
) {
488 * For CCM mode, aes_ccm_encrypt_final() will take care of any
489 * left-over unprocessed data, and compute the MAC
491 if (aes_ctx
->ac_flags
& CCM_MODE
) {
493 * ccm_encrypt_final() will compute the MAC and append
494 * it to existing ciphertext. So, need to adjust the left over
495 * length value accordingly
498 /* order of following 2 lines MUST not be reversed */
499 ciphertext
->cd_offset
= ciphertext
->cd_length
;
500 ciphertext
->cd_length
= saved_length
- ciphertext
->cd_length
;
501 ret
= ccm_encrypt_final((ccm_ctx_t
*)aes_ctx
, ciphertext
,
502 AES_BLOCK_LEN
, aes_encrypt_block
, aes_xor_block
);
503 if (ret
!= CRYPTO_SUCCESS
) {
507 if (plaintext
!= ciphertext
) {
508 ciphertext
->cd_length
=
509 ciphertext
->cd_offset
- saved_offset
;
511 ciphertext
->cd_offset
= saved_offset
;
512 } else if (aes_ctx
->ac_flags
& (GCM_MODE
|GMAC_MODE
)) {
514 * gcm_encrypt_final() will compute the MAC and append
515 * it to existing ciphertext. So, need to adjust the left over
516 * length value accordingly
519 /* order of following 2 lines MUST not be reversed */
520 ciphertext
->cd_offset
= ciphertext
->cd_length
;
521 ciphertext
->cd_length
= saved_length
- ciphertext
->cd_length
;
522 ret
= gcm_encrypt_final((gcm_ctx_t
*)aes_ctx
, ciphertext
,
523 AES_BLOCK_LEN
, aes_encrypt_block
, aes_copy_block
,
525 if (ret
!= CRYPTO_SUCCESS
) {
529 if (plaintext
!= ciphertext
) {
530 ciphertext
->cd_length
=
531 ciphertext
->cd_offset
- saved_offset
;
533 ciphertext
->cd_offset
= saved_offset
;
534 } else if (aes_ctx
->ac_flags
& CMAC_MODE
) {
535 /* cmac_update doesn't store data */
536 ciphertext
->cd_length
= saved_length
;
537 ret
= cmac_mode_final((cbc_ctx_t
*)aes_ctx
, ciphertext
,
538 aes_encrypt_block
, aes_xor_block
);
539 aes_ctx
->ac_remainder_len
= 0;
542 ASSERT(aes_ctx
->ac_remainder_len
== 0);
543 (void) aes_free_context(ctx
);
550 aes_decrypt(crypto_ctx_t
*ctx
, crypto_data_t
*ciphertext
,
551 crypto_data_t
*plaintext
, crypto_req_handle_t req
)
553 int ret
= CRYPTO_FAILED
;
557 size_t saved_length
, length_needed
;
559 ASSERT(ctx
->cc_provider_private
!= NULL
);
560 aes_ctx
= ctx
->cc_provider_private
;
563 * For block ciphers, plaintext must be a multiple of AES block size.
564 * This test is only valid for ciphers whose blocksize is a power of 2.
566 if (((aes_ctx
->ac_flags
& (CTR_MODE
|CCM_MODE
|GCM_MODE
|GMAC_MODE
))
567 == 0) && (ciphertext
->cd_length
& (AES_BLOCK_LEN
- 1)) != 0) {
568 return (CRYPTO_ENCRYPTED_DATA_LEN_RANGE
);
571 AES_ARG_INPLACE(ciphertext
, plaintext
);
574 * Return length needed to store the output.
575 * Do not destroy context when plaintext buffer is too small.
577 * CCM: plaintext is MAC len smaller than cipher text
578 * GCM: plaintext is TAG len smaller than cipher text
579 * GMAC: plaintext length must be zero
581 switch (aes_ctx
->ac_flags
& (CCM_MODE
|GCM_MODE
|GMAC_MODE
)) {
583 length_needed
= aes_ctx
->ac_processed_data_len
;
586 length_needed
= ciphertext
->cd_length
- aes_ctx
->ac_tag_len
;
589 if (plaintext
->cd_length
!= 0)
590 return (CRYPTO_ARGUMENTS_BAD
);
595 length_needed
= ciphertext
->cd_length
;
598 if (plaintext
->cd_length
< length_needed
) {
599 plaintext
->cd_length
= length_needed
;
600 return (CRYPTO_BUFFER_TOO_SMALL
);
603 saved_offset
= plaintext
->cd_offset
;
604 saved_length
= plaintext
->cd_length
;
607 * Do an update on the specified input data.
609 ret
= aes_decrypt_update(ctx
, ciphertext
, plaintext
, req
);
610 if (ret
!= CRYPTO_SUCCESS
) {
614 if (aes_ctx
->ac_flags
& CCM_MODE
) {
615 ASSERT(aes_ctx
->ac_processed_data_len
== aes_ctx
->ac_data_len
);
616 ASSERT(aes_ctx
->ac_processed_mac_len
== aes_ctx
->ac_mac_len
);
618 /* order of following 2 lines MUST not be reversed */
619 plaintext
->cd_offset
= plaintext
->cd_length
;
620 plaintext
->cd_length
= saved_length
- plaintext
->cd_length
;
622 ret
= ccm_decrypt_final((ccm_ctx_t
*)aes_ctx
, plaintext
,
623 AES_BLOCK_LEN
, aes_encrypt_block
, aes_copy_block
,
625 if (ret
== CRYPTO_SUCCESS
) {
626 if (plaintext
!= ciphertext
) {
627 plaintext
->cd_length
=
628 plaintext
->cd_offset
- saved_offset
;
631 plaintext
->cd_length
= saved_length
;
634 plaintext
->cd_offset
= saved_offset
;
635 } else if (aes_ctx
->ac_flags
& (GCM_MODE
|GMAC_MODE
)) {
636 /* order of following 2 lines MUST not be reversed */
637 plaintext
->cd_offset
= plaintext
->cd_length
;
638 plaintext
->cd_length
= saved_length
- plaintext
->cd_length
;
640 ret
= gcm_decrypt_final((gcm_ctx_t
*)aes_ctx
, plaintext
,
641 AES_BLOCK_LEN
, aes_encrypt_block
, aes_xor_block
);
642 if (ret
== CRYPTO_SUCCESS
) {
643 if (plaintext
!= ciphertext
) {
644 plaintext
->cd_length
=
645 plaintext
->cd_offset
- saved_offset
;
648 plaintext
->cd_length
= saved_length
;
651 plaintext
->cd_offset
= saved_offset
;
654 ASSERT(aes_ctx
->ac_remainder_len
== 0);
657 (void) aes_free_context(ctx
);
665 aes_encrypt_update(crypto_ctx_t
*ctx
, crypto_data_t
*plaintext
,
666 crypto_data_t
*ciphertext
, crypto_req_handle_t req
)
669 size_t saved_length
, out_len
;
670 int ret
= CRYPTO_SUCCESS
;
673 ASSERT(ctx
->cc_provider_private
!= NULL
);
674 aes_ctx
= ctx
->cc_provider_private
;
676 AES_ARG_INPLACE(plaintext
, ciphertext
);
678 /* compute number of bytes that will hold the ciphertext */
679 out_len
= aes_ctx
->ac_remainder_len
;
680 out_len
+= plaintext
->cd_length
;
681 out_len
&= ~(AES_BLOCK_LEN
- 1);
684 * return length needed to store the output.
685 * CMAC stores its output in a local buffer until *_final.
687 if ((aes_ctx
->ac_flags
& CMAC_MODE
) == 0 &&
688 ciphertext
->cd_length
< out_len
) {
689 ciphertext
->cd_length
= out_len
;
690 return (CRYPTO_BUFFER_TOO_SMALL
);
693 saved_offset
= ciphertext
->cd_offset
;
694 saved_length
= ciphertext
->cd_length
;
697 * Do the AES update on the specified input data.
699 switch (plaintext
->cd_format
) {
700 case CRYPTO_DATA_RAW
:
701 ret
= crypto_update_iov(ctx
->cc_provider_private
,
702 plaintext
, ciphertext
, aes_encrypt_contiguous_blocks
,
705 case CRYPTO_DATA_UIO
:
706 ret
= crypto_update_uio(ctx
->cc_provider_private
,
707 plaintext
, ciphertext
, aes_encrypt_contiguous_blocks
,
710 case CRYPTO_DATA_MBLK
:
711 ret
= crypto_update_mp(ctx
->cc_provider_private
,
712 plaintext
, ciphertext
, aes_encrypt_contiguous_blocks
,
716 ret
= CRYPTO_ARGUMENTS_BAD
;
720 * Since AES counter mode is a stream cipher, we call
721 * ctr_mode_final() to pick up any remaining bytes.
722 * It is an internal function that does not destroy
723 * the context like *normal* final routines.
725 if ((aes_ctx
->ac_flags
& CTR_MODE
) && (aes_ctx
->ac_remainder_len
> 0)) {
726 ret
= ctr_mode_final((ctr_ctx_t
*)aes_ctx
,
727 ciphertext
, aes_encrypt_block
);
730 if (ret
== CRYPTO_SUCCESS
) {
731 if (plaintext
!= ciphertext
)
732 ciphertext
->cd_length
=
733 ciphertext
->cd_offset
- saved_offset
;
735 ciphertext
->cd_length
= saved_length
;
737 ciphertext
->cd_offset
= saved_offset
;
744 aes_decrypt_update(crypto_ctx_t
*ctx
, crypto_data_t
*ciphertext
,
745 crypto_data_t
*plaintext
, crypto_req_handle_t req
)
748 size_t saved_length
, out_len
;
749 int ret
= CRYPTO_SUCCESS
;
752 ASSERT(ctx
->cc_provider_private
!= NULL
);
753 aes_ctx
= ctx
->cc_provider_private
;
755 AES_ARG_INPLACE(ciphertext
, plaintext
);
758 * Compute number of bytes that will hold the plaintext.
759 * This is not necessary for CCM, GCM, and GMAC since these
760 * mechanisms never return plaintext for update operations.
762 if ((aes_ctx
->ac_flags
& (CCM_MODE
|GCM_MODE
|GMAC_MODE
)) == 0) {
763 out_len
= aes_ctx
->ac_remainder_len
;
764 out_len
+= ciphertext
->cd_length
;
765 out_len
&= ~(AES_BLOCK_LEN
- 1);
767 /* return length needed to store the output */
768 if (plaintext
->cd_length
< out_len
) {
769 plaintext
->cd_length
= out_len
;
770 return (CRYPTO_BUFFER_TOO_SMALL
);
774 saved_offset
= plaintext
->cd_offset
;
775 saved_length
= plaintext
->cd_length
;
777 if (aes_ctx
->ac_flags
& (GCM_MODE
|GMAC_MODE
))
778 gcm_set_kmflag((gcm_ctx_t
*)aes_ctx
, crypto_kmflag(req
));
781 * Do the AES update on the specified input data.
783 switch (ciphertext
->cd_format
) {
784 case CRYPTO_DATA_RAW
:
785 ret
= crypto_update_iov(ctx
->cc_provider_private
,
786 ciphertext
, plaintext
, aes_decrypt_contiguous_blocks
,
789 case CRYPTO_DATA_UIO
:
790 ret
= crypto_update_uio(ctx
->cc_provider_private
,
791 ciphertext
, plaintext
, aes_decrypt_contiguous_blocks
,
794 case CRYPTO_DATA_MBLK
:
795 ret
= crypto_update_mp(ctx
->cc_provider_private
,
796 ciphertext
, plaintext
, aes_decrypt_contiguous_blocks
,
800 ret
= CRYPTO_ARGUMENTS_BAD
;
804 * Since AES counter mode is a stream cipher, we call
805 * ctr_mode_final() to pick up any remaining bytes.
806 * It is an internal function that does not destroy
807 * the context like *normal* final routines.
809 if ((aes_ctx
->ac_flags
& CTR_MODE
) && (aes_ctx
->ac_remainder_len
> 0)) {
810 ret
= ctr_mode_final((ctr_ctx_t
*)aes_ctx
, plaintext
,
812 if (ret
== CRYPTO_DATA_LEN_RANGE
)
813 ret
= CRYPTO_ENCRYPTED_DATA_LEN_RANGE
;
816 if (ret
== CRYPTO_SUCCESS
) {
817 if (ciphertext
!= plaintext
)
818 plaintext
->cd_length
=
819 plaintext
->cd_offset
- saved_offset
;
821 plaintext
->cd_length
= saved_length
;
823 plaintext
->cd_offset
= saved_offset
;
831 aes_encrypt_final(crypto_ctx_t
*ctx
, crypto_data_t
*data
,
832 crypto_req_handle_t req
)
837 ASSERT(ctx
->cc_provider_private
!= NULL
);
838 aes_ctx
= ctx
->cc_provider_private
;
840 if (data
->cd_format
!= CRYPTO_DATA_RAW
&&
841 data
->cd_format
!= CRYPTO_DATA_UIO
&&
842 data
->cd_format
!= CRYPTO_DATA_MBLK
) {
843 return (CRYPTO_ARGUMENTS_BAD
);
846 if (aes_ctx
->ac_flags
& CTR_MODE
) {
847 if (aes_ctx
->ac_remainder_len
> 0) {
848 ret
= ctr_mode_final((ctr_ctx_t
*)aes_ctx
, data
,
850 if (ret
!= CRYPTO_SUCCESS
)
853 } else if (aes_ctx
->ac_flags
& CCM_MODE
) {
854 ret
= ccm_encrypt_final((ccm_ctx_t
*)aes_ctx
, data
,
855 AES_BLOCK_LEN
, aes_encrypt_block
, aes_xor_block
);
856 if (ret
!= CRYPTO_SUCCESS
) {
859 } else if (aes_ctx
->ac_flags
& (GCM_MODE
|GMAC_MODE
)) {
860 size_t saved_offset
= data
->cd_offset
;
862 ret
= gcm_encrypt_final((gcm_ctx_t
*)aes_ctx
, data
,
863 AES_BLOCK_LEN
, aes_encrypt_block
, aes_copy_block
,
865 if (ret
!= CRYPTO_SUCCESS
) {
868 data
->cd_length
= data
->cd_offset
- saved_offset
;
869 data
->cd_offset
= saved_offset
;
870 } else if (aes_ctx
->ac_flags
& CMAC_MODE
) {
871 ret
= cmac_mode_final((cbc_ctx_t
*)aes_ctx
, data
,
872 aes_encrypt_block
, aes_xor_block
);
873 if (ret
!= CRYPTO_SUCCESS
)
875 data
->cd_length
= AES_BLOCK_LEN
;
878 * There must be no unprocessed plaintext.
879 * This happens if the length of the last data is
880 * not a multiple of the AES block length.
882 if (aes_ctx
->ac_remainder_len
> 0) {
883 return (CRYPTO_DATA_LEN_RANGE
);
888 (void) aes_free_context(ctx
);
890 return (CRYPTO_SUCCESS
);
895 aes_decrypt_final(crypto_ctx_t
*ctx
, crypto_data_t
*data
,
896 crypto_req_handle_t req
)
903 ASSERT(ctx
->cc_provider_private
!= NULL
);
904 aes_ctx
= ctx
->cc_provider_private
;
906 if (data
->cd_format
!= CRYPTO_DATA_RAW
&&
907 data
->cd_format
!= CRYPTO_DATA_UIO
&&
908 data
->cd_format
!= CRYPTO_DATA_MBLK
) {
909 return (CRYPTO_ARGUMENTS_BAD
);
913 * There must be no unprocessed ciphertext.
914 * This happens if the length of the last ciphertext is
915 * not a multiple of the AES block length.
917 if (aes_ctx
->ac_remainder_len
> 0) {
918 if ((aes_ctx
->ac_flags
& CTR_MODE
) == 0)
919 return (CRYPTO_ENCRYPTED_DATA_LEN_RANGE
);
921 ret
= ctr_mode_final((ctr_ctx_t
*)aes_ctx
, data
,
923 if (ret
== CRYPTO_DATA_LEN_RANGE
)
924 ret
= CRYPTO_ENCRYPTED_DATA_LEN_RANGE
;
925 if (ret
!= CRYPTO_SUCCESS
)
930 if (aes_ctx
->ac_flags
& CCM_MODE
) {
932 * This is where all the plaintext is returned, make sure
933 * the plaintext buffer is big enough
935 size_t pt_len
= aes_ctx
->ac_data_len
;
936 if (data
->cd_length
< pt_len
) {
937 data
->cd_length
= pt_len
;
938 return (CRYPTO_BUFFER_TOO_SMALL
);
941 ASSERT(aes_ctx
->ac_processed_data_len
== pt_len
);
942 ASSERT(aes_ctx
->ac_processed_mac_len
== aes_ctx
->ac_mac_len
);
943 saved_offset
= data
->cd_offset
;
944 saved_length
= data
->cd_length
;
945 ret
= ccm_decrypt_final((ccm_ctx_t
*)aes_ctx
, data
,
946 AES_BLOCK_LEN
, aes_encrypt_block
, aes_copy_block
,
948 if (ret
== CRYPTO_SUCCESS
) {
949 data
->cd_length
= data
->cd_offset
- saved_offset
;
951 data
->cd_length
= saved_length
;
954 data
->cd_offset
= saved_offset
;
955 if (ret
!= CRYPTO_SUCCESS
) {
958 } else if (aes_ctx
->ac_flags
& (GCM_MODE
|GMAC_MODE
)) {
960 * This is where all the plaintext is returned, make sure
961 * the plaintext buffer is big enough
963 gcm_ctx_t
*ctx
= (gcm_ctx_t
*)aes_ctx
;
964 size_t pt_len
= ctx
->gcm_processed_data_len
- ctx
->gcm_tag_len
;
966 if (data
->cd_length
< pt_len
) {
967 data
->cd_length
= pt_len
;
968 return (CRYPTO_BUFFER_TOO_SMALL
);
971 saved_offset
= data
->cd_offset
;
972 saved_length
= data
->cd_length
;
973 ret
= gcm_decrypt_final((gcm_ctx_t
*)aes_ctx
, data
,
974 AES_BLOCK_LEN
, aes_encrypt_block
, aes_xor_block
);
975 if (ret
== CRYPTO_SUCCESS
) {
976 data
->cd_length
= data
->cd_offset
- saved_offset
;
978 data
->cd_length
= saved_length
;
981 data
->cd_offset
= saved_offset
;
982 if (ret
!= CRYPTO_SUCCESS
) {
988 if ((aes_ctx
->ac_flags
& (CTR_MODE
|CCM_MODE
|GCM_MODE
|GMAC_MODE
)) == 0) {
992 (void) aes_free_context(ctx
);
994 return (CRYPTO_SUCCESS
);
999 aes_encrypt_atomic(crypto_provider_handle_t provider
,
1000 crypto_session_id_t session_id
, crypto_mechanism_t
*mechanism
,
1001 crypto_key_t
*key
, crypto_data_t
*plaintext
, crypto_data_t
*ciphertext
,
1002 crypto_spi_ctx_template_t
template, crypto_req_handle_t req
)
1004 aes_ctx_t aes_ctx
; /* on the stack */
1006 size_t saved_length
;
1007 size_t length_needed
;
1010 AES_ARG_INPLACE(plaintext
, ciphertext
);
1013 * CTR, CCM, CMAC, GCM, and GMAC modes do not require that plaintext
1014 * be a multiple of AES block size.
1016 switch (mechanism
->cm_type
) {
1017 case AES_CTR_MECH_INFO_TYPE
:
1018 case AES_CCM_MECH_INFO_TYPE
:
1019 case AES_GCM_MECH_INFO_TYPE
:
1020 case AES_GMAC_MECH_INFO_TYPE
:
1021 case AES_CMAC_MECH_INFO_TYPE
:
1024 if ((plaintext
->cd_length
& (AES_BLOCK_LEN
- 1)) != 0)
1025 return (CRYPTO_DATA_LEN_RANGE
);
1028 if ((ret
= aes_check_mech_param(mechanism
, NULL
, 0)) != CRYPTO_SUCCESS
)
1031 bzero(&aes_ctx
, sizeof (aes_ctx_t
));
1033 ret
= aes_common_init_ctx(&aes_ctx
, template, mechanism
, key
,
1034 crypto_kmflag(req
), B_TRUE
);
1035 if (ret
!= CRYPTO_SUCCESS
)
1038 switch (mechanism
->cm_type
) {
1039 case AES_CCM_MECH_INFO_TYPE
:
1040 length_needed
= plaintext
->cd_length
+ aes_ctx
.ac_mac_len
;
1042 case AES_GMAC_MECH_INFO_TYPE
:
1043 if (plaintext
->cd_length
!= 0)
1044 return (CRYPTO_ARGUMENTS_BAD
);
1046 case AES_GCM_MECH_INFO_TYPE
:
1047 length_needed
= plaintext
->cd_length
+ aes_ctx
.ac_tag_len
;
1049 case AES_CMAC_MECH_INFO_TYPE
:
1050 length_needed
= AES_BLOCK_LEN
;
1053 length_needed
= plaintext
->cd_length
;
1056 /* return size of buffer needed to store output */
1057 if (ciphertext
->cd_length
< length_needed
) {
1058 ciphertext
->cd_length
= length_needed
;
1059 ret
= CRYPTO_BUFFER_TOO_SMALL
;
1063 saved_offset
= ciphertext
->cd_offset
;
1064 saved_length
= ciphertext
->cd_length
;
1067 * Do an update on the specified input data.
1069 switch (plaintext
->cd_format
) {
1070 case CRYPTO_DATA_RAW
:
1071 ret
= crypto_update_iov(&aes_ctx
, plaintext
, ciphertext
,
1072 aes_encrypt_contiguous_blocks
, aes_copy_block64
);
1074 case CRYPTO_DATA_UIO
:
1075 ret
= crypto_update_uio(&aes_ctx
, plaintext
, ciphertext
,
1076 aes_encrypt_contiguous_blocks
, aes_copy_block64
);
1078 case CRYPTO_DATA_MBLK
:
1079 ret
= crypto_update_mp(&aes_ctx
, plaintext
, ciphertext
,
1080 aes_encrypt_contiguous_blocks
, aes_copy_block64
);
1083 ret
= CRYPTO_ARGUMENTS_BAD
;
1086 if (ret
== CRYPTO_SUCCESS
) {
1087 if (mechanism
->cm_type
== AES_CCM_MECH_INFO_TYPE
) {
1088 ret
= ccm_encrypt_final((ccm_ctx_t
*)&aes_ctx
,
1089 ciphertext
, AES_BLOCK_LEN
, aes_encrypt_block
,
1091 if (ret
!= CRYPTO_SUCCESS
)
1093 ASSERT(aes_ctx
.ac_remainder_len
== 0);
1094 } else if (mechanism
->cm_type
== AES_GCM_MECH_INFO_TYPE
||
1095 mechanism
->cm_type
== AES_GMAC_MECH_INFO_TYPE
) {
1096 ret
= gcm_encrypt_final((gcm_ctx_t
*)&aes_ctx
,
1097 ciphertext
, AES_BLOCK_LEN
, aes_encrypt_block
,
1098 aes_copy_block
, aes_xor_block
);
1099 if (ret
!= CRYPTO_SUCCESS
)
1101 ASSERT(aes_ctx
.ac_remainder_len
== 0);
1102 } else if (mechanism
->cm_type
== AES_CTR_MECH_INFO_TYPE
) {
1103 if (aes_ctx
.ac_remainder_len
> 0) {
1104 ret
= ctr_mode_final((ctr_ctx_t
*)&aes_ctx
,
1105 ciphertext
, aes_encrypt_block
);
1106 if (ret
!= CRYPTO_SUCCESS
)
1109 } else if (mechanism
->cm_type
== AES_CMAC_MECH_INFO_TYPE
) {
1110 ret
= cmac_mode_final((cbc_ctx_t
*)&aes_ctx
,
1111 ciphertext
, aes_encrypt_block
,
1113 if (ret
!= CRYPTO_SUCCESS
)
1116 ASSERT(aes_ctx
.ac_remainder_len
== 0);
1119 if (plaintext
!= ciphertext
) {
1120 ciphertext
->cd_length
=
1121 ciphertext
->cd_offset
- saved_offset
;
1124 ciphertext
->cd_length
= saved_length
;
1126 ciphertext
->cd_offset
= saved_offset
;
1129 if (aes_ctx
.ac_flags
& PROVIDER_OWNS_KEY_SCHEDULE
) {
1130 bzero(aes_ctx
.ac_keysched
, aes_ctx
.ac_keysched_len
);
1131 kmem_free(aes_ctx
.ac_keysched
, aes_ctx
.ac_keysched_len
);
1139 aes_decrypt_atomic(crypto_provider_handle_t provider
,
1140 crypto_session_id_t session_id
, crypto_mechanism_t
*mechanism
,
1141 crypto_key_t
*key
, crypto_data_t
*ciphertext
, crypto_data_t
*plaintext
,
1142 crypto_spi_ctx_template_t
template, crypto_req_handle_t req
)
1144 aes_ctx_t aes_ctx
; /* on the stack */
1146 size_t saved_length
;
1147 size_t length_needed
;
1150 AES_ARG_INPLACE(ciphertext
, plaintext
);
1153 * CCM, GCM, CTR, and GMAC modes do not require that ciphertext
1154 * be a multiple of AES block size.
1156 switch (mechanism
->cm_type
) {
1157 case AES_CTR_MECH_INFO_TYPE
:
1158 case AES_CCM_MECH_INFO_TYPE
:
1159 case AES_GCM_MECH_INFO_TYPE
:
1160 case AES_GMAC_MECH_INFO_TYPE
:
1163 if ((ciphertext
->cd_length
& (AES_BLOCK_LEN
- 1)) != 0)
1164 return (CRYPTO_ENCRYPTED_DATA_LEN_RANGE
);
1167 if ((ret
= aes_check_mech_param(mechanism
, NULL
, 0)) != CRYPTO_SUCCESS
)
1170 bzero(&aes_ctx
, sizeof (aes_ctx_t
));
1172 ret
= aes_common_init_ctx(&aes_ctx
, template, mechanism
, key
,
1173 crypto_kmflag(req
), B_FALSE
);
1174 if (ret
!= CRYPTO_SUCCESS
)
1177 switch (mechanism
->cm_type
) {
1178 case AES_CCM_MECH_INFO_TYPE
:
1179 length_needed
= aes_ctx
.ac_data_len
;
1181 case AES_GCM_MECH_INFO_TYPE
:
1182 length_needed
= ciphertext
->cd_length
- aes_ctx
.ac_tag_len
;
1184 case AES_GMAC_MECH_INFO_TYPE
:
1185 if (plaintext
->cd_length
!= 0)
1186 return (CRYPTO_ARGUMENTS_BAD
);
1190 length_needed
= ciphertext
->cd_length
;
1193 /* return size of buffer needed to store output */
1194 if (plaintext
->cd_length
< length_needed
) {
1195 plaintext
->cd_length
= length_needed
;
1196 ret
= CRYPTO_BUFFER_TOO_SMALL
;
1200 saved_offset
= plaintext
->cd_offset
;
1201 saved_length
= plaintext
->cd_length
;
1203 if (mechanism
->cm_type
== AES_GCM_MECH_INFO_TYPE
||
1204 mechanism
->cm_type
== AES_GMAC_MECH_INFO_TYPE
)
1205 gcm_set_kmflag((gcm_ctx_t
*)&aes_ctx
, crypto_kmflag(req
));
1208 * Do an update on the specified input data.
1210 switch (ciphertext
->cd_format
) {
1211 case CRYPTO_DATA_RAW
:
1212 ret
= crypto_update_iov(&aes_ctx
, ciphertext
, plaintext
,
1213 aes_decrypt_contiguous_blocks
, aes_copy_block64
);
1215 case CRYPTO_DATA_UIO
:
1216 ret
= crypto_update_uio(&aes_ctx
, ciphertext
, plaintext
,
1217 aes_decrypt_contiguous_blocks
, aes_copy_block64
);
1219 case CRYPTO_DATA_MBLK
:
1220 ret
= crypto_update_mp(&aes_ctx
, ciphertext
, plaintext
,
1221 aes_decrypt_contiguous_blocks
, aes_copy_block64
);
1224 ret
= CRYPTO_ARGUMENTS_BAD
;
1227 if (ret
== CRYPTO_SUCCESS
) {
1228 if (mechanism
->cm_type
== AES_CCM_MECH_INFO_TYPE
) {
1229 ASSERT(aes_ctx
.ac_processed_data_len
1230 == aes_ctx
.ac_data_len
);
1231 ASSERT(aes_ctx
.ac_processed_mac_len
1232 == aes_ctx
.ac_mac_len
);
1233 ret
= ccm_decrypt_final((ccm_ctx_t
*)&aes_ctx
,
1234 plaintext
, AES_BLOCK_LEN
, aes_encrypt_block
,
1235 aes_copy_block
, aes_xor_block
);
1236 ASSERT(aes_ctx
.ac_remainder_len
== 0);
1237 if ((ret
== CRYPTO_SUCCESS
) &&
1238 (ciphertext
!= plaintext
)) {
1239 plaintext
->cd_length
=
1240 plaintext
->cd_offset
- saved_offset
;
1242 plaintext
->cd_length
= saved_length
;
1244 } else if (mechanism
->cm_type
== AES_GCM_MECH_INFO_TYPE
||
1245 mechanism
->cm_type
== AES_GMAC_MECH_INFO_TYPE
) {
1246 ret
= gcm_decrypt_final((gcm_ctx_t
*)&aes_ctx
,
1247 plaintext
, AES_BLOCK_LEN
, aes_encrypt_block
,
1249 ASSERT(aes_ctx
.ac_remainder_len
== 0);
1250 if ((ret
== CRYPTO_SUCCESS
) &&
1251 (ciphertext
!= plaintext
)) {
1252 plaintext
->cd_length
=
1253 plaintext
->cd_offset
- saved_offset
;
1255 plaintext
->cd_length
= saved_length
;
1257 } else if (mechanism
->cm_type
!= AES_CTR_MECH_INFO_TYPE
) {
1258 ASSERT(aes_ctx
.ac_remainder_len
== 0);
1259 if (ciphertext
!= plaintext
)
1260 plaintext
->cd_length
=
1261 plaintext
->cd_offset
- saved_offset
;
1263 if (aes_ctx
.ac_remainder_len
> 0) {
1264 ret
= ctr_mode_final((ctr_ctx_t
*)&aes_ctx
,
1265 plaintext
, aes_encrypt_block
);
1266 if (ret
== CRYPTO_DATA_LEN_RANGE
)
1267 ret
= CRYPTO_ENCRYPTED_DATA_LEN_RANGE
;
1268 if (ret
!= CRYPTO_SUCCESS
)
1271 if (ciphertext
!= plaintext
)
1272 plaintext
->cd_length
=
1273 plaintext
->cd_offset
- saved_offset
;
1276 plaintext
->cd_length
= saved_length
;
1278 plaintext
->cd_offset
= saved_offset
;
1281 if (aes_ctx
.ac_flags
& PROVIDER_OWNS_KEY_SCHEDULE
) {
1282 bzero(aes_ctx
.ac_keysched
, aes_ctx
.ac_keysched_len
);
1283 kmem_free(aes_ctx
.ac_keysched
, aes_ctx
.ac_keysched_len
);
1286 if (aes_ctx
.ac_flags
& CCM_MODE
) {
1287 if (aes_ctx
.ac_pt_buf
!= NULL
) {
1288 kmem_free(aes_ctx
.ac_pt_buf
, aes_ctx
.ac_data_len
);
1290 } else if (aes_ctx
.ac_flags
& (GCM_MODE
|GMAC_MODE
)) {
1291 if (((gcm_ctx_t
*)&aes_ctx
)->gcm_pt_buf
!= NULL
) {
1292 kmem_free(((gcm_ctx_t
*)&aes_ctx
)->gcm_pt_buf
,
1293 ((gcm_ctx_t
*)&aes_ctx
)->gcm_pt_buf_len
);
1301 * KCF software provider context template entry points.
1305 aes_create_ctx_template(crypto_provider_handle_t provider
,
1306 crypto_mechanism_t
*mechanism
, crypto_key_t
*key
,
1307 crypto_spi_ctx_template_t
*tmpl
, size_t *tmpl_size
, crypto_req_handle_t req
)
1313 if (mechanism
->cm_type
!= AES_ECB_MECH_INFO_TYPE
&&
1314 mechanism
->cm_type
!= AES_CBC_MECH_INFO_TYPE
&&
1315 mechanism
->cm_type
!= AES_CMAC_MECH_INFO_TYPE
&&
1316 mechanism
->cm_type
!= AES_CTR_MECH_INFO_TYPE
&&
1317 mechanism
->cm_type
!= AES_CCM_MECH_INFO_TYPE
&&
1318 mechanism
->cm_type
!= AES_GCM_MECH_INFO_TYPE
&&
1319 mechanism
->cm_type
!= AES_GMAC_MECH_INFO_TYPE
)
1320 return (CRYPTO_MECHANISM_INVALID
);
1322 if ((keysched
= aes_alloc_keysched(&size
,
1323 crypto_kmflag(req
))) == NULL
) {
1324 return (CRYPTO_HOST_MEMORY
);
1328 * Initialize key schedule. Key length information is stored
1331 if ((rv
= init_keysched(key
, keysched
)) != CRYPTO_SUCCESS
) {
1332 bzero(keysched
, size
);
1333 kmem_free(keysched
, size
);
1340 return (CRYPTO_SUCCESS
);
1345 aes_free_context(crypto_ctx_t
*ctx
)
1347 aes_ctx_t
*aes_ctx
= ctx
->cc_provider_private
;
1349 if (aes_ctx
!= NULL
) {
1350 if (aes_ctx
->ac_flags
& PROVIDER_OWNS_KEY_SCHEDULE
) {
1351 ASSERT(aes_ctx
->ac_keysched_len
!= 0);
1352 bzero(aes_ctx
->ac_keysched
, aes_ctx
->ac_keysched_len
);
1353 kmem_free(aes_ctx
->ac_keysched
,
1354 aes_ctx
->ac_keysched_len
);
1356 crypto_free_mode_ctx(aes_ctx
);
1357 ctx
->cc_provider_private
= NULL
;
1360 return (CRYPTO_SUCCESS
);
1365 aes_common_init_ctx(aes_ctx_t
*aes_ctx
, crypto_spi_ctx_template_t
*template,
1366 crypto_mechanism_t
*mechanism
, crypto_key_t
*key
, int kmflag
,
1367 boolean_t is_encrypt_init
)
1369 int rv
= CRYPTO_SUCCESS
;
1373 if (template == NULL
) {
1374 if ((keysched
= aes_alloc_keysched(&size
, kmflag
)) == NULL
)
1375 return (CRYPTO_HOST_MEMORY
);
1377 * Initialize key schedule.
1378 * Key length is stored in the key.
1380 if ((rv
= init_keysched(key
, keysched
)) != CRYPTO_SUCCESS
) {
1381 kmem_free(keysched
, size
);
1385 aes_ctx
->ac_flags
|= PROVIDER_OWNS_KEY_SCHEDULE
;
1386 aes_ctx
->ac_keysched_len
= size
;
1388 keysched
= template;
1390 aes_ctx
->ac_keysched
= keysched
;
1392 switch (mechanism
->cm_type
) {
1393 case AES_CBC_MECH_INFO_TYPE
:
1394 rv
= cbc_init_ctx((cbc_ctx_t
*)aes_ctx
, mechanism
->cm_param
,
1395 mechanism
->cm_param_len
, AES_BLOCK_LEN
, aes_copy_block64
);
1397 case AES_CMAC_MECH_INFO_TYPE
:
1398 rv
= cmac_init_ctx((cbc_ctx_t
*)aes_ctx
, AES_BLOCK_LEN
);
1400 case AES_CTR_MECH_INFO_TYPE
: {
1401 CK_AES_CTR_PARAMS
*pp
;
1403 if (mechanism
->cm_param
== NULL
||
1404 mechanism
->cm_param_len
!= sizeof (CK_AES_CTR_PARAMS
)) {
1405 return (CRYPTO_MECHANISM_PARAM_INVALID
);
1407 pp
= (CK_AES_CTR_PARAMS
*)(void *)mechanism
->cm_param
;
1408 rv
= ctr_init_ctx((ctr_ctx_t
*)aes_ctx
, pp
->ulCounterBits
,
1409 pp
->cb
, aes_copy_block
);
1412 case AES_CCM_MECH_INFO_TYPE
:
1413 if (mechanism
->cm_param
== NULL
||
1414 mechanism
->cm_param_len
!= sizeof (CK_AES_CCM_PARAMS
)) {
1415 return (CRYPTO_MECHANISM_PARAM_INVALID
);
1417 rv
= ccm_init_ctx((ccm_ctx_t
*)aes_ctx
, mechanism
->cm_param
,
1418 kmflag
, is_encrypt_init
, AES_BLOCK_LEN
, aes_encrypt_block
,
1421 case AES_GCM_MECH_INFO_TYPE
:
1422 if (mechanism
->cm_param
== NULL
||
1423 mechanism
->cm_param_len
!= sizeof (CK_AES_GCM_PARAMS
)) {
1424 return (CRYPTO_MECHANISM_PARAM_INVALID
);
1426 rv
= gcm_init_ctx((gcm_ctx_t
*)aes_ctx
, mechanism
->cm_param
,
1427 AES_BLOCK_LEN
, aes_encrypt_block
, aes_copy_block
,
1430 case AES_GMAC_MECH_INFO_TYPE
:
1431 if (mechanism
->cm_param
== NULL
||
1432 mechanism
->cm_param_len
!= sizeof (CK_AES_GMAC_PARAMS
)) {
1433 return (CRYPTO_MECHANISM_PARAM_INVALID
);
1435 rv
= gmac_init_ctx((gcm_ctx_t
*)aes_ctx
, mechanism
->cm_param
,
1436 AES_BLOCK_LEN
, aes_encrypt_block
, aes_copy_block
,
1439 case AES_ECB_MECH_INFO_TYPE
:
1440 aes_ctx
->ac_flags
|= ECB_MODE
;
1443 if (rv
!= CRYPTO_SUCCESS
) {
1444 if (aes_ctx
->ac_flags
& PROVIDER_OWNS_KEY_SCHEDULE
) {
1445 bzero(keysched
, size
);
1446 kmem_free(keysched
, size
);
1454 process_gmac_mech(crypto_mechanism_t
*mech
, crypto_data_t
*data
,
1455 CK_AES_GCM_PARAMS
*gcm_params
)
1457 /* LINTED: pointer alignment */
1458 CK_AES_GMAC_PARAMS
*params
= (CK_AES_GMAC_PARAMS
*)mech
->cm_param
;
1460 if (mech
->cm_type
!= AES_GMAC_MECH_INFO_TYPE
)
1461 return (CRYPTO_MECHANISM_INVALID
);
1463 if (mech
->cm_param_len
!= sizeof (CK_AES_GMAC_PARAMS
))
1464 return (CRYPTO_MECHANISM_PARAM_INVALID
);
1466 if (params
->pIv
== NULL
)
1467 return (CRYPTO_MECHANISM_PARAM_INVALID
);
1469 gcm_params
->pIv
= params
->pIv
;
1470 gcm_params
->ulIvLen
= AES_GMAC_IV_LEN
;
1471 gcm_params
->ulTagBits
= AES_GMAC_TAG_BITS
;
1474 return (CRYPTO_SUCCESS
);
1476 if (data
->cd_format
!= CRYPTO_DATA_RAW
)
1477 return (CRYPTO_ARGUMENTS_BAD
);
1479 gcm_params
->pAAD
= (uchar_t
*)data
->cd_raw
.iov_base
;
1480 gcm_params
->ulAADLen
= data
->cd_length
;
1481 return (CRYPTO_SUCCESS
);
1485 aes_mac_init(crypto_ctx_t
*ctx
, crypto_mechanism_t
*mechanism
,
1486 crypto_key_t
*key
, crypto_spi_ctx_template_t
template,
1487 crypto_req_handle_t req
)
1489 return (aes_encrypt_init(ctx
, mechanism
,
1490 key
, template, req
));
1494 aes_mac(crypto_ctx_t
*ctx
, crypto_data_t
*plaintext
, crypto_data_t
*ciphertext
,
1495 crypto_req_handle_t req
)
1497 return (aes_encrypt(ctx
, plaintext
, ciphertext
, req
));
1501 aes_mac_update(crypto_ctx_t
*ctx
, crypto_data_t
*data
,
1502 crypto_req_handle_t req
)
1505 uint8_t block
[AES_BLOCK_LEN
];
1506 out
.cd_format
= CRYPTO_DATA_RAW
;
1508 out
.cd_length
= sizeof (block
);
1509 out
.cd_miscdata
= NULL
;
1510 out
.cd_raw
.iov_base
= (void *)block
;
1511 out
.cd_raw
.iov_len
= sizeof (block
);
1513 return (aes_encrypt_update(ctx
, data
, &out
, req
));
1517 aes_mac_final(crypto_ctx_t
*ctx
, crypto_data_t
*mac
, crypto_req_handle_t req
)
1519 return (aes_encrypt_final(ctx
, mac
, req
));
1523 aes_mac_atomic(crypto_provider_handle_t provider
,
1524 crypto_session_id_t session_id
, crypto_mechanism_t
*mechanism
,
1525 crypto_key_t
*key
, crypto_data_t
*data
, crypto_data_t
*mac
,
1526 crypto_spi_ctx_template_t
template, crypto_req_handle_t req
)
1528 CK_AES_GCM_PARAMS gcm_params
;
1529 crypto_mechanism_t gcm_mech
;
1532 if (mechanism
->cm_type
== AES_GMAC_MECH_INFO_TYPE
) {
1533 if ((rv
= process_gmac_mech(mechanism
, data
, &gcm_params
))
1537 gcm_mech
.cm_type
= AES_GCM_MECH_INFO_TYPE
;
1538 gcm_mech
.cm_param_len
= sizeof (CK_AES_GCM_PARAMS
);
1539 gcm_mech
.cm_param
= (char *)&gcm_params
;
1541 return (aes_encrypt_atomic(provider
, session_id
, &gcm_mech
,
1542 key
, &null_crypto_data
, mac
, template, req
));
1545 return (aes_encrypt_atomic(provider
, session_id
, mechanism
,
1546 key
, data
, mac
, template, req
));
1550 aes_mac_verify_atomic(crypto_provider_handle_t provider
,
1551 crypto_session_id_t session_id
, crypto_mechanism_t
*mechanism
,
1552 crypto_key_t
*key
, crypto_data_t
*data
, crypto_data_t
*mac
,
1553 crypto_spi_ctx_template_t
template, crypto_req_handle_t req
)
1555 CK_AES_GCM_PARAMS gcm_params
;
1556 crypto_mechanism_t gcm_mech
;
1557 crypto_data_t data_mac
;
1558 char buf
[AES_BLOCK_LEN
];
1561 if (mechanism
->cm_type
== AES_GMAC_MECH_INFO_TYPE
) {
1562 if ((rv
= process_gmac_mech(mechanism
, data
, &gcm_params
))
1566 gcm_mech
.cm_type
= AES_GCM_MECH_INFO_TYPE
;
1567 gcm_mech
.cm_param_len
= sizeof (CK_AES_GCM_PARAMS
);
1568 gcm_mech
.cm_param
= (char *)&gcm_params
;
1570 return (aes_decrypt_atomic(provider
, session_id
, &gcm_mech
,
1571 key
, mac
, &null_crypto_data
, template, req
));
1576 data_mac
.cd_format
= CRYPTO_DATA_RAW
;
1577 data_mac
.cd_offset
= 0;
1578 data_mac
.cd_length
= AES_BLOCK_LEN
;
1579 data_mac
.cd_miscdata
= NULL
;
1580 data_mac
.cd_raw
.iov_base
= (void *) buf
;
1581 data_mac
.cd_raw
.iov_len
= AES_BLOCK_LEN
;
1583 rv
= aes_encrypt_atomic(provider
, session_id
, &gcm_mech
,
1584 key
, data
, &data_mac
, template, req
);
1586 if (rv
!= CRYPTO_SUCCESS
)
1589 /* should use get_input_data for mac? */
1590 if (bcmp(buf
, mac
->cd_raw
.iov_base
+ mac
->cd_offset
,
1591 AES_BLOCK_LEN
) != 0)
1592 return (CRYPTO_INVALID_MAC
);
1594 return (CRYPTO_SUCCESS
);