4 * The contents of this file are subject to the terms of the
5 * Common Development and Distribution License (the "License").
6 * You may not use this file except in compliance with the License.
8 * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
9 * or http://www.opensolaris.org/os/licensing.
10 * See the License for the specific language governing permissions
11 * and limitations under the License.
13 * When distributing Covered Code, include this CDDL HEADER in each
14 * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
15 * If applicable, add the following below this CDDL HEADER, with the
16 * fields enclosed by brackets "[]" replaced with your own identifying
17 * information: Portions Copyright [yyyy] [name of copyright owner]
22 * Copyright (c) 2003, 2010, Oracle and/or its affiliates. All rights reserved.
26 * AES provider for the Kernel Cryptographic Framework (KCF)
29 #include <sys/types.h>
30 #include <sys/systm.h>
31 #include <sys/modctl.h>
32 #include <sys/cmn_err.h>
34 #include <sys/crypto/common.h>
35 #include <sys/crypto/impl.h>
36 #include <sys/crypto/spi.h>
37 #include <sys/sysmacros.h>
38 #include <sys/strsun.h>
39 #include <modes/modes.h>
41 #include <aes/aes_impl.h>
43 extern struct mod_ops mod_cryptoops
;
46 * Module linkage information for the kernel.
48 static struct modlcrypto modlcrypto
= {
50 "AES Kernel SW Provider"
53 static struct modlinkage modlinkage
= {
60 * Mechanism info structure passed to KCF during registration.
62 static crypto_mech_info_t aes_mech_info_tab
[] = {
64 {SUN_CKM_AES_ECB
, AES_ECB_MECH_INFO_TYPE
,
65 CRYPTO_FG_ENCRYPT
| CRYPTO_FG_ENCRYPT_ATOMIC
|
66 CRYPTO_FG_DECRYPT
| CRYPTO_FG_DECRYPT_ATOMIC
,
67 AES_MIN_KEY_BYTES
, AES_MAX_KEY_BYTES
, CRYPTO_KEYSIZE_UNIT_IN_BYTES
},
69 {SUN_CKM_AES_CBC
, AES_CBC_MECH_INFO_TYPE
,
70 CRYPTO_FG_ENCRYPT
| CRYPTO_FG_ENCRYPT_ATOMIC
|
71 CRYPTO_FG_DECRYPT
| CRYPTO_FG_DECRYPT_ATOMIC
,
72 AES_MIN_KEY_BYTES
, AES_MAX_KEY_BYTES
, CRYPTO_KEYSIZE_UNIT_IN_BYTES
},
74 {SUN_CKM_AES_CTR
, AES_CTR_MECH_INFO_TYPE
,
75 CRYPTO_FG_ENCRYPT
| CRYPTO_FG_ENCRYPT_ATOMIC
|
76 CRYPTO_FG_DECRYPT
| CRYPTO_FG_DECRYPT_ATOMIC
,
77 AES_MIN_KEY_BYTES
, AES_MAX_KEY_BYTES
, CRYPTO_KEYSIZE_UNIT_IN_BYTES
},
79 {SUN_CKM_AES_CCM
, AES_CCM_MECH_INFO_TYPE
,
80 CRYPTO_FG_ENCRYPT
| CRYPTO_FG_ENCRYPT_ATOMIC
|
81 CRYPTO_FG_DECRYPT
| CRYPTO_FG_DECRYPT_ATOMIC
,
82 AES_MIN_KEY_BYTES
, AES_MAX_KEY_BYTES
, CRYPTO_KEYSIZE_UNIT_IN_BYTES
},
84 {SUN_CKM_AES_GCM
, AES_GCM_MECH_INFO_TYPE
,
85 CRYPTO_FG_ENCRYPT
| CRYPTO_FG_ENCRYPT_ATOMIC
|
86 CRYPTO_FG_DECRYPT
| CRYPTO_FG_DECRYPT_ATOMIC
,
87 AES_MIN_KEY_BYTES
, AES_MAX_KEY_BYTES
, CRYPTO_KEYSIZE_UNIT_IN_BYTES
},
89 {SUN_CKM_AES_GMAC
, AES_GMAC_MECH_INFO_TYPE
,
90 CRYPTO_FG_ENCRYPT
| CRYPTO_FG_ENCRYPT_ATOMIC
|
91 CRYPTO_FG_DECRYPT
| CRYPTO_FG_DECRYPT_ATOMIC
|
92 CRYPTO_FG_MAC
| CRYPTO_FG_MAC_ATOMIC
|
93 CRYPTO_FG_SIGN
| CRYPTO_FG_SIGN_ATOMIC
|
94 CRYPTO_FG_VERIFY
| CRYPTO_FG_VERIFY_ATOMIC
,
95 AES_MIN_KEY_BYTES
, AES_MAX_KEY_BYTES
, CRYPTO_KEYSIZE_UNIT_IN_BYTES
}
/*
 * Operations are in-place if the output buffer is NULL: substitute the
 * input buffer for the missing output.  Wrapped in do/while (0) so the
 * macro is a single statement and is safe inside unbraced if/else.
 */
#define AES_ARG_INPLACE(input, output)          \
    do {                                        \
        if ((output) == NULL)                   \
            (output) = (input);                 \
    } while (0)
103 static void aes_provider_status(crypto_provider_handle_t
, uint_t
*);
105 static crypto_control_ops_t aes_control_ops
= {
109 static int aes_encrypt_init(crypto_ctx_t
*, crypto_mechanism_t
*,
110 crypto_key_t
*, crypto_spi_ctx_template_t
, crypto_req_handle_t
);
111 static int aes_decrypt_init(crypto_ctx_t
*, crypto_mechanism_t
*,
112 crypto_key_t
*, crypto_spi_ctx_template_t
, crypto_req_handle_t
);
113 static int aes_common_init(crypto_ctx_t
*, crypto_mechanism_t
*,
114 crypto_key_t
*, crypto_spi_ctx_template_t
, crypto_req_handle_t
, boolean_t
);
115 static int aes_common_init_ctx(aes_ctx_t
*, crypto_spi_ctx_template_t
*,
116 crypto_mechanism_t
*, crypto_key_t
*, int, boolean_t
);
117 static int aes_encrypt_final(crypto_ctx_t
*, crypto_data_t
*,
118 crypto_req_handle_t
);
119 static int aes_decrypt_final(crypto_ctx_t
*, crypto_data_t
*,
120 crypto_req_handle_t
);
122 static int aes_encrypt(crypto_ctx_t
*, crypto_data_t
*, crypto_data_t
*,
123 crypto_req_handle_t
);
124 static int aes_encrypt_update(crypto_ctx_t
*, crypto_data_t
*,
125 crypto_data_t
*, crypto_req_handle_t
);
126 static int aes_encrypt_atomic(crypto_provider_handle_t
, crypto_session_id_t
,
127 crypto_mechanism_t
*, crypto_key_t
*, crypto_data_t
*,
128 crypto_data_t
*, crypto_spi_ctx_template_t
, crypto_req_handle_t
);
130 static int aes_decrypt(crypto_ctx_t
*, crypto_data_t
*, crypto_data_t
*,
131 crypto_req_handle_t
);
132 static int aes_decrypt_update(crypto_ctx_t
*, crypto_data_t
*,
133 crypto_data_t
*, crypto_req_handle_t
);
134 static int aes_decrypt_atomic(crypto_provider_handle_t
, crypto_session_id_t
,
135 crypto_mechanism_t
*, crypto_key_t
*, crypto_data_t
*,
136 crypto_data_t
*, crypto_spi_ctx_template_t
, crypto_req_handle_t
);
138 static crypto_cipher_ops_t aes_cipher_ops
= {
151 static int aes_mac_atomic(crypto_provider_handle_t
, crypto_session_id_t
,
152 crypto_mechanism_t
*, crypto_key_t
*, crypto_data_t
*, crypto_data_t
*,
153 crypto_spi_ctx_template_t
, crypto_req_handle_t
);
154 static int aes_mac_verify_atomic(crypto_provider_handle_t
, crypto_session_id_t
,
155 crypto_mechanism_t
*, crypto_key_t
*, crypto_data_t
*, crypto_data_t
*,
156 crypto_spi_ctx_template_t
, crypto_req_handle_t
);
158 static crypto_mac_ops_t aes_mac_ops
= {
164 aes_mac_verify_atomic
167 static int aes_create_ctx_template(crypto_provider_handle_t
,
168 crypto_mechanism_t
*, crypto_key_t
*, crypto_spi_ctx_template_t
*,
169 size_t *, crypto_req_handle_t
);
170 static int aes_free_context(crypto_ctx_t
*);
172 static crypto_ctx_ops_t aes_ctx_ops
= {
173 aes_create_ctx_template
,
177 static crypto_ops_t aes_crypto_ops
= {
197 static crypto_provider_info_t aes_prov_info
= {
198 CRYPTO_SPI_VERSION_4
,
199 "AES Software Provider",
204 sizeof (aes_mech_info_tab
)/sizeof (crypto_mech_info_t
),
208 static crypto_kcf_provider_handle_t aes_prov_handle
= NULL
;
209 static crypto_data_t null_crypto_data
= { CRYPTO_DATA_RAW
};
216 if ((ret
= mod_install(&modlinkage
)) != 0)
219 /* Register with KCF. If the registration fails, remove the module. */
220 if (crypto_register_provider(&aes_prov_info
, &aes_prov_handle
)) {
221 (void) mod_remove(&modlinkage
);
231 /* Unregister from KCF if module is registered */
232 if (aes_prov_handle
!= NULL
) {
233 if (crypto_unregister_provider(aes_prov_handle
))
236 aes_prov_handle
= NULL
;
239 return (mod_remove(&modlinkage
));
243 _info(struct modinfo
*modinfop
)
245 return (mod_info(&modlinkage
, modinfop
));
250 aes_check_mech_param(crypto_mechanism_t
*mechanism
, aes_ctx_t
**ctx
, int kmflag
)
253 boolean_t param_required
= B_TRUE
;
255 void *(*alloc_fun
)(int);
256 int rv
= CRYPTO_SUCCESS
;
258 switch (mechanism
->cm_type
) {
259 case AES_ECB_MECH_INFO_TYPE
:
260 param_required
= B_FALSE
;
261 alloc_fun
= ecb_alloc_ctx
;
263 case AES_CBC_MECH_INFO_TYPE
:
264 param_len
= AES_BLOCK_LEN
;
265 alloc_fun
= cbc_alloc_ctx
;
267 case AES_CTR_MECH_INFO_TYPE
:
268 param_len
= sizeof (CK_AES_CTR_PARAMS
);
269 alloc_fun
= ctr_alloc_ctx
;
271 case AES_CCM_MECH_INFO_TYPE
:
272 param_len
= sizeof (CK_AES_CCM_PARAMS
);
273 alloc_fun
= ccm_alloc_ctx
;
275 case AES_GCM_MECH_INFO_TYPE
:
276 param_len
= sizeof (CK_AES_GCM_PARAMS
);
277 alloc_fun
= gcm_alloc_ctx
;
279 case AES_GMAC_MECH_INFO_TYPE
:
280 param_len
= sizeof (CK_AES_GMAC_PARAMS
);
281 alloc_fun
= gmac_alloc_ctx
;
284 rv
= CRYPTO_MECHANISM_INVALID
;
287 if (param_required
&& mechanism
->cm_param
!= NULL
&&
288 mechanism
->cm_param_len
!= param_len
) {
289 rv
= CRYPTO_MECHANISM_PARAM_INVALID
;
292 p
= (alloc_fun
)(kmflag
);
299 * Initialize key schedules for AES
302 init_keysched(crypto_key_t
*key
, void *newbie
)
305 * Only keys by value are supported by this module.
307 switch (key
->ck_format
) {
309 if (key
->ck_length
< AES_MINBITS
||
310 key
->ck_length
> AES_MAXBITS
) {
311 return (CRYPTO_KEY_SIZE_RANGE
);
314 /* key length must be either 128, 192, or 256 */
315 if ((key
->ck_length
& 63) != 0)
316 return (CRYPTO_KEY_SIZE_RANGE
);
319 return (CRYPTO_KEY_TYPE_INCONSISTENT
);
322 aes_init_keysched(key
->ck_data
, key
->ck_length
, newbie
);
323 return (CRYPTO_SUCCESS
);
327 * KCF software provider control entry points.
331 aes_provider_status(crypto_provider_handle_t provider
, uint_t
*status
)
333 *status
= CRYPTO_PROVIDER_READY
;
337 aes_encrypt_init(crypto_ctx_t
*ctx
, crypto_mechanism_t
*mechanism
,
338 crypto_key_t
*key
, crypto_spi_ctx_template_t
template,
339 crypto_req_handle_t req
) {
340 return (aes_common_init(ctx
, mechanism
, key
, template, req
, B_TRUE
));
344 aes_decrypt_init(crypto_ctx_t
*ctx
, crypto_mechanism_t
*mechanism
,
345 crypto_key_t
*key
, crypto_spi_ctx_template_t
template,
346 crypto_req_handle_t req
) {
347 return (aes_common_init(ctx
, mechanism
, key
, template, req
, B_FALSE
));
353 * KCF software provider encrypt entry points.
356 aes_common_init(crypto_ctx_t
*ctx
, crypto_mechanism_t
*mechanism
,
357 crypto_key_t
*key
, crypto_spi_ctx_template_t
template,
358 crypto_req_handle_t req
, boolean_t is_encrypt_init
)
365 * Only keys by value are supported by this module.
367 if (key
->ck_format
!= CRYPTO_KEY_RAW
) {
368 return (CRYPTO_KEY_TYPE_INCONSISTENT
);
371 kmflag
= crypto_kmflag(req
);
372 if ((rv
= aes_check_mech_param(mechanism
, &aes_ctx
, kmflag
))
376 rv
= aes_common_init_ctx(aes_ctx
, template, mechanism
, key
, kmflag
,
378 if (rv
!= CRYPTO_SUCCESS
) {
379 crypto_free_mode_ctx(aes_ctx
);
383 ctx
->cc_provider_private
= aes_ctx
;
385 return (CRYPTO_SUCCESS
);
389 aes_copy_block64(uint8_t *in
, uint64_t *out
)
391 if (IS_P2ALIGNED(in
, sizeof (uint64_t))) {
392 /* LINTED: pointer alignment */
393 out
[0] = *(uint64_t *)&in
[0];
394 /* LINTED: pointer alignment */
395 out
[1] = *(uint64_t *)&in
[8];
397 uint8_t *iv8
= (uint8_t *)&out
[0];
399 AES_COPY_BLOCK(in
, iv8
);
405 aes_encrypt(crypto_ctx_t
*ctx
, crypto_data_t
*plaintext
,
406 crypto_data_t
*ciphertext
, crypto_req_handle_t req
)
408 int ret
= CRYPTO_FAILED
;
411 size_t saved_length
, saved_offset
, length_needed
;
413 ASSERT(ctx
->cc_provider_private
!= NULL
);
414 aes_ctx
= ctx
->cc_provider_private
;
417 * For block ciphers, plaintext must be a multiple of AES block size.
418 * This test is only valid for ciphers whose blocksize is a power of 2.
420 if (((aes_ctx
->ac_flags
& (CTR_MODE
|CCM_MODE
|GCM_MODE
|GMAC_MODE
))
421 == 0) && (plaintext
->cd_length
& (AES_BLOCK_LEN
- 1)) != 0)
422 return (CRYPTO_DATA_LEN_RANGE
);
424 AES_ARG_INPLACE(plaintext
, ciphertext
);
427 * We need to just return the length needed to store the output.
428 * We should not destroy the context for the following case.
430 switch (aes_ctx
->ac_flags
& (CCM_MODE
|GCM_MODE
|GMAC_MODE
)) {
432 length_needed
= plaintext
->cd_length
+ aes_ctx
->ac_mac_len
;
435 length_needed
= plaintext
->cd_length
+ aes_ctx
->ac_tag_len
;
438 if (plaintext
->cd_length
!= 0)
439 return (CRYPTO_ARGUMENTS_BAD
);
441 length_needed
= aes_ctx
->ac_tag_len
;
444 length_needed
= plaintext
->cd_length
;
447 if (ciphertext
->cd_length
< length_needed
) {
448 ciphertext
->cd_length
= length_needed
;
449 return (CRYPTO_BUFFER_TOO_SMALL
);
452 saved_length
= ciphertext
->cd_length
;
453 saved_offset
= ciphertext
->cd_offset
;
456 * Do an update on the specified input data.
458 ret
= aes_encrypt_update(ctx
, plaintext
, ciphertext
, req
);
459 if (ret
!= CRYPTO_SUCCESS
) {
464 * For CCM mode, aes_ccm_encrypt_final() will take care of any
465 * left-over unprocessed data, and compute the MAC
467 if (aes_ctx
->ac_flags
& CCM_MODE
) {
469 * ccm_encrypt_final() will compute the MAC and append
470 * it to existing ciphertext. So, need to adjust the left over
471 * length value accordingly
474 /* order of following 2 lines MUST not be reversed */
475 ciphertext
->cd_offset
= ciphertext
->cd_length
;
476 ciphertext
->cd_length
= saved_length
- ciphertext
->cd_length
;
477 ret
= ccm_encrypt_final((ccm_ctx_t
*)aes_ctx
, ciphertext
,
478 AES_BLOCK_LEN
, aes_encrypt_block
, aes_xor_block
);
479 if (ret
!= CRYPTO_SUCCESS
) {
483 if (plaintext
!= ciphertext
) {
484 ciphertext
->cd_length
=
485 ciphertext
->cd_offset
- saved_offset
;
487 ciphertext
->cd_offset
= saved_offset
;
488 } else if (aes_ctx
->ac_flags
& (GCM_MODE
|GMAC_MODE
)) {
490 * gcm_encrypt_final() will compute the MAC and append
491 * it to existing ciphertext. So, need to adjust the left over
492 * length value accordingly
495 /* order of following 2 lines MUST not be reversed */
496 ciphertext
->cd_offset
= ciphertext
->cd_length
;
497 ciphertext
->cd_length
= saved_length
- ciphertext
->cd_length
;
498 ret
= gcm_encrypt_final((gcm_ctx_t
*)aes_ctx
, ciphertext
,
499 AES_BLOCK_LEN
, aes_encrypt_block
, aes_copy_block
,
501 if (ret
!= CRYPTO_SUCCESS
) {
505 if (plaintext
!= ciphertext
) {
506 ciphertext
->cd_length
=
507 ciphertext
->cd_offset
- saved_offset
;
509 ciphertext
->cd_offset
= saved_offset
;
512 ASSERT(aes_ctx
->ac_remainder_len
== 0);
513 (void) aes_free_context(ctx
);
520 aes_decrypt(crypto_ctx_t
*ctx
, crypto_data_t
*ciphertext
,
521 crypto_data_t
*plaintext
, crypto_req_handle_t req
)
523 int ret
= CRYPTO_FAILED
;
527 size_t saved_length
, length_needed
;
529 ASSERT(ctx
->cc_provider_private
!= NULL
);
530 aes_ctx
= ctx
->cc_provider_private
;
533 * For block ciphers, plaintext must be a multiple of AES block size.
534 * This test is only valid for ciphers whose blocksize is a power of 2.
536 if (((aes_ctx
->ac_flags
& (CTR_MODE
|CCM_MODE
|GCM_MODE
|GMAC_MODE
))
537 == 0) && (ciphertext
->cd_length
& (AES_BLOCK_LEN
- 1)) != 0) {
538 return (CRYPTO_ENCRYPTED_DATA_LEN_RANGE
);
541 AES_ARG_INPLACE(ciphertext
, plaintext
);
544 * Return length needed to store the output.
545 * Do not destroy context when plaintext buffer is too small.
547 * CCM: plaintext is MAC len smaller than cipher text
548 * GCM: plaintext is TAG len smaller than cipher text
549 * GMAC: plaintext length must be zero
551 switch (aes_ctx
->ac_flags
& (CCM_MODE
|GCM_MODE
|GMAC_MODE
)) {
553 length_needed
= aes_ctx
->ac_processed_data_len
;
556 length_needed
= ciphertext
->cd_length
- aes_ctx
->ac_tag_len
;
559 if (plaintext
->cd_length
!= 0)
560 return (CRYPTO_ARGUMENTS_BAD
);
565 length_needed
= ciphertext
->cd_length
;
568 if (plaintext
->cd_length
< length_needed
) {
569 plaintext
->cd_length
= length_needed
;
570 return (CRYPTO_BUFFER_TOO_SMALL
);
573 saved_offset
= plaintext
->cd_offset
;
574 saved_length
= plaintext
->cd_length
;
577 * Do an update on the specified input data.
579 ret
= aes_decrypt_update(ctx
, ciphertext
, plaintext
, req
);
580 if (ret
!= CRYPTO_SUCCESS
) {
584 if (aes_ctx
->ac_flags
& CCM_MODE
) {
585 ASSERT(aes_ctx
->ac_processed_data_len
== aes_ctx
->ac_data_len
);
586 ASSERT(aes_ctx
->ac_processed_mac_len
== aes_ctx
->ac_mac_len
);
588 /* order of following 2 lines MUST not be reversed */
589 plaintext
->cd_offset
= plaintext
->cd_length
;
590 plaintext
->cd_length
= saved_length
- plaintext
->cd_length
;
592 ret
= ccm_decrypt_final((ccm_ctx_t
*)aes_ctx
, plaintext
,
593 AES_BLOCK_LEN
, aes_encrypt_block
, aes_copy_block
,
595 if (ret
== CRYPTO_SUCCESS
) {
596 if (plaintext
!= ciphertext
) {
597 plaintext
->cd_length
=
598 plaintext
->cd_offset
- saved_offset
;
601 plaintext
->cd_length
= saved_length
;
604 plaintext
->cd_offset
= saved_offset
;
605 } else if (aes_ctx
->ac_flags
& (GCM_MODE
|GMAC_MODE
)) {
606 /* order of following 2 lines MUST not be reversed */
607 plaintext
->cd_offset
= plaintext
->cd_length
;
608 plaintext
->cd_length
= saved_length
- plaintext
->cd_length
;
610 ret
= gcm_decrypt_final((gcm_ctx_t
*)aes_ctx
, plaintext
,
611 AES_BLOCK_LEN
, aes_encrypt_block
, aes_xor_block
);
612 if (ret
== CRYPTO_SUCCESS
) {
613 if (plaintext
!= ciphertext
) {
614 plaintext
->cd_length
=
615 plaintext
->cd_offset
- saved_offset
;
618 plaintext
->cd_length
= saved_length
;
621 plaintext
->cd_offset
= saved_offset
;
624 ASSERT(aes_ctx
->ac_remainder_len
== 0);
627 (void) aes_free_context(ctx
);
635 aes_encrypt_update(crypto_ctx_t
*ctx
, crypto_data_t
*plaintext
,
636 crypto_data_t
*ciphertext
, crypto_req_handle_t req
)
639 size_t saved_length
, out_len
;
640 int ret
= CRYPTO_SUCCESS
;
643 ASSERT(ctx
->cc_provider_private
!= NULL
);
644 aes_ctx
= ctx
->cc_provider_private
;
646 AES_ARG_INPLACE(plaintext
, ciphertext
);
648 /* compute number of bytes that will hold the ciphertext */
649 out_len
= aes_ctx
->ac_remainder_len
;
650 out_len
+= plaintext
->cd_length
;
651 out_len
&= ~(AES_BLOCK_LEN
- 1);
653 /* return length needed to store the output */
654 if (ciphertext
->cd_length
< out_len
) {
655 ciphertext
->cd_length
= out_len
;
656 return (CRYPTO_BUFFER_TOO_SMALL
);
659 saved_offset
= ciphertext
->cd_offset
;
660 saved_length
= ciphertext
->cd_length
;
663 * Do the AES update on the specified input data.
665 switch (plaintext
->cd_format
) {
666 case CRYPTO_DATA_RAW
:
667 ret
= crypto_update_iov(ctx
->cc_provider_private
,
668 plaintext
, ciphertext
, aes_encrypt_contiguous_blocks
,
671 case CRYPTO_DATA_UIO
:
672 ret
= crypto_update_uio(ctx
->cc_provider_private
,
673 plaintext
, ciphertext
, aes_encrypt_contiguous_blocks
,
676 case CRYPTO_DATA_MBLK
:
677 ret
= crypto_update_mp(ctx
->cc_provider_private
,
678 plaintext
, ciphertext
, aes_encrypt_contiguous_blocks
,
682 ret
= CRYPTO_ARGUMENTS_BAD
;
686 * Since AES counter mode is a stream cipher, we call
687 * ctr_mode_final() to pick up any remaining bytes.
688 * It is an internal function that does not destroy
689 * the context like *normal* final routines.
691 if ((aes_ctx
->ac_flags
& CTR_MODE
) && (aes_ctx
->ac_remainder_len
> 0)) {
692 ret
= ctr_mode_final((ctr_ctx_t
*)aes_ctx
,
693 ciphertext
, aes_encrypt_block
);
696 if (ret
== CRYPTO_SUCCESS
) {
697 if (plaintext
!= ciphertext
)
698 ciphertext
->cd_length
=
699 ciphertext
->cd_offset
- saved_offset
;
701 ciphertext
->cd_length
= saved_length
;
703 ciphertext
->cd_offset
= saved_offset
;
710 aes_decrypt_update(crypto_ctx_t
*ctx
, crypto_data_t
*ciphertext
,
711 crypto_data_t
*plaintext
, crypto_req_handle_t req
)
714 size_t saved_length
, out_len
;
715 int ret
= CRYPTO_SUCCESS
;
718 ASSERT(ctx
->cc_provider_private
!= NULL
);
719 aes_ctx
= ctx
->cc_provider_private
;
721 AES_ARG_INPLACE(ciphertext
, plaintext
);
724 * Compute number of bytes that will hold the plaintext.
725 * This is not necessary for CCM, GCM, and GMAC since these
726 * mechanisms never return plaintext for update operations.
728 if ((aes_ctx
->ac_flags
& (CCM_MODE
|GCM_MODE
|GMAC_MODE
)) == 0) {
729 out_len
= aes_ctx
->ac_remainder_len
;
730 out_len
+= ciphertext
->cd_length
;
731 out_len
&= ~(AES_BLOCK_LEN
- 1);
733 /* return length needed to store the output */
734 if (plaintext
->cd_length
< out_len
) {
735 plaintext
->cd_length
= out_len
;
736 return (CRYPTO_BUFFER_TOO_SMALL
);
740 saved_offset
= plaintext
->cd_offset
;
741 saved_length
= plaintext
->cd_length
;
743 if (aes_ctx
->ac_flags
& (GCM_MODE
|GMAC_MODE
))
744 gcm_set_kmflag((gcm_ctx_t
*)aes_ctx
, crypto_kmflag(req
));
747 * Do the AES update on the specified input data.
749 switch (ciphertext
->cd_format
) {
750 case CRYPTO_DATA_RAW
:
751 ret
= crypto_update_iov(ctx
->cc_provider_private
,
752 ciphertext
, plaintext
, aes_decrypt_contiguous_blocks
,
755 case CRYPTO_DATA_UIO
:
756 ret
= crypto_update_uio(ctx
->cc_provider_private
,
757 ciphertext
, plaintext
, aes_decrypt_contiguous_blocks
,
760 case CRYPTO_DATA_MBLK
:
761 ret
= crypto_update_mp(ctx
->cc_provider_private
,
762 ciphertext
, plaintext
, aes_decrypt_contiguous_blocks
,
766 ret
= CRYPTO_ARGUMENTS_BAD
;
770 * Since AES counter mode is a stream cipher, we call
771 * ctr_mode_final() to pick up any remaining bytes.
772 * It is an internal function that does not destroy
773 * the context like *normal* final routines.
775 if ((aes_ctx
->ac_flags
& CTR_MODE
) && (aes_ctx
->ac_remainder_len
> 0)) {
776 ret
= ctr_mode_final((ctr_ctx_t
*)aes_ctx
, plaintext
,
778 if (ret
== CRYPTO_DATA_LEN_RANGE
)
779 ret
= CRYPTO_ENCRYPTED_DATA_LEN_RANGE
;
782 if (ret
== CRYPTO_SUCCESS
) {
783 if (ciphertext
!= plaintext
)
784 plaintext
->cd_length
=
785 plaintext
->cd_offset
- saved_offset
;
787 plaintext
->cd_length
= saved_length
;
789 plaintext
->cd_offset
= saved_offset
;
797 aes_encrypt_final(crypto_ctx_t
*ctx
, crypto_data_t
*data
,
798 crypto_req_handle_t req
)
803 ASSERT(ctx
->cc_provider_private
!= NULL
);
804 aes_ctx
= ctx
->cc_provider_private
;
806 if (data
->cd_format
!= CRYPTO_DATA_RAW
&&
807 data
->cd_format
!= CRYPTO_DATA_UIO
&&
808 data
->cd_format
!= CRYPTO_DATA_MBLK
) {
809 return (CRYPTO_ARGUMENTS_BAD
);
812 if (aes_ctx
->ac_flags
& CTR_MODE
) {
813 if (aes_ctx
->ac_remainder_len
> 0) {
814 ret
= ctr_mode_final((ctr_ctx_t
*)aes_ctx
, data
,
816 if (ret
!= CRYPTO_SUCCESS
)
819 } else if (aes_ctx
->ac_flags
& CCM_MODE
) {
820 ret
= ccm_encrypt_final((ccm_ctx_t
*)aes_ctx
, data
,
821 AES_BLOCK_LEN
, aes_encrypt_block
, aes_xor_block
);
822 if (ret
!= CRYPTO_SUCCESS
) {
825 } else if (aes_ctx
->ac_flags
& (GCM_MODE
|GMAC_MODE
)) {
826 size_t saved_offset
= data
->cd_offset
;
828 ret
= gcm_encrypt_final((gcm_ctx_t
*)aes_ctx
, data
,
829 AES_BLOCK_LEN
, aes_encrypt_block
, aes_copy_block
,
831 if (ret
!= CRYPTO_SUCCESS
) {
834 data
->cd_length
= data
->cd_offset
- saved_offset
;
835 data
->cd_offset
= saved_offset
;
838 * There must be no unprocessed plaintext.
839 * This happens if the length of the last data is
840 * not a multiple of the AES block length.
842 if (aes_ctx
->ac_remainder_len
> 0) {
843 return (CRYPTO_DATA_LEN_RANGE
);
848 (void) aes_free_context(ctx
);
850 return (CRYPTO_SUCCESS
);
855 aes_decrypt_final(crypto_ctx_t
*ctx
, crypto_data_t
*data
,
856 crypto_req_handle_t req
)
863 ASSERT(ctx
->cc_provider_private
!= NULL
);
864 aes_ctx
= ctx
->cc_provider_private
;
866 if (data
->cd_format
!= CRYPTO_DATA_RAW
&&
867 data
->cd_format
!= CRYPTO_DATA_UIO
&&
868 data
->cd_format
!= CRYPTO_DATA_MBLK
) {
869 return (CRYPTO_ARGUMENTS_BAD
);
873 * There must be no unprocessed ciphertext.
874 * This happens if the length of the last ciphertext is
875 * not a multiple of the AES block length.
877 if (aes_ctx
->ac_remainder_len
> 0) {
878 if ((aes_ctx
->ac_flags
& CTR_MODE
) == 0)
879 return (CRYPTO_ENCRYPTED_DATA_LEN_RANGE
);
881 ret
= ctr_mode_final((ctr_ctx_t
*)aes_ctx
, data
,
883 if (ret
== CRYPTO_DATA_LEN_RANGE
)
884 ret
= CRYPTO_ENCRYPTED_DATA_LEN_RANGE
;
885 if (ret
!= CRYPTO_SUCCESS
)
890 if (aes_ctx
->ac_flags
& CCM_MODE
) {
892 * This is where all the plaintext is returned, make sure
893 * the plaintext buffer is big enough
895 size_t pt_len
= aes_ctx
->ac_data_len
;
896 if (data
->cd_length
< pt_len
) {
897 data
->cd_length
= pt_len
;
898 return (CRYPTO_BUFFER_TOO_SMALL
);
901 ASSERT(aes_ctx
->ac_processed_data_len
== pt_len
);
902 ASSERT(aes_ctx
->ac_processed_mac_len
== aes_ctx
->ac_mac_len
);
903 saved_offset
= data
->cd_offset
;
904 saved_length
= data
->cd_length
;
905 ret
= ccm_decrypt_final((ccm_ctx_t
*)aes_ctx
, data
,
906 AES_BLOCK_LEN
, aes_encrypt_block
, aes_copy_block
,
908 if (ret
== CRYPTO_SUCCESS
) {
909 data
->cd_length
= data
->cd_offset
- saved_offset
;
911 data
->cd_length
= saved_length
;
914 data
->cd_offset
= saved_offset
;
915 if (ret
!= CRYPTO_SUCCESS
) {
918 } else if (aes_ctx
->ac_flags
& (GCM_MODE
|GMAC_MODE
)) {
920 * This is where all the plaintext is returned, make sure
921 * the plaintext buffer is big enough
923 gcm_ctx_t
*ctx
= (gcm_ctx_t
*)aes_ctx
;
924 size_t pt_len
= ctx
->gcm_processed_data_len
- ctx
->gcm_tag_len
;
926 if (data
->cd_length
< pt_len
) {
927 data
->cd_length
= pt_len
;
928 return (CRYPTO_BUFFER_TOO_SMALL
);
931 saved_offset
= data
->cd_offset
;
932 saved_length
= data
->cd_length
;
933 ret
= gcm_decrypt_final((gcm_ctx_t
*)aes_ctx
, data
,
934 AES_BLOCK_LEN
, aes_encrypt_block
, aes_xor_block
);
935 if (ret
== CRYPTO_SUCCESS
) {
936 data
->cd_length
= data
->cd_offset
- saved_offset
;
938 data
->cd_length
= saved_length
;
941 data
->cd_offset
= saved_offset
;
942 if (ret
!= CRYPTO_SUCCESS
) {
948 if ((aes_ctx
->ac_flags
& (CTR_MODE
|CCM_MODE
|GCM_MODE
|GMAC_MODE
)) == 0) {
952 (void) aes_free_context(ctx
);
954 return (CRYPTO_SUCCESS
);
959 aes_encrypt_atomic(crypto_provider_handle_t provider
,
960 crypto_session_id_t session_id
, crypto_mechanism_t
*mechanism
,
961 crypto_key_t
*key
, crypto_data_t
*plaintext
, crypto_data_t
*ciphertext
,
962 crypto_spi_ctx_template_t
template, crypto_req_handle_t req
)
964 aes_ctx_t aes_ctx
; /* on the stack */
967 size_t length_needed
;
970 AES_ARG_INPLACE(plaintext
, ciphertext
);
973 * CTR, CCM, GCM, and GMAC modes do not require that plaintext
974 * be a multiple of AES block size.
976 switch (mechanism
->cm_type
) {
977 case AES_CTR_MECH_INFO_TYPE
:
978 case AES_CCM_MECH_INFO_TYPE
:
979 case AES_GCM_MECH_INFO_TYPE
:
980 case AES_GMAC_MECH_INFO_TYPE
:
983 if ((plaintext
->cd_length
& (AES_BLOCK_LEN
- 1)) != 0)
984 return (CRYPTO_DATA_LEN_RANGE
);
987 if ((ret
= aes_check_mech_param(mechanism
, NULL
, 0)) != CRYPTO_SUCCESS
)
990 bzero(&aes_ctx
, sizeof (aes_ctx_t
));
992 ret
= aes_common_init_ctx(&aes_ctx
, template, mechanism
, key
,
993 crypto_kmflag(req
), B_TRUE
);
994 if (ret
!= CRYPTO_SUCCESS
)
997 switch (mechanism
->cm_type
) {
998 case AES_CCM_MECH_INFO_TYPE
:
999 length_needed
= plaintext
->cd_length
+ aes_ctx
.ac_mac_len
;
1001 case AES_GMAC_MECH_INFO_TYPE
:
1002 if (plaintext
->cd_length
!= 0)
1003 return (CRYPTO_ARGUMENTS_BAD
);
1005 case AES_GCM_MECH_INFO_TYPE
:
1006 length_needed
= plaintext
->cd_length
+ aes_ctx
.ac_tag_len
;
1009 length_needed
= plaintext
->cd_length
;
1012 /* return size of buffer needed to store output */
1013 if (ciphertext
->cd_length
< length_needed
) {
1014 ciphertext
->cd_length
= length_needed
;
1015 ret
= CRYPTO_BUFFER_TOO_SMALL
;
1019 saved_offset
= ciphertext
->cd_offset
;
1020 saved_length
= ciphertext
->cd_length
;
1023 * Do an update on the specified input data.
1025 switch (plaintext
->cd_format
) {
1026 case CRYPTO_DATA_RAW
:
1027 ret
= crypto_update_iov(&aes_ctx
, plaintext
, ciphertext
,
1028 aes_encrypt_contiguous_blocks
, aes_copy_block64
);
1030 case CRYPTO_DATA_UIO
:
1031 ret
= crypto_update_uio(&aes_ctx
, plaintext
, ciphertext
,
1032 aes_encrypt_contiguous_blocks
, aes_copy_block64
);
1034 case CRYPTO_DATA_MBLK
:
1035 ret
= crypto_update_mp(&aes_ctx
, plaintext
, ciphertext
,
1036 aes_encrypt_contiguous_blocks
, aes_copy_block64
);
1039 ret
= CRYPTO_ARGUMENTS_BAD
;
1042 if (ret
== CRYPTO_SUCCESS
) {
1043 if (mechanism
->cm_type
== AES_CCM_MECH_INFO_TYPE
) {
1044 ret
= ccm_encrypt_final((ccm_ctx_t
*)&aes_ctx
,
1045 ciphertext
, AES_BLOCK_LEN
, aes_encrypt_block
,
1047 if (ret
!= CRYPTO_SUCCESS
)
1049 ASSERT(aes_ctx
.ac_remainder_len
== 0);
1050 } else if (mechanism
->cm_type
== AES_GCM_MECH_INFO_TYPE
||
1051 mechanism
->cm_type
== AES_GMAC_MECH_INFO_TYPE
) {
1052 ret
= gcm_encrypt_final((gcm_ctx_t
*)&aes_ctx
,
1053 ciphertext
, AES_BLOCK_LEN
, aes_encrypt_block
,
1054 aes_copy_block
, aes_xor_block
);
1055 if (ret
!= CRYPTO_SUCCESS
)
1057 ASSERT(aes_ctx
.ac_remainder_len
== 0);
1058 } else if (mechanism
->cm_type
== AES_CTR_MECH_INFO_TYPE
) {
1059 if (aes_ctx
.ac_remainder_len
> 0) {
1060 ret
= ctr_mode_final((ctr_ctx_t
*)&aes_ctx
,
1061 ciphertext
, aes_encrypt_block
);
1062 if (ret
!= CRYPTO_SUCCESS
)
1066 ASSERT(aes_ctx
.ac_remainder_len
== 0);
1069 if (plaintext
!= ciphertext
) {
1070 ciphertext
->cd_length
=
1071 ciphertext
->cd_offset
- saved_offset
;
1074 ciphertext
->cd_length
= saved_length
;
1076 ciphertext
->cd_offset
= saved_offset
;
1079 if (aes_ctx
.ac_flags
& PROVIDER_OWNS_KEY_SCHEDULE
) {
1080 bzero(aes_ctx
.ac_keysched
, aes_ctx
.ac_keysched_len
);
1081 kmem_free(aes_ctx
.ac_keysched
, aes_ctx
.ac_keysched_len
);
1089 aes_decrypt_atomic(crypto_provider_handle_t provider
,
1090 crypto_session_id_t session_id
, crypto_mechanism_t
*mechanism
,
1091 crypto_key_t
*key
, crypto_data_t
*ciphertext
, crypto_data_t
*plaintext
,
1092 crypto_spi_ctx_template_t
template, crypto_req_handle_t req
)
1094 aes_ctx_t aes_ctx
; /* on the stack */
1096 size_t saved_length
;
1097 size_t length_needed
;
1100 AES_ARG_INPLACE(ciphertext
, plaintext
);
1103 * CCM, GCM, CTR, and GMAC modes do not require that ciphertext
1104 * be a multiple of AES block size.
1106 switch (mechanism
->cm_type
) {
1107 case AES_CTR_MECH_INFO_TYPE
:
1108 case AES_CCM_MECH_INFO_TYPE
:
1109 case AES_GCM_MECH_INFO_TYPE
:
1110 case AES_GMAC_MECH_INFO_TYPE
:
1113 if ((ciphertext
->cd_length
& (AES_BLOCK_LEN
- 1)) != 0)
1114 return (CRYPTO_ENCRYPTED_DATA_LEN_RANGE
);
1117 if ((ret
= aes_check_mech_param(mechanism
, NULL
, 0)) != CRYPTO_SUCCESS
)
1120 bzero(&aes_ctx
, sizeof (aes_ctx_t
));
1122 ret
= aes_common_init_ctx(&aes_ctx
, template, mechanism
, key
,
1123 crypto_kmflag(req
), B_FALSE
);
1124 if (ret
!= CRYPTO_SUCCESS
)
1127 switch (mechanism
->cm_type
) {
1128 case AES_CCM_MECH_INFO_TYPE
:
1129 length_needed
= aes_ctx
.ac_data_len
;
1131 case AES_GCM_MECH_INFO_TYPE
:
1132 length_needed
= ciphertext
->cd_length
- aes_ctx
.ac_tag_len
;
1134 case AES_GMAC_MECH_INFO_TYPE
:
1135 if (plaintext
->cd_length
!= 0)
1136 return (CRYPTO_ARGUMENTS_BAD
);
1140 length_needed
= ciphertext
->cd_length
;
1143 /* return size of buffer needed to store output */
1144 if (plaintext
->cd_length
< length_needed
) {
1145 plaintext
->cd_length
= length_needed
;
1146 ret
= CRYPTO_BUFFER_TOO_SMALL
;
1150 saved_offset
= plaintext
->cd_offset
;
1151 saved_length
= plaintext
->cd_length
;
1153 if (mechanism
->cm_type
== AES_GCM_MECH_INFO_TYPE
||
1154 mechanism
->cm_type
== AES_GMAC_MECH_INFO_TYPE
)
1155 gcm_set_kmflag((gcm_ctx_t
*)&aes_ctx
, crypto_kmflag(req
));
1158 * Do an update on the specified input data.
1160 switch (ciphertext
->cd_format
) {
1161 case CRYPTO_DATA_RAW
:
1162 ret
= crypto_update_iov(&aes_ctx
, ciphertext
, plaintext
,
1163 aes_decrypt_contiguous_blocks
, aes_copy_block64
);
1165 case CRYPTO_DATA_UIO
:
1166 ret
= crypto_update_uio(&aes_ctx
, ciphertext
, plaintext
,
1167 aes_decrypt_contiguous_blocks
, aes_copy_block64
);
1169 case CRYPTO_DATA_MBLK
:
1170 ret
= crypto_update_mp(&aes_ctx
, ciphertext
, plaintext
,
1171 aes_decrypt_contiguous_blocks
, aes_copy_block64
);
1174 ret
= CRYPTO_ARGUMENTS_BAD
;
1177 if (ret
== CRYPTO_SUCCESS
) {
1178 if (mechanism
->cm_type
== AES_CCM_MECH_INFO_TYPE
) {
1179 ASSERT(aes_ctx
.ac_processed_data_len
1180 == aes_ctx
.ac_data_len
);
1181 ASSERT(aes_ctx
.ac_processed_mac_len
1182 == aes_ctx
.ac_mac_len
);
1183 ret
= ccm_decrypt_final((ccm_ctx_t
*)&aes_ctx
,
1184 plaintext
, AES_BLOCK_LEN
, aes_encrypt_block
,
1185 aes_copy_block
, aes_xor_block
);
1186 ASSERT(aes_ctx
.ac_remainder_len
== 0);
1187 if ((ret
== CRYPTO_SUCCESS
) &&
1188 (ciphertext
!= plaintext
)) {
1189 plaintext
->cd_length
=
1190 plaintext
->cd_offset
- saved_offset
;
1192 plaintext
->cd_length
= saved_length
;
1194 } else if (mechanism
->cm_type
== AES_GCM_MECH_INFO_TYPE
||
1195 mechanism
->cm_type
== AES_GMAC_MECH_INFO_TYPE
) {
1196 ret
= gcm_decrypt_final((gcm_ctx_t
*)&aes_ctx
,
1197 plaintext
, AES_BLOCK_LEN
, aes_encrypt_block
,
1199 ASSERT(aes_ctx
.ac_remainder_len
== 0);
1200 if ((ret
== CRYPTO_SUCCESS
) &&
1201 (ciphertext
!= plaintext
)) {
1202 plaintext
->cd_length
=
1203 plaintext
->cd_offset
- saved_offset
;
1205 plaintext
->cd_length
= saved_length
;
1207 } else if (mechanism
->cm_type
!= AES_CTR_MECH_INFO_TYPE
) {
1208 ASSERT(aes_ctx
.ac_remainder_len
== 0);
1209 if (ciphertext
!= plaintext
)
1210 plaintext
->cd_length
=
1211 plaintext
->cd_offset
- saved_offset
;
1213 if (aes_ctx
.ac_remainder_len
> 0) {
1214 ret
= ctr_mode_final((ctr_ctx_t
*)&aes_ctx
,
1215 plaintext
, aes_encrypt_block
);
1216 if (ret
== CRYPTO_DATA_LEN_RANGE
)
1217 ret
= CRYPTO_ENCRYPTED_DATA_LEN_RANGE
;
1218 if (ret
!= CRYPTO_SUCCESS
)
1221 if (ciphertext
!= plaintext
)
1222 plaintext
->cd_length
=
1223 plaintext
->cd_offset
- saved_offset
;
1226 plaintext
->cd_length
= saved_length
;
1228 plaintext
->cd_offset
= saved_offset
;
1231 if (aes_ctx
.ac_flags
& PROVIDER_OWNS_KEY_SCHEDULE
) {
1232 bzero(aes_ctx
.ac_keysched
, aes_ctx
.ac_keysched_len
);
1233 kmem_free(aes_ctx
.ac_keysched
, aes_ctx
.ac_keysched_len
);
1236 if (aes_ctx
.ac_flags
& CCM_MODE
) {
1237 if (aes_ctx
.ac_pt_buf
!= NULL
) {
1238 kmem_free(aes_ctx
.ac_pt_buf
, aes_ctx
.ac_data_len
);
1240 } else if (aes_ctx
.ac_flags
& (GCM_MODE
|GMAC_MODE
)) {
1241 if (((gcm_ctx_t
*)&aes_ctx
)->gcm_pt_buf
!= NULL
) {
1242 kmem_free(((gcm_ctx_t
*)&aes_ctx
)->gcm_pt_buf
,
1243 ((gcm_ctx_t
*)&aes_ctx
)->gcm_pt_buf_len
);
/*
 * KCF software provider context template entry points.
 */
1255 aes_create_ctx_template(crypto_provider_handle_t provider
,
1256 crypto_mechanism_t
*mechanism
, crypto_key_t
*key
,
1257 crypto_spi_ctx_template_t
*tmpl
, size_t *tmpl_size
, crypto_req_handle_t req
)
1263 if (mechanism
->cm_type
!= AES_ECB_MECH_INFO_TYPE
&&
1264 mechanism
->cm_type
!= AES_CBC_MECH_INFO_TYPE
&&
1265 mechanism
->cm_type
!= AES_CTR_MECH_INFO_TYPE
&&
1266 mechanism
->cm_type
!= AES_CCM_MECH_INFO_TYPE
&&
1267 mechanism
->cm_type
!= AES_GCM_MECH_INFO_TYPE
&&
1268 mechanism
->cm_type
!= AES_GMAC_MECH_INFO_TYPE
)
1269 return (CRYPTO_MECHANISM_INVALID
);
1271 if ((keysched
= aes_alloc_keysched(&size
,
1272 crypto_kmflag(req
))) == NULL
) {
1273 return (CRYPTO_HOST_MEMORY
);
1277 * Initialize key schedule. Key length information is stored
1280 if ((rv
= init_keysched(key
, keysched
)) != CRYPTO_SUCCESS
) {
1281 bzero(keysched
, size
);
1282 kmem_free(keysched
, size
);
1289 return (CRYPTO_SUCCESS
);
1294 aes_free_context(crypto_ctx_t
*ctx
)
1296 aes_ctx_t
*aes_ctx
= ctx
->cc_provider_private
;
1298 if (aes_ctx
!= NULL
) {
1299 if (aes_ctx
->ac_flags
& PROVIDER_OWNS_KEY_SCHEDULE
) {
1300 ASSERT(aes_ctx
->ac_keysched_len
!= 0);
1301 bzero(aes_ctx
->ac_keysched
, aes_ctx
->ac_keysched_len
);
1302 kmem_free(aes_ctx
->ac_keysched
,
1303 aes_ctx
->ac_keysched_len
);
1305 crypto_free_mode_ctx(aes_ctx
);
1306 ctx
->cc_provider_private
= NULL
;
1309 return (CRYPTO_SUCCESS
);
1314 aes_common_init_ctx(aes_ctx_t
*aes_ctx
, crypto_spi_ctx_template_t
*template,
1315 crypto_mechanism_t
*mechanism
, crypto_key_t
*key
, int kmflag
,
1316 boolean_t is_encrypt_init
)
1318 int rv
= CRYPTO_SUCCESS
;
1322 if (template == NULL
) {
1323 if ((keysched
= aes_alloc_keysched(&size
, kmflag
)) == NULL
)
1324 return (CRYPTO_HOST_MEMORY
);
1326 * Initialize key schedule.
1327 * Key length is stored in the key.
1329 if ((rv
= init_keysched(key
, keysched
)) != CRYPTO_SUCCESS
) {
1330 kmem_free(keysched
, size
);
1334 aes_ctx
->ac_flags
|= PROVIDER_OWNS_KEY_SCHEDULE
;
1335 aes_ctx
->ac_keysched_len
= size
;
1337 keysched
= template;
1339 aes_ctx
->ac_keysched
= keysched
;
1341 switch (mechanism
->cm_type
) {
1342 case AES_CBC_MECH_INFO_TYPE
:
1343 rv
= cbc_init_ctx((cbc_ctx_t
*)aes_ctx
, mechanism
->cm_param
,
1344 mechanism
->cm_param_len
, AES_BLOCK_LEN
, aes_copy_block64
);
1346 case AES_CTR_MECH_INFO_TYPE
: {
1347 CK_AES_CTR_PARAMS
*pp
;
1349 if (mechanism
->cm_param
== NULL
||
1350 mechanism
->cm_param_len
!= sizeof (CK_AES_CTR_PARAMS
)) {
1351 return (CRYPTO_MECHANISM_PARAM_INVALID
);
1353 pp
= (CK_AES_CTR_PARAMS
*)(void *)mechanism
->cm_param
;
1354 rv
= ctr_init_ctx((ctr_ctx_t
*)aes_ctx
, pp
->ulCounterBits
,
1355 pp
->cb
, aes_copy_block
);
1358 case AES_CCM_MECH_INFO_TYPE
:
1359 if (mechanism
->cm_param
== NULL
||
1360 mechanism
->cm_param_len
!= sizeof (CK_AES_CCM_PARAMS
)) {
1361 return (CRYPTO_MECHANISM_PARAM_INVALID
);
1363 rv
= ccm_init_ctx((ccm_ctx_t
*)aes_ctx
, mechanism
->cm_param
,
1364 kmflag
, is_encrypt_init
, AES_BLOCK_LEN
, aes_encrypt_block
,
1367 case AES_GCM_MECH_INFO_TYPE
:
1368 if (mechanism
->cm_param
== NULL
||
1369 mechanism
->cm_param_len
!= sizeof (CK_AES_GCM_PARAMS
)) {
1370 return (CRYPTO_MECHANISM_PARAM_INVALID
);
1372 rv
= gcm_init_ctx((gcm_ctx_t
*)aes_ctx
, mechanism
->cm_param
,
1373 AES_BLOCK_LEN
, aes_encrypt_block
, aes_copy_block
,
1376 case AES_GMAC_MECH_INFO_TYPE
:
1377 if (mechanism
->cm_param
== NULL
||
1378 mechanism
->cm_param_len
!= sizeof (CK_AES_GMAC_PARAMS
)) {
1379 return (CRYPTO_MECHANISM_PARAM_INVALID
);
1381 rv
= gmac_init_ctx((gcm_ctx_t
*)aes_ctx
, mechanism
->cm_param
,
1382 AES_BLOCK_LEN
, aes_encrypt_block
, aes_copy_block
,
1385 case AES_ECB_MECH_INFO_TYPE
:
1386 aes_ctx
->ac_flags
|= ECB_MODE
;
1389 if (rv
!= CRYPTO_SUCCESS
) {
1390 if (aes_ctx
->ac_flags
& PROVIDER_OWNS_KEY_SCHEDULE
) {
1391 bzero(keysched
, size
);
1392 kmem_free(keysched
, size
);
1400 process_gmac_mech(crypto_mechanism_t
*mech
, crypto_data_t
*data
,
1401 CK_AES_GCM_PARAMS
*gcm_params
)
1403 /* LINTED: pointer alignment */
1404 CK_AES_GMAC_PARAMS
*params
= (CK_AES_GMAC_PARAMS
*)mech
->cm_param
;
1406 if (mech
->cm_type
!= AES_GMAC_MECH_INFO_TYPE
)
1407 return (CRYPTO_MECHANISM_INVALID
);
1409 if (mech
->cm_param_len
!= sizeof (CK_AES_GMAC_PARAMS
))
1410 return (CRYPTO_MECHANISM_PARAM_INVALID
);
1412 if (params
->pIv
== NULL
)
1413 return (CRYPTO_MECHANISM_PARAM_INVALID
);
1415 gcm_params
->pIv
= params
->pIv
;
1416 gcm_params
->ulIvLen
= AES_GMAC_IV_LEN
;
1417 gcm_params
->ulTagBits
= AES_GMAC_TAG_BITS
;
1420 return (CRYPTO_SUCCESS
);
1422 if (data
->cd_format
!= CRYPTO_DATA_RAW
)
1423 return (CRYPTO_ARGUMENTS_BAD
);
1425 gcm_params
->pAAD
= (uchar_t
*)data
->cd_raw
.iov_base
;
1426 gcm_params
->ulAADLen
= data
->cd_length
;
1427 return (CRYPTO_SUCCESS
);
1431 aes_mac_atomic(crypto_provider_handle_t provider
,
1432 crypto_session_id_t session_id
, crypto_mechanism_t
*mechanism
,
1433 crypto_key_t
*key
, crypto_data_t
*data
, crypto_data_t
*mac
,
1434 crypto_spi_ctx_template_t
template, crypto_req_handle_t req
)
1436 CK_AES_GCM_PARAMS gcm_params
;
1437 crypto_mechanism_t gcm_mech
;
1440 if ((rv
= process_gmac_mech(mechanism
, data
, &gcm_params
))
1444 gcm_mech
.cm_type
= AES_GCM_MECH_INFO_TYPE
;
1445 gcm_mech
.cm_param_len
= sizeof (CK_AES_GCM_PARAMS
);
1446 gcm_mech
.cm_param
= (char *)&gcm_params
;
1448 return (aes_encrypt_atomic(provider
, session_id
, &gcm_mech
,
1449 key
, &null_crypto_data
, mac
, template, req
));
1453 aes_mac_verify_atomic(crypto_provider_handle_t provider
,
1454 crypto_session_id_t session_id
, crypto_mechanism_t
*mechanism
,
1455 crypto_key_t
*key
, crypto_data_t
*data
, crypto_data_t
*mac
,
1456 crypto_spi_ctx_template_t
template, crypto_req_handle_t req
)
1458 CK_AES_GCM_PARAMS gcm_params
;
1459 crypto_mechanism_t gcm_mech
;
1462 if ((rv
= process_gmac_mech(mechanism
, data
, &gcm_params
))
1466 gcm_mech
.cm_type
= AES_GCM_MECH_INFO_TYPE
;
1467 gcm_mech
.cm_param_len
= sizeof (CK_AES_GCM_PARAMS
);
1468 gcm_mech
.cm_param
= (char *)&gcm_params
;
1470 return (aes_decrypt_atomic(provider
, session_id
, &gcm_mech
,
1471 key
, mac
, &null_crypto_data
, template, req
));