2 * Copyright (c) 2003, PADL Software Pty Ltd.
5 * Redistribution and use in source and binary forms, with or without
6 * modification, are permitted provided that the following conditions
9 * 1. Redistributions of source code must retain the above copyright
10 * notice, this list of conditions and the following disclaimer.
12 * 2. Redistributions in binary form must reproduce the above copyright
13 * notice, this list of conditions and the following disclaimer in the
14 * documentation and/or other materials provided with the distribution.
16 * 3. Neither the name of PADL Software nor the names of its contributors
17 * may be used to endorse or promote products derived from this software
18 * without specific prior written permission.
20 * THIS SOFTWARE IS PROVIDED BY PADL SOFTWARE AND CONTRIBUTORS ``AS IS'' AND
21 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
22 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
23 * ARE DISCLAIMED. IN NO EVENT SHALL PADL SOFTWARE OR CONTRIBUTORS BE LIABLE
24 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
25 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
26 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
27 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
28 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
29 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
30 * SUCH DAMAGE.
33 #include "gsskrb5_locl.h"
36 * Implementation of RFC 4121
39 #define CFXSentByAcceptor (1 << 0)
40 #define CFXSealed (1 << 1)
41 #define CFXAcceptorSubkey (1 << 2)
44 _gsskrb5cfx_wrap_length_cfx(krb5_context context
,
48 size_t *output_length
,
55 /* 16-byte header is always first */
56 *output_length
= sizeof(gss_cfx_wrap_token_desc
);
59 ret
= krb5_crypto_get_checksum_type(context
, crypto
, &type
);
63 ret
= krb5_checksumsize(context
, type
, cksumsize
);
70 /* Header is concatenated with data before encryption */
71 input_length
+= sizeof(gss_cfx_wrap_token_desc
);
73 ret
= krb5_crypto_getpadsize(context
, crypto
, &padsize
);
79 *padlength
= padsize
- (input_length
% padsize
);
81 /* We add the pad ourselves (noted here for completeness only) */
82 input_length
+= *padlength
;
85 *output_length
+= krb5_get_wrapped_length(context
,
86 crypto
, input_length
);
88 /* Checksum is concatenated with data */
89 *output_length
+= input_length
+ *cksumsize
;
92 assert(*output_length
> input_length
);
98 _gssapi_wrap_size_cfx(OM_uint32
*minor_status
,
99 const gsskrb5_ctx ctx
,
100 krb5_context context
,
103 OM_uint32 req_output_size
,
104 OM_uint32
*max_input_size
)
110 /* 16-byte header is always first */
111 if (req_output_size
< 16)
113 req_output_size
-= 16;
116 size_t wrapped_size
, sz
;
118 wrapped_size
= req_output_size
+ 1;
121 sz
= krb5_get_wrapped_length(context
,
122 ctx
->crypto
, wrapped_size
);
123 } while (wrapped_size
&& sz
> req_output_size
);
124 if (wrapped_size
== 0)
128 if (wrapped_size
< 16)
133 *max_input_size
= wrapped_size
;
138 ret
= krb5_crypto_get_checksum_type(context
, ctx
->crypto
, &type
);
142 ret
= krb5_checksumsize(context
, type
, &cksumsize
);
146 if (req_output_size
< cksumsize
)
149 /* Checksum is concatenated with data */
150 *max_input_size
= req_output_size
- cksumsize
;
157 * Rotate "rrc" bytes to the front or back
160 static krb5_error_code
161 rrc_rotate(void *data
, size_t len
, uint16_t rrc
, krb5_boolean unrotate
)
163 u_char
*tmp
, buf
[256];
176 if (rrc
<= sizeof(buf
)) {
185 memcpy(tmp
, data
, rrc
);
186 memmove(data
, (u_char
*)data
+ rrc
, left
);
187 memcpy((u_char
*)data
+ left
, tmp
, rrc
);
189 memcpy(tmp
, (u_char
*)data
+ left
, rrc
);
190 memmove((u_char
*)data
+ rrc
, data
, left
);
191 memcpy(data
, tmp
, rrc
);
194 if (rrc
> sizeof(buf
))
200 gss_iov_buffer_desc
*
201 _gk_find_buffer(gss_iov_buffer_desc
*iov
, int iov_count
, OM_uint32 type
)
205 for (i
= 0; i
< iov_count
; i
++)
206 if (type
== GSS_IOV_BUFFER_TYPE(iov
[i
].type
))
212 allocate_buffer(OM_uint32
*minor_status
, gss_iov_buffer_desc
*buffer
, size_t size
)
214 if (buffer
->type
& GSS_IOV_BUFFER_TYPE_FLAG_ALLOCATED
) {
215 if (buffer
->buffer
.length
== size
)
216 return GSS_S_COMPLETE
;
217 free(buffer
->buffer
.value
);
220 buffer
->buffer
.value
= malloc(size
);
221 buffer
->buffer
.length
= size
;
222 if (buffer
->buffer
.value
== NULL
) {
223 *minor_status
= ENOMEM
;
224 return GSS_S_FAILURE
;
226 buffer
->type
|= GSS_IOV_BUFFER_TYPE_FLAG_ALLOCATED
;
228 return GSS_S_COMPLETE
;
234 _gssapi_wrap_cfx_iov(OM_uint32
*minor_status
,
236 krb5_context context
,
239 gss_iov_buffer_desc
*iov
,
242 OM_uint32 major_status
, junk
;
243 gss_iov_buffer_desc
*header
, *trailer
, *padding
;
244 size_t gsshsize
, k5hsize
;
245 size_t gsstsize
, k5tsize
;
246 size_t i
, padlength
, rrc
= 0, ec
= 0;
247 gss_cfx_wrap_token token
;
251 krb5_crypto_iov
*data
= NULL
;
252 int paddingoffset
= 0;
254 header
= _gk_find_buffer(iov
, iov_count
, GSS_IOV_BUFFER_TYPE_HEADER
);
255 if (header
== NULL
) {
256 *minor_status
= EINVAL
;
257 return GSS_S_FAILURE
;
260 krb5_crypto_length(context
, ctx
->crypto
, KRB5_CRYPTO_TYPE_PADDING
, &padlength
);
262 padding
= _gk_find_buffer(iov
, iov_count
, GSS_IOV_BUFFER_TYPE_PADDING
);
263 if (padlength
!= 0 && padding
== NULL
) {
264 *minor_status
= EINVAL
;
265 return GSS_S_FAILURE
;
268 trailer
= _gk_find_buffer(iov
, iov_count
, GSS_IOV_BUFFER_TYPE_TRAILER
);
273 krb5_crypto_length(context
, ctx
->crypto
, KRB5_CRYPTO_TYPE_TRAILER
, &k5tsize
);
274 krb5_crypto_length(context
, ctx
->crypto
, KRB5_CRYPTO_TYPE_HEADER
, &k5hsize
);
276 gsshsize
= k5hsize
+ sizeof(*token
);
277 gsstsize
= k5tsize
+ sizeof(*token
); /* encrypted token stored in trailer */
281 krb5_crypto_length(context
, ctx
->crypto
, KRB5_CRYPTO_TYPE_CHECKSUM
, &k5tsize
);
283 gsshsize
= sizeof(*token
);
291 if (trailer
== NULL
) {
292 /* conf_req_flag=0 doesn't support DCE_STYLE */
293 if (conf_req_flag
== 0) {
294 *minor_status
= EINVAL
;
295 major_status
= GSS_S_FAILURE
;
299 if (IS_DCE_STYLE(ctx
))
301 gsshsize
+= gsstsize
;
303 } else if (GSS_IOV_BUFFER_FLAGS(trailer
->type
) & GSS_IOV_BUFFER_TYPE_FLAG_ALLOCATE
) {
304 major_status
= allocate_buffer(minor_status
, trailer
, gsstsize
);
307 } else if (trailer
->buffer
.length
< gsstsize
) {
308 *minor_status
= KRB5_BAD_MSIZE
;
309 major_status
= GSS_S_FAILURE
;
312 trailer
->buffer
.length
= gsstsize
;
318 if (GSS_IOV_BUFFER_FLAGS(header
->type
) & GSS_IOV_BUFFER_TYPE_FLAG_ALLOCATE
) {
319 major_status
= allocate_buffer(minor_status
, header
, gsshsize
);
320 if (major_status
!= GSS_S_COMPLETE
)
322 } else if (header
->buffer
.length
< gsshsize
) {
323 *minor_status
= KRB5_BAD_MSIZE
;
324 major_status
= GSS_S_FAILURE
;
327 header
->buffer
.length
= gsshsize
;
329 token
= (gss_cfx_wrap_token
)header
->buffer
.value
;
331 token
->TOK_ID
[0] = 0x05;
332 token
->TOK_ID
[1] = 0x04;
334 token
->Filler
= 0xFF;
336 if (ctx
->more_flags
& ACCEPTOR_SUBKEY
)
337 token
->Flags
|= CFXAcceptorSubkey
;
339 if (ctx
->more_flags
& LOCAL
)
340 usage
= KRB5_KU_USAGE_INITIATOR_SEAL
;
342 usage
= KRB5_KU_USAGE_ACCEPTOR_SEAL
;
346 * In Wrap tokens with confidentiality, the EC field is
347 * used to encode the size (in bytes) of the random filler.
349 token
->Flags
|= CFXSealed
;
350 token
->EC
[0] = (padlength
>> 8) & 0xFF;
351 token
->EC
[1] = (padlength
>> 0) & 0xFF;
355 * In Wrap tokens without confidentiality, the EC field is
356 * used to encode the size (in bytes) of the trailing
359 * This is not used in the checksum calcuation itself,
360 * because the checksum length could potentially vary
361 * depending on the data length.
368 * In Wrap tokens that provide for confidentiality, the RRC
369 * field in the header contains the hex value 00 00 before
372 * In Wrap tokens that do not provide for confidentiality,
373 * both the EC and RRC fields in the appended checksum
374 * contain the hex value 00 00 for the purpose of calculating
380 HEIMDAL_MUTEX_lock(&ctx
->ctx_id_mutex
);
381 krb5_auth_con_getlocalseqnumber(context
,
384 _gsskrb5_encode_be_om_uint32(0, &token
->SND_SEQ
[0]);
385 _gsskrb5_encode_be_om_uint32(seq_number
, &token
->SND_SEQ
[4]);
386 krb5_auth_con_setlocalseqnumber(context
,
389 HEIMDAL_MUTEX_unlock(&ctx
->ctx_id_mutex
);
391 data
= calloc(iov_count
+ 3, sizeof(data
[0]));
393 *minor_status
= ENOMEM
;
394 major_status
= GSS_S_FAILURE
;
402 {"header" | encrypt(plaintext-data | padding | E"header")}
404 Expanded, this is with with RRC = 0:
406 {"header" | krb5-header | plaintext-data | padding | E"header" | krb5-trailer }
408 In DCE-RPC mode == no trailer: RRC = gss "trailer" == length(padding | E"header" | krb5-trailer)
410 {"header" | padding | E"header" | krb5-trailer | krb5-header | plaintext-data }
414 data
[i
].flags
= KRB5_CRYPTO_TYPE_HEADER
;
415 data
[i
].data
.data
= ((uint8_t *)header
->buffer
.value
) + header
->buffer
.length
- k5hsize
;
416 data
[i
].data
.length
= k5hsize
;
418 for (i
= 1; i
< iov_count
+ 1; i
++) {
419 switch (GSS_IOV_BUFFER_TYPE(iov
[i
- 1].type
)) {
420 case GSS_IOV_BUFFER_TYPE_DATA
:
421 data
[i
].flags
= KRB5_CRYPTO_TYPE_DATA
;
423 case GSS_IOV_BUFFER_TYPE_PADDING
:
424 data
[i
].flags
= KRB5_CRYPTO_TYPE_PADDING
;
427 case GSS_IOV_BUFFER_TYPE_SIGN_ONLY
:
428 data
[i
].flags
= KRB5_CRYPTO_TYPE_SIGN_ONLY
;
431 data
[i
].flags
= KRB5_CRYPTO_TYPE_EMPTY
;
434 data
[i
].data
.length
= iov
[i
- 1].buffer
.length
;
435 data
[i
].data
.data
= iov
[i
- 1].buffer
.value
;
439 * Any necessary padding is added here to ensure that the
440 * encrypted token header is always at the end of the
444 /* XXX KRB5_CRYPTO_TYPE_PADDING */
446 /* encrypted CFX header in trailer (or after the header if in
447 DCE mode). Copy in header into E"header"
449 data
[i
].flags
= KRB5_CRYPTO_TYPE_DATA
;
451 data
[i
].data
.data
= trailer
->buffer
.value
;
453 data
[i
].data
.data
= ((uint8_t *)header
->buffer
.value
) + header
->buffer
.length
- k5hsize
- k5tsize
- sizeof(*token
);
455 data
[i
].data
.length
= sizeof(*token
);
456 memcpy(data
[i
].data
.data
, token
, sizeof(*token
));
459 /* Kerberos trailer comes after the gss trailer */
460 data
[i
].flags
= KRB5_CRYPTO_TYPE_TRAILER
;
461 data
[i
].data
.data
= ((uint8_t *)data
[i
-1].data
.data
) + sizeof(*token
);
462 data
[i
].data
.length
= k5tsize
;
465 ret
= krb5_encrypt_iov_ivec(context
, ctx
->crypto
, usage
, data
, i
, NULL
);
468 major_status
= GSS_S_FAILURE
;
473 token
->RRC
[0] = (rrc
>> 8) & 0xFF;
474 token
->RRC
[1] = (rrc
>> 0) & 0xFF;
478 padding
->buffer
.length
= data
[paddingoffset
].data
.length
;
484 {data | "header" | gss-trailer (krb5 checksum)
490 for (i
= 0; i
< iov_count
; i
++) {
491 switch (GSS_IOV_BUFFER_TYPE(iov
[i
].type
)) {
492 case GSS_IOV_BUFFER_TYPE_DATA
:
493 case GSS_IOV_BUFFER_TYPE_PADDING
:
494 data
[i
].flags
= KRB5_CRYPTO_TYPE_DATA
;
496 case GSS_IOV_BUFFER_TYPE_SIGN_ONLY
:
497 data
[i
].flags
= KRB5_CRYPTO_TYPE_SIGN_ONLY
;
500 data
[i
].flags
= KRB5_CRYPTO_TYPE_EMPTY
;
503 data
[i
].data
.length
= iov
[i
].buffer
.length
;
504 data
[i
].data
.data
= iov
[i
].buffer
.value
;
507 data
[i
].flags
= KRB5_CRYPTO_TYPE_DATA
;
508 data
[i
].data
.data
= header
->buffer
.value
;
509 data
[i
].data
.length
= header
->buffer
.length
;
512 data
[i
].flags
= KRB5_CRYPTO_TYPE_CHECKSUM
;
513 data
[i
].data
.data
= trailer
->buffer
.value
;
514 data
[i
].data
.length
= trailer
->buffer
.length
;
517 ret
= krb5_create_checksum_iov(context
, ctx
->crypto
, usage
, data
, i
, NULL
);
520 major_status
= GSS_S_FAILURE
;
524 token
->EC
[0] = (trailer
->buffer
.length
>> 8) & 0xFF;
525 token
->EC
[1] = (trailer
->buffer
.length
>> 0) & 0xFF;
528 if (conf_state
!= NULL
)
529 *conf_state
= conf_req_flag
;
534 return GSS_S_COMPLETE
;
540 gss_release_iov_buffer(&junk
, iov
, iov_count
);
545 /* This is slowpath */
547 unrotate_iov(OM_uint32
*minor_status
, size_t rrc
, gss_iov_buffer_desc
*iov
, int iov_count
)
550 size_t len
= 0, skip
;
553 for (i
= 0; i
< iov_count
; i
++)
554 if (GSS_IOV_BUFFER_TYPE(iov
[i
].type
) == GSS_IOV_BUFFER_TYPE_DATA
||
555 GSS_IOV_BUFFER_TYPE(iov
[i
].type
) == GSS_IOV_BUFFER_TYPE_PADDING
||
556 GSS_IOV_BUFFER_TYPE(iov
[i
].type
) == GSS_IOV_BUFFER_TYPE_TRAILER
)
557 len
+= iov
[i
].buffer
.length
;
561 *minor_status
= ENOMEM
;
562 return GSS_S_FAILURE
;
568 for (i
= 0; i
< iov_count
; i
++) {
569 if (GSS_IOV_BUFFER_TYPE(iov
[i
].type
) == GSS_IOV_BUFFER_TYPE_DATA
||
570 GSS_IOV_BUFFER_TYPE(iov
[i
].type
) == GSS_IOV_BUFFER_TYPE_PADDING
||
571 GSS_IOV_BUFFER_TYPE(iov
[i
].type
) == GSS_IOV_BUFFER_TYPE_TRAILER
)
573 memcpy(q
, iov
[i
].buffer
.value
, iov
[i
].buffer
.length
);
574 q
+= iov
[i
].buffer
.length
;
577 assert((q
- p
) == len
);
579 /* unrotate first part */
582 for (i
= 0; i
< iov_count
; i
++) {
583 if (GSS_IOV_BUFFER_TYPE(iov
[i
].type
) == GSS_IOV_BUFFER_TYPE_DATA
||
584 GSS_IOV_BUFFER_TYPE(iov
[i
].type
) == GSS_IOV_BUFFER_TYPE_PADDING
||
585 GSS_IOV_BUFFER_TYPE(iov
[i
].type
) == GSS_IOV_BUFFER_TYPE_TRAILER
)
587 if (iov
[i
].buffer
.length
<= skip
) {
588 skip
-= iov
[i
].buffer
.length
;
590 memcpy(((uint8_t *)iov
[i
].buffer
.value
) + skip
, q
, iov
[i
].buffer
.length
- skip
);
591 q
+= iov
[i
].buffer
.length
- skip
;
599 for (i
= 0; i
< iov_count
; i
++) {
600 if (GSS_IOV_BUFFER_TYPE(iov
[i
].type
) == GSS_IOV_BUFFER_TYPE_DATA
||
601 GSS_IOV_BUFFER_TYPE(iov
[i
].type
) == GSS_IOV_BUFFER_TYPE_PADDING
||
602 GSS_IOV_BUFFER_TYPE(iov
[i
].type
) == GSS_IOV_BUFFER_TYPE_TRAILER
)
604 memcpy(q
, iov
[i
].buffer
.value
, MIN(iov
[i
].buffer
.length
, skip
));
605 if (iov
[i
].buffer
.length
> skip
)
607 skip
-= iov
[i
].buffer
.length
;
608 q
+= iov
[i
].buffer
.length
;
611 return GSS_S_COMPLETE
;
616 _gssapi_unwrap_cfx_iov(OM_uint32
*minor_status
,
618 krb5_context context
,
620 gss_qop_t
*qop_state
,
621 gss_iov_buffer_desc
*iov
,
624 OM_uint32 seq_number_lo
, seq_number_hi
, major_status
, junk
;
625 gss_iov_buffer_desc
*header
, *trailer
;
626 gss_cfx_wrap_token token
, ttoken
;
631 krb5_crypto_iov
*data
= NULL
;
636 header
= _gk_find_buffer(iov
, iov_count
, GSS_IOV_BUFFER_TYPE_HEADER
);
637 if (header
== NULL
) {
638 *minor_status
= EINVAL
;
639 return GSS_S_FAILURE
;
642 if (header
->buffer
.length
< sizeof(*token
)) /* we check exact below */
643 return GSS_S_DEFECTIVE_TOKEN
;
645 trailer
= _gk_find_buffer(iov
, iov_count
, GSS_IOV_BUFFER_TYPE_TRAILER
);
647 token
= (gss_cfx_wrap_token
)header
->buffer
.value
;
649 if (token
->TOK_ID
[0] != 0x05 || token
->TOK_ID
[1] != 0x04)
650 return GSS_S_DEFECTIVE_TOKEN
;
652 /* Ignore unknown flags */
653 token_flags
= token
->Flags
&
654 (CFXSentByAcceptor
| CFXSealed
| CFXAcceptorSubkey
);
656 if (token_flags
& CFXSentByAcceptor
) {
657 if ((ctx
->more_flags
& LOCAL
) == 0)
658 return GSS_S_DEFECTIVE_TOKEN
;
661 if (ctx
->more_flags
& ACCEPTOR_SUBKEY
) {
662 if ((token_flags
& CFXAcceptorSubkey
) == 0)
663 return GSS_S_DEFECTIVE_TOKEN
;
665 if (token_flags
& CFXAcceptorSubkey
)
666 return GSS_S_DEFECTIVE_TOKEN
;
669 if (token
->Filler
!= 0xFF)
670 return GSS_S_DEFECTIVE_TOKEN
;
672 if (conf_state
!= NULL
)
673 *conf_state
= (token_flags
& CFXSealed
) ? 1 : 0;
675 ec
= (token
->EC
[0] << 8) | token
->EC
[1];
676 rrc
= (token
->RRC
[0] << 8) | token
->RRC
[1];
679 * Check sequence number
681 _gsskrb5_decode_be_om_uint32(&token
->SND_SEQ
[0], &seq_number_hi
);
682 _gsskrb5_decode_be_om_uint32(&token
->SND_SEQ
[4], &seq_number_lo
);
684 /* no support for 64-bit sequence numbers */
685 *minor_status
= ERANGE
;
686 return GSS_S_UNSEQ_TOKEN
;
689 HEIMDAL_MUTEX_lock(&ctx
->ctx_id_mutex
);
690 ret
= _gssapi_msg_order_check(ctx
->order
, seq_number_lo
);
693 HEIMDAL_MUTEX_unlock(&ctx
->ctx_id_mutex
);
696 HEIMDAL_MUTEX_unlock(&ctx
->ctx_id_mutex
);
699 * Decrypt and/or verify checksum
702 if (ctx
->more_flags
& LOCAL
) {
703 usage
= KRB5_KU_USAGE_ACCEPTOR_SEAL
;
705 usage
= KRB5_KU_USAGE_INITIATOR_SEAL
;
708 data
= calloc(iov_count
+ 3, sizeof(data
[0]));
710 *minor_status
= ENOMEM
;
711 major_status
= GSS_S_FAILURE
;
715 if (token_flags
& CFXSealed
) {
716 size_t k5tsize
, k5hsize
;
718 krb5_crypto_length(context
, ctx
->crypto
, KRB5_CRYPTO_TYPE_HEADER
, &k5hsize
);
719 krb5_crypto_length(context
, ctx
->crypto
, KRB5_CRYPTO_TYPE_TRAILER
, &k5tsize
);
721 /* Rotate by RRC; bogus to do this in-place XXX */
724 if (trailer
== NULL
) {
725 size_t gsstsize
= k5tsize
+ sizeof(*token
);
726 size_t gsshsize
= k5hsize
+ sizeof(*token
);
728 if (IS_DCE_STYLE(ctx
))
730 gsshsize
+= gsstsize
;
732 if (rrc
!= gsstsize
) {
733 major_status
= GSS_S_DEFECTIVE_TOKEN
;
736 if (header
->buffer
.length
!= gsshsize
) {
737 major_status
= GSS_S_DEFECTIVE_TOKEN
;
740 } else if (trailer
->buffer
.length
!= sizeof(*token
) + k5tsize
) {
741 major_status
= GSS_S_DEFECTIVE_TOKEN
;
743 } else if (header
->buffer
.length
!= sizeof(*token
) + k5hsize
) {
744 major_status
= GSS_S_DEFECTIVE_TOKEN
;
746 } else if (rrc
!= 0) {
747 /* go though slowpath */
748 major_status
= unrotate_iov(minor_status
, rrc
, iov
, iov_count
);
754 data
[i
].flags
= KRB5_CRYPTO_TYPE_HEADER
;
755 data
[i
].data
.data
= ((uint8_t *)header
->buffer
.value
) + header
->buffer
.length
- k5hsize
;
756 data
[i
].data
.length
= k5hsize
;
759 for (j
= 0; j
< iov_count
; i
++, j
++) {
760 switch (GSS_IOV_BUFFER_TYPE(iov
[j
].type
)) {
761 case GSS_IOV_BUFFER_TYPE_DATA
:
762 case GSS_IOV_BUFFER_TYPE_PADDING
:
763 data
[i
].flags
= KRB5_CRYPTO_TYPE_DATA
;
765 case GSS_IOV_BUFFER_TYPE_SIGN_ONLY
:
766 data
[i
].flags
= KRB5_CRYPTO_TYPE_SIGN_ONLY
;
769 data
[i
].flags
= KRB5_CRYPTO_TYPE_EMPTY
;
772 data
[i
].data
.length
= iov
[j
].buffer
.length
;
773 data
[i
].data
.data
= iov
[j
].buffer
.value
;
776 /* encrypted CFX header in trailer (or after the header if in
777 DCE mode). Copy in header into E"header"
779 data
[i
].flags
= KRB5_CRYPTO_TYPE_DATA
;
781 data
[i
].data
.data
= trailer
->buffer
.value
;
783 data
[i
].data
.data
= ((uint8_t *)header
->buffer
.value
) + header
->buffer
.length
- k5hsize
- k5tsize
- sizeof(*token
);
784 data
[i
].data
.length
= sizeof(*token
);
785 ttoken
= (gss_cfx_wrap_token
)data
[i
].data
.data
;
788 /* Kerberos trailer comes after the gss trailer */
789 data
[i
].flags
= KRB5_CRYPTO_TYPE_TRAILER
;
790 data
[i
].data
.data
= ((uint8_t *)data
[i
-1].data
.data
) + sizeof(*token
);
791 data
[i
].data
.length
= k5tsize
;
794 ret
= krb5_decrypt_iov_ivec(context
, ctx
->crypto
, usage
, data
, i
, NULL
);
797 major_status
= GSS_S_FAILURE
;
801 ttoken
->RRC
[0] = token
->RRC
[0];
802 ttoken
->RRC
[1] = token
->RRC
[1];
804 /* Check the integrity of the header */
805 if (memcmp(ttoken
, token
, sizeof(*token
)) != 0) {
806 major_status
= GSS_S_BAD_MIC
;
812 *minor_status
= EINVAL
;
813 major_status
= GSS_S_FAILURE
;
817 if (trailer
== NULL
) {
818 *minor_status
= EINVAL
;
819 major_status
= GSS_S_FAILURE
;
823 if (trailer
->buffer
.length
!= ec
) {
824 *minor_status
= EINVAL
;
825 major_status
= GSS_S_FAILURE
;
829 for (i
= 0; i
< iov_count
; i
++) {
830 switch (GSS_IOV_BUFFER_TYPE(iov
[i
].type
)) {
831 case GSS_IOV_BUFFER_TYPE_DATA
:
832 case GSS_IOV_BUFFER_TYPE_PADDING
:
833 data
[i
].flags
= KRB5_CRYPTO_TYPE_DATA
;
835 case GSS_IOV_BUFFER_TYPE_SIGN_ONLY
:
836 data
[i
].flags
= KRB5_CRYPTO_TYPE_SIGN_ONLY
;
839 data
[i
].flags
= KRB5_CRYPTO_TYPE_EMPTY
;
842 data
[i
].data
.length
= iov
[i
].buffer
.length
;
843 data
[i
].data
.data
= iov
[i
].buffer
.value
;
846 data
[i
].flags
= KRB5_CRYPTO_TYPE_DATA
;
847 data
[i
].data
.data
= header
->buffer
.value
;
848 data
[i
].data
.length
= header
->buffer
.length
;
851 data
[i
].flags
= KRB5_CRYPTO_TYPE_CHECKSUM
;
852 data
[i
].data
.data
= trailer
->buffer
.value
;
853 data
[i
].data
.length
= trailer
->buffer
.length
;
856 token
= (gss_cfx_wrap_token
)header
->buffer
.value
;
862 ret
= krb5_verify_checksum_iov(context
, ctx
->crypto
, usage
, data
, i
, NULL
);
865 major_status
= GSS_S_FAILURE
;
870 if (qop_state
!= NULL
) {
871 *qop_state
= GSS_C_QOP_DEFAULT
;
877 return GSS_S_COMPLETE
;
883 gss_release_iov_buffer(&junk
, iov
, iov_count
);
889 _gssapi_wrap_iov_length_cfx(OM_uint32
*minor_status
,
891 krb5_context context
,
895 gss_iov_buffer_desc
*iov
,
900 size_t *padding
= NULL
;
902 GSSAPI_KRB5_INIT (&context
);
905 for (size
= 0, i
= 0; i
< iov_count
; i
++) {
906 switch(GSS_IOV_BUFFER_TYPE(iov
[i
].type
)) {
907 case GSS_IOV_BUFFER_TYPE_EMPTY
:
909 case GSS_IOV_BUFFER_TYPE_DATA
:
910 size
+= iov
[i
].buffer
.length
;
912 case GSS_IOV_BUFFER_TYPE_HEADER
:
913 *minor_status
= krb5_crypto_length(context
, ctx
->crypto
, KRB5_CRYPTO_TYPE_HEADER
, &iov
[i
].buffer
.length
);
915 return GSS_S_FAILURE
;
917 case GSS_IOV_BUFFER_TYPE_TRAILER
:
918 *minor_status
= krb5_crypto_length(context
, ctx
->crypto
, KRB5_CRYPTO_TYPE_TRAILER
, &iov
[i
].buffer
.length
);
920 return GSS_S_FAILURE
;
922 case GSS_IOV_BUFFER_TYPE_PADDING
:
923 if (padding
!= NULL
) {
925 return GSS_S_FAILURE
;
927 padding
= &iov
[i
].buffer
.length
;
929 case GSS_IOV_BUFFER_TYPE_SIGN_ONLY
:
932 *minor_status
= EINVAL
;
933 return GSS_S_FAILURE
;
938 krb5_crypto_length(context
, ctx
->crypto
, KRB5_CRYPTO_TYPE_PADDING
, &pad
);
940 *padding
= pad
- (size
% pad
);
947 return GSS_S_COMPLETE
;
953 OM_uint32
_gssapi_wrap_cfx(OM_uint32
*minor_status
,
954 const gsskrb5_ctx ctx
,
955 krb5_context context
,
957 const gss_buffer_t input_message_buffer
,
959 gss_buffer_t output_message_buffer
)
961 gss_cfx_wrap_token token
;
965 size_t wrapped_len
, cksumsize
;
966 uint16_t padlength
, rrc
= 0;
970 ret
= _gsskrb5cfx_wrap_length_cfx(context
,
971 ctx
->crypto
, conf_req_flag
,
972 input_message_buffer
->length
,
973 &wrapped_len
, &cksumsize
, &padlength
);
976 return GSS_S_FAILURE
;
979 /* Always rotate encrypted token (if any) and checksum to header */
980 rrc
= (conf_req_flag
? sizeof(*token
) : 0) + (uint16_t)cksumsize
;
982 output_message_buffer
->length
= wrapped_len
;
983 output_message_buffer
->value
= malloc(output_message_buffer
->length
);
984 if (output_message_buffer
->value
== NULL
) {
985 *minor_status
= ENOMEM
;
986 return GSS_S_FAILURE
;
989 p
= output_message_buffer
->value
;
990 token
= (gss_cfx_wrap_token
)p
;
991 token
->TOK_ID
[0] = 0x05;
992 token
->TOK_ID
[1] = 0x04;
994 token
->Filler
= 0xFF;
995 if ((ctx
->more_flags
& LOCAL
) == 0)
996 token
->Flags
|= CFXSentByAcceptor
;
997 if (ctx
->more_flags
& ACCEPTOR_SUBKEY
)
998 token
->Flags
|= CFXAcceptorSubkey
;
1001 * In Wrap tokens with confidentiality, the EC field is
1002 * used to encode the size (in bytes) of the random filler.
1004 token
->Flags
|= CFXSealed
;
1005 token
->EC
[0] = (padlength
>> 8) & 0xFF;
1006 token
->EC
[1] = (padlength
>> 0) & 0xFF;
1009 * In Wrap tokens without confidentiality, the EC field is
1010 * used to encode the size (in bytes) of the trailing
1013 * This is not used in the checksum calcuation itself,
1014 * because the checksum length could potentially vary
1015 * depending on the data length.
1022 * In Wrap tokens that provide for confidentiality, the RRC
1023 * field in the header contains the hex value 00 00 before
1026 * In Wrap tokens that do not provide for confidentiality,
1027 * both the EC and RRC fields in the appended checksum
1028 * contain the hex value 00 00 for the purpose of calculating
1034 HEIMDAL_MUTEX_lock(&ctx
->ctx_id_mutex
);
1035 krb5_auth_con_getlocalseqnumber(context
,
1038 _gsskrb5_encode_be_om_uint32(0, &token
->SND_SEQ
[0]);
1039 _gsskrb5_encode_be_om_uint32(seq_number
, &token
->SND_SEQ
[4]);
1040 krb5_auth_con_setlocalseqnumber(context
,
1043 HEIMDAL_MUTEX_unlock(&ctx
->ctx_id_mutex
);
1046 * If confidentiality is requested, the token header is
1047 * appended to the plaintext before encryption; the resulting
1048 * token is {"header" | encrypt(plaintext | pad | "header")}.
1050 * If no confidentiality is requested, the checksum is
1051 * calculated over the plaintext concatenated with the
1054 if (ctx
->more_flags
& LOCAL
) {
1055 usage
= KRB5_KU_USAGE_INITIATOR_SEAL
;
1057 usage
= KRB5_KU_USAGE_ACCEPTOR_SEAL
;
1060 if (conf_req_flag
) {
1062 * Any necessary padding is added here to ensure that the
1063 * encrypted token header is always at the end of the
1066 * The specification does not require that the padding
1067 * bytes are initialized.
1069 p
+= sizeof(*token
);
1070 memcpy(p
, input_message_buffer
->value
, input_message_buffer
->length
);
1071 memset(p
+ input_message_buffer
->length
, 0xFF, padlength
);
1072 memcpy(p
+ input_message_buffer
->length
+ padlength
,
1073 token
, sizeof(*token
));
1075 ret
= krb5_encrypt(context
, ctx
->crypto
,
1077 input_message_buffer
->length
+ padlength
+
1081 *minor_status
= ret
;
1082 _gsskrb5_release_buffer(minor_status
, output_message_buffer
);
1083 return GSS_S_FAILURE
;
1085 assert(sizeof(*token
) + cipher
.length
== wrapped_len
);
1086 token
->RRC
[0] = (rrc
>> 8) & 0xFF;
1087 token
->RRC
[1] = (rrc
>> 0) & 0xFF;
1089 ret
= rrc_rotate(cipher
.data
, cipher
.length
, rrc
, FALSE
);
1091 *minor_status
= ret
;
1092 _gsskrb5_release_buffer(minor_status
, output_message_buffer
);
1093 return GSS_S_FAILURE
;
1095 memcpy(p
, cipher
.data
, cipher
.length
);
1096 krb5_data_free(&cipher
);
1101 buf
= malloc(input_message_buffer
->length
+ sizeof(*token
));
1103 *minor_status
= ENOMEM
;
1104 _gsskrb5_release_buffer(minor_status
, output_message_buffer
);
1105 return GSS_S_FAILURE
;
1107 memcpy(buf
, input_message_buffer
->value
, input_message_buffer
->length
);
1108 memcpy(buf
+ input_message_buffer
->length
, token
, sizeof(*token
));
1110 ret
= krb5_create_checksum(context
, ctx
->crypto
,
1112 input_message_buffer
->length
+
1116 *minor_status
= ret
;
1117 _gsskrb5_release_buffer(minor_status
, output_message_buffer
);
1119 return GSS_S_FAILURE
;
1124 assert(cksum
.checksum
.length
== cksumsize
);
1125 token
->EC
[0] = (cksum
.checksum
.length
>> 8) & 0xFF;
1126 token
->EC
[1] = (cksum
.checksum
.length
>> 0) & 0xFF;
1127 token
->RRC
[0] = (rrc
>> 8) & 0xFF;
1128 token
->RRC
[1] = (rrc
>> 0) & 0xFF;
1130 p
+= sizeof(*token
);
1131 memcpy(p
, input_message_buffer
->value
, input_message_buffer
->length
);
1132 memcpy(p
+ input_message_buffer
->length
,
1133 cksum
.checksum
.data
, cksum
.checksum
.length
);
1136 input_message_buffer
->length
+ cksum
.checksum
.length
, rrc
, FALSE
);
1138 *minor_status
= ret
;
1139 _gsskrb5_release_buffer(minor_status
, output_message_buffer
);
1140 free_Checksum(&cksum
);
1141 return GSS_S_FAILURE
;
1143 free_Checksum(&cksum
);
1146 if (conf_state
!= NULL
) {
1147 *conf_state
= conf_req_flag
;
1151 return GSS_S_COMPLETE
;
1154 OM_uint32
_gssapi_unwrap_cfx(OM_uint32
*minor_status
,
1155 const gsskrb5_ctx ctx
,
1156 krb5_context context
,
1157 const gss_buffer_t input_message_buffer
,
1158 gss_buffer_t output_message_buffer
,
1160 gss_qop_t
*qop_state
)
1162 gss_cfx_wrap_token token
;
1164 krb5_error_code ret
;
1168 OM_uint32 seq_number_lo
, seq_number_hi
;
1174 if (input_message_buffer
->length
< sizeof(*token
)) {
1175 return GSS_S_DEFECTIVE_TOKEN
;
1178 p
= input_message_buffer
->value
;
1180 token
= (gss_cfx_wrap_token
)p
;
1182 if (token
->TOK_ID
[0] != 0x05 || token
->TOK_ID
[1] != 0x04) {
1183 return GSS_S_DEFECTIVE_TOKEN
;
1186 /* Ignore unknown flags */
1187 token_flags
= token
->Flags
&
1188 (CFXSentByAcceptor
| CFXSealed
| CFXAcceptorSubkey
);
1190 if (token_flags
& CFXSentByAcceptor
) {
1191 if ((ctx
->more_flags
& LOCAL
) == 0)
1192 return GSS_S_DEFECTIVE_TOKEN
;
1195 if (ctx
->more_flags
& ACCEPTOR_SUBKEY
) {
1196 if ((token_flags
& CFXAcceptorSubkey
) == 0)
1197 return GSS_S_DEFECTIVE_TOKEN
;
1199 if (token_flags
& CFXAcceptorSubkey
)
1200 return GSS_S_DEFECTIVE_TOKEN
;
1203 if (token
->Filler
!= 0xFF) {
1204 return GSS_S_DEFECTIVE_TOKEN
;
1207 if (conf_state
!= NULL
) {
1208 *conf_state
= (token_flags
& CFXSealed
) ? 1 : 0;
1211 ec
= (token
->EC
[0] << 8) | token
->EC
[1];
1212 rrc
= (token
->RRC
[0] << 8) | token
->RRC
[1];
1215 * Check sequence number
1217 _gsskrb5_decode_be_om_uint32(&token
->SND_SEQ
[0], &seq_number_hi
);
1218 _gsskrb5_decode_be_om_uint32(&token
->SND_SEQ
[4], &seq_number_lo
);
1219 if (seq_number_hi
) {
1220 /* no support for 64-bit sequence numbers */
1221 *minor_status
= ERANGE
;
1222 return GSS_S_UNSEQ_TOKEN
;
1225 HEIMDAL_MUTEX_lock(&ctx
->ctx_id_mutex
);
1226 ret
= _gssapi_msg_order_check(ctx
->order
, seq_number_lo
);
1229 HEIMDAL_MUTEX_unlock(&ctx
->ctx_id_mutex
);
1230 _gsskrb5_release_buffer(minor_status
, output_message_buffer
);
1233 HEIMDAL_MUTEX_unlock(&ctx
->ctx_id_mutex
);
1236 * Decrypt and/or verify checksum
1239 if (ctx
->more_flags
& LOCAL
) {
1240 usage
= KRB5_KU_USAGE_ACCEPTOR_SEAL
;
1242 usage
= KRB5_KU_USAGE_INITIATOR_SEAL
;
1245 p
+= sizeof(*token
);
1246 len
= input_message_buffer
->length
;
1247 len
-= (p
- (u_char
*)input_message_buffer
->value
);
1249 /* Rotate by RRC; bogus to do this in-place XXX */
1250 *minor_status
= rrc_rotate(p
, len
, rrc
, TRUE
);
1251 if (*minor_status
!= 0) {
1252 return GSS_S_FAILURE
;
1255 if (token_flags
& CFXSealed
) {
1256 ret
= krb5_decrypt(context
, ctx
->crypto
, usage
,
1259 *minor_status
= ret
;
1260 return GSS_S_BAD_MIC
;
1263 /* Check that there is room for the pad and token header */
1264 if (data
.length
< ec
+ sizeof(*token
)) {
1265 krb5_data_free(&data
);
1266 return GSS_S_DEFECTIVE_TOKEN
;
1269 p
+= data
.length
- sizeof(*token
);
1271 /* RRC is unprotected; don't modify input buffer */
1272 ((gss_cfx_wrap_token
)p
)->RRC
[0] = token
->RRC
[0];
1273 ((gss_cfx_wrap_token
)p
)->RRC
[1] = token
->RRC
[1];
1275 /* Check the integrity of the header */
1276 if (memcmp(p
, token
, sizeof(*token
)) != 0) {
1277 krb5_data_free(&data
);
1278 return GSS_S_BAD_MIC
;
1281 output_message_buffer
->value
= data
.data
;
1282 output_message_buffer
->length
= data
.length
- ec
- sizeof(*token
);
1286 /* Determine checksum type */
1287 ret
= krb5_crypto_get_checksum_type(context
,
1291 *minor_status
= ret
;
1292 return GSS_S_FAILURE
;
1295 cksum
.checksum
.length
= ec
;
1297 /* Check we have at least as much data as the checksum */
1298 if (len
< cksum
.checksum
.length
) {
1299 *minor_status
= ERANGE
;
1300 return GSS_S_BAD_MIC
;
1303 /* Length now is of the plaintext only, no checksum */
1304 len
-= cksum
.checksum
.length
;
1305 cksum
.checksum
.data
= p
+ len
;
1307 output_message_buffer
->length
= len
; /* for later */
1308 output_message_buffer
->value
= malloc(len
+ sizeof(*token
));
1309 if (output_message_buffer
->value
== NULL
) {
1310 *minor_status
= ENOMEM
;
1311 return GSS_S_FAILURE
;
1314 /* Checksum is over (plaintext-data | "header") */
1315 memcpy(output_message_buffer
->value
, p
, len
);
1316 memcpy((u_char
*)output_message_buffer
->value
+ len
,
1317 token
, sizeof(*token
));
1319 /* EC is not included in checksum calculation */
1320 token
= (gss_cfx_wrap_token
)((u_char
*)output_message_buffer
->value
+
1327 ret
= krb5_verify_checksum(context
, ctx
->crypto
,
1329 output_message_buffer
->value
,
1330 len
+ sizeof(*token
),
1333 *minor_status
= ret
;
1334 _gsskrb5_release_buffer(minor_status
, output_message_buffer
);
1335 return GSS_S_BAD_MIC
;
1339 if (qop_state
!= NULL
) {
1340 *qop_state
= GSS_C_QOP_DEFAULT
;
1344 return GSS_S_COMPLETE
;
1347 OM_uint32
_gssapi_mic_cfx(OM_uint32
*minor_status
,
1348 const gsskrb5_ctx ctx
,
1349 krb5_context context
,
1351 const gss_buffer_t message_buffer
,
1352 gss_buffer_t message_token
)
1354 gss_cfx_mic_token token
;
1355 krb5_error_code ret
;
1362 len
= message_buffer
->length
+ sizeof(*token
);
1365 *minor_status
= ENOMEM
;
1366 return GSS_S_FAILURE
;
1369 memcpy(buf
, message_buffer
->value
, message_buffer
->length
);
1371 token
= (gss_cfx_mic_token
)(buf
+ message_buffer
->length
);
1372 token
->TOK_ID
[0] = 0x04;
1373 token
->TOK_ID
[1] = 0x04;
1375 if ((ctx
->more_flags
& LOCAL
) == 0)
1376 token
->Flags
|= CFXSentByAcceptor
;
1377 if (ctx
->more_flags
& ACCEPTOR_SUBKEY
)
1378 token
->Flags
|= CFXAcceptorSubkey
;
1379 memset(token
->Filler
, 0xFF, 5);
1381 HEIMDAL_MUTEX_lock(&ctx
->ctx_id_mutex
);
1382 krb5_auth_con_getlocalseqnumber(context
,
1385 _gsskrb5_encode_be_om_uint32(0, &token
->SND_SEQ
[0]);
1386 _gsskrb5_encode_be_om_uint32(seq_number
, &token
->SND_SEQ
[4]);
1387 krb5_auth_con_setlocalseqnumber(context
,
1390 HEIMDAL_MUTEX_unlock(&ctx
->ctx_id_mutex
);
1392 if (ctx
->more_flags
& LOCAL
) {
1393 usage
= KRB5_KU_USAGE_INITIATOR_SIGN
;
1395 usage
= KRB5_KU_USAGE_ACCEPTOR_SIGN
;
1398 ret
= krb5_create_checksum(context
, ctx
->crypto
,
1399 usage
, 0, buf
, len
, &cksum
);
1401 *minor_status
= ret
;
1403 return GSS_S_FAILURE
;
1406 /* Determine MIC length */
1407 message_token
->length
= sizeof(*token
) + cksum
.checksum
.length
;
1408 message_token
->value
= malloc(message_token
->length
);
1409 if (message_token
->value
== NULL
) {
1410 *minor_status
= ENOMEM
;
1411 free_Checksum(&cksum
);
1413 return GSS_S_FAILURE
;
1416 /* Token is { "header" | get_mic("header" | plaintext-data) } */
1417 memcpy(message_token
->value
, token
, sizeof(*token
));
1418 memcpy((u_char
*)message_token
->value
+ sizeof(*token
),
1419 cksum
.checksum
.data
, cksum
.checksum
.length
);
1421 free_Checksum(&cksum
);
1425 return GSS_S_COMPLETE
;
1428 OM_uint32
_gssapi_verify_mic_cfx(OM_uint32
*minor_status
,
1429 const gsskrb5_ctx ctx
,
1430 krb5_context context
,
1431 const gss_buffer_t message_buffer
,
1432 const gss_buffer_t token_buffer
,
1433 gss_qop_t
*qop_state
)
1435 gss_cfx_mic_token token
;
1437 krb5_error_code ret
;
1439 OM_uint32 seq_number_lo
, seq_number_hi
;
1445 if (token_buffer
->length
< sizeof(*token
)) {
1446 return GSS_S_DEFECTIVE_TOKEN
;
1449 p
= token_buffer
->value
;
1451 token
= (gss_cfx_mic_token
)p
;
1453 if (token
->TOK_ID
[0] != 0x04 || token
->TOK_ID
[1] != 0x04) {
1454 return GSS_S_DEFECTIVE_TOKEN
;
1457 /* Ignore unknown flags */
1458 token_flags
= token
->Flags
& (CFXSentByAcceptor
| CFXAcceptorSubkey
);
1460 if (token_flags
& CFXSentByAcceptor
) {
1461 if ((ctx
->more_flags
& LOCAL
) == 0)
1462 return GSS_S_DEFECTIVE_TOKEN
;
1464 if (ctx
->more_flags
& ACCEPTOR_SUBKEY
) {
1465 if ((token_flags
& CFXAcceptorSubkey
) == 0)
1466 return GSS_S_DEFECTIVE_TOKEN
;
1468 if (token_flags
& CFXAcceptorSubkey
)
1469 return GSS_S_DEFECTIVE_TOKEN
;
1472 if (memcmp(token
->Filler
, "\xff\xff\xff\xff\xff", 5) != 0) {
1473 return GSS_S_DEFECTIVE_TOKEN
;
1477 * Check sequence number
1479 _gsskrb5_decode_be_om_uint32(&token
->SND_SEQ
[0], &seq_number_hi
);
1480 _gsskrb5_decode_be_om_uint32(&token
->SND_SEQ
[4], &seq_number_lo
);
1481 if (seq_number_hi
) {
1482 *minor_status
= ERANGE
;
1483 return GSS_S_UNSEQ_TOKEN
;
1486 HEIMDAL_MUTEX_lock(&ctx
->ctx_id_mutex
);
1487 ret
= _gssapi_msg_order_check(ctx
->order
, seq_number_lo
);
1490 HEIMDAL_MUTEX_unlock(&ctx
->ctx_id_mutex
);
1493 HEIMDAL_MUTEX_unlock(&ctx
->ctx_id_mutex
);
1498 ret
= krb5_crypto_get_checksum_type(context
, ctx
->crypto
,
1501 *minor_status
= ret
;
1502 return GSS_S_FAILURE
;
1505 cksum
.checksum
.data
= p
+ sizeof(*token
);
1506 cksum
.checksum
.length
= token_buffer
->length
- sizeof(*token
);
1508 if (ctx
->more_flags
& LOCAL
) {
1509 usage
= KRB5_KU_USAGE_ACCEPTOR_SIGN
;
1511 usage
= KRB5_KU_USAGE_INITIATOR_SIGN
;
1514 buf
= malloc(message_buffer
->length
+ sizeof(*token
));
1516 *minor_status
= ENOMEM
;
1517 return GSS_S_FAILURE
;
1519 memcpy(buf
, message_buffer
->value
, message_buffer
->length
);
1520 memcpy(buf
+ message_buffer
->length
, token
, sizeof(*token
));
1522 ret
= krb5_verify_checksum(context
, ctx
->crypto
,
1525 sizeof(*token
) + message_buffer
->length
,
1528 *minor_status
= ret
;
1530 return GSS_S_BAD_MIC
;
1535 if (qop_state
!= NULL
) {
1536 *qop_state
= GSS_C_QOP_DEFAULT
;
1539 return GSS_S_COMPLETE
;