/* gcc/java/verify.c */
1 /* Handle verification of bytecoded methods for the GNU compiler for
2 the Java(TM) language.
3 Copyright (C) 1997, 1998, 1999 Free Software Foundation, Inc.
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA.
22 Java and all Java-based marks are trademarks or registered trademarks
23 of Sun Microsystems, Inc. in the United States and other countries.
24 The Free Software Foundation is independent of Sun Microsystems, Inc. */
26 #include "config.h"
27 #include "system.h"
28 #include "tree.h"
29 #include "java-tree.h"
30 #include "javaop.h"
31 #include "java-opcodes.h"
32 #include "jcf.h"
33 #include "java-except.h"
34 #include "toplev.h"
36 static void push_pending_label PROTO ((tree));
37 static tree merge_types PROTO ((tree, tree));
38 static const char *check_pending_block PROTO ((tree));
39 static void type_stack_dup PROTO ((int, int));
40 static int start_pc_cmp PROTO ((const PTR, const PTR));
42 extern int stack_pointer;
44 /* During verification, start of the current subroutine (jsr target). */
45 tree current_subr;
47 /* A list of pending blocks, chained using LABEL_PENDING_CHAIN.
48 A pending block is one that has LABEL_CHANGED set, which means
49 it requires (re-) verification. */
50 tree pending_blocks;
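/* Verification is a data-flow fixpoint computation over this worklist:
   whenever merging type states at a label changes its recorded state,
   the label is pushed here (see push_pending_label) and later
   re-verified, until no pending blocks remain.  A loop back-edge, for
   example, typically pushes the loop header a second time with the
   merged, more general types for any locals assigned in the loop. */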
52 /* Append TARGET_LABEL to the pending_block stack unless already in it. */
54 static void
55 push_pending_label (target_label)
56 tree target_label;
58 if (! LABEL_CHANGED (target_label))
60 LABEL_PENDING_CHAIN (target_label) = pending_blocks;
61 pending_blocks = target_label;
62 LABEL_CHANGED (target_label) = 1;
66 /* Note that TARGET_LABEL is a possible successor instruction.
67 Merge the type state etc.
68 Return NULL on success, or an error message on failure. */
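/* Besides merging type states, this enforces the structural rule that
   control may not be transferred into or out of a jsr subroutine other
   than through the jsr/ret protocol; see the current_subr checks
   below. */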
70 static const char *
71 check_pending_block (target_label)
72 tree target_label;
74 int changed = merge_type_state (target_label);
76 if (changed)
78 if (changed < 0)
79 return "types could not be merged";
80 push_pending_label (target_label);
83 if (current_subr == NULL)
85 if (LABEL_IN_SUBR (target_label))
86 return "might transfer control into subroutine";
88 else
90 if (LABEL_IN_SUBR (target_label))
92 if (LABEL_SUBR_START (target_label) != current_subr)
93 return "transfer out of subroutine";
95 else if (! LABEL_VERIFIED (target_label))
97 LABEL_IN_SUBR (target_label) = 1;
98 LABEL_SUBR_START (target_label) = current_subr;
100 else
101 return "transfer out of subroutine";
103 return NULL;
106 /* Return the "merged" types of TYPE1 and TYPE2.
107 If either is primitive, the other must match (after promotion to int).
108 For reference types, return the common super-class.
109 Return TYPE_UNKNOWN if the types cannot be merged. */
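/* For example, merging java.lang.String with java.lang.StringBuffer
   yields their closest common super-class, java.lang.Object; merging
   boolean with int yields int (both promote to int); merging int with
   any reference type yields TYPE_UNKNOWN. */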
111 static tree
112 merge_types (type1, type2)
113 tree type1, type2;
115 if (type1 == type2)
116 return type1;
117 if (type1 == TYPE_UNKNOWN || type2 == TYPE_UNKNOWN
118 || type1 == TYPE_RETURN_ADDR || type2 == TYPE_RETURN_ADDR)
119 return TYPE_UNKNOWN;
120 if (TREE_CODE (type1) == POINTER_TYPE && TREE_CODE (type2) == POINTER_TYPE)
122 int depth1, depth2;
123 tree tt1, tt2;
124 /* ptr_type_node is only used for a null reference,
125 which is compatible with any reference type. */
126 if (type1 == ptr_type_node || type2 == object_ptr_type_node)
127 return type2;
128 if (type2 == ptr_type_node || type1 == object_ptr_type_node)
129 return type1;
131 tt1 = HANDLE_TO_CLASS_TYPE (TREE_TYPE (type1));
132 tt2 = HANDLE_TO_CLASS_TYPE (TREE_TYPE (type2));
134 if (TYPE_ARRAY_P (tt1) || TYPE_ARRAY_P (tt2))
136 if (TYPE_ARRAY_P (tt1) == TYPE_ARRAY_P (tt2))
138 tree el_type1 = TYPE_ARRAY_ELEMENT (tt1);
139 tree el_type2 = TYPE_ARRAY_ELEMENT (tt2);
140 tree el_type = NULL_TREE;
141 if (el_type1 == el_type2)
142 el_type = el_type1;
143 else if (TREE_CODE (el_type1) == POINTER_TYPE
144 && TREE_CODE (el_type2) == POINTER_TYPE)
145 el_type = merge_types (el_type1, el_type2);
146 if (el_type != NULL_TREE)
148 HOST_WIDE_INT len1 = java_array_type_length (tt1);
149 HOST_WIDE_INT len2 = java_array_type_length (tt2);
150 if (len1 != len2)
151 len1 = -1;
152 else if (el_type1 == el_type2)
153 return type1;
154 return promote_type (build_java_array_type (el_type, len1));
157 return object_ptr_type_node;
160 if (CLASS_INTERFACE (TYPE_NAME (tt1)))
162 if (CLASS_INTERFACE (TYPE_NAME (tt2)))
164 /* This is a kludge, but matches what Sun's verifier does.
165 It can be tricked, but is safe as long as type errors
166 (i.e. interface method calls) are caught at run-time. */
167 return object_ptr_type_node;
169 else
171 if (can_widen_reference_to (tt2, tt1))
172 return type1;
173 else
174 return TYPE_UNKNOWN;
177 else if (CLASS_INTERFACE (TYPE_NAME (tt2)))
179 if (can_widen_reference_to (tt1, tt2))
180 return type2;
181 else
182 return TYPE_UNKNOWN;
185 type1 = tt1;
186 type2 = tt2;
188 depth1 = class_depth (type1);
189 depth2 = class_depth (type2);
190 for ( ; depth1 > depth2; depth1--)
191 type1 = TYPE_BINFO_BASETYPE (type1, 0);
192 for ( ; depth2 > depth1; depth2--)
193 type2 = TYPE_BINFO_BASETYPE (type2, 0);
194 while (type1 != type2)
196 type1 = TYPE_BINFO_BASETYPE (type1, 0);
197 type2 = TYPE_BINFO_BASETYPE (type2, 0);
199 return promote_type (type1);
201 if (INTEGRAL_TYPE_P (type1) && INTEGRAL_TYPE_P (type2)
202 && TYPE_PRECISION (type1) <= 32 && TYPE_PRECISION (type2) <= 32)
203 return int_type_node;
204 return TYPE_UNKNOWN;
207 /* Merge the current type state with that at LABEL.
208 Return -1 if the states are incompatible (i.e. on error),
209 0 if there was no change, and 1 if there was a change. */
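/* A type state is a TREE_VEC with one element per local variable slot
   followed by one element per operand stack slot, so its length here
   is DECL_MAX_LOCALS + stack_pointer.  A long or double occupies two
   consecutive slots (the second slot of a wide local is set to
   TYPE_SECOND; see the store cases below). */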
211 int
212 merge_type_state (label)
213 tree label;
215 int nlocals = DECL_MAX_LOCALS(current_function_decl);
216 int cur_length = stack_pointer + nlocals;
217 tree vec = LABEL_TYPE_STATE (label);
218 tree return_map;
219 if (vec == NULL_TREE)
221 vec = make_tree_vec (cur_length);
222 LABEL_TYPE_STATE (label) = vec;
223 while (--cur_length >= 0)
224 TREE_VEC_ELT (vec, cur_length) = type_map [cur_length];
225 return 1;
227 else
229 int i;
230 int changed = 0;
231 if (LABEL_IS_SUBR_START (label) && LABEL_VERIFIED (label)
232 && current_subr != label)
233 return_map = LABEL_RETURN_TYPE_STATE (label);
234 else
235 return_map = NULL_TREE;
236 if (TREE_VEC_LENGTH (vec) != cur_length)
238 return -1;
240 for (i = 0; i < cur_length; i++)
242 tree old_type = TREE_VEC_ELT (vec, i);
243 tree new_type = merge_types (old_type, type_map [i]);
244 if (TREE_VEC_ELT (vec, i) != new_type)
246 /* If there has been a change, note that since we must re-verify.
247 However, if the label is the start of a subroutine,
248 we don't care about local variables that are neither
249 set nor used in the sub-routine. */
250 if (return_map == NULL_TREE || i >= nlocals
251 || TREE_VEC_ELT (return_map, i) != TYPE_UNUSED
252 || (TYPE_IS_WIDE (new_type)
253 && TREE_VEC_ELT (return_map, i+1) != TYPE_UNUSED))
254 changed = 1;
256 TREE_VEC_ELT (vec, i) = new_type;
257 if (new_type == TYPE_UNKNOWN)
259 if (i >= nlocals)
260 return -1;
262 else if (TYPE_IS_WIDE (new_type))
263 i++;
265 return changed;
269 /* Handle dup-like operations. */
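/* For example (stack drawn with the top at the right):
     dup     (size 1, offset 0):  ..., v1      ->  ..., v1, v1
     dup_x1  (size 1, offset 1):  ..., v2, v1  ->  ..., v1, v2, v1
     dup2    (size 2, offset 0):  ..., v2, v1  ->  ..., v2, v1, v2, v1
   A long or double counts as two stack words here, the second of which
   is void_type_node in stack_type_map. */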
271 static void
272 type_stack_dup (size, offset)
273 int size, offset;
275 tree type[4];
276 int index;
277 if (size + offset > stack_pointer)
278 error ("stack underflow - dup* operation");
279 for (index = 0; index < size + offset; index++)
281 type[index] = stack_type_map[stack_pointer - 1];
282 if (type[index] == void_type_node)
284 index++;
285 type[index] = stack_type_map[stack_pointer - 2];
286 if (! TYPE_IS_WIDE (type[index]))
287 fatal ("internal error - dup operation");
288 if (index == size || index == size + offset)
289 fatal ("dup operation splits 64-bit number");
291 pop_type (type[index]);
293 for (index = size; --index >= 0; )
295 if (type[index] != void_type_node)
296 push_type (type[index]);
299 for (index = size + offset; --index >= 0; )
301 if (type[index] != void_type_node)
302 push_type (type[index]);
306 /* This keeps track of a start PC and corresponding initial index. */
307 struct pc_index
309 int start_pc;
310 int index;
313 /* A helper that is used when sorting exception ranges. */
314 static int
315 start_pc_cmp (xp, yp)
316 const PTR xp;
317 const PTR yp;
319 const struct pc_index *x = (const struct pc_index *) xp;
320 const struct pc_index *y = (const struct pc_index *) yp;
321 return x->start_pc - y->start_pc;
324 /* This causes the next iteration to ignore the next instruction
325 and look for some other unhandled instruction. */
326 #define INVALIDATE_PC (prevpc = -1, oldpc = PC, PC = INVALID_PC)
327 #define INVALID_PC (-1)
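/* INVALIDATE_PC is used when control cannot simply fall through to the
   next PC (after goto, the *return opcodes, athrow, ret, jsr and the
   switches), so that the main loop instead picks the next block to
   verify from pending_blocks. */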
329 #define VERIFICATION_ERROR(MESSAGE) \
330 do { message = MESSAGE; goto verify_error; } while (0)
332 #define PUSH_PENDING(LABEL) \
333 do { if ((message = check_pending_block (LABEL)) != NULL) \
334 goto verify_error; } while (0)
336 #ifdef __GNUC__
337 #define CHECK_PC_IN_RANGE(PC) ({if (PC < 0 || PC > length) goto bad_pc; 1;})
338 #else
339 #define CHECK_PC_IN_RANGE(PC) (PC < 0 || PC > length ? \
340 (fatal("Bad byte codes.\n"), 0) : 1)
341 #endif
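/* The __GNUC__ variant relies on the GCC statement-expression
   extension so that an out-of-range PC can "goto bad_pc"; the portable
   fallback can only abort via fatal (). */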
343 #define BCODE byte_ops
345 /* Verify the bytecodes of the current method.
346 Return 1 on success, 0 on failure. */
347 int
348 verify_jvm_instructions (jcf, byte_ops, length)
349 JCF* jcf;
350 const unsigned char *byte_ops;
351 long length;
353 tree label;
354 int wide = 0;
355 int op_code;
356 int PC;
357 int oldpc = 0; /* PC of start of instruction. */
358 int prevpc = 0; /* If >= 0, PC of previous instruction. */
359 const char *message;
360 int i;
361 register unsigned char *p;
362 struct eh_range *prev_eh_ranges = NULL_EH_RANGE;
363 struct eh_range *eh_ranges;
364 tree return_type = TREE_TYPE (TREE_TYPE (current_function_decl));
365 struct pc_index *starts;
366 int eh_count;
368 jint int_value = -1;
370 pending_blocks = NULL_TREE;
372 /* Handle the exception table. */
373 method_init_exceptions ();
374 JCF_SEEK (jcf, DECL_CODE_OFFSET (current_function_decl) + length);
375 eh_count = JCF_readu2 (jcf);
377 /* We read the exception handlers in order of increasing start PC.
378 To do this we first read and sort the start PCs. */
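/* Each exception_table entry occupies 8 bytes in the class file:
   start_pc (u2), end_pc (u2), handler_pc (u2) and catch_type (u2);
   hence the "8 * i" offsets below. */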
379 starts = (struct pc_index *) xmalloc (eh_count * sizeof (struct pc_index));
380 for (i = 0; i < eh_count; ++i)
382 starts[i].start_pc = GET_u2 (jcf->read_ptr + 8 * i);
383 starts[i].index = i;
385 qsort (starts, eh_count, sizeof (struct pc_index), start_pc_cmp);
387 for (i = 0; i < eh_count; ++i)
389 int start_pc, end_pc, handler_pc, catch_type;
391 p = jcf->read_ptr + 8 * starts[i].index;
393 start_pc = GET_u2 (p);
394 end_pc = GET_u2 (p+2);
395 handler_pc = GET_u2 (p+4);
396 catch_type = GET_u2 (p+6);
398 if (start_pc < 0 || start_pc >= length
399 || end_pc < 0 || end_pc > length || start_pc >= end_pc
400 || handler_pc < 0 || handler_pc >= length
401 || (handler_pc >= start_pc && handler_pc < end_pc)
402 || ! (instruction_bits [start_pc] & BCODE_INSTRUCTION_START)
403 || (end_pc < length &&
404 ! (instruction_bits [end_pc] & BCODE_INSTRUCTION_START))
405 || ! (instruction_bits [handler_pc] & BCODE_INSTRUCTION_START))
407 error ("bad pc in exception_table");
408 free (starts);
409 return 0;
412 add_handler (start_pc, end_pc,
413 lookup_label (handler_pc),
414 catch_type == 0 ? NULL_TREE
415 : get_class_constant (jcf, catch_type));
417 instruction_bits [handler_pc] |= BCODE_EXCEPTION_TARGET;
420 free (starts);
421 handle_nested_ranges ();
423 for (PC = 0;;)
425 int index;
426 tree type, tmp;
427 if (((PC != INVALID_PC
428 && instruction_bits [PC] & BCODE_TARGET) != 0)
429 || PC == 0)
431 PUSH_PENDING (lookup_label (PC));
432 INVALIDATE_PC;
434 /* Check if there are any more pending blocks in the current
435 subroutine. Because we push pending blocks in a
436 last-in-first-out order, and because we don't push anything
437 from our caller until we are done with this subroutine or
438 anything nested in it, then we are done if the top of the
439 pending_blocks stack is not in a subroutine, or it is in our
440 caller. */
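/* (A subroutine here is the body of a jsr target: javac classically
   compiles try/finally by emitting the finally code once and reaching
   it through jsr, with the return address saved by astore and consumed
   by ret.  Nested finally blocks yield nested subroutines, hence the
   LABEL_SUBR_CONTEXT chain.) */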
441 if (current_subr
442 && PC == INVALID_PC)
444 tree caller = LABEL_SUBR_CONTEXT (current_subr);
446 if (pending_blocks == NULL_TREE
447 || ! LABEL_IN_SUBR (pending_blocks)
448 || LABEL_SUBR_START (pending_blocks) == caller)
450 int size = DECL_MAX_LOCALS(current_function_decl)+stack_pointer;
451 tree ret_map = LABEL_RETURN_TYPE_STATE (current_subr);
452 tmp = LABEL_RETURN_LABELS (current_subr);
454 /* FIXME: If we exit a subroutine via a throw, we might
455 have returned to an earlier caller. Obviously a
456 "ret" can only return one level, but a throw may
457 return many levels.*/
458 current_subr = caller;
460 if (RETURN_MAP_ADJUSTED (ret_map))
462 /* Since we are done with this subroutine, set up
463 the (so far known) return address as pending -
464 with the merged type state. */
465 for ( ; tmp != NULL_TREE; tmp = TREE_CHAIN (tmp))
467 tree return_label = TREE_VALUE (tmp);
468 tree return_state = LABEL_TYPE_STATE (return_label);
469 if (return_state == NULL_TREE)
471 /* This means we had not verified the
472 subroutine earlier, so this is the first jsr to
473 call it. In this case, the type_map of the return
474 address is just the current type_map - and that
475 is handled by the following PUSH_PENDING. */
477 else
479 /* In this case we have to do a merge. But first
480 restore the type_map for unused slots to those
481 that were in effect at the jsr. */
482 for (index = size; --index >= 0; )
484 type_map[index] = TREE_VEC_ELT (ret_map, index);
485 if (type_map[index] == TYPE_UNUSED)
486 type_map[index]
487 = TREE_VEC_ELT (return_state, index);
490 PUSH_PENDING (return_label);
495 if (PC == INVALID_PC)
497 label = pending_blocks;
498 if (label == NULL_TREE)
499 break; /* We're done! */
500 pending_blocks = LABEL_PENDING_CHAIN (label);
501 LABEL_CHANGED (label) = 0;
503 if (LABEL_IN_SUBR (label))
504 current_subr = LABEL_SUBR_START (label);
505 else
506 current_subr = NULL_TREE;
508 /* Restore type_map and stack_pointer from
509 LABEL_TYPE_STATE (label), and continue
510 compiling from there. */
511 load_type_state (label);
512 PC = LABEL_PC (label);
514 else if (PC >= length)
515 VERIFICATION_ERROR ("falling through end of method");
517 /* fprintf (stderr, "** %d\n", PC); */
519 oldpc = PC;
521 if (!(instruction_bits [PC] & BCODE_INSTRUCTION_START) && ! wide)
522 VERIFICATION_ERROR ("PC not at instruction start");
524 instruction_bits[PC] |= BCODE_VERIFIED;
526 eh_ranges = find_handler (oldpc);
528 op_code = byte_ops[PC++];
529 switch (op_code)
531 int is_static, is_putting;
532 case OPCODE_nop:
533 break;
534 case OPCODE_iconst_m1:
535 case OPCODE_iconst_0: case OPCODE_iconst_1: case OPCODE_iconst_2:
536 case OPCODE_iconst_3: case OPCODE_iconst_4: case OPCODE_iconst_5:
537 i = op_code - OPCODE_iconst_0;
538 goto push_int;
539 push_int:
540 if (byte_ops[PC] == OPCODE_newarray
541 || byte_ops[PC] == OPCODE_anewarray)
542 int_value = i;
543 push_type (int_type_node); break;
544 case OPCODE_lconst_0: case OPCODE_lconst_1:
545 push_type (long_type_node); break;
546 case OPCODE_fconst_0: case OPCODE_fconst_1: case OPCODE_fconst_2:
547 push_type (float_type_node); break;
548 case OPCODE_dconst_0: case OPCODE_dconst_1:
549 push_type (double_type_node); break;
550 case OPCODE_bipush:
551 i = IMMEDIATE_s1;
552 goto push_int;
553 case OPCODE_sipush:
554 i = IMMEDIATE_s2;
555 goto push_int;
556 case OPCODE_iload: type = int_type_node; goto general_load;
557 case OPCODE_lload: type = long_type_node; goto general_load;
558 case OPCODE_fload: type = float_type_node; goto general_load;
559 case OPCODE_dload: type = double_type_node; goto general_load;
560 case OPCODE_aload: type = ptr_type_node; goto general_load;
561 general_load:
562 index = wide ? IMMEDIATE_u2 : IMMEDIATE_u1;
563 wide = 0;
564 goto load;
565 case OPCODE_iload_0: type = int_type_node; index = 0; goto load;
566 case OPCODE_iload_1: type = int_type_node; index = 1; goto load;
567 case OPCODE_iload_2: type = int_type_node; index = 2; goto load;
568 case OPCODE_iload_3: type = int_type_node; index = 3; goto load;
569 case OPCODE_lload_0: type = long_type_node; index = 0; goto load;
570 case OPCODE_lload_1: type = long_type_node; index = 1; goto load;
571 case OPCODE_lload_2: type = long_type_node; index = 2; goto load;
572 case OPCODE_lload_3: type = long_type_node; index = 3; goto load;
573 case OPCODE_fload_0: type = float_type_node; index = 0; goto load;
574 case OPCODE_fload_1: type = float_type_node; index = 1; goto load;
575 case OPCODE_fload_2: type = float_type_node; index = 2; goto load;
576 case OPCODE_fload_3: type = float_type_node; index = 3; goto load;
577 case OPCODE_dload_0: type = double_type_node; index = 0; goto load;
578 case OPCODE_dload_1: type = double_type_node; index = 1; goto load;
579 case OPCODE_dload_2: type = double_type_node; index = 2; goto load;
580 case OPCODE_dload_3: type = double_type_node; index = 3; goto load;
581 case OPCODE_aload_0: type = ptr_type_node; index = 0; goto load;
582 case OPCODE_aload_1: type = ptr_type_node; index = 1; goto load;
583 case OPCODE_aload_2: type = ptr_type_node; index = 2; goto load;
584 case OPCODE_aload_3: type = ptr_type_node; index = 3; goto load;
585 load:
586 if (index < 0
587 || (index + TYPE_IS_WIDE (type)
588 >= DECL_MAX_LOCALS (current_function_decl)))
589 VERIFICATION_ERROR ("invalid local variable index in load");
590 tmp = type_map[index];
591 if (tmp == TYPE_UNKNOWN || tmp == TYPE_SECOND
592 || (TYPE_IS_WIDE (type)
593 && type_map[index+1] != void_type_node)
594 || (type == ptr_type_node
595 ? TREE_CODE (tmp) != POINTER_TYPE
596 : type == int_type_node
597 ? (! INTEGRAL_TYPE_P (tmp) || TYPE_PRECISION (tmp) > 32)
598 : type != tmp))
599 VERIFICATION_ERROR("invalid local variable type in load");
600 push_type (tmp);
601 goto note_used;
602 case OPCODE_istore: type = int_type_node; goto general_store;
603 case OPCODE_lstore: type = long_type_node; goto general_store;
604 case OPCODE_fstore: type = float_type_node; goto general_store;
605 case OPCODE_dstore: type = double_type_node; goto general_store;
606 case OPCODE_astore: type = ptr_type_node; goto general_store;
607 general_store:
608 index = wide ? IMMEDIATE_u2 : IMMEDIATE_u1;
609 wide = 0;
610 goto store;
611 case OPCODE_istore_0: type = int_type_node; index = 0; goto store;
612 case OPCODE_istore_1: type = int_type_node; index = 1; goto store;
613 case OPCODE_istore_2: type = int_type_node; index = 2; goto store;
614 case OPCODE_istore_3: type = int_type_node; index = 3; goto store;
615 case OPCODE_lstore_0: type = long_type_node; index=0; goto store;
616 case OPCODE_lstore_1: type = long_type_node; index=1; goto store;
617 case OPCODE_lstore_2: type = long_type_node; index=2; goto store;
618 case OPCODE_lstore_3: type = long_type_node; index=3; goto store;
619 case OPCODE_fstore_0: type=float_type_node; index=0; goto store;
620 case OPCODE_fstore_1: type=float_type_node; index=1; goto store;
621 case OPCODE_fstore_2: type=float_type_node; index=2; goto store;
622 case OPCODE_fstore_3: type=float_type_node; index=3; goto store;
623 case OPCODE_dstore_0: type=double_type_node; index=0; goto store;
624 case OPCODE_dstore_1: type=double_type_node; index=1; goto store;
625 case OPCODE_dstore_2: type=double_type_node; index=2; goto store;
626 case OPCODE_dstore_3: type=double_type_node; index=3; goto store;
627 case OPCODE_astore_0: type = ptr_type_node; index = 0; goto store;
628 case OPCODE_astore_1: type = ptr_type_node; index = 1; goto store;
629 case OPCODE_astore_2: type = ptr_type_node; index = 2; goto store;
630 case OPCODE_astore_3: type = ptr_type_node; index = 3; goto store;
631 store:
632 if (index < 0
633 || (index + TYPE_IS_WIDE (type)
634 >= DECL_MAX_LOCALS (current_function_decl)))
636 VERIFICATION_ERROR ("invalid local variable index in store");
637 return 0;
639 type = pop_type (type);
640 type_map[index] = type;
642 /* If local variable changed, we need to reconsider eh handlers. */
643 prev_eh_ranges = NULL_EH_RANGE;
645 /* Allocate decl and rtx for this variable now, so if we're not
646 optimizing, we get a temporary that survives the whole method. */
647 find_local_variable (index, type, oldpc);
649 if (TYPE_IS_WIDE (type))
650 type_map[index+1] = TYPE_SECOND;
651 /* ... fall through to note_used ... */
652 note_used:
653 /* For store or load, note that local variable INDEX is used.
654 This is needed to verify try-finally sub-routines. */
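/* Locals that a subroutine never reads or writes stay TYPE_UNUSED in
   its LABEL_RETURN_TYPE_STATE; at the matching ret, such slots keep
   whatever type each caller had instead of being merged across all
   callers (see merge_type_state and the OPCODE_ret case below). */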
655 if (current_subr)
657 tree vec = LABEL_RETURN_TYPE_STATE (current_subr);
658 tree subr_vec = LABEL_TYPE_STATE (current_subr);
659 int len = 1 + TYPE_IS_WIDE (type);
660 while (--len >= 0)
662 if (TREE_VEC_ELT (vec, index) == TYPE_UNUSED)
663 TREE_VEC_ELT (vec, index) = TREE_VEC_ELT (subr_vec, index);
666 break;
667 case OPCODE_iadd:
668 case OPCODE_iand:
669 case OPCODE_idiv:
670 case OPCODE_imul:
671 case OPCODE_ior:
672 case OPCODE_irem:
673 case OPCODE_ishl:
674 case OPCODE_ishr:
675 case OPCODE_isub:
676 case OPCODE_iushr:
677 case OPCODE_ixor:
678 type = int_type_node; goto binop;
679 case OPCODE_ineg:
680 case OPCODE_i2c:
681 case OPCODE_i2b:
682 case OPCODE_i2s:
683 type = int_type_node; goto unop;
684 case OPCODE_ladd:
685 case OPCODE_land:
686 case OPCODE_ldiv:
687 case OPCODE_lsub:
688 case OPCODE_lmul:
689 case OPCODE_lrem:
690 case OPCODE_lor:
691 case OPCODE_lxor:
692 type = long_type_node; goto binop;
693 case OPCODE_lneg:
694 type = long_type_node; goto unop;
695 case OPCODE_fadd: case OPCODE_fsub:
696 case OPCODE_fmul: case OPCODE_fdiv: case OPCODE_frem:
697 type = float_type_node; goto binop;
698 case OPCODE_fneg:
699 type = float_type_node; goto unop;
700 case OPCODE_dadd: case OPCODE_dsub:
701 case OPCODE_dmul: case OPCODE_ddiv: case OPCODE_drem:
702 type = double_type_node; goto binop;
703 case OPCODE_dneg:
704 type = double_type_node; goto unop;
705 unop:
706 pop_type (type);
707 push_type (type);
708 break;
709 binop:
710 pop_type (type);
711 pop_type (type);
712 push_type (type);
713 break;
714 case OPCODE_lshl:
715 case OPCODE_lshr:
716 case OPCODE_lushr:
717 pop_type (int_type_node);
718 pop_type (long_type_node);
719 push_type (long_type_node);
720 break;
721 case OPCODE_iinc:
722 index = wide ? IMMEDIATE_u2 : IMMEDIATE_u1;
723 PC += wide + 1;
724 wide = 0;
725 if (index < 0 || index >= DECL_MAX_LOCALS (current_function_decl))
726 VERIFICATION_ERROR ("invalid local variable index in iinc");
727 tmp = type_map[index];
728 if (! INTEGRAL_TYPE_P (tmp) || TYPE_PRECISION (tmp) > 32)
729 VERIFICATION_ERROR ("invalid local variable type in iinc");
730 break;
731 case OPCODE_i2l:
732 pop_type (int_type_node); push_type (long_type_node); break;
733 case OPCODE_i2f:
734 pop_type (int_type_node); push_type (float_type_node); break;
735 case OPCODE_i2d:
736 pop_type (int_type_node); push_type (double_type_node); break;
737 case OPCODE_l2i:
738 pop_type (long_type_node); push_type (int_type_node); break;
739 case OPCODE_l2f:
740 pop_type (long_type_node); push_type (float_type_node); break;
741 case OPCODE_l2d:
742 pop_type (long_type_node); push_type (double_type_node); break;
743 case OPCODE_f2i:
744 pop_type (float_type_node); push_type (int_type_node); break;
745 case OPCODE_f2l:
746 pop_type (float_type_node); push_type (long_type_node); break;
747 case OPCODE_f2d:
748 pop_type (float_type_node); push_type (double_type_node); break;
749 case OPCODE_d2i:
750 pop_type (double_type_node); push_type (int_type_node); break;
751 case OPCODE_d2l:
752 pop_type (double_type_node); push_type (long_type_node); break;
753 case OPCODE_d2f:
754 pop_type (double_type_node); push_type (float_type_node); break;
755 case OPCODE_lcmp:
756 type = long_type_node; goto compare;
757 case OPCODE_fcmpl:
758 case OPCODE_fcmpg:
759 type = float_type_node; goto compare;
760 case OPCODE_dcmpl:
761 case OPCODE_dcmpg:
762 type = double_type_node; goto compare;
763 compare:
764 pop_type (type); pop_type (type);
765 push_type (int_type_node); break;
766 case OPCODE_ifeq:
767 case OPCODE_ifne:
768 case OPCODE_iflt:
769 case OPCODE_ifge:
770 case OPCODE_ifgt:
771 case OPCODE_ifle:
772 pop_type (int_type_node); goto cond;
773 case OPCODE_ifnull:
774 case OPCODE_ifnonnull:
775 pop_type (ptr_type_node ); goto cond;
776 case OPCODE_if_icmpeq:
777 case OPCODE_if_icmpne:
778 case OPCODE_if_icmplt:
779 case OPCODE_if_icmpge:
780 case OPCODE_if_icmpgt:
781 case OPCODE_if_icmple:
782 pop_type (int_type_node); pop_type (int_type_node); goto cond;
783 case OPCODE_if_acmpeq:
784 case OPCODE_if_acmpne:
785 pop_type (object_ptr_type_node); pop_type (object_ptr_type_node);
786 goto cond;
787 cond:
788 PUSH_PENDING (lookup_label (oldpc + IMMEDIATE_s2));
789 break;
790 case OPCODE_goto:
791 PUSH_PENDING (lookup_label (oldpc + IMMEDIATE_s2));
792 INVALIDATE_PC;
793 break;
794 case OPCODE_wide:
795 switch (byte_ops[PC])
797 case OPCODE_iload: case OPCODE_lload:
798 case OPCODE_fload: case OPCODE_dload: case OPCODE_aload:
799 case OPCODE_istore: case OPCODE_lstore:
800 case OPCODE_fstore: case OPCODE_dstore: case OPCODE_astore:
801 case OPCODE_iinc:
802 case OPCODE_ret:
803 wide = 1;
804 break;
805 default:
806 VERIFICATION_ERROR ("invalid use of wide instruction");
808 break;
809 case OPCODE_return: type = void_type_node; goto ret;
810 case OPCODE_ireturn:
811 if ((TREE_CODE (return_type) == BOOLEAN_TYPE
812 || TREE_CODE (return_type) == CHAR_TYPE
813 || TREE_CODE (return_type) == INTEGER_TYPE)
814 && TYPE_PRECISION (return_type) <= 32)
815 type = return_type;
816 else
817 type = NULL_TREE;
818 goto ret;
819 case OPCODE_lreturn: type = long_type_node; goto ret;
820 case OPCODE_freturn: type = float_type_node; goto ret;
821 case OPCODE_dreturn: type = double_type_node; goto ret;
822 case OPCODE_areturn:
823 if (TREE_CODE (return_type) == POINTER_TYPE)
824 type = return_type;
825 else
826 type = NULL_TREE;
827 goto ret;
828 ret:
829 if (type != return_type)
830 VERIFICATION_ERROR ("incorrect ?return opcode");
831 if (type != void_type_node)
833 if (pop_type_0 (type) == NULL_TREE)
834 VERIFICATION_ERROR ("return value has wrong type");
836 INVALIDATE_PC;
837 break;
838 case OPCODE_getstatic: is_putting = 0; is_static = 1; goto field;
839 case OPCODE_putstatic: is_putting = 1; is_static = 1; goto field;
840 case OPCODE_getfield: is_putting = 0; is_static = 0; goto field;
841 case OPCODE_putfield: is_putting = 1; is_static = 0; goto field;
842 field:
844 int index = IMMEDIATE_u2;
845 tree field_signature = COMPONENT_REF_SIGNATURE (&current_jcf->cpool, index);
846 tree field_type = get_type_from_signature (field_signature);
847 if (is_putting)
848 pop_type (field_type);
849 if (! is_static)
851 int clindex = COMPONENT_REF_CLASS_INDEX (&current_jcf->cpool,
852 index);
853 tree self_type = get_class_constant (current_jcf, clindex);
854 /* Defer actual checking until next pass. */
855 if (pop_type_0 (self_type) == NULL_TREE)
856 VERIFICATION_ERROR ("incorrect type for field reference");
858 if (! is_putting)
859 push_type (field_type);
860 break;
862 case OPCODE_new:
863 push_type (get_class_constant (jcf, IMMEDIATE_u2));
864 break;
865 case OPCODE_dup: type_stack_dup (1, 0); break;
866 case OPCODE_dup_x1: type_stack_dup (1, 1); break;
867 case OPCODE_dup_x2: type_stack_dup (1, 2); break;
868 case OPCODE_dup2: type_stack_dup (2, 0); break;
869 case OPCODE_dup2_x1: type_stack_dup (2, 1); break;
870 case OPCODE_dup2_x2: type_stack_dup (2, 2); break;
871 case OPCODE_pop: index = 1; goto pop;
872 case OPCODE_pop2: index = 2; goto pop;
873 pop:
874 if (stack_pointer < index)
875 VERIFICATION_ERROR ("stack underflow");
876 stack_pointer -= index;
877 break;
878 case OPCODE_swap:
879 if (stack_pointer < 2)
880 VERIFICATION_ERROR ("stack underflow (in swap)");
881 else
883 tree type1 = stack_type_map[stack_pointer - 1];
884 tree type2 = stack_type_map[stack_pointer - 2];
885 if (type1 == void_type_node || type2 == void_type_node)
886 VERIFICATION_ERROR ("verifier (swap): double or long value");
887 stack_type_map[stack_pointer - 2] = type1;
888 stack_type_map[stack_pointer - 1] = type2;
890 break;
891 case OPCODE_ldc: index = IMMEDIATE_u1; goto ldc;
892 case OPCODE_ldc2_w:
893 case OPCODE_ldc_w:
894 index = IMMEDIATE_u2; goto ldc;
895 ldc:
896 if (index <= 0 || index >= JPOOL_SIZE(current_jcf))
897 VERIFICATION_ERROR ("bad constant pool index in ldc");
898 int_value = -1;
899 switch (JPOOL_TAG (current_jcf, index) & ~CONSTANT_ResolvedFlag)
901 case CONSTANT_Integer: type = int_type_node; goto check_ldc;
902 case CONSTANT_Float: type = float_type_node; goto check_ldc;
903 case CONSTANT_String: type = string_type_node; goto check_ldc;
904 case CONSTANT_Long: type = long_type_node; goto check_ldc;
905 case CONSTANT_Double: type = double_type_node; goto check_ldc;
906 check_ldc:
907 if (TYPE_IS_WIDE (type) == (op_code == OPCODE_ldc2_w))
908 break;
909 /* ... else fall through ... */
910 default:
911 VERIFICATION_ERROR ("bad constant pool tag in ldc");
913 if (type == int_type_node)
915 i = TREE_INT_CST_LOW (get_constant (current_jcf, index));
916 goto push_int;
918 push_type (type);
919 break;
921 case OPCODE_invokevirtual:
922 case OPCODE_invokespecial:
923 case OPCODE_invokestatic:
924 case OPCODE_invokeinterface:
926 int index = IMMEDIATE_u2;
927 tree sig = COMPONENT_REF_SIGNATURE (&current_jcf->cpool, index);
928 tree self_type = get_class_constant
929 (current_jcf, COMPONENT_REF_CLASS_INDEX (&current_jcf->cpool,
930 index));
931 tree method_name = COMPONENT_REF_NAME (&current_jcf->cpool, index);
932 tree method_type;
933 method_type = parse_signature_string (IDENTIFIER_POINTER (sig),
934 IDENTIFIER_LENGTH (sig));
935 if (TREE_CODE (method_type) != FUNCTION_TYPE)
936 VERIFICATION_ERROR ("bad method signature");
937 pop_argument_types (TYPE_ARG_TYPES (method_type));
939 /* Can't invoke <clinit> */
940 if (method_name == clinit_identifier_node)
941 VERIFICATION_ERROR ("invoke opcode can't invoke <clinit>");
942 /* Apart from invokespecial, can't invoke <init> */
943 if (op_code != OPCODE_invokespecial
944 && method_name == init_identifier_node)
945 VERIFICATION_ERROR ("invoke opcode can't invoke <init>");
947 if (op_code != OPCODE_invokestatic)
948 pop_type (self_type);
950 switch (op_code)
952 case OPCODE_invokeinterface:
954 int nargs = IMMEDIATE_u1;
955 int notZero = IMMEDIATE_u1;
957 if (!nargs || notZero)
958 VERIFICATION_ERROR
959 ("invalid argument number in invokeinterface");
960 break;
964 if (TREE_TYPE (method_type) != void_type_node)
965 push_type (TREE_TYPE (method_type));
966 break;
969 case OPCODE_arraylength:
970 /* Type checking is actually done during code generation. */
971 pop_type( ptr_type_node );
972 push_type( int_type_node );
973 break;
975 /* Q&D verification here; more checking is done during code generation.
976 For byte/boolean/char/short, the value popped is an int coerced
977 into the right type before being stored. */
978 case OPCODE_iastore: type = int_type_node; goto astore;
979 case OPCODE_lastore: type = long_type_node; goto astore;
980 case OPCODE_fastore: type = float_type_node; goto astore;
981 case OPCODE_dastore: type = double_type_node; goto astore;
982 case OPCODE_aastore: type = ptr_type_node; goto astore;
983 case OPCODE_bastore: type = int_type_node; goto astore;
984 case OPCODE_castore: type = int_type_node; goto astore;
985 case OPCODE_sastore: type = int_type_node; goto astore;
986 astore:
987 /* FIXME - need better verification here */
988 pop_type (type); /* new value */
989 pop_type (int_type_node); /* index */
990 pop_type (ptr_type_node); /* array */
991 break;
993 /* Q&D verification here; more checking is done during code generation.
994 For byte/boolean/char/short, the value pushed is an int. */
995 case OPCODE_iaload: type = int_type_node; goto aload;
996 case OPCODE_laload: type = long_type_node; goto aload;
997 case OPCODE_faload: type = float_type_node; goto aload;
998 case OPCODE_daload: type = double_type_node; goto aload;
999 case OPCODE_aaload: type = ptr_type_node; goto aload;
1000 case OPCODE_baload: type = promote_type (byte_type_node); goto aload;
1001 case OPCODE_caload: type = promote_type (char_type_node); goto aload;
1002 case OPCODE_saload: type = promote_type (short_type_node); goto aload;
1003 aload:
1004 pop_type (int_type_node);
1005 tmp = pop_type (ptr_type_node);
1006 if (is_array_type_p (tmp))
1007 type = TYPE_ARRAY_ELEMENT (TREE_TYPE (tmp));
1008 else if (tmp != TYPE_NULL)
1009 VERIFICATION_ERROR ("array load from non-array type");
1010 push_type (type);
1011 break;
1013 case OPCODE_anewarray:
1014 type = get_class_constant (current_jcf, IMMEDIATE_u2);
1015 type = promote_type (type);
1016 goto newarray;
1018 case OPCODE_newarray:
1019 index = IMMEDIATE_u1;
1020 type = decode_newarray_type (index);
1021 if (type == NULL_TREE)
1022 VERIFICATION_ERROR ("invalid type code in newarray opcode");
1023 goto newarray;
1025 newarray:
1026 if (int_value >= 0 && prevpc >= 0)
1028 /* If previous instruction pushed int constant,
1029 we want to use it. */
1030 switch (byte_ops[prevpc])
1032 case OPCODE_iconst_0: case OPCODE_iconst_1:
1033 case OPCODE_iconst_2: case OPCODE_iconst_3:
1034 case OPCODE_iconst_4: case OPCODE_iconst_5:
1035 case OPCODE_bipush: case OPCODE_sipush:
1036 case OPCODE_ldc: case OPCODE_ldc_w:
1037 break;
1038 default:
1039 int_value = -1;
1042 else
1043 int_value = -1;
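/* A non-negative int_value here lets build_java_array_type record a
   fixed-size array type; -1 is used when the length is not a known
   constant. */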
1044 type = build_java_array_type (type, int_value);
1045 pop_type (int_type_node);
1046 push_type (type);
1047 break;
1049 case OPCODE_multianewarray:
1051 int ndim, i;
1052 index = IMMEDIATE_u2;
1053 ndim = IMMEDIATE_u1;
1055 if( ndim < 1 )
1056 VERIFICATION_ERROR ("number of dimensions less than 1 in multianewarray");
1058 for( i = 0; i < ndim; i++ )
1059 pop_type (int_type_node);
1060 push_type (get_class_constant (current_jcf, index));
1061 break;
1064 case OPCODE_aconst_null:
1065 push_type (ptr_type_node);
1066 break;
1068 case OPCODE_athrow:
1069 /* FIXME: athrow also empties the stack. */
1070 pop_type (throwable_type_node);
1071 INVALIDATE_PC;
1072 break;
1074 case OPCODE_checkcast:
1075 pop_type (ptr_type_node);
1076 type = get_class_constant (current_jcf, IMMEDIATE_u2);
1077 push_type (type);
1078 break;
1079 case OPCODE_instanceof:
1080 pop_type (ptr_type_node);
1081 get_class_constant (current_jcf, IMMEDIATE_u2);
1082 push_type (int_type_node);
1083 break;
1085 case OPCODE_tableswitch:
1087 jint low, high;
1089 pop_type (int_type_node);
1090 while (PC%4)
1092 if (byte_ops[PC++])
1093 VERIFICATION_ERROR ("bad alignment in tableswitch pad");
1095 PUSH_PENDING (lookup_label (oldpc+IMMEDIATE_s4));
1096 low = IMMEDIATE_s4;
1097 high = IMMEDIATE_s4;
1099 if (low > high)
1100 VERIFICATION_ERROR ("unsorted low/high value in tableswitch");
1102 while (low++ <= high)
1103 PUSH_PENDING (lookup_label (oldpc + IMMEDIATE_s4));
1104 INVALIDATE_PC;
1105 break;
1108 case OPCODE_lookupswitch:
1110 jint npairs, last = 0, not_registered = 1;
1112 pop_type (int_type_node);
1113 while (PC%4)
1115 if (byte_ops[PC++])
1116 VERIFICATION_ERROR ("bad alignment in lookupswitch pad");
1119 PUSH_PENDING (lookup_label (oldpc+IMMEDIATE_s4));
1120 npairs = IMMEDIATE_s4;
1122 if (npairs < 0)
1123 VERIFICATION_ERROR ("invalid number of targets in lookupswitch");
1125 while (npairs--)
1127 int match = IMMEDIATE_s4;
1128 if (not_registered)
1129 not_registered = 0;
1130 else if (last >= match)
1131 VERIFICATION_ERROR ("unsorted match value in lookupswitch");
1133 last = match;
1134 PUSH_PENDING (lookup_label (oldpc + IMMEDIATE_s4));
1136 INVALIDATE_PC;
1137 break;
1140 case OPCODE_monitorenter:
1141 /* fall thru */
1142 case OPCODE_monitorexit:
1143 pop_type (ptr_type_node);
1144 break;
1146 case OPCODE_goto_w:
1147 PUSH_PENDING (lookup_label (oldpc + IMMEDIATE_s4));
1148 INVALIDATE_PC;
1149 break;
1151 case OPCODE_jsr:
1153 tree target = lookup_label (oldpc + IMMEDIATE_s2);
1154 tree return_label = lookup_label (PC);
1155 push_type (return_address_type_node);
1156 if (! LABEL_VERIFIED (target))
1158 /* first time seen */
1159 tree return_type_map;
1160 int nlocals = DECL_MAX_LOCALS (current_function_decl);
1161 index = nlocals + DECL_MAX_STACK (current_function_decl);
1162 return_type_map = make_tree_vec (index);
1163 while (index > nlocals)
1164 TREE_VEC_ELT (return_type_map, --index) = TYPE_UNKNOWN;
1165 while (index > 0)
1166 TREE_VEC_ELT (return_type_map, --index) = TYPE_UNUSED;
1167 LABEL_RETURN_LABEL (target)
1168 = build_decl (LABEL_DECL, NULL_TREE, TREE_TYPE (target));
1169 LABEL_PC (LABEL_RETURN_LABEL (target)) = -1;
1170 LABEL_RETURN_TYPE_STATE (target) = return_type_map;
1171 LABEL_IS_SUBR_START (target) = 1;
1172 LABEL_IN_SUBR (target) = 1;
1173 LABEL_SUBR_START (target) = target;
1174 LABEL_SUBR_CONTEXT (target) = current_subr;
1176 else if (! LABEL_IS_SUBR_START (target)
1177 || LABEL_SUBR_CONTEXT (target) != current_subr)
1178 VERIFICATION_ERROR ("label part of different subroutines");
1180 i = merge_type_state (target);
1181 if (i != 0)
1183 if (i < 0)
1184 VERIFICATION_ERROR ("types could not be merged at jsr");
1185 push_pending_label (target);
1187 current_subr = target;
1189 /* Chain return_pc onto LABEL_RETURN_LABELS (target) if needed. */
1190 if (! value_member (return_label, LABEL_RETURN_LABELS (target)))
1192 LABEL_RETURN_LABELS (target)
1193 = tree_cons (NULL_TREE, return_label,
1194 LABEL_RETURN_LABELS (target));
1197 if (LABEL_VERIFIED (target))
1199 tree return_map = LABEL_RETURN_TYPE_STATE (target);
1200 int len = TREE_VEC_LENGTH (return_map);
1201 stack_pointer = len - DECL_MAX_LOCALS (current_function_decl);
1202 while (--len >= 0)
1204 if (TREE_VEC_ELT (return_map, len) != TYPE_UNUSED)
1205 type_map[len] = TREE_VEC_ELT (return_map, len);
1207 current_subr = LABEL_SUBR_CONTEXT (target);
1208 PUSH_PENDING (return_label);
1211 INVALIDATE_PC;
1213 break;
1214 case OPCODE_ret:
1215 if (current_subr == NULL)
1216 VERIFICATION_ERROR ("ret instruction not in a jsr subroutine");
1217 else
1219 tree ret_map = LABEL_RETURN_TYPE_STATE (current_subr);
1220 int size = DECL_MAX_LOCALS(current_function_decl)+stack_pointer;
1221 index = wide ? IMMEDIATE_u2 : IMMEDIATE_u1;
1222 wide = 0;
1223 INVALIDATE_PC;
1224 if (index < 0 || index >= DECL_MAX_LOCALS (current_function_decl)
1225 || type_map[index] != TYPE_RETURN_ADDR)
1226 VERIFICATION_ERROR ("invalid ret index");
1228 /* The next chunk of code is similar to an inlined version of
1229 * merge_type_state (LABEL_RETURN_LABEL (current_subr)).
1230 * The main differences are that LABEL_RETURN_LABEL is
1231 * pre-allocated by the jsr (but we don't know the size then);
1232 * and that we have to handle TYPE_UNUSED. */
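/* RETURN_MAP_ADJUSTED distinguishes the first ret seen for this
   subroutine, which simply records the current type state and the
   final stack size, from later rets, which must merge into and agree
   with that recorded state. */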
1234 if (! RETURN_MAP_ADJUSTED (ret_map))
1235 { /* First return from this subroutine - fix stack pointer. */
1236 TREE_VEC_LENGTH (ret_map) = size;
1237 for (index = size; --index >= 0; )
1239 if (TREE_VEC_ELT (ret_map, index) != TYPE_UNUSED)
1240 TREE_VEC_ELT (ret_map, index) = type_map[index];
1242 RETURN_MAP_ADJUSTED (ret_map) = 1;
1244 else
1246 if (TREE_VEC_LENGTH (ret_map) != size)
1247 VERIFICATION_ERROR ("inconsistent stack size on ret");
1248 for (index = 0; index < size; index++)
1250 tree type = TREE_VEC_ELT (ret_map, index);
1251 if (type != TYPE_UNUSED)
1253 type = merge_types (type, type_map [index]);
1254 TREE_VEC_ELT (ret_map, index) = type;
1255 if (type == TYPE_UNKNOWN)
1257 if (index >= size - stack_pointer)
1258 VERIFICATION_ERROR
1259 ("inconsistent types on ret from jsr");
1261 else if (TYPE_IS_WIDE (type))
1262 index++;
1269 break;
1270 case OPCODE_jsr_w:
1271 case OPCODE_ret_w:
1272 default:
1273 error ("unknown opcode %d@pc=%d during verification", op_code, PC-1);
1274 return 0;
1277 prevpc = oldpc;
1279 /* The following test is true if we have entered or exited an exception
1280 handler range *or* we have done a store to a local variable.
1281 In either case we need to consider any exception handlers that
1282 might "follow" this instruction. */
1284 if (eh_ranges != prev_eh_ranges)
1286 int save_stack_pointer = stack_pointer;
1287 int index = DECL_MAX_LOCALS (current_function_decl);
1288 tree save_type = type_map[index];
1289 tree save_current_subr = current_subr;
1290 struct eh_range *ranges = find_handler (oldpc);
1291 stack_pointer = 1;
1292 for (; ranges != NULL_EH_RANGE; ranges = ranges->outer)
1294 tree chain = ranges->handlers;
1296 /* We need to determine if the handler is part of current_subr.
1297 There are two cases: (1) The exception catch range
1298 is entirely within current_subr. In that case the handler
1299 is also part of current_subr.
1300 (2) Some of the catch range is not in current_subr.
1301 In that case, the handler is *not* part of current_subr.
1303 Figuring out which is the case is not necessarily obvious,
1304 in the presence of clever code generators (and obfuscators).
1305 We make the simplifying assumption that in case (2) the
1306 current_subr is entirely within the catch range.
1307 In that case we can assume that if a caller (the jsr) of
1308 a subroutine is within the catch range, then the handler is
1309 *not* part of the subroutine, and vice versa. */
1311 current_subr = save_current_subr;
1312 for ( ; current_subr != NULL_TREE;
1313 current_subr = LABEL_SUBR_CONTEXT (current_subr))
1315 tree return_labels = LABEL_RETURN_LABELS (current_subr);
1316 /* There could be multiple return_labels, but
1317 we only need to check one. */
1318 int return_pc = LABEL_PC (TREE_VALUE (return_labels));
1319 if (return_pc <= ranges->start_pc
1320 || return_pc > ranges->end_pc)
1321 break;
1324 for ( ; chain != NULL_TREE; chain = TREE_CHAIN (chain))
1326 tree handler = TREE_VALUE (chain);
1327 tree type = TREE_PURPOSE (chain);
1328 if (type == NULL_TREE) /* a finally handler */
1329 type = throwable_type_node;
1330 type_map[index] = promote_type (type);
1332 PUSH_PENDING (handler);
1335 stack_pointer = save_stack_pointer;
1336 current_subr = save_current_subr;
1337 type_map[index] = save_type;
1338 prev_eh_ranges = eh_ranges;
1341 return 1;
1342 bad_pc:
1343 message = "program counter out of range";
1344 goto verify_error;
1345 verify_error:
1346 error ("verification error at PC=%d", oldpc);
1347 error ("%s", message);
1348 return 0;