/* Handle verification of bytecoded methods for the GNU compiler for
   the Java(TM) language.
   Copyright (C) 1997, 1998, 1999 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.

Java and all Java-based marks are trademarks or registered trademarks
of Sun Microsystems, Inc. in the United States and other countries.
The Free Software Foundation is independent of Sun Microsystems, Inc. */
#include "config.h"
#include "system.h"
#include "tree.h"
#include "java-tree.h"
#include "javaop.h"
#include "java-opcodes.h"
#include "jcf.h"
#include "java-except.h"
#include "toplev.h"
static void push_pending_label PROTO ((tree));
static tree merge_types PROTO ((tree, tree));
static const char *check_pending_block PROTO ((tree));
static void type_stack_dup PROTO ((int, int));
static int start_pc_cmp PROTO ((const PTR, const PTR));

extern int stack_pointer;

/* During verification, start of the current subroutine (jsr target). */
tree current_subr;

/* A list of pending blocks, chained using LABEL_PENDING_CHAIN.
   A pending block is one that has LABEL_CHANGED set, which means
   it requires (re-) verification. */
tree pending_blocks;
/* Append TARGET_LABEL to the pending_block stack unless already in it. */

static void
push_pending_label (target_label)
     tree target_label;
{
  if (! LABEL_CHANGED (target_label))
    {
      LABEL_PENDING_CHAIN (target_label) = pending_blocks;
      pending_blocks = target_label;
      LABEL_CHANGED (target_label) = 1;
    }
}
/* Note that TARGET_LABEL is a possible successor instruction.
   Merge the type state etc.
   Return NULL on success, or an error message on failure. */

static const char *
check_pending_block (target_label)
     tree target_label;
{
  int changed = merge_type_state (target_label);

  if (changed)
    {
      if (changed < 0)
        return "types could not be merged";
      push_pending_label (target_label);
    }

  if (current_subr == NULL)
    {
      if (LABEL_IN_SUBR (target_label))
        return "might transfer control into subroutine";
    }
  else
    {
      if (LABEL_IN_SUBR (target_label))
        {
          if (LABEL_SUBR_START (target_label) != current_subr)
            return "transfer out of subroutine";
        }
      else if (! LABEL_VERIFIED (target_label))
        {
          LABEL_IN_SUBR (target_label) = 1;
          LABEL_SUBR_START (target_label) = current_subr;
        }
      else
        return "transfer out of subroutine";
    }
  return NULL;
}
/* Return the "merged" types of TYPE1 and TYPE2.
   If either is primitive, the other must match (after promotion to int).
   For reference types, return the common super-class.
   Return TYPE_UNKNOWN if the types cannot be merged. */

static tree
merge_types (type1, type2)
     tree type1, type2;
{
  if (type1 == type2)
    return type1;
  if (type1 == TYPE_UNKNOWN || type2 == TYPE_UNKNOWN
      || type1 == TYPE_RETURN_ADDR || type2 == TYPE_RETURN_ADDR)
    return TYPE_UNKNOWN;
  if (TREE_CODE (type1) == POINTER_TYPE && TREE_CODE (type2) == POINTER_TYPE)
    {
      int depth1, depth2;
      tree tt1, tt2;
      /* ptr_type_node is only used for a null reference,
         which is compatible with any reference type. */
      if (type1 == ptr_type_node || type2 == object_ptr_type_node)
        return type2;
      if (type2 == ptr_type_node || type1 == object_ptr_type_node)
        return type1;

      tt1 = HANDLE_TO_CLASS_TYPE (TREE_TYPE (type1));
      tt2 = HANDLE_TO_CLASS_TYPE (TREE_TYPE (type2));

      if (TYPE_ARRAY_P (tt1) || TYPE_ARRAY_P (tt2))
        {
          if (TYPE_ARRAY_P (tt1) == TYPE_ARRAY_P (tt2))
            {
              tree el_type1 = TYPE_ARRAY_ELEMENT (tt1);
              tree el_type2 = TYPE_ARRAY_ELEMENT (tt2);
              tree el_type = NULL_TREE;
              if (el_type1 == el_type2)
                el_type = el_type1;
              else if (TREE_CODE (el_type1) == POINTER_TYPE
                       && TREE_CODE (el_type2) == POINTER_TYPE)
                el_type = merge_types (el_type1, el_type2);
              if (el_type != NULL_TREE)
                {
                  HOST_WIDE_INT len1 = java_array_type_length (tt1);
                  HOST_WIDE_INT len2 = java_array_type_length (tt2);
                  if (len1 != len2)
                    len1 = -1;
                  else if (el_type1 == el_type2)
                    return type1;
                  return promote_type (build_java_array_type (el_type, len1));
                }
            }
          return object_ptr_type_node;
        }

      if (CLASS_INTERFACE (TYPE_NAME (tt1)))
        {
          if (CLASS_INTERFACE (TYPE_NAME (tt2)))
            {
              /* This is a kludge, but matches what Sun's verifier does.
                 It can be tricked, but is safe as long as type errors
                 (i.e. interface method calls) are caught at run-time. */
              return object_ptr_type_node;
            }
          else
            {
              if (can_widen_reference_to (tt2, tt1))
                return type1;
              else
                return TYPE_UNKNOWN;
            }
        }
      else if (CLASS_INTERFACE (TYPE_NAME (tt2)))
        {
          if (can_widen_reference_to (tt1, tt2))
            return type2;
          else
            return TYPE_UNKNOWN;
        }

      type1 = tt1;
      type2 = tt2;

      depth1 = class_depth (type1);
      depth2 = class_depth (type2);
      for ( ; depth1 > depth2;  depth1--)
        type1 = TYPE_BINFO_BASETYPE (type1, 0);
      for ( ; depth2 > depth1;  depth2--)
        type2 = TYPE_BINFO_BASETYPE (type2, 0);
      while (type1 != type2)
        {
          type1 = TYPE_BINFO_BASETYPE (type1, 0);
          type2 = TYPE_BINFO_BASETYPE (type2, 0);
        }
      return promote_type (type1);
    }
  if (INTEGRAL_TYPE_P (type1) && INTEGRAL_TYPE_P (type2)
      && TYPE_PRECISION (type1) <= 32 && TYPE_PRECISION (type2) <= 32)
    return int_type_node;
  return TYPE_UNKNOWN;
}
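
/* Illustrative examples of the merge rules above (informal, not taken
   from the original sources):
     merge (java.lang.String, java.lang.StringBuffer) => java.lang.Object
       (walk up to the nearest common super-class);
     merge (null reference, T[])                      => T[];
     merge (two unrelated interfaces)                 => java.lang.Object
       (the "kludge" noted in the code);
     merge (boolean, int)                             => int
       (both are integral types of 32 bits or less);
     merge (int, float)                               => TYPE_UNKNOWN.  */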
/* Merge the current type state with that at LABEL.
   Return -1 if the states are incompatible (i.e. on error),
   0 if there was no change, and 1 if there was a change. */

int
merge_type_state (label)
     tree label;
{
  int nlocals = DECL_MAX_LOCALS (current_function_decl);
  int cur_length = stack_pointer + nlocals;
  tree vec = LABEL_TYPE_STATE (label);
  tree return_map;
  if (vec == NULL_TREE)
    {
      vec = make_tree_vec (cur_length);
      LABEL_TYPE_STATE (label) = vec;
      while (--cur_length >= 0)
        TREE_VEC_ELT (vec, cur_length) = type_map[cur_length];
      return 1;
    }
  else
    {
      int i;
      int changed = 0;
      if (LABEL_IS_SUBR_START (label) && LABEL_VERIFIED (label)
          && current_subr != label)
        return_map = LABEL_RETURN_TYPE_STATE (label);
      else
        return_map = NULL_TREE;
      if (TREE_VEC_LENGTH (vec) != cur_length)
        {
          return -1;
        }
      for (i = 0; i < cur_length; i++)
        {
          tree old_type = TREE_VEC_ELT (vec, i);
          tree new_type = merge_types (old_type, type_map[i]);
          if (TREE_VEC_ELT (vec, i) != new_type)
            {
              /* If there has been a change, note that since we must re-verify.
                 However, if the label is the start of a subroutine,
                 we don't care about local variables that are neither
                 set nor used in the sub-routine. */
              if (return_map == NULL_TREE || i >= nlocals
                  || TREE_VEC_ELT (return_map, i) != TYPE_UNUSED
                  || (TYPE_IS_WIDE (new_type)
                      && TREE_VEC_ELT (return_map, i+1) != TYPE_UNUSED))
                changed = 1;
            }
          TREE_VEC_ELT (vec, i) = new_type;
          if (new_type == TYPE_UNKNOWN)
            {
              if (i >= nlocals)
                return -1;
            }
          else if (TYPE_IS_WIDE (new_type))
            i++;
        }
      return changed;
    }
}
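
/* A type state (as stored in LABEL_TYPE_STATE and built above) is,
   roughly, a TREE_VEC whose first DECL_MAX_LOCALS elements describe the
   local variable slots and whose remaining stack_pointer elements
   describe the operand stack, bottom first.  A long or double occupies
   two consecutive slots, the second holding a place-holder value.  */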
/* Handle dup-like operations. */

static void
type_stack_dup (size, offset)
     int size, offset;
{
  tree type[4];
  int index;
  if (size + offset > stack_pointer)
    error ("stack underflow - dup* operation");
  for (index = 0;  index < size + offset;  index++)
    {
      type[index] = stack_type_map[stack_pointer - 1];
      if (type[index] == void_type_node)
        {
          index++;
          type[index] = stack_type_map[stack_pointer - 2];
          if (! TYPE_IS_WIDE (type[index]))
            fatal ("internal error - dup operation");
          if (index == size || index == size + offset)
            fatal ("dup operation splits 64-bit number");
        }
      pop_type (type[index]);
    }
  for (index = size;  --index >= 0; )
    {
      if (type[index] != void_type_node)
        push_type (type[index]);
    }

  for (index = size + offset;  --index >= 0; )
    {
      if (type[index] != void_type_node)
        push_type (type[index]);
    }
}
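
/* For reference, the (size, offset) pairs passed by the callers below
   correspond to the JVM dup family as follows (stack top on the right,
   and a long or double counting as two stack words):

     dup      (1, 0):  ..., v1           ->  ..., v1, v1
     dup_x1   (1, 1):  ..., v2, v1       ->  ..., v1, v2, v1
     dup_x2   (1, 2):  ..., v3, v2, v1   ->  ..., v1, v3, v2, v1
     dup2     (2, 0):  ..., v2, v1       ->  ..., v2, v1, v2, v1
     dup2_x1  (2, 1):  ..., v3, v2, v1   ->  ..., v2, v1, v3, v2, v1
     dup2_x2  (2, 2):  ..., v4, v3, v2, v1
                                         ->  ..., v2, v1, v4, v3, v2, v1  */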
/* This keeps track of a start PC and corresponding initial index.  */
struct pc_index
{
  int start_pc;
  int index;
};

/* A helper that is used when sorting exception ranges.  */
static int
start_pc_cmp (xp, yp)
     const PTR xp;
     const PTR yp;
{
  struct pc_index *x = (struct pc_index *) xp;
  struct pc_index *y = (struct pc_index *) yp;
  return x->start_pc - y->start_pc;
}
/* This causes the next iteration to ignore the next instruction
   and look for some other unhandled instruction. */
#define INVALIDATE_PC  (prevpc = -1, oldpc = PC, PC = INVALID_PC)
#define INVALID_PC (-1)

#define VERIFICATION_ERROR(MESSAGE) \
  do { message = MESSAGE;  goto verify_error; } while (0)

#define PUSH_PENDING(LABEL) \
  do { if ((message = check_pending_block (LABEL)) != NULL) \
         goto verify_error; } while (0)

#ifdef __GNUC__
#define CHECK_PC_IN_RANGE(PC) ({if (PC < 0 || PC > length) goto bad_pc; 1;})
#else
#define CHECK_PC_IN_RANGE(PC) (PC < 0 || PC > length ? \
  (fatal ("Bad byte codes.\n"), 0) : 1)
#endif

#define BCODE byte_ops
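
/* BCODE is, presumably, the name expected by the IMMEDIATE_u1/u2/s1/s2/s4
   operand-fetching macros used throughout the big switch below: they read
   operand bytes from BCODE at PC and advance PC past them.  */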
/* Verify the bytecodes of the current method.
   Return 1 on success, 0 on failure. */
int
verify_jvm_instructions (jcf, byte_ops, length)
     JCF* jcf;
     const unsigned char *byte_ops;
     long length;
{
  tree label;
  int wide = 0;
  int op_code;
  int PC;
  int oldpc;   /* PC of start of instruction. */
  int prevpc;  /* If >= 0, PC of previous instruction. */
  const char *message;
  int i;
  register unsigned char *p;
  struct eh_range *prev_eh_ranges = NULL_EH_RANGE;
  struct eh_range *eh_ranges;
  tree return_type = TREE_TYPE (TREE_TYPE (current_function_decl));
  struct pc_index *starts;
  int eh_count;

  jint int_value = -1;

  pending_blocks = NULL_TREE;

  /* Handle the exception table. */
  method_init_exceptions ();
  JCF_SEEK (jcf, DECL_CODE_OFFSET (current_function_decl) + length);
  eh_count = JCF_readu2 (jcf);

  /* We read the exception handlers in order of increasing start PC.
     To do this we first read and sort the start PCs.  */
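  /* Each exception_table entry in the class file is 8 bytes: four u2
     fields in the order start_pc, end_pc, handler_pc, catch_type.  That
     is why the code below strides by 8 * i and reads the fields at
     offsets 0, 2, 4 and 6.  */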
  starts = (struct pc_index *) xmalloc (eh_count * sizeof (struct pc_index));
  for (i = 0; i < eh_count; ++i)
    {
      starts[i].start_pc = GET_u2 (jcf->read_ptr + 8 * i);
      starts[i].index = i;
    }
  qsort (starts, eh_count, sizeof (struct pc_index), start_pc_cmp);

  for (i = 0; i < eh_count; ++i)
    {
      int start_pc, end_pc, handler_pc, catch_type;

      p = jcf->read_ptr + 8 * starts[i].index;

      start_pc = GET_u2 (p);
      end_pc = GET_u2 (p+2);
      handler_pc = GET_u2 (p+4);
      catch_type = GET_u2 (p+6);

      if (start_pc < 0 || start_pc >= length
          || end_pc < 0 || end_pc > length || start_pc >= end_pc
          || handler_pc < 0 || handler_pc >= length
          || (handler_pc >= start_pc && handler_pc < end_pc)
          || ! (instruction_bits [start_pc] & BCODE_INSTRUCTION_START)
          || ! (instruction_bits [end_pc] & BCODE_INSTRUCTION_START)
          || ! (instruction_bits [handler_pc] & BCODE_INSTRUCTION_START))
        {
          error ("bad pc in exception_table");
          free (starts);
          return 0;
        }

      add_handler (start_pc, end_pc,
                   lookup_label (handler_pc),
                   catch_type == 0 ? NULL_TREE
                   : get_class_constant (jcf, catch_type));

      instruction_bits [handler_pc] |= BCODE_EXCEPTION_TARGET;
    }

  free (starts);
  handle_nested_ranges ();
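
  /* The main loop below is a standard data-flow / work-list pass over the
     bytecode: starting at PC 0 we simulate each instruction against the
     current type_map and operand stack, and every branch target whose
     recorded type state changes (see merge_type_state) is pushed onto
     pending_blocks to be (re-)verified until a fixed point is reached.  */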
  for (PC = 0;;)
    {
      int index;
      tree type, tmp;
      if (((PC != INVALID_PC
            && instruction_bits [PC] & BCODE_TARGET) != 0)
          || PC == 0)
        {
          PUSH_PENDING (lookup_label (PC));
          INVALIDATE_PC;
        }

      /* Check if there are any more pending blocks in the current
         subroutine.  Because we push pending blocks in a
         last-in-first-out order, and because we don't push anything
         from our caller until we are done with this subroutine or
         anything nested in it, we are done if the top of the
         pending_blocks stack is not in a subroutine, or it is in our
         caller. */
      if (current_subr
          && PC == INVALID_PC)
        {
          tree caller = LABEL_SUBR_CONTEXT (current_subr);

          if (pending_blocks == NULL_TREE
              || ! LABEL_IN_SUBR (pending_blocks)
              || LABEL_SUBR_START (pending_blocks) == caller)
            {
              int size = DECL_MAX_LOCALS (current_function_decl) + stack_pointer;
              tree ret_map = LABEL_RETURN_TYPE_STATE (current_subr);
              tmp = LABEL_RETURN_LABELS (current_subr);

              /* FIXME: If we exit a subroutine via a throw, we might
                 have returned to an earlier caller.  Obviously a
                 "ret" can only return one level, but a throw may
                 return many levels. */
              current_subr = caller;

              if (RETURN_MAP_ADJUSTED (ret_map))
                {
                  /* Since we are done with this subroutine, set up
                     the (so far known) return address as pending -
                     with the merged type state. */
                  for ( ; tmp != NULL_TREE;  tmp = TREE_CHAIN (tmp))
                    {
                      tree return_label = TREE_VALUE (tmp);
                      tree return_state = LABEL_TYPE_STATE (return_label);
                      if (return_state == NULL_TREE)
                        {
                          /* This means we had not verified the
                             subroutine earlier, so this is the first jsr to
                             call it.  In this case, the type_map of the return
                             address is just the current type_map - and that
                             is handled by the following PUSH_PENDING. */
                        }
                      else
                        {
                          /* In this case we have to do a merge.  But first
                             restore the type_map for unused slots to those
                             that were in effect at the jsr. */
                          for (index = size;  --index >= 0; )
                            {
                              type_map[index] = TREE_VEC_ELT (ret_map, index);
                              if (type_map[index] == TYPE_UNUSED)
                                type_map[index]
                                  = TREE_VEC_ELT (return_state, index);
                            }
                        }
                      PUSH_PENDING (return_label);
                    }
                }
            }
        }

      if (PC == INVALID_PC)
        {
          label = pending_blocks;
          if (label == NULL_TREE)
            break;  /* We're done! */
          pending_blocks = LABEL_PENDING_CHAIN (label);
          LABEL_CHANGED (label) = 0;

          if (LABEL_IN_SUBR (label))
            current_subr = LABEL_SUBR_START (label);
          else
            current_subr = NULL_TREE;

          /* Restore type_map and stack_pointer from
             LABEL_TYPE_STATE (label), and continue
             compiling from there. */
          load_type_state (label);
          PC = LABEL_PC (label);
        }
      else if (PC >= length)
        VERIFICATION_ERROR ("falling through end of method");

      /* fprintf (stderr, "** %d\n", PC); */

      oldpc = PC;

      if (! (instruction_bits [PC] & BCODE_INSTRUCTION_START) && ! wide)
        VERIFICATION_ERROR ("PC not at instruction start");

      instruction_bits[PC] |= BCODE_VERIFIED;

      eh_ranges = find_handler (oldpc);

      op_code = byte_ops[PC++];
      switch (op_code)
        {
          int is_static, is_putting;
        case OPCODE_nop:
          break;
        case OPCODE_iconst_m1:
        case OPCODE_iconst_0:  case OPCODE_iconst_1:  case OPCODE_iconst_2:
        case OPCODE_iconst_3:  case OPCODE_iconst_4:  case OPCODE_iconst_5:
          i = op_code - OPCODE_iconst_0;
          goto push_int;
        push_int:
          if (byte_ops[PC] == OPCODE_newarray
              || byte_ops[PC] == OPCODE_anewarray)
            int_value = i;
          push_type (int_type_node);  break;
        case OPCODE_lconst_0:  case OPCODE_lconst_1:
          push_type (long_type_node);  break;
        case OPCODE_fconst_0:  case OPCODE_fconst_1:  case OPCODE_fconst_2:
          push_type (float_type_node);  break;
        case OPCODE_dconst_0:  case OPCODE_dconst_1:
          push_type (double_type_node);  break;
        case OPCODE_bipush:
          i = IMMEDIATE_s1;
          goto push_int;
        case OPCODE_sipush:
          i = IMMEDIATE_s2;
          goto push_int;
        case OPCODE_iload:  type = int_type_node;  goto general_load;
        case OPCODE_lload:  type = long_type_node;  goto general_load;
        case OPCODE_fload:  type = float_type_node;  goto general_load;
        case OPCODE_dload:  type = double_type_node;  goto general_load;
        case OPCODE_aload:  type = ptr_type_node;  goto general_load;
        general_load:
          index = wide ? IMMEDIATE_u2 : IMMEDIATE_u1;
          wide = 0;
          goto load;
        case OPCODE_iload_0:  type = int_type_node;  index = 0;  goto load;
        case OPCODE_iload_1:  type = int_type_node;  index = 1;  goto load;
        case OPCODE_iload_2:  type = int_type_node;  index = 2;  goto load;
        case OPCODE_iload_3:  type = int_type_node;  index = 3;  goto load;
        case OPCODE_lload_0:  type = long_type_node;  index = 0;  goto load;
        case OPCODE_lload_1:  type = long_type_node;  index = 1;  goto load;
        case OPCODE_lload_2:  type = long_type_node;  index = 2;  goto load;
        case OPCODE_lload_3:  type = long_type_node;  index = 3;  goto load;
        case OPCODE_fload_0:  type = float_type_node;  index = 0;  goto load;
        case OPCODE_fload_1:  type = float_type_node;  index = 1;  goto load;
        case OPCODE_fload_2:  type = float_type_node;  index = 2;  goto load;
        case OPCODE_fload_3:  type = float_type_node;  index = 3;  goto load;
        case OPCODE_dload_0:  type = double_type_node;  index = 0;  goto load;
        case OPCODE_dload_1:  type = double_type_node;  index = 1;  goto load;
        case OPCODE_dload_2:  type = double_type_node;  index = 2;  goto load;
        case OPCODE_dload_3:  type = double_type_node;  index = 3;  goto load;
        case OPCODE_aload_0:  type = ptr_type_node;  index = 0;  goto load;
        case OPCODE_aload_1:  type = ptr_type_node;  index = 1;  goto load;
        case OPCODE_aload_2:  type = ptr_type_node;  index = 2;  goto load;
        case OPCODE_aload_3:  type = ptr_type_node;  index = 3;  goto load;
        load:
          if (index < 0
              || (index + TYPE_IS_WIDE (type)
                  >= DECL_MAX_LOCALS (current_function_decl)))
            VERIFICATION_ERROR ("invalid local variable index in load");
          tmp = type_map[index];
          if (tmp == TYPE_UNKNOWN || tmp == TYPE_SECOND
              || (TYPE_IS_WIDE (type)
                  && type_map[index+1] != void_type_node)
              || (type == ptr_type_node
                  ? TREE_CODE (tmp) != POINTER_TYPE
                  : type == int_type_node
                  ? (! INTEGRAL_TYPE_P (tmp) || TYPE_PRECISION (tmp) > 32)
                  : type != tmp))
            VERIFICATION_ERROR ("invalid local variable type in load");
          push_type (tmp);
          goto note_used;
        case OPCODE_istore:  type = int_type_node;  goto general_store;
        case OPCODE_lstore:  type = long_type_node;  goto general_store;
        case OPCODE_fstore:  type = float_type_node;  goto general_store;
        case OPCODE_dstore:  type = double_type_node;  goto general_store;
        case OPCODE_astore:  type = ptr_type_node;  goto general_store;
        general_store:
          index = wide ? IMMEDIATE_u2 : IMMEDIATE_u1;
          wide = 0;
          goto store;
        case OPCODE_istore_0:  type = int_type_node;  index = 0;  goto store;
        case OPCODE_istore_1:  type = int_type_node;  index = 1;  goto store;
        case OPCODE_istore_2:  type = int_type_node;  index = 2;  goto store;
        case OPCODE_istore_3:  type = int_type_node;  index = 3;  goto store;
        case OPCODE_lstore_0:  type = long_type_node;  index = 0;  goto store;
        case OPCODE_lstore_1:  type = long_type_node;  index = 1;  goto store;
        case OPCODE_lstore_2:  type = long_type_node;  index = 2;  goto store;
        case OPCODE_lstore_3:  type = long_type_node;  index = 3;  goto store;
        case OPCODE_fstore_0:  type = float_type_node;  index = 0;  goto store;
        case OPCODE_fstore_1:  type = float_type_node;  index = 1;  goto store;
        case OPCODE_fstore_2:  type = float_type_node;  index = 2;  goto store;
        case OPCODE_fstore_3:  type = float_type_node;  index = 3;  goto store;
        case OPCODE_dstore_0:  type = double_type_node;  index = 0;  goto store;
        case OPCODE_dstore_1:  type = double_type_node;  index = 1;  goto store;
        case OPCODE_dstore_2:  type = double_type_node;  index = 2;  goto store;
        case OPCODE_dstore_3:  type = double_type_node;  index = 3;  goto store;
        case OPCODE_astore_0:  type = ptr_type_node;  index = 0;  goto store;
        case OPCODE_astore_1:  type = ptr_type_node;  index = 1;  goto store;
        case OPCODE_astore_2:  type = ptr_type_node;  index = 2;  goto store;
        case OPCODE_astore_3:  type = ptr_type_node;  index = 3;  goto store;
        store:
          if (index < 0
              || (index + TYPE_IS_WIDE (type)
                  >= DECL_MAX_LOCALS (current_function_decl)))
            {
              VERIFICATION_ERROR ("invalid local variable index in store");
              return 0;
            }
          type = pop_type (type);
          type_map[index] = type;

          /* If a local variable has changed, we need to reconsider exception
             handlers. */
          prev_eh_ranges = NULL_EH_RANGE;

          /* Allocate decl and rtx for this variable now, so if we're not
             optimizing, we get a temporary that survives the whole method. */
          find_local_variable (index, type, oldpc);

          if (TYPE_IS_WIDE (type))
            type_map[index+1] = TYPE_SECOND;
          /* ... fall through to note_used ... */
        note_used:
          /* For store or load, note that local variable INDEX is used.
             This is needed to verify try-finally sub-routines. */
          if (current_subr)
            {
              tree vec = LABEL_RETURN_TYPE_STATE (current_subr);
              tree subr_vec = LABEL_TYPE_STATE (current_subr);
              int len = 1 + TYPE_IS_WIDE (type);
              while (--len >= 0)
                {
                  if (TREE_VEC_ELT (vec, index) == TYPE_UNUSED)
                    TREE_VEC_ELT (vec, index) = TREE_VEC_ELT (subr_vec, index);
                }
            }
          break;
        case OPCODE_iadd:
        case OPCODE_iand:
        case OPCODE_idiv:
        case OPCODE_imul:
        case OPCODE_ior:
        case OPCODE_irem:
        case OPCODE_ishl:
        case OPCODE_ishr:
        case OPCODE_isub:
        case OPCODE_iushr:
        case OPCODE_ixor:
          type = int_type_node;  goto binop;
        case OPCODE_ineg:
        case OPCODE_i2c:
        case OPCODE_i2b:
        case OPCODE_i2s:
          type = int_type_node;  goto unop;
        case OPCODE_ladd:
        case OPCODE_land:
        case OPCODE_ldiv:
        case OPCODE_lsub:
        case OPCODE_lmul:
        case OPCODE_lrem:
        case OPCODE_lor:
        case OPCODE_lxor:
          type = long_type_node;  goto binop;
        case OPCODE_lneg:
          type = long_type_node;  goto unop;
        case OPCODE_fadd:  case OPCODE_fsub:
        case OPCODE_fmul:  case OPCODE_fdiv:  case OPCODE_frem:
          type = float_type_node;  goto binop;
        case OPCODE_fneg:
          type = float_type_node;  goto unop;
        case OPCODE_dadd:  case OPCODE_dsub:
        case OPCODE_dmul:  case OPCODE_ddiv:  case OPCODE_drem:
          type = double_type_node;  goto binop;
        case OPCODE_dneg:
          type = double_type_node;  goto unop;
        unop:
          pop_type (type);
          push_type (type);
          break;
        binop:
          pop_type (type);
          pop_type (type);
          push_type (type);
          break;
        case OPCODE_lshl:
        case OPCODE_lshr:
        case OPCODE_lushr:
          pop_type (int_type_node);
          pop_type (long_type_node);
          push_type (long_type_node);
          break;
        case OPCODE_iinc:
          index = wide ? IMMEDIATE_u2 : IMMEDIATE_u1;
          PC += wide + 1;
          wide = 0;
          if (index < 0 || index >= DECL_MAX_LOCALS (current_function_decl))
            VERIFICATION_ERROR ("invalid local variable index in iinc");
          tmp = type_map[index];
          if (! INTEGRAL_TYPE_P (tmp) || TYPE_PRECISION (tmp) > 32)
            VERIFICATION_ERROR ("invalid local variable type in iinc");
          break;
        case OPCODE_i2l:
          pop_type (int_type_node);     push_type (long_type_node);    break;
        case OPCODE_i2f:
          pop_type (int_type_node);     push_type (float_type_node);   break;
        case OPCODE_i2d:
          pop_type (int_type_node);     push_type (double_type_node);  break;
        case OPCODE_l2i:
          pop_type (long_type_node);    push_type (int_type_node);     break;
        case OPCODE_l2f:
          pop_type (long_type_node);    push_type (float_type_node);   break;
        case OPCODE_l2d:
          pop_type (long_type_node);    push_type (double_type_node);  break;
        case OPCODE_f2i:
          pop_type (float_type_node);   push_type (int_type_node);     break;
        case OPCODE_f2l:
          pop_type (float_type_node);   push_type (long_type_node);    break;
        case OPCODE_f2d:
          pop_type (float_type_node);   push_type (double_type_node);  break;
        case OPCODE_d2i:
          pop_type (double_type_node);  push_type (int_type_node);     break;
        case OPCODE_d2l:
          pop_type (double_type_node);  push_type (long_type_node);    break;
        case OPCODE_d2f:
          pop_type (double_type_node);  push_type (float_type_node);   break;
        case OPCODE_lcmp:
          type = long_type_node;  goto compare;
        case OPCODE_fcmpl:
        case OPCODE_fcmpg:
          type = float_type_node;  goto compare;
        case OPCODE_dcmpl:
        case OPCODE_dcmpg:
          type = double_type_node;  goto compare;
        compare:
          pop_type (type);  pop_type (type);
          push_type (int_type_node);  break;
        case OPCODE_ifeq:
        case OPCODE_ifne:
        case OPCODE_iflt:
        case OPCODE_ifge:
        case OPCODE_ifgt:
        case OPCODE_ifle:
          pop_type (int_type_node);  goto cond;
        case OPCODE_ifnull:
        case OPCODE_ifnonnull:
          pop_type (ptr_type_node);  goto cond;
        case OPCODE_if_icmpeq:
        case OPCODE_if_icmpne:
        case OPCODE_if_icmplt:
        case OPCODE_if_icmpge:
        case OPCODE_if_icmpgt:
        case OPCODE_if_icmple:
          pop_type (int_type_node);  pop_type (int_type_node);  goto cond;
        case OPCODE_if_acmpeq:
        case OPCODE_if_acmpne:
          pop_type (object_ptr_type_node);  pop_type (object_ptr_type_node);
          goto cond;
        cond:
          PUSH_PENDING (lookup_label (oldpc + IMMEDIATE_s2));
          break;
        case OPCODE_goto:
          PUSH_PENDING (lookup_label (oldpc + IMMEDIATE_s2));
          INVALIDATE_PC;
          break;
        case OPCODE_wide:
          switch (byte_ops[PC])
            {
            case OPCODE_iload:   case OPCODE_lload:
            case OPCODE_fload:   case OPCODE_dload:  case OPCODE_aload:
            case OPCODE_istore:  case OPCODE_lstore:
            case OPCODE_fstore:  case OPCODE_dstore: case OPCODE_astore:
            case OPCODE_iinc:
            case OPCODE_ret:
              wide = 1;
              break;
            default:
              VERIFICATION_ERROR ("invalid use of wide instruction");
            }
          break;
        case OPCODE_return:   type = void_type_node;    goto ret;
        case OPCODE_ireturn:
          if ((TREE_CODE (return_type) == BOOLEAN_TYPE
               || TREE_CODE (return_type) == CHAR_TYPE
               || TREE_CODE (return_type) == INTEGER_TYPE)
              && TYPE_PRECISION (return_type) <= 32)
            type = return_type;
          else
            type = NULL_TREE;
          goto ret;
        case OPCODE_lreturn:  type = long_type_node;    goto ret;
        case OPCODE_freturn:  type = float_type_node;   goto ret;
        case OPCODE_dreturn:  type = double_type_node;  goto ret;
        case OPCODE_areturn:
          if (TREE_CODE (return_type) == POINTER_TYPE)
            type = return_type;
          else
            type = NULL_TREE;
          goto ret;
        ret:
          if (type != return_type)
            VERIFICATION_ERROR ("incorrect ?return opcode");
          if (type != void_type_node)
            {
              if (pop_type_0 (type) == NULL_TREE)
                VERIFICATION_ERROR ("return value has wrong type");
            }
          INVALIDATE_PC;
          break;
        case OPCODE_getstatic: is_putting = 0;  is_static = 1;  goto field;
        case OPCODE_putstatic: is_putting = 1;  is_static = 1;  goto field;
        case OPCODE_getfield:  is_putting = 0;  is_static = 0;  goto field;
        case OPCODE_putfield:  is_putting = 1;  is_static = 0;  goto field;
        field:
          {
            int index = IMMEDIATE_u2;
            tree field_signature = COMPONENT_REF_SIGNATURE (&current_jcf->cpool, index);
            tree field_type = get_type_from_signature (field_signature);
            if (is_putting)
              pop_type (field_type);
            if (! is_static)
              {
                int clindex = COMPONENT_REF_CLASS_INDEX (&current_jcf->cpool,
                                                         index);
                tree self_type = get_class_constant (current_jcf, clindex);
                /* Defer actual checking until next pass. */
                if (pop_type_0 (self_type) == NULL_TREE)
                  VERIFICATION_ERROR ("incorrect type for field reference");
              }
            if (! is_putting)
              push_type (field_type);
            break;
          }
        case OPCODE_new:
          push_type (get_class_constant (jcf, IMMEDIATE_u2));
          break;
        case OPCODE_dup:      type_stack_dup (1, 0);  break;
        case OPCODE_dup_x1:   type_stack_dup (1, 1);  break;
        case OPCODE_dup_x2:   type_stack_dup (1, 2);  break;
        case OPCODE_dup2:     type_stack_dup (2, 0);  break;
        case OPCODE_dup2_x1:  type_stack_dup (2, 1);  break;
        case OPCODE_dup2_x2:  type_stack_dup (2, 2);  break;
        case OPCODE_pop:   index = 1;  goto pop;
        case OPCODE_pop2:  index = 2;  goto pop;
        pop:
          if (stack_pointer < index)
            VERIFICATION_ERROR ("stack underflow");
          stack_pointer -= index;
          break;
        case OPCODE_swap:
          if (stack_pointer < 2)
            VERIFICATION_ERROR ("stack underflow (in swap)");
          else
            {
              tree type1 = stack_type_map[stack_pointer - 1];
              tree type2 = stack_type_map[stack_pointer - 2];
              if (type1 == void_type_node || type2 == void_type_node)
                VERIFICATION_ERROR ("verifier (swap): double or long value");
              stack_type_map[stack_pointer - 2] = type1;
              stack_type_map[stack_pointer - 1] = type2;
            }
          break;
        case OPCODE_ldc:     index = IMMEDIATE_u1;  goto ldc;
        case OPCODE_ldc2_w:
        case OPCODE_ldc_w:
          index = IMMEDIATE_u2;  goto ldc;
        ldc:
          if (index <= 0 || index >= JPOOL_SIZE (current_jcf))
            VERIFICATION_ERROR ("bad constant pool index in ldc");
          int_value = -1;
          switch (JPOOL_TAG (current_jcf, index) & ~CONSTANT_ResolvedFlag)
            {
            case CONSTANT_Integer:  type = int_type_node;     goto check_ldc;
            case CONSTANT_Float:    type = float_type_node;   goto check_ldc;
            case CONSTANT_String:   type = string_type_node;  goto check_ldc;
            case CONSTANT_Long:     type = long_type_node;    goto check_ldc;
            case CONSTANT_Double:   type = double_type_node;  goto check_ldc;
            check_ldc:
              if (TYPE_IS_WIDE (type) == (op_code == OPCODE_ldc2_w))
                break;
              /* ... else fall through ... */
            default:
              VERIFICATION_ERROR ("bad constant pool tag in ldc");
            }
          if (type == int_type_node)
            {
              i = TREE_INT_CST_LOW (get_constant (current_jcf, index));
              goto push_int;
            }
          push_type (type);
          break;
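
          /* Note on the check_ldc test above: ldc2_w is the only form
             allowed to load the two-word constants (CONSTANT_Long and
             CONSTANT_Double), and conversely ldc/ldc_w may only load
             one-word constants - which is exactly what comparing
             TYPE_IS_WIDE (type) with (op_code == OPCODE_ldc2_w) enforces.  */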
        case OPCODE_invokevirtual:
        case OPCODE_invokespecial:
        case OPCODE_invokestatic:
        case OPCODE_invokeinterface:
          {
            int index = IMMEDIATE_u2;
            tree sig = COMPONENT_REF_SIGNATURE (&current_jcf->cpool, index);
            tree self_type = get_class_constant
              (current_jcf, COMPONENT_REF_CLASS_INDEX (&current_jcf->cpool,
                                                       index));
            tree method_name = COMPONENT_REF_NAME (&current_jcf->cpool, index);
            tree method_type;
            method_type = parse_signature_string (IDENTIFIER_POINTER (sig),
                                                  IDENTIFIER_LENGTH (sig));
            if (TREE_CODE (method_type) != FUNCTION_TYPE)
              VERIFICATION_ERROR ("bad method signature");
            pop_argument_types (TYPE_ARG_TYPES (method_type));

            /* Can't invoke <clinit>. */
            if (method_name == clinit_identifier_node)
              VERIFICATION_ERROR ("invoke opcode can't invoke <clinit>");
            /* Apart from invokespecial, can't invoke <init>. */
            if (op_code != OPCODE_invokespecial
                && method_name == init_identifier_node)
              VERIFICATION_ERROR ("invoke opcode can't invoke <init>");

            if (op_code != OPCODE_invokestatic)
              pop_type (self_type);

            switch (op_code)
              {
              case OPCODE_invokeinterface:
                {
                  int nargs = IMMEDIATE_u1;
                  int notZero = IMMEDIATE_u1;

                  if (!nargs || notZero)
                    VERIFICATION_ERROR
                      ("invalid argument number in invokeinterface");
                  break;
                }
              }

            if (TREE_TYPE (method_type) != void_type_node)
              push_type (TREE_TYPE (method_type));
            break;
          }
        case OPCODE_arraylength:
          /* Type checking is actually done during code generation. */
          pop_type (ptr_type_node);
          push_type (int_type_node);
          break;

          /* Q&D verification *or* more checking done during code generation:
             for byte/boolean/char/short, the value popped is an int coerced
             into the right type before being stored. */
        case OPCODE_iastore:  type = int_type_node;     goto astore;
        case OPCODE_lastore:  type = long_type_node;    goto astore;
        case OPCODE_fastore:  type = float_type_node;   goto astore;
        case OPCODE_dastore:  type = double_type_node;  goto astore;
        case OPCODE_aastore:  type = ptr_type_node;     goto astore;
        case OPCODE_bastore:  type = int_type_node;     goto astore;
        case OPCODE_castore:  type = int_type_node;     goto astore;
        case OPCODE_sastore:  type = int_type_node;     goto astore;
        astore:
          /* FIXME - need better verification here. */
          pop_type (type);           /* new value */
          pop_type (int_type_node);  /* index */
          pop_type (ptr_type_node);  /* array */
          break;
          /* Q&D verification *or* more checking done during code generation:
             for byte/boolean/char/short, the value pushed is an int. */
        case OPCODE_iaload:  type = int_type_node;     goto aload;
        case OPCODE_laload:  type = long_type_node;    goto aload;
        case OPCODE_faload:  type = float_type_node;   goto aload;
        case OPCODE_daload:  type = double_type_node;  goto aload;
        case OPCODE_aaload:  type = ptr_type_node;     goto aload;
        case OPCODE_baload:  type = promote_type (byte_type_node);   goto aload;
        case OPCODE_caload:  type = promote_type (char_type_node);   goto aload;
        case OPCODE_saload:  type = promote_type (short_type_node);  goto aload;
        aload:
          pop_type (int_type_node);
          tmp = pop_type (ptr_type_node);
          if (is_array_type_p (tmp))
            type = TYPE_ARRAY_ELEMENT (TREE_TYPE (tmp));
          else if (tmp != TYPE_NULL)
            VERIFICATION_ERROR ("array load from non-array type");
          push_type (type);
          break;
        case OPCODE_anewarray:
          type = get_class_constant (current_jcf, IMMEDIATE_u2);
          type = promote_type (type);
          goto newarray;

        case OPCODE_newarray:
          index = IMMEDIATE_u1;
          type = decode_newarray_type (index);
          if (type == NULL_TREE)
            VERIFICATION_ERROR ("invalid type code in newarray opcode");
          goto newarray;

        newarray:
          if (int_value >= 0 && prevpc >= 0)
            {
              /* If the previous instruction pushed an int constant,
                 we want to use it. */
              switch (byte_ops[prevpc])
                {
                case OPCODE_iconst_0:  case OPCODE_iconst_1:
                case OPCODE_iconst_2:  case OPCODE_iconst_3:
                case OPCODE_iconst_4:  case OPCODE_iconst_5:
                case OPCODE_bipush:    case OPCODE_sipush:
                case OPCODE_ldc:       case OPCODE_ldc_w:
                  break;
                default:
                  int_value = -1;
                }
            }
          else
            int_value = -1;
          type = build_java_array_type (type, int_value);
          pop_type (int_type_node);
          push_type (type);
          break;
        case OPCODE_multianewarray:
          {
            int ndim, i;
            index = IMMEDIATE_u2;
            ndim = IMMEDIATE_u1;

            if (ndim < 1)
              VERIFICATION_ERROR ("number of dimensions lower than 1 in multianewarray");

            for (i = 0; i < ndim; i++)
              pop_type (int_type_node);
            push_type (get_class_constant (current_jcf, index));
            break;
          }
        case OPCODE_aconst_null:
          push_type (ptr_type_node);
          break;

        case OPCODE_athrow:
          /* FIXME: athrow also empties the stack. */
          pop_type (throwable_type_node);
          INVALIDATE_PC;
          break;

        case OPCODE_checkcast:
          pop_type (ptr_type_node);
          type = get_class_constant (current_jcf, IMMEDIATE_u2);
          push_type (type);
          break;
        case OPCODE_instanceof:
          pop_type (ptr_type_node);
          get_class_constant (current_jcf, IMMEDIATE_u2);
          push_type (int_type_node);
          break;
        case OPCODE_tableswitch:
          {
            jint low, high;

            pop_type (int_type_node);
            while (PC % 4)
              {
                if (byte_ops[PC++])
                  VERIFICATION_ERROR ("bad alignment in tableswitch pad");
              }
            PUSH_PENDING (lookup_label (oldpc + IMMEDIATE_s4));
            low = IMMEDIATE_s4;
            high = IMMEDIATE_s4;

            if (low > high)
              VERIFICATION_ERROR ("unsorted low/high value in tableswitch");

            while (low++ <= high)
              PUSH_PENDING (lookup_label (oldpc + IMMEDIATE_s4));
            INVALIDATE_PC;
            break;
          }

        case OPCODE_lookupswitch:
          {
            jint npairs, last = 0, not_registered = 1;

            pop_type (int_type_node);
            while (PC % 4)
              {
                if (byte_ops[PC++])
                  VERIFICATION_ERROR ("bad alignment in lookupswitch pad");
              }

            PUSH_PENDING (lookup_label (oldpc + IMMEDIATE_s4));
            npairs = IMMEDIATE_s4;

            if (npairs < 0)
              VERIFICATION_ERROR ("invalid number of targets in lookupswitch");

            while (npairs--)
              {
                int match = IMMEDIATE_s4;
                if (not_registered)
                  not_registered = 0;
                else if (last >= match)
                  VERIFICATION_ERROR ("unsorted match value in lookupswitch");

                last = match;
                PUSH_PENDING (lookup_label (oldpc + IMMEDIATE_s4));
              }
            INVALIDATE_PC;
            break;
          }
        case OPCODE_monitorenter:
          /* fall thru */
        case OPCODE_monitorexit:
          pop_type (ptr_type_node);
          break;

        case OPCODE_goto_w:
          PUSH_PENDING (lookup_label (oldpc + IMMEDIATE_s4));
          INVALIDATE_PC;
          break;

        case OPCODE_jsr:
          {
            tree target = lookup_label (oldpc + IMMEDIATE_s2);
            tree return_label = lookup_label (PC);
            push_type (return_address_type_node);
            if (! LABEL_VERIFIED (target))
              {
                /* first time seen */
                tree return_type_map;
                int nlocals = DECL_MAX_LOCALS (current_function_decl);
                index = nlocals + DECL_MAX_STACK (current_function_decl);
                return_type_map = make_tree_vec (index);
                while (index > nlocals)
                  TREE_VEC_ELT (return_type_map, --index) = TYPE_UNKNOWN;
                while (index > 0)
                  TREE_VEC_ELT (return_type_map, --index) = TYPE_UNUSED;
                LABEL_RETURN_LABEL (target)
                  = build_decl (LABEL_DECL, NULL_TREE, TREE_TYPE (target));
                LABEL_PC (LABEL_RETURN_LABEL (target)) = -1;
                LABEL_RETURN_TYPE_STATE (target) = return_type_map;
                LABEL_IS_SUBR_START (target) = 1;
                LABEL_IN_SUBR (target) = 1;
                LABEL_SUBR_START (target) = target;
                LABEL_SUBR_CONTEXT (target) = current_subr;
              }
            else if (! LABEL_IS_SUBR_START (target)
                     || LABEL_SUBR_CONTEXT (target) != current_subr)
              VERIFICATION_ERROR ("label part of different subroutines");

            i = merge_type_state (target);
            if (i != 0)
              {
                if (i < 0)
                  VERIFICATION_ERROR ("types could not be merged at jsr");
                push_pending_label (target);
              }
            current_subr = target;

            /* Chain return_pc onto LABEL_RETURN_LABELS (target) if needed. */
            if (! value_member (return_label, LABEL_RETURN_LABELS (target)))
              {
                LABEL_RETURN_LABELS (target)
                  = tree_cons (NULL_TREE, return_label,
                               LABEL_RETURN_LABELS (target));
              }

            if (LABEL_VERIFIED (target))
              {
                tree return_map = LABEL_RETURN_TYPE_STATE (target);
                int len = TREE_VEC_LENGTH (return_map);
                stack_pointer = len - DECL_MAX_LOCALS (current_function_decl);
                while (--len >= 0)
                  {
                    if (TREE_VEC_ELT (return_map, len) != TYPE_UNUSED)
                      type_map[len] = TREE_VEC_ELT (return_map, len);
                  }
                current_subr = LABEL_SUBR_CONTEXT (target);
                PUSH_PENDING (return_label);
              }

            INVALIDATE_PC;
          }
          break;
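
          /* Rough summary of the jsr/ret bookkeeping above and in
             OPCODE_ret below: LABEL_RETURN_TYPE_STATE (target) starts with
             every local marked TYPE_UNUSED; slots the subroutine actually
             touches are filled in at loads/stores (see note_used) and on
             ret.  When a jsr's target has already been verified, that map
             is combined with the caller's type state, so locals the
             subroutine never touched keep the caller's types across the
             call.  */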
        case OPCODE_ret:
          if (current_subr == NULL)
            VERIFICATION_ERROR ("ret instruction not in a jsr subroutine");
          else
            {
              tree ret_map = LABEL_RETURN_TYPE_STATE (current_subr);
              int size = DECL_MAX_LOCALS (current_function_decl) + stack_pointer;
              index = wide ? IMMEDIATE_u2 : IMMEDIATE_u1;
              wide = 0;
              INVALIDATE_PC;
              if (index < 0 || index >= DECL_MAX_LOCALS (current_function_decl)
                  || type_map[index] != TYPE_RETURN_ADDR)
                VERIFICATION_ERROR ("invalid ret index");

              /* The next chunk of code is similar to an inlined version of
                 merge_type_state (LABEL_RETURN_LABEL (current_subr)).
                 The main differences are that LABEL_RETURN_LABEL is
                 pre-allocated by the jsr (but we don't know the size then);
                 and that we have to handle TYPE_UNUSED. */

              if (! RETURN_MAP_ADJUSTED (ret_map))
                { /* First return from this subroutine - fix stack pointer. */
                  TREE_VEC_LENGTH (ret_map) = size;
                  for (index = size;  --index >= 0; )
                    {
                      if (TREE_VEC_ELT (ret_map, index) != TYPE_UNUSED)
                        TREE_VEC_ELT (ret_map, index) = type_map[index];
                    }
                  RETURN_MAP_ADJUSTED (ret_map) = 1;
                }
              else
                {
                  if (TREE_VEC_LENGTH (ret_map) != size)
                    VERIFICATION_ERROR ("inconsistent stack size on ret");
                  for (index = 0;  index < size;  index++)
                    {
                      tree type = TREE_VEC_ELT (ret_map, index);
                      if (type != TYPE_UNUSED)
                        {
                          type = merge_types (type, type_map[index]);
                          TREE_VEC_ELT (ret_map, index) = type;
                          if (type == TYPE_UNKNOWN)
                            {
                              if (index >= size - stack_pointer)
                                VERIFICATION_ERROR
                                  ("inconsistent types on ret from jsr");
                            }
                          else if (TYPE_IS_WIDE (type))
                            index++;
                        }
                    }
                }
            }
          break;
        case OPCODE_jsr_w:
        case OPCODE_ret_w:
        default:
          error ("unknown opcode %d@pc=%d during verification", op_code, PC-1);
          return 0;
        }

      prevpc = oldpc;

      /* The following test is true if we have entered or exited an exception
         handler range *or* we have done a store to a local variable.
         In either case we need to consider any exception handlers that
         might "follow" this instruction. */
      if (eh_ranges != prev_eh_ranges)
        {
          int save_stack_pointer = stack_pointer;
          int index = DECL_MAX_LOCALS (current_function_decl);
          tree save_type = type_map[index];
          tree save_current_subr = current_subr;
          struct eh_range *ranges = find_handler (oldpc);
          stack_pointer = 1;
          for (; ranges != NULL_EH_RANGE; ranges = ranges->outer)
            {
              tree chain = ranges->handlers;

              /* We need to determine if the handler is part of current_subr.
                 There are two cases:  (1) The exception catch range
                 is entirely within current_subr.  In that case the handler
                 is also part of current_subr.
                 (2) Some of the catch range is not in current_subr.
                 In that case, the handler is *not* part of current_subr.

                 Figuring out which is the case is not necessarily obvious,
                 in the presence of clever code generators (and obfuscators).
                 We make a simplifying assumption that in case (2) we
                 have that the current_subr is entirely within the catch range.
                 In that case we can assume that if a caller (the jsr) of
                 a subroutine is within the catch range, then the handler is
                 *not* part of the subroutine, and vice versa. */

              current_subr = save_current_subr;
              for ( ; current_subr != NULL_TREE;
                    current_subr = LABEL_SUBR_CONTEXT (current_subr))
                {
                  tree return_labels = LABEL_RETURN_LABELS (current_subr);
                  /* There could be multiple return_labels, but
                     we only need to check one. */
                  int return_pc = LABEL_PC (TREE_VALUE (return_labels));
                  if (return_pc <= ranges->start_pc
                      || return_pc > ranges->end_pc)
                    break;
                }

              for ( ; chain != NULL_TREE;  chain = TREE_CHAIN (chain))
                {
                  tree handler = TREE_VALUE (chain);
                  tree type = TREE_PURPOSE (chain);
                  if (type == NULL_TREE)  /* a finally handler */
                    type = throwable_type_node;
                  type_map[index] = promote_type (type);

                  PUSH_PENDING (handler);
                }
            }
          stack_pointer = save_stack_pointer;
          current_subr = save_current_subr;
          type_map[index] = save_type;
          prev_eh_ranges = eh_ranges;
        }
    }
  return 1;
 bad_pc:
  message = "program counter out of range";
  goto verify_error;
 verify_error:
  error ("verification error at PC=%d", oldpc);
  error (message);
  return 0;
}