1 /* Process expressions for the GNU compiler for the Java(TM) language.
2 Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004,
3 2005, 2006, 2007, 2008 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>.
21 Java and all Java-based marks are trademarks or registered trademarks
22 of Sun Microsystems, Inc. in the United States and other countries.
23 The Free Software Foundation is independent of Sun Microsystems, Inc. */
25 /* Hacked by Per Bothner <bothner@cygnus.com> February 1996. */
27 #include "config.h"
28 #include "system.h"
29 #include "coretypes.h"
30 #include "tm.h"
31 #include "tree.h"
32 #include "real.h"
33 #include "rtl.h"
34 #include "flags.h"
35 #include "expr.h"
36 #include "java-tree.h"
37 #include "javaop.h"
38 #include "java-opcodes.h"
39 #include "jcf.h"
40 #include "java-except.h"
41 #include "parse.h"
42 #include "toplev.h"
43 #include "except.h"
44 #include "ggc.h"
45 #include "tree-iterator.h"
46 #include "gimple.h"
47 #include "target.h"
49 static void flush_quick_stack (void);
50 static void push_value (tree);
51 static tree pop_value (tree);
52 static void java_stack_swap (void);
53 static void java_stack_dup (int, int);
54 static void build_java_athrow (tree);
55 static void build_java_jsr (int, int);
56 static void build_java_ret (tree);
57 static void expand_java_multianewarray (tree, int);
58 static void expand_java_arraystore (tree);
59 static void expand_java_arrayload (tree);
60 static void expand_java_array_length (void);
61 static tree build_java_monitor (tree, tree);
62 static void expand_java_pushc (int, tree);
63 static void expand_java_return (tree);
64 static void expand_load_internal (int, tree, int);
65 static void expand_java_NEW (tree);
66 static void expand_java_INSTANCEOF (tree);
67 static void expand_java_CHECKCAST (tree);
68 static void expand_iinc (unsigned int, int, int);
69 static void expand_java_binop (tree, enum tree_code);
70 static void note_label (int, int);
71 static void expand_compare (enum tree_code, tree, tree, int);
72 static void expand_test (enum tree_code, tree, int);
73 static void expand_cond (enum tree_code, tree, int);
74 static void expand_java_goto (int);
75 static tree expand_java_switch (tree, int);
76 static void expand_java_add_case (tree, int, int);
77 static tree pop_arguments (tree);
78 static void expand_invoke (int, int, int);
79 static void expand_java_field_op (int, int, int);
80 static void java_push_constant_from_pool (struct JCF *, int);
81 static void java_stack_pop (int);
82 static tree build_java_throw_out_of_bounds_exception (tree);
83 static tree build_java_check_indexed_type (tree, tree);
84 static unsigned char peek_opcode_at_pc (struct JCF *, int, int);
85 static void promote_arguments (void);
86 static void cache_cpool_data_ref (void);
88 static GTY(()) tree operand_type[59];
90 static GTY(()) tree methods_ident;
91 static GTY(()) tree ncode_ident;
92 tree dtable_ident = NULL_TREE;
94 /* Set to nonzero value in order to emit class initialization code
95 before static field references. */
96 int always_initialize_class_p = 0;
98 /* We store the stack state in two places:
99 Within a basic block, we use the quick_stack, which is a
100 pushdown list (TREE_LISTs) of expression nodes.
101 This is the top part of the stack; below that we use find_stack_slot.
102 At the end of a basic block, the quick_stack must be flushed
103 to the stack slot array (as handled by find_stack_slot).
104 Using quick_stack generates better code (especially when
105 compiled without optimization), because we do not have to
106 explicitly store and load trees to temporary variables.
108 If a variable is on the quick stack, it means the value of the variable
109 when the quick stack was last flushed. Conceptually, flush_quick_stack
110 saves all the quick_stack elements in parallel. However, that is
111 complicated, so it actually saves them (i.e. copies each stack value
112 to its home virtual register) from low indexes. This allows a quick_stack
113 element at index i (counting from the bottom of the stack) to reference
114 slot virtuals for registers that are >= i, but not those that are deeper.
115 This convention makes most operations easier. For example, iadd works
116 even when the stack contains (reg[0], reg[1]): It results in the
117 stack containing (reg[0]+reg[1]), which is OK. However, some stack
118 operations are more complicated. For example dup given a stack
119 containing (reg[0]) would yield (reg[0], reg[0]), which would violate
120 the convention, since stack value 1 would refer to a register with
121 lower index (reg[0]), which flush_quick_stack does not safely handle.
122 So dup cannot just add an extra element to the quick_stack, but iadd can.
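/* A small worked example of the convention above (illustrative only):

     quick_stack before     operation   quick_stack after
     (reg[0], reg[1])       iadd        (reg[0]+reg[1])        element 0 uses slots >= 0: OK
     (reg[0]+1)             dup         (reg[0]+1, reg[0]+1)   element 1 uses slot 0 < 1: not OK

   In the dup case a low-to-high flush would first store reg[0]+1 into
   slot 0 and then compute the copy for slot 1 from the already-updated
   register, so dup cannot simply append to the quick_stack. */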
125 static GTY(()) tree quick_stack;
127 /* A free-list of unused permanent TREE_LIST nodes. */
128 static GTY((deletable)) tree tree_list_free_list;
130 /* The physical memory page size used in this computer. See
131 build_field_ref(). */
132 static GTY(()) tree page_size;
134 /* The stack pointer of the Java virtual machine.
135 This does include the size of the quick_stack. */
137 int stack_pointer;
139 const unsigned char *linenumber_table;
140 int linenumber_count;
142 /* Largest pc so far in this method that has been passed to lookup_label. */
143 int highest_label_pc_this_method = -1;
145 /* Base value for this method to add to pc to get generated label. */
146 int start_label_pc_this_method = 0;
148 void
149 init_expr_processing (void)
151 operand_type[21] = operand_type[54] = int_type_node;
152 operand_type[22] = operand_type[55] = long_type_node;
153 operand_type[23] = operand_type[56] = float_type_node;
154 operand_type[24] = operand_type[57] = double_type_node;
155 operand_type[25] = operand_type[58] = ptr_type_node;
158 tree
159 java_truthvalue_conversion (tree expr)
161 /* It is simpler and generates better code to have only TRUTH_*_EXPR
162 or comparison expressions as truth values at this level.
164 This function should normally be identity for Java. */
166 switch (TREE_CODE (expr))
168 case EQ_EXPR: case NE_EXPR: case UNEQ_EXPR: case LTGT_EXPR:
169 case LE_EXPR: case GE_EXPR: case LT_EXPR: case GT_EXPR:
170 case UNLE_EXPR: case UNGE_EXPR: case UNLT_EXPR: case UNGT_EXPR:
171 case ORDERED_EXPR: case UNORDERED_EXPR:
172 case TRUTH_ANDIF_EXPR:
173 case TRUTH_ORIF_EXPR:
174 case TRUTH_AND_EXPR:
175 case TRUTH_OR_EXPR:
176 case TRUTH_XOR_EXPR:
177 case TRUTH_NOT_EXPR:
178 case ERROR_MARK:
179 return expr;
181 case INTEGER_CST:
182 return integer_zerop (expr) ? boolean_false_node : boolean_true_node;
184 case REAL_CST:
185 return real_zerop (expr) ? boolean_false_node : boolean_true_node;
187 /* are these legal? XXX JH */
188 case NEGATE_EXPR:
189 case ABS_EXPR:
190 case FLOAT_EXPR:
191 /* These don't change whether an object is nonzero or zero. */
192 return java_truthvalue_conversion (TREE_OPERAND (expr, 0));
194 case COND_EXPR:
195 /* Distribute the conversion into the arms of a COND_EXPR. */
196 return fold_build3 (COND_EXPR, boolean_type_node, TREE_OPERAND (expr, 0),
197 java_truthvalue_conversion (TREE_OPERAND (expr, 1)),
198 java_truthvalue_conversion (TREE_OPERAND (expr, 2)));
200 case NOP_EXPR:
201 /* If this is widening the argument, we can ignore it. */
202 if (TYPE_PRECISION (TREE_TYPE (expr))
203 >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr, 0))))
204 return java_truthvalue_conversion (TREE_OPERAND (expr, 0));
205 /* fall through to default */
207 default:
208 return fold_build2 (NE_EXPR, boolean_type_node,
209 expr, boolean_false_node);
213 /* Save any stack slots that happen to be in the quick_stack into their
214 home virtual register slots.
216 The copy order is from low stack index to high, to support the invariant
217 that the expression for a slot may contain decls for stack slots with
218 higher (or the same) index, but not lower. */
220 static void
221 flush_quick_stack (void)
223 int stack_index = stack_pointer;
224 tree prev, cur, next;
226 /* First reverse the quick_stack, and count the number of slots it has. */
227 for (cur = quick_stack, prev = NULL_TREE; cur != NULL_TREE; cur = next)
229 next = TREE_CHAIN (cur);
230 TREE_CHAIN (cur) = prev;
231 prev = cur;
232 stack_index -= 1 + TYPE_IS_WIDE (TREE_TYPE (TREE_VALUE (cur)));
234 quick_stack = prev;
236 while (quick_stack != NULL_TREE)
238 tree decl;
239 tree node = quick_stack, type;
240 quick_stack = TREE_CHAIN (node);
241 TREE_CHAIN (node) = tree_list_free_list;
242 tree_list_free_list = node;
243 node = TREE_VALUE (node);
244 type = TREE_TYPE (node);
246 decl = find_stack_slot (stack_index, type);
247 if (decl != node)
248 java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (node), decl, node));
249 stack_index += 1 + TYPE_IS_WIDE (type);
253 /* Push TYPE on the type stack.
254 Return 1 on success, 0 on overflow.
257 push_type_0 (tree type)
259 int n_words;
260 type = promote_type (type);
261 n_words = 1 + TYPE_IS_WIDE (type);
262 if (stack_pointer + n_words > DECL_MAX_STACK (current_function_decl))
263 return 0;
264 /* Allocate decl for this variable now, so we get a temporary that
265 survives the whole method. */
266 find_stack_slot (stack_pointer, type);
267 stack_type_map[stack_pointer++] = type;
268 n_words--;
269 while (--n_words >= 0)
270 stack_type_map[stack_pointer++] = TYPE_SECOND;
271 return 1;
274 void
275 push_type (tree type)
277 int r = push_type_0 (type);
278 gcc_assert (r);
281 static void
282 push_value (tree value)
284 tree type = TREE_TYPE (value);
285 if (TYPE_PRECISION (type) < 32 && INTEGRAL_TYPE_P (type))
287 type = promote_type (type);
288 value = convert (type, value);
290 push_type (type);
291 if (tree_list_free_list == NULL_TREE)
292 quick_stack = tree_cons (NULL_TREE, value, quick_stack);
293 else
295 tree node = tree_list_free_list;
296 tree_list_free_list = TREE_CHAIN (tree_list_free_list);
297 TREE_VALUE (node) = value;
298 TREE_CHAIN (node) = quick_stack;
299 quick_stack = node;
301 /* If the value has a side effect, then we need to evaluate it
302 whether or not the result is used. If the value ends up on the
303 quick stack and is then popped, this won't happen -- so we flush
304 the quick stack. It is safest to simply always flush, though,
305 since TREE_SIDE_EFFECTS doesn't capture COMPONENT_REF, and for
306 the latter we may need to strip conversions. */
307 flush_quick_stack ();
310 /* Pop a type from the type stack.
311 TYPE is the expected type. Return the actual type, which must be
312 convertible to TYPE.
313 On an error, *MESSAGEP is set to a freshly malloc'd error message. */
315 tree
316 pop_type_0 (tree type, char **messagep)
318 int n_words;
319 tree t;
320 *messagep = NULL;
321 if (TREE_CODE (type) == RECORD_TYPE)
322 type = promote_type (type);
323 n_words = 1 + TYPE_IS_WIDE (type);
324 if (stack_pointer < n_words)
326 *messagep = xstrdup ("stack underflow");
327 return type;
329 while (--n_words > 0)
331 if (stack_type_map[--stack_pointer] != void_type_node)
333 *messagep = xstrdup ("Invalid multi-word value on type stack");
334 return type;
337 t = stack_type_map[--stack_pointer];
338 if (type == NULL_TREE || t == type)
339 return t;
340 if (TREE_CODE (t) == TREE_LIST)
344 tree tt = TREE_PURPOSE (t);
345 if (! can_widen_reference_to (tt, type))
347 t = tt;
348 goto fail;
350 t = TREE_CHAIN (t);
352 while (t);
353 return t;
355 if (INTEGRAL_TYPE_P (type) && INTEGRAL_TYPE_P (t)
356 && TYPE_PRECISION (type) <= 32 && TYPE_PRECISION (t) <= 32)
357 return t;
358 if (TREE_CODE (type) == POINTER_TYPE && TREE_CODE (t) == POINTER_TYPE)
360 /* If the expected type we've been passed is object or ptr
361 (i.e. void*), the caller needs to know the real type. */
362 if (type == ptr_type_node || type == object_ptr_type_node)
363 return t;
365 /* Since the verifier has already run, we know that any
366 types we see will be compatible. In BC mode, this fact
367 may be checked at runtime, but if that is so then we can
368 assume its truth here as well. So, we always succeed
369 here, with the expected type. */
370 return type;
373 if (! flag_verify_invocations && flag_indirect_dispatch
374 && t == object_ptr_type_node)
376 if (type != ptr_type_node)
377 warning (0, "need to insert runtime check for %s",
378 xstrdup (lang_printable_name (type, 0)));
379 return type;
382 /* lang_printable_name uses a static buffer, so we must save the result
383 from calling it the first time. */
384 fail:
386 char *temp = xstrdup (lang_printable_name (type, 0));
387 /* If the stack contains a multi-word type, keep popping the stack until
388 the real type is found. */
389 while (t == void_type_node)
390 t = stack_type_map[--stack_pointer];
391 *messagep = concat ("expected type '", temp,
392 "' but stack contains '", lang_printable_name (t, 0),
393 "'", NULL);
394 free (temp);
396 return type;
399 /* Pop a type from the type stack.
400 TYPE is the expected type. Return the actual type, which must be
401 convertible to TYPE, otherwise call error. */
403 tree
404 pop_type (tree type)
406 char *message = NULL;
407 type = pop_type_0 (type, &message);
408 if (message != NULL)
410 error ("%s", message);
411 free (message);
413 return type;
417 /* Return true if two type assertions are equal. */
419 static int
420 type_assertion_eq (const void * k1_p, const void * k2_p)
422 const type_assertion k1 = *(const type_assertion *)k1_p;
423 const type_assertion k2 = *(const type_assertion *)k2_p;
424 return (k1.assertion_code == k2.assertion_code
425 && k1.op1 == k2.op1
426 && k1.op2 == k2.op2);
429 /* Hash a type assertion. */
431 static hashval_t
432 type_assertion_hash (const void *p)
434 const type_assertion *k_p = (const type_assertion *) p;
435 hashval_t hash = iterative_hash (&k_p->assertion_code, sizeof
436 k_p->assertion_code, 0);
438 switch (k_p->assertion_code)
440 case JV_ASSERT_TYPES_COMPATIBLE:
441 hash = iterative_hash (&TYPE_UID (k_p->op2), sizeof TYPE_UID (k_p->op2),
442 hash);
443 /* Fall through. */
445 case JV_ASSERT_IS_INSTANTIABLE:
446 hash = iterative_hash (&TYPE_UID (k_p->op1), sizeof TYPE_UID (k_p->op1),
447 hash);
448 /* Fall through. */
450 case JV_ASSERT_END_OF_TABLE:
451 break;
453 default:
454 gcc_unreachable ();
457 return hash;
460 /* Add an entry to the type assertion table for the given class.
461 KLASS is the class for which this assertion will be evaluated by the
462 runtime during loading/initialization.
463 ASSERTION_CODE is the 'opcode' or type of this assertion: see java-tree.h.
464 OP1 and OP2 are the operands. The tree type of these arguments may be
465 specific to each assertion_code. */
467 void
468 add_type_assertion (tree klass, int assertion_code, tree op1, tree op2)
470 htab_t assertions_htab;
471 type_assertion as;
472 void **as_pp;
474 assertions_htab = TYPE_ASSERTIONS (klass);
475 if (assertions_htab == NULL)
477 assertions_htab = htab_create_ggc (7, type_assertion_hash,
478 type_assertion_eq, NULL);
479 TYPE_ASSERTIONS (current_class) = assertions_htab;
482 as.assertion_code = assertion_code;
483 as.op1 = op1;
484 as.op2 = op2;
486 as_pp = htab_find_slot (assertions_htab, &as, INSERT);
488 /* Don't add the same assertion twice. */
489 if (*as_pp)
490 return;
492 *as_pp = ggc_alloc (sizeof (type_assertion));
493 **(type_assertion **)as_pp = as;
497 /* Return 1 if SOURCE_TYPE can be safely widened to TARGET_TYPE.
498 Handles array types and interfaces. */
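/* A few illustrative cases (assuming ordinary class-file input): widening
   java.lang.String to java.lang.Object succeeds, as does widening String[]
   to Object[] or to Cloneable/Serializable (every array type implements
   those two interfaces), while widening Object to String fails. With
   -fno-verify-invocations the answer is simply "yes" and a runtime type
   assertion is recorded instead, as the code below explains. */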
501 can_widen_reference_to (tree source_type, tree target_type)
503 if (source_type == ptr_type_node || target_type == object_ptr_type_node)
504 return 1;
506 /* Get rid of pointers */
507 if (TREE_CODE (source_type) == POINTER_TYPE)
508 source_type = TREE_TYPE (source_type);
509 if (TREE_CODE (target_type) == POINTER_TYPE)
510 target_type = TREE_TYPE (target_type);
512 if (source_type == target_type)
513 return 1;
515 /* FIXME: This is very pessimistic, in that it checks everything,
516 even if we already know that the types are compatible. If we're
517 to support full Java class loader semantics, we need this.
518 However, we could do something more optimal. */
519 if (! flag_verify_invocations)
521 add_type_assertion (current_class, JV_ASSERT_TYPES_COMPATIBLE,
522 source_type, target_type);
524 if (!quiet_flag)
525 warning (0, "assert: %s is assign compatible with %s",
526 xstrdup (lang_printable_name (target_type, 0)),
527 xstrdup (lang_printable_name (source_type, 0)));
528 /* Punt everything to runtime. */
529 return 1;
532 if (TYPE_DUMMY (source_type) || TYPE_DUMMY (target_type))
534 return 1;
536 else
538 if (TYPE_ARRAY_P (source_type) || TYPE_ARRAY_P (target_type))
540 HOST_WIDE_INT source_length, target_length;
541 if (TYPE_ARRAY_P (source_type) != TYPE_ARRAY_P (target_type))
543 /* An array implements Cloneable and Serializable. */
544 tree name = DECL_NAME (TYPE_NAME (target_type));
545 return (name == java_lang_cloneable_identifier_node
546 || name == java_io_serializable_identifier_node);
548 target_length = java_array_type_length (target_type);
549 if (target_length >= 0)
551 source_length = java_array_type_length (source_type);
552 if (source_length != target_length)
553 return 0;
555 source_type = TYPE_ARRAY_ELEMENT (source_type);
556 target_type = TYPE_ARRAY_ELEMENT (target_type);
557 if (source_type == target_type)
558 return 1;
559 if (TREE_CODE (source_type) != POINTER_TYPE
560 || TREE_CODE (target_type) != POINTER_TYPE)
561 return 0;
562 return can_widen_reference_to (source_type, target_type);
564 else
566 int source_depth = class_depth (source_type);
567 int target_depth = class_depth (target_type);
569 if (TYPE_DUMMY (source_type) || TYPE_DUMMY (target_type))
571 if (! quiet_flag)
572 warning (0, "assert: %s is assign compatible with %s",
573 xstrdup (lang_printable_name (target_type, 0)),
574 xstrdup (lang_printable_name (source_type, 0)));
575 return 1;
578 /* class_depth can return a negative depth if an error occurred */
579 if (source_depth < 0 || target_depth < 0)
580 return 0;
582 if (CLASS_INTERFACE (TYPE_NAME (target_type)))
584 /* target_type is OK if source_type or source_type ancestors
585 implement target_type. We handle multiple sub-interfaces */
586 tree binfo, base_binfo;
587 int i;
589 for (binfo = TYPE_BINFO (source_type), i = 0;
590 BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
591 if (can_widen_reference_to
592 (BINFO_TYPE (base_binfo), target_type))
593 return 1;
595 if (!i)
596 return 0;
599 for ( ; source_depth > target_depth; source_depth--)
601 source_type
602 = BINFO_TYPE (BINFO_BASE_BINFO (TYPE_BINFO (source_type), 0));
604 return source_type == target_type;
609 static tree
610 pop_value (tree type)
612 type = pop_type (type);
613 if (quick_stack)
615 tree node = quick_stack;
616 quick_stack = TREE_CHAIN (quick_stack);
617 TREE_CHAIN (node) = tree_list_free_list;
618 tree_list_free_list = node;
619 node = TREE_VALUE (node);
620 return node;
622 else
623 return find_stack_slot (stack_pointer, promote_type (type));
627 /* Pop and discard the top COUNT stack slots. */
629 static void
630 java_stack_pop (int count)
632 while (count > 0)
634 tree type, val;
636 gcc_assert (stack_pointer != 0);
638 type = stack_type_map[stack_pointer - 1];
639 if (type == TYPE_SECOND)
641 count--;
642 gcc_assert (stack_pointer != 1 && count > 0);
644 type = stack_type_map[stack_pointer - 2];
646 val = pop_value (type);
647 count--;
651 /* Implement the 'swap' operator (to swap two top stack slots). */
653 static void
654 java_stack_swap (void)
656 tree type1, type2;
657 tree temp;
658 tree decl1, decl2;
660 if (stack_pointer < 2
661 || (type1 = stack_type_map[stack_pointer - 1]) == TYPE_SECOND
662 || (type2 = stack_type_map[stack_pointer - 2]) == TYPE_SECOND
663 || TYPE_IS_WIDE (type1) || TYPE_IS_WIDE (type2))
664 /* Bad stack swap. */
665 abort ();
668 flush_quick_stack ();
669 decl1 = find_stack_slot (stack_pointer - 1, type1);
670 decl2 = find_stack_slot (stack_pointer - 2, type2);
671 temp = build_decl (VAR_DECL, NULL_TREE, type1);
672 java_add_local_var (temp);
673 java_add_stmt (build2 (MODIFY_EXPR, type1, temp, decl1));
674 java_add_stmt (build2 (MODIFY_EXPR, type2,
675 find_stack_slot (stack_pointer - 1, type2),
676 decl2));
677 java_add_stmt (build2 (MODIFY_EXPR, type1,
678 find_stack_slot (stack_pointer - 2, type1),
679 temp));
680 stack_type_map[stack_pointer - 1] = type2;
681 stack_type_map[stack_pointer - 2] = type1;
684 static void
685 java_stack_dup (int size, int offset)
687 int low_index = stack_pointer - size - offset;
688 int dst_index;
689 if (low_index < 0)
690 error ("stack underflow - dup* operation");
692 flush_quick_stack ();
694 stack_pointer += size;
695 dst_index = stack_pointer;
697 for (dst_index = stack_pointer; --dst_index >= low_index; )
699 tree type;
700 int src_index = dst_index - size;
701 if (src_index < low_index)
702 src_index = dst_index + size + offset;
703 type = stack_type_map [src_index];
704 if (type == TYPE_SECOND)
706 /* Dup operation splits 64-bit number. */
707 gcc_assert (src_index > low_index);
709 stack_type_map[dst_index] = type;
710 src_index--; dst_index--;
711 type = stack_type_map[src_index];
712 gcc_assert (TYPE_IS_WIDE (type));
714 else
715 gcc_assert (! TYPE_IS_WIDE (type));
717 if (src_index != dst_index)
719 tree src_decl = find_stack_slot (src_index, type);
720 tree dst_decl = find_stack_slot (dst_index, type);
722 java_add_stmt
723 (build2 (MODIFY_EXPR, TREE_TYPE (dst_decl), dst_decl, src_decl));
724 stack_type_map[dst_index] = type;
729 /* Calls _Jv_Throw or _Jv_Sjlj_Throw. Discard the contents of the
730 value stack. */
732 static void
733 build_java_athrow (tree node)
735 tree call;
737 call = build_call_nary (void_type_node,
738 build_address_of (throw_node),
739 1, node);
740 TREE_SIDE_EFFECTS (call) = 1;
741 java_add_stmt (call);
742 java_stack_pop (stack_pointer);
745 /* Implementation for jsr/ret */
747 static void
748 build_java_jsr (int target_pc, int return_pc)
750 tree where = lookup_label (target_pc);
751 tree ret = lookup_label (return_pc);
752 tree ret_label = fold_build1 (ADDR_EXPR, return_address_type_node, ret);
753 push_value (ret_label);
754 flush_quick_stack ();
755 java_add_stmt (build1 (GOTO_EXPR, void_type_node, where));
757 /* Do not need to emit the label here. We noted the existence of the
758 label as a jump target in note_instructions; we'll emit the label
759 for real at the beginning of the expand_byte_code loop. */
762 static void
763 build_java_ret (tree location)
765 java_add_stmt (build1 (GOTO_EXPR, void_type_node, location));
768 /* Implementation of operations on array: new, load, store, length */
770 tree
771 decode_newarray_type (int atype)
773 switch (atype)
775 case 4: return boolean_type_node;
776 case 5: return char_type_node;
777 case 6: return float_type_node;
778 case 7: return double_type_node;
779 case 8: return byte_type_node;
780 case 9: return short_type_node;
781 case 10: return int_type_node;
782 case 11: return long_type_node;
783 default: return NULL_TREE;
787 /* Map primitive type to the code used by OPCODE_newarray. */
790 encode_newarray_type (tree type)
792 if (type == boolean_type_node)
793 return 4;
794 else if (type == char_type_node)
795 return 5;
796 else if (type == float_type_node)
797 return 6;
798 else if (type == double_type_node)
799 return 7;
800 else if (type == byte_type_node)
801 return 8;
802 else if (type == short_type_node)
803 return 9;
804 else if (type == int_type_node)
805 return 10;
806 else if (type == long_type_node)
807 return 11;
808 else
809 gcc_unreachable ();
812 /* Build a call to _Jv_ThrowBadArrayIndex(), the
813 ArrayIndexOutOfBoundsException exception handler. */
815 static tree
816 build_java_throw_out_of_bounds_exception (tree index)
818 tree node;
820 /* We need to build a COMPOUND_EXPR because _Jv_ThrowBadArrayIndex()
821 has void return type. We cannot just set the type of the CALL_EXPR below
822 to int_type_node because we would lose it during gimplification. */
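/* The tree built below is therefore roughly the C expression
   (_Jv_ThrowBadArrayIndex (index), 0): the call is evaluated only for its
   side effect, and the COMPOUND_EXPR gives the whole thing int type so it
   can appear as the right operand of the TRUTH_ANDIF_EXPR created in
   build_java_arrayaccess. */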
823 gcc_assert (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (soft_badarrayindex_node))));
824 node = build_call_nary (void_type_node,
825 build_address_of (soft_badarrayindex_node),
826 1, index);
827 TREE_SIDE_EFFECTS (node) = 1;
829 node = build2 (COMPOUND_EXPR, int_type_node, node, integer_zero_node);
830 TREE_SIDE_EFFECTS (node) = 1; /* Allows expansion within ANDIF */
832 return (node);
835 /* Return the length of an array. Doesn't perform any checking on the nature
836 or value of the array NODE. May be used to implement some bytecodes. */
838 tree
839 build_java_array_length_access (tree node)
841 tree type = TREE_TYPE (node);
842 tree array_type = TREE_TYPE (type);
843 HOST_WIDE_INT length;
845 if (!is_array_type_p (type))
847 /* With the new verifier, we will see an ordinary pointer type
848 here. In this case, we just use an arbitrary array type. */
849 array_type = build_java_array_type (object_ptr_type_node, -1);
850 type = promote_type (array_type);
853 length = java_array_type_length (type);
854 if (length >= 0)
855 return build_int_cst (NULL_TREE, length);
857 node = build3 (COMPONENT_REF, int_type_node,
858 build_java_indirect_ref (array_type, node,
859 flag_check_references),
860 lookup_field (&array_type, get_identifier ("length")),
861 NULL_TREE);
862 IS_ARRAY_LENGTH_ACCESS (node) = 1;
863 return node;
866 /* Optionally checks a reference against the NULL pointer. ARG1: the
867 expr, ARG2: we should check the reference. Don't generate extra
868 checks if we're not generating code. */
870 tree
871 java_check_reference (tree expr, int check)
873 if (!flag_syntax_only && check)
875 expr = save_expr (expr);
876 expr = build3 (COND_EXPR, TREE_TYPE (expr),
877 build2 (EQ_EXPR, boolean_type_node,
878 expr, null_pointer_node),
879 build_call_nary (void_type_node,
880 build_address_of (soft_nullpointer_node),
881 0),
882 expr);
885 return expr;
888 /* Reference an object: just like an INDIRECT_REF, but with checking. */
890 tree
891 build_java_indirect_ref (tree type, tree expr, int check)
893 tree t;
894 t = java_check_reference (expr, check);
895 t = convert (build_pointer_type (type), t);
896 return build1 (INDIRECT_REF, type, t);
899 /* Implement array indexing (either as l-value or r-value).
900 Returns a tree for ARRAY[INDEX], assuming TYPE is the element type.
901 Optionally performs bounds checking and/or a NULL test.
902 At this point, ARRAY should have been verified as an array. */
904 tree
905 build_java_arrayaccess (tree array, tree type, tree index)
907 tree node, throw_expr = NULL_TREE;
908 tree data_field;
909 tree ref;
910 tree array_type = TREE_TYPE (TREE_TYPE (array));
911 tree size_exp = fold_convert (sizetype, size_in_bytes (type));
913 if (!is_array_type_p (TREE_TYPE (array)))
915 /* With the new verifier, we will see an ordinary pointer type
916 here. In this case, we just use the correct array type. */
917 array_type = build_java_array_type (type, -1);
920 if (flag_bounds_check)
922 /* Generate:
923 * (unsigned jint) INDEX >= (unsigned jint) LEN
924 * && throw ArrayIndexOutOfBoundsException.
925 * Note this is equivalent to and more efficient than:
926 * INDEX < 0 || INDEX >= LEN && throw ... */
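/* For example, with LEN == 10 and INDEX == -1, (unsigned jint) INDEX is
   0xffffffff, which is >= 10, so the single unsigned comparison below
   rejects negative indexes as well as indexes past the end of the
   array. */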
927 tree test;
928 tree len = convert (unsigned_int_type_node,
929 build_java_array_length_access (array));
930 test = fold_build2 (GE_EXPR, boolean_type_node,
931 convert (unsigned_int_type_node, index),
932 len);
933 if (! integer_zerop (test))
935 throw_expr
936 = build2 (TRUTH_ANDIF_EXPR, int_type_node, test,
937 build_java_throw_out_of_bounds_exception (index));
938 /* allows expansion within COMPOUND */
939 TREE_SIDE_EFFECTS( throw_expr ) = 1;
943 /* If checking bounds, wrap the index expr with a COMPOUND_EXPR in order
944 to have the bounds check evaluated first. */
945 if (throw_expr != NULL_TREE)
946 index = build2 (COMPOUND_EXPR, int_type_node, throw_expr, index);
948 data_field = lookup_field (&array_type, get_identifier ("data"));
950 ref = build3 (COMPONENT_REF, TREE_TYPE (data_field),
951 build_java_indirect_ref (array_type, array,
952 flag_check_references),
953 data_field, NULL_TREE);
955 /* Take the address of the data field and convert it to a pointer to
956 the element type. */
957 node = build1 (NOP_EXPR, build_pointer_type (type), build_address_of (ref));
959 /* Multiply the index by the size of an element to obtain a byte
960 offset. Convert the result to a pointer to the element type. */
961 index = build2 (MULT_EXPR, sizetype,
962 fold_convert (sizetype, index),
963 size_exp);
965 /* Sum the byte offset and the address of the data field. */
966 node = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (node), node, index);
968 /* Finally, return
970 *((&array->data) + index*size_exp)
973 return build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (node)), node);
976 /* Generate code to throw an ArrayStoreException if OBJECT is not assignable
977 (at runtime) to an element of ARRAY. A NOP_EXPR is returned if it can
978 be determined that no check is required. */
980 tree
981 build_java_arraystore_check (tree array, tree object)
983 tree check, element_type, source;
984 tree array_type_p = TREE_TYPE (array);
985 tree object_type = TYPE_NAME (TREE_TYPE (TREE_TYPE (object)));
987 if (! flag_verify_invocations)
989 /* With the new verifier, we don't track precise types. FIXME:
990 performance regression here. */
991 element_type = TYPE_NAME (object_type_node);
993 else
995 gcc_assert (is_array_type_p (array_type_p));
997 /* Get the TYPE_DECL for ARRAY's element type. */
998 element_type
999 = TYPE_NAME (TREE_TYPE (TREE_TYPE (TREE_TYPE (array_type_p))));
1002 gcc_assert (TREE_CODE (element_type) == TYPE_DECL
1003 && TREE_CODE (object_type) == TYPE_DECL);
1005 if (!flag_store_check)
1006 return build1 (NOP_EXPR, array_type_p, array);
1008 /* No check is needed if the element type is final. Also check that
1009 element_type matches object_type, since in the bytecode
1010 compilation case element_type may be the actual element type of
1011 the array rather than its declared type. However, if we're doing
1012 indirect dispatch, we can't do the `final' optimization. */
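/* For instance, a store into a String[] whose element type is known to be
   the final class java.lang.String needs no check, since no subclass of
   String can exist; a store into an Object[] still needs the runtime
   check, because the array might really be, say, a String[] at run
   time. */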
1013 if (element_type == object_type
1014 && ! flag_indirect_dispatch
1015 && CLASS_FINAL (element_type))
1016 return build1 (NOP_EXPR, array_type_p, array);
1018 /* OBJECT might be wrapped by a SAVE_EXPR. */
1019 if (TREE_CODE (object) == SAVE_EXPR)
1020 source = TREE_OPERAND (object, 0);
1021 else
1022 source = object;
1024 /* Avoid the check if OBJECT was just loaded from the same array. */
1025 if (TREE_CODE (source) == ARRAY_REF)
1027 tree target;
1028 source = TREE_OPERAND (source, 0); /* COMPONENT_REF. */
1029 source = TREE_OPERAND (source, 0); /* INDIRECT_REF. */
1030 source = TREE_OPERAND (source, 0); /* Source array's DECL or SAVE_EXPR. */
1031 if (TREE_CODE (source) == SAVE_EXPR)
1032 source = TREE_OPERAND (source, 0);
1034 target = array;
1035 if (TREE_CODE (target) == SAVE_EXPR)
1036 target = TREE_OPERAND (target, 0);
1038 if (source == target)
1039 return build1 (NOP_EXPR, array_type_p, array);
1042 /* Build an invocation of _Jv_CheckArrayStore */
1043 check = build_call_nary (void_type_node,
1044 build_address_of (soft_checkarraystore_node),
1045 2, array, object);
1046 TREE_SIDE_EFFECTS (check) = 1;
1048 return check;
1051 /* Makes sure that INDEXED_TYPE is appropriate. If not, make it from
1052 ARRAY_NODE. This function is used to retrieve something less vague than
1053 a pointer type when indexing the first dimension of something like [[<t>.
1054 May return a corrected type, if necessary, otherwise INDEXED_TYPE is
1055 returned unchanged. */
1057 static tree
1058 build_java_check_indexed_type (tree array_node ATTRIBUTE_UNUSED,
1059 tree indexed_type)
1061 /* We used to check to see if ARRAY_NODE really had array type.
1062 However, with the new verifier, this is not necessary, as we know
1063 that the object will be an array of the appropriate type. */
1065 return indexed_type;
1068 /* newarray triggers a call to _Jv_NewPrimArray. This function should be
1069 called with an integer code (the type of array to create), and the length
1070 of the array to create. */
1072 tree
1073 build_newarray (int atype_value, tree length)
1075 tree type_arg;
1077 tree prim_type = decode_newarray_type (atype_value);
1078 tree type
1079 = build_java_array_type (prim_type,
1080 host_integerp (length, 0)
1081 ? tree_low_cst (length, 0) : -1);
1083 /* Pass a reference to the primitive type class and save the runtime
1084 some work. */
1085 type_arg = build_class_ref (prim_type);
1087 return build_call_nary (promote_type (type),
1088 build_address_of (soft_newarray_node),
1089 2, type_arg, length);
1092 /* Generates anewarray from a given CLASS_TYPE. Gets from the stack the size
1093 of the dimension. */
1095 tree
1096 build_anewarray (tree class_type, tree length)
1098 tree type
1099 = build_java_array_type (class_type,
1100 host_integerp (length, 0)
1101 ? tree_low_cst (length, 0) : -1);
1103 return build_call_nary (promote_type (type),
1104 build_address_of (soft_anewarray_node),
1105 3,
1106 length,
1107 build_class_ref (class_type),
1108 null_pointer_node);
1111 /* Return a node that evaluates 'new TYPE[LENGTH]'. */
1113 tree
1114 build_new_array (tree type, tree length)
1116 if (JPRIMITIVE_TYPE_P (type))
1117 return build_newarray (encode_newarray_type (type), length);
1118 else
1119 return build_anewarray (TREE_TYPE (type), length);
1122 /* Generates a call to _Jv_NewMultiArray. multianewarray expects a
1123 class pointer, a number of dimensions and the matching number of
1124 dimension sizes. The argument list is NULL terminated. */
1126 static void
1127 expand_java_multianewarray (tree class_type, int ndim)
1129 int i;
1130 tree args = build_tree_list( NULL_TREE, null_pointer_node );
1132 for( i = 0; i < ndim; i++ )
1133 args = tree_cons (NULL_TREE, pop_value (int_type_node), args);
1135 args = tree_cons (NULL_TREE,
1136 build_class_ref (class_type),
1137 tree_cons (NULL_TREE,
1138 build_int_cst (NULL_TREE, ndim),
1139 args));
1141 push_value (build_call_list (promote_type (class_type),
1142 build_address_of (soft_multianewarray_node),
1143 args));
1146 /* ARRAY[INDEX] <- RHS. build_java_check_indexed_type makes sure that
1147 ARRAY is an array type. May expand some bound checking and NULL
1148 pointer checking. RHS_TYPE_NODE is the type of the value we are going
1149 to store. In the case of CHAR/BYTE/BOOLEAN/SHORT, the type popped off
1150 the stack is an INT; in those cases, we make the conversion.
1152 If ARRAY is a reference type, the assignment is checked at run-time
1153 to make sure that the RHS can be assigned to the array element
1154 type. It is not necessary to generate this code if ARRAY is final. */
1156 static void
1157 expand_java_arraystore (tree rhs_type_node)
1159 tree rhs_node = pop_value ((INTEGRAL_TYPE_P (rhs_type_node)
1160 && TYPE_PRECISION (rhs_type_node) <= 32) ?
1161 int_type_node : rhs_type_node);
1162 tree index = pop_value (int_type_node);
1163 tree array_type, array, temp, access;
1165 /* If we're processing an `aastore' we might as well just pick
1166 `Object'. */
1167 if (TREE_CODE (rhs_type_node) == POINTER_TYPE)
1169 array_type = build_java_array_type (object_ptr_type_node, -1);
1170 rhs_type_node = object_ptr_type_node;
1172 else
1173 array_type = build_java_array_type (rhs_type_node, -1);
1175 array = pop_value (array_type);
1176 array = build1 (NOP_EXPR, promote_type (array_type), array);
1178 rhs_type_node = build_java_check_indexed_type (array, rhs_type_node);
1180 flush_quick_stack ();
1182 index = save_expr (index);
1183 array = save_expr (array);
1185 /* We want to perform the bounds check (done by
1186 build_java_arrayaccess) before the type check (done by
1187 build_java_arraystore_check). So, we call build_java_arrayaccess
1188 -- which returns an ARRAY_REF lvalue -- and we then generate code
1189 to stash the address of that lvalue in a temp. Then we call
1190 build_java_arraystore_check, and finally we generate a
1191 MODIFY_EXPR to set the array element. */
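/* The statements emitted below therefore amount to (in C-like pseudo-code,
   for a reference array):

       T *tmp = &array->data[index];       bounds check happens here
       _Jv_CheckArrayStore (array, rhs);   only for reference arrays
       *tmp = rhs;

   The real code builds the equivalent GENERIC trees. */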
1193 access = build_java_arrayaccess (array, rhs_type_node, index);
1194 temp = build_decl (VAR_DECL, NULL_TREE,
1195 build_pointer_type (TREE_TYPE (access)));
1196 java_add_local_var (temp);
1197 java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (temp),
1198 temp,
1199 build_fold_addr_expr (access)));
1201 if (TREE_CODE (rhs_type_node) == POINTER_TYPE)
1203 tree check = build_java_arraystore_check (array, rhs_node);
1204 java_add_stmt (check);
1207 java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (access),
1208 build1 (INDIRECT_REF, TREE_TYPE (access), temp),
1209 rhs_node));
1212 /* Expand the evaluation of ARRAY[INDEX]. build_java_check_indexed_type makes
1213 sure that LHS is an array type. May expand some bound checking and NULL
1214 pointer checking.
1215 LHS_TYPE_NODE is the type of ARRAY[INDEX]. But in the case of CHAR/BYTE/
1216 BOOLEAN/SHORT, we push a promoted type back to the stack.
1219 static void
1220 expand_java_arrayload (tree lhs_type_node)
1222 tree load_node;
1223 tree index_node = pop_value (int_type_node);
1224 tree array_type;
1225 tree array_node;
1227 /* If we're processing an `aaload' we might as well just pick
1228 `Object'. */
1229 if (TREE_CODE (lhs_type_node) == POINTER_TYPE)
1231 array_type = build_java_array_type (object_ptr_type_node, -1);
1232 lhs_type_node = object_ptr_type_node;
1234 else
1235 array_type = build_java_array_type (lhs_type_node, -1);
1236 array_node = pop_value (array_type);
1237 array_node = build1 (NOP_EXPR, promote_type (array_type), array_node);
1239 index_node = save_expr (index_node);
1240 array_node = save_expr (array_node);
1242 lhs_type_node = build_java_check_indexed_type (array_node,
1243 lhs_type_node);
1244 load_node = build_java_arrayaccess (array_node,
1245 lhs_type_node,
1246 index_node);
1247 if (INTEGRAL_TYPE_P (lhs_type_node) && TYPE_PRECISION (lhs_type_node) <= 32)
1248 load_node = fold_build1 (NOP_EXPR, int_type_node, load_node);
1249 push_value (load_node);
1252 /* Expands .length. Makes sure that we deal with an array and may expand
1253 a NULL check on the array object. */
1255 static void
1256 expand_java_array_length (void)
1258 tree array = pop_value (ptr_type_node);
1259 tree length = build_java_array_length_access (array);
1261 push_value (length);
1264 /* Emit code for the call to _Jv_Monitor{Enter,Exit}. CALL can be
1265 either soft_monitorenter_node or soft_monitorexit_node. */
1267 static tree
1268 build_java_monitor (tree call, tree object)
1270 return build_call_nary (void_type_node,
1271 build_address_of (call),
1272 1, object);
1275 /* Emit code for one of the PUSHC instructions. */
1277 static void
1278 expand_java_pushc (int ival, tree type)
1280 tree value;
1281 if (type == ptr_type_node && ival == 0)
1282 value = null_pointer_node;
1283 else if (type == int_type_node || type == long_type_node)
1284 value = build_int_cst (type, ival);
1285 else if (type == float_type_node || type == double_type_node)
1287 REAL_VALUE_TYPE x;
1288 REAL_VALUE_FROM_INT (x, ival, 0, TYPE_MODE (type));
1289 value = build_real (type, x);
1291 else
1292 gcc_unreachable ();
1294 push_value (value);
1297 static void
1298 expand_java_return (tree type)
1300 if (type == void_type_node)
1301 java_add_stmt (build1 (RETURN_EXPR, void_type_node, NULL));
1302 else
1304 tree retval = pop_value (type);
1305 tree res = DECL_RESULT (current_function_decl);
1306 retval = build2 (MODIFY_EXPR, TREE_TYPE (res), res, retval);
1308 /* Handle the situation where the native integer type is smaller
1309 than the JVM integer. It can happen for many cross compilers.
1310 The whole if expression just goes away if INT_TYPE_SIZE < 32
1311 is false. */
1312 if (INT_TYPE_SIZE < 32
1313 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (res)))
1314 < GET_MODE_SIZE (TYPE_MODE (type))))
1315 retval = build1(NOP_EXPR, TREE_TYPE(res), retval);
1317 TREE_SIDE_EFFECTS (retval) = 1;
1318 java_add_stmt (build1 (RETURN_EXPR, void_type_node, retval));
1322 static void
1323 expand_load_internal (int index, tree type, int pc)
1325 tree copy;
1326 tree var = find_local_variable (index, type, pc);
1328 /* Now VAR is the VAR_DECL (or PARM_DECL) that we are going to push
1329 on the stack. If there is an assignment to this VAR_DECL between
1330 the stack push and the use, then the wrong code could be
1331 generated. To avoid this we create a new local and copy our
1332 value into it. Then we push this new local on the stack.
1333 Hopefully this all gets optimized out. */
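/* Concretely, for a bytecode sequence such as

       iload_1      push the value of local 1
       iconst_0
       istore_1     overwrite local 1 with 0
       ireturn      must still return the value pushed by iload_1

   the value consumed by ireturn has to be the old contents of local 1,
   which is why a snapshot is copied into a fresh temporary here instead
   of pushing the VAR_DECL itself. */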
1334 copy = build_decl (VAR_DECL, NULL_TREE, type);
1335 if ((INTEGRAL_TYPE_P (type) || POINTER_TYPE_P (type))
1336 && TREE_TYPE (copy) != TREE_TYPE (var))
1337 var = convert (type, var);
1338 java_add_local_var (copy);
1339 java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (var), copy, var));
1341 push_value (copy);
1344 tree
1345 build_address_of (tree value)
1347 return build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (value)), value);
1350 bool
1351 class_has_finalize_method (tree type)
1353 tree super = CLASSTYPE_SUPER (type);
1355 if (super == NULL_TREE)
1356 return false; /* Every class with a real finalizer inherits */
1357 /* from java.lang.Object. */
1358 else
1359 return HAS_FINALIZER_P (type) || class_has_finalize_method (super);
1362 tree
1363 java_create_object (tree type)
1365 tree alloc_node = (class_has_finalize_method (type)
1366 ? alloc_object_node
1367 : alloc_no_finalizer_node);
1369 return build_call_nary (promote_type (type),
1370 build_address_of (alloc_node),
1371 1, build_class_ref (type));
1374 static void
1375 expand_java_NEW (tree type)
1377 tree alloc_node;
1379 alloc_node = (class_has_finalize_method (type) ? alloc_object_node
1380 : alloc_no_finalizer_node);
1381 if (! CLASS_LOADED_P (type))
1382 load_class (type, 1);
1383 safe_layout_class (type);
1384 push_value (build_call_nary (promote_type (type),
1385 build_address_of (alloc_node),
1386 1, build_class_ref (type)));
1389 /* This returns an expression which will extract the class of an
1390 object. */
1392 tree
1393 build_get_class (tree value)
1395 tree class_field = lookup_field (&dtable_type, get_identifier ("class"));
1396 tree vtable_field = lookup_field (&object_type_node,
1397 get_identifier ("vtable"));
1398 tree tmp = build3 (COMPONENT_REF, dtable_ptr_type,
1399 build_java_indirect_ref (object_type_node, value,
1400 flag_check_references),
1401 vtable_field, NULL_TREE);
1402 return build3 (COMPONENT_REF, class_ptr_type,
1403 build1 (INDIRECT_REF, dtable_type, tmp),
1404 class_field, NULL_TREE);
1407 /* This builds the tree representation of the `instanceof' operator.
1408 It tries various tricks to optimize this in cases where types are
1409 known. */
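/* For example, `x instanceof Object' reduces below to a simple null check
   (x != null); when the target class is final, the test becomes a null
   check plus a comparison of x's class pointer against the class constant;
   otherwise we fall back to the runtime helper behind
   soft_instanceof_node. */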
1411 tree
1412 build_instanceof (tree value, tree type)
1414 tree expr;
1415 tree itype = TREE_TYPE (TREE_TYPE (soft_instanceof_node));
1416 tree valtype = TREE_TYPE (TREE_TYPE (value));
1417 tree valclass = TYPE_NAME (valtype);
1418 tree klass;
1420 /* When compiling from bytecode, we need to ensure that TYPE has
1421 been loaded. */
1422 if (CLASS_P (type) && ! CLASS_LOADED_P (type))
1424 load_class (type, 1);
1425 safe_layout_class (type);
1426 if (! TYPE_SIZE (type) || TREE_CODE (TYPE_SIZE (type)) == ERROR_MARK)
1427 return error_mark_node;
1429 klass = TYPE_NAME (type);
1431 if (type == object_type_node || inherits_from_p (valtype, type))
1433 /* Anything except `null' is an instance of Object. Likewise,
1434 if the object is known to be an instance of the class, then
1435 we only need to check for `null'. */
1436 expr = build2 (NE_EXPR, itype, value, null_pointer_node);
1438 else if (flag_verify_invocations
1439 && ! TYPE_ARRAY_P (type)
1440 && ! TYPE_ARRAY_P (valtype)
1441 && DECL_P (klass) && DECL_P (valclass)
1442 && ! CLASS_INTERFACE (valclass)
1443 && ! CLASS_INTERFACE (klass)
1444 && ! inherits_from_p (type, valtype)
1445 && (CLASS_FINAL (klass)
1446 || ! inherits_from_p (valtype, type)))
1448 /* The classes are from different branches of the derivation
1449 tree, so we immediately know the answer. */
1450 expr = boolean_false_node;
1452 else if (DECL_P (klass) && CLASS_FINAL (klass))
1454 tree save = save_expr (value);
1455 expr = build3 (COND_EXPR, itype,
1456 build2 (NE_EXPR, boolean_type_node,
1457 save, null_pointer_node),
1458 build2 (EQ_EXPR, itype,
1459 build_get_class (save),
1460 build_class_ref (type)),
1461 boolean_false_node);
1463 else
1465 expr = build_call_nary (itype,
1466 build_address_of (soft_instanceof_node),
1467 2, value, build_class_ref (type));
1469 TREE_SIDE_EFFECTS (expr) = TREE_SIDE_EFFECTS (value);
1470 return expr;
1473 static void
1474 expand_java_INSTANCEOF (tree type)
1476 tree value = pop_value (object_ptr_type_node);
1477 value = build_instanceof (value, type);
1478 push_value (value);
1481 static void
1482 expand_java_CHECKCAST (tree type)
1484 tree value = pop_value (ptr_type_node);
1485 value = build_call_nary (promote_type (type),
1486 build_address_of (soft_checkcast_node),
1487 2, build_class_ref (type), value);
1488 push_value (value);
1491 static void
1492 expand_iinc (unsigned int local_var_index, int ival, int pc)
1494 tree local_var, res;
1495 tree constant_value;
1497 flush_quick_stack ();
1498 local_var = find_local_variable (local_var_index, int_type_node, pc);
1499 constant_value = build_int_cst (NULL_TREE, ival);
1500 res = fold_build2 (PLUS_EXPR, int_type_node, local_var, constant_value);
1501 java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (local_var), local_var, res));
1505 tree
1506 build_java_soft_divmod (enum tree_code op, tree type, tree op1, tree op2)
1508 tree call = NULL;
1509 tree arg1 = convert (type, op1);
1510 tree arg2 = convert (type, op2);
1512 if (type == int_type_node)
1514 switch (op)
1516 case TRUNC_DIV_EXPR:
1517 call = soft_idiv_node;
1518 break;
1519 case TRUNC_MOD_EXPR:
1520 call = soft_irem_node;
1521 break;
1522 default:
1523 break;
1526 else if (type == long_type_node)
1528 switch (op)
1530 case TRUNC_DIV_EXPR:
1531 call = soft_ldiv_node;
1532 break;
1533 case TRUNC_MOD_EXPR:
1534 call = soft_lrem_node;
1535 break;
1536 default:
1537 break;
1541 gcc_assert (call);
1542 call = build_call_nary (type, build_address_of (call), 2, arg1, arg2);
1543 return call;
1546 tree
1547 build_java_binop (enum tree_code op, tree type, tree arg1, tree arg2)
1549 tree mask;
1550 switch (op)
1552 case URSHIFT_EXPR:
1554 tree u_type = unsigned_type_for (type);
1555 arg1 = convert (u_type, arg1);
1556 arg1 = build_java_binop (RSHIFT_EXPR, u_type, arg1, arg2);
1557 return convert (type, arg1);
1559 case LSHIFT_EXPR:
1560 case RSHIFT_EXPR:
1561 mask = build_int_cst (NULL_TREE,
1562 TYPE_PRECISION (TREE_TYPE (arg1)) - 1);
1563 arg2 = fold_build2 (BIT_AND_EXPR, int_type_node, arg2, mask);
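/* For example, for a 32-bit int the mask is 31, so a Java expression such
   as `x << 33' is emitted as `x << 1', matching the JVM rule that only the
   low 5 bits (6 bits for long) of the shift count are used. */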
1564 break;
1566 case COMPARE_L_EXPR: /* arg1 > arg2 ? 1 : arg1 == arg2 ? 0 : -1 */
1567 case COMPARE_G_EXPR: /* arg1 < arg2 ? -1 : arg1 == arg2 ? 0 : 1 */
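/* The two variants differ only when a comparison is unordered (a NaN
   operand): for COMPARE_L_EXPR both the GT and EQ tests below are false,
   so the result is -1, while COMPARE_G_EXPR yields 1. This mirrors the
   JVM's fcmpl/fcmpg and dcmpl/dcmpg instruction pairs. */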
1568 arg1 = save_expr (arg1); arg2 = save_expr (arg2);
1570 tree ifexp1 = fold_build2 (op == COMPARE_L_EXPR ? GT_EXPR : LT_EXPR,
1571 boolean_type_node, arg1, arg2);
1572 tree ifexp2 = fold_build2 (EQ_EXPR, boolean_type_node, arg1, arg2);
1573 tree second_compare = fold_build3 (COND_EXPR, int_type_node,
1574 ifexp2, integer_zero_node,
1575 op == COMPARE_L_EXPR
1576 ? integer_minus_one_node
1577 : integer_one_node);
1578 return fold_build3 (COND_EXPR, int_type_node, ifexp1,
1579 op == COMPARE_L_EXPR ? integer_one_node
1580 : integer_minus_one_node,
1581 second_compare);
1583 case COMPARE_EXPR:
1584 arg1 = save_expr (arg1); arg2 = save_expr (arg2);
1586 tree ifexp1 = fold_build2 (LT_EXPR, boolean_type_node, arg1, arg2);
1587 tree ifexp2 = fold_build2 (GT_EXPR, boolean_type_node, arg1, arg2);
1588 tree second_compare = fold_build3 (COND_EXPR, int_type_node,
1589 ifexp2, integer_one_node,
1590 integer_zero_node);
1591 return fold_build3 (COND_EXPR, int_type_node,
1592 ifexp1, integer_minus_one_node, second_compare);
1594 case TRUNC_DIV_EXPR:
1595 case TRUNC_MOD_EXPR:
1596 if (TREE_CODE (type) == REAL_TYPE
1597 && op == TRUNC_MOD_EXPR)
1599 tree call;
1600 if (type != double_type_node)
1602 arg1 = convert (double_type_node, arg1);
1603 arg2 = convert (double_type_node, arg2);
1605 call = build_call_nary (double_type_node,
1606 build_address_of (soft_fmod_node),
1607 2, arg1, arg2);
1608 if (type != double_type_node)
1609 call = convert (type, call);
1610 return call;
1613 if (TREE_CODE (type) == INTEGER_TYPE
1614 && flag_use_divide_subroutine
1615 && ! flag_syntax_only)
1616 return build_java_soft_divmod (op, type, arg1, arg2);
1618 break;
1619 default: ;
1621 return fold_build2 (op, type, arg1, arg2);
1624 static void
1625 expand_java_binop (tree type, enum tree_code op)
1627 tree larg, rarg;
1628 tree ltype = type;
1629 tree rtype = type;
1630 switch (op)
1632 case LSHIFT_EXPR:
1633 case RSHIFT_EXPR:
1634 case URSHIFT_EXPR:
1635 rtype = int_type_node;
1636 rarg = pop_value (rtype);
1637 break;
1638 default:
1639 rarg = pop_value (rtype);
1641 larg = pop_value (ltype);
1642 push_value (build_java_binop (op, type, larg, rarg));
1645 /* Lookup the field named NAME in *TYPEP or its super classes.
1646 If not found, return NULL_TREE.
1647 (If the *TYPEP is not found, or if the field reference is
1648 ambiguous, return error_mark_node.)
1649 If found, return the FIELD_DECL, and set *TYPEP to the
1650 class containing the field. */
1652 tree
1653 lookup_field (tree *typep, tree name)
1655 if (CLASS_P (*typep) && !CLASS_LOADED_P (*typep))
1657 load_class (*typep, 1);
1658 safe_layout_class (*typep);
1659 if (!TYPE_SIZE (*typep) || TREE_CODE (TYPE_SIZE (*typep)) == ERROR_MARK)
1660 return error_mark_node;
1664 tree field, binfo, base_binfo;
1665 tree save_field;
1666 int i;
1668 for (field = TYPE_FIELDS (*typep); field; field = TREE_CHAIN (field))
1669 if (DECL_NAME (field) == name)
1670 return field;
1672 /* Process implemented interfaces. */
1673 save_field = NULL_TREE;
1674 for (binfo = TYPE_BINFO (*typep), i = 0;
1675 BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
1677 tree t = BINFO_TYPE (base_binfo);
1678 if ((field = lookup_field (&t, name)))
1680 if (save_field == field)
1681 continue;
1682 if (save_field == NULL_TREE)
1683 save_field = field;
1684 else
1686 tree i1 = DECL_CONTEXT (save_field);
1687 tree i2 = DECL_CONTEXT (field);
1688 error ("reference %qs is ambiguous: appears in interface %qs and interface %qs",
1689 IDENTIFIER_POINTER (name),
1690 IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (i1))),
1691 IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (i2))));
1692 return error_mark_node;
1697 if (save_field != NULL_TREE)
1698 return save_field;
1700 *typep = CLASSTYPE_SUPER (*typep);
1701 } while (*typep);
1702 return NULL_TREE;
1705 /* Look up the field named NAME in object SELF_VALUE,
1706 which has class SELF_CLASS (a non-handle RECORD_TYPE).
1707 SELF_VALUE is NULL_TREE if looking for a static field. */
1709 tree
1710 build_field_ref (tree self_value, tree self_class, tree name)
1712 tree base_class = self_class;
1713 tree field_decl = lookup_field (&base_class, name);
1714 if (field_decl == NULL_TREE)
1716 error ("field %qs not found", IDENTIFIER_POINTER (name));
1717 return error_mark_node;
1719 if (self_value == NULL_TREE)
1721 return build_static_field_ref (field_decl);
1723 else
1725 tree base_type = promote_type (base_class);
1727 /* CHECK is true if self_value is not the this pointer. */
1728 int check = (! (DECL_P (self_value)
1729 && DECL_NAME (self_value) == this_identifier_node));
1731 /* Determine whether a field offset from NULL will lie within
1732 Page 0: this is necessary on those GNU/Linux/BSD systems that
1733 trap SEGV to generate NullPointerExceptions.
1735 We assume that Page 0 will be mapped with NOPERM, and that
1736 memory may be allocated from any other page, so only field
1737 offsets < pagesize are guaranteed to trap. We also assume
1738 the smallest page size we'll encounter is 4k bytes. */
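/* For example, under the 4k-page assumption a load through a null `this'
   of a field at offset 8 faults inside the unmapped page 0 and is turned
   into a NullPointerException by the SEGV handler, so no explicit null
   check is generated; a field at offset 8192 would fall outside page 0,
   so the explicit check is kept. */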
1739 if (! flag_syntax_only && check && ! flag_check_references
1740 && ! flag_indirect_dispatch)
1742 tree field_offset = byte_position (field_decl);
1743 if (! page_size)
1744 page_size = size_int (4096);
1745 check = ! INT_CST_LT_UNSIGNED (field_offset, page_size);
1748 if (base_type != TREE_TYPE (self_value))
1749 self_value = fold_build1 (NOP_EXPR, base_type, self_value);
1750 if (! flag_syntax_only && flag_indirect_dispatch)
1752 tree otable_index
1753 = build_int_cst (NULL_TREE, get_symbol_table_index
1754 (field_decl, NULL_TREE,
1755 &TYPE_OTABLE_METHODS (output_class)));
1756 tree field_offset
1757 = build4 (ARRAY_REF, integer_type_node,
1758 TYPE_OTABLE_DECL (output_class), otable_index,
1759 NULL_TREE, NULL_TREE);
1760 tree address;
1762 if (DECL_CONTEXT (field_decl) != output_class)
1763 field_offset
1764 = build3 (COND_EXPR, TREE_TYPE (field_offset),
1765 build2 (EQ_EXPR, boolean_type_node,
1766 field_offset, integer_zero_node),
1767 build_call_nary (void_type_node,
1768 build_address_of (soft_nosuchfield_node),
1769 1, otable_index),
1770 field_offset);
1772 field_offset = fold (convert (sizetype, field_offset));
1773 self_value = java_check_reference (self_value, check);
1774 address
1775 = fold_build2 (POINTER_PLUS_EXPR,
1776 TREE_TYPE (self_value),
1777 self_value, field_offset);
1778 address = fold_convert (build_pointer_type (TREE_TYPE (field_decl)),
1779 address);
1780 return fold_build1 (INDIRECT_REF, TREE_TYPE (field_decl), address);
1783 self_value = build_java_indirect_ref (TREE_TYPE (TREE_TYPE (self_value)),
1784 self_value, check);
1785 return fold_build3 (COMPONENT_REF, TREE_TYPE (field_decl),
1786 self_value, field_decl, NULL_TREE);
1790 tree
1791 lookup_label (int pc)
1793 tree name;
1794 char buf[32];
1795 if (pc > highest_label_pc_this_method)
1796 highest_label_pc_this_method = pc;
1797 ASM_GENERATE_INTERNAL_LABEL(buf, "LJpc=", start_label_pc_this_method + pc);
1798 name = get_identifier (buf);
1799 if (IDENTIFIER_LOCAL_VALUE (name))
1800 return IDENTIFIER_LOCAL_VALUE (name);
1801 else
1803 /* The type of the address of a label is return_address_type_node. */
1804 tree decl = create_label_decl (name);
1805 return pushdecl (decl);
1809 /* Generate a unique name for labels used by loops, switches, and
1810 try-catch-finally blocks, or for temporary variables. */
1812 tree
1813 generate_name (void)
1815 static int l_number = 0;
1816 char buff [32];
1817 ASM_GENERATE_INTERNAL_LABEL(buff, "LJv", l_number);
1818 l_number++;
1819 return get_identifier (buff);
1822 tree
1823 create_label_decl (tree name)
1825 tree decl;
1826 decl = build_decl (LABEL_DECL, name,
1827 TREE_TYPE (return_address_type_node));
1828 DECL_CONTEXT (decl) = current_function_decl;
1829 DECL_IGNORED_P (decl) = 1;
1830 return decl;
1833 /* This maps a bytecode offset (PC) to various flags. */
1834 char *instruction_bits;
1836 /* This is a vector of type states for the current method. It is
1837 indexed by PC. Each element is a tree vector holding the type
1838 state at that PC. We only note type states at basic block
1839 boundaries. */
1840 VEC(tree, gc) *type_states;
1842 static void
1843 note_label (int current_pc ATTRIBUTE_UNUSED, int target_pc)
1845 lookup_label (target_pc);
1846 instruction_bits [target_pc] |= BCODE_JUMP_TARGET;
1849 /* Emit code to jump to TARGET_PC if VALUE1 CONDITION VALUE2,
1850 where CONDITION is one of the compare operators. */
1852 static void
1853 expand_compare (enum tree_code condition, tree value1, tree value2,
1854 int target_pc)
1856 tree target = lookup_label (target_pc);
1857 tree cond = fold_build2 (condition, boolean_type_node, value1, value2);
1858 java_add_stmt
1859 (build3 (COND_EXPR, void_type_node, java_truthvalue_conversion (cond),
1860 build1 (GOTO_EXPR, void_type_node, target),
1861 build_java_empty_stmt ()));
1864 /* Emit code for a TEST-type opcode. */
1866 static void
1867 expand_test (enum tree_code condition, tree type, int target_pc)
1869 tree value1, value2;
1870 flush_quick_stack ();
1871 value1 = pop_value (type);
1872 value2 = (type == ptr_type_node) ? null_pointer_node : integer_zero_node;
1873 expand_compare (condition, value1, value2, target_pc);
1876 /* Emit code for a COND-type opcode. */
1878 static void
1879 expand_cond (enum tree_code condition, tree type, int target_pc)
1881 tree value1, value2;
1882 flush_quick_stack ();
1883 /* note: pop values in opposite order */
1884 value2 = pop_value (type);
1885 value1 = pop_value (type);
1886 /* Maybe should check value1 and value2 for type compatibility ??? */
1887 expand_compare (condition, value1, value2, target_pc);
1890 static void
1891 expand_java_goto (int target_pc)
1893 tree target_label = lookup_label (target_pc);
1894 flush_quick_stack ();
1895 java_add_stmt (build1 (GOTO_EXPR, void_type_node, target_label));
1898 static tree
1899 expand_java_switch (tree selector, int default_pc)
1901 tree switch_expr, x;
1903 flush_quick_stack ();
1904 switch_expr = build3 (SWITCH_EXPR, TREE_TYPE (selector), selector,
1905 NULL_TREE, NULL_TREE);
1906 java_add_stmt (switch_expr);
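/* Emit the default case first: an artificial label marking the default
   case, followed by a jump to the bytecode's default target.  */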
1908 x = build3 (CASE_LABEL_EXPR, void_type_node, NULL_TREE, NULL_TREE,
1909 create_artificial_label ());
1910 append_to_statement_list (x, &SWITCH_BODY (switch_expr));
1912 x = build1 (GOTO_EXPR, void_type_node, lookup_label (default_pc));
1913 append_to_statement_list (x, &SWITCH_BODY (switch_expr));
1915 return switch_expr;
1918 static void
1919 expand_java_add_case (tree switch_expr, int match, int target_pc)
1921 tree value, x;
1923 value = build_int_cst (TREE_TYPE (switch_expr), match);
1925 x = build3 (CASE_LABEL_EXPR, void_type_node, value, NULL_TREE,
1926 create_artificial_label ());
1927 append_to_statement_list (x, &SWITCH_BODY (switch_expr));
1929 x = build1 (GOTO_EXPR, void_type_node, lookup_label (target_pc));
1930 append_to_statement_list (x, &SWITCH_BODY (switch_expr));
1933 static tree
1934 pop_arguments (tree arg_types)
1936 if (arg_types == end_params_node)
1937 return NULL_TREE;
1938 if (TREE_CODE (arg_types) == TREE_LIST)
1940 tree tail = pop_arguments (TREE_CHAIN (arg_types));
1941 tree type = TREE_VALUE (arg_types);
1942 tree arg = pop_value (type);
1944 /* We simply cast each argument to its proper type. This is
1945 needed since we lose type information coming out of the
1946 verifier. We also have to do this when we pop an integer
1947 type that must be promoted for the function call. */
1948 if (TREE_CODE (type) == POINTER_TYPE)
1949 arg = build1 (NOP_EXPR, type, arg);
1950 else if (targetm.calls.promote_prototypes (type)
1951 && TYPE_PRECISION (type) < TYPE_PRECISION (integer_type_node)
1952 && INTEGRAL_TYPE_P (type))
1953 arg = convert (integer_type_node, arg);
1954 return tree_cons (NULL_TREE, arg, tail);
1956 gcc_unreachable ();
1959 /* Attach to PTR (a block) the declaration found in ENTRY. */
 1961 int
 1962 attach_init_test_initialization_flags (void **entry, void *ptr)
1964 tree block = (tree)ptr;
1965 struct treetreehash_entry *ite = (struct treetreehash_entry *) *entry;
1967 if (block != error_mark_node)
1969 if (TREE_CODE (block) == BIND_EXPR)
1971 tree body = BIND_EXPR_BODY (block);
1972 TREE_CHAIN (ite->value) = BIND_EXPR_VARS (block);
1973 BIND_EXPR_VARS (block) = ite->value;
1974 body = build2 (COMPOUND_EXPR, void_type_node,
1975 build1 (DECL_EXPR, void_type_node, ite->value), body);
1976 BIND_EXPR_BODY (block) = body;
1978 else
1980 tree body = BLOCK_SUBBLOCKS (block);
1981 TREE_CHAIN (ite->value) = BLOCK_EXPR_DECLS (block);
1982 BLOCK_EXPR_DECLS (block) = ite->value;
1983 body = build2 (COMPOUND_EXPR, void_type_node,
1984 build1 (DECL_EXPR, void_type_node, ite->value), body);
1985 BLOCK_SUBBLOCKS (block) = body;
1989 return true;
1992 /* Build an expression to initialize the class CLAS.
 1993 If EXPR is non-NULL, return an expression that first calls the initializer
 1994 (if it is needed) and then evaluates EXPR. */
1996 tree
1997 build_class_init (tree clas, tree expr)
1999 tree init;
2001 /* An optimization: if CLAS is a superclass of the class we're
2002 compiling, we don't need to initialize it. However, if CLAS is
2003 an interface, it won't necessarily be initialized, even if we
2004 implement it. */
2005 if ((! CLASS_INTERFACE (TYPE_NAME (clas))
2006 && inherits_from_p (current_class, clas))
2007 || current_class == clas)
2008 return expr;
2010 if (always_initialize_class_p)
2012 init = build_call_nary (void_type_node,
2013 build_address_of (soft_initclass_node),
2014 1, build_class_ref (clas));
2015 TREE_SIDE_EFFECTS (init) = 1;
2017 else
2019 tree *init_test_decl;
2020 tree decl;
2021 init_test_decl = java_treetreehash_new
2022 (DECL_FUNCTION_INIT_TEST_TABLE (current_function_decl), clas);
2024 if (*init_test_decl == NULL)
2026 /* Build a declaration and mark it as a flag used to track
2027 static class initializations. */
2028 decl = build_decl (VAR_DECL, NULL_TREE,
2029 boolean_type_node);
2030 MAYBE_CREATE_VAR_LANG_DECL_SPECIFIC (decl);
2031 DECL_CONTEXT (decl) = current_function_decl;
2032 DECL_INITIAL (decl) = boolean_false_node;
2033 /* Don't emit any symbolic debugging info for this decl. */
2034 DECL_IGNORED_P (decl) = 1;
2035 *init_test_decl = decl;
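/* Generate the moral equivalent of

     if (init_test_decl == false)
       soft_initclass (clas);
     init_test_decl = true;

   so the runtime initialization hook is invoked at most once on any
   path through this method.  */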
2038 init = build_call_nary (void_type_node,
2039 build_address_of (soft_initclass_node),
2040 1, build_class_ref (clas));
2041 TREE_SIDE_EFFECTS (init) = 1;
2042 init = build3 (COND_EXPR, void_type_node,
2043 build2 (EQ_EXPR, boolean_type_node,
2044 *init_test_decl, boolean_false_node),
2045 init, integer_zero_node);
2046 TREE_SIDE_EFFECTS (init) = 1;
2047 init = build2 (COMPOUND_EXPR, TREE_TYPE (expr), init,
2048 build2 (MODIFY_EXPR, boolean_type_node,
2049 *init_test_decl, boolean_true_node));
2050 TREE_SIDE_EFFECTS (init) = 1;
2053 if (expr != NULL_TREE)
2055 expr = build2 (COMPOUND_EXPR, TREE_TYPE (expr), init, expr);
2056 TREE_SIDE_EFFECTS (expr) = 1;
2057 return expr;
2059 return init;
2064 /* Rewrite expensive calls that require stack unwinding at runtime to
2065 cheaper alternatives. The logic here performs these
2066 transformations:
2068 java.lang.Class.forName("foo") -> java.lang.Class.forName("foo", class$)
 2069 java.lang.Class.getClassLoader() -> java.lang.Class.getClassLoader(class$)
 2071 */
2073 typedef struct
2075 const char *classname;
2076 const char *method;
2077 const char *signature;
2078 const char *new_classname;
2079 const char *new_signature;
2080 int flags;
2081 tree (*rewrite_arglist) (tree arglist);
2082 } rewrite_rule;
2084 /* Add __builtin_return_address(0) to the end of an arglist. */
2087 static tree
2088 rewrite_arglist_getcaller (tree arglist)
2090 tree retaddr
2091 = build_call_expr (built_in_decls[BUILT_IN_RETURN_ADDRESS],
2092 1, integer_zero_node);
2094 DECL_UNINLINABLE (current_function_decl) = 1;
2096 return chainon (arglist,
2097 tree_cons (NULL_TREE, retaddr,
2098 NULL_TREE));
2101 /* Add this.class to the end of an arglist. */
2103 static tree
2104 rewrite_arglist_getclass (tree arglist)
2106 return chainon (arglist,
2107 tree_cons (NULL_TREE, build_class_ref (output_class),
2108 NULL_TREE));
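/* The rewrite table.  Each entry names the original class, method and
   signature, the class and signature to call instead, the access flags
   to use if a dummy decl for the replacement has to be created, and an
   optional hook that appends the extra trailing argument.  The table is
   terminated by an all-NULL entry.  */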
2111 static rewrite_rule rules[] =
2112 {{"java.lang.Class", "getClassLoader", "()Ljava/lang/ClassLoader;",
2113 "java.lang.Class", "(Ljava/lang/Class;)Ljava/lang/ClassLoader;",
2114 ACC_FINAL|ACC_PRIVATE, rewrite_arglist_getclass},
2116 {"java.lang.Class", "forName", "(Ljava/lang/String;)Ljava/lang/Class;",
2117 "java.lang.Class", "(Ljava/lang/String;Ljava/lang/Class;)Ljava/lang/Class;",
2118 ACC_FINAL|ACC_PRIVATE|ACC_STATIC, rewrite_arglist_getclass},
2120 {"gnu.classpath.VMStackWalker", "getCallingClass", "()Ljava/lang/Class;",
2121 "gnu.classpath.VMStackWalker", "(Lgnu/gcj/RawData;)Ljava/lang/Class;",
2122 ACC_FINAL|ACC_PRIVATE|ACC_STATIC, rewrite_arglist_getcaller},
2124 {"gnu.classpath.VMStackWalker", "getCallingClassLoader",
2125 "()Ljava/lang/ClassLoader;",
2126 "gnu.classpath.VMStackWalker", "(Lgnu/gcj/RawData;)Ljava/lang/ClassLoader;",
2127 ACC_FINAL|ACC_PRIVATE|ACC_STATIC, rewrite_arglist_getcaller},
2129 {"gnu.java.lang.VMCPStringBuilder", "toString", "([CII)Ljava/lang/String;",
2130 "java.lang.String", "([CII)Ljava/lang/String;",
2131 ACC_FINAL|ACC_PRIVATE|ACC_STATIC, NULL},
2133 {NULL, NULL, NULL, NULL, NULL, 0, NULL}};
2135 /* True if this method is special, i.e. it's a private method that
2136 should be exported from a DSO. */
2138 bool
2139 special_method_p (tree candidate_method)
2141 tree context = DECL_NAME (TYPE_NAME (DECL_CONTEXT (candidate_method)));
2142 tree method = DECL_NAME (candidate_method);
2143 rewrite_rule *p;
2145 for (p = rules; p->classname; p++)
2147 if (get_identifier (p->classname) == context
2148 && get_identifier (p->method) == method)
2149 return true;
2151 return false;
2154 /* Scan the rules list for replacements for *METHOD_P and replace the
2155 args accordingly. If the rewrite results in an access to a private
 2156 method, update SPECIAL.  */
2158 void
2159 maybe_rewrite_invocation (tree *method_p, tree *arg_list_p,
2160 tree *method_signature_p, tree *special)
2162 tree context = DECL_NAME (TYPE_NAME (DECL_CONTEXT (*method_p)));
2163 rewrite_rule *p;
2164 *special = NULL_TREE;
2166 for (p = rules; p->classname; p++)
2168 if (get_identifier (p->classname) == context)
2170 tree method = DECL_NAME (*method_p);
2171 if (get_identifier (p->method) == method
2172 && get_identifier (p->signature) == *method_signature_p)
2174 tree maybe_method;
2175 tree destination_class
2176 = lookup_class (get_identifier (p->new_classname));
2177 gcc_assert (destination_class);
2178 maybe_method
2179 = lookup_java_method (destination_class,
2180 method,
2181 get_identifier (p->new_signature));
2182 if (! maybe_method && ! flag_verify_invocations)
2184 maybe_method
2185 = add_method (destination_class, p->flags,
2186 method, get_identifier (p->new_signature));
2187 DECL_EXTERNAL (maybe_method) = 1;
2189 *method_p = maybe_method;
2190 gcc_assert (*method_p);
2191 if (p->rewrite_arglist)
2192 *arg_list_p = p->rewrite_arglist (*arg_list_p);
2193 *method_signature_p = get_identifier (p->new_signature);
2194 *special = integer_one_node;
2196 break;
2204 tree
2205 build_known_method_ref (tree method, tree method_type ATTRIBUTE_UNUSED,
2206 tree self_type, tree method_signature ATTRIBUTE_UNUSED,
2207 tree arg_list ATTRIBUTE_UNUSED, tree special)
2209 tree func;
2210 if (is_compiled_class (self_type))
2212 /* With indirect dispatch we have to use indirect calls for all
2213 publicly visible methods or gcc will use PLT indirections
2214 to reach them. We also have to use indirect dispatch for all
2215 external methods. */
2216 if (! flag_indirect_dispatch
2217 || (! DECL_EXTERNAL (method) && ! TREE_PUBLIC (method)))
2219 func = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (method)),
2220 method);
2222 else
2224 tree table_index
2225 = build_int_cst (NULL_TREE,
2226 (get_symbol_table_index
2227 (method, special,
2228 &TYPE_ATABLE_METHODS (output_class))));
2229 func
2230 = build4 (ARRAY_REF,
2231 TREE_TYPE (TREE_TYPE (TYPE_ATABLE_DECL (output_class))),
2232 TYPE_ATABLE_DECL (output_class), table_index,
2233 NULL_TREE, NULL_TREE);
2235 func = convert (method_ptr_type_node, func);
2237 else
2239 /* We don't know whether the method has been (statically) compiled.
2240 Compile this code to get a reference to the method's code:
 2242 SELF_TYPE->methods[METHOD_INDEX].ncode
 2244 */
2246 int method_index = 0;
2247 tree meth, ref;
2249 /* The method might actually be declared in some superclass, so
2250 we have to use its class context, not the caller's notion of
2251 where the method is. */
2252 self_type = DECL_CONTEXT (method);
2253 ref = build_class_ref (self_type);
2254 ref = build1 (INDIRECT_REF, class_type_node, ref);
2255 if (ncode_ident == NULL_TREE)
2256 ncode_ident = get_identifier ("ncode");
2257 if (methods_ident == NULL_TREE)
2258 methods_ident = get_identifier ("methods");
2259 ref = build3 (COMPONENT_REF, method_ptr_type_node, ref,
2260 lookup_field (&class_type_node, methods_ident),
2261 NULL_TREE);
2262 for (meth = TYPE_METHODS (self_type);
2263 ; meth = TREE_CHAIN (meth))
2265 if (method == meth)
2266 break;
2267 if (meth == NULL_TREE)
2268 fatal_error ("method '%s' not found in class",
2269 IDENTIFIER_POINTER (DECL_NAME (method)));
2270 method_index++;
2272 method_index *= int_size_in_bytes (method_type_node);
2273 ref = fold_build2 (POINTER_PLUS_EXPR, method_ptr_type_node,
2274 ref, size_int (method_index));
2275 ref = build1 (INDIRECT_REF, method_type_node, ref);
2276 func = build3 (COMPONENT_REF, nativecode_ptr_type_node,
2277 ref, lookup_field (&method_type_node, ncode_ident),
2278 NULL_TREE);
2280 return func;
2283 tree
2284 invoke_build_dtable (int is_invoke_interface, tree arg_list)
2286 tree dtable, objectref;
2288 TREE_VALUE (arg_list) = save_expr (TREE_VALUE (arg_list));
2290 /* If we're dealing with interfaces and if the objectref
2291 argument is an array then get the dispatch table of the class
2292 Object rather than the one from the objectref. */
2293 objectref = (is_invoke_interface
2294 && is_array_type_p (TREE_TYPE (TREE_VALUE (arg_list)))
2295 ? build_class_ref (object_type_node) : TREE_VALUE (arg_list));
2297 if (dtable_ident == NULL_TREE)
2298 dtable_ident = get_identifier ("vtable");
2299 dtable = build_java_indirect_ref (object_type_node, objectref,
2300 flag_check_references);
2301 dtable = build3 (COMPONENT_REF, dtable_ptr_type, dtable,
2302 lookup_field (&object_type_node, dtable_ident), NULL_TREE);
2304 return dtable;
2307 /* Determine the index in SYMBOL_TABLE for a reference to the decl
2308 T. If this decl has not been seen before, it will be added to the
2309 [oa]table_methods. If it has, the existing table slot will be
2310 reused. */
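/* Note that the returned index is 1-based; this function never hands
   out slot 0 of the table.  */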
 2312 int
 2313 get_symbol_table_index (tree t, tree special, tree *symbol_table)
2315 int i = 1;
2316 tree method_list;
2318 if (*symbol_table == NULL_TREE)
2320 *symbol_table = build_tree_list (special, t);
2321 return 1;
2324 method_list = *symbol_table;
2326 while (1)
2328 tree value = TREE_VALUE (method_list);
2329 tree purpose = TREE_PURPOSE (method_list);
2330 if (value == t && purpose == special)
2331 return i;
2332 i++;
2333 if (TREE_CHAIN (method_list) == NULL_TREE)
2334 break;
2335 else
2336 method_list = TREE_CHAIN (method_list);
2339 TREE_CHAIN (method_list) = build_tree_list (special, t);
2340 return i;
2343 tree
2344 build_invokevirtual (tree dtable, tree method, tree special)
2346 tree func;
2347 tree nativecode_ptr_ptr_type_node
2348 = build_pointer_type (nativecode_ptr_type_node);
2349 tree method_index;
2350 tree otable_index;
2352 if (flag_indirect_dispatch)
2354 gcc_assert (! CLASS_INTERFACE (TYPE_NAME (DECL_CONTEXT (method))));
2356 otable_index
2357 = build_int_cst (NULL_TREE, get_symbol_table_index
2358 (method, special,
2359 &TYPE_OTABLE_METHODS (output_class)));
2360 method_index = build4 (ARRAY_REF, integer_type_node,
2361 TYPE_OTABLE_DECL (output_class),
2362 otable_index, NULL_TREE, NULL_TREE);
2364 else
2366 /* We fetch the DECL_VINDEX field directly here, rather than
2367 using get_method_index(). DECL_VINDEX is the true offset
 2368 from the vtable base to a method, regardless of any extra
2369 words inserted at the start of the vtable. */
2370 method_index = DECL_VINDEX (method);
2371 method_index = size_binop (MULT_EXPR, method_index,
2372 TYPE_SIZE_UNIT (nativecode_ptr_ptr_type_node));
2373 if (TARGET_VTABLE_USES_DESCRIPTORS)
2374 method_index = size_binop (MULT_EXPR, method_index,
2375 size_int (TARGET_VTABLE_USES_DESCRIPTORS));
2378 func = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dtable), dtable,
2379 convert (sizetype, method_index));
2381 if (TARGET_VTABLE_USES_DESCRIPTORS)
2382 func = build1 (NOP_EXPR, nativecode_ptr_type_node, func);
2383 else
2385 func = fold_convert (nativecode_ptr_ptr_type_node, func);
2386 func = build1 (INDIRECT_REF, nativecode_ptr_type_node, func);
2389 return func;
2392 static GTY(()) tree class_ident;
2393 tree
2394 build_invokeinterface (tree dtable, tree method)
2396 tree interface;
2397 tree idx;
2399 /* We expand invokeinterface here. */
2401 if (class_ident == NULL_TREE)
2402 class_ident = get_identifier ("class");
2404 dtable = build_java_indirect_ref (dtable_type, dtable,
2405 flag_check_references);
2406 dtable = build3 (COMPONENT_REF, class_ptr_type, dtable,
2407 lookup_field (&dtable_type, class_ident), NULL_TREE);
2409 interface = DECL_CONTEXT (method);
2410 gcc_assert (CLASS_INTERFACE (TYPE_NAME (interface)));
2411 layout_class_methods (interface);
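/* With indirect dispatch the itable stores (interface, method index)
   pairs: the doubled symbol-table index selects the pair, with the
   interface class at ITABLE_INDEX - 1 and the method's index within
   that interface at ITABLE_INDEX.  */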
2413 if (flag_indirect_dispatch)
2415 int itable_index
2416 = 2 * (get_symbol_table_index
2417 (method, NULL_TREE, &TYPE_ITABLE_METHODS (output_class)));
2418 interface
2419 = build4 (ARRAY_REF,
2420 TREE_TYPE (TREE_TYPE (TYPE_ITABLE_DECL (output_class))),
2421 TYPE_ITABLE_DECL (output_class),
2422 build_int_cst (NULL_TREE, itable_index-1),
2423 NULL_TREE, NULL_TREE);
2424 idx
2425 = build4 (ARRAY_REF,
2426 TREE_TYPE (TREE_TYPE (TYPE_ITABLE_DECL (output_class))),
2427 TYPE_ITABLE_DECL (output_class),
2428 build_int_cst (NULL_TREE, itable_index),
2429 NULL_TREE, NULL_TREE);
2430 interface = convert (class_ptr_type, interface);
2431 idx = convert (integer_type_node, idx);
2433 else
2435 idx = build_int_cst (NULL_TREE,
2436 get_interface_method_index (method, interface));
2437 interface = build_class_ref (interface);
2440 return build_call_nary (ptr_type_node,
2441 build_address_of (soft_lookupinterfacemethod_node),
2442 3, dtable, interface, idx);
2445 /* Expand one of the invoke_* opcodes.
2446 OPCODE is the specific opcode.
2447 METHOD_REF_INDEX is an index into the constant pool.
2448 NARGS is the number of arguments, or -1 if not specified. */
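/* The opcode determines how the callee is reached: invokestatic,
   invokespecial and private/final invokevirtual calls use a direct
   known-method reference, other invokevirtual calls index the vtable,
   and invokeinterface goes through the interface lookup helper.  */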
2450 static void
2451 expand_invoke (int opcode, int method_ref_index, int nargs ATTRIBUTE_UNUSED)
2453 tree method_signature
2454 = COMPONENT_REF_SIGNATURE(&current_jcf->cpool, method_ref_index);
2455 tree method_name = COMPONENT_REF_NAME (&current_jcf->cpool,
2456 method_ref_index);
2457 tree self_type
2458 = get_class_constant (current_jcf,
2459 COMPONENT_REF_CLASS_INDEX(&current_jcf->cpool,
2460 method_ref_index));
2461 const char *const self_name
2462 = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (self_type)));
2463 tree call, func, method, arg_list, method_type;
2464 tree check = NULL_TREE;
2466 tree special = NULL_TREE;
2468 if (! CLASS_LOADED_P (self_type))
2470 load_class (self_type, 1);
2471 safe_layout_class (self_type);
2472 if (TREE_CODE (TYPE_SIZE (self_type)) == ERROR_MARK)
2473 fatal_error ("failed to find class '%s'", self_name);
2475 layout_class_methods (self_type);
2477 if (ID_INIT_P (method_name))
2478 method = lookup_java_constructor (self_type, method_signature);
2479 else
2480 method = lookup_java_method (self_type, method_name, method_signature);
2482 /* We've found a method in a class other than the one in which it
2483 was wanted. This can happen if, for instance, we're trying to
2484 compile invokespecial super.equals().
2485 FIXME: This is a kludge. Rather than nullifying the result, we
2486 should change lookup_java_method() so that it doesn't search the
2487 superclass chain when we're BC-compiling. */
2488 if (! flag_verify_invocations
2489 && method
2490 && ! TYPE_ARRAY_P (self_type)
2491 && self_type != DECL_CONTEXT (method))
2492 method = NULL_TREE;
2494 /* We've found a method in an interface, but this isn't an interface
2495 call. */
2496 if (opcode != OPCODE_invokeinterface
2497 && method
2498 && (CLASS_INTERFACE (TYPE_NAME (DECL_CONTEXT (method)))))
2499 method = NULL_TREE;
2501 /* We've found a non-interface method but we are making an
2502 interface call. This can happen if the interface overrides a
2503 method in Object. */
2504 if (! flag_verify_invocations
2505 && opcode == OPCODE_invokeinterface
2506 && method
2507 && ! CLASS_INTERFACE (TYPE_NAME (DECL_CONTEXT (method))))
2508 method = NULL_TREE;
2510 if (method == NULL_TREE)
2512 if (flag_verify_invocations || ! flag_indirect_dispatch)
2514 error ("class '%s' has no method named '%s' matching signature '%s'",
2515 self_name,
2516 IDENTIFIER_POINTER (method_name),
2517 IDENTIFIER_POINTER (method_signature));
2519 else
2521 int flags = ACC_PUBLIC;
2522 if (opcode == OPCODE_invokestatic)
2523 flags |= ACC_STATIC;
2524 if (opcode == OPCODE_invokeinterface)
2526 flags |= ACC_INTERFACE | ACC_ABSTRACT;
2527 CLASS_INTERFACE (TYPE_NAME (self_type)) = 1;
2529 method = add_method (self_type, flags, method_name,
2530 method_signature);
2531 DECL_ARTIFICIAL (method) = 1;
2532 METHOD_DUMMY (method) = 1;
2533 layout_class_method (self_type, NULL,
2534 method, NULL);
 2538 /* invokestatic must target a static, non-abstract method, while the other invoke opcodes must not target a static method. */
2539 if (method != NULL_TREE)
2541 if (opcode == OPCODE_invokestatic)
2543 if (!METHOD_STATIC (method))
 2545 error ("invokestatic on non-static method");
2546 method = NULL_TREE;
2548 else if (METHOD_ABSTRACT (method))
2550 error ("invokestatic on abstract method");
2551 method = NULL_TREE;
2554 else
2556 if (METHOD_STATIC (method))
2558 error ("invoke[non-static] on static method");
2559 method = NULL_TREE;
2564 if (method == NULL_TREE)
2566 /* If we got here, we emitted an error message above. So we
2567 just pop the arguments, push a properly-typed zero, and
2568 continue. */
2569 method_type = get_type_from_signature (method_signature);
2570 pop_arguments (TYPE_ARG_TYPES (method_type));
2571 if (opcode != OPCODE_invokestatic)
2572 pop_type (self_type);
2573 method_type = promote_type (TREE_TYPE (method_type));
2574 push_value (convert (method_type, integer_zero_node));
2575 return;
2578 method_type = TREE_TYPE (method);
2579 arg_list = pop_arguments (TYPE_ARG_TYPES (method_type));
2580 flush_quick_stack ();
2582 maybe_rewrite_invocation (&method, &arg_list, &method_signature,
2583 &special);
2585 func = NULL_TREE;
2586 if (opcode == OPCODE_invokestatic)
2587 func = build_known_method_ref (method, method_type, self_type,
2588 method_signature, arg_list, special);
2589 else if (opcode == OPCODE_invokespecial
2590 || (opcode == OPCODE_invokevirtual
2591 && (METHOD_PRIVATE (method)
2592 || METHOD_FINAL (method)
2593 || CLASS_FINAL (TYPE_NAME (self_type)))))
2595 /* If the object for the method call is null, we throw an
2596 exception. We don't do this if the object is the current
2597 method's `this'. In other cases we just rely on an
2598 optimization pass to eliminate redundant checks. FIXME:
2599 Unfortunately there doesn't seem to be a way to determine
2600 what the current method is right now.
2601 We do omit the check if we're calling <init>. */
2602 /* We use a SAVE_EXPR here to make sure we only evaluate
2603 the new `self' expression once. */
2604 tree save_arg = save_expr (TREE_VALUE (arg_list));
2605 TREE_VALUE (arg_list) = save_arg;
2606 check = java_check_reference (save_arg, ! DECL_INIT_P (method));
2607 func = build_known_method_ref (method, method_type, self_type,
2608 method_signature, arg_list, special);
2610 else
2612 tree dtable = invoke_build_dtable (opcode == OPCODE_invokeinterface,
2613 arg_list);
2614 if (opcode == OPCODE_invokevirtual)
2615 func = build_invokevirtual (dtable, method, special);
2616 else
2617 func = build_invokeinterface (dtable, method);
2620 if (TREE_CODE (func) == ADDR_EXPR)
2621 TREE_TYPE (func) = build_pointer_type (method_type);
2622 else
2623 func = build1 (NOP_EXPR, build_pointer_type (method_type), func);
2625 call = build_call_list (TREE_TYPE (method_type), func, arg_list);
2626 TREE_SIDE_EFFECTS (call) = 1;
2627 call = check_for_builtin (method, call);
2629 if (check != NULL_TREE)
2631 call = build2 (COMPOUND_EXPR, TREE_TYPE (call), check, call);
2632 TREE_SIDE_EFFECTS (call) = 1;
2635 if (TREE_CODE (TREE_TYPE (method_type)) == VOID_TYPE)
2636 java_add_stmt (call);
2637 else
2639 push_value (call);
2640 flush_quick_stack ();
2644 /* Create a stub which will be put into the vtable but which will call
2645 a JNI function. */
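/* The generated stub body looks roughly like this:

     env = soft_getjnienvnewframe (klass);
     if (meth == NULL)
       meth = soft_lookupjnimethod (klass, name, signature, args_size);
     res = (*meth) (env, [klass,] args...);
     soft_jnipopsystemframe (env);
     return res;

   where `meth' is a static per-method cache of the looked-up function
   pointer, and the class argument is only passed for static methods.  */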
2647 tree
2648 build_jni_stub (tree method)
2650 tree jnifunc, call, args, body, method_sig, arg_types;
2651 tree jniarg0, jniarg1, jniarg2, jniarg3;
2652 tree jni_func_type, tem;
2653 tree env_var, res_var = NULL_TREE, block;
2654 tree method_args, res_type;
2655 tree meth_var;
2656 tree bind;
2658 int args_size = 0;
2660 tree klass = DECL_CONTEXT (method);
2661 klass = build_class_ref (klass);
2663 gcc_assert (METHOD_NATIVE (method) && flag_jni);
2665 DECL_ARTIFICIAL (method) = 1;
2666 DECL_EXTERNAL (method) = 0;
2668 env_var = build_decl (VAR_DECL, get_identifier ("env"), ptr_type_node);
2669 DECL_CONTEXT (env_var) = method;
2671 if (TREE_TYPE (TREE_TYPE (method)) != void_type_node)
2673 res_var = build_decl (VAR_DECL, get_identifier ("res"),
2674 TREE_TYPE (TREE_TYPE (method)));
2675 DECL_CONTEXT (res_var) = method;
2676 TREE_CHAIN (env_var) = res_var;
2679 method_args = DECL_ARGUMENTS (method);
2680 block = build_block (env_var, NULL_TREE, method_args, NULL_TREE);
2681 TREE_SIDE_EFFECTS (block) = 1;
2682 TREE_TYPE (block) = TREE_TYPE (TREE_TYPE (method));
2684 /* Compute the local `env' by calling _Jv_GetJNIEnvNewFrame. */
2685 body = build2 (MODIFY_EXPR, ptr_type_node, env_var,
2686 build_call_nary (ptr_type_node,
2687 build_address_of (soft_getjnienvnewframe_node),
2688 1, klass));
2690 /* All the arguments to this method become arguments to the
2691 underlying JNI function. If we had to wrap object arguments in a
2692 special way, we would do that here. */
2693 args = NULL_TREE;
2694 for (tem = method_args; tem != NULL_TREE; tem = TREE_CHAIN (tem))
2696 int arg_bits = TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (tem)));
2697 #ifdef PARM_BOUNDARY
2698 arg_bits = (((arg_bits + PARM_BOUNDARY - 1) / PARM_BOUNDARY)
2699 * PARM_BOUNDARY);
2700 #endif
2701 args_size += (arg_bits / BITS_PER_UNIT);
2703 args = tree_cons (NULL_TREE, tem, args);
2705 args = nreverse (args);
2706 arg_types = TYPE_ARG_TYPES (TREE_TYPE (method));
2708 /* For a static method the second argument is the class. For a
2709 non-static method the second argument is `this'; that is already
2710 available in the argument list. */
2711 if (METHOD_STATIC (method))
2713 args_size += int_size_in_bytes (TREE_TYPE (klass));
2714 args = tree_cons (NULL_TREE, klass, args);
2715 arg_types = tree_cons (NULL_TREE, object_ptr_type_node, arg_types);
2718 /* The JNIEnv structure is the first argument to the JNI function. */
2719 args_size += int_size_in_bytes (TREE_TYPE (env_var));
2720 args = tree_cons (NULL_TREE, env_var, args);
2721 arg_types = tree_cons (NULL_TREE, ptr_type_node, arg_types);
2723 /* We call _Jv_LookupJNIMethod to find the actual underlying
2724 function pointer. _Jv_LookupJNIMethod will throw the appropriate
2725 exception if this function is not found at runtime. */
2726 method_sig = build_java_signature (TREE_TYPE (method));
2727 jniarg0 = klass;
2728 jniarg1 = build_utf8_ref (DECL_NAME (method));
2729 jniarg2 = build_utf8_ref (unmangle_classname
2730 (IDENTIFIER_POINTER (method_sig),
2731 IDENTIFIER_LENGTH (method_sig)));
2732 jniarg3 = build_int_cst (NULL_TREE, args_size);
2734 tem = build_function_type (TREE_TYPE (TREE_TYPE (method)), arg_types);
2736 #ifdef MODIFY_JNI_METHOD_CALL
2737 tem = MODIFY_JNI_METHOD_CALL (tem);
2738 #endif
2740 jni_func_type = build_pointer_type (tem);
2742 /* Use the actual function type, rather than a generic pointer type,
2743 such that this decl keeps the actual pointer type from being
2744 garbage-collected. If it is, we end up using canonical types
2745 with different uids for equivalent function types, and this in
2746 turn causes utf8 identifiers and output order to vary. */
2747 meth_var = build_decl (VAR_DECL, get_identifier ("meth"), jni_func_type);
2748 TREE_STATIC (meth_var) = 1;
2749 TREE_PUBLIC (meth_var) = 0;
2750 DECL_EXTERNAL (meth_var) = 0;
2751 DECL_CONTEXT (meth_var) = method;
2752 DECL_ARTIFICIAL (meth_var) = 1;
2753 DECL_INITIAL (meth_var) = null_pointer_node;
2754 TREE_USED (meth_var) = 1;
2755 chainon (env_var, meth_var);
2756 build_result_decl (method);
2758 jnifunc = build3 (COND_EXPR, jni_func_type,
2759 build2 (NE_EXPR, boolean_type_node,
2760 meth_var, build_int_cst (TREE_TYPE (meth_var), 0)),
2761 meth_var,
2762 build2 (MODIFY_EXPR, jni_func_type, meth_var,
2763 build1
2764 (NOP_EXPR, jni_func_type,
2765 build_call_nary (ptr_type_node,
2766 build_address_of
 2767 (soft_lookupjnimethod_node),
 2768 4,
 2769 jniarg0, jniarg1,
2770 jniarg2, jniarg3))));
2772 /* Now we make the actual JNI call via the resulting function
2773 pointer. */
2774 call = build_call_list (TREE_TYPE (TREE_TYPE (method)),
2775 jnifunc, args);
2777 /* If the JNI call returned a result, capture it here. If we had to
2778 unwrap JNI object results, we would do that here. */
2779 if (res_var != NULL_TREE)
2781 /* If the call returns an object, it may return a JNI weak
2782 reference, in which case we must unwrap it. */
2783 if (! JPRIMITIVE_TYPE_P (TREE_TYPE (TREE_TYPE (method))))
2784 call = build_call_nary (TREE_TYPE (TREE_TYPE (method)),
2785 build_address_of (soft_unwrapjni_node),
2786 1, call);
2787 call = build2 (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (method)),
2788 res_var, call);
2791 TREE_SIDE_EFFECTS (call) = 1;
2793 body = build2 (COMPOUND_EXPR, void_type_node, body, call);
2794 TREE_SIDE_EFFECTS (body) = 1;
2796 /* Now free the environment we allocated. */
2797 call = build_call_nary (ptr_type_node,
2798 build_address_of (soft_jnipopsystemframe_node),
2799 1, env_var);
2800 TREE_SIDE_EFFECTS (call) = 1;
2801 body = build2 (COMPOUND_EXPR, void_type_node, body, call);
2802 TREE_SIDE_EFFECTS (body) = 1;
2804 /* Finally, do the return. */
2805 res_type = void_type_node;
2806 if (res_var != NULL_TREE)
2808 tree drt;
2809 gcc_assert (DECL_RESULT (method));
2810 /* Make sure we copy the result variable to the actual
2811 result. We use the type of the DECL_RESULT because it
2812 might be different from the return type of the function:
2813 it might be promoted. */
2814 drt = TREE_TYPE (DECL_RESULT (method));
2815 if (drt != TREE_TYPE (res_var))
2816 res_var = build1 (CONVERT_EXPR, drt, res_var);
2817 res_var = build2 (MODIFY_EXPR, drt, DECL_RESULT (method), res_var);
2818 TREE_SIDE_EFFECTS (res_var) = 1;
2821 body = build2 (COMPOUND_EXPR, void_type_node, body,
2822 build1 (RETURN_EXPR, void_type_node, res_var));
2823 TREE_SIDE_EFFECTS (body) = 1;
2825 /* Prepend class initialization for static methods reachable from
2826 other classes. */
2827 if (METHOD_STATIC (method)
2828 && (! METHOD_PRIVATE (method)
2829 || INNER_CLASS_P (DECL_CONTEXT (method))))
2831 tree init = build_call_expr (soft_initclass_node, 1,
2832 klass);
2833 body = build2 (COMPOUND_EXPR, void_type_node, init, body);
2834 TREE_SIDE_EFFECTS (body) = 1;
2837 bind = build3 (BIND_EXPR, void_type_node, BLOCK_VARS (block),
2838 body, block);
2839 return bind;
2843 /* Given lvalue EXP, return a volatile expression that references the
2844 same object. */
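/* We do this by taking the address of EXP, converting it to a pointer
   to the volatile-qualified variant of EXP's type, and dereferencing
   the result.  */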
2846 tree
2847 java_modify_addr_for_volatile (tree exp)
2849 tree exp_type = TREE_TYPE (exp);
2850 tree v_type
2851 = build_qualified_type (exp_type,
2852 TYPE_QUALS (exp_type) | TYPE_QUAL_VOLATILE);
2853 tree addr = build_fold_addr_expr (exp);
2854 v_type = build_pointer_type (v_type);
2855 addr = fold_convert (v_type, addr);
2856 exp = build_fold_indirect_ref (addr);
2857 return exp;
2861 /* Expand an operation to extract from or store into a field.
2862 IS_STATIC is 1 iff the field is static.
2863 IS_PUTTING is 1 for putting into a field; 0 for getting from the field.
2864 FIELD_REF_INDEX is an index into the constant pool. */
2866 static void
2867 expand_java_field_op (int is_static, int is_putting, int field_ref_index)
2869 tree self_type
2870 = get_class_constant (current_jcf,
2871 COMPONENT_REF_CLASS_INDEX (&current_jcf->cpool,
2872 field_ref_index));
2873 const char *self_name
2874 = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (self_type)));
2875 tree field_name = COMPONENT_REF_NAME (&current_jcf->cpool, field_ref_index);
2876 tree field_signature = COMPONENT_REF_SIGNATURE (&current_jcf->cpool,
2877 field_ref_index);
2878 tree field_type = get_type_from_signature (field_signature);
2879 tree new_value = is_putting ? pop_value (field_type) : NULL_TREE;
2880 tree field_ref;
2881 int is_error = 0;
2882 tree original_self_type = self_type;
2883 tree field_decl;
2884 tree modify_expr;
2886 if (! CLASS_LOADED_P (self_type))
2887 load_class (self_type, 1);
2888 field_decl = lookup_field (&self_type, field_name);
2889 if (field_decl == error_mark_node)
2891 is_error = 1;
2893 else if (field_decl == NULL_TREE)
2895 if (! flag_verify_invocations)
2897 int flags = ACC_PUBLIC;
2898 if (is_static)
2899 flags |= ACC_STATIC;
2900 self_type = original_self_type;
2901 field_decl = add_field (original_self_type, field_name,
2902 field_type, flags);
2903 DECL_ARTIFICIAL (field_decl) = 1;
2904 DECL_IGNORED_P (field_decl) = 1;
2905 #if 0
2906 /* FIXME: We should be pessimistic about volatility. We
2907 don't know one way or another, but this is safe.
2908 However, doing this has bad effects on code quality. We
2909 need to look at better ways to do this. */
2910 TREE_THIS_VOLATILE (field_decl) = 1;
2911 #endif
2913 else
2915 error ("missing field '%s' in '%s'",
2916 IDENTIFIER_POINTER (field_name), self_name);
2917 is_error = 1;
2920 else if (build_java_signature (TREE_TYPE (field_decl)) != field_signature)
2922 error ("mismatching signature for field '%s' in '%s'",
2923 IDENTIFIER_POINTER (field_name), self_name);
2924 is_error = 1;
2926 field_ref = is_static ? NULL_TREE : pop_value (self_type);
2927 if (is_error)
2929 if (! is_putting)
2930 push_value (convert (field_type, integer_zero_node));
2931 flush_quick_stack ();
2932 return;
2935 field_ref = build_field_ref (field_ref, self_type, field_name);
2936 if (is_static
2937 && ! flag_indirect_dispatch)
2939 tree context = DECL_CONTEXT (field_ref);
2940 if (context != self_type && CLASS_INTERFACE (TYPE_NAME (context)))
2941 field_ref = build_class_init (context, field_ref);
2942 else
2943 field_ref = build_class_init (self_type, field_ref);
2945 if (is_putting)
2947 flush_quick_stack ();
2948 if (FIELD_FINAL (field_decl))
2950 if (DECL_CONTEXT (field_decl) != current_class)
2951 error ("assignment to final field %q+D not in field's class",
2952 field_decl);
2953 /* We used to check for assignments to final fields not
2954 occurring in the class initializer or in a constructor
2955 here. However, this constraint doesn't seem to be
2956 enforced by the JVM. */
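/* Volatile accesses are bracketed by full memory barriers
   (__sync_synchronize): one is emitted just before a volatile store
   below, and one just after a volatile load in the `else' branch,
   approximating the ordering the Java memory model requires for
   volatile fields.  */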
2959 if (TREE_THIS_VOLATILE (field_decl))
2960 field_ref = java_modify_addr_for_volatile (field_ref);
2962 modify_expr = build2 (MODIFY_EXPR, TREE_TYPE (field_ref),
2963 field_ref, new_value);
2965 if (TREE_THIS_VOLATILE (field_decl))
2966 java_add_stmt
2967 (build_call_expr (built_in_decls[BUILT_IN_SYNCHRONIZE], 0));
2969 java_add_stmt (modify_expr);
2971 else
2973 tree temp = build_decl (VAR_DECL, NULL_TREE, TREE_TYPE (field_ref));
2974 java_add_local_var (temp);
2976 if (TREE_THIS_VOLATILE (field_decl))
2977 field_ref = java_modify_addr_for_volatile (field_ref);
2979 modify_expr
2980 = build2 (MODIFY_EXPR, TREE_TYPE (field_ref), temp, field_ref);
2981 java_add_stmt (modify_expr);
2983 if (TREE_THIS_VOLATILE (field_decl))
2984 java_add_stmt
2985 (build_call_expr (built_in_decls[BUILT_IN_SYNCHRONIZE], 0));
2987 push_value (temp);
2989 TREE_THIS_VOLATILE (field_ref) = TREE_THIS_VOLATILE (field_decl);
2992 static void
2993 load_type_state (int pc)
2995 int i;
2996 tree vec = VEC_index (tree, type_states, pc);
2997 int cur_length = TREE_VEC_LENGTH (vec);
2998 stack_pointer = cur_length - DECL_MAX_LOCALS(current_function_decl);
2999 for (i = 0; i < cur_length; i++)
3000 type_map [i] = TREE_VEC_ELT (vec, i);
3003 /* Go over METHOD's bytecode and note instruction starts in
3004 instruction_bits[]. */
3006 void
3007 note_instructions (JCF *jcf, tree method)
3009 int PC;
3010 unsigned char* byte_ops;
3011 long length = DECL_CODE_LENGTH (method);
3013 int saw_index;
3014 jint INT_temp;
3016 #undef RET /* Defined by config/i386/i386.h */
3017 #undef PTR
3018 #define BCODE byte_ops
3019 #define BYTE_type_node byte_type_node
3020 #define SHORT_type_node short_type_node
3021 #define INT_type_node int_type_node
3022 #define LONG_type_node long_type_node
3023 #define CHAR_type_node char_type_node
3024 #define PTR_type_node ptr_type_node
3025 #define FLOAT_type_node float_type_node
3026 #define DOUBLE_type_node double_type_node
3027 #define VOID_type_node void_type_node
3028 #define CONST_INDEX_1 (saw_index = 1, IMMEDIATE_u1)
3029 #define CONST_INDEX_2 (saw_index = 1, IMMEDIATE_u2)
3030 #define VAR_INDEX_1 (saw_index = 1, IMMEDIATE_u1)
3031 #define VAR_INDEX_2 (saw_index = 1, IMMEDIATE_u2)
3033 #define CHECK_PC_IN_RANGE(PC) ((void)1) /* Already handled by verifier. */
3035 JCF_SEEK (jcf, DECL_CODE_OFFSET (method));
3036 byte_ops = jcf->read_ptr;
3037 instruction_bits = XRESIZEVAR (char, instruction_bits, length + 1);
3038 memset (instruction_bits, 0, length + 1);
3039 type_states = VEC_alloc (tree, gc, length + 1);
3040 VEC_safe_grow_cleared (tree, gc, type_states, length + 1);
 3042 /* This pass figures out which PCs can be the targets of jumps. */
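/* The JAVAOP and PRE_* macros below, expanded via javaop.def, produce
   one switch case per opcode; each case merely consumes the
   instruction's immediate operands and calls NOTE_LABEL on any branch
   targets it finds.  */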
3043 for (PC = 0; PC < length;)
3045 int oldpc = PC; /* PC at instruction start. */
3046 instruction_bits [PC] |= BCODE_INSTRUCTION_START;
3047 switch (byte_ops[PC++])
3049 #define JAVAOP(OPNAME, OPCODE, OPKIND, OPERAND_TYPE, OPERAND_VALUE) \
3050 case OPCODE: \
3051 PRE_##OPKIND(OPERAND_TYPE, OPERAND_VALUE); \
3052 break;
3054 #define NOTE_LABEL(PC) note_label(oldpc, PC)
3056 #define PRE_PUSHC(OPERAND_TYPE, OPERAND_VALUE) (void)(OPERAND_VALUE);
3057 #define PRE_LOAD(OPERAND_TYPE, OPERAND_VALUE) (void)(OPERAND_VALUE);
3058 #define PRE_STORE(OPERAND_TYPE, OPERAND_VALUE) (void)(OPERAND_VALUE);
3059 #define PRE_STACK(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
3060 #define PRE_UNOP(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
3061 #define PRE_BINOP(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
3062 #define PRE_CONVERT(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
3063 #define PRE_CONVERT2(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
3065 #define PRE_SPECIAL(OPERAND_TYPE, INSTRUCTION) \
3066 PRE_SPECIAL_##INSTRUCTION(OPERAND_TYPE)
3067 #define PRE_SPECIAL_IINC(OPERAND_TYPE) \
3068 ((void) IMMEDIATE_u1, (void) IMMEDIATE_s1)
3069 #define PRE_SPECIAL_ENTER(IGNORE) /* nothing */
3070 #define PRE_SPECIAL_EXIT(IGNORE) /* nothing */
3071 #define PRE_SPECIAL_THROW(IGNORE) /* nothing */
3072 #define PRE_SPECIAL_BREAK(IGNORE) /* nothing */
3074 /* two forms of wide instructions */
3075 #define PRE_SPECIAL_WIDE(IGNORE) \
3077 int modified_opcode = IMMEDIATE_u1; \
3078 if (modified_opcode == OPCODE_iinc) \
3080 (void) IMMEDIATE_u2; /* indexbyte1 and indexbyte2 */ \
3081 (void) IMMEDIATE_s2; /* constbyte1 and constbyte2 */ \
3083 else \
3085 (void) IMMEDIATE_u2; /* indexbyte1 and indexbyte2 */ \
3089 #define PRE_IMPL(IGNORE1, IGNORE2) /* nothing */
3091 #define PRE_MONITOR(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
3093 #define PRE_RETURN(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
3094 #define PRE_ARRAY(OPERAND_TYPE, SUBOP) \
3095 PRE_ARRAY_##SUBOP(OPERAND_TYPE)
3096 #define PRE_ARRAY_LOAD(TYPE) /* nothing */
3097 #define PRE_ARRAY_STORE(TYPE) /* nothing */
3098 #define PRE_ARRAY_LENGTH(TYPE) /* nothing */
3099 #define PRE_ARRAY_NEW(TYPE) PRE_ARRAY_NEW_##TYPE
3100 #define PRE_ARRAY_NEW_NUM ((void) IMMEDIATE_u1)
3101 #define PRE_ARRAY_NEW_PTR ((void) IMMEDIATE_u2)
3102 #define PRE_ARRAY_NEW_MULTI ((void) IMMEDIATE_u2, (void) IMMEDIATE_u1)
3104 #define PRE_TEST(OPERAND_TYPE, OPERAND_VALUE) NOTE_LABEL (oldpc+IMMEDIATE_s2)
3105 #define PRE_COND(OPERAND_TYPE, OPERAND_VALUE) NOTE_LABEL (oldpc+IMMEDIATE_s2)
3106 #define PRE_BRANCH(OPERAND_TYPE, OPERAND_VALUE) \
3107 saw_index = 0; INT_temp = (OPERAND_VALUE); \
3108 if (!saw_index) NOTE_LABEL(oldpc + INT_temp);
3109 #define PRE_JSR(OPERAND_TYPE, OPERAND_VALUE) \
3110 saw_index = 0; INT_temp = (OPERAND_VALUE); \
3111 NOTE_LABEL (PC); \
3112 if (!saw_index) NOTE_LABEL(oldpc + INT_temp);
3114 #define PRE_RET(OPERAND_TYPE, OPERAND_VALUE) (void)(OPERAND_VALUE)
3116 #define PRE_SWITCH(OPERAND_TYPE, TABLE_OR_LOOKUP) \
3117 PC = (PC + 3) / 4 * 4; PRE_##TABLE_OR_LOOKUP##_SWITCH
3119 #define PRE_LOOKUP_SWITCH \
3120 { jint default_offset = IMMEDIATE_s4; jint npairs = IMMEDIATE_s4; \
3121 NOTE_LABEL (default_offset+oldpc); \
3122 if (npairs >= 0) \
3123 while (--npairs >= 0) { \
3124 jint match ATTRIBUTE_UNUSED = IMMEDIATE_s4; \
3125 jint offset = IMMEDIATE_s4; \
3126 NOTE_LABEL (offset+oldpc); } \
3129 #define PRE_TABLE_SWITCH \
3130 { jint default_offset = IMMEDIATE_s4; \
3131 jint low = IMMEDIATE_s4; jint high = IMMEDIATE_s4; \
3132 NOTE_LABEL (default_offset+oldpc); \
3133 if (low <= high) \
3134 while (low++ <= high) { \
3135 jint offset = IMMEDIATE_s4; \
3136 NOTE_LABEL (offset+oldpc); } \
3139 #define PRE_FIELD(MAYBE_STATIC, PUT_OR_GET) (void)(IMMEDIATE_u2);
3140 #define PRE_OBJECT(MAYBE_STATIC, PUT_OR_GET) (void)(IMMEDIATE_u2);
3141 #define PRE_INVOKE(MAYBE_STATIC, IS_INTERFACE) \
3142 (void)(IMMEDIATE_u2); \
3143 PC += 2 * IS_INTERFACE /* for invokeinterface */;
3145 #include "javaop.def"
3146 #undef JAVAOP
3148 } /* for */
3151 void
3152 expand_byte_code (JCF *jcf, tree method)
3154 int PC;
3155 int i;
3156 const unsigned char *linenumber_pointer;
3157 int dead_code_index = -1;
3158 unsigned char* byte_ops;
3159 long length = DECL_CODE_LENGTH (method);
3160 location_t max_location = input_location;
3162 stack_pointer = 0;
3163 JCF_SEEK (jcf, DECL_CODE_OFFSET (method));
3164 byte_ops = jcf->read_ptr;
3166 /* We make an initial pass of the line number table, to note
3167 which instructions have associated line number entries. */
3168 linenumber_pointer = linenumber_table;
3169 for (i = 0; i < linenumber_count; i++)
3171 int pc = GET_u2 (linenumber_pointer);
3172 linenumber_pointer += 4;
3173 if (pc >= length)
3174 warning (0, "invalid PC in line number table");
3175 else
3177 if ((instruction_bits[pc] & BCODE_HAS_LINENUMBER) != 0)
3178 instruction_bits[pc] |= BCODE_HAS_MULTI_LINENUMBERS;
3179 instruction_bits[pc] |= BCODE_HAS_LINENUMBER;
3183 if (! verify_jvm_instructions_new (jcf, byte_ops, length))
3184 return;
3186 promote_arguments ();
3187 cache_this_class_ref (method);
3188 cache_cpool_data_ref ();
3190 /* Translate bytecodes. */
3191 linenumber_pointer = linenumber_table;
3192 for (PC = 0; PC < length;)
3194 if ((instruction_bits [PC] & BCODE_TARGET) != 0 || PC == 0)
3196 tree label = lookup_label (PC);
3197 flush_quick_stack ();
3198 if ((instruction_bits [PC] & BCODE_TARGET) != 0)
3199 java_add_stmt (build1 (LABEL_EXPR, void_type_node, label));
3200 if ((instruction_bits[PC] & BCODE_VERIFIED) != 0)
3201 load_type_state (PC);
3204 if (! (instruction_bits [PC] & BCODE_VERIFIED))
3206 if (dead_code_index == -1)
3208 /* This is the start of a region of unreachable bytecodes.
3209 They still need to be processed in order for EH ranges
3210 to get handled correctly. However, we can simply
3211 replace these bytecodes with nops. */
3212 dead_code_index = PC;
3215 /* Turn this bytecode into a nop. */
3216 byte_ops[PC] = 0x0;
3218 else
3220 if (dead_code_index != -1)
3222 /* We've just reached the end of a region of dead code. */
3223 if (extra_warnings)
3224 warning (0, "unreachable bytecode from %d to before %d",
3225 dead_code_index, PC);
3226 dead_code_index = -1;
3230 /* Handle possible line number entry for this PC.
3232 This code handles out-of-order and multiple linenumbers per PC,
3233 but is optimized for the case of line numbers increasing
3234 monotonically with PC. */
3235 if ((instruction_bits[PC] & BCODE_HAS_LINENUMBER) != 0)
3237 if ((instruction_bits[PC] & BCODE_HAS_MULTI_LINENUMBERS) != 0
3238 || GET_u2 (linenumber_pointer) != PC)
3239 linenumber_pointer = linenumber_table;
3240 while (linenumber_pointer < linenumber_table + linenumber_count * 4)
3242 int pc = GET_u2 (linenumber_pointer);
3243 linenumber_pointer += 4;
3244 if (pc == PC)
3246 int line = GET_u2 (linenumber_pointer - 2);
3247 input_location = linemap_line_start (line_table, line, 1);
3248 if (input_location > max_location)
3249 max_location = input_location;
3250 if (!(instruction_bits[PC] & BCODE_HAS_MULTI_LINENUMBERS))
3251 break;
3255 maybe_pushlevels (PC);
3256 PC = process_jvm_instruction (PC, byte_ops, length);
3257 maybe_poplevels (PC);
3258 } /* for */
3260 uncache_this_class_ref (method);
3262 if (dead_code_index != -1)
3264 /* We've just reached the end of a region of dead code. */
3265 if (extra_warnings)
3266 warning (0, "unreachable bytecode from %d to the end of the method",
3267 dead_code_index);
3270 DECL_FUNCTION_LAST_LINE (method) = max_location;
3273 static void
3274 java_push_constant_from_pool (JCF *jcf, int index)
3276 tree c;
3277 if (JPOOL_TAG (jcf, index) == CONSTANT_String)
3279 tree name;
3280 name = get_name_constant (jcf, JPOOL_USHORT1 (jcf, index));
3281 index = alloc_name_constant (CONSTANT_String, name);
3282 c = build_ref_from_constant_pool (index);
3283 c = convert (promote_type (string_type_node), c);
3285 else if (JPOOL_TAG (jcf, index) == CONSTANT_Class
3286 || JPOOL_TAG (jcf, index) == CONSTANT_ResolvedClass)
3288 tree record = get_class_constant (jcf, index);
3289 c = build_class_ref (record);
3291 else
3292 c = get_constant (jcf, index);
3293 push_value (c);
 3296 int
 3297 process_jvm_instruction (int PC, const unsigned char* byte_ops,
3298 long length ATTRIBUTE_UNUSED)
3300 const char *opname; /* Temporary ??? */
3301 int oldpc = PC; /* PC at instruction start. */
3303 /* If the instruction is at the beginning of an exception handler,
3304 replace the top of the stack with the thrown object reference. */
3305 if (instruction_bits [PC] & BCODE_EXCEPTION_TARGET)
3307 /* Note that the verifier will not emit a type map at all for
3308 dead exception handlers. In this case we just ignore the
3309 situation. */
3310 if ((instruction_bits[PC] & BCODE_VERIFIED) != 0)
3312 tree type = pop_type (promote_type (throwable_type_node));
3313 push_value (build_exception_object_ref (type));
3317 switch (byte_ops[PC++])
3319 #define JAVAOP(OPNAME, OPCODE, OPKIND, OPERAND_TYPE, OPERAND_VALUE) \
3320 case OPCODE: \
3321 opname = #OPNAME; \
3322 OPKIND(OPERAND_TYPE, OPERAND_VALUE); \
3323 break;
3325 #define RET(OPERAND_TYPE, OPERAND_VALUE) \
3327 int saw_index = 0; \
3328 int index = OPERAND_VALUE; \
3329 build_java_ret \
3330 (find_local_variable (index, return_address_type_node, oldpc)); \
3333 #define JSR(OPERAND_TYPE, OPERAND_VALUE) \
3335 /* OPERAND_VALUE may have side-effects on PC */ \
3336 int opvalue = OPERAND_VALUE; \
3337 build_java_jsr (oldpc + opvalue, PC); \
3340 /* Push a constant onto the stack. */
3341 #define PUSHC(OPERAND_TYPE, OPERAND_VALUE) \
3342 { int saw_index = 0; int ival = (OPERAND_VALUE); \
3343 if (saw_index) java_push_constant_from_pool (current_jcf, ival); \
3344 else expand_java_pushc (ival, OPERAND_TYPE##_type_node); }
3346 /* internal macro added for use by the WIDE case */
3347 #define LOAD_INTERNAL(OPTYPE, OPVALUE) \
3348 expand_load_internal (OPVALUE, type_map[OPVALUE], oldpc);
3350 /* Push local variable onto the opcode stack. */
3351 #define LOAD(OPERAND_TYPE, OPERAND_VALUE) \
3353 /* have to do this since OPERAND_VALUE may have side-effects */ \
3354 int opvalue = OPERAND_VALUE; \
3355 LOAD_INTERNAL(OPERAND_TYPE##_type_node, opvalue); \
3358 #define RETURN(OPERAND_TYPE, OPERAND_VALUE) \
3359 expand_java_return (OPERAND_TYPE##_type_node)
3361 #define REM_EXPR TRUNC_MOD_EXPR
3362 #define BINOP(OPERAND_TYPE, OPERAND_VALUE) \
3363 expand_java_binop (OPERAND_TYPE##_type_node, OPERAND_VALUE##_EXPR)
3365 #define FIELD(IS_STATIC, IS_PUT) \
3366 expand_java_field_op (IS_STATIC, IS_PUT, IMMEDIATE_u2)
3368 #define TEST(OPERAND_TYPE, CONDITION) \
3369 expand_test (CONDITION##_EXPR, OPERAND_TYPE##_type_node, oldpc+IMMEDIATE_s2)
3371 #define COND(OPERAND_TYPE, CONDITION) \
3372 expand_cond (CONDITION##_EXPR, OPERAND_TYPE##_type_node, oldpc+IMMEDIATE_s2)
3374 #define BRANCH(OPERAND_TYPE, OPERAND_VALUE) \
3375 BRANCH_##OPERAND_TYPE (OPERAND_VALUE)
3377 #define BRANCH_GOTO(OPERAND_VALUE) \
3378 expand_java_goto (oldpc + OPERAND_VALUE)
3380 #define BRANCH_CALL(OPERAND_VALUE) \
3381 expand_java_call (oldpc + OPERAND_VALUE, oldpc)
3383 #if 0
3384 #define BRANCH_RETURN(OPERAND_VALUE) \
3386 tree type = OPERAND_TYPE##_type_node; \
3387 tree value = find_local_variable (OPERAND_VALUE, type, oldpc); \
3388 expand_java_ret (value); \
3390 #endif
3392 #define NOT_IMPL(OPERAND_TYPE, OPERAND_VALUE) \
3393 fprintf (stderr, "%3d: %s ", oldpc, opname); \
3394 fprintf (stderr, "(not implemented)\n")
3395 #define NOT_IMPL1(OPERAND_VALUE) \
3396 fprintf (stderr, "%3d: %s ", oldpc, opname); \
3397 fprintf (stderr, "(not implemented)\n")
3399 #define BRANCH_RETURN(OPERAND_VALUE) NOT_IMPL1(OPERAND_VALUE)
3401 #define STACK(SUBOP, COUNT) STACK_##SUBOP (COUNT)
3403 #define STACK_POP(COUNT) java_stack_pop (COUNT)
3405 #define STACK_SWAP(COUNT) java_stack_swap()
3407 #define STACK_DUP(COUNT) java_stack_dup (COUNT, 0)
3408 #define STACK_DUPx1(COUNT) java_stack_dup (COUNT, 1)
3409 #define STACK_DUPx2(COUNT) java_stack_dup (COUNT, 2)
3411 #define SWITCH(OPERAND_TYPE, TABLE_OR_LOOKUP) \
3412 PC = (PC + 3) / 4 * 4; TABLE_OR_LOOKUP##_SWITCH
3414 #define LOOKUP_SWITCH \
3415 { jint default_offset = IMMEDIATE_s4; jint npairs = IMMEDIATE_s4; \
3416 tree selector = pop_value (INT_type_node); \
3417 tree switch_expr = expand_java_switch (selector, oldpc + default_offset); \
3418 while (--npairs >= 0) \
3420 jint match = IMMEDIATE_s4; jint offset = IMMEDIATE_s4; \
3421 expand_java_add_case (switch_expr, match, oldpc + offset); \
3425 #define TABLE_SWITCH \
3426 { jint default_offset = IMMEDIATE_s4; \
3427 jint low = IMMEDIATE_s4; jint high = IMMEDIATE_s4; \
3428 tree selector = pop_value (INT_type_node); \
3429 tree switch_expr = expand_java_switch (selector, oldpc + default_offset); \
3430 for (; low <= high; low++) \
3432 jint offset = IMMEDIATE_s4; \
3433 expand_java_add_case (switch_expr, low, oldpc + offset); \
3437 #define INVOKE(MAYBE_STATIC, IS_INTERFACE) \
3438 { int opcode = byte_ops[PC-1]; \
3439 int method_ref_index = IMMEDIATE_u2; \
3440 int nargs; \
3441 if (IS_INTERFACE) { nargs = IMMEDIATE_u1; (void) IMMEDIATE_u1; } \
3442 else nargs = -1; \
3443 expand_invoke (opcode, method_ref_index, nargs); \
3446 /* Handle new, checkcast, instanceof */
3447 #define OBJECT(TYPE, OP) \
3448 expand_java_##OP (get_class_constant (current_jcf, IMMEDIATE_u2))
3450 #define ARRAY(OPERAND_TYPE, SUBOP) ARRAY_##SUBOP(OPERAND_TYPE)
3452 #define ARRAY_LOAD(OPERAND_TYPE) \
3454 expand_java_arrayload( OPERAND_TYPE##_type_node ); \
3457 #define ARRAY_STORE(OPERAND_TYPE) \
3459 expand_java_arraystore( OPERAND_TYPE##_type_node ); \
3462 #define ARRAY_LENGTH(OPERAND_TYPE) expand_java_array_length();
3463 #define ARRAY_NEW(OPERAND_TYPE) ARRAY_NEW_##OPERAND_TYPE()
3464 #define ARRAY_NEW_PTR() \
3465 push_value (build_anewarray (get_class_constant (current_jcf, \
3466 IMMEDIATE_u2), \
3467 pop_value (int_type_node)));
3468 #define ARRAY_NEW_NUM() \
3470 int atype = IMMEDIATE_u1; \
3471 push_value (build_newarray (atype, pop_value (int_type_node)));\
3473 #define ARRAY_NEW_MULTI() \
3475 tree klass = get_class_constant (current_jcf, IMMEDIATE_u2 ); \
3476 int ndims = IMMEDIATE_u1; \
3477 expand_java_multianewarray( klass, ndims ); \
3480 #define UNOP(OPERAND_TYPE, OPERAND_VALUE) \
3481 push_value (fold_build1 (NEGATE_EXPR, OPERAND_TYPE##_type_node, \
3482 pop_value (OPERAND_TYPE##_type_node)));
3484 #define CONVERT2(FROM_TYPE, TO_TYPE) \
3486 push_value (build1 (NOP_EXPR, int_type_node, \
3487 (convert (TO_TYPE##_type_node, \
3488 pop_value (FROM_TYPE##_type_node))))); \
3491 #define CONVERT(FROM_TYPE, TO_TYPE) \
3493 push_value (convert (TO_TYPE##_type_node, \
3494 pop_value (FROM_TYPE##_type_node))); \
3497 /* internal macro added for use by the WIDE case
3498 Added TREE_TYPE (decl) assignment, apbianco */
3499 #define STORE_INTERNAL(OPTYPE, OPVALUE) \
3501 tree decl, value; \
3502 int index = OPVALUE; \
3503 tree type = OPTYPE; \
3504 value = pop_value (type); \
3505 type = TREE_TYPE (value); \
3506 decl = find_local_variable (index, type, oldpc); \
3507 set_local_type (index, type); \
3508 java_add_stmt (build2 (MODIFY_EXPR, type, decl, value)); \
3511 #define STORE(OPERAND_TYPE, OPERAND_VALUE) \
3513 /* have to do this since OPERAND_VALUE may have side-effects */ \
3514 int opvalue = OPERAND_VALUE; \
3515 STORE_INTERNAL(OPERAND_TYPE##_type_node, opvalue); \
3518 #define SPECIAL(OPERAND_TYPE, INSTRUCTION) \
3519 SPECIAL_##INSTRUCTION(OPERAND_TYPE)
3521 #define SPECIAL_ENTER(IGNORED) MONITOR_OPERATION (soft_monitorenter_node)
3522 #define SPECIAL_EXIT(IGNORED) MONITOR_OPERATION (soft_monitorexit_node)
3524 #define MONITOR_OPERATION(call) \
3526 tree o = pop_value (ptr_type_node); \
3527 tree c; \
3528 flush_quick_stack (); \
3529 c = build_java_monitor (call, o); \
3530 TREE_SIDE_EFFECTS (c) = 1; \
3531 java_add_stmt (c); \
3534 #define SPECIAL_IINC(IGNORED) \
3536 unsigned int local_var_index = IMMEDIATE_u1; \
3537 int ival = IMMEDIATE_s1; \
3538 expand_iinc(local_var_index, ival, oldpc); \
3541 #define SPECIAL_WIDE(IGNORED) \
3543 int modified_opcode = IMMEDIATE_u1; \
3544 unsigned int local_var_index = IMMEDIATE_u2; \
3545 switch (modified_opcode) \
3547 case OPCODE_iinc: \
3549 int ival = IMMEDIATE_s2; \
3550 expand_iinc (local_var_index, ival, oldpc); \
3551 break; \
3553 case OPCODE_iload: \
3554 case OPCODE_lload: \
3555 case OPCODE_fload: \
3556 case OPCODE_dload: \
3557 case OPCODE_aload: \
3559 /* duplicate code from LOAD macro */ \
3560 LOAD_INTERNAL(operand_type[modified_opcode], local_var_index); \
3561 break; \
3563 case OPCODE_istore: \
3564 case OPCODE_lstore: \
3565 case OPCODE_fstore: \
3566 case OPCODE_dstore: \
3567 case OPCODE_astore: \
3569 STORE_INTERNAL(operand_type[modified_opcode], local_var_index); \
3570 break; \
3572 default: \
 3573 error ("unrecognized wide sub-instruction"); \
3577 #define SPECIAL_THROW(IGNORED) \
3578 build_java_athrow (pop_value (throwable_type_node))
3580 #define SPECIAL_BREAK NOT_IMPL1
3581 #define IMPL NOT_IMPL
3583 #include "javaop.def"
3584 #undef JAVAOP
3585 default:
3586 fprintf (stderr, "%3d: unknown(%3d)\n", oldpc, byte_ops[PC]);
3588 return PC;
3591 /* Return the opcode at PC in the code section pointed to by
3592 CODE_OFFSET. */
3594 static unsigned char
3595 peek_opcode_at_pc (JCF *jcf, int code_offset, int pc)
3597 unsigned char opcode;
3598 long absolute_offset = (long)JCF_TELL (jcf);
3600 JCF_SEEK (jcf, code_offset);
3601 opcode = jcf->read_ptr [pc];
3602 JCF_SEEK (jcf, absolute_offset);
3603 return opcode;
3606 /* Some bytecode compilers are emitting accurate LocalVariableTable
3607 attributes. Here's an example:
3609 PC <t>store_<n>
3610 PC+1 ...
3612 Attribute "LocalVariableTable"
3613 slot #<n>: ... (PC: PC+1 length: L)
3615 This is accurate because the local in slot <n> really exists after
3616 the opcode at PC is executed, hence from PC+1 to PC+1+L.
3618 This procedure recognizes this situation and extends the live range
3619 of the local in SLOT to START_PC-1 or START_PC-2 (depending on the
 3620 length of the store instruction).
3622 This function is used by `give_name_to_locals' so that a local's
3623 DECL features a DECL_LOCAL_START_PC such that the first related
3624 store operation will use DECL as a destination, not an unrelated
3625 temporary created for the occasion.
 3627 This function uses a global (instruction_bits) that `note_instructions'
 3628 should have allocated and filled properly. */
 3630 int
 3631 maybe_adjust_start_pc (struct JCF *jcf, int code_offset,
3632 int start_pc, int slot)
3634 int first, index, opcode;
3635 int pc, insn_pc;
3636 int wide_found = 0;
3638 if (!start_pc)
3639 return start_pc;
3641 first = index = -1;
3643 /* Find last previous instruction and remember it */
3644 for (pc = start_pc-1; pc; pc--)
3645 if (instruction_bits [pc] & BCODE_INSTRUCTION_START)
3646 break;
3647 insn_pc = pc;
3649 /* Retrieve the instruction, handle `wide'. */
3650 opcode = (int) peek_opcode_at_pc (jcf, code_offset, pc++);
3651 if (opcode == OPCODE_wide)
3653 wide_found = 1;
3654 opcode = (int) peek_opcode_at_pc (jcf, code_offset, pc++);
3657 switch (opcode)
3659 case OPCODE_astore_0:
3660 case OPCODE_astore_1:
3661 case OPCODE_astore_2:
3662 case OPCODE_astore_3:
3663 first = OPCODE_astore_0;
3664 break;
3666 case OPCODE_istore_0:
3667 case OPCODE_istore_1:
3668 case OPCODE_istore_2:
3669 case OPCODE_istore_3:
3670 first = OPCODE_istore_0;
3671 break;
3673 case OPCODE_lstore_0:
3674 case OPCODE_lstore_1:
3675 case OPCODE_lstore_2:
3676 case OPCODE_lstore_3:
3677 first = OPCODE_lstore_0;
3678 break;
3680 case OPCODE_fstore_0:
3681 case OPCODE_fstore_1:
3682 case OPCODE_fstore_2:
3683 case OPCODE_fstore_3:
3684 first = OPCODE_fstore_0;
3685 break;
3687 case OPCODE_dstore_0:
3688 case OPCODE_dstore_1:
3689 case OPCODE_dstore_2:
3690 case OPCODE_dstore_3:
3691 first = OPCODE_dstore_0;
3692 break;
3694 case OPCODE_astore:
3695 case OPCODE_istore:
3696 case OPCODE_lstore:
3697 case OPCODE_fstore:
3698 case OPCODE_dstore:
3699 index = peek_opcode_at_pc (jcf, code_offset, pc);
3700 if (wide_found)
3702 int other = peek_opcode_at_pc (jcf, code_offset, ++pc);
3703 index = (other << 8) + index;
3705 break;
3708 /* Now we decide: first >0 means we have a <t>store_<n>, index >0
3709 means we have a <t>store. */
3710 if ((first > 0 && opcode - first == slot) || (index > 0 && index == slot))
3711 start_pc = insn_pc;
3713 return start_pc;
3716 /* Force the (direct) sub-operands of NODE to be evaluated in left-to-right
 3717 order, as specified by the Java Language Specification.
3719 The problem is that while expand_expr will evaluate its sub-operands in
3720 left-to-right order, for variables it will just return an rtx (i.e.
3721 an lvalue) for the variable (rather than an rvalue). So it is possible
3722 that a later sub-operand will change the register, and when the
3723 actual operation is done, it will use the new value, when it should
3724 have used the original value.
3726 We fix this by using save_expr. This forces the sub-operand to be
3727 copied into a fresh virtual register,
3729 For method invocation, we modify the arguments so that a
3730 left-to-right order evaluation is performed. Saved expressions
3731 will, in CALL_EXPR order, be reused when the call will be expanded.
3733 We also promote outgoing args if needed. */

tree
force_evaluation_order (tree node)
{
  if (flag_syntax_only)
    return node;
  if (TREE_CODE (node) == CALL_EXPR
      || (TREE_CODE (node) == COMPOUND_EXPR
          && TREE_CODE (TREE_OPERAND (node, 0)) == CALL_EXPR
          && TREE_CODE (TREE_OPERAND (node, 1)) == SAVE_EXPR))
    {
      tree call, cmp;
      int i, nargs;

      /* Account for wrapped around ctors.  */
      if (TREE_CODE (node) == COMPOUND_EXPR)
        call = TREE_OPERAND (node, 0);
      else
        call = node;

      nargs = call_expr_nargs (call);

      /* This reverses the evaluation order.  This is a desired effect.  */
      for (i = 0, cmp = NULL_TREE; i < nargs; i++)
        {
          tree arg = CALL_EXPR_ARG (call, i);
          /* Promote types smaller than integer.  This is required by
             some ABIs.  */
          tree type = TREE_TYPE (arg);
          tree saved;
          if (targetm.calls.promote_prototypes (type)
              && INTEGRAL_TYPE_P (type)
              && INT_CST_LT_UNSIGNED (TYPE_SIZE (type),
                                      TYPE_SIZE (integer_type_node)))
            arg = fold_convert (integer_type_node, arg);

          saved = save_expr (force_evaluation_order (arg));
          cmp = (cmp == NULL_TREE ? saved :
                 build2 (COMPOUND_EXPR, void_type_node, cmp, saved));

          CALL_EXPR_ARG (call, i) = saved;
        }

      if (cmp && TREE_CODE (cmp) == COMPOUND_EXPR)
        TREE_SIDE_EFFECTS (cmp) = 1;

      if (cmp)
        {
          cmp = build2 (COMPOUND_EXPR, TREE_TYPE (node), cmp, node);
          if (TREE_TYPE (cmp) != void_type_node)
            cmp = save_expr (cmp);
          TREE_SIDE_EFFECTS (cmp) = 1;
          node = cmp;
        }
    }
  return node;
}
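
/* To see what the transformation above does, consider a call written as

       foo (a (), b ());

   Each argument is wrapped in a SAVE_EXPR, the SAVE_EXPRs are chained
   into a COMPOUND_EXPR in argument order, and the chain is prepended to
   the CALL_EXPR, giving roughly

       (s1 = a (), s2 = b ()), foo (s1, s2)

   so the side effects of the arguments happen left to right before the
   call itself is expanded.  (Informal notation; s1 and s2 stand for the
   SAVE_EXPRs, not real temporaries in the source.)  */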

/* Build a node to represent empty statements and blocks.  */

tree
build_java_empty_stmt (void)
{
  tree t = build_empty_stmt ();
  return t;
}

/* Promote all args of integral type before generating any code.  */

static void
promote_arguments (void)
{
  int i;
  tree arg;
  for (arg = DECL_ARGUMENTS (current_function_decl), i = 0;
       arg != NULL_TREE; arg = TREE_CHAIN (arg), i++)
    {
      tree arg_type = TREE_TYPE (arg);
      if (INTEGRAL_TYPE_P (arg_type)
          && TYPE_PRECISION (arg_type) < 32)
        {
          tree copy = find_local_variable (i, integer_type_node, -1);
          java_add_stmt (build2 (MODIFY_EXPR, integer_type_node,
                                 copy,
                                 fold_convert (integer_type_node, arg)));
        }
      if (TYPE_IS_WIDE (arg_type))
        i++;
    }
}
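
/* For instance (informal sketch), given a method

       static int f (short s) { return s + 1; }

   the parameter type is narrower than 32 bits, so the loop above emits,
   at the start of the function, something equivalent to

       <slot 0 as int> = (int) s;

   i.e. the int-typed local that find_local_variable returns for slot 0 is
   initialized from the promoted argument; wide types (long, double) take
   two slots, hence the extra increment of i.  */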

/* Create a local variable that points to the constant pool.  */

static void
cache_cpool_data_ref (void)
{
  if (optimize)
    {
      tree cpool;
      tree d = build_constant_data_ref (flag_indirect_classes);
      tree cpool_ptr = build_decl (VAR_DECL, NULL_TREE,
                                   build_pointer_type (TREE_TYPE (d)));
      java_add_local_var (cpool_ptr);
      TREE_CONSTANT (cpool_ptr) = 1;

      java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (cpool_ptr),
                             cpool_ptr, build_address_of (d)));
      cpool = build1 (INDIRECT_REF, TREE_TYPE (d), cpool_ptr);
      TREE_THIS_NOTRAP (cpool) = 1;
      TYPE_CPOOL_DATA_REF (output_class) = cpool;
    }
}
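
/* What the function above arranges for, informally: when optimizing, a
   method body gets a pointer-typed local initialized once to the address
   of the class's constant-pool data, conceptually

       cpool_ptr = &<constant pool data>;

   and TYPE_CPOOL_DATA_REF (output_class) is set to the dereference
   *cpool_ptr (marked TREE_THIS_NOTRAP), so later code in the method that
   needs the constant pool can go through this cached pointer.  */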

#include "gt-java-expr.h"