gcc/java/expr.c
1 /* Process expressions for the GNU compiler for the Java(TM) language.
2 Copyright (C) 1996-2013 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>.
20 Java and all Java-based marks are trademarks or registered trademarks
21 of Sun Microsystems, Inc. in the United States and other countries.
22 The Free Software Foundation is independent of Sun Microsystems, Inc. */
24 /* Hacked by Per Bothner <bothner@cygnus.com> February 1996. */
26 #include "config.h"
27 #include "system.h"
28 #include "coretypes.h"
29 #include "tm.h" /* For INT_TYPE_SIZE,
30 TARGET_VTABLE_USES_DESCRIPTORS,
31 BITS_PER_UNIT,
32 MODIFY_JNI_METHOD_CALL and
33 PARM_BOUNDARY. */
35 #include "tree.h"
36 #include "stringpool.h"
37 #include "stor-layout.h"
38 #include "flags.h"
39 #include "java-tree.h"
40 #include "javaop.h"
41 #include "java-opcodes.h"
42 #include "jcf.h"
43 #include "java-except.h"
44 #include "parse.h"
45 #include "diagnostic-core.h"
46 #include "ggc.h"
47 #include "tree-iterator.h"
48 #include "target.h"
50 static void flush_quick_stack (void);
51 static void push_value (tree);
52 static tree pop_value (tree);
53 static void java_stack_swap (void);
54 static void java_stack_dup (int, int);
55 static void build_java_athrow (tree);
56 static void build_java_jsr (int, int);
57 static void build_java_ret (tree);
58 static void expand_java_multianewarray (tree, int);
59 static void expand_java_arraystore (tree);
60 static void expand_java_arrayload (tree);
61 static void expand_java_array_length (void);
62 static tree build_java_monitor (tree, tree);
63 static void expand_java_pushc (int, tree);
64 static void expand_java_return (tree);
65 static void expand_load_internal (int, tree, int);
66 static void expand_java_NEW (tree);
67 static void expand_java_INSTANCEOF (tree);
68 static void expand_java_CHECKCAST (tree);
69 static void expand_iinc (unsigned int, int, int);
70 static void expand_java_binop (tree, enum tree_code);
71 static void note_label (int, int);
72 static void expand_compare (enum tree_code, tree, tree, int);
73 static void expand_test (enum tree_code, tree, int);
74 static void expand_cond (enum tree_code, tree, int);
75 static void expand_java_goto (int);
76 static tree expand_java_switch (tree, int);
77 static void expand_java_add_case (tree, int, int);
78 static vec<tree, va_gc> *pop_arguments (tree);
79 static void expand_invoke (int, int, int);
80 static void expand_java_field_op (int, int, int);
81 static void java_push_constant_from_pool (struct JCF *, int);
82 static void java_stack_pop (int);
83 static tree build_java_throw_out_of_bounds_exception (tree);
84 static tree build_java_check_indexed_type (tree, tree);
85 static unsigned char peek_opcode_at_pc (struct JCF *, int, int);
86 static void promote_arguments (void);
87 static void cache_cpool_data_ref (void);
89 static GTY(()) tree operand_type[59];
91 static GTY(()) tree methods_ident;
92 static GTY(()) tree ncode_ident;
93 tree dtable_ident = NULL_TREE;
95 /* Set to nonzero value in order to emit class initialization code
96 before static field references. */
97 int always_initialize_class_p = 0;
99 /* We store the stack state in two places:
100 Within a basic block, we use the quick_stack, which is a vec of expression
101 nodes.
102 This is the top part of the stack; below that we use find_stack_slot.
103 At the end of a basic block, the quick_stack must be flushed
104 to the stack slot array (as handled by find_stack_slot).
105 Using quick_stack generates better code (especially when
106 compiled without optimization), because we do not have to
107 explicitly store and load trees to temporary variables.
109 If a variable is on the quick stack, it represents the value the variable
110 had when the quick stack was last flushed.  Conceptually, flush_quick_stack
111 saves all the quick_stack elements in parallel.  However, that is
112 complicated, so it actually saves them (i.e. copies each stack value
113 to its home virtual register) from low indexes upwards.  This allows a
114 quick_stack element at index i (counting from the bottom of the stack) to
115 refer to the home slots of stack positions >= i, but not those that are deeper.
116 This convention makes most operations easier.  For example, iadd works
117 even when the stack contains (reg[0], reg[1]): it results in the
118 stack containing (reg[0]+reg[1]), which is OK.  However, some stack
119 operations are more complicated.  For example, dup given a stack
120 containing (reg[0]) would yield (reg[0], reg[0]), which would violate
121 the convention, since stack value 1 would refer to a register with a
122 lower index (reg[0]), which flush_quick_stack does not safely handle.
123 So dup cannot just add an extra element to the quick_stack, but iadd can.  */
126 static GTY(()) vec<tree, va_gc> *quick_stack;
128 /* The physical memory page size used in this computer. See
129 build_field_ref(). */
130 static GTY(()) tree page_size;
132 /* The stack pointer of the Java virtual machine.
133 This does include the size of the quick_stack.  */
135 int stack_pointer;
137 const unsigned char *linenumber_table;
138 int linenumber_count;
140 /* Largest pc so far in this method that has been passed to lookup_label. */
141 int highest_label_pc_this_method = -1;
143 /* Base value for this method to add to pc to get generated label. */
144 int start_label_pc_this_method = 0;
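/* Initialize operand_type[], which is indexed by bytecode opcode: entries
   21-25 are the iload/lload/fload/dload/aload opcodes and entries 54-58 are
   istore/lstore/fstore/dstore/astore, each mapped to the type it loads or
   stores.  */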
146 void
147 init_expr_processing (void)
149 operand_type[21] = operand_type[54] = int_type_node;
150 operand_type[22] = operand_type[55] = long_type_node;
151 operand_type[23] = operand_type[56] = float_type_node;
152 operand_type[24] = operand_type[57] = double_type_node;
153 operand_type[25] = operand_type[58] = ptr_type_node;
156 tree
157 java_truthvalue_conversion (tree expr)
159 /* It is simpler and generates better code to have only TRUTH_*_EXPR
160 or comparison expressions as truth values at this level.
162 This function should normally be identity for Java. */
164 switch (TREE_CODE (expr))
166 case EQ_EXPR: case NE_EXPR: case UNEQ_EXPR: case LTGT_EXPR:
167 case LE_EXPR: case GE_EXPR: case LT_EXPR: case GT_EXPR:
168 case UNLE_EXPR: case UNGE_EXPR: case UNLT_EXPR: case UNGT_EXPR:
169 case ORDERED_EXPR: case UNORDERED_EXPR:
170 case TRUTH_ANDIF_EXPR:
171 case TRUTH_ORIF_EXPR:
172 case TRUTH_AND_EXPR:
173 case TRUTH_OR_EXPR:
174 case TRUTH_XOR_EXPR:
175 case TRUTH_NOT_EXPR:
176 case ERROR_MARK:
177 return expr;
179 case INTEGER_CST:
180 return integer_zerop (expr) ? boolean_false_node : boolean_true_node;
182 case REAL_CST:
183 return real_zerop (expr) ? boolean_false_node : boolean_true_node;
185 /* are these legal? XXX JH */
186 case NEGATE_EXPR:
187 case ABS_EXPR:
188 case FLOAT_EXPR:
189 /* These don't change whether an object is nonzero or zero. */
190 return java_truthvalue_conversion (TREE_OPERAND (expr, 0));
192 case COND_EXPR:
193 /* Distribute the conversion into the arms of a COND_EXPR. */
194 return fold_build3 (COND_EXPR, boolean_type_node, TREE_OPERAND (expr, 0),
195 java_truthvalue_conversion (TREE_OPERAND (expr, 1)),
196 java_truthvalue_conversion (TREE_OPERAND (expr, 2)));
198 case NOP_EXPR:
199 /* If this is widening the argument, we can ignore it. */
200 if (TYPE_PRECISION (TREE_TYPE (expr))
201 >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr, 0))))
202 return java_truthvalue_conversion (TREE_OPERAND (expr, 0));
203 /* fall through to default */
205 default:
206 return fold_build2 (NE_EXPR, boolean_type_node,
207 expr, boolean_false_node);
211 /* Save any stack slots that happen to be in the quick_stack into their
212 home virtual register slots.
214 The copy order is from low stack index to high, to support the invariant
215 that the expression for a slot may contain decls for stack slots with
216 higher (or the same) index, but not lower. */
218 static void
219 flush_quick_stack (void)
221 int stack_index = stack_pointer;
222 unsigned ix;
223 tree t;
225 /* Count the number of slots the quick stack is holding. */
226 for (ix = 0; vec_safe_iterate (quick_stack, ix, &t); ix++)
227 stack_index -= 1 + TYPE_IS_WIDE (TREE_TYPE (t));
229 for (ix = 0; vec_safe_iterate (quick_stack, ix, &t); ix++)
231 tree decl, type = TREE_TYPE (t);
233 decl = find_stack_slot (stack_index, type);
234 if (decl != t)
235 java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (t), decl, t));
236 stack_index += 1 + TYPE_IS_WIDE (type);
239 vec_safe_truncate (quick_stack, 0);
242 /* Push TYPE on the type stack.
243 Return 1 on success, 0 on overflow.  */
245 int
246 push_type_0 (tree type)
248 int n_words;
249 type = promote_type (type);
250 n_words = 1 + TYPE_IS_WIDE (type);
251 if (stack_pointer + n_words > DECL_MAX_STACK (current_function_decl))
252 return 0;
253 /* Allocate decl for this variable now, so we get a temporary that
254 survives the whole method. */
255 find_stack_slot (stack_pointer, type);
256 stack_type_map[stack_pointer++] = type;
257 n_words--;
258 while (--n_words >= 0)
259 stack_type_map[stack_pointer++] = TYPE_SECOND;
260 return 1;
263 void
264 push_type (tree type)
266 int r = push_type_0 (type);
267 gcc_assert (r);
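/* Push VALUE onto the quick stack and its type onto the type stack,
   first promoting integral types narrower than int to int.  */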
270 static void
271 push_value (tree value)
273 tree type = TREE_TYPE (value);
274 if (TYPE_PRECISION (type) < 32 && INTEGRAL_TYPE_P (type))
276 type = promote_type (type);
277 value = convert (type, value);
279 push_type (type);
280 vec_safe_push (quick_stack, value);
282 /* If the value has a side effect, then we need to evaluate it
283 whether or not the result is used. If the value ends up on the
284 quick stack and is then popped, this won't happen -- so we flush
285 the quick stack. It is safest to simply always flush, though,
286 since TREE_SIDE_EFFECTS doesn't capture COMPONENT_REF, and for
287 the latter we may need to strip conversions. */
288 flush_quick_stack ();
291 /* Pop a type from the type stack.
292 TYPE is the expected type. Return the actual type, which must be
293 convertible to TYPE.
294 On an error, *MESSAGEP is set to a freshly malloc'd error message. */
296 tree
297 pop_type_0 (tree type, char **messagep)
299 int n_words;
300 tree t;
301 *messagep = NULL;
302 if (TREE_CODE (type) == RECORD_TYPE)
303 type = promote_type (type);
304 n_words = 1 + TYPE_IS_WIDE (type);
305 if (stack_pointer < n_words)
307 *messagep = xstrdup ("stack underflow");
308 return type;
310 while (--n_words > 0)
312 if (stack_type_map[--stack_pointer] != void_type_node)
314 *messagep = xstrdup ("Invalid multi-word value on type stack");
315 return type;
318 t = stack_type_map[--stack_pointer];
319 if (type == NULL_TREE || t == type)
320 return t;
321 if (TREE_CODE (t) == TREE_LIST)
325 tree tt = TREE_PURPOSE (t);
326 if (! can_widen_reference_to (tt, type))
328 t = tt;
329 goto fail;
331 t = TREE_CHAIN (t);
333 while (t);
334 return t;
336 if (INTEGRAL_TYPE_P (type) && INTEGRAL_TYPE_P (t)
337 && TYPE_PRECISION (type) <= 32 && TYPE_PRECISION (t) <= 32)
338 return t;
339 if (TREE_CODE (type) == POINTER_TYPE && TREE_CODE (t) == POINTER_TYPE)
341 /* If the expected type we've been passed is object or ptr
342 (i.e. void*), the caller needs to know the real type. */
343 if (type == ptr_type_node || type == object_ptr_type_node)
344 return t;
346 /* Since the verifier has already run, we know that any
347 types we see will be compatible. In BC mode, this fact
348 may be checked at runtime, but if that is so then we can
349 assume its truth here as well. So, we always succeed
350 here, with the expected type. */
351 return type;
354 if (! flag_verify_invocations && flag_indirect_dispatch
355 && t == object_ptr_type_node)
357 if (type != ptr_type_node)
358 warning (0, "need to insert runtime check for %s",
359 xstrdup (lang_printable_name (type, 0)));
360 return type;
363 /* lang_printable_name uses a static buffer, so we must save the result
364 from calling it the first time. */
365 fail:
367 char *temp = xstrdup (lang_printable_name (type, 0));
368 /* If the stack contains a multi-word type, keep popping the stack until
369 the real type is found. */
370 while (t == void_type_node)
371 t = stack_type_map[--stack_pointer];
372 *messagep = concat ("expected type '", temp,
373 "' but stack contains '", lang_printable_name (t, 0),
374 "'", NULL);
375 free (temp);
377 return type;
380 /* Pop a type from the type stack.
381 TYPE is the expected type. Return the actual type, which must be
382 convertible to TYPE, otherwise call error. */
384 tree
385 pop_type (tree type)
387 char *message = NULL;
388 type = pop_type_0 (type, &message);
389 if (message != NULL)
391 error ("%s", message);
392 free (message);
394 return type;
398 /* Return true if two type assertions are equal. */
400 static int
401 type_assertion_eq (const void * k1_p, const void * k2_p)
403 const type_assertion k1 = *(const type_assertion *)k1_p;
404 const type_assertion k2 = *(const type_assertion *)k2_p;
405 return (k1.assertion_code == k2.assertion_code
406 && k1.op1 == k2.op1
407 && k1.op2 == k2.op2);
410 /* Hash a type assertion. */
412 static hashval_t
413 type_assertion_hash (const void *p)
415 const type_assertion *k_p = (const type_assertion *) p;
416 hashval_t hash = iterative_hash (&k_p->assertion_code, sizeof
417 k_p->assertion_code, 0);
419 switch (k_p->assertion_code)
421 case JV_ASSERT_TYPES_COMPATIBLE:
422 hash = iterative_hash (&TYPE_UID (k_p->op2), sizeof TYPE_UID (k_p->op2),
423 hash);
424 /* Fall through. */
426 case JV_ASSERT_IS_INSTANTIABLE:
427 hash = iterative_hash (&TYPE_UID (k_p->op1), sizeof TYPE_UID (k_p->op1),
428 hash);
429 /* Fall through. */
431 case JV_ASSERT_END_OF_TABLE:
432 break;
434 default:
435 gcc_unreachable ();
438 return hash;
441 /* Add an entry to the type assertion table for the given class.
442 KLASS is the class for which this assertion will be evaluated by the
443 runtime during loading/initialization.
444 ASSERTION_CODE is the 'opcode' or type of this assertion: see java-tree.h.
445 OP1 and OP2 are the operands. The tree type of these arguments may be
446 specific to each assertion_code. */
448 void
449 add_type_assertion (tree klass, int assertion_code, tree op1, tree op2)
451 htab_t assertions_htab;
452 type_assertion as;
453 void **as_pp;
455 assertions_htab = TYPE_ASSERTIONS (klass);
456 if (assertions_htab == NULL)
458 assertions_htab = htab_create_ggc (7, type_assertion_hash,
459 type_assertion_eq, NULL);
460 TYPE_ASSERTIONS (current_class) = assertions_htab;
463 as.assertion_code = assertion_code;
464 as.op1 = op1;
465 as.op2 = op2;
467 as_pp = htab_find_slot (assertions_htab, &as, INSERT);
469 /* Don't add the same assertion twice. */
470 if (*as_pp)
471 return;
473 *as_pp = ggc_alloc_type_assertion ();
474 **(type_assertion **)as_pp = as;
478 /* Return 1 if SOURCE_TYPE can be safely widened to TARGET_TYPE.
479 Handles array types and interfaces. */
481 int
482 can_widen_reference_to (tree source_type, tree target_type)
484 if (source_type == ptr_type_node || target_type == object_ptr_type_node)
485 return 1;
487 /* Get rid of pointers */
488 if (TREE_CODE (source_type) == POINTER_TYPE)
489 source_type = TREE_TYPE (source_type);
490 if (TREE_CODE (target_type) == POINTER_TYPE)
491 target_type = TREE_TYPE (target_type);
493 if (source_type == target_type)
494 return 1;
496 /* FIXME: This is very pessimistic, in that it checks everything,
497 even if we already know that the types are compatible. If we're
498 to support full Java class loader semantics, we need this.
499 However, we could do something more optimal. */
500 if (! flag_verify_invocations)
502 add_type_assertion (current_class, JV_ASSERT_TYPES_COMPATIBLE,
503 source_type, target_type);
505 if (!quiet_flag)
506 warning (0, "assert: %s is assign compatible with %s",
507 xstrdup (lang_printable_name (target_type, 0)),
508 xstrdup (lang_printable_name (source_type, 0)));
509 /* Punt everything to runtime. */
510 return 1;
513 if (TYPE_DUMMY (source_type) || TYPE_DUMMY (target_type))
515 return 1;
517 else
519 if (TYPE_ARRAY_P (source_type) || TYPE_ARRAY_P (target_type))
521 HOST_WIDE_INT source_length, target_length;
522 if (TYPE_ARRAY_P (source_type) != TYPE_ARRAY_P (target_type))
524 /* An array implements Cloneable and Serializable. */
525 tree name = DECL_NAME (TYPE_NAME (target_type));
526 return (name == java_lang_cloneable_identifier_node
527 || name == java_io_serializable_identifier_node);
529 target_length = java_array_type_length (target_type);
530 if (target_length >= 0)
532 source_length = java_array_type_length (source_type);
533 if (source_length != target_length)
534 return 0;
536 source_type = TYPE_ARRAY_ELEMENT (source_type);
537 target_type = TYPE_ARRAY_ELEMENT (target_type);
538 if (source_type == target_type)
539 return 1;
540 if (TREE_CODE (source_type) != POINTER_TYPE
541 || TREE_CODE (target_type) != POINTER_TYPE)
542 return 0;
543 return can_widen_reference_to (source_type, target_type);
545 else
547 int source_depth = class_depth (source_type);
548 int target_depth = class_depth (target_type);
550 if (TYPE_DUMMY (source_type) || TYPE_DUMMY (target_type))
552 if (! quiet_flag)
553 warning (0, "assert: %s is assign compatible with %s",
554 xstrdup (lang_printable_name (target_type, 0)),
555 xstrdup (lang_printable_name (source_type, 0)));
556 return 1;
559 /* class_depth can return a negative depth if an error occurred */
560 if (source_depth < 0 || target_depth < 0)
561 return 0;
563 if (CLASS_INTERFACE (TYPE_NAME (target_type)))
565 /* target_type is OK if source_type or one of source_type's ancestors
566 implements target_type.  We handle multiple sub-interfaces.  */
567 tree binfo, base_binfo;
568 int i;
570 for (binfo = TYPE_BINFO (source_type), i = 0;
571 BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
572 if (can_widen_reference_to
573 (BINFO_TYPE (base_binfo), target_type))
574 return 1;
576 if (!i)
577 return 0;
580 for ( ; source_depth > target_depth; source_depth--)
582 source_type
583 = BINFO_TYPE (BINFO_BASE_BINFO (TYPE_BINFO (source_type), 0));
585 return source_type == target_type;
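/* Pop a value of expected type TYPE: return the expression from the quick
   stack if one is available, otherwise the home stack slot decl for the
   popped position.  */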
590 static tree
591 pop_value (tree type)
593 type = pop_type (type);
594 if (vec_safe_length (quick_stack) != 0)
595 return quick_stack->pop ();
596 else
597 return find_stack_slot (stack_pointer, promote_type (type));
601 /* Pop and discard the top COUNT stack slots. */
603 static void
604 java_stack_pop (int count)
606 while (count > 0)
608 tree type;
610 gcc_assert (stack_pointer != 0);
612 type = stack_type_map[stack_pointer - 1];
613 if (type == TYPE_SECOND)
615 count--;
616 gcc_assert (stack_pointer != 1 && count > 0);
618 type = stack_type_map[stack_pointer - 2];
620 pop_value (type);
621 count--;
625 /* Implement the 'swap' operator (to swap the two top stack slots). */
627 static void
628 java_stack_swap (void)
630 tree type1, type2;
631 tree temp;
632 tree decl1, decl2;
634 if (stack_pointer < 2
635 || (type1 = stack_type_map[stack_pointer - 1]) == TYPE_SECOND
636 || (type2 = stack_type_map[stack_pointer - 2]) == TYPE_SECOND
637 || TYPE_IS_WIDE (type1) || TYPE_IS_WIDE (type2))
638 /* Bad stack swap. */
639 abort ();
642 flush_quick_stack ();
643 decl1 = find_stack_slot (stack_pointer - 1, type1);
644 decl2 = find_stack_slot (stack_pointer - 2, type2);
645 temp = build_decl (input_location, VAR_DECL, NULL_TREE, type1);
646 java_add_local_var (temp);
647 java_add_stmt (build2 (MODIFY_EXPR, type1, temp, decl1));
648 java_add_stmt (build2 (MODIFY_EXPR, type2,
649 find_stack_slot (stack_pointer - 1, type2),
650 decl2));
651 java_add_stmt (build2 (MODIFY_EXPR, type1,
652 find_stack_slot (stack_pointer - 2, type1),
653 temp));
654 stack_type_map[stack_pointer - 1] = type2;
655 stack_type_map[stack_pointer - 2] = type1;
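/* Implement the dup* family of opcodes: duplicate the top SIZE stack words
   and insert the copy OFFSET words further down, e.g. dup is SIZE 1 /
   OFFSET 0, dup_x1 is 1/1, dup2_x2 is 2/2.  */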
658 static void
659 java_stack_dup (int size, int offset)
661 int low_index = stack_pointer - size - offset;
662 int dst_index;
663 if (low_index < 0)
664 error ("stack underflow - dup* operation");
666 flush_quick_stack ();
668 stack_pointer += size;
669 dst_index = stack_pointer;
671 for (dst_index = stack_pointer; --dst_index >= low_index; )
673 tree type;
674 int src_index = dst_index - size;
675 if (src_index < low_index)
676 src_index = dst_index + size + offset;
677 type = stack_type_map [src_index];
678 if (type == TYPE_SECOND)
680 /* Dup operation splits 64-bit number. */
681 gcc_assert (src_index > low_index);
683 stack_type_map[dst_index] = type;
684 src_index--; dst_index--;
685 type = stack_type_map[src_index];
686 gcc_assert (TYPE_IS_WIDE (type));
688 else
689 gcc_assert (! TYPE_IS_WIDE (type));
691 if (src_index != dst_index)
693 tree src_decl = find_stack_slot (src_index, type);
694 tree dst_decl = find_stack_slot (dst_index, type);
696 java_add_stmt
697 (build2 (MODIFY_EXPR, TREE_TYPE (dst_decl), dst_decl, src_decl));
698 stack_type_map[dst_index] = type;
703 /* Calls _Jv_Throw or _Jv_Sjlj_Throw. Discard the contents of the
704 value stack. */
706 static void
707 build_java_athrow (tree node)
709 tree call;
711 call = build_call_nary (void_type_node,
712 build_address_of (throw_node),
713 1, node);
714 TREE_SIDE_EFFECTS (call) = 1;
715 java_add_stmt (call);
716 java_stack_pop (stack_pointer);
719 /* Implementation for jsr/ret */
721 static void
722 build_java_jsr (int target_pc, int return_pc)
724 tree where = lookup_label (target_pc);
725 tree ret = lookup_label (return_pc);
726 tree ret_label = fold_build1 (ADDR_EXPR, return_address_type_node, ret);
727 push_value (ret_label);
728 flush_quick_stack ();
729 java_add_stmt (build1 (GOTO_EXPR, void_type_node, where));
731 /* Do not need to emit the label here. We noted the existence of the
732 label as a jump target in note_instructions; we'll emit the label
733 for real at the beginning of the expand_byte_code loop. */
736 static void
737 build_java_ret (tree location)
739 java_add_stmt (build1 (GOTO_EXPR, void_type_node, location));
742 /* Implementation of operations on array: new, load, store, length */
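/* The ATYPE codes used below are the operand values of the JVM 'newarray'
   opcode: 4 = boolean, 5 = char, 6 = float, 7 = double, 8 = byte,
   9 = short, 10 = int, 11 = long.  */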
744 tree
745 decode_newarray_type (int atype)
747 switch (atype)
749 case 4: return boolean_type_node;
750 case 5: return char_type_node;
751 case 6: return float_type_node;
752 case 7: return double_type_node;
753 case 8: return byte_type_node;
754 case 9: return short_type_node;
755 case 10: return int_type_node;
756 case 11: return long_type_node;
757 default: return NULL_TREE;
761 /* Map primitive type to the code used by OPCODE_newarray. */
763 int
764 encode_newarray_type (tree type)
766 if (type == boolean_type_node)
767 return 4;
768 else if (type == char_type_node)
769 return 5;
770 else if (type == float_type_node)
771 return 6;
772 else if (type == double_type_node)
773 return 7;
774 else if (type == byte_type_node)
775 return 8;
776 else if (type == short_type_node)
777 return 9;
778 else if (type == int_type_node)
779 return 10;
780 else if (type == long_type_node)
781 return 11;
782 else
783 gcc_unreachable ();
786 /* Build a call to _Jv_ThrowBadArrayIndex(), the
787 ArrayIndexOutOfBoundsException exception handler.  */
789 static tree
790 build_java_throw_out_of_bounds_exception (tree index)
792 tree node;
794 /* We need to build a COMPOUND_EXPR because _Jv_ThrowBadArrayIndex()
795 has void return type. We cannot just set the type of the CALL_EXPR below
796 to int_type_node because we would lose it during gimplification. */
797 gcc_assert (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (soft_badarrayindex_node))));
798 node = build_call_nary (void_type_node,
799 build_address_of (soft_badarrayindex_node),
800 1, index);
801 TREE_SIDE_EFFECTS (node) = 1;
803 node = build2 (COMPOUND_EXPR, int_type_node, node, integer_zero_node);
804 TREE_SIDE_EFFECTS (node) = 1; /* Allows expansion within ANDIF */
806 return (node);
809 /* Return the length of an array. Doesn't perform any checking on the nature
810 or value of the array NODE. May be used to implement some bytecodes. */
812 tree
813 build_java_array_length_access (tree node)
815 tree type = TREE_TYPE (node);
816 tree array_type = TREE_TYPE (type);
817 HOST_WIDE_INT length;
819 if (!is_array_type_p (type))
821 /* With the new verifier, we will see an ordinary pointer type
822 here. In this case, we just use an arbitrary array type. */
823 array_type = build_java_array_type (object_ptr_type_node, -1);
824 type = promote_type (array_type);
827 length = java_array_type_length (type);
828 if (length >= 0)
829 return build_int_cst (NULL_TREE, length);
831 node = build3 (COMPONENT_REF, int_type_node,
832 build_java_indirect_ref (array_type, node,
833 flag_check_references),
834 lookup_field (&array_type, get_identifier ("length")),
835 NULL_TREE);
836 IS_ARRAY_LENGTH_ACCESS (node) = 1;
837 return node;
840 /* Optionally checks a reference against the NULL pointer. ARG1: the
841 expr, ARG2: we should check the reference. Don't generate extra
842 checks if we're not generating code. */
844 tree
845 java_check_reference (tree expr, int check)
847 if (!flag_syntax_only && check)
849 expr = save_expr (expr);
850 expr = build3 (COND_EXPR, TREE_TYPE (expr),
851 build2 (EQ_EXPR, boolean_type_node,
852 expr, null_pointer_node),
853 build_call_nary (void_type_node,
854 build_address_of (soft_nullpointer_node),
855 0),
856 expr);
859 return expr;
862 /* Reference an object: just like an INDIRECT_REF, but with checking. */
864 tree
865 build_java_indirect_ref (tree type, tree expr, int check)
867 tree t;
868 t = java_check_reference (expr, check);
869 t = convert (build_pointer_type (type), t);
870 return build1 (INDIRECT_REF, type, t);
873 /* Implement array indexing (either as l-value or r-value).
874 Returns a tree for ARRAY[INDEX], assuming TYPE is the element type.
875 Optionally performs bounds checking and/or a test against NULL.
876 At this point, ARRAY should have been verified as an array. */
878 tree
879 build_java_arrayaccess (tree array, tree type, tree index)
881 tree node, throw_expr = NULL_TREE;
882 tree data_field;
883 tree ref;
884 tree array_type = TREE_TYPE (TREE_TYPE (array));
885 tree size_exp = fold_convert (sizetype, size_in_bytes (type));
887 if (!is_array_type_p (TREE_TYPE (array)))
889 /* With the new verifier, we will see an ordinary pointer type
890 here. In this case, we just use the correct array type. */
891 array_type = build_java_array_type (type, -1);
894 if (flag_bounds_check)
896 /* Generate:
897 * (unsigned jint) INDEX >= (unsigned jint) LEN
898 * && throw ArrayIndexOutOfBoundsException.
899 * Note this is equivalent to and more efficient than:
900 * (INDEX < 0 || INDEX >= LEN) && throw ...  (a negative INDEX, viewed as unsigned, is necessarily >= LEN). */
901 tree test;
902 tree len = convert (unsigned_int_type_node,
903 build_java_array_length_access (array));
904 test = fold_build2 (GE_EXPR, boolean_type_node,
905 convert (unsigned_int_type_node, index),
906 len);
907 if (! integer_zerop (test))
909 throw_expr
910 = build2 (TRUTH_ANDIF_EXPR, int_type_node, test,
911 build_java_throw_out_of_bounds_exception (index));
912 /* allows expansion within COMPOUND */
913 TREE_SIDE_EFFECTS( throw_expr ) = 1;
917 /* If checking bounds, wrap the index expr with a COMPOUND_EXPR in order
918 to have the bounds check evaluated first. */
919 if (throw_expr != NULL_TREE)
920 index = build2 (COMPOUND_EXPR, int_type_node, throw_expr, index);
922 data_field = lookup_field (&array_type, get_identifier ("data"));
924 ref = build3 (COMPONENT_REF, TREE_TYPE (data_field),
925 build_java_indirect_ref (array_type, array,
926 flag_check_references),
927 data_field, NULL_TREE);
929 /* Take the address of the data field and convert it to a pointer to
930 the element type. */
931 node = build1 (NOP_EXPR, build_pointer_type (type), build_address_of (ref));
933 /* Multiply the index by the size of an element to obtain a byte
934 offset. Convert the result to a pointer to the element type. */
935 index = build2 (MULT_EXPR, sizetype,
936 fold_convert (sizetype, index),
937 size_exp);
939 /* Sum the byte offset and the address of the data field. */
940 node = fold_build_pointer_plus (node, index);
942 /* Finally, return
944 *((&array->data) + index*size_exp)
947 return build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (node)), node);
950 /* Generate code to throw an ArrayStoreException if OBJECT is not assignable
951 (at runtime) to an element of ARRAY. A NOP_EXPR is returned if it can
952 determine that no check is required. */
954 tree
955 build_java_arraystore_check (tree array, tree object)
957 tree check, element_type, source;
958 tree array_type_p = TREE_TYPE (array);
959 tree object_type = TYPE_NAME (TREE_TYPE (TREE_TYPE (object)));
961 if (! flag_verify_invocations)
963 /* With the new verifier, we don't track precise types. FIXME:
964 performance regression here. */
965 element_type = TYPE_NAME (object_type_node);
967 else
969 gcc_assert (is_array_type_p (array_type_p));
971 /* Get the TYPE_DECL for ARRAY's element type. */
972 element_type
973 = TYPE_NAME (TREE_TYPE (TREE_TYPE (TREE_TYPE (array_type_p))));
976 gcc_assert (TREE_CODE (element_type) == TYPE_DECL
977 && TREE_CODE (object_type) == TYPE_DECL);
979 if (!flag_store_check)
980 return build1 (NOP_EXPR, array_type_p, array);
982 /* No check is needed if the element type is final. Also check that
983 element_type matches object_type, since in the bytecode
984 compilation case element_type may be the actual element type of
985 the array rather than its declared type. However, if we're doing
986 indirect dispatch, we can't do the `final' optimization. */
987 if (element_type == object_type
988 && ! flag_indirect_dispatch
989 && CLASS_FINAL (element_type))
990 return build1 (NOP_EXPR, array_type_p, array);
992 /* OBJECT might be wrapped by a SAVE_EXPR. */
993 if (TREE_CODE (object) == SAVE_EXPR)
994 source = TREE_OPERAND (object, 0);
995 else
996 source = object;
998 /* Avoid the check if OBJECT was just loaded from the same array. */
999 if (TREE_CODE (source) == ARRAY_REF)
1001 tree target;
1002 source = TREE_OPERAND (source, 0); /* COMPONENT_REF. */
1003 source = TREE_OPERAND (source, 0); /* INDIRECT_REF. */
1004 source = TREE_OPERAND (source, 0); /* Source array's DECL or SAVE_EXPR. */
1005 if (TREE_CODE (source) == SAVE_EXPR)
1006 source = TREE_OPERAND (source, 0);
1008 target = array;
1009 if (TREE_CODE (target) == SAVE_EXPR)
1010 target = TREE_OPERAND (target, 0);
1012 if (source == target)
1013 return build1 (NOP_EXPR, array_type_p, array);
1016 /* Build an invocation of _Jv_CheckArrayStore */
1017 check = build_call_nary (void_type_node,
1018 build_address_of (soft_checkarraystore_node),
1019 2, array, object);
1020 TREE_SIDE_EFFECTS (check) = 1;
1022 return check;
1025 /* Makes sure that INDEXED_TYPE is appropriate. If not, make it from
1026 ARRAY_NODE. This function is used to retrieve something less vague than
1027 a pointer type when indexing the first dimension of something like [[<t>.
1028 May return a corrected type, if necessary, otherwise INDEXED_TYPE is
1029 returned unchanged.  */
1031 static tree
1032 build_java_check_indexed_type (tree array_node ATTRIBUTE_UNUSED,
1033 tree indexed_type)
1035 /* We used to check to see if ARRAY_NODE really had array type.
1036 However, with the new verifier, this is not necessary, as we know
1037 that the object will be an array of the appropriate type. */
1039 return indexed_type;
1042 /* newarray triggers a call to _Jv_NewPrimArray. This function should be
1043 called with an integer code (the type of array to create), and the length
1044 of the array to create. */
1046 tree
1047 build_newarray (int atype_value, tree length)
1049 tree type_arg;
1051 tree prim_type = decode_newarray_type (atype_value);
1052 tree type
1053 = build_java_array_type (prim_type,
1054 tree_fits_shwi_p (length)
1055 ? tree_to_shwi (length) : -1);
1057 /* Pass a reference to the primitive type class and save the runtime
1058 some work. */
1059 type_arg = build_class_ref (prim_type);
1061 return build_call_nary (promote_type (type),
1062 build_address_of (soft_newarray_node),
1063 2, type_arg, length);
1066 /* Generates anewarray from a given CLASS_TYPE. Gets from the stack the size
1067 of the dimension. */
1069 tree
1070 build_anewarray (tree class_type, tree length)
1072 tree type
1073 = build_java_array_type (class_type,
1074 tree_fits_shwi_p (length)
1075 ? tree_to_shwi (length) : -1);
1077 return build_call_nary (promote_type (type),
1078 build_address_of (soft_anewarray_node),
1079 3,
1080 length,
1081 build_class_ref (class_type),
1082 null_pointer_node);
1085 /* Return a node that evaluates 'new TYPE[LENGTH]'. */
1087 tree
1088 build_new_array (tree type, tree length)
1090 if (JPRIMITIVE_TYPE_P (type))
1091 return build_newarray (encode_newarray_type (type), length);
1092 else
1093 return build_anewarray (TREE_TYPE (type), length);
1096 /* Generates a call to _Jv_NewMultiArray. multianewarray expects a
1097 class pointer, the number of dimensions and the matching number of
1098 dimension sizes.  The argument list is NULL terminated.  */
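/* As an illustration (dim1..dim3 are just placeholder names): for a
   three-dimensional array the call built below is roughly
   _Jv_NewMultiArray (klass, 3, dim1, dim2, dim3, NULL), with the dimension
   counts popped off the operand stack in reverse order.  */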
1100 static void
1101 expand_java_multianewarray (tree class_type, int ndim)
1103 int i;
1104 vec<tree, va_gc> *args = NULL;
1106 vec_safe_grow (args, 3 + ndim);
1108 (*args)[0] = build_class_ref (class_type);
1109 (*args)[1] = build_int_cst (NULL_TREE, ndim);
1111 for(i = ndim - 1; i >= 0; i-- )
1112 (*args)[(unsigned)(2 + i)] = pop_value (int_type_node);
1114 (*args)[2 + ndim] = null_pointer_node;
1116 push_value (build_call_vec (promote_type (class_type),
1117 build_address_of (soft_multianewarray_node),
1118 args));
1121 /* ARRAY[INDEX] <- RHS.  build_java_check_indexed_type makes sure that
1122 ARRAY is an array type.  May expand some bounds checking and NULL
1123 pointer checking.  RHS_TYPE_NODE is the type of the value we are
1124 going to store.  In the case of CHAR/BYTE/BOOLEAN/SHORT, the type
1125 popped off the stack is an INT; in those cases, we make the conversion.
1127 If ARRAY is a reference type, the assignment is checked at run-time
1128 to make sure that the RHS can be assigned to the array element
1129 type.  It is not necessary to generate this check if the element type is final.  */
1131 static void
1132 expand_java_arraystore (tree rhs_type_node)
1134 tree rhs_node = pop_value ((INTEGRAL_TYPE_P (rhs_type_node)
1135 && TYPE_PRECISION (rhs_type_node) <= 32) ?
1136 int_type_node : rhs_type_node);
1137 tree index = pop_value (int_type_node);
1138 tree array_type, array, temp, access;
1140 /* If we're processing an `aaload' we might as well just pick
1141 `Object'. */
1142 if (TREE_CODE (rhs_type_node) == POINTER_TYPE)
1144 array_type = build_java_array_type (object_ptr_type_node, -1);
1145 rhs_type_node = object_ptr_type_node;
1147 else
1148 array_type = build_java_array_type (rhs_type_node, -1);
1150 array = pop_value (array_type);
1151 array = build1 (NOP_EXPR, promote_type (array_type), array);
1153 rhs_type_node = build_java_check_indexed_type (array, rhs_type_node);
1155 flush_quick_stack ();
1157 index = save_expr (index);
1158 array = save_expr (array);
1160 /* We want to perform the bounds check (done by
1161 build_java_arrayaccess) before the type check (done by
1162 build_java_arraystore_check). So, we call build_java_arrayaccess
1163 -- which returns an ARRAY_REF lvalue -- and we then generate code
1164 to stash the address of that lvalue in a temp. Then we call
1165 build_java_arraystore_check, and finally we generate a
1166 MODIFY_EXPR to set the array element. */
1168 access = build_java_arrayaccess (array, rhs_type_node, index);
1169 temp = build_decl (input_location, VAR_DECL, NULL_TREE,
1170 build_pointer_type (TREE_TYPE (access)));
1171 java_add_local_var (temp);
1172 java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (temp),
1173 temp,
1174 build_fold_addr_expr (access)));
1176 if (TREE_CODE (rhs_type_node) == POINTER_TYPE)
1178 tree check = build_java_arraystore_check (array, rhs_node);
1179 java_add_stmt (check);
1182 java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (access),
1183 build1 (INDIRECT_REF, TREE_TYPE (access), temp),
1184 rhs_node));
1187 /* Expand the evaluation of ARRAY[INDEX]. build_java_check_indexed_type makes
1188 sure that LHS is an array type.  May expand some bounds checking and NULL
1189 pointer checking.
1190 LHS_TYPE_NODE is the type of ARRAY[INDEX].  But in the case of CHAR/BYTE/
1191 BOOLEAN/SHORT, we push a promoted type back to the stack.  */
1194 static void
1195 expand_java_arrayload (tree lhs_type_node)
1197 tree load_node;
1198 tree index_node = pop_value (int_type_node);
1199 tree array_type;
1200 tree array_node;
1202 /* If we're processing an `aaload' we might as well just pick
1203 `Object'. */
1204 if (TREE_CODE (lhs_type_node) == POINTER_TYPE)
1206 array_type = build_java_array_type (object_ptr_type_node, -1);
1207 lhs_type_node = object_ptr_type_node;
1209 else
1210 array_type = build_java_array_type (lhs_type_node, -1);
1211 array_node = pop_value (array_type);
1212 array_node = build1 (NOP_EXPR, promote_type (array_type), array_node);
1214 index_node = save_expr (index_node);
1215 array_node = save_expr (array_node);
1217 lhs_type_node = build_java_check_indexed_type (array_node,
1218 lhs_type_node);
1219 load_node = build_java_arrayaccess (array_node,
1220 lhs_type_node,
1221 index_node);
1222 if (INTEGRAL_TYPE_P (lhs_type_node) && TYPE_PRECISION (lhs_type_node) <= 32)
1223 load_node = fold_build1 (NOP_EXPR, int_type_node, load_node);
1224 push_value (load_node);
1227 /* Expands .length.  Makes sure that we deal with an array and may expand
1228 a NULL check on the array object. */
1230 static void
1231 expand_java_array_length (void)
1233 tree array = pop_value (ptr_type_node);
1234 tree length = build_java_array_length_access (array);
1236 push_value (length);
1239 /* Emit code for the call to _Jv_Monitor{Enter,Exit}. CALL can be
1240 either soft_monitorenter_node or soft_monitorexit_node. */
1242 static tree
1243 build_java_monitor (tree call, tree object)
1245 return build_call_nary (void_type_node,
1246 build_address_of (call),
1247 1, object);
1250 /* Emit code for one of the PUSHC instructions. */
1252 static void
1253 expand_java_pushc (int ival, tree type)
1255 tree value;
1256 if (type == ptr_type_node && ival == 0)
1257 value = null_pointer_node;
1258 else if (type == int_type_node || type == long_type_node)
1259 value = build_int_cst (type, ival);
1260 else if (type == float_type_node || type == double_type_node)
1262 REAL_VALUE_TYPE x;
1263 REAL_VALUE_FROM_INT (x, ival, 0, TYPE_MODE (type));
1264 value = build_real (type, x);
1266 else
1267 gcc_unreachable ();
1269 push_value (value);
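/* Expand a *return opcode of type TYPE: for void just emit a RETURN_EXPR,
   otherwise pop the return value, store it in DECL_RESULT and return that.  */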
1272 static void
1273 expand_java_return (tree type)
1275 if (type == void_type_node)
1276 java_add_stmt (build1 (RETURN_EXPR, void_type_node, NULL));
1277 else
1279 tree retval = pop_value (type);
1280 tree res = DECL_RESULT (current_function_decl);
1281 retval = build2 (MODIFY_EXPR, TREE_TYPE (res), res, retval);
1283 /* Handle the situation where the native integer type is smaller
1284 than the JVM integer. It can happen for many cross compilers.
1285 The whole if expression just goes away if INT_TYPE_SIZE < 32
1286 is false. */
1287 if (INT_TYPE_SIZE < 32
1288 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (res)))
1289 < GET_MODE_SIZE (TYPE_MODE (type))))
1290 retval = build1(NOP_EXPR, TREE_TYPE(res), retval);
1292 TREE_SIDE_EFFECTS (retval) = 1;
1293 java_add_stmt (build1 (RETURN_EXPR, void_type_node, retval));
1297 static void
1298 expand_load_internal (int index, tree type, int pc)
1300 tree copy;
1301 tree var = find_local_variable (index, type, pc);
1303 /* Now VAR is the VAR_DECL (or PARM_DECL) that we are going to push
1304 on the stack. If there is an assignment to this VAR_DECL between
1305 the stack push and the use, then the wrong code could be
1306 generated. To avoid this we create a new local and copy our
1307 value into it. Then we push this new local on the stack.
1308 Hopefully this all gets optimized out. */
1309 copy = build_decl (input_location, VAR_DECL, NULL_TREE, type);
1310 if ((INTEGRAL_TYPE_P (type) || POINTER_TYPE_P (type))
1311 && TREE_TYPE (copy) != TREE_TYPE (var))
1312 var = convert (type, var);
1313 java_add_local_var (copy);
1314 java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (var), copy, var));
1316 push_value (copy);
1319 tree
1320 build_address_of (tree value)
1322 return build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (value)), value);
1325 bool
1326 class_has_finalize_method (tree type)
1328 tree super = CLASSTYPE_SUPER (type);
1330 if (super == NULL_TREE)
1331 return false; /* Every class with a real finalizer inherits */
1332 /* from java.lang.Object. */
1333 else
1334 return HAS_FINALIZER_P (type) || class_has_finalize_method (super);
1337 tree
1338 java_create_object (tree type)
1340 tree alloc_node = (class_has_finalize_method (type)
1341 ? alloc_object_node
1342 : alloc_no_finalizer_node);
1344 return build_call_nary (promote_type (type),
1345 build_address_of (alloc_node),
1346 1, build_class_ref (type));
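/* Expand the 'new' opcode: make sure TYPE is loaded and laid out, then push
   a call to the appropriate allocator (with or without finalizer support,
   as in java_create_object above).  */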
1349 static void
1350 expand_java_NEW (tree type)
1352 tree alloc_node;
1354 alloc_node = (class_has_finalize_method (type) ? alloc_object_node
1355 : alloc_no_finalizer_node);
1356 if (! CLASS_LOADED_P (type))
1357 load_class (type, 1);
1358 safe_layout_class (type);
1359 push_value (build_call_nary (promote_type (type),
1360 build_address_of (alloc_node),
1361 1, build_class_ref (type)));
1364 /* This returns an expression which will extract the class of an
1365 object. */
1367 tree
1368 build_get_class (tree value)
1370 tree class_field = lookup_field (&dtable_type, get_identifier ("class"));
1371 tree vtable_field = lookup_field (&object_type_node,
1372 get_identifier ("vtable"));
1373 tree tmp = build3 (COMPONENT_REF, dtable_ptr_type,
1374 build_java_indirect_ref (object_type_node, value,
1375 flag_check_references),
1376 vtable_field, NULL_TREE);
1377 return build3 (COMPONENT_REF, class_ptr_type,
1378 build1 (INDIRECT_REF, dtable_type, tmp),
1379 class_field, NULL_TREE);
1382 /* This builds the tree representation of the `instanceof' operator.
1383 It tries various tricks to optimize this in cases where types are
1384 known. */
1386 tree
1387 build_instanceof (tree value, tree type)
1389 tree expr;
1390 tree itype = TREE_TYPE (TREE_TYPE (soft_instanceof_node));
1391 tree valtype = TREE_TYPE (TREE_TYPE (value));
1392 tree valclass = TYPE_NAME (valtype);
1393 tree klass;
1395 /* When compiling from bytecode, we need to ensure that TYPE has
1396 been loaded. */
1397 if (CLASS_P (type) && ! CLASS_LOADED_P (type))
1399 load_class (type, 1);
1400 safe_layout_class (type);
1401 if (! TYPE_SIZE (type) || TREE_CODE (TYPE_SIZE (type)) == ERROR_MARK)
1402 return error_mark_node;
1404 klass = TYPE_NAME (type);
1406 if (type == object_type_node || inherits_from_p (valtype, type))
1408 /* Anything except `null' is an instance of Object. Likewise,
1409 if the object is known to be an instance of the class, then
1410 we only need to check for `null'. */
1411 expr = build2 (NE_EXPR, itype, value, null_pointer_node);
1413 else if (flag_verify_invocations
1414 && ! TYPE_ARRAY_P (type)
1415 && ! TYPE_ARRAY_P (valtype)
1416 && DECL_P (klass) && DECL_P (valclass)
1417 && ! CLASS_INTERFACE (valclass)
1418 && ! CLASS_INTERFACE (klass)
1419 && ! inherits_from_p (type, valtype)
1420 && (CLASS_FINAL (klass)
1421 || ! inherits_from_p (valtype, type)))
1423 /* The classes are from different branches of the derivation
1424 tree, so we immediately know the answer. */
1425 expr = boolean_false_node;
1427 else if (DECL_P (klass) && CLASS_FINAL (klass))
1429 tree save = save_expr (value);
1430 expr = build3 (COND_EXPR, itype,
1431 build2 (NE_EXPR, boolean_type_node,
1432 save, null_pointer_node),
1433 build2 (EQ_EXPR, itype,
1434 build_get_class (save),
1435 build_class_ref (type)),
1436 boolean_false_node);
1438 else
1440 expr = build_call_nary (itype,
1441 build_address_of (soft_instanceof_node),
1442 2, value, build_class_ref (type));
1444 TREE_SIDE_EFFECTS (expr) = TREE_SIDE_EFFECTS (value);
1445 return expr;
1448 static void
1449 expand_java_INSTANCEOF (tree type)
1451 tree value = pop_value (object_ptr_type_node);
1452 value = build_instanceof (value, type);
1453 push_value (value);
1456 static void
1457 expand_java_CHECKCAST (tree type)
1459 tree value = pop_value (ptr_type_node);
1460 value = build_call_nary (promote_type (type),
1461 build_address_of (soft_checkcast_node),
1462 2, build_class_ref (type), value);
1463 push_value (value);
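/* Expand the 'iinc' opcode: add the constant IVAL to the int local variable
   with index LOCAL_VAR_INDEX; PC is the bytecode position used to locate
   the variable.  */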
1466 static void
1467 expand_iinc (unsigned int local_var_index, int ival, int pc)
1469 tree local_var, res;
1470 tree constant_value;
1472 flush_quick_stack ();
1473 local_var = find_local_variable (local_var_index, int_type_node, pc);
1474 constant_value = build_int_cst (NULL_TREE, ival);
1475 res = fold_build2 (PLUS_EXPR, int_type_node, local_var, constant_value);
1476 java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (local_var), local_var, res));
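/* Build a call to the runtime's software divide/remainder helper for OP
   (TRUNC_DIV_EXPR or TRUNC_MOD_EXPR) on int or long operands.  These helpers
   are used when -fuse-divide-subroutine is in effect, presumably so the
   runtime can handle the corner cases (division by zero, dividing the most
   negative value by -1) uniformly on every target.  */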
1480 tree
1481 build_java_soft_divmod (enum tree_code op, tree type, tree op1, tree op2)
1483 tree call = NULL;
1484 tree arg1 = convert (type, op1);
1485 tree arg2 = convert (type, op2);
1487 if (type == int_type_node)
1489 switch (op)
1491 case TRUNC_DIV_EXPR:
1492 call = soft_idiv_node;
1493 break;
1494 case TRUNC_MOD_EXPR:
1495 call = soft_irem_node;
1496 break;
1497 default:
1498 break;
1501 else if (type == long_type_node)
1503 switch (op)
1505 case TRUNC_DIV_EXPR:
1506 call = soft_ldiv_node;
1507 break;
1508 case TRUNC_MOD_EXPR:
1509 call = soft_lrem_node;
1510 break;
1511 default:
1512 break;
1516 gcc_assert (call);
1517 call = build_call_nary (type, build_address_of (call), 2, arg1, arg2);
1518 return call;
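/* Build the tree for binary operator OP applied to ARG1 and ARG2 of type
   TYPE, open-coding the Java-specific cases: unsigned right shift, masking
   of shift counts, the lcmp/fcmpl/fcmpg three-way comparisons,
   floating-point remainder via fmod, and (optionally) software integer
   division.  */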
1521 tree
1522 build_java_binop (enum tree_code op, tree type, tree arg1, tree arg2)
1524 tree mask;
1525 switch (op)
1527 case URSHIFT_EXPR:
1529 tree u_type = unsigned_type_for (type);
1530 arg1 = convert (u_type, arg1);
1531 arg1 = build_java_binop (RSHIFT_EXPR, u_type, arg1, arg2);
1532 return convert (type, arg1);
1534 case LSHIFT_EXPR:
1535 case RSHIFT_EXPR:
1536 mask = build_int_cst (int_type_node,
1537 TYPE_PRECISION (TREE_TYPE (arg1)) - 1);
1538 arg2 = fold_build2 (BIT_AND_EXPR, int_type_node, arg2, mask);
1539 break;
1541 case COMPARE_L_EXPR: /* arg1 > arg2 ? 1 : arg1 == arg2 ? 0 : -1 */
1542 case COMPARE_G_EXPR: /* arg1 < arg2 ? -1 : arg1 == arg2 ? 0 : 1 */
1543 arg1 = save_expr (arg1); arg2 = save_expr (arg2);
1545 tree ifexp1 = fold_build2 (op == COMPARE_L_EXPR ? GT_EXPR : LT_EXPR,
1546 boolean_type_node, arg1, arg2);
1547 tree ifexp2 = fold_build2 (EQ_EXPR, boolean_type_node, arg1, arg2);
1548 tree second_compare = fold_build3 (COND_EXPR, int_type_node,
1549 ifexp2, integer_zero_node,
1550 op == COMPARE_L_EXPR
1551 ? integer_minus_one_node
1552 : integer_one_node);
1553 return fold_build3 (COND_EXPR, int_type_node, ifexp1,
1554 op == COMPARE_L_EXPR ? integer_one_node
1555 : integer_minus_one_node,
1556 second_compare);
1558 case COMPARE_EXPR:
1559 arg1 = save_expr (arg1); arg2 = save_expr (arg2);
1561 tree ifexp1 = fold_build2 (LT_EXPR, boolean_type_node, arg1, arg2);
1562 tree ifexp2 = fold_build2 (GT_EXPR, boolean_type_node, arg1, arg2);
1563 tree second_compare = fold_build3 (COND_EXPR, int_type_node,
1564 ifexp2, integer_one_node,
1565 integer_zero_node);
1566 return fold_build3 (COND_EXPR, int_type_node,
1567 ifexp1, integer_minus_one_node, second_compare);
1569 case TRUNC_DIV_EXPR:
1570 case TRUNC_MOD_EXPR:
1571 if (TREE_CODE (type) == REAL_TYPE
1572 && op == TRUNC_MOD_EXPR)
1574 tree call;
1575 if (type != double_type_node)
1577 arg1 = convert (double_type_node, arg1);
1578 arg2 = convert (double_type_node, arg2);
1580 call = build_call_nary (double_type_node,
1581 build_address_of (soft_fmod_node),
1582 2, arg1, arg2);
1583 if (type != double_type_node)
1584 call = convert (type, call);
1585 return call;
1588 if (TREE_CODE (type) == INTEGER_TYPE
1589 && flag_use_divide_subroutine
1590 && ! flag_syntax_only)
1591 return build_java_soft_divmod (op, type, arg1, arg2);
1593 break;
1594 default: ;
1596 return fold_build2 (op, type, arg1, arg2);
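/* Pop the operands of binary operator OP of type TYPE off the stack and push
   the result.  The right-hand operand of a shift is always an int.  */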
1599 static void
1600 expand_java_binop (tree type, enum tree_code op)
1602 tree larg, rarg;
1603 tree ltype = type;
1604 tree rtype = type;
1605 switch (op)
1607 case LSHIFT_EXPR:
1608 case RSHIFT_EXPR:
1609 case URSHIFT_EXPR:
1610 rtype = int_type_node;
1611 rarg = pop_value (rtype);
1612 break;
1613 default:
1614 rarg = pop_value (rtype);
1616 larg = pop_value (ltype);
1617 push_value (build_java_binop (op, type, larg, rarg));
1620 /* Lookup the field named NAME in *TYPEP or its super classes.
1621 If not found, return NULL_TREE.
1622 (If the *TYPEP is not found, or if the field reference is
1623 ambiguous, return error_mark_node.)
1624 If found, return the FIELD_DECL, and set *TYPEP to the
1625 class containing the field. */
1627 tree
1628 lookup_field (tree *typep, tree name)
1630 if (CLASS_P (*typep) && !CLASS_LOADED_P (*typep))
1632 load_class (*typep, 1);
1633 safe_layout_class (*typep);
1634 if (!TYPE_SIZE (*typep) || TREE_CODE (TYPE_SIZE (*typep)) == ERROR_MARK)
1635 return error_mark_node;
1639 tree field, binfo, base_binfo;
1640 tree save_field;
1641 int i;
1643 for (field = TYPE_FIELDS (*typep); field; field = DECL_CHAIN (field))
1644 if (DECL_NAME (field) == name)
1645 return field;
1647 /* Process implemented interfaces. */
1648 save_field = NULL_TREE;
1649 for (binfo = TYPE_BINFO (*typep), i = 0;
1650 BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
1652 tree t = BINFO_TYPE (base_binfo);
1653 if ((field = lookup_field (&t, name)))
1655 if (save_field == field)
1656 continue;
1657 if (save_field == NULL_TREE)
1658 save_field = field;
1659 else
1661 tree i1 = DECL_CONTEXT (save_field);
1662 tree i2 = DECL_CONTEXT (field);
1663 error ("reference %qs is ambiguous: appears in interface %qs and interface %qs",
1664 IDENTIFIER_POINTER (name),
1665 IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (i1))),
1666 IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (i2))));
1667 return error_mark_node;
1672 if (save_field != NULL_TREE)
1673 return save_field;
1675 *typep = CLASSTYPE_SUPER (*typep);
1676 } while (*typep);
1677 return NULL_TREE;
1680 /* Look up the field named NAME in object SELF_VALUE,
1681 which has class SELF_CLASS (a non-handle RECORD_TYPE).
1682 SELF_VALUE is NULL_TREE if looking for a static field. */
1684 tree
1685 build_field_ref (tree self_value, tree self_class, tree name)
1687 tree base_class = self_class;
1688 tree field_decl = lookup_field (&base_class, name);
1689 if (field_decl == NULL_TREE)
1691 error ("field %qs not found", IDENTIFIER_POINTER (name));
1692 return error_mark_node;
1694 if (self_value == NULL_TREE)
1696 return build_static_field_ref (field_decl);
1698 else
1700 tree base_type = promote_type (base_class);
1702 /* CHECK is true if self_value is not the this pointer. */
1703 int check = (! (DECL_P (self_value)
1704 && DECL_NAME (self_value) == this_identifier_node));
1706 /* Determine whether a field offset from NULL will lie within
1707 Page 0: this is necessary on those GNU/Linux/BSD systems that
1708 trap SEGV to generate NullPointerExceptions.
1710 We assume that Page 0 will be mapped with NOPERM, and that
1711 memory may be allocated from any other page, so only field
1712 offsets < pagesize are guaranteed to trap. We also assume
1713 the smallest page size we'll encounter is 4k bytes. */
1714 if (! flag_syntax_only && check && ! flag_check_references
1715 && ! flag_indirect_dispatch)
1717 tree field_offset = byte_position (field_decl);
1718 if (! page_size)
1719 page_size = size_int (4096);
1720 check = ! INT_CST_LT_UNSIGNED (field_offset, page_size);
1723 if (base_type != TREE_TYPE (self_value))
1724 self_value = fold_build1 (NOP_EXPR, base_type, self_value);
1725 if (! flag_syntax_only && flag_indirect_dispatch)
1727 tree otable_index
1728 = build_int_cst (NULL_TREE, get_symbol_table_index
1729 (field_decl, NULL_TREE,
1730 &TYPE_OTABLE_METHODS (output_class)));
1731 tree field_offset
1732 = build4 (ARRAY_REF, integer_type_node,
1733 TYPE_OTABLE_DECL (output_class), otable_index,
1734 NULL_TREE, NULL_TREE);
1735 tree address;
1737 if (DECL_CONTEXT (field_decl) != output_class)
1738 field_offset
1739 = build3 (COND_EXPR, TREE_TYPE (field_offset),
1740 build2 (EQ_EXPR, boolean_type_node,
1741 field_offset, integer_zero_node),
1742 build_call_nary (void_type_node,
1743 build_address_of (soft_nosuchfield_node),
1744 1, otable_index),
1745 field_offset);
1747 self_value = java_check_reference (self_value, check);
1748 address = fold_build_pointer_plus (self_value, field_offset);
1749 address = fold_convert (build_pointer_type (TREE_TYPE (field_decl)),
1750 address);
1751 return fold_build1 (INDIRECT_REF, TREE_TYPE (field_decl), address);
1754 self_value = build_java_indirect_ref (TREE_TYPE (TREE_TYPE (self_value)),
1755 self_value, check);
1756 return fold_build3 (COMPONENT_REF, TREE_TYPE (field_decl),
1757 self_value, field_decl, NULL_TREE);
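/* Return the LABEL_DECL corresponding to bytecode position PC in the current
   method, creating it if it does not exist yet.  */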
1761 tree
1762 lookup_label (int pc)
1764 tree name;
1765 char buf[32];
1766 if (pc > highest_label_pc_this_method)
1767 highest_label_pc_this_method = pc;
1768 targetm.asm_out.generate_internal_label (buf, "LJpc=",
1769 start_label_pc_this_method + pc);
1770 name = get_identifier (buf);
1771 if (IDENTIFIER_LOCAL_VALUE (name))
1772 return IDENTIFIER_LOCAL_VALUE (name);
1773 else
1775 /* The type of the address of a label is return_address_type_node. */
1776 tree decl = create_label_decl (name);
1777 return pushdecl (decl);
1781 /* Generate a unique name for loop and switch labels,
1782 try-catch-finally block labels, and temporary variables.  */
1784 tree
1785 generate_name (void)
1787 static int l_number = 0;
1788 char buff [32];
1789 targetm.asm_out.generate_internal_label (buff, "LJv", l_number);
1790 l_number++;
1791 return get_identifier (buff);
1794 tree
1795 create_label_decl (tree name)
1797 tree decl;
1798 decl = build_decl (input_location, LABEL_DECL, name,
1799 TREE_TYPE (return_address_type_node));
1800 DECL_CONTEXT (decl) = current_function_decl;
1801 DECL_IGNORED_P (decl) = 1;
1802 return decl;
1805 /* This maps a bytecode offset (PC) to various flags. */
1806 char *instruction_bits;
1808 /* This is a vector of type states for the current method. It is
1809 indexed by PC. Each element is a tree vector holding the type
1810 state at that PC. We only note type states at basic block
1811 boundaries. */
1812 vec<tree, va_gc> *type_states;
1814 static void
1815 note_label (int current_pc ATTRIBUTE_UNUSED, int target_pc)
1817 lookup_label (target_pc);
1818 instruction_bits [target_pc] |= BCODE_JUMP_TARGET;
1821 /* Emit code to jump to TARGET_PC if VALUE1 CONDITION VALUE2,
1822 where CONDITION is one of the compare operators.  */
1824 static void
1825 expand_compare (enum tree_code condition, tree value1, tree value2,
1826 int target_pc)
1828 tree target = lookup_label (target_pc);
1829 tree cond = fold_build2 (condition, boolean_type_node, value1, value2);
1830 java_add_stmt
1831 (build3 (COND_EXPR, void_type_node, java_truthvalue_conversion (cond),
1832 build1 (GOTO_EXPR, void_type_node, target),
1833 build_java_empty_stmt ()));
1836 /* Emit code for a TEST-type opcode. */
1838 static void
1839 expand_test (enum tree_code condition, tree type, int target_pc)
1841 tree value1, value2;
1842 flush_quick_stack ();
1843 value1 = pop_value (type);
1844 value2 = (type == ptr_type_node) ? null_pointer_node : integer_zero_node;
1845 expand_compare (condition, value1, value2, target_pc);
1848 /* Emit code for a COND-type opcode. */
1850 static void
1851 expand_cond (enum tree_code condition, tree type, int target_pc)
1853 tree value1, value2;
1854 flush_quick_stack ();
1855 /* note: pop values in opposite order */
1856 value2 = pop_value (type);
1857 value1 = pop_value (type);
1858 /* Maybe should check value1 and value2 for type compatibility ??? */
1859 expand_compare (condition, value1, value2, target_pc);
1862 static void
1863 expand_java_goto (int target_pc)
1865 tree target_label = lookup_label (target_pc);
1866 flush_quick_stack ();
1867 java_add_stmt (build1 (GOTO_EXPR, void_type_node, target_label));
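/* Begin expanding a tableswitch/lookupswitch: emit a SWITCH_EXPR on SELECTOR
   whose default case branches to DEFAULT_PC, and return it so that
   expand_java_add_case can append the individual cases.  */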
1870 static tree
1871 expand_java_switch (tree selector, int default_pc)
1873 tree switch_expr, x;
1875 flush_quick_stack ();
1876 switch_expr = build3 (SWITCH_EXPR, TREE_TYPE (selector), selector,
1877 NULL_TREE, NULL_TREE);
1878 java_add_stmt (switch_expr);
1880 x = build_case_label (NULL_TREE, NULL_TREE,
1881 create_artificial_label (input_location));
1882 append_to_statement_list (x, &SWITCH_BODY (switch_expr));
1884 x = build1 (GOTO_EXPR, void_type_node, lookup_label (default_pc));
1885 append_to_statement_list (x, &SWITCH_BODY (switch_expr));
1887 return switch_expr;
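/* Append a case label for the constant MATCH to SWITCH_EXPR, branching
   to TARGET_PC. */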
1890 static void
1891 expand_java_add_case (tree switch_expr, int match, int target_pc)
1893 tree value, x;
1895 value = build_int_cst (TREE_TYPE (switch_expr), match);
1897 x = build_case_label (value, NULL_TREE,
1898 create_artificial_label (input_location));
1899 append_to_statement_list (x, &SWITCH_BODY (switch_expr));
1901 x = build1 (GOTO_EXPR, void_type_node, lookup_label (target_pc));
1902 append_to_statement_list (x, &SWITCH_BODY (switch_expr));
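/* Pop the arguments of a call to a method of type METHOD_TYPE from the
   stack, converting each value to its declared parameter type (and
   promoting small integral types where the target requires it), and
   return them as a vector in declaration order. */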
1905 static vec<tree, va_gc> *
1906 pop_arguments (tree method_type)
1908 function_args_iterator fnai;
1909 tree type;
1910 vec<tree, va_gc> *args = NULL;
1911 int arity;
1913 FOREACH_FUNCTION_ARGS (method_type, type, fnai)
1915 /* XXX: leaky abstraction. */
1916 if (type == void_type_node)
1917 break;
1919 vec_safe_push (args, type);
1922 arity = vec_safe_length (args);
1924 while (arity--)
1926 tree arg = pop_value ((*args)[arity]);
1928 /* We simply cast each argument to its proper type. This is
1929 needed since we lose type information coming out of the
1930 verifier. We also have to do this when we pop an integer
1931 type that must be promoted for the function call. */
1932 if (TREE_CODE (type) == POINTER_TYPE)
1933 arg = build1 (NOP_EXPR, type, arg);
1934 else if (targetm.calls.promote_prototypes (type)
1935 && TYPE_PRECISION (type) < TYPE_PRECISION (integer_type_node)
1936 && INTEGRAL_TYPE_P (type))
1937 arg = convert (integer_type_node, arg);
1939 (*args)[arity] = arg;
1942 return args;
1945 /* Attach to PTR (a block) the declaration found in ENTRY. */
1948 attach_init_test_initialization_flags (void **entry, void *ptr)
1950 tree block = (tree)ptr;
1951 struct treetreehash_entry *ite = (struct treetreehash_entry *) *entry;
1953 if (block != error_mark_node)
1955 if (TREE_CODE (block) == BIND_EXPR)
1957 tree body = BIND_EXPR_BODY (block);
1958 DECL_CHAIN (ite->value) = BIND_EXPR_VARS (block);
1959 BIND_EXPR_VARS (block) = ite->value;
1960 body = build2 (COMPOUND_EXPR, void_type_node,
1961 build1 (DECL_EXPR, void_type_node, ite->value), body);
1962 BIND_EXPR_BODY (block) = body;
1964 else
1966 tree body = BLOCK_SUBBLOCKS (block);
1967 TREE_CHAIN (ite->value) = BLOCK_EXPR_DECLS (block);
1968 BLOCK_EXPR_DECLS (block) = ite->value;
1969 body = build2 (COMPOUND_EXPR, void_type_node,
1970 build1 (DECL_EXPR, void_type_node, ite->value), body);
1971 BLOCK_SUBBLOCKS (block) = body;
1975 return true;
1978 /* Build an expression to initialize the class CLAS.
1979 If EXPR is non-NULL, return an expression that first calls the initializer
1980 (if it is needed) and then evaluates EXPR. */
1982 tree
1983 build_class_init (tree clas, tree expr)
1985 tree init;
1987 /* An optimization: if CLAS is a superclass of the class we're
1988 compiling, we don't need to initialize it. However, if CLAS is
1989 an interface, it won't necessarily be initialized, even if we
1990 implement it. */
1991 if ((! CLASS_INTERFACE (TYPE_NAME (clas))
1992 && inherits_from_p (current_class, clas))
1993 || current_class == clas)
1994 return expr;
1996 if (always_initialize_class_p)
1998 init = build_call_nary (void_type_node,
1999 build_address_of (soft_initclass_node),
2000 1, build_class_ref (clas));
2001 TREE_SIDE_EFFECTS (init) = 1;
2003 else
2005 tree *init_test_decl;
2006 tree decl;
2007 init_test_decl = java_treetreehash_new
2008 (DECL_FUNCTION_INIT_TEST_TABLE (current_function_decl), clas);
2010 if (*init_test_decl == NULL)
2012 /* Build a declaration and mark it as a flag used to track
2013 static class initializations. */
2014 decl = build_decl (input_location, VAR_DECL, NULL_TREE,
2015 boolean_type_node);
2016 MAYBE_CREATE_VAR_LANG_DECL_SPECIFIC (decl);
2017 DECL_CONTEXT (decl) = current_function_decl;
2018 DECL_INITIAL (decl) = boolean_false_node;
2019 /* Don't emit any symbolic debugging info for this decl. */
2020 DECL_IGNORED_P (decl) = 1;
2021 *init_test_decl = decl;
2024 init = build_call_nary (void_type_node,
2025 build_address_of (soft_initclass_node),
2026 1, build_class_ref (clas));
2027 TREE_SIDE_EFFECTS (init) = 1;
2028 init = build3 (COND_EXPR, void_type_node,
2029 build2 (EQ_EXPR, boolean_type_node,
2030 *init_test_decl, boolean_false_node),
2031 init, integer_zero_node);
2032 TREE_SIDE_EFFECTS (init) = 1;
2033 init = build2 (COMPOUND_EXPR, TREE_TYPE (expr), init,
2034 build2 (MODIFY_EXPR, boolean_type_node,
2035 *init_test_decl, boolean_true_node));
2036 TREE_SIDE_EFFECTS (init) = 1;
2039 if (expr != NULL_TREE)
2041 expr = build2 (COMPOUND_EXPR, TREE_TYPE (expr), init, expr);
2042 TREE_SIDE_EFFECTS (expr) = 1;
2043 return expr;
2045 return init;
2050 /* Rewrite expensive calls that require stack unwinding at runtime to
2051 cheaper alternatives. The logic here performs these
2052 transformations:
2054 java.lang.Class.forName("foo") -> java.lang.Class.forName("foo", class$)
2055 java.lang.Class.getClassLoader() -> java.lang.Class.getClassLoader(class$)
2059 typedef struct
2061 const char *classname;
2062 const char *method;
2063 const char *signature;
2064 const char *new_classname;
2065 const char *new_signature;
2066 int flags;
2067 void (*rewrite_arglist) (vec<tree, va_gc> **);
2068 } rewrite_rule;
2070 /* Add __builtin_return_address(0) to the end of an arglist. */
2073 static void
2074 rewrite_arglist_getcaller (vec<tree, va_gc> **arglist)
2076 tree retaddr
2077 = build_call_expr (builtin_decl_explicit (BUILT_IN_RETURN_ADDRESS),
2078 1, integer_zero_node);
2080 DECL_UNINLINABLE (current_function_decl) = 1;
2082 vec_safe_push (*arglist, retaddr);
2085 /* Add this.class to the end of an arglist. */
2087 static void
2088 rewrite_arglist_getclass (vec<tree, va_gc> **arglist)
2090 vec_safe_push (*arglist, build_class_ref (output_class));
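/* The rewrite rules themselves; the table is terminated by an entry
   whose classname is NULL. */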
2093 static rewrite_rule rules[] =
2094 {{"java.lang.Class", "getClassLoader", "()Ljava/lang/ClassLoader;",
2095 "java.lang.Class", "(Ljava/lang/Class;)Ljava/lang/ClassLoader;",
2096 ACC_FINAL|ACC_PRIVATE, rewrite_arglist_getclass},
2098 {"java.lang.Class", "forName", "(Ljava/lang/String;)Ljava/lang/Class;",
2099 "java.lang.Class", "(Ljava/lang/String;Ljava/lang/Class;)Ljava/lang/Class;",
2100 ACC_FINAL|ACC_PRIVATE|ACC_STATIC, rewrite_arglist_getclass},
2102 {"gnu.classpath.VMStackWalker", "getCallingClass", "()Ljava/lang/Class;",
2103 "gnu.classpath.VMStackWalker", "(Lgnu/gcj/RawData;)Ljava/lang/Class;",
2104 ACC_FINAL|ACC_PRIVATE|ACC_STATIC, rewrite_arglist_getcaller},
2106 {"gnu.classpath.VMStackWalker", "getCallingClassLoader",
2107 "()Ljava/lang/ClassLoader;",
2108 "gnu.classpath.VMStackWalker", "(Lgnu/gcj/RawData;)Ljava/lang/ClassLoader;",
2109 ACC_FINAL|ACC_PRIVATE|ACC_STATIC, rewrite_arglist_getcaller},
2111 {"gnu.java.lang.VMCPStringBuilder", "toString", "([CII)Ljava/lang/String;",
2112 "java.lang.String", "([CII)Ljava/lang/String;",
2113 ACC_FINAL|ACC_PRIVATE|ACC_STATIC, NULL},
2115 {NULL, NULL, NULL, NULL, NULL, 0, NULL}};
2117 /* True if this method is special, i.e. it's a private method that
2118 should be exported from a DSO. */
2120 bool
2121 special_method_p (tree candidate_method)
2123 tree context = DECL_NAME (TYPE_NAME (DECL_CONTEXT (candidate_method)));
2124 tree method = DECL_NAME (candidate_method);
2125 rewrite_rule *p;
2127 for (p = rules; p->classname; p++)
2129 if (get_identifier (p->classname) == context
2130 && get_identifier (p->method) == method)
2131 return true;
2133 return false;
2136 /* Scan the rules list for replacements for *METHOD_P and replace the
2137 args accordingly. If the rewrite results in an access to a private
2138 method, update SPECIAL. */
2140 void
2141 maybe_rewrite_invocation (tree *method_p, vec<tree, va_gc> **arg_list_p,
2142 tree *method_signature_p, tree *special)
2144 tree context = DECL_NAME (TYPE_NAME (DECL_CONTEXT (*method_p)));
2145 rewrite_rule *p;
2146 *special = NULL_TREE;
2148 for (p = rules; p->classname; p++)
2150 if (get_identifier (p->classname) == context)
2152 tree method = DECL_NAME (*method_p);
2153 if (get_identifier (p->method) == method
2154 && get_identifier (p->signature) == *method_signature_p)
2156 tree maybe_method;
2157 tree destination_class
2158 = lookup_class (get_identifier (p->new_classname));
2159 gcc_assert (destination_class);
2160 maybe_method
2161 = lookup_java_method (destination_class,
2162 method,
2163 get_identifier (p->new_signature));
2164 if (! maybe_method && ! flag_verify_invocations)
2166 maybe_method
2167 = add_method (destination_class, p->flags,
2168 method, get_identifier (p->new_signature));
2169 DECL_EXTERNAL (maybe_method) = 1;
2171 *method_p = maybe_method;
2172 gcc_assert (*method_p);
2173 if (p->rewrite_arglist)
2174 p->rewrite_arglist (arg_list_p);
2175 *method_signature_p = get_identifier (p->new_signature);
2176 *special = integer_one_node;
2178 break;
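/* Build an expression for the address of METHOD's code. Depending on
   flag_indirect_dispatch and on whether SELF_TYPE is compiled in this
   unit, this is either a direct ADDR_EXPR, an atable reference, or a
   lookup of the method's ncode field through SELF_TYPE's method table.
   SPECIAL is passed through to the symbol table. */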
2186 tree
2187 build_known_method_ref (tree method, tree method_type ATTRIBUTE_UNUSED,
2188 tree self_type, tree method_signature ATTRIBUTE_UNUSED,
2189 vec<tree, va_gc> *arg_list ATTRIBUTE_UNUSED, tree special)
2191 tree func;
2192 if (is_compiled_class (self_type))
2194 /* With indirect dispatch we have to use indirect calls for all
2195 publicly visible methods or gcc will use PLT indirections
2196 to reach them. We also have to use indirect dispatch for all
2197 external methods. */
2198 if (! flag_indirect_dispatch
2199 || (! DECL_EXTERNAL (method) && ! TREE_PUBLIC (method)))
2201 func = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (method)),
2202 method);
2204 else
2206 tree table_index
2207 = build_int_cst (NULL_TREE,
2208 (get_symbol_table_index
2209 (method, special,
2210 &TYPE_ATABLE_METHODS (output_class))));
2211 func
2212 = build4 (ARRAY_REF,
2213 TREE_TYPE (TREE_TYPE (TYPE_ATABLE_DECL (output_class))),
2214 TYPE_ATABLE_DECL (output_class), table_index,
2215 NULL_TREE, NULL_TREE);
2217 func = convert (method_ptr_type_node, func);
2219 else
2221 /* We don't know whether the method has been (statically) compiled.
2222 Compile this code to get a reference to the method's code:
2224 SELF_TYPE->methods[METHOD_INDEX].ncode
2228 int method_index = 0;
2229 tree meth, ref;
2231 /* The method might actually be declared in some superclass, so
2232 we have to use its class context, not the caller's notion of
2233 where the method is. */
2234 self_type = DECL_CONTEXT (method);
2235 ref = build_class_ref (self_type);
2236 ref = build1 (INDIRECT_REF, class_type_node, ref);
2237 if (ncode_ident == NULL_TREE)
2238 ncode_ident = get_identifier ("ncode");
2239 if (methods_ident == NULL_TREE)
2240 methods_ident = get_identifier ("methods");
2241 ref = build3 (COMPONENT_REF, method_ptr_type_node, ref,
2242 lookup_field (&class_type_node, methods_ident),
2243 NULL_TREE);
2244 for (meth = TYPE_METHODS (self_type);
2245 ; meth = DECL_CHAIN (meth))
2247 if (method == meth)
2248 break;
2249 if (meth == NULL_TREE)
2250 fatal_error ("method '%s' not found in class",
2251 IDENTIFIER_POINTER (DECL_NAME (method)));
2252 method_index++;
2254 method_index *= int_size_in_bytes (method_type_node);
2255 ref = fold_build_pointer_plus_hwi (ref, method_index);
2256 ref = build1 (INDIRECT_REF, method_type_node, ref);
2257 func = build3 (COMPONENT_REF, nativecode_ptr_type_node,
2258 ref, lookup_field (&method_type_node, ncode_ident),
2259 NULL_TREE);
2261 return func;
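/* Build an expression that loads the dispatch table (vtable) of the
   receiver, which is the first element of ARG_LIST. For an interface
   call on an array, the dispatch table of java.lang.Object is used
   instead, as noted in the comment below. */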
2264 tree
2265 invoke_build_dtable (int is_invoke_interface, vec<tree, va_gc> *arg_list)
2267 tree dtable, objectref;
2268 tree saved = save_expr ((*arg_list)[0]);
2270 (*arg_list)[0] = saved;
2272 /* If we're dealing with interfaces and if the objectref
2273 argument is an array then get the dispatch table of the class
2274 Object rather than the one from the objectref. */
2275 objectref = (is_invoke_interface
2276 && is_array_type_p (TREE_TYPE (saved))
2277 ? build_class_ref (object_type_node) : saved);
2279 if (dtable_ident == NULL_TREE)
2280 dtable_ident = get_identifier ("vtable");
2281 dtable = build_java_indirect_ref (object_type_node, objectref,
2282 flag_check_references);
2283 dtable = build3 (COMPONENT_REF, dtable_ptr_type, dtable,
2284 lookup_field (&object_type_node, dtable_ident), NULL_TREE);
2286 return dtable;
2289 /* Determine the index in SYMBOL_TABLE for a reference to the decl
2290 T. If this decl has not been seen before, it will be added to the
2291 [oa]table_methods. If it has, the existing table slot will be
2292 reused. */
2295 get_symbol_table_index (tree t, tree special,
2296 vec<method_entry, va_gc> **symbol_table)
2298 method_entry *e;
2299 unsigned i;
2300 method_entry elem = {t, special};
2302 FOR_EACH_VEC_SAFE_ELT (*symbol_table, i, e)
2303 if (t == e->method && special == e->special)
2304 goto done;
2306 vec_safe_push (*symbol_table, elem);
2308 done:
2309 return i + 1;
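/* Build an expression for the code address of METHOD, reached through
   the vtable DTABLE: either an otable lookup (indirect dispatch) or an
   offset computed from the method's DECL_VINDEX. */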
2312 tree
2313 build_invokevirtual (tree dtable, tree method, tree special)
2315 tree func;
2316 tree nativecode_ptr_ptr_type_node
2317 = build_pointer_type (nativecode_ptr_type_node);
2318 tree method_index;
2319 tree otable_index;
2321 if (flag_indirect_dispatch)
2323 gcc_assert (! CLASS_INTERFACE (TYPE_NAME (DECL_CONTEXT (method))));
2325 otable_index
2326 = build_int_cst (NULL_TREE, get_symbol_table_index
2327 (method, special,
2328 &TYPE_OTABLE_METHODS (output_class)));
2329 method_index = build4 (ARRAY_REF, integer_type_node,
2330 TYPE_OTABLE_DECL (output_class),
2331 otable_index, NULL_TREE, NULL_TREE);
2333 else
2335 /* We fetch the DECL_VINDEX field directly here, rather than
2336 using get_method_index(). DECL_VINDEX is the true offset
2337 from the vtable base to a method, regardless of any extra
2338 words inserted at the start of the vtable. */
2339 method_index = DECL_VINDEX (method);
2340 method_index = size_binop (MULT_EXPR, method_index,
2341 TYPE_SIZE_UNIT (nativecode_ptr_ptr_type_node));
2342 if (TARGET_VTABLE_USES_DESCRIPTORS)
2343 method_index = size_binop (MULT_EXPR, method_index,
2344 size_int (TARGET_VTABLE_USES_DESCRIPTORS));
2347 func = fold_build_pointer_plus (dtable, method_index);
2349 if (TARGET_VTABLE_USES_DESCRIPTORS)
2350 func = build1 (NOP_EXPR, nativecode_ptr_type_node, func);
2351 else
2353 func = fold_convert (nativecode_ptr_ptr_type_node, func);
2354 func = build1 (INDIRECT_REF, nativecode_ptr_type_node, func);
2357 return func;
2360 static GTY(()) tree class_ident;
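/* Build a run-time interface-method lookup for METHOD: fetch the
   receiver's class from DTABLE and call the soft_lookupinterfacemethod_node
   helper with that class, the interface and the method index (taken from
   the itable when using indirect dispatch). */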
2361 tree
2362 build_invokeinterface (tree dtable, tree method)
2364 tree interface;
2365 tree idx;
2367 /* We expand invokeinterface here. */
2369 if (class_ident == NULL_TREE)
2370 class_ident = get_identifier ("class");
2372 dtable = build_java_indirect_ref (dtable_type, dtable,
2373 flag_check_references);
2374 dtable = build3 (COMPONENT_REF, class_ptr_type, dtable,
2375 lookup_field (&dtable_type, class_ident), NULL_TREE);
2377 interface = DECL_CONTEXT (method);
2378 gcc_assert (CLASS_INTERFACE (TYPE_NAME (interface)));
2379 layout_class_methods (interface);
2381 if (flag_indirect_dispatch)
2383 int itable_index
2384 = 2 * (get_symbol_table_index
2385 (method, NULL_TREE, &TYPE_ITABLE_METHODS (output_class)));
2386 interface
2387 = build4 (ARRAY_REF,
2388 TREE_TYPE (TREE_TYPE (TYPE_ITABLE_DECL (output_class))),
2389 TYPE_ITABLE_DECL (output_class),
2390 build_int_cst (NULL_TREE, itable_index-1),
2391 NULL_TREE, NULL_TREE);
2392 idx
2393 = build4 (ARRAY_REF,
2394 TREE_TYPE (TREE_TYPE (TYPE_ITABLE_DECL (output_class))),
2395 TYPE_ITABLE_DECL (output_class),
2396 build_int_cst (NULL_TREE, itable_index),
2397 NULL_TREE, NULL_TREE);
2398 interface = convert (class_ptr_type, interface);
2399 idx = convert (integer_type_node, idx);
2401 else
2403 idx = build_int_cst (NULL_TREE,
2404 get_interface_method_index (method, interface));
2405 interface = build_class_ref (interface);
2408 return build_call_nary (ptr_type_node,
2409 build_address_of (soft_lookupinterfacemethod_node),
2410 3, dtable, interface, idx);
2413 /* Expand one of the invoke_* opcodes.
2414 OPCODE is the specific opcode.
2415 METHOD_REF_INDEX is an index into the constant pool.
2416 NARGS is the number of arguments, or -1 if not specified. */
2418 static void
2419 expand_invoke (int opcode, int method_ref_index, int nargs ATTRIBUTE_UNUSED)
2421 tree method_signature
2422 = COMPONENT_REF_SIGNATURE(&current_jcf->cpool, method_ref_index);
2423 tree method_name = COMPONENT_REF_NAME (&current_jcf->cpool,
2424 method_ref_index);
2425 tree self_type
2426 = get_class_constant (current_jcf,
2427 COMPONENT_REF_CLASS_INDEX(&current_jcf->cpool,
2428 method_ref_index));
2429 const char *const self_name
2430 = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (self_type)));
2431 tree call, func, method, method_type;
2432 vec<tree, va_gc> *arg_list;
2433 tree check = NULL_TREE;
2435 tree special = NULL_TREE;
2437 if (! CLASS_LOADED_P (self_type))
2439 load_class (self_type, 1);
2440 safe_layout_class (self_type);
2441 if (TREE_CODE (TYPE_SIZE (self_type)) == ERROR_MARK)
2442 fatal_error ("failed to find class '%s'", self_name);
2444 layout_class_methods (self_type);
2446 if (ID_INIT_P (method_name))
2447 method = lookup_java_constructor (self_type, method_signature);
2448 else
2449 method = lookup_java_method (self_type, method_name, method_signature);
2451 /* We've found a method in a class other than the one in which it
2452 was wanted. This can happen if, for instance, we're trying to
2453 compile invokespecial super.equals().
2454 FIXME: This is a kludge. Rather than nullifying the result, we
2455 should change lookup_java_method() so that it doesn't search the
2456 superclass chain when we're BC-compiling. */
2457 if (! flag_verify_invocations
2458 && method
2459 && ! TYPE_ARRAY_P (self_type)
2460 && self_type != DECL_CONTEXT (method))
2461 method = NULL_TREE;
2463 /* We've found a method in an interface, but this isn't an interface
2464 call. */
2465 if (opcode != OPCODE_invokeinterface
2466 && method
2467 && (CLASS_INTERFACE (TYPE_NAME (DECL_CONTEXT (method)))))
2468 method = NULL_TREE;
2470 /* We've found a non-interface method but we are making an
2471 interface call. This can happen if the interface overrides a
2472 method in Object. */
2473 if (! flag_verify_invocations
2474 && opcode == OPCODE_invokeinterface
2475 && method
2476 && ! CLASS_INTERFACE (TYPE_NAME (DECL_CONTEXT (method))))
2477 method = NULL_TREE;
2479 if (method == NULL_TREE)
2481 if (flag_verify_invocations || ! flag_indirect_dispatch)
2483 error ("class '%s' has no method named '%s' matching signature '%s'",
2484 self_name,
2485 IDENTIFIER_POINTER (method_name),
2486 IDENTIFIER_POINTER (method_signature));
2488 else
2490 int flags = ACC_PUBLIC;
2491 if (opcode == OPCODE_invokestatic)
2492 flags |= ACC_STATIC;
2493 if (opcode == OPCODE_invokeinterface)
2495 flags |= ACC_INTERFACE | ACC_ABSTRACT;
2496 CLASS_INTERFACE (TYPE_NAME (self_type)) = 1;
2498 method = add_method (self_type, flags, method_name,
2499 method_signature);
2500 DECL_ARTIFICIAL (method) = 1;
2501 METHOD_DUMMY (method) = 1;
2502 layout_class_method (self_type, NULL,
2503 method, NULL);
2507 /* invokestatic must name a static, non-abstract method; the other invoke opcodes must not name a static method. */
2508 if (method != NULL_TREE)
2510 if (opcode == OPCODE_invokestatic)
2512 if (!METHOD_STATIC (method))
2514 error ("invokestatic on non static method");
2515 method = NULL_TREE;
2517 else if (METHOD_ABSTRACT (method))
2519 error ("invokestatic on abstract method");
2520 method = NULL_TREE;
2523 else
2525 if (METHOD_STATIC (method))
2527 error ("invoke[non-static] on static method");
2528 method = NULL_TREE;
2533 if (method == NULL_TREE)
2535 /* If we got here, we emitted an error message above. So we
2536 just pop the arguments, push a properly-typed zero, and
2537 continue. */
2538 method_type = get_type_from_signature (method_signature);
2539 pop_arguments (method_type);
2540 if (opcode != OPCODE_invokestatic)
2541 pop_type (self_type);
2542 method_type = promote_type (TREE_TYPE (method_type));
2543 push_value (convert (method_type, integer_zero_node));
2544 return;
2547 arg_list = pop_arguments (TREE_TYPE (method));
2548 flush_quick_stack ();
2550 maybe_rewrite_invocation (&method, &arg_list, &method_signature,
2551 &special);
2552 method_type = TREE_TYPE (method);
2554 func = NULL_TREE;
2555 if (opcode == OPCODE_invokestatic)
2556 func = build_known_method_ref (method, method_type, self_type,
2557 method_signature, arg_list, special);
2558 else if (opcode == OPCODE_invokespecial
2559 || (opcode == OPCODE_invokevirtual
2560 && (METHOD_PRIVATE (method)
2561 || METHOD_FINAL (method)
2562 || CLASS_FINAL (TYPE_NAME (self_type)))))
2564 /* If the object for the method call is null, we throw an
2565 exception. We don't do this if the object is the current
2566 method's `this'. In other cases we just rely on an
2567 optimization pass to eliminate redundant checks. FIXME:
2568 Unfortunately there doesn't seem to be a way to determine
2569 what the current method is right now.
2570 We do omit the check if we're calling <init>. */
2571 /* We use a SAVE_EXPR here to make sure we only evaluate
2572 the new `self' expression once. */
2573 tree save_arg = save_expr ((*arg_list)[0]);
2574 (*arg_list)[0] = save_arg;
2575 check = java_check_reference (save_arg, ! DECL_INIT_P (method));
2576 func = build_known_method_ref (method, method_type, self_type,
2577 method_signature, arg_list, special);
2579 else
2581 tree dtable = invoke_build_dtable (opcode == OPCODE_invokeinterface,
2582 arg_list);
2583 if (opcode == OPCODE_invokevirtual)
2584 func = build_invokevirtual (dtable, method, special);
2585 else
2586 func = build_invokeinterface (dtable, method);
2589 if (TREE_CODE (func) == ADDR_EXPR)
2590 TREE_TYPE (func) = build_pointer_type (method_type);
2591 else
2592 func = build1 (NOP_EXPR, build_pointer_type (method_type), func);
2594 call = build_call_vec (TREE_TYPE (method_type), func, arg_list);
2595 TREE_SIDE_EFFECTS (call) = 1;
2596 call = check_for_builtin (method, call);
2598 if (check != NULL_TREE)
2600 call = build2 (COMPOUND_EXPR, TREE_TYPE (call), check, call);
2601 TREE_SIDE_EFFECTS (call) = 1;
2604 if (TREE_CODE (TREE_TYPE (method_type)) == VOID_TYPE)
2605 java_add_stmt (call);
2606 else
2608 push_value (call);
2609 flush_quick_stack ();
2613 /* Create a stub which will be put into the vtable but which will call
2614 a JNI function. */
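/* Roughly, the stub built below has this shape (a sketch, not the
   literal trees; result promotion and weak-reference unwrapping are
   glossed over):

     env = _Jv_GetJNIEnvNewFrame (klass);
     if (meth == NULL)
       meth = _Jv_LookupJNIMethod (klass, name, signature, args_size);
     res = (*meth) (env, klass_if_static, args...);
     pop the JNI frame via soft_jnipopsystemframe_node (env);
     return res;  */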
2616 tree
2617 build_jni_stub (tree method)
2619 tree jnifunc, call, body, method_sig, arg_types;
2620 tree jniarg0, jniarg1, jniarg2, jniarg3;
2621 tree jni_func_type, tem;
2622 tree env_var, res_var = NULL_TREE, block;
2623 tree method_args;
2624 tree meth_var;
2625 tree bind;
2626 vec<tree, va_gc> *args = NULL;
2627 int args_size = 0;
2629 tree klass = DECL_CONTEXT (method);
2630 klass = build_class_ref (klass);
2632 gcc_assert (METHOD_NATIVE (method) && flag_jni);
2634 DECL_ARTIFICIAL (method) = 1;
2635 DECL_EXTERNAL (method) = 0;
2637 env_var = build_decl (input_location,
2638 VAR_DECL, get_identifier ("env"), ptr_type_node);
2639 DECL_CONTEXT (env_var) = method;
2641 if (TREE_TYPE (TREE_TYPE (method)) != void_type_node)
2643 res_var = build_decl (input_location, VAR_DECL, get_identifier ("res"),
2644 TREE_TYPE (TREE_TYPE (method)));
2645 DECL_CONTEXT (res_var) = method;
2646 DECL_CHAIN (env_var) = res_var;
2649 method_args = DECL_ARGUMENTS (method);
2650 block = build_block (env_var, NULL_TREE, method_args, NULL_TREE);
2651 TREE_SIDE_EFFECTS (block) = 1;
2653 /* Compute the local `env' by calling _Jv_GetJNIEnvNewFrame. */
2654 body = build2 (MODIFY_EXPR, ptr_type_node, env_var,
2655 build_call_nary (ptr_type_node,
2656 build_address_of (soft_getjnienvnewframe_node),
2657 1, klass));
2659 /* The JNIEnv structure is the first argument to the JNI function. */
2660 args_size += int_size_in_bytes (TREE_TYPE (env_var));
2661 vec_safe_push (args, env_var);
2663 /* For a static method the second argument is the class. For a
2664 non-static method the second argument is `this'; that is already
2665 available in the argument list. */
2666 if (METHOD_STATIC (method))
2668 args_size += int_size_in_bytes (TREE_TYPE (klass));
2669 vec_safe_push (args, klass);
2672 /* All the arguments to this method become arguments to the
2673 underlying JNI function. If we had to wrap object arguments in a
2674 special way, we would do that here. */
2675 for (tem = method_args; tem != NULL_TREE; tem = DECL_CHAIN (tem))
2677 int arg_bits = TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (tem)));
2678 #ifdef PARM_BOUNDARY
2679 arg_bits = (((arg_bits + PARM_BOUNDARY - 1) / PARM_BOUNDARY)
2680 * PARM_BOUNDARY);
2681 #endif
2682 args_size += (arg_bits / BITS_PER_UNIT);
2684 vec_safe_push (args, tem);
2686 arg_types = TYPE_ARG_TYPES (TREE_TYPE (method));
2688 /* Argument types for static methods and the JNIEnv structure.
2689 FIXME: Write and use build_function_type_vec to avoid this. */
2690 if (METHOD_STATIC (method))
2691 arg_types = tree_cons (NULL_TREE, object_ptr_type_node, arg_types);
2692 arg_types = tree_cons (NULL_TREE, ptr_type_node, arg_types);
2694 /* We call _Jv_LookupJNIMethod to find the actual underlying
2695 function pointer. _Jv_LookupJNIMethod will throw the appropriate
2696 exception if this function is not found at runtime. */
2697 method_sig = build_java_signature (TREE_TYPE (method));
2698 jniarg0 = klass;
2699 jniarg1 = build_utf8_ref (DECL_NAME (method));
2700 jniarg2 = build_utf8_ref (unmangle_classname
2701 (IDENTIFIER_POINTER (method_sig),
2702 IDENTIFIER_LENGTH (method_sig)));
2703 jniarg3 = build_int_cst (NULL_TREE, args_size);
2705 tem = build_function_type (TREE_TYPE (TREE_TYPE (method)), arg_types);
2707 #ifdef MODIFY_JNI_METHOD_CALL
2708 tem = MODIFY_JNI_METHOD_CALL (tem);
2709 #endif
2711 jni_func_type = build_pointer_type (tem);
2713 /* Use the actual function type, rather than a generic pointer type,
2714 such that this decl keeps the actual pointer type from being
2715 garbage-collected. If it is, we end up using canonical types
2716 with different uids for equivalent function types, and this in
2717 turn causes utf8 identifiers and output order to vary. */
2718 meth_var = build_decl (input_location,
2719 VAR_DECL, get_identifier ("meth"), jni_func_type);
2720 TREE_STATIC (meth_var) = 1;
2721 TREE_PUBLIC (meth_var) = 0;
2722 DECL_EXTERNAL (meth_var) = 0;
2723 DECL_CONTEXT (meth_var) = method;
2724 DECL_ARTIFICIAL (meth_var) = 1;
2725 DECL_INITIAL (meth_var) = null_pointer_node;
2726 TREE_USED (meth_var) = 1;
2727 chainon (env_var, meth_var);
2728 build_result_decl (method);
2730 jnifunc = build3 (COND_EXPR, jni_func_type,
2731 build2 (NE_EXPR, boolean_type_node,
2732 meth_var, build_int_cst (TREE_TYPE (meth_var), 0)),
2733 meth_var,
2734 build2 (MODIFY_EXPR, jni_func_type, meth_var,
2735 build1
2736 (NOP_EXPR, jni_func_type,
2737 build_call_nary (ptr_type_node,
2738 build_address_of
2739 (soft_lookupjnimethod_node),
2741 jniarg0, jniarg1,
2742 jniarg2, jniarg3))));
2744 /* Now we make the actual JNI call via the resulting function
2745 pointer. */
2746 call = build_call_vec (TREE_TYPE (TREE_TYPE (method)), jnifunc, args);
2748 /* If the JNI call returned a result, capture it here. If we had to
2749 unwrap JNI object results, we would do that here. */
2750 if (res_var != NULL_TREE)
2752 /* If the call returns an object, it may return a JNI weak
2753 reference, in which case we must unwrap it. */
2754 if (! JPRIMITIVE_TYPE_P (TREE_TYPE (TREE_TYPE (method))))
2755 call = build_call_nary (TREE_TYPE (TREE_TYPE (method)),
2756 build_address_of (soft_unwrapjni_node),
2757 1, call);
2758 call = build2 (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (method)),
2759 res_var, call);
2762 TREE_SIDE_EFFECTS (call) = 1;
2764 body = build2 (COMPOUND_EXPR, void_type_node, body, call);
2765 TREE_SIDE_EFFECTS (body) = 1;
2767 /* Now free the environment we allocated. */
2768 call = build_call_nary (ptr_type_node,
2769 build_address_of (soft_jnipopsystemframe_node),
2770 1, env_var);
2771 TREE_SIDE_EFFECTS (call) = 1;
2772 body = build2 (COMPOUND_EXPR, void_type_node, body, call);
2773 TREE_SIDE_EFFECTS (body) = 1;
2775 /* Finally, do the return. */
2776 if (res_var != NULL_TREE)
2778 tree drt;
2779 gcc_assert (DECL_RESULT (method));
2780 /* Make sure we copy the result variable to the actual
2781 result. We use the type of the DECL_RESULT because it
2782 might be different from the return type of the function:
2783 it might be promoted. */
2784 drt = TREE_TYPE (DECL_RESULT (method));
2785 if (drt != TREE_TYPE (res_var))
2786 res_var = build1 (CONVERT_EXPR, drt, res_var);
2787 res_var = build2 (MODIFY_EXPR, drt, DECL_RESULT (method), res_var);
2788 TREE_SIDE_EFFECTS (res_var) = 1;
2791 body = build2 (COMPOUND_EXPR, void_type_node, body,
2792 build1 (RETURN_EXPR, void_type_node, res_var));
2793 TREE_SIDE_EFFECTS (body) = 1;
2795 /* Prepend class initialization for static methods reachable from
2796 other classes. */
2797 if (METHOD_STATIC (method)
2798 && (! METHOD_PRIVATE (method)
2799 || INNER_CLASS_P (DECL_CONTEXT (method))))
2801 tree init = build_call_expr (soft_initclass_node, 1,
2802 klass);
2803 body = build2 (COMPOUND_EXPR, void_type_node, init, body);
2804 TREE_SIDE_EFFECTS (body) = 1;
2807 bind = build3 (BIND_EXPR, void_type_node, BLOCK_VARS (block),
2808 body, block);
2809 return bind;
2813 /* Given lvalue EXP, return a volatile expression that references the
2814 same object. */
2816 tree
2817 java_modify_addr_for_volatile (tree exp)
2819 tree exp_type = TREE_TYPE (exp);
2820 tree v_type
2821 = build_qualified_type (exp_type,
2822 TYPE_QUALS (exp_type) | TYPE_QUAL_VOLATILE);
2823 tree addr = build_fold_addr_expr (exp);
2824 v_type = build_pointer_type (v_type);
2825 addr = fold_convert (v_type, addr);
2826 exp = build_fold_indirect_ref (addr);
2827 return exp;
2831 /* Expand an operation to extract from or store into a field.
2832 IS_STATIC is 1 iff the field is static.
2833 IS_PUTTING is 1 for putting into a field; 0 for getting from the field.
2834 FIELD_REF_INDEX is an index into the constant pool. */
2836 static void
2837 expand_java_field_op (int is_static, int is_putting, int field_ref_index)
2839 tree self_type
2840 = get_class_constant (current_jcf,
2841 COMPONENT_REF_CLASS_INDEX (&current_jcf->cpool,
2842 field_ref_index));
2843 const char *self_name
2844 = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (self_type)));
2845 tree field_name = COMPONENT_REF_NAME (&current_jcf->cpool, field_ref_index);
2846 tree field_signature = COMPONENT_REF_SIGNATURE (&current_jcf->cpool,
2847 field_ref_index);
2848 tree field_type = get_type_from_signature (field_signature);
2849 tree new_value = is_putting ? pop_value (field_type) : NULL_TREE;
2850 tree field_ref;
2851 int is_error = 0;
2852 tree original_self_type = self_type;
2853 tree field_decl;
2854 tree modify_expr;
2856 if (! CLASS_LOADED_P (self_type))
2857 load_class (self_type, 1);
2858 field_decl = lookup_field (&self_type, field_name);
2859 if (field_decl == error_mark_node)
2861 is_error = 1;
2863 else if (field_decl == NULL_TREE)
2865 if (! flag_verify_invocations)
2867 int flags = ACC_PUBLIC;
2868 if (is_static)
2869 flags |= ACC_STATIC;
2870 self_type = original_self_type;
2871 field_decl = add_field (original_self_type, field_name,
2872 field_type, flags);
2873 DECL_ARTIFICIAL (field_decl) = 1;
2874 DECL_IGNORED_P (field_decl) = 1;
2875 #if 0
2876 /* FIXME: We should be pessimistic about volatility. We
2877 don't know one way or another, but this is safe.
2878 However, doing this has bad effects on code quality. We
2879 need to look at better ways to do this. */
2880 TREE_THIS_VOLATILE (field_decl) = 1;
2881 #endif
2883 else
2885 error ("missing field '%s' in '%s'",
2886 IDENTIFIER_POINTER (field_name), self_name);
2887 is_error = 1;
2890 else if (build_java_signature (TREE_TYPE (field_decl)) != field_signature)
2892 error ("mismatching signature for field '%s' in '%s'",
2893 IDENTIFIER_POINTER (field_name), self_name);
2894 is_error = 1;
2896 field_ref = is_static ? NULL_TREE : pop_value (self_type);
2897 if (is_error)
2899 if (! is_putting)
2900 push_value (convert (field_type, integer_zero_node));
2901 flush_quick_stack ();
2902 return;
2905 field_ref = build_field_ref (field_ref, self_type, field_name);
2906 if (is_static
2907 && ! flag_indirect_dispatch)
2909 tree context = DECL_CONTEXT (field_ref);
2910 if (context != self_type && CLASS_INTERFACE (TYPE_NAME (context)))
2911 field_ref = build_class_init (context, field_ref);
2912 else
2913 field_ref = build_class_init (self_type, field_ref);
2915 if (is_putting)
2917 flush_quick_stack ();
2918 if (FIELD_FINAL (field_decl))
2920 if (DECL_CONTEXT (field_decl) != current_class)
2921 error ("assignment to final field %q+D not in field%'s class",
2922 field_decl);
2923 /* We used to check for assignments to final fields not
2924 occurring in the class initializer or in a constructor
2925 here. However, this constraint doesn't seem to be
2926 enforced by the JVM. */
2929 if (TREE_THIS_VOLATILE (field_decl))
2930 field_ref = java_modify_addr_for_volatile (field_ref);
2932 modify_expr = build2 (MODIFY_EXPR, TREE_TYPE (field_ref),
2933 field_ref, new_value);
2935 if (TREE_THIS_VOLATILE (field_decl))
2937 tree sync = builtin_decl_explicit (BUILT_IN_SYNC_SYNCHRONIZE);
2938 java_add_stmt (build_call_expr (sync, 0));
2941 java_add_stmt (modify_expr);
2943 else
2945 tree temp = build_decl (input_location,
2946 VAR_DECL, NULL_TREE, TREE_TYPE (field_ref));
2947 java_add_local_var (temp);
2949 if (TREE_THIS_VOLATILE (field_decl))
2950 field_ref = java_modify_addr_for_volatile (field_ref);
2952 modify_expr
2953 = build2 (MODIFY_EXPR, TREE_TYPE (field_ref), temp, field_ref);
2954 java_add_stmt (modify_expr);
2956 if (TREE_THIS_VOLATILE (field_decl))
2958 tree sync = builtin_decl_explicit (BUILT_IN_SYNC_SYNCHRONIZE);
2959 java_add_stmt (build_call_expr (sync, 0));
2962 push_value (temp);
2964 TREE_THIS_VOLATILE (field_ref) = TREE_THIS_VOLATILE (field_decl);
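/* Restore the verifier's type state recorded for PC: reset stack_pointer
   and reload type_map from the vector saved in type_states. */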
2967 static void
2968 load_type_state (int pc)
2970 int i;
2971 tree vec = (*type_states)[pc];
2972 int cur_length = TREE_VEC_LENGTH (vec);
2973 stack_pointer = cur_length - DECL_MAX_LOCALS(current_function_decl);
2974 for (i = 0; i < cur_length; i++)
2975 type_map [i] = TREE_VEC_ELT (vec, i);
2978 /* Go over METHOD's bytecode and note instruction starts in
2979 instruction_bits[]. */
2981 void
2982 note_instructions (JCF *jcf, tree method)
2984 int PC;
2985 unsigned char* byte_ops;
2986 long length = DECL_CODE_LENGTH (method);
2988 int saw_index;
2989 jint INT_temp;
2991 #undef RET /* Defined by config/i386/i386.h */
2992 #undef PTR
2993 #define BCODE byte_ops
2994 #define BYTE_type_node byte_type_node
2995 #define SHORT_type_node short_type_node
2996 #define INT_type_node int_type_node
2997 #define LONG_type_node long_type_node
2998 #define CHAR_type_node char_type_node
2999 #define PTR_type_node ptr_type_node
3000 #define FLOAT_type_node float_type_node
3001 #define DOUBLE_type_node double_type_node
3002 #define VOID_type_node void_type_node
3003 #define CONST_INDEX_1 (saw_index = 1, IMMEDIATE_u1)
3004 #define CONST_INDEX_2 (saw_index = 1, IMMEDIATE_u2)
3005 #define VAR_INDEX_1 (saw_index = 1, IMMEDIATE_u1)
3006 #define VAR_INDEX_2 (saw_index = 1, IMMEDIATE_u2)
3008 #define CHECK_PC_IN_RANGE(PC) ((void)1) /* Already handled by verifier. */
3010 JCF_SEEK (jcf, DECL_CODE_OFFSET (method));
3011 byte_ops = jcf->read_ptr;
3012 instruction_bits = XRESIZEVAR (char, instruction_bits, length + 1);
3013 memset (instruction_bits, 0, length + 1);
3014 vec_alloc (type_states, length + 1);
3015 type_states->quick_grow_cleared (length + 1);
3017 /* This pass figures out which PCs can be the targets of jumps. */
3018 for (PC = 0; PC < length;)
3020 int oldpc = PC; /* PC at instruction start. */
3021 instruction_bits [PC] |= BCODE_INSTRUCTION_START;
3022 switch (byte_ops[PC++])
3024 #define JAVAOP(OPNAME, OPCODE, OPKIND, OPERAND_TYPE, OPERAND_VALUE) \
3025 case OPCODE: \
3026 PRE_##OPKIND(OPERAND_TYPE, OPERAND_VALUE); \
3027 break;
3029 #define NOTE_LABEL(PC) note_label(oldpc, PC)
3031 #define PRE_PUSHC(OPERAND_TYPE, OPERAND_VALUE) (void)(OPERAND_VALUE);
3032 #define PRE_LOAD(OPERAND_TYPE, OPERAND_VALUE) (void)(OPERAND_VALUE);
3033 #define PRE_STORE(OPERAND_TYPE, OPERAND_VALUE) (void)(OPERAND_VALUE);
3034 #define PRE_STACK(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
3035 #define PRE_UNOP(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
3036 #define PRE_BINOP(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
3037 #define PRE_CONVERT(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
3038 #define PRE_CONVERT2(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
3040 #define PRE_SPECIAL(OPERAND_TYPE, INSTRUCTION) \
3041 PRE_SPECIAL_##INSTRUCTION(OPERAND_TYPE)
3042 #define PRE_SPECIAL_IINC(OPERAND_TYPE) \
3043 ((void) IMMEDIATE_u1, (void) IMMEDIATE_s1)
3044 #define PRE_SPECIAL_ENTER(IGNORE) /* nothing */
3045 #define PRE_SPECIAL_EXIT(IGNORE) /* nothing */
3046 #define PRE_SPECIAL_THROW(IGNORE) /* nothing */
3047 #define PRE_SPECIAL_BREAK(IGNORE) /* nothing */
3049 /* two forms of wide instructions */
3050 #define PRE_SPECIAL_WIDE(IGNORE) \
3052 int modified_opcode = IMMEDIATE_u1; \
3053 if (modified_opcode == OPCODE_iinc) \
3055 (void) IMMEDIATE_u2; /* indexbyte1 and indexbyte2 */ \
3056 (void) IMMEDIATE_s2; /* constbyte1 and constbyte2 */ \
3058 else \
3060 (void) IMMEDIATE_u2; /* indexbyte1 and indexbyte2 */ \
3064 #define PRE_IMPL(IGNORE1, IGNORE2) /* nothing */
3066 #define PRE_MONITOR(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
3068 #define PRE_RETURN(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
3069 #define PRE_ARRAY(OPERAND_TYPE, SUBOP) \
3070 PRE_ARRAY_##SUBOP(OPERAND_TYPE)
3071 #define PRE_ARRAY_LOAD(TYPE) /* nothing */
3072 #define PRE_ARRAY_STORE(TYPE) /* nothing */
3073 #define PRE_ARRAY_LENGTH(TYPE) /* nothing */
3074 #define PRE_ARRAY_NEW(TYPE) PRE_ARRAY_NEW_##TYPE
3075 #define PRE_ARRAY_NEW_NUM ((void) IMMEDIATE_u1)
3076 #define PRE_ARRAY_NEW_PTR ((void) IMMEDIATE_u2)
3077 #define PRE_ARRAY_NEW_MULTI ((void) IMMEDIATE_u2, (void) IMMEDIATE_u1)
3079 #define PRE_TEST(OPERAND_TYPE, OPERAND_VALUE) NOTE_LABEL (oldpc+IMMEDIATE_s2)
3080 #define PRE_COND(OPERAND_TYPE, OPERAND_VALUE) NOTE_LABEL (oldpc+IMMEDIATE_s2)
3081 #define PRE_BRANCH(OPERAND_TYPE, OPERAND_VALUE) \
3082 saw_index = 0; INT_temp = (OPERAND_VALUE); \
3083 if (!saw_index) NOTE_LABEL(oldpc + INT_temp);
3084 #define PRE_JSR(OPERAND_TYPE, OPERAND_VALUE) \
3085 saw_index = 0; INT_temp = (OPERAND_VALUE); \
3086 NOTE_LABEL (PC); \
3087 if (!saw_index) NOTE_LABEL(oldpc + INT_temp);
3089 #define PRE_RET(OPERAND_TYPE, OPERAND_VALUE) (void)(OPERAND_VALUE)
3091 #define PRE_SWITCH(OPERAND_TYPE, TABLE_OR_LOOKUP) \
3092 PC = (PC + 3) / 4 * 4; PRE_##TABLE_OR_LOOKUP##_SWITCH
3094 #define PRE_LOOKUP_SWITCH \
3095 { jint default_offset = IMMEDIATE_s4; jint npairs = IMMEDIATE_s4; \
3096 NOTE_LABEL (default_offset+oldpc); \
3097 if (npairs >= 0) \
3098 while (--npairs >= 0) { \
3099 jint match ATTRIBUTE_UNUSED = IMMEDIATE_s4; \
3100 jint offset = IMMEDIATE_s4; \
3101 NOTE_LABEL (offset+oldpc); } \
3104 #define PRE_TABLE_SWITCH \
3105 { jint default_offset = IMMEDIATE_s4; \
3106 jint low = IMMEDIATE_s4; jint high = IMMEDIATE_s4; \
3107 NOTE_LABEL (default_offset+oldpc); \
3108 if (low <= high) \
3109 while (low++ <= high) { \
3110 jint offset = IMMEDIATE_s4; \
3111 NOTE_LABEL (offset+oldpc); } \
3114 #define PRE_FIELD(MAYBE_STATIC, PUT_OR_GET) (void)(IMMEDIATE_u2);
3115 #define PRE_OBJECT(MAYBE_STATIC, PUT_OR_GET) (void)(IMMEDIATE_u2);
3116 #define PRE_INVOKE(MAYBE_STATIC, IS_INTERFACE) \
3117 (void)(IMMEDIATE_u2); \
3118 PC += 2 * IS_INTERFACE /* for invokeinterface */;
3120 #include "javaop.def"
3121 #undef JAVAOP
3123 } /* for */
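/* Translate METHOD's bytecode into trees: note the line-number table,
   verify the bytecode, then expand every reachable instruction in order,
   turning unverified (unreachable) bytecodes into nops. */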
3126 void
3127 expand_byte_code (JCF *jcf, tree method)
3129 int PC;
3130 int i;
3131 const unsigned char *linenumber_pointer;
3132 int dead_code_index = -1;
3133 unsigned char* byte_ops;
3134 long length = DECL_CODE_LENGTH (method);
3135 location_t max_location = input_location;
3137 stack_pointer = 0;
3138 JCF_SEEK (jcf, DECL_CODE_OFFSET (method));
3139 byte_ops = jcf->read_ptr;
3141 /* We make an initial pass of the line number table, to note
3142 which instructions have associated line number entries. */
3143 linenumber_pointer = linenumber_table;
3144 for (i = 0; i < linenumber_count; i++)
3146 int pc = GET_u2 (linenumber_pointer);
3147 linenumber_pointer += 4;
3148 if (pc >= length)
3149 warning (0, "invalid PC in line number table");
3150 else
3152 if ((instruction_bits[pc] & BCODE_HAS_LINENUMBER) != 0)
3153 instruction_bits[pc] |= BCODE_HAS_MULTI_LINENUMBERS;
3154 instruction_bits[pc] |= BCODE_HAS_LINENUMBER;
3158 if (! verify_jvm_instructions_new (jcf, byte_ops, length))
3159 return;
3161 promote_arguments ();
3162 cache_this_class_ref (method);
3163 cache_cpool_data_ref ();
3165 /* Translate bytecodes. */
3166 linenumber_pointer = linenumber_table;
3167 for (PC = 0; PC < length;)
3169 if ((instruction_bits [PC] & BCODE_TARGET) != 0 || PC == 0)
3171 tree label = lookup_label (PC);
3172 flush_quick_stack ();
3173 if ((instruction_bits [PC] & BCODE_TARGET) != 0)
3174 java_add_stmt (build1 (LABEL_EXPR, void_type_node, label));
3175 if ((instruction_bits[PC] & BCODE_VERIFIED) != 0)
3176 load_type_state (PC);
3179 if (! (instruction_bits [PC] & BCODE_VERIFIED))
3181 if (dead_code_index == -1)
3183 /* This is the start of a region of unreachable bytecodes.
3184 They still need to be processed in order for EH ranges
3185 to get handled correctly. However, we can simply
3186 replace these bytecodes with nops. */
3187 dead_code_index = PC;
3190 /* Turn this bytecode into a nop. */
3191 byte_ops[PC] = 0x0;
3193 else
3195 if (dead_code_index != -1)
3197 /* We've just reached the end of a region of dead code. */
3198 if (extra_warnings)
3199 warning (0, "unreachable bytecode from %d to before %d",
3200 dead_code_index, PC);
3201 dead_code_index = -1;
3205 /* Handle possible line number entry for this PC.
3207 This code handles out-of-order and multiple linenumbers per PC,
3208 but is optimized for the case of line numbers increasing
3209 monotonically with PC. */
3210 if ((instruction_bits[PC] & BCODE_HAS_LINENUMBER) != 0)
3212 if ((instruction_bits[PC] & BCODE_HAS_MULTI_LINENUMBERS) != 0
3213 || GET_u2 (linenumber_pointer) != PC)
3214 linenumber_pointer = linenumber_table;
3215 while (linenumber_pointer < linenumber_table + linenumber_count * 4)
3217 int pc = GET_u2 (linenumber_pointer);
3218 linenumber_pointer += 4;
3219 if (pc == PC)
3221 int line = GET_u2 (linenumber_pointer - 2);
3222 input_location = linemap_line_start (line_table, line, 1);
3223 if (input_location > max_location)
3224 max_location = input_location;
3225 if (!(instruction_bits[PC] & BCODE_HAS_MULTI_LINENUMBERS))
3226 break;
3230 maybe_pushlevels (PC);
3231 PC = process_jvm_instruction (PC, byte_ops, length);
3232 maybe_poplevels (PC);
3233 } /* for */
3235 uncache_this_class_ref (method);
3237 if (dead_code_index != -1)
3239 /* We've just reached the end of a region of dead code. */
3240 if (extra_warnings)
3241 warning (0, "unreachable bytecode from %d to the end of the method",
3242 dead_code_index);
3245 DECL_FUNCTION_LAST_LINE (method) = max_location;
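/* Push the value of constant pool entry INDEX onto the quick stack.
   String and Class entries become reference expressions; other entries
   are handled by get_constant. */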
3248 static void
3249 java_push_constant_from_pool (JCF *jcf, int index)
3251 tree c;
3252 if (JPOOL_TAG (jcf, index) == CONSTANT_String)
3254 tree name;
3255 name = get_name_constant (jcf, JPOOL_USHORT1 (jcf, index));
3256 index = alloc_name_constant (CONSTANT_String, name);
3257 c = build_ref_from_constant_pool (index);
3258 c = convert (promote_type (string_type_node), c);
3260 else if (JPOOL_TAG (jcf, index) == CONSTANT_Class
3261 || JPOOL_TAG (jcf, index) == CONSTANT_ResolvedClass)
3263 tree record = get_class_constant (jcf, index);
3264 c = build_class_ref (record);
3266 else
3267 c = get_constant (jcf, index);
3268 push_value (c);
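/* Expand the single bytecode instruction at PC in BYTE_OPS, pushing the
   exception object first if PC starts an exception handler, and return
   the PC of the following instruction. */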
3272 process_jvm_instruction (int PC, const unsigned char* byte_ops,
3273 long length ATTRIBUTE_UNUSED)
3275 const char *opname; /* Temporary ??? */
3276 int oldpc = PC; /* PC at instruction start. */
3278 /* If the instruction is at the beginning of an exception handler,
3279 replace the top of the stack with the thrown object reference. */
3280 if (instruction_bits [PC] & BCODE_EXCEPTION_TARGET)
3282 /* Note that the verifier will not emit a type map at all for
3283 dead exception handlers. In this case we just ignore the
3284 situation. */
3285 if ((instruction_bits[PC] & BCODE_VERIFIED) != 0)
3287 tree type = pop_type (promote_type (throwable_type_node));
3288 push_value (build_exception_object_ref (type));
3292 switch (byte_ops[PC++])
3294 #define JAVAOP(OPNAME, OPCODE, OPKIND, OPERAND_TYPE, OPERAND_VALUE) \
3295 case OPCODE: \
3296 opname = #OPNAME; \
3297 OPKIND(OPERAND_TYPE, OPERAND_VALUE); \
3298 break;
3300 #define RET(OPERAND_TYPE, OPERAND_VALUE) \
3302 int saw_index = 0; \
3303 int index = OPERAND_VALUE; \
3304 (void) saw_index; /* Avoid set but not used warning. */ \
3305 build_java_ret \
3306 (find_local_variable (index, return_address_type_node, oldpc)); \
3309 #define JSR(OPERAND_TYPE, OPERAND_VALUE) \
3311 /* OPERAND_VALUE may have side-effects on PC */ \
3312 int opvalue = OPERAND_VALUE; \
3313 build_java_jsr (oldpc + opvalue, PC); \
3316 /* Push a constant onto the stack. */
3317 #define PUSHC(OPERAND_TYPE, OPERAND_VALUE) \
3318 { int saw_index = 0; int ival = (OPERAND_VALUE); \
3319 if (saw_index) java_push_constant_from_pool (current_jcf, ival); \
3320 else expand_java_pushc (ival, OPERAND_TYPE##_type_node); }
3322 /* internal macro added for use by the WIDE case */
3323 #define LOAD_INTERNAL(OPTYPE, OPVALUE) \
3324 expand_load_internal (OPVALUE, type_map[OPVALUE], oldpc);
3326 /* Push local variable onto the opcode stack. */
3327 #define LOAD(OPERAND_TYPE, OPERAND_VALUE) \
3329 /* have to do this since OPERAND_VALUE may have side-effects */ \
3330 int opvalue = OPERAND_VALUE; \
3331 LOAD_INTERNAL(OPERAND_TYPE##_type_node, opvalue); \
3334 #define RETURN(OPERAND_TYPE, OPERAND_VALUE) \
3335 expand_java_return (OPERAND_TYPE##_type_node)
3337 #define REM_EXPR TRUNC_MOD_EXPR
3338 #define BINOP(OPERAND_TYPE, OPERAND_VALUE) \
3339 expand_java_binop (OPERAND_TYPE##_type_node, OPERAND_VALUE##_EXPR)
3341 #define FIELD(IS_STATIC, IS_PUT) \
3342 expand_java_field_op (IS_STATIC, IS_PUT, IMMEDIATE_u2)
3344 #define TEST(OPERAND_TYPE, CONDITION) \
3345 expand_test (CONDITION##_EXPR, OPERAND_TYPE##_type_node, oldpc+IMMEDIATE_s2)
3347 #define COND(OPERAND_TYPE, CONDITION) \
3348 expand_cond (CONDITION##_EXPR, OPERAND_TYPE##_type_node, oldpc+IMMEDIATE_s2)
3350 #define BRANCH(OPERAND_TYPE, OPERAND_VALUE) \
3351 BRANCH_##OPERAND_TYPE (OPERAND_VALUE)
3353 #define BRANCH_GOTO(OPERAND_VALUE) \
3354 expand_java_goto (oldpc + OPERAND_VALUE)
3356 #define BRANCH_CALL(OPERAND_VALUE) \
3357 expand_java_call (oldpc + OPERAND_VALUE, oldpc)
3359 #if 0
3360 #define BRANCH_RETURN(OPERAND_VALUE) \
3362 tree type = OPERAND_TYPE##_type_node; \
3363 tree value = find_local_variable (OPERAND_VALUE, type, oldpc); \
3364 expand_java_ret (value); \
3366 #endif
3368 #define NOT_IMPL(OPERAND_TYPE, OPERAND_VALUE) \
3369 fprintf (stderr, "%3d: %s ", oldpc, opname); \
3370 fprintf (stderr, "(not implemented)\n")
3371 #define NOT_IMPL1(OPERAND_VALUE) \
3372 fprintf (stderr, "%3d: %s ", oldpc, opname); \
3373 fprintf (stderr, "(not implemented)\n")
3375 #define BRANCH_RETURN(OPERAND_VALUE) NOT_IMPL1(OPERAND_VALUE)
3377 #define STACK(SUBOP, COUNT) STACK_##SUBOP (COUNT)
3379 #define STACK_POP(COUNT) java_stack_pop (COUNT)
3381 #define STACK_SWAP(COUNT) java_stack_swap()
3383 #define STACK_DUP(COUNT) java_stack_dup (COUNT, 0)
3384 #define STACK_DUPx1(COUNT) java_stack_dup (COUNT, 1)
3385 #define STACK_DUPx2(COUNT) java_stack_dup (COUNT, 2)
3387 #define SWITCH(OPERAND_TYPE, TABLE_OR_LOOKUP) \
3388 PC = (PC + 3) / 4 * 4; TABLE_OR_LOOKUP##_SWITCH
3390 #define LOOKUP_SWITCH \
3391 { jint default_offset = IMMEDIATE_s4; jint npairs = IMMEDIATE_s4; \
3392 tree selector = pop_value (INT_type_node); \
3393 tree switch_expr = expand_java_switch (selector, oldpc + default_offset); \
3394 while (--npairs >= 0) \
3396 jint match = IMMEDIATE_s4; jint offset = IMMEDIATE_s4; \
3397 expand_java_add_case (switch_expr, match, oldpc + offset); \
3401 #define TABLE_SWITCH \
3402 { jint default_offset = IMMEDIATE_s4; \
3403 jint low = IMMEDIATE_s4; jint high = IMMEDIATE_s4; \
3404 tree selector = pop_value (INT_type_node); \
3405 tree switch_expr = expand_java_switch (selector, oldpc + default_offset); \
3406 for (; low <= high; low++) \
3408 jint offset = IMMEDIATE_s4; \
3409 expand_java_add_case (switch_expr, low, oldpc + offset); \
3413 #define INVOKE(MAYBE_STATIC, IS_INTERFACE) \
3414 { int opcode = byte_ops[PC-1]; \
3415 int method_ref_index = IMMEDIATE_u2; \
3416 int nargs; \
3417 if (IS_INTERFACE) { nargs = IMMEDIATE_u1; (void) IMMEDIATE_u1; } \
3418 else nargs = -1; \
3419 expand_invoke (opcode, method_ref_index, nargs); \
3422 /* Handle new, checkcast, instanceof */
3423 #define OBJECT(TYPE, OP) \
3424 expand_java_##OP (get_class_constant (current_jcf, IMMEDIATE_u2))
3426 #define ARRAY(OPERAND_TYPE, SUBOP) ARRAY_##SUBOP(OPERAND_TYPE)
3428 #define ARRAY_LOAD(OPERAND_TYPE) \
3430 expand_java_arrayload( OPERAND_TYPE##_type_node ); \
3433 #define ARRAY_STORE(OPERAND_TYPE) \
3435 expand_java_arraystore( OPERAND_TYPE##_type_node ); \
3438 #define ARRAY_LENGTH(OPERAND_TYPE) expand_java_array_length();
3439 #define ARRAY_NEW(OPERAND_TYPE) ARRAY_NEW_##OPERAND_TYPE()
3440 #define ARRAY_NEW_PTR() \
3441 push_value (build_anewarray (get_class_constant (current_jcf, \
3442 IMMEDIATE_u2), \
3443 pop_value (int_type_node)));
3444 #define ARRAY_NEW_NUM() \
3446 int atype = IMMEDIATE_u1; \
3447 push_value (build_newarray (atype, pop_value (int_type_node)));\
3449 #define ARRAY_NEW_MULTI() \
3451 tree klass = get_class_constant (current_jcf, IMMEDIATE_u2 ); \
3452 int ndims = IMMEDIATE_u1; \
3453 expand_java_multianewarray( klass, ndims ); \
3456 #define UNOP(OPERAND_TYPE, OPERAND_VALUE) \
3457 push_value (fold_build1 (NEGATE_EXPR, OPERAND_TYPE##_type_node, \
3458 pop_value (OPERAND_TYPE##_type_node)));
3460 #define CONVERT2(FROM_TYPE, TO_TYPE) \
3462 push_value (build1 (NOP_EXPR, int_type_node, \
3463 (convert (TO_TYPE##_type_node, \
3464 pop_value (FROM_TYPE##_type_node))))); \
3467 #define CONVERT(FROM_TYPE, TO_TYPE) \
3469 push_value (convert (TO_TYPE##_type_node, \
3470 pop_value (FROM_TYPE##_type_node))); \
3473 /* internal macro added for use by the WIDE case
3474 Added TREE_TYPE (decl) assignment, apbianco */
3475 #define STORE_INTERNAL(OPTYPE, OPVALUE) \
3477 tree decl, value; \
3478 int index = OPVALUE; \
3479 tree type = OPTYPE; \
3480 value = pop_value (type); \
3481 type = TREE_TYPE (value); \
3482 decl = find_local_variable (index, type, oldpc); \
3483 set_local_type (index, type); \
3484 java_add_stmt (build2 (MODIFY_EXPR, type, decl, value)); \
3487 #define STORE(OPERAND_TYPE, OPERAND_VALUE) \
3489 /* have to do this since OPERAND_VALUE may have side-effects */ \
3490 int opvalue = OPERAND_VALUE; \
3491 STORE_INTERNAL(OPERAND_TYPE##_type_node, opvalue); \
3494 #define SPECIAL(OPERAND_TYPE, INSTRUCTION) \
3495 SPECIAL_##INSTRUCTION(OPERAND_TYPE)
3497 #define SPECIAL_ENTER(IGNORED) MONITOR_OPERATION (soft_monitorenter_node)
3498 #define SPECIAL_EXIT(IGNORED) MONITOR_OPERATION (soft_monitorexit_node)
3500 #define MONITOR_OPERATION(call) \
3502 tree o = pop_value (ptr_type_node); \
3503 tree c; \
3504 flush_quick_stack (); \
3505 c = build_java_monitor (call, o); \
3506 TREE_SIDE_EFFECTS (c) = 1; \
3507 java_add_stmt (c); \
3510 #define SPECIAL_IINC(IGNORED) \
3512 unsigned int local_var_index = IMMEDIATE_u1; \
3513 int ival = IMMEDIATE_s1; \
3514 expand_iinc(local_var_index, ival, oldpc); \
3517 #define SPECIAL_WIDE(IGNORED) \
3519 int modified_opcode = IMMEDIATE_u1; \
3520 unsigned int local_var_index = IMMEDIATE_u2; \
3521 switch (modified_opcode) \
3523 case OPCODE_iinc: \
3525 int ival = IMMEDIATE_s2; \
3526 expand_iinc (local_var_index, ival, oldpc); \
3527 break; \
3529 case OPCODE_iload: \
3530 case OPCODE_lload: \
3531 case OPCODE_fload: \
3532 case OPCODE_dload: \
3533 case OPCODE_aload: \
3535 /* duplicate code from LOAD macro */ \
3536 LOAD_INTERNAL(operand_type[modified_opcode], local_var_index); \
3537 break; \
3539 case OPCODE_istore: \
3540 case OPCODE_lstore: \
3541 case OPCODE_fstore: \
3542 case OPCODE_dstore: \
3543 case OPCODE_astore: \
3545 STORE_INTERNAL(operand_type[modified_opcode], local_var_index); \
3546 break; \
3548 default: \
3549 error ("unrecognized wide sub-instruction"); \
3553 #define SPECIAL_THROW(IGNORED) \
3554 build_java_athrow (pop_value (throwable_type_node))
3556 #define SPECIAL_BREAK NOT_IMPL1
3557 #define IMPL NOT_IMPL
3559 #include "javaop.def"
3560 #undef JAVAOP
3561 default:
3562 fprintf (stderr, "%3d: unknown(%3d)\n", oldpc, byte_ops[PC]);
3564 return PC;
3567 /* Return the opcode at PC in the code section pointed to by
3568 CODE_OFFSET. */
3570 static unsigned char
3571 peek_opcode_at_pc (JCF *jcf, int code_offset, int pc)
3573 unsigned char opcode;
3574 long absolute_offset = (long)JCF_TELL (jcf);
3576 JCF_SEEK (jcf, code_offset);
3577 opcode = jcf->read_ptr [pc];
3578 JCF_SEEK (jcf, absolute_offset);
3579 return opcode;
3582 /* Some bytecode compilers are emitting accurate LocalVariableTable
3583 attributes. Here's an example:
3585 PC <t>store_<n>
3586 PC+1 ...
3588 Attribute "LocalVariableTable"
3589 slot #<n>: ... (PC: PC+1 length: L)
3591 This is accurate because the local in slot <n> really exists after
3592 the opcode at PC is executed, hence from PC+1 to PC+1+L.
3594 This procedure recognizes this situation and extends the live range
3595 of the local in SLOT to START_PC-1 or START_PC-2 (depending on the
3596 length of the store instruction.)
3598 This function is used by `give_name_to_locals' so that a local's
3599 DECL features a DECL_LOCAL_START_PC such that the first related
3600 store operation will use DECL as a destination, not an unrelated
3601 temporary created for the occasion.
3603 This function uses a global (instruction_bits) that `note_instructions'
3604 should have allocated and filled properly. */
3607 maybe_adjust_start_pc (struct JCF *jcf, int code_offset,
3608 int start_pc, int slot)
3610 int first, index, opcode;
3611 int pc, insn_pc;
3612 int wide_found = 0;
3614 if (!start_pc)
3615 return start_pc;
3617 first = index = -1;
3619 /* Find the last previous instruction and remember it. */
3620 for (pc = start_pc-1; pc; pc--)
3621 if (instruction_bits [pc] & BCODE_INSTRUCTION_START)
3622 break;
3623 insn_pc = pc;
3625 /* Retrieve the instruction, handle `wide'. */
3626 opcode = (int) peek_opcode_at_pc (jcf, code_offset, pc++);
3627 if (opcode == OPCODE_wide)
3629 wide_found = 1;
3630 opcode = (int) peek_opcode_at_pc (jcf, code_offset, pc++);
3633 switch (opcode)
3635 case OPCODE_astore_0:
3636 case OPCODE_astore_1:
3637 case OPCODE_astore_2:
3638 case OPCODE_astore_3:
3639 first = OPCODE_astore_0;
3640 break;
3642 case OPCODE_istore_0:
3643 case OPCODE_istore_1:
3644 case OPCODE_istore_2:
3645 case OPCODE_istore_3:
3646 first = OPCODE_istore_0;
3647 break;
3649 case OPCODE_lstore_0:
3650 case OPCODE_lstore_1:
3651 case OPCODE_lstore_2:
3652 case OPCODE_lstore_3:
3653 first = OPCODE_lstore_0;
3654 break;
3656 case OPCODE_fstore_0:
3657 case OPCODE_fstore_1:
3658 case OPCODE_fstore_2:
3659 case OPCODE_fstore_3:
3660 first = OPCODE_fstore_0;
3661 break;
3663 case OPCODE_dstore_0:
3664 case OPCODE_dstore_1:
3665 case OPCODE_dstore_2:
3666 case OPCODE_dstore_3:
3667 first = OPCODE_dstore_0;
3668 break;
3670 case OPCODE_astore:
3671 case OPCODE_istore:
3672 case OPCODE_lstore:
3673 case OPCODE_fstore:
3674 case OPCODE_dstore:
3675 index = peek_opcode_at_pc (jcf, code_offset, pc);
3676 if (wide_found)
3678 int other = peek_opcode_at_pc (jcf, code_offset, ++pc);
3679 index = (other << 8) + index;
3681 break;
3684 /* Now we decide: first >0 means we have a <t>store_<n>, index >0
3685 means we have a <t>store. */
3686 if ((first > 0 && opcode - first == slot) || (index > 0 && index == slot))
3687 start_pc = insn_pc;
3689 return start_pc;
3692 /* Build a node to represent empty statements and blocks. */
3694 tree
3695 build_java_empty_stmt (void)
3697 tree t = build_empty_stmt (input_location);
3698 return t;
3701 /* Promote all args of integral type before generating any code. */
3703 static void
3704 promote_arguments (void)
3706 int i;
3707 tree arg;
3708 for (arg = DECL_ARGUMENTS (current_function_decl), i = 0;
3709 arg != NULL_TREE; arg = DECL_CHAIN (arg), i++)
3711 tree arg_type = TREE_TYPE (arg);
3712 if (INTEGRAL_TYPE_P (arg_type)
3713 && TYPE_PRECISION (arg_type) < 32)
3715 tree copy = find_local_variable (i, integer_type_node, -1);
3716 java_add_stmt (build2 (MODIFY_EXPR, integer_type_node,
3717 copy,
3718 fold_convert (integer_type_node, arg)));
3720 if (TYPE_IS_WIDE (arg_type))
3721 i++;
3725 /* Create a local variable that points to the constant pool. */
3727 static void
3728 cache_cpool_data_ref (void)
3730 if (optimize)
3732 tree cpool;
3733 tree d = build_constant_data_ref (flag_indirect_classes);
3734 tree cpool_ptr = build_decl (input_location, VAR_DECL, NULL_TREE,
3735 build_pointer_type (TREE_TYPE (d)));
3736 java_add_local_var (cpool_ptr);
3737 TREE_CONSTANT (cpool_ptr) = 1;
3739 java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (cpool_ptr),
3740 cpool_ptr, build_address_of (d)));
3741 cpool = build1 (INDIRECT_REF, TREE_TYPE (d), cpool_ptr);
3742 TREE_THIS_NOTRAP (cpool) = 1;
3743 TYPE_CPOOL_DATA_REF (output_class) = cpool;
3747 #include "gt-java-expr.h"