Reverting merge from trunk
[official-gcc.git] / gcc / java / expr.c
bloba434913d47514af006d318eb539f2e6717861396
1 /* Process expressions for the GNU compiler for the Java(TM) language.
2 Copyright (C) 1996-2013 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>.
20 Java and all Java-based marks are trademarks or registered trademarks
21 of Sun Microsystems, Inc. in the United States and other countries.
22 The Free Software Foundation is independent of Sun Microsystems, Inc. */
24 /* Hacked by Per Bothner <bothner@cygnus.com> February 1996. */
26 #include "config.h"
27 #include "system.h"
28 #include "coretypes.h"
29 #include "tm.h" /* For INT_TYPE_SIZE,
30 TARGET_VTABLE_USES_DESCRIPTORS,
31 BITS_PER_UNIT,
32 MODIFY_JNI_METHOD_CALL and
33 PARM_BOUNDARY. */
35 #include "tree.h"
36 #include "flags.h"
37 #include "java-tree.h"
38 #include "javaop.h"
39 #include "java-opcodes.h"
40 #include "jcf.h"
41 #include "java-except.h"
42 #include "parse.h"
43 #include "diagnostic-core.h"
44 #include "ggc.h"
45 #include "tree-iterator.h"
46 #include "target.h"
48 static void flush_quick_stack (void);
49 static void push_value (tree);
50 static tree pop_value (tree);
51 static void java_stack_swap (void);
52 static void java_stack_dup (int, int);
53 static void build_java_athrow (tree);
54 static void build_java_jsr (int, int);
55 static void build_java_ret (tree);
56 static void expand_java_multianewarray (tree, int);
57 static void expand_java_arraystore (tree);
58 static void expand_java_arrayload (tree);
59 static void expand_java_array_length (void);
60 static tree build_java_monitor (tree, tree);
61 static void expand_java_pushc (int, tree);
62 static void expand_java_return (tree);
63 static void expand_load_internal (int, tree, int);
64 static void expand_java_NEW (tree);
65 static void expand_java_INSTANCEOF (tree);
66 static void expand_java_CHECKCAST (tree);
67 static void expand_iinc (unsigned int, int, int);
68 static void expand_java_binop (tree, enum tree_code);
69 static void note_label (int, int);
70 static void expand_compare (enum tree_code, tree, tree, int);
71 static void expand_test (enum tree_code, tree, int);
72 static void expand_cond (enum tree_code, tree, int);
73 static void expand_java_goto (int);
74 static tree expand_java_switch (tree, int);
75 static void expand_java_add_case (tree, int, int);
76 static vec<tree, va_gc> *pop_arguments (tree);
77 static void expand_invoke (int, int, int);
78 static void expand_java_field_op (int, int, int);
79 static void java_push_constant_from_pool (struct JCF *, int);
80 static void java_stack_pop (int);
81 static tree build_java_throw_out_of_bounds_exception (tree);
82 static tree build_java_check_indexed_type (tree, tree);
83 static unsigned char peek_opcode_at_pc (struct JCF *, int, int);
84 static void promote_arguments (void);
85 static void cache_cpool_data_ref (void);
87 static GTY(()) tree operand_type[59];
89 static GTY(()) tree methods_ident;
90 static GTY(()) tree ncode_ident;
91 tree dtable_ident = NULL_TREE;
93 /* Set to nonzero value in order to emit class initialization code
94 before static field references. */
95 int always_initialize_class_p = 0;
97 /* We store the stack state in two places:
98 Within a basic block, we use the quick_stack, which is a vec of expression
99 nodes.
100 This is the top part of the stack; below that we use find_stack_slot.
101 At the end of a basic block, the quick_stack must be flushed
102 to the stack slot array (as handled by find_stack_slot).
103 Using quick_stack generates better code (especially when
104 compiled without optimization), because we do not have to
105 explicitly store and load trees to temporary variables.
107 If a variable is on the quick stack, it means the value of variable
108 when the quick stack was last flushed. Conceptually, flush_quick_stack
109 saves all the quick_stack elements in parallel. However, that is
110 complicated, so it actually saves them (i.e. copies each stack value
111 to is home virtual register) from low indexes. This allows a quick_stack
112 element at index i (counting from the bottom of stack the) to references
113 slot virtuals for register that are >= i, but not those that are deeper.
114 This convention makes most operations easier. For example iadd works
115 even when the stack contains (reg[0], reg[1]): It results in the
116 stack containing (reg[0]+reg[1]), which is OK. However, some stack
117 operations are more complicated. For example dup given a stack
118 containing (reg[0]) would yield (reg[0], reg[0]), which would violate
119 the convention, since stack value 1 would refer to a register with
120 lower index (reg[0]), which flush_quick_stack does not safely handle.
121 So dup cannot just add an extra element to the quick_stack, but iadd can.
124 static GTY(()) vec<tree, va_gc> *quick_stack;
126 /* The physical memory page size used in this computer. See
127 build_field_ref(). */
128 static GTY(()) tree page_size;
130 /* The stack pointer of the Java virtual machine.
131 This does include the size of the quick_stack. */
133 int stack_pointer;
135 const unsigned char *linenumber_table;
136 int linenumber_count;
138 /* Largest pc so far in this method that has been passed to lookup_label. */
139 int highest_label_pc_this_method = -1;
141 /* Base value for this method to add to pc to get generated label. */
142 int start_label_pc_this_method = 0;
144 void
145 init_expr_processing (void)
147 operand_type[21] = operand_type[54] = int_type_node;
148 operand_type[22] = operand_type[55] = long_type_node;
149 operand_type[23] = operand_type[56] = float_type_node;
150 operand_type[24] = operand_type[57] = double_type_node;
151 operand_type[25] = operand_type[58] = ptr_type_node;
154 tree
155 java_truthvalue_conversion (tree expr)
157 /* It is simpler and generates better code to have only TRUTH_*_EXPR
158 or comparison expressions as truth values at this level.
160 This function should normally be identity for Java. */
162 switch (TREE_CODE (expr))
164 case EQ_EXPR: case NE_EXPR: case UNEQ_EXPR: case LTGT_EXPR:
165 case LE_EXPR: case GE_EXPR: case LT_EXPR: case GT_EXPR:
166 case UNLE_EXPR: case UNGE_EXPR: case UNLT_EXPR: case UNGT_EXPR:
167 case ORDERED_EXPR: case UNORDERED_EXPR:
168 case TRUTH_ANDIF_EXPR:
169 case TRUTH_ORIF_EXPR:
170 case TRUTH_AND_EXPR:
171 case TRUTH_OR_EXPR:
172 case TRUTH_XOR_EXPR:
173 case TRUTH_NOT_EXPR:
174 case ERROR_MARK:
175 return expr;
177 case INTEGER_CST:
178 return integer_zerop (expr) ? boolean_false_node : boolean_true_node;
180 case REAL_CST:
181 return real_zerop (expr) ? boolean_false_node : boolean_true_node;
183 /* are these legal? XXX JH */
184 case NEGATE_EXPR:
185 case ABS_EXPR:
186 case FLOAT_EXPR:
187 /* These don't change whether an object is nonzero or zero. */
188 return java_truthvalue_conversion (TREE_OPERAND (expr, 0));
190 case COND_EXPR:
191 /* Distribute the conversion into the arms of a COND_EXPR. */
192 return fold_build3 (COND_EXPR, boolean_type_node, TREE_OPERAND (expr, 0),
193 java_truthvalue_conversion (TREE_OPERAND (expr, 1)),
194 java_truthvalue_conversion (TREE_OPERAND (expr, 2)));
196 case NOP_EXPR:
197 /* If this is widening the argument, we can ignore it. */
198 if (TYPE_PRECISION (TREE_TYPE (expr))
199 >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr, 0))))
200 return java_truthvalue_conversion (TREE_OPERAND (expr, 0));
201 /* fall through to default */
203 default:
204 return fold_build2 (NE_EXPR, boolean_type_node,
205 expr, boolean_false_node);
209 /* Save any stack slots that happen to be in the quick_stack into their
210 home virtual register slots.
212 The copy order is from low stack index to high, to support the invariant
213 that the expression for a slot may contain decls for stack slots with
214 higher (or the same) index, but not lower. */
216 static void
217 flush_quick_stack (void)
219 int stack_index = stack_pointer;
220 unsigned ix;
221 tree t;
223 /* Count the number of slots the quick stack is holding. */
224 for (ix = 0; vec_safe_iterate (quick_stack, ix, &t); ix++)
225 stack_index -= 1 + TYPE_IS_WIDE (TREE_TYPE (t));
227 for (ix = 0; vec_safe_iterate (quick_stack, ix, &t); ix++)
229 tree decl, type = TREE_TYPE (t);
231 decl = find_stack_slot (stack_index, type);
232 if (decl != t)
233 java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (t), decl, t));
234 stack_index += 1 + TYPE_IS_WIDE (type);
237 vec_safe_truncate (quick_stack, 0);
240 /* Push TYPE on the type stack.
241 Return true on success, 0 on overflow. */
244 push_type_0 (tree type)
246 int n_words;
247 type = promote_type (type);
248 n_words = 1 + TYPE_IS_WIDE (type);
249 if (stack_pointer + n_words > DECL_MAX_STACK (current_function_decl))
250 return 0;
251 /* Allocate decl for this variable now, so we get a temporary that
252 survives the whole method. */
253 find_stack_slot (stack_pointer, type);
254 stack_type_map[stack_pointer++] = type;
255 n_words--;
256 while (--n_words >= 0)
257 stack_type_map[stack_pointer++] = TYPE_SECOND;
258 return 1;
261 void
262 push_type (tree type)
264 int r = push_type_0 (type);
265 gcc_assert (r);
268 static void
269 push_value (tree value)
271 tree type = TREE_TYPE (value);
272 if (TYPE_PRECISION (type) < 32 && INTEGRAL_TYPE_P (type))
274 type = promote_type (type);
275 value = convert (type, value);
277 push_type (type);
278 vec_safe_push (quick_stack, value);
280 /* If the value has a side effect, then we need to evaluate it
281 whether or not the result is used. If the value ends up on the
282 quick stack and is then popped, this won't happen -- so we flush
283 the quick stack. It is safest to simply always flush, though,
284 since TREE_SIDE_EFFECTS doesn't capture COMPONENT_REF, and for
285 the latter we may need to strip conversions. */
286 flush_quick_stack ();
289 /* Pop a type from the type stack.
290 TYPE is the expected type. Return the actual type, which must be
291 convertible to TYPE.
292 On an error, *MESSAGEP is set to a freshly malloc'd error message. */
294 tree
295 pop_type_0 (tree type, char **messagep)
297 int n_words;
298 tree t;
299 *messagep = NULL;
300 if (TREE_CODE (type) == RECORD_TYPE)
301 type = promote_type (type);
302 n_words = 1 + TYPE_IS_WIDE (type);
303 if (stack_pointer < n_words)
305 *messagep = xstrdup ("stack underflow");
306 return type;
308 while (--n_words > 0)
310 if (stack_type_map[--stack_pointer] != void_type_node)
312 *messagep = xstrdup ("Invalid multi-word value on type stack");
313 return type;
316 t = stack_type_map[--stack_pointer];
317 if (type == NULL_TREE || t == type)
318 return t;
319 if (TREE_CODE (t) == TREE_LIST)
323 tree tt = TREE_PURPOSE (t);
324 if (! can_widen_reference_to (tt, type))
326 t = tt;
327 goto fail;
329 t = TREE_CHAIN (t);
331 while (t);
332 return t;
334 if (INTEGRAL_TYPE_P (type) && INTEGRAL_TYPE_P (t)
335 && TYPE_PRECISION (type) <= 32 && TYPE_PRECISION (t) <= 32)
336 return t;
337 if (TREE_CODE (type) == POINTER_TYPE && TREE_CODE (t) == POINTER_TYPE)
339 /* If the expected type we've been passed is object or ptr
340 (i.e. void*), the caller needs to know the real type. */
341 if (type == ptr_type_node || type == object_ptr_type_node)
342 return t;
344 /* Since the verifier has already run, we know that any
345 types we see will be compatible. In BC mode, this fact
346 may be checked at runtime, but if that is so then we can
347 assume its truth here as well. So, we always succeed
348 here, with the expected type. */
349 return type;
352 if (! flag_verify_invocations && flag_indirect_dispatch
353 && t == object_ptr_type_node)
355 if (type != ptr_type_node)
356 warning (0, "need to insert runtime check for %s",
357 xstrdup (lang_printable_name (type, 0)));
358 return type;
361 /* lang_printable_name uses a static buffer, so we must save the result
362 from calling it the first time. */
363 fail:
365 char *temp = xstrdup (lang_printable_name (type, 0));
366 /* If the stack contains a multi-word type, keep popping the stack until
367 the real type is found. */
368 while (t == void_type_node)
369 t = stack_type_map[--stack_pointer];
370 *messagep = concat ("expected type '", temp,
371 "' but stack contains '", lang_printable_name (t, 0),
372 "'", NULL);
373 free (temp);
375 return type;
378 /* Pop a type from the type stack.
379 TYPE is the expected type. Return the actual type, which must be
380 convertible to TYPE, otherwise call error. */
382 tree
383 pop_type (tree type)
385 char *message = NULL;
386 type = pop_type_0 (type, &message);
387 if (message != NULL)
389 error ("%s", message);
390 free (message);
392 return type;
396 /* Return true if two type assertions are equal. */
398 static int
399 type_assertion_eq (const void * k1_p, const void * k2_p)
401 const type_assertion k1 = *(const type_assertion *)k1_p;
402 const type_assertion k2 = *(const type_assertion *)k2_p;
403 return (k1.assertion_code == k2.assertion_code
404 && k1.op1 == k2.op1
405 && k1.op2 == k2.op2);
408 /* Hash a type assertion. */
410 static hashval_t
411 type_assertion_hash (const void *p)
413 const type_assertion *k_p = (const type_assertion *) p;
414 hashval_t hash = iterative_hash (&k_p->assertion_code, sizeof
415 k_p->assertion_code, 0);
417 switch (k_p->assertion_code)
419 case JV_ASSERT_TYPES_COMPATIBLE:
420 hash = iterative_hash (&TYPE_UID (k_p->op2), sizeof TYPE_UID (k_p->op2),
421 hash);
422 /* Fall through. */
424 case JV_ASSERT_IS_INSTANTIABLE:
425 hash = iterative_hash (&TYPE_UID (k_p->op1), sizeof TYPE_UID (k_p->op1),
426 hash);
427 /* Fall through. */
429 case JV_ASSERT_END_OF_TABLE:
430 break;
432 default:
433 gcc_unreachable ();
436 return hash;
439 /* Add an entry to the type assertion table for the given class.
440 KLASS is the class for which this assertion will be evaluated by the
441 runtime during loading/initialization.
442 ASSERTION_CODE is the 'opcode' or type of this assertion: see java-tree.h.
443 OP1 and OP2 are the operands. The tree type of these arguments may be
444 specific to each assertion_code. */
446 void
447 add_type_assertion (tree klass, int assertion_code, tree op1, tree op2)
449 htab_t assertions_htab;
450 type_assertion as;
451 void **as_pp;
453 assertions_htab = TYPE_ASSERTIONS (klass);
454 if (assertions_htab == NULL)
456 assertions_htab = htab_create_ggc (7, type_assertion_hash,
457 type_assertion_eq, NULL);
458 TYPE_ASSERTIONS (current_class) = assertions_htab;
461 as.assertion_code = assertion_code;
462 as.op1 = op1;
463 as.op2 = op2;
465 as_pp = htab_find_slot (assertions_htab, &as, INSERT);
467 /* Don't add the same assertion twice. */
468 if (*as_pp)
469 return;
471 *as_pp = ggc_alloc_type_assertion ();
472 **(type_assertion **)as_pp = as;
476 /* Return 1 if SOURCE_TYPE can be safely widened to TARGET_TYPE.
477 Handles array types and interfaces. */
480 can_widen_reference_to (tree source_type, tree target_type)
482 if (source_type == ptr_type_node || target_type == object_ptr_type_node)
483 return 1;
485 /* Get rid of pointers */
486 if (TREE_CODE (source_type) == POINTER_TYPE)
487 source_type = TREE_TYPE (source_type);
488 if (TREE_CODE (target_type) == POINTER_TYPE)
489 target_type = TREE_TYPE (target_type);
491 if (source_type == target_type)
492 return 1;
494 /* FIXME: This is very pessimistic, in that it checks everything,
495 even if we already know that the types are compatible. If we're
496 to support full Java class loader semantics, we need this.
497 However, we could do something more optimal. */
498 if (! flag_verify_invocations)
500 add_type_assertion (current_class, JV_ASSERT_TYPES_COMPATIBLE,
501 source_type, target_type);
503 if (!quiet_flag)
504 warning (0, "assert: %s is assign compatible with %s",
505 xstrdup (lang_printable_name (target_type, 0)),
506 xstrdup (lang_printable_name (source_type, 0)));
507 /* Punt everything to runtime. */
508 return 1;
511 if (TYPE_DUMMY (source_type) || TYPE_DUMMY (target_type))
513 return 1;
515 else
517 if (TYPE_ARRAY_P (source_type) || TYPE_ARRAY_P (target_type))
519 HOST_WIDE_INT source_length, target_length;
520 if (TYPE_ARRAY_P (source_type) != TYPE_ARRAY_P (target_type))
522 /* An array implements Cloneable and Serializable. */
523 tree name = DECL_NAME (TYPE_NAME (target_type));
524 return (name == java_lang_cloneable_identifier_node
525 || name == java_io_serializable_identifier_node);
527 target_length = java_array_type_length (target_type);
528 if (target_length >= 0)
530 source_length = java_array_type_length (source_type);
531 if (source_length != target_length)
532 return 0;
534 source_type = TYPE_ARRAY_ELEMENT (source_type);
535 target_type = TYPE_ARRAY_ELEMENT (target_type);
536 if (source_type == target_type)
537 return 1;
538 if (TREE_CODE (source_type) != POINTER_TYPE
539 || TREE_CODE (target_type) != POINTER_TYPE)
540 return 0;
541 return can_widen_reference_to (source_type, target_type);
543 else
545 int source_depth = class_depth (source_type);
546 int target_depth = class_depth (target_type);
548 if (TYPE_DUMMY (source_type) || TYPE_DUMMY (target_type))
550 if (! quiet_flag)
551 warning (0, "assert: %s is assign compatible with %s",
552 xstrdup (lang_printable_name (target_type, 0)),
553 xstrdup (lang_printable_name (source_type, 0)));
554 return 1;
557 /* class_depth can return a negative depth if an error occurred */
558 if (source_depth < 0 || target_depth < 0)
559 return 0;
561 if (CLASS_INTERFACE (TYPE_NAME (target_type)))
563 /* target_type is OK if source_type or source_type ancestors
564 implement target_type. We handle multiple sub-interfaces */
565 tree binfo, base_binfo;
566 int i;
568 for (binfo = TYPE_BINFO (source_type), i = 0;
569 BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
570 if (can_widen_reference_to
571 (BINFO_TYPE (base_binfo), target_type))
572 return 1;
574 if (!i)
575 return 0;
578 for ( ; source_depth > target_depth; source_depth--)
580 source_type
581 = BINFO_TYPE (BINFO_BASE_BINFO (TYPE_BINFO (source_type), 0));
583 return source_type == target_type;
588 static tree
589 pop_value (tree type)
591 type = pop_type (type);
592 if (vec_safe_length (quick_stack) != 0)
593 return quick_stack->pop ();
594 else
595 return find_stack_slot (stack_pointer, promote_type (type));
599 /* Pop and discard the top COUNT stack slots. */
601 static void
602 java_stack_pop (int count)
604 while (count > 0)
606 tree type;
608 gcc_assert (stack_pointer != 0);
610 type = stack_type_map[stack_pointer - 1];
611 if (type == TYPE_SECOND)
613 count--;
614 gcc_assert (stack_pointer != 1 && count > 0);
616 type = stack_type_map[stack_pointer - 2];
618 pop_value (type);
619 count--;
623 /* Implement the 'swap' operator (to swap two top stack slots). */
625 static void
626 java_stack_swap (void)
628 tree type1, type2;
629 tree temp;
630 tree decl1, decl2;
632 if (stack_pointer < 2
633 || (type1 = stack_type_map[stack_pointer - 1]) == TYPE_SECOND
634 || (type2 = stack_type_map[stack_pointer - 2]) == TYPE_SECOND
635 || TYPE_IS_WIDE (type1) || TYPE_IS_WIDE (type2))
636 /* Bad stack swap. */
637 abort ();
638 /* Bad stack swap. */
640 flush_quick_stack ();
641 decl1 = find_stack_slot (stack_pointer - 1, type1);
642 decl2 = find_stack_slot (stack_pointer - 2, type2);
643 temp = build_decl (input_location, VAR_DECL, NULL_TREE, type1);
644 java_add_local_var (temp);
645 java_add_stmt (build2 (MODIFY_EXPR, type1, temp, decl1));
646 java_add_stmt (build2 (MODIFY_EXPR, type2,
647 find_stack_slot (stack_pointer - 1, type2),
648 decl2));
649 java_add_stmt (build2 (MODIFY_EXPR, type1,
650 find_stack_slot (stack_pointer - 2, type1),
651 temp));
652 stack_type_map[stack_pointer - 1] = type2;
653 stack_type_map[stack_pointer - 2] = type1;
656 static void
657 java_stack_dup (int size, int offset)
659 int low_index = stack_pointer - size - offset;
660 int dst_index;
661 if (low_index < 0)
662 error ("stack underflow - dup* operation");
664 flush_quick_stack ();
666 stack_pointer += size;
667 dst_index = stack_pointer;
669 for (dst_index = stack_pointer; --dst_index >= low_index; )
671 tree type;
672 int src_index = dst_index - size;
673 if (src_index < low_index)
674 src_index = dst_index + size + offset;
675 type = stack_type_map [src_index];
676 if (type == TYPE_SECOND)
678 /* Dup operation splits 64-bit number. */
679 gcc_assert (src_index > low_index);
681 stack_type_map[dst_index] = type;
682 src_index--; dst_index--;
683 type = stack_type_map[src_index];
684 gcc_assert (TYPE_IS_WIDE (type));
686 else
687 gcc_assert (! TYPE_IS_WIDE (type));
689 if (src_index != dst_index)
691 tree src_decl = find_stack_slot (src_index, type);
692 tree dst_decl = find_stack_slot (dst_index, type);
694 java_add_stmt
695 (build2 (MODIFY_EXPR, TREE_TYPE (dst_decl), dst_decl, src_decl));
696 stack_type_map[dst_index] = type;
701 /* Calls _Jv_Throw or _Jv_Sjlj_Throw. Discard the contents of the
702 value stack. */
704 static void
705 build_java_athrow (tree node)
707 tree call;
709 call = build_call_nary (void_type_node,
710 build_address_of (throw_node),
711 1, node);
712 TREE_SIDE_EFFECTS (call) = 1;
713 java_add_stmt (call);
714 java_stack_pop (stack_pointer);
717 /* Implementation for jsr/ret */
719 static void
720 build_java_jsr (int target_pc, int return_pc)
722 tree where = lookup_label (target_pc);
723 tree ret = lookup_label (return_pc);
724 tree ret_label = fold_build1 (ADDR_EXPR, return_address_type_node, ret);
725 push_value (ret_label);
726 flush_quick_stack ();
727 java_add_stmt (build1 (GOTO_EXPR, void_type_node, where));
729 /* Do not need to emit the label here. We noted the existence of the
730 label as a jump target in note_instructions; we'll emit the label
731 for real at the beginning of the expand_byte_code loop. */
734 static void
735 build_java_ret (tree location)
737 java_add_stmt (build1 (GOTO_EXPR, void_type_node, location));
740 /* Implementation of operations on array: new, load, store, length */
742 tree
743 decode_newarray_type (int atype)
745 switch (atype)
747 case 4: return boolean_type_node;
748 case 5: return char_type_node;
749 case 6: return float_type_node;
750 case 7: return double_type_node;
751 case 8: return byte_type_node;
752 case 9: return short_type_node;
753 case 10: return int_type_node;
754 case 11: return long_type_node;
755 default: return NULL_TREE;
759 /* Map primitive type to the code used by OPCODE_newarray. */
762 encode_newarray_type (tree type)
764 if (type == boolean_type_node)
765 return 4;
766 else if (type == char_type_node)
767 return 5;
768 else if (type == float_type_node)
769 return 6;
770 else if (type == double_type_node)
771 return 7;
772 else if (type == byte_type_node)
773 return 8;
774 else if (type == short_type_node)
775 return 9;
776 else if (type == int_type_node)
777 return 10;
778 else if (type == long_type_node)
779 return 11;
780 else
781 gcc_unreachable ();
784 /* Build a call to _Jv_ThrowBadArrayIndex(), the
785 ArrayIndexOfBoundsException exception handler. */
787 static tree
788 build_java_throw_out_of_bounds_exception (tree index)
790 tree node;
792 /* We need to build a COMPOUND_EXPR because _Jv_ThrowBadArrayIndex()
793 has void return type. We cannot just set the type of the CALL_EXPR below
794 to int_type_node because we would lose it during gimplification. */
795 gcc_assert (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (soft_badarrayindex_node))));
796 node = build_call_nary (void_type_node,
797 build_address_of (soft_badarrayindex_node),
798 1, index);
799 TREE_SIDE_EFFECTS (node) = 1;
801 node = build2 (COMPOUND_EXPR, int_type_node, node, integer_zero_node);
802 TREE_SIDE_EFFECTS (node) = 1; /* Allows expansion within ANDIF */
804 return (node);
807 /* Return the length of an array. Doesn't perform any checking on the nature
808 or value of the array NODE. May be used to implement some bytecodes. */
810 tree
811 build_java_array_length_access (tree node)
813 tree type = TREE_TYPE (node);
814 tree array_type = TREE_TYPE (type);
815 HOST_WIDE_INT length;
817 if (!is_array_type_p (type))
819 /* With the new verifier, we will see an ordinary pointer type
820 here. In this case, we just use an arbitrary array type. */
821 array_type = build_java_array_type (object_ptr_type_node, -1);
822 type = promote_type (array_type);
825 length = java_array_type_length (type);
826 if (length >= 0)
827 return build_int_cst (NULL_TREE, length);
829 node = build3 (COMPONENT_REF, int_type_node,
830 build_java_indirect_ref (array_type, node,
831 flag_check_references),
832 lookup_field (&array_type, get_identifier ("length")),
833 NULL_TREE);
834 IS_ARRAY_LENGTH_ACCESS (node) = 1;
835 return node;
838 /* Optionally checks a reference against the NULL pointer. ARG1: the
839 expr, ARG2: we should check the reference. Don't generate extra
840 checks if we're not generating code. */
842 tree
843 java_check_reference (tree expr, int check)
845 if (!flag_syntax_only && check)
847 expr = save_expr (expr);
848 expr = build3 (COND_EXPR, TREE_TYPE (expr),
849 build2 (EQ_EXPR, boolean_type_node,
850 expr, null_pointer_node),
851 build_call_nary (void_type_node,
852 build_address_of (soft_nullpointer_node),
854 expr);
857 return expr;
860 /* Reference an object: just like an INDIRECT_REF, but with checking. */
862 tree
863 build_java_indirect_ref (tree type, tree expr, int check)
865 tree t;
866 t = java_check_reference (expr, check);
867 t = convert (build_pointer_type (type), t);
868 return build1 (INDIRECT_REF, type, t);
871 /* Implement array indexing (either as l-value or r-value).
872 Returns a tree for ARRAY[INDEX], assume TYPE is the element type.
873 Optionally performs bounds checking and/or test to NULL.
874 At this point, ARRAY should have been verified as an array. */
876 tree
877 build_java_arrayaccess (tree array, tree type, tree index)
879 tree node, throw_expr = NULL_TREE;
880 tree data_field;
881 tree ref;
882 tree array_type = TREE_TYPE (TREE_TYPE (array));
883 tree size_exp = fold_convert (sizetype, size_in_bytes (type));
885 if (!is_array_type_p (TREE_TYPE (array)))
887 /* With the new verifier, we will see an ordinary pointer type
888 here. In this case, we just use the correct array type. */
889 array_type = build_java_array_type (type, -1);
892 if (flag_bounds_check)
894 /* Generate:
895 * (unsigned jint) INDEX >= (unsigned jint) LEN
896 * && throw ArrayIndexOutOfBoundsException.
897 * Note this is equivalent to and more efficient than:
898 * INDEX < 0 || INDEX >= LEN && throw ... */
899 tree test;
900 tree len = convert (unsigned_int_type_node,
901 build_java_array_length_access (array));
902 test = fold_build2 (GE_EXPR, boolean_type_node,
903 convert (unsigned_int_type_node, index),
904 len);
905 if (! integer_zerop (test))
907 throw_expr
908 = build2 (TRUTH_ANDIF_EXPR, int_type_node, test,
909 build_java_throw_out_of_bounds_exception (index));
910 /* allows expansion within COMPOUND */
911 TREE_SIDE_EFFECTS( throw_expr ) = 1;
915 /* If checking bounds, wrap the index expr with a COMPOUND_EXPR in order
916 to have the bounds check evaluated first. */
917 if (throw_expr != NULL_TREE)
918 index = build2 (COMPOUND_EXPR, int_type_node, throw_expr, index);
920 data_field = lookup_field (&array_type, get_identifier ("data"));
922 ref = build3 (COMPONENT_REF, TREE_TYPE (data_field),
923 build_java_indirect_ref (array_type, array,
924 flag_check_references),
925 data_field, NULL_TREE);
927 /* Take the address of the data field and convert it to a pointer to
928 the element type. */
929 node = build1 (NOP_EXPR, build_pointer_type (type), build_address_of (ref));
931 /* Multiply the index by the size of an element to obtain a byte
932 offset. Convert the result to a pointer to the element type. */
933 index = build2 (MULT_EXPR, sizetype,
934 fold_convert (sizetype, index),
935 size_exp);
937 /* Sum the byte offset and the address of the data field. */
938 node = fold_build_pointer_plus (node, index);
940 /* Finally, return
942 *((&array->data) + index*size_exp)
945 return build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (node)), node);
948 /* Generate code to throw an ArrayStoreException if OBJECT is not assignable
949 (at runtime) to an element of ARRAY. A NOP_EXPR is returned if it can
950 determine that no check is required. */
952 tree
953 build_java_arraystore_check (tree array, tree object)
955 tree check, element_type, source;
956 tree array_type_p = TREE_TYPE (array);
957 tree object_type = TYPE_NAME (TREE_TYPE (TREE_TYPE (object)));
959 if (! flag_verify_invocations)
961 /* With the new verifier, we don't track precise types. FIXME:
962 performance regression here. */
963 element_type = TYPE_NAME (object_type_node);
965 else
967 gcc_assert (is_array_type_p (array_type_p));
969 /* Get the TYPE_DECL for ARRAY's element type. */
970 element_type
971 = TYPE_NAME (TREE_TYPE (TREE_TYPE (TREE_TYPE (array_type_p))));
974 gcc_assert (TREE_CODE (element_type) == TYPE_DECL
975 && TREE_CODE (object_type) == TYPE_DECL);
977 if (!flag_store_check)
978 return build1 (NOP_EXPR, array_type_p, array);
980 /* No check is needed if the element type is final. Also check that
981 element_type matches object_type, since in the bytecode
982 compilation case element_type may be the actual element type of
983 the array rather than its declared type. However, if we're doing
984 indirect dispatch, we can't do the `final' optimization. */
985 if (element_type == object_type
986 && ! flag_indirect_dispatch
987 && CLASS_FINAL (element_type))
988 return build1 (NOP_EXPR, array_type_p, array);
990 /* OBJECT might be wrapped by a SAVE_EXPR. */
991 if (TREE_CODE (object) == SAVE_EXPR)
992 source = TREE_OPERAND (object, 0);
993 else
994 source = object;
996 /* Avoid the check if OBJECT was just loaded from the same array. */
997 if (TREE_CODE (source) == ARRAY_REF)
999 tree target;
1000 source = TREE_OPERAND (source, 0); /* COMPONENT_REF. */
1001 source = TREE_OPERAND (source, 0); /* INDIRECT_REF. */
1002 source = TREE_OPERAND (source, 0); /* Source array's DECL or SAVE_EXPR. */
1003 if (TREE_CODE (source) == SAVE_EXPR)
1004 source = TREE_OPERAND (source, 0);
1006 target = array;
1007 if (TREE_CODE (target) == SAVE_EXPR)
1008 target = TREE_OPERAND (target, 0);
1010 if (source == target)
1011 return build1 (NOP_EXPR, array_type_p, array);
1014 /* Build an invocation of _Jv_CheckArrayStore */
1015 check = build_call_nary (void_type_node,
1016 build_address_of (soft_checkarraystore_node),
1017 2, array, object);
1018 TREE_SIDE_EFFECTS (check) = 1;
1020 return check;
1023 /* Makes sure that INDEXED_TYPE is appropriate. If not, make it from
1024 ARRAY_NODE. This function is used to retrieve something less vague than
1025 a pointer type when indexing the first dimension of something like [[<t>.
1026 May return a corrected type, if necessary, otherwise INDEXED_TYPE is
1027 return unchanged. */
1029 static tree
1030 build_java_check_indexed_type (tree array_node ATTRIBUTE_UNUSED,
1031 tree indexed_type)
1033 /* We used to check to see if ARRAY_NODE really had array type.
1034 However, with the new verifier, this is not necessary, as we know
1035 that the object will be an array of the appropriate type. */
1037 return indexed_type;
1040 /* newarray triggers a call to _Jv_NewPrimArray. This function should be
1041 called with an integer code (the type of array to create), and the length
1042 of the array to create. */
1044 tree
1045 build_newarray (int atype_value, tree length)
1047 tree type_arg;
1049 tree prim_type = decode_newarray_type (atype_value);
1050 tree type
1051 = build_java_array_type (prim_type,
1052 host_integerp (length, 0) == INTEGER_CST
1053 ? tree_low_cst (length, 0) : -1);
1055 /* Pass a reference to the primitive type class and save the runtime
1056 some work. */
1057 type_arg = build_class_ref (prim_type);
1059 return build_call_nary (promote_type (type),
1060 build_address_of (soft_newarray_node),
1061 2, type_arg, length);
1064 /* Generates anewarray from a given CLASS_TYPE. Gets from the stack the size
1065 of the dimension. */
1067 tree
1068 build_anewarray (tree class_type, tree length)
1070 tree type
1071 = build_java_array_type (class_type,
1072 host_integerp (length, 0)
1073 ? tree_low_cst (length, 0) : -1);
1075 return build_call_nary (promote_type (type),
1076 build_address_of (soft_anewarray_node),
1078 length,
1079 build_class_ref (class_type),
1080 null_pointer_node);
1083 /* Return a node the evaluates 'new TYPE[LENGTH]'. */
1085 tree
1086 build_new_array (tree type, tree length)
1088 if (JPRIMITIVE_TYPE_P (type))
1089 return build_newarray (encode_newarray_type (type), length);
1090 else
1091 return build_anewarray (TREE_TYPE (type), length);
/* Generates a call to _Jv_NewMultiArray.  multianewarray expects a
   class pointer, a number of dimensions and the matching number of
   dimensions.  The argument list is NULL terminated.  */

static void
expand_java_multianewarray (tree class_type, int ndim)
{
  int i;
  vec<tree, va_gc> *args = NULL;

  /* Slots: [0] class, [1] ndim, [2..2+ndim-1] dimensions,
     [2+ndim] NULL terminator.  */
  vec_safe_grow (args, 3 + ndim);

  (*args)[0] = build_class_ref (class_type);
  (*args)[1] = build_int_cst (NULL_TREE, ndim);

  /* The dimension sizes are popped last-first, so fill the slots
     back to front to restore declaration order.  */
  for (i = ndim - 1; i >= 0; i--)
    (*args)[(unsigned)(2 + i)] = pop_value (int_type_node);

  (*args)[2 + ndim] = null_pointer_node;

  push_value (build_call_vec (promote_type (class_type),
			      build_address_of (soft_multianewarray_node),
			      args));
}
/* ARRAY[INDEX] <- RHS.  build_java_check_indexed_type makes sure that
   ARRAY is an array type.  May expand some bound checking and NULL
   pointer checking.  RHS_TYPE_NODE is the type we are going to store.
   In the case of CHAR/BYTE/BOOLEAN/SHORT, the type popped off the
   stack is an INT.  In those cases, we make the conversion.

   If ARRAY is a reference type, the assignment is checked at run-time
   to make sure that the RHS can be assigned to the array element
   type.  It is not necessary to generate this code if ARRAY is final.  */

static void
expand_java_arraystore (tree rhs_type_node)
{
  /* Sub-int integral values live on the JVM stack as INT.  */
  tree rhs_node = pop_value ((INTEGRAL_TYPE_P (rhs_type_node)
			      && TYPE_PRECISION (rhs_type_node) <= 32) ?
			     int_type_node : rhs_type_node);
  tree index = pop_value (int_type_node);
  tree array_type, array, temp, access;

  /* If we're processing an `aaload' we might as well just pick
     `Object'.  */
  if (TREE_CODE (rhs_type_node) == POINTER_TYPE)
    {
      array_type = build_java_array_type (object_ptr_type_node, -1);
      rhs_type_node = object_ptr_type_node;
    }
  else
    array_type = build_java_array_type (rhs_type_node, -1);

  array = pop_value (array_type);
  array = build1 (NOP_EXPR, promote_type (array_type), array);

  rhs_type_node = build_java_check_indexed_type (array, rhs_type_node);

  flush_quick_stack ();

  /* INDEX and ARRAY are evaluated more than once below.  */
  index = save_expr (index);
  array = save_expr (array);

  /* We want to perform the bounds check (done by
     build_java_arrayaccess) before the type check (done by
     build_java_arraystore_check).  So, we call build_java_arrayaccess
     -- which returns an ARRAY_REF lvalue -- and we then generate code
     to stash the address of that lvalue in a temp.  Then we call
     build_java_arraystore_check, and finally we generate a
     MODIFY_EXPR to set the array element.  */

  access = build_java_arrayaccess (array, rhs_type_node, index);
  temp = build_decl (input_location, VAR_DECL, NULL_TREE,
		     build_pointer_type (TREE_TYPE (access)));
  java_add_local_var (temp);
  java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (temp),
			 temp,
			 build_fold_addr_expr (access)));

  /* Reference stores need the run-time assignability check.  */
  if (TREE_CODE (rhs_type_node) == POINTER_TYPE)
    {
      tree check = build_java_arraystore_check (array, rhs_node);
      java_add_stmt (check);
    }

  java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (access),
			 build1 (INDIRECT_REF, TREE_TYPE (access), temp),
			 rhs_node));
}
/* Expand the evaluation of ARRAY[INDEX].  build_java_check_indexed_type makes
   sure that LHS is an array type.  May expand some bound checking and NULL
   pointer checking.
   LHS_TYPE_NODE is the type of ARRAY[INDEX].  But in the case of CHAR/BYTE/
   BOOLEAN/SHORT, we push a promoted (int) type back to the stack.  */

static void
expand_java_arrayload (tree lhs_type_node)
{
  tree load_node;
  tree index_node = pop_value (int_type_node);
  tree array_type;
  tree array_node;

  /* If we're processing an `aaload' we might as well just pick
     `Object'.  */
  if (TREE_CODE (lhs_type_node) == POINTER_TYPE)
    {
      array_type = build_java_array_type (object_ptr_type_node, -1);
      lhs_type_node = object_ptr_type_node;
    }
  else
    array_type = build_java_array_type (lhs_type_node, -1);
  array_node = pop_value (array_type);
  array_node = build1 (NOP_EXPR, promote_type (array_type), array_node);

  /* Both operands are evaluated more than once by the access below.  */
  index_node = save_expr (index_node);
  array_node = save_expr (array_node);

  lhs_type_node = build_java_check_indexed_type (array_node,
						 lhs_type_node);
  load_node = build_java_arrayaccess (array_node,
				      lhs_type_node,
				      index_node);
  /* Sub-int integral loads are widened to INT on the JVM stack.  */
  if (INTEGRAL_TYPE_P (lhs_type_node) && TYPE_PRECISION (lhs_type_node) <= 32)
    load_node = fold_build1 (NOP_EXPR, int_type_node, load_node);
  push_value (load_node);
}
/* Expands .length.  Makes sure that we deal with an array and may expand
   a NULL check on the array object.  */

static void
expand_java_array_length (void)
{
  tree array = pop_value (ptr_type_node);
  tree length = build_java_array_length_access (array);

  push_value (length);
}
/* Emit code for the call to _Jv_Monitor{Enter,Exit}.  CALL can be
   either soft_monitorenter_node or soft_monitorexit_node.  */

static tree
build_java_monitor (tree call, tree object)
{
  /* The monitor hooks take the object whose lock is affected and
     return nothing.  */
  return build_call_nary (void_type_node,
			  build_address_of (call),
			  1, object);
}
/* Emit code for one of the PUSHC instructions: push the constant IVAL
   of type TYPE onto the quick stack.  */

static void
expand_java_pushc (int ival, tree type)
{
  tree value;
  if (type == ptr_type_node && ival == 0)
    value = null_pointer_node;
  else if (type == int_type_node || type == long_type_node)
    value = build_int_cst (type, ival);
  else if (type == float_type_node || type == double_type_node)
    {
      /* Convert the integer constant to the target's floating
	 representation.  */
      REAL_VALUE_TYPE x;
      REAL_VALUE_FROM_INT (x, ival, 0, TYPE_MODE (type));
      value = build_real (type, x);
    }
  else
    gcc_unreachable ();

  push_value (value);
}
/* Emit a RETURN_EXPR for the current method.  TYPE is the declared
   return type; for void nothing is popped, otherwise the return value
   is popped off the quick stack and stored into DECL_RESULT.  */

static void
expand_java_return (tree type)
{
  if (type == void_type_node)
    java_add_stmt (build1 (RETURN_EXPR, void_type_node, NULL));
  else
    {
      tree retval = pop_value (type);
      tree res = DECL_RESULT (current_function_decl);
      retval = build2 (MODIFY_EXPR, TREE_TYPE (res), res, retval);

      /* Handle the situation where the native integer type is smaller
	 than the JVM integer.  It can happen for many cross compilers.
	 The whole if expression just goes away if INT_TYPE_SIZE < 32
	 is false.  */
      if (INT_TYPE_SIZE < 32
	  && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (res)))
	      < GET_MODE_SIZE (TYPE_MODE (type))))
	retval = build1 (NOP_EXPR, TREE_TYPE (res), retval);

      TREE_SIDE_EFFECTS (retval) = 1;
      java_add_stmt (build1 (RETURN_EXPR, void_type_node, retval));
    }
}
/* Push local variable INDEX of TYPE (at bytecode position PC) onto
   the quick stack, via a fresh temporary copy.  */

static void
expand_load_internal (int index, tree type, int pc)
{
  tree copy;
  tree var = find_local_variable (index, type, pc);

  /* Now VAR is the VAR_DECL (or PARM_DECL) that we are going to push
     on the stack.  If there is an assignment to this VAR_DECL between
     the stack push and the use, then the wrong code could be
     generated.  To avoid this we create a new local and copy our
     value into it.  Then we push this new local on the stack.
     Hopefully this all gets optimized out.  */
  copy = build_decl (input_location, VAR_DECL, NULL_TREE, type);
  if ((INTEGRAL_TYPE_P (type) || POINTER_TYPE_P (type))
      && TREE_TYPE (copy) != TREE_TYPE (var))
    var = convert (type, var);
  java_add_local_var (copy);
  java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (var), copy, var));

  push_value (copy);
}
/* Return an ADDR_EXPR taking the address of VALUE, typed as a pointer
   to VALUE's type.  */

tree
build_address_of (tree value)
{
  return build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (value)), value);
}
/* Return true if TYPE (a class) declares a finalizer, either directly
   or somewhere up its superclass chain.  */

bool
class_has_finalize_method (tree type)
{
  tree super = CLASSTYPE_SUPER (type);

  if (super == NULL_TREE)
    return false;	/* Every class with a real finalizer inherits */
			/* from java.lang.Object.  */
  else
    return HAS_FINALIZER_P (type) || class_has_finalize_method (super);
}
/* Build a call to the runtime allocator for class TYPE, using the
   finalizer-aware allocator only when TYPE actually needs one.  */

tree
java_create_object (tree type)
{
  tree alloc_node = (class_has_finalize_method (type)
		     ? alloc_object_node
		     : alloc_no_finalizer_node);

  return build_call_nary (promote_type (type),
			  build_address_of (alloc_node),
			  1, build_class_ref (type));
}
1347 static void
1348 expand_java_NEW (tree type)
1350 tree alloc_node;
1352 alloc_node = (class_has_finalize_method (type) ? alloc_object_node
1353 : alloc_no_finalizer_node);
1354 if (! CLASS_LOADED_P (type))
1355 load_class (type, 1);
1356 safe_layout_class (type);
1357 push_value (build_call_nary (promote_type (type),
1358 build_address_of (alloc_node),
1359 1, build_class_ref (type)));
/* This returns an expression which will extract the class of an
   object.  */

tree
build_get_class (tree value)
{
  tree class_field = lookup_field (&dtable_type, get_identifier ("class"));
  tree vtable_field = lookup_field (&object_type_node,
				    get_identifier ("vtable"));
  /* VALUE->vtable, with an optional null-reference check.  */
  tree tmp = build3 (COMPONENT_REF, dtable_ptr_type,
		     build_java_indirect_ref (object_type_node, value,
					      flag_check_references),
		     vtable_field, NULL_TREE);
  /* ... ->vtable->class.  */
  return build3 (COMPONENT_REF, class_ptr_type,
		 build1 (INDIRECT_REF, dtable_type, tmp),
		 class_field, NULL_TREE);
}
/* This builds the tree representation of the `instanceof' operator.
   It tries various tricks to optimize this in cases where types are
   known.  */

tree
build_instanceof (tree value, tree type)
{
  tree expr;
  tree itype = TREE_TYPE (TREE_TYPE (soft_instanceof_node));
  tree valtype = TREE_TYPE (TREE_TYPE (value));
  tree valclass = TYPE_NAME (valtype);
  tree klass;

  /* When compiling from bytecode, we need to ensure that TYPE has
     been loaded.  */
  if (CLASS_P (type) && ! CLASS_LOADED_P (type))
    {
      load_class (type, 1);
      safe_layout_class (type);
      if (! TYPE_SIZE (type) || TREE_CODE (TYPE_SIZE (type)) == ERROR_MARK)
	return error_mark_node;
    }
  klass = TYPE_NAME (type);

  if (type == object_type_node || inherits_from_p (valtype, type))
    {
      /* Anything except `null' is an instance of Object.  Likewise,
	 if the object is known to be an instance of the class, then
	 we only need to check for `null'.  */
      expr = build2 (NE_EXPR, itype, value, null_pointer_node);
    }
  else if (flag_verify_invocations
	   && ! TYPE_ARRAY_P (type)
	   && ! TYPE_ARRAY_P (valtype)
	   && DECL_P (klass) && DECL_P (valclass)
	   && ! CLASS_INTERFACE (valclass)
	   && ! CLASS_INTERFACE (klass)
	   && ! inherits_from_p (type, valtype)
	   && (CLASS_FINAL (klass)
	       || ! inherits_from_p (valtype, type)))
    {
      /* The classes are from different branches of the derivation
	 tree, so we immediately know the answer.  */
      expr = boolean_false_node;
    }
  else if (DECL_P (klass) && CLASS_FINAL (klass))
    {
      /* TYPE is final, so a direct class-pointer comparison suffices:
	 value != null && value->class == TYPE's class.  */
      tree save = save_expr (value);
      expr = build3 (COND_EXPR, itype,
		     build2 (NE_EXPR, boolean_type_node,
			     save, null_pointer_node),
		     build2 (EQ_EXPR, itype,
			     build_get_class (save),
			     build_class_ref (type)),
		     boolean_false_node);
    }
  else
    {
      /* General case: defer to the runtime helper.  */
      expr = build_call_nary (itype,
			      build_address_of (soft_instanceof_node),
			      2, value, build_class_ref (type));
    }
  TREE_SIDE_EFFECTS (expr) = TREE_SIDE_EFFECTS (value);
  return expr;
}
/* Expand the `instanceof' opcode against class TYPE.  */

static void
expand_java_INSTANCEOF (tree type)
{
  tree value = pop_value (object_ptr_type_node);
  value = build_instanceof (value, type);
  push_value (value);
}
/* Expand the `checkcast' opcode: the runtime helper throws if the
   popped reference is not assignable to TYPE, else returns it.  */

static void
expand_java_CHECKCAST (tree type)
{
  tree value = pop_value (ptr_type_node);
  value = build_call_nary (promote_type (type),
			   build_address_of (soft_checkcast_node),
			   2, build_class_ref (type), value);
  push_value (value);
}
/* Expand the `iinc' opcode: add constant IVAL to the int local
   variable at LOCAL_VAR_INDEX, at bytecode position PC.  */

static void
expand_iinc (unsigned int local_var_index, int ival, int pc)
{
  tree local_var, res;
  tree constant_value;

  flush_quick_stack ();
  local_var = find_local_variable (local_var_index, int_type_node, pc);
  constant_value = build_int_cst (NULL_TREE, ival);
  res = fold_build2 (PLUS_EXPR, int_type_node, local_var, constant_value);
  java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (local_var), local_var, res));
}
/* Build a call to the soft (library) implementation of integer
   division or remainder.  OP is TRUNC_DIV_EXPR or TRUNC_MOD_EXPR;
   TYPE is int_type_node or long_type_node; OP1 and OP2 are the
   operands, converted to TYPE.  */

tree
build_java_soft_divmod (enum tree_code op, tree type, tree op1, tree op2)
{
  tree call = NULL;
  tree arg1 = convert (type, op1);
  tree arg2 = convert (type, op2);

  if (type == int_type_node)
    {
      switch (op)
	{
	case TRUNC_DIV_EXPR:
	  call = soft_idiv_node;
	  break;
	case TRUNC_MOD_EXPR:
	  call = soft_irem_node;
	  break;
	default:
	  break;
	}
    }
  else if (type == long_type_node)
    {
      switch (op)
	{
	case TRUNC_DIV_EXPR:
	  call = soft_ldiv_node;
	  break;
	case TRUNC_MOD_EXPR:
	  call = soft_lrem_node;
	  break;
	default:
	  break;
	}
    }

  /* Any other OP/TYPE combination is a caller error.  */
  gcc_assert (call);
  call = build_call_nary (type, build_address_of (call), 2, arg1, arg2);
  return call;
}
/* Build the tree for binary operator OP of type TYPE applied to ARG1
   and ARG2, implementing Java semantics (masked shift counts, the
   three-way compare opcodes, fmod for floating remainder, and library
   routines for integer div/rem when requested).  */

tree
build_java_binop (enum tree_code op, tree type, tree arg1, tree arg2)
{
  tree mask;
  switch (op)
    {
    case URSHIFT_EXPR:
      {
	/* Unsigned right shift: shift in the unsigned variant of TYPE
	   and convert back.  */
	tree u_type = unsigned_type_for (type);
	arg1 = convert (u_type, arg1);
	arg1 = build_java_binop (RSHIFT_EXPR, u_type, arg1, arg2);
	return convert (type, arg1);
      }
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
      /* Java masks the shift count to the operand width minus one.  */
      mask = build_int_cst (int_type_node,
			    TYPE_PRECISION (TREE_TYPE (arg1)) - 1);
      arg2 = fold_build2 (BIT_AND_EXPR, int_type_node, arg2, mask);
      break;

    case COMPARE_L_EXPR:  /* arg1 > arg2 ?  1 : arg1 == arg2 ? 0 : -1 */
    case COMPARE_G_EXPR:  /* arg1 < arg2 ? -1 : arg1 == arg2 ? 0 :  1 */
      /* Both operands are evaluated twice below.  */
      arg1 = save_expr (arg1);  arg2 = save_expr (arg2);
      {
	tree ifexp1 = fold_build2 (op == COMPARE_L_EXPR ? GT_EXPR : LT_EXPR,
				   boolean_type_node, arg1, arg2);
	tree ifexp2 = fold_build2 (EQ_EXPR, boolean_type_node, arg1, arg2);
	tree second_compare = fold_build3 (COND_EXPR, int_type_node,
					   ifexp2, integer_zero_node,
					   op == COMPARE_L_EXPR
					   ? integer_minus_one_node
					   : integer_one_node);
	return fold_build3 (COND_EXPR, int_type_node, ifexp1,
			    op == COMPARE_L_EXPR ? integer_one_node
			    : integer_minus_one_node,
			    second_compare);
      }
    case COMPARE_EXPR:
      arg1 = save_expr (arg1);  arg2 = save_expr (arg2);
      {
	/* arg1 < arg2 ? -1 : arg1 > arg2 ? 1 : 0.  */
	tree ifexp1 = fold_build2 (LT_EXPR, boolean_type_node, arg1, arg2);
	tree ifexp2 = fold_build2 (GT_EXPR, boolean_type_node, arg1, arg2);
	tree second_compare = fold_build3 (COND_EXPR, int_type_node,
					   ifexp2, integer_one_node,
					   integer_zero_node);
	return fold_build3 (COND_EXPR, int_type_node,
			    ifexp1, integer_minus_one_node, second_compare);
      }
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:
      if (TREE_CODE (type) == REAL_TYPE
	  && op == TRUNC_MOD_EXPR)
	{
	  /* Floating remainder goes through fmod, computed in double
	     and narrowed back if necessary.  */
	  tree call;
	  if (type != double_type_node)
	    {
	      arg1 = convert (double_type_node, arg1);
	      arg2 = convert (double_type_node, arg2);
	    }
	  call = build_call_nary (double_type_node,
				  build_address_of (soft_fmod_node),
				  2, arg1, arg2);
	  if (type != double_type_node)
	    call = convert (type, call);
	  return call;
	}

      if (TREE_CODE (type) == INTEGER_TYPE
	  && flag_use_divide_subroutine
	  && ! flag_syntax_only)
	return build_java_soft_divmod (op, type, arg1, arg2);

      break;
    default:  ;
    }
  return fold_build2 (op, type, arg1, arg2);
}
/* Pop the operands for binary operator OP of type TYPE off the quick
   stack and push the result.  For shifts the right operand is always
   an int, regardless of TYPE.  */

static void
expand_java_binop (tree type, enum tree_code op)
{
  tree larg, rarg;
  tree ltype = type;
  tree rtype = type;
  switch (op)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case URSHIFT_EXPR:
      rtype = int_type_node;
      rarg = pop_value (rtype);
      break;
    default:
      rarg = pop_value (rtype);
    }
  larg = pop_value (ltype);
  push_value (build_java_binop (op, type, larg, rarg));
}
/* Lookup the field named NAME in *TYPEP or its super classes.
   If not found, return NULL_TREE.
   (If the *TYPEP is not found, or if the field reference is
   ambiguous, return error_mark_node.)
   If found, return the FIELD_DECL, and set *TYPEP to the
   class containing the field.  */

tree
lookup_field (tree *typep, tree name)
{
  if (CLASS_P (*typep) && !CLASS_LOADED_P (*typep))
    {
      load_class (*typep, 1);
      safe_layout_class (*typep);
      if (!TYPE_SIZE (*typep) || TREE_CODE (TYPE_SIZE (*typep)) == ERROR_MARK)
	return error_mark_node;
    }
  /* Walk up the superclass chain; at each level check the class's own
     fields first, then its implemented interfaces.  */
  do
    {
      tree field, binfo, base_binfo;
      tree save_field;
      int i;

      for (field = TYPE_FIELDS (*typep); field; field = DECL_CHAIN (field))
	if (DECL_NAME (field) == name)
	  return field;

      /* Process implemented interfaces.  */
      save_field = NULL_TREE;
      for (binfo = TYPE_BINFO (*typep), i = 0;
	   BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
	{
	  tree t = BINFO_TYPE (base_binfo);
	  if ((field = lookup_field (&t, name)))
	    {
	      if (save_field == field)
		continue;
	      if (save_field == NULL_TREE)
		save_field = field;
	      else
		{
		  /* Found in two distinct interfaces: ambiguous.  */
		  tree i1 = DECL_CONTEXT (save_field);
		  tree i2 = DECL_CONTEXT (field);
		  error ("reference %qs is ambiguous: appears in interface %qs and interface %qs",
			 IDENTIFIER_POINTER (name),
			 IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (i1))),
			 IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (i2))));
		  return error_mark_node;
		}
	    }
	}

      if (save_field != NULL_TREE)
	return save_field;

      *typep = CLASSTYPE_SUPER (*typep);
    } while (*typep);
  return NULL_TREE;
}
/* Look up the field named NAME in object SELF_VALUE,
   which has class SELF_CLASS (a non-handle RECORD_TYPE).
   SELF_VALUE is NULL_TREE if looking for a static field.  */

tree
build_field_ref (tree self_value, tree self_class, tree name)
{
  tree base_class = self_class;
  tree field_decl = lookup_field (&base_class, name);
  if (field_decl == NULL_TREE)
    {
      error ("field %qs not found", IDENTIFIER_POINTER (name));
      return error_mark_node;
    }
  if (self_value == NULL_TREE)
    {
      return build_static_field_ref (field_decl);
    }
  else
    {
      tree base_type = promote_type (base_class);

      /* CHECK is true if self_value is not the this pointer.  */
      int check = (! (DECL_P (self_value)
		      && DECL_NAME (self_value) == this_identifier_node));

      /* Determine whether a field offset from NULL will lie within
	 Page 0: this is necessary on those GNU/Linux/BSD systems that
	 trap SEGV to generate NullPointerExceptions.

	 We assume that Page 0 will be mapped with NOPERM, and that
	 memory may be allocated from any other page, so only field
	 offsets < pagesize are guaranteed to trap.  We also assume
	 the smallest page size we'll encounter is 4k bytes.  */
      if (! flag_syntax_only && check && ! flag_check_references
	  && ! flag_indirect_dispatch)
	{
	  tree field_offset = byte_position (field_decl);
	  if (! page_size)
	    page_size = size_int (4096);
	  check = ! INT_CST_LT_UNSIGNED (field_offset, page_size);
	}

      if (base_type != TREE_TYPE (self_value))
	self_value = fold_build1 (NOP_EXPR, base_type, self_value);
      if (! flag_syntax_only && flag_indirect_dispatch)
	{
	  /* Indirect dispatch: look the field offset up in the otable
	     at run time instead of hard-coding it.  */
	  tree otable_index
	    = build_int_cst (NULL_TREE, get_symbol_table_index
			     (field_decl, NULL_TREE,
			      &TYPE_OTABLE_METHODS (output_class)));
	  tree field_offset
	    = build4 (ARRAY_REF, integer_type_node,
		      TYPE_OTABLE_DECL (output_class), otable_index,
		      NULL_TREE, NULL_TREE);
	  tree address;

	  /* For a field from another class, a zero otable entry means
	     the field no longer exists: trap via soft_nosuchfield.  */
	  if (DECL_CONTEXT (field_decl) != output_class)
	    field_offset
	      = build3 (COND_EXPR, TREE_TYPE (field_offset),
			build2 (EQ_EXPR, boolean_type_node,
				field_offset, integer_zero_node),
			build_call_nary (void_type_node,
					 build_address_of (soft_nosuchfield_node),
					 1, otable_index),
			field_offset);

	  self_value = java_check_reference (self_value, check);
	  address = fold_build_pointer_plus (self_value, field_offset);
	  address = fold_convert (build_pointer_type (TREE_TYPE (field_decl)),
				  address);
	  return fold_build1 (INDIRECT_REF, TREE_TYPE (field_decl), address);
	}

      self_value = build_java_indirect_ref (TREE_TYPE (TREE_TYPE (self_value)),
					    self_value, check);
      return fold_build3 (COMPONENT_REF, TREE_TYPE (field_decl),
			  self_value, field_decl, NULL_TREE);
    }
}
/* Look up (or create) the LABEL_DECL for bytecode offset PC in the
   current method.  */

tree
lookup_label (int pc)
{
  tree name;
  char buf[32];
  if (pc > highest_label_pc_this_method)
    highest_label_pc_this_method = pc;
  /* Labels are named by absolute PC, making them unique per method.  */
  targetm.asm_out.generate_internal_label (buf, "LJpc=",
					   start_label_pc_this_method + pc);
  name = get_identifier (buf);
  if (IDENTIFIER_LOCAL_VALUE (name))
    return IDENTIFIER_LOCAL_VALUE (name);
  else
    {
      /* The type of the address of a label is return_address_type_node.  */
      tree decl = create_label_decl (name);
      return pushdecl (decl);
    }
}
/* Generate a unique name for the purpose of loops and switches
   labels, and try-catch-finally blocks label or temporary variables.  */

tree
generate_name (void)
{
  static int l_number = 0;	/* Monotonic counter; never reset.  */
  char buff [32];
  targetm.asm_out.generate_internal_label (buff, "LJv", l_number);
  l_number++;
  return get_identifier (buff);
}
/* Build an artificial LABEL_DECL named NAME in the current function.  */

tree
create_label_decl (tree name)
{
  tree decl;
  decl = build_decl (input_location, LABEL_DECL, name,
		     TREE_TYPE (return_address_type_node));
  DECL_CONTEXT (decl) = current_function_decl;
  /* Compiler-generated; emit no debug info for it.  */
  DECL_IGNORED_P (decl) = 1;
  return decl;
}
/* This maps a bytecode offset (PC) to various flags.  */
char *instruction_bits;

/* This is a vector of type states for the current method.  It is
   indexed by PC.  Each element is a tree vector holding the type
   state at that PC.  We only note type states at basic block
   boundaries.  */
vec<tree, va_gc> *type_states;
/* Record that TARGET_PC is the target of a jump: ensure a label
   exists there and flag the instruction as a jump target.  */

static void
note_label (int current_pc ATTRIBUTE_UNUSED, int target_pc)
{
  lookup_label (target_pc);
  instruction_bits [target_pc] |= BCODE_JUMP_TARGET;
}
/* Emit code to jump to TARGET_PC if VALUE1 CONDITION VALUE2,
   where CONDITION is one of the compare operators.  */

static void
expand_compare (enum tree_code condition, tree value1, tree value2,
		int target_pc)
{
  tree target = lookup_label (target_pc);
  tree cond = fold_build2 (condition, boolean_type_node, value1, value2);
  /* if (cond) goto target; else empty-statement.  */
  java_add_stmt
    (build3 (COND_EXPR, void_type_node, java_truthvalue_conversion (cond),
	     build1 (GOTO_EXPR, void_type_node, target),
	     build_java_empty_stmt ()));
}
/* Emit code for a TEST-type opcode.  */

static void
expand_test (enum tree_code condition, tree type, int target_pc)
{
  tree value1, value2;
  flush_quick_stack ();
  value1 = pop_value (type);
  /* TEST opcodes compare against zero, or null for references.  */
  value2 = (type == ptr_type_node) ? null_pointer_node : integer_zero_node;
  expand_compare (condition, value1, value2, target_pc);
}
/* Emit code for a COND-type opcode.  */

static void
expand_cond (enum tree_code condition, tree type, int target_pc)
{
  tree value1, value2;
  flush_quick_stack ();
  /* note: pop values in opposite order */
  value2 = pop_value (type);
  value1 = pop_value (type);
  /* Maybe should check value1 and value2 for type compatibility ???  */
  expand_compare (condition, value1, value2, target_pc);
}
/* Emit an unconditional jump to the label for bytecode offset
   TARGET_PC.  */

static void
expand_java_goto (int target_pc)
{
  tree target_label = lookup_label (target_pc);
  flush_quick_stack ();
  java_add_stmt (build1 (GOTO_EXPR, void_type_node, target_label));
}
/* Build and emit a SWITCH_EXPR on SELECTOR whose default case jumps
   to DEFAULT_PC; return it so that cases can be appended.  */

static tree
expand_java_switch (tree selector, int default_pc)
{
  tree switch_expr, x;

  flush_quick_stack ();
  switch_expr = build3 (SWITCH_EXPR, TREE_TYPE (selector), selector,
			NULL_TREE, NULL_TREE);
  java_add_stmt (switch_expr);

  /* Default case: an anonymous label followed by a goto.  */
  x = build_case_label (NULL_TREE, NULL_TREE,
			create_artificial_label (input_location));
  append_to_statement_list (x, &SWITCH_BODY (switch_expr));

  x = build1 (GOTO_EXPR, void_type_node, lookup_label (default_pc));
  append_to_statement_list (x, &SWITCH_BODY (switch_expr));

  return switch_expr;
}
/* Append case MATCH to SWITCH_EXPR, jumping to the label for bytecode
   offset TARGET_PC.  */

static void
expand_java_add_case (tree switch_expr, int match, int target_pc)
{
  tree value, x;

  value = build_int_cst (TREE_TYPE (switch_expr), match);

  x = build_case_label (value, NULL_TREE,
			create_artificial_label (input_location));
  append_to_statement_list (x, &SWITCH_BODY (switch_expr));

  x = build1 (GOTO_EXPR, void_type_node, lookup_label (target_pc));
  append_to_statement_list (x, &SWITCH_BODY (switch_expr));
}
1903 static vec<tree, va_gc> *
1904 pop_arguments (tree method_type)
1906 function_args_iterator fnai;
1907 tree type;
1908 vec<tree, va_gc> *args = NULL;
1909 int arity;
1911 FOREACH_FUNCTION_ARGS (method_type, type, fnai)
1913 /* XXX: leaky abstraction. */
1914 if (type == void_type_node)
1915 break;
1917 vec_safe_push (args, type);
1920 arity = vec_safe_length (args);
1922 while (arity--)
1924 tree arg = pop_value ((*args)[arity]);
1926 /* We simply cast each argument to its proper type. This is
1927 needed since we lose type information coming out of the
1928 verifier. We also have to do this when we pop an integer
1929 type that must be promoted for the function call. */
1930 if (TREE_CODE (type) == POINTER_TYPE)
1931 arg = build1 (NOP_EXPR, type, arg);
1932 else if (targetm.calls.promote_prototypes (type)
1933 && TYPE_PRECISION (type) < TYPE_PRECISION (integer_type_node)
1934 && INTEGRAL_TYPE_P (type))
1935 arg = convert (integer_type_node, arg);
1937 (*args)[arity] = arg;
1940 return args;
/* Attach to PTR (a block) the declaration found in ENTRY.  This is a
   hash-table traversal callback; it always returns true so the
   traversal continues.  */

int
attach_init_test_initialization_flags (void **entry, void *ptr)
{
  tree block = (tree)ptr;
  struct treetreehash_entry *ite = (struct treetreehash_entry *) *entry;

  if (block != error_mark_node)
    {
      if (TREE_CODE (block) == BIND_EXPR)
	{
	  /* Chain the flag decl onto the block's variables and emit a
	     DECL_EXPR for it ahead of the existing body.  */
	  tree body = BIND_EXPR_BODY (block);
	  DECL_CHAIN (ite->value) = BIND_EXPR_VARS (block);
	  BIND_EXPR_VARS (block) = ite->value;
	  body = build2 (COMPOUND_EXPR, void_type_node,
			 build1 (DECL_EXPR, void_type_node, ite->value), body);
	  BIND_EXPR_BODY (block) = body;
	}
      else
	{
	  /* Same treatment for a BLOCK node.  */
	  tree body = BLOCK_SUBBLOCKS (block);
	  TREE_CHAIN (ite->value) = BLOCK_EXPR_DECLS (block);
	  BLOCK_EXPR_DECLS (block) = ite->value;
	  body = build2 (COMPOUND_EXPR, void_type_node,
			 build1 (DECL_EXPR, void_type_node, ite->value), body);
	  BLOCK_SUBBLOCKS (block) = body;
	}
    }
  return true;
}
/* Build an expression to initialize the class CLAS.
   If EXPR is non-NULL, returns an expression to first call the initializer
   (if it is needed) and then calls EXPR.  */

tree
build_class_init (tree clas, tree expr)
{
  tree init;

  /* An optimization: if CLAS is a superclass of the class we're
     compiling, we don't need to initialize it.  However, if CLAS is
     an interface, it won't necessarily be initialized, even if we
     implement it.  */
  if ((! CLASS_INTERFACE (TYPE_NAME (clas))
       && inherits_from_p (current_class, clas))
      || current_class == clas)
    return expr;

  if (always_initialize_class_p)
    {
      /* Unconditional call to the class initializer hook.  */
      init = build_call_nary (void_type_node,
			      build_address_of (soft_initclass_node),
			      1, build_class_ref (clas));
      TREE_SIDE_EFFECTS (init) = 1;
    }
  else
    {
      /* Guard the initializer call with a per-function boolean flag
	 so it runs at most once per method activation.  */
      tree *init_test_decl;
      tree decl;
      init_test_decl = java_treetreehash_new
	(DECL_FUNCTION_INIT_TEST_TABLE (current_function_decl), clas);

      if (*init_test_decl == NULL)
	{
	  /* Build a declaration and mark it as a flag used to track
	     static class initializations.  */
	  decl = build_decl (input_location, VAR_DECL, NULL_TREE,
			     boolean_type_node);
	  MAYBE_CREATE_VAR_LANG_DECL_SPECIFIC (decl);
	  DECL_CONTEXT (decl) = current_function_decl;
	  DECL_INITIAL (decl) = boolean_false_node;
	  /* Don't emit any symbolic debugging info for this decl.  */
	  DECL_IGNORED_P (decl) = 1;
	  *init_test_decl = decl;
	}

      init = build_call_nary (void_type_node,
			      build_address_of (soft_initclass_node),
			      1, build_class_ref (clas));
      TREE_SIDE_EFFECTS (init) = 1;
      /* if (!flag) init();  */
      init = build3 (COND_EXPR, void_type_node,
		     build2 (EQ_EXPR, boolean_type_node,
			     *init_test_decl, boolean_false_node),
		     init, integer_zero_node);
      TREE_SIDE_EFFECTS (init) = 1;
      /* ... then set the flag.  */
      init = build2 (COMPOUND_EXPR, TREE_TYPE (expr), init,
		     build2 (MODIFY_EXPR, boolean_type_node,
			     *init_test_decl, boolean_true_node));
      TREE_SIDE_EFFECTS (init) = 1;
    }

  if (expr != NULL_TREE)
    {
      expr = build2 (COMPOUND_EXPR, TREE_TYPE (expr), init, expr);
      TREE_SIDE_EFFECTS (expr) = 1;
      return expr;
    }

  return init;
}
/* Rewrite expensive calls that require stack unwinding at runtime to
   cheaper alternatives.  The logic here performs these
   transformations:

   java.lang.Class.forName("foo") -> java.lang.Class.forName("foo", class$)
   java.lang.Class.getClassLoader() -> java.lang.Class.getClassLoader(class$)

*/

typedef struct
{
  const char *classname;	/* Class declaring the method to rewrite.  */
  const char *method;		/* Method name.  */
  const char *signature;	/* Original JNI-style signature.  */
  const char *new_classname;	/* Class of the replacement method.  */
  const char *new_signature;	/* Replacement JNI-style signature.  */
  int flags;			/* ACC_* flags if a stub must be added.  */
  /* Optional hook to fix up the argument list for the replacement.  */
  void (*rewrite_arglist) (vec<tree, va_gc> **);
} rewrite_rule;
/* Add __builtin_return_address(0) to the end of an arglist.  */

static void
rewrite_arglist_getcaller (vec<tree, va_gc> **arglist)
{
  tree retaddr
    = build_call_expr (builtin_decl_explicit (BUILT_IN_RETURN_ADDRESS),
		       1, integer_zero_node);

  /* The return address is only meaningful if this frame is not
     inlined into its caller.  */
  DECL_UNINLINABLE (current_function_decl) = 1;

  vec_safe_push (*arglist, retaddr);
}
/* Add this.class to the end of an arglist.  */

static void
rewrite_arglist_getclass (vec<tree, va_gc> **arglist)
{
  vec_safe_push (*arglist, build_class_ref (output_class));
}
/* The rewrite rules; the list is terminated by an all-NULL entry.  */
static rewrite_rule rules[] =
  {{"java.lang.Class", "getClassLoader", "()Ljava/lang/ClassLoader;",
    "java.lang.Class", "(Ljava/lang/Class;)Ljava/lang/ClassLoader;",
    ACC_FINAL|ACC_PRIVATE, rewrite_arglist_getclass},

   {"java.lang.Class", "forName", "(Ljava/lang/String;)Ljava/lang/Class;",
    "java.lang.Class", "(Ljava/lang/String;Ljava/lang/Class;)Ljava/lang/Class;",
    ACC_FINAL|ACC_PRIVATE|ACC_STATIC, rewrite_arglist_getclass},

   {"gnu.classpath.VMStackWalker", "getCallingClass", "()Ljava/lang/Class;",
    "gnu.classpath.VMStackWalker", "(Lgnu/gcj/RawData;)Ljava/lang/Class;",
    ACC_FINAL|ACC_PRIVATE|ACC_STATIC, rewrite_arglist_getcaller},

   {"gnu.classpath.VMStackWalker", "getCallingClassLoader",
    "()Ljava/lang/ClassLoader;",
    "gnu.classpath.VMStackWalker", "(Lgnu/gcj/RawData;)Ljava/lang/ClassLoader;",
    ACC_FINAL|ACC_PRIVATE|ACC_STATIC, rewrite_arglist_getcaller},

   {"gnu.java.lang.VMCPStringBuilder", "toString", "([CII)Ljava/lang/String;",
    "java.lang.String", "([CII)Ljava/lang/String;",
    ACC_FINAL|ACC_PRIVATE|ACC_STATIC, NULL},

   {NULL, NULL, NULL, NULL, NULL, 0, NULL}};
/* True if this method is special, i.e. it's a private method that
   should be exported from a DSO.  */

bool
special_method_p (tree candidate_method)
{
  tree context = DECL_NAME (TYPE_NAME (DECL_CONTEXT (candidate_method)));
  tree method = DECL_NAME (candidate_method);
  rewrite_rule *p;

  /* A method is special exactly when it appears in the rewrite-rules
     table above.  */
  for (p = rules; p->classname; p++)
    {
      if (get_identifier (p->classname) == context
	  && get_identifier (p->method) == method)
	return true;
    }
  return false;
}
/* Scan the rules list for replacements for *METHOD_P and replace the
   args accordingly.  If the rewrite results in an access to a private
   method, update SPECIAL.

   On a match: *METHOD_P is redirected to the replacement method
   (creating a dummy external decl if it is missing and we aren't
   verifying invocations), the arglist is rewritten via the rule's
   callback, *METHOD_SIGNATURE_P becomes the new signature, and
   *SPECIAL is set to integer_one_node.  Otherwise *SPECIAL is
   NULL_TREE and nothing is changed.  */

void
maybe_rewrite_invocation (tree *method_p, vec<tree, va_gc> **arg_list_p,
                          tree *method_signature_p, tree *special)
{
  tree context = DECL_NAME (TYPE_NAME (DECL_CONTEXT (*method_p)));
  rewrite_rule *p;
  *special = NULL_TREE;

  for (p = rules; p->classname; p++)
    {
      if (get_identifier (p->classname) == context)
        {
          tree method = DECL_NAME (*method_p);
          if (get_identifier (p->method) == method
              && get_identifier (p->signature) == *method_signature_p)
            {
              tree maybe_method;
              tree destination_class
                = lookup_class (get_identifier (p->new_classname));
              gcc_assert (destination_class);
              maybe_method
                = lookup_java_method (destination_class,
                                      method,
                                      get_identifier (p->new_signature));

              if (! maybe_method && ! flag_verify_invocations)
                {
                  /* The replacement isn't visible; fabricate an
                     external decl with the rule's flags so we can
                     still emit the call.  */
                  maybe_method
                    = add_method (destination_class, p->flags,
                                  method, get_identifier (p->new_signature));
                  DECL_EXTERNAL (maybe_method) = 1;
                }
              *method_p = maybe_method;
              gcc_assert (*method_p);
              if (p->rewrite_arglist)
                p->rewrite_arglist (arg_list_p);
              *method_signature_p = get_identifier (p->new_signature);
              *special = integer_one_node;

              break;
            }
        }
    }
}
/* Build a reference to the code of METHOD when the callee is known at
   compile time.  If SELF_TYPE is compiled, this is either a direct
   address or (under indirect dispatch) an atable slot; otherwise it
   is an expression that loads the method's `ncode' pointer from the
   class's runtime method table.  */

tree
build_known_method_ref (tree method, tree method_type ATTRIBUTE_UNUSED,
                        tree self_type, tree method_signature ATTRIBUTE_UNUSED,
                        vec<tree, va_gc> *arg_list ATTRIBUTE_UNUSED, tree special)
{
  tree func;
  if (is_compiled_class (self_type))
    {
      /* With indirect dispatch we have to use indirect calls for all
         publicly visible methods or gcc will use PLT indirections
         to reach them.  We also have to use indirect dispatch for all
         external methods.  */
      if (! flag_indirect_dispatch
          || (! DECL_EXTERNAL (method) && ! TREE_PUBLIC (method)))
        {
          func = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (method)),
                         method);
        }
      else
        {
          /* Indirect call through the output class's atable.  */
          tree table_index
            = build_int_cst (NULL_TREE,
                             (get_symbol_table_index
                              (method, special,
                               &TYPE_ATABLE_METHODS (output_class))));
          func
            = build4 (ARRAY_REF,
                      TREE_TYPE (TREE_TYPE (TYPE_ATABLE_DECL (output_class))),
                      TYPE_ATABLE_DECL (output_class), table_index,
                      NULL_TREE, NULL_TREE);
        }
      func = convert (method_ptr_type_node, func);
    }
  else
    {
      /* We don't know whether the method has been (statically) compiled.
         Compile this code to get a reference to the method's code:

         SELF_TYPE->methods[METHOD_INDEX].ncode

      */

      int method_index = 0;
      tree meth, ref;

      /* The method might actually be declared in some superclass, so
         we have to use its class context, not the caller's notion of
         where the method is.  */
      self_type = DECL_CONTEXT (method);
      ref = build_class_ref (self_type);
      ref = build1 (INDIRECT_REF, class_type_node, ref);
      if (ncode_ident == NULL_TREE)
        ncode_ident = get_identifier ("ncode");
      if (methods_ident == NULL_TREE)
        methods_ident = get_identifier ("methods");
      ref = build3 (COMPONENT_REF, method_ptr_type_node, ref,
                    lookup_field (&class_type_node, methods_ident),
                    NULL_TREE);
      /* Count METHOD's position in the class's method chain; reaching
         the end of the chain without a match is fatal.  */
      for (meth = TYPE_METHODS (self_type);
           ; meth = DECL_CHAIN (meth))
        {
          if (method == meth)
            break;
          if (meth == NULL_TREE)
            fatal_error ("method '%s' not found in class",
                         IDENTIFIER_POINTER (DECL_NAME (method)));
          method_index++;
        }
      /* Convert the index into a byte offset into the method table.  */
      method_index *= int_size_in_bytes (method_type_node);
      ref = fold_build_pointer_plus_hwi (ref, method_index);
      ref = build1 (INDIRECT_REF, method_type_node, ref);
      func = build3 (COMPONENT_REF, nativecode_ptr_type_node,
                     ref, lookup_field (&method_type_node, ncode_ident),
                     NULL_TREE);
    }
  return func;
}
/* Build an expression that loads the vtable of the receiver (the
   first entry of ARG_LIST).  The receiver is wrapped in a SAVE_EXPR
   so it is only evaluated once.  */

tree
invoke_build_dtable (int is_invoke_interface, vec<tree, va_gc> *arg_list)
{
  tree dtable, objectref;
  tree saved = save_expr ((*arg_list)[0]);

  (*arg_list)[0] = saved;

  /* If we're dealing with interfaces and if the objectref
     argument is an array then get the dispatch table of the class
     Object rather than the one from the objectref.  */
  objectref = (is_invoke_interface
               && is_array_type_p (TREE_TYPE (saved))
               ? build_class_ref (object_type_node) : saved);

  if (dtable_ident == NULL_TREE)
    dtable_ident = get_identifier ("vtable");
  /* Dereference the receiver (with an optional null check) and read
     its `vtable' field.  */
  dtable = build_java_indirect_ref (object_type_node, objectref,
                                    flag_check_references);
  dtable = build3 (COMPONENT_REF, dtable_ptr_type, dtable,
                   lookup_field (&object_type_node, dtable_ident), NULL_TREE);

  return dtable;
}
/* Determine the index in SYMBOL_TABLE for a reference to the decl
   T.  If this decl has not been seen before, it will be added to the
   [oa]table_methods.  If it has, the existing table slot will be
   reused.  Returns a 1-based index.  */

int
get_symbol_table_index (tree t, tree special,
                        vec<method_entry, va_gc> **symbol_table)
{
  method_entry *e;
  unsigned i;
  method_entry elem = {t, special};

  /* Look for an existing entry with the same (method, special) pair;
     on a hit, I is the matching slot.  */
  FOR_EACH_VEC_SAFE_ELT (*symbol_table, i, e)
    if (t == e->method && special == e->special)
      goto done;

  /* Not found: append a new entry.  After the loop above, I is the
     old length, i.e. the index of the entry just pushed.  */
  vec_safe_push (*symbol_table, elem);

 done:
  return i + 1;
}
/* Build the function pointer for an invokevirtual of METHOD through
   the vtable expression DTABLE.  Under indirect dispatch the byte
   offset comes from the otable; otherwise it is computed from
   DECL_VINDEX.  */

tree
build_invokevirtual (tree dtable, tree method, tree special)
{
  tree func;
  tree nativecode_ptr_ptr_type_node
    = build_pointer_type (nativecode_ptr_type_node);
  tree method_index;
  tree otable_index;

  if (flag_indirect_dispatch)
    {
      gcc_assert (! CLASS_INTERFACE (TYPE_NAME (DECL_CONTEXT (method))));

      otable_index
        = build_int_cst (NULL_TREE, get_symbol_table_index
                         (method, special,
                          &TYPE_OTABLE_METHODS (output_class)));
      method_index = build4 (ARRAY_REF, integer_type_node,
                             TYPE_OTABLE_DECL (output_class),
                             otable_index, NULL_TREE, NULL_TREE);
    }
  else
    {
      /* We fetch the DECL_VINDEX field directly here, rather than
         using get_method_index().  DECL_VINDEX is the true offset
         from the vtable base to a method, regardless of any extra
         words inserted at the start of the vtable.  */
      method_index = DECL_VINDEX (method);
      method_index = size_binop (MULT_EXPR, method_index,
                                 TYPE_SIZE_UNIT (nativecode_ptr_ptr_type_node));
      /* Targets using function descriptors store several words per
         vtable slot; scale the offset accordingly.  */
      if (TARGET_VTABLE_USES_DESCRIPTORS)
        method_index = size_binop (MULT_EXPR, method_index,
                                   size_int (TARGET_VTABLE_USES_DESCRIPTORS));
    }

  func = fold_build_pointer_plus (dtable, method_index);

  if (TARGET_VTABLE_USES_DESCRIPTORS)
    /* The slot address itself is the callable descriptor.  */
    func = build1 (NOP_EXPR, nativecode_ptr_type_node, func);
  else
    {
      /* Load the code pointer out of the vtable slot.  */
      func = fold_convert (nativecode_ptr_ptr_type_node, func);
      func = build1 (INDIRECT_REF, nativecode_ptr_type_node, func);
    }

  return func;
}
/* Cached identifier for the vtable's `class' field.  */
static GTY(()) tree class_ident;

/* Build the call for an invokeinterface of METHOD through the vtable
   expression DTABLE.  The result is a call to
   _Jv_LookupInterfaceMethod with the receiver's class, the interface
   (or itable entry), and the method index.  */

tree
build_invokeinterface (tree dtable, tree method)
{
  tree interface;
  tree idx;

  /* We expand invokeinterface here.  */

  if (class_ident == NULL_TREE)
    class_ident = get_identifier ("class");

  /* Fetch the receiver's class from its vtable.  */
  dtable = build_java_indirect_ref (dtable_type, dtable,
                                    flag_check_references);
  dtable = build3 (COMPONENT_REF, class_ptr_type, dtable,
                   lookup_field (&dtable_type, class_ident), NULL_TREE);

  interface = DECL_CONTEXT (method);
  gcc_assert (CLASS_INTERFACE (TYPE_NAME (interface)));
  layout_class_methods (interface);

  if (flag_indirect_dispatch)
    {
      /* itable entries come in pairs: [interface, index].  The symbol
         table index is 1-based, hence itable_index-1 addresses the
         interface half of the pair and itable_index the index half.  */
      int itable_index
        = 2 * (get_symbol_table_index
               (method, NULL_TREE, &TYPE_ITABLE_METHODS (output_class)));
      interface
        = build4 (ARRAY_REF,
                  TREE_TYPE (TREE_TYPE (TYPE_ITABLE_DECL (output_class))),
                  TYPE_ITABLE_DECL (output_class),
                  build_int_cst (NULL_TREE, itable_index-1),
                  NULL_TREE, NULL_TREE);
      idx
        = build4 (ARRAY_REF,
                  TREE_TYPE (TREE_TYPE (TYPE_ITABLE_DECL (output_class))),
                  TYPE_ITABLE_DECL (output_class),
                  build_int_cst (NULL_TREE, itable_index),
                  NULL_TREE, NULL_TREE);
      interface = convert (class_ptr_type, interface);
      idx = convert (integer_type_node, idx);
    }
  else
    {
      idx = build_int_cst (NULL_TREE,
                           get_interface_method_index (method, interface));
      interface = build_class_ref (interface);
    }

  return build_call_nary (ptr_type_node,
                          build_address_of (soft_lookupinterfacemethod_node),
                          3, dtable, interface, idx);
}
/* Expand one of the invoke_* opcodes.
   OPCODE is the specific opcode.
   METHOD_REF_INDEX is an index into the constant pool.
   NARGS is the number of arguments, or -1 if not specified.

   Resolves the callee from the constant pool, diagnoses inconsistent
   lookups (issuing errors and pushing a typed zero when resolution
   fails), applies the rewrite rules, builds the function reference
   (direct, vtable, or itable), and pushes the call's value or emits
   it as a statement.  */

static void
expand_invoke (int opcode, int method_ref_index, int nargs ATTRIBUTE_UNUSED)
{
  tree method_signature
    = COMPONENT_REF_SIGNATURE(&current_jcf->cpool, method_ref_index);
  tree method_name = COMPONENT_REF_NAME (&current_jcf->cpool,
                                         method_ref_index);
  tree self_type
    = get_class_constant (current_jcf,
                          COMPONENT_REF_CLASS_INDEX(&current_jcf->cpool,
                                                    method_ref_index));
  const char *const self_name
    = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (self_type)));
  tree call, func, method, method_type;
  vec<tree, va_gc> *arg_list;
  tree check = NULL_TREE;

  tree special = NULL_TREE;

  if (! CLASS_LOADED_P (self_type))
    {
      load_class (self_type, 1);
      safe_layout_class (self_type);
      if (TREE_CODE (TYPE_SIZE (self_type)) == ERROR_MARK)
        fatal_error ("failed to find class '%s'", self_name);
    }
  layout_class_methods (self_type);

  /* Constructors are looked up by signature only; other methods by
     name and signature.  */
  if (ID_INIT_P (method_name))
    method = lookup_java_constructor (self_type, method_signature);
  else
    method = lookup_java_method (self_type, method_name, method_signature);

  /* We've found a method in a class other than the one in which it
     was wanted.  This can happen if, for instance, we're trying to
     compile invokespecial super.equals().
     FIXME: This is a kludge.  Rather than nullifying the result, we
     should change lookup_java_method() so that it doesn't search the
     superclass chain when we're BC-compiling.  */
  if (! flag_verify_invocations
      && method
      && ! TYPE_ARRAY_P (self_type)
      && self_type != DECL_CONTEXT (method))
    method = NULL_TREE;

  /* We've found a method in an interface, but this isn't an interface
     call.  */
  if (opcode != OPCODE_invokeinterface
      && method
      && (CLASS_INTERFACE (TYPE_NAME (DECL_CONTEXT (method)))))
    method = NULL_TREE;

  /* We've found a non-interface method but we are making an
     interface call.  This can happen if the interface overrides a
     method in Object.  */
  if (! flag_verify_invocations
      && opcode == OPCODE_invokeinterface
      && method
      && ! CLASS_INTERFACE (TYPE_NAME (DECL_CONTEXT (method))))
    method = NULL_TREE;

  if (method == NULL_TREE)
    {
      if (flag_verify_invocations || ! flag_indirect_dispatch)
        {
          error ("class '%s' has no method named '%s' matching signature '%s'",
                 self_name,
                 IDENTIFIER_POINTER (method_name),
                 IDENTIFIER_POINTER (method_signature));
        }
      else
        {
          /* Fabricate a dummy method decl so BC compilation can
             proceed without the callee's class file.  */
          int flags = ACC_PUBLIC;
          if (opcode == OPCODE_invokestatic)
            flags |= ACC_STATIC;
          if (opcode == OPCODE_invokeinterface)
            {
              flags |= ACC_INTERFACE | ACC_ABSTRACT;
              CLASS_INTERFACE (TYPE_NAME (self_type)) = 1;
            }
          method = add_method (self_type, flags, method_name,
                               method_signature);
          DECL_ARTIFICIAL (method) = 1;
          METHOD_DUMMY (method) = 1;
          layout_class_method (self_type, NULL,
                               method, NULL);
        }
    }

  /* Invoke static can't invoke static/abstract method */
  if (method != NULL_TREE)
    {
      if (opcode == OPCODE_invokestatic)
        {
          if (!METHOD_STATIC (method))
            {
              error ("invokestatic on non static method");
              method = NULL_TREE;
            }
          else if (METHOD_ABSTRACT (method))
            {
              error ("invokestatic on abstract method");
              method = NULL_TREE;
            }
        }
      else
        {
          if (METHOD_STATIC (method))
            {
              error ("invoke[non-static] on static method");
              method = NULL_TREE;
            }
        }
    }

  if (method == NULL_TREE)
    {
      /* If we got here, we emitted an error message above.  So we
         just pop the arguments, push a properly-typed zero, and
         continue.  */
      method_type = get_type_from_signature (method_signature);
      pop_arguments (method_type);
      if (opcode != OPCODE_invokestatic)
        pop_type (self_type);
      method_type = promote_type (TREE_TYPE (method_type));
      push_value (convert (method_type, integer_zero_node));
      return;
    }

  arg_list = pop_arguments (TREE_TYPE (method));
  flush_quick_stack ();

  maybe_rewrite_invocation (&method, &arg_list, &method_signature,
                            &special);
  method_type = TREE_TYPE (method);

  func = NULL_TREE;
  if (opcode == OPCODE_invokestatic)
    func = build_known_method_ref (method, method_type, self_type,
                                   method_signature, arg_list, special);
  else if (opcode == OPCODE_invokespecial
           || (opcode == OPCODE_invokevirtual
               && (METHOD_PRIVATE (method)
                   || METHOD_FINAL (method)
                   || CLASS_FINAL (TYPE_NAME (self_type)))))
    {
      /* If the object for the method call is null, we throw an
         exception.  We don't do this if the object is the current
         method's `this'.  In other cases we just rely on an
         optimization pass to eliminate redundant checks.  FIXME:
         Unfortunately there doesn't seem to be a way to determine
         what the current method is right now.
         We do omit the check if we're calling <init>.  */
      /* We use a SAVE_EXPR here to make sure we only evaluate
         the new `self' expression once.  */
      tree save_arg = save_expr ((*arg_list)[0]);
      (*arg_list)[0] = save_arg;
      check = java_check_reference (save_arg, ! DECL_INIT_P (method));
      func = build_known_method_ref (method, method_type, self_type,
                                     method_signature, arg_list, special);
    }
  else
    {
      tree dtable = invoke_build_dtable (opcode == OPCODE_invokeinterface,
                                         arg_list);
      if (opcode == OPCODE_invokevirtual)
        func = build_invokevirtual (dtable, method, special);
      else
        func = build_invokeinterface (dtable, method);
    }

  if (TREE_CODE (func) == ADDR_EXPR)
    TREE_TYPE (func) = build_pointer_type (method_type);
  else
    func = build1 (NOP_EXPR, build_pointer_type (method_type), func);

  call = build_call_vec (TREE_TYPE (method_type), func, arg_list);
  TREE_SIDE_EFFECTS (call) = 1;
  call = check_for_builtin (method, call);

  /* Prepend the null check (if any) so it runs before the call.  */
  if (check != NULL_TREE)
    {
      call = build2 (COMPOUND_EXPR, TREE_TYPE (call), check, call);
      TREE_SIDE_EFFECTS (call) = 1;
    }

  if (TREE_CODE (TREE_TYPE (method_type)) == VOID_TYPE)
    java_add_stmt (call);
  else
    {
      push_value (call);
      flush_quick_stack ();
    }
}
/* Create a stub which will be put into the vtable but which will call
   a JNI function.

   The stub: acquires a JNI environment frame, lazily resolves the
   native function via _Jv_LookupJNIMethod (cached in a static `meth'
   variable), calls it with (env [, class] , args...), unwraps a
   possible weak reference result, pops the JNI frame, and returns.
   Static non-private (or inner-class) methods are prefixed with a
   class initialization call.  Returns the BIND_EXPR forming the
   method body.  */

tree
build_jni_stub (tree method)
{
  tree jnifunc, call, body, method_sig, arg_types;
  tree jniarg0, jniarg1, jniarg2, jniarg3;
  tree jni_func_type, tem;
  tree env_var, res_var = NULL_TREE, block;
  tree method_args;
  tree meth_var;
  tree bind;
  vec<tree, va_gc> *args = NULL;
  int args_size = 0;

  tree klass = DECL_CONTEXT (method);
  klass = build_class_ref (klass);

  gcc_assert (METHOD_NATIVE (method) && flag_jni);

  DECL_ARTIFICIAL (method) = 1;
  DECL_EXTERNAL (method) = 0;

  env_var = build_decl (input_location,
                        VAR_DECL, get_identifier ("env"), ptr_type_node);
  DECL_CONTEXT (env_var) = method;

  /* Non-void methods need a local to hold the result.  */
  if (TREE_TYPE (TREE_TYPE (method)) != void_type_node)
    {
      res_var = build_decl (input_location, VAR_DECL, get_identifier ("res"),
                            TREE_TYPE (TREE_TYPE (method)));
      DECL_CONTEXT (res_var) = method;
      DECL_CHAIN (env_var) = res_var;
    }

  method_args = DECL_ARGUMENTS (method);
  block = build_block (env_var, NULL_TREE, method_args, NULL_TREE);
  TREE_SIDE_EFFECTS (block) = 1;

  /* Compute the local `env' by calling _Jv_GetJNIEnvNewFrame.  */
  body = build2 (MODIFY_EXPR, ptr_type_node, env_var,
                 build_call_nary (ptr_type_node,
                                  build_address_of (soft_getjnienvnewframe_node),
                                  1, klass));

  /* The JNIEnv structure is the first argument to the JNI function.  */
  args_size += int_size_in_bytes (TREE_TYPE (env_var));
  vec_safe_push (args, env_var);

  /* For a static method the second argument is the class.  For a
     non-static method the second argument is `this'; that is already
     available in the argument list.  */
  if (METHOD_STATIC (method))
    {
      args_size += int_size_in_bytes (TREE_TYPE (klass));
      vec_safe_push (args, klass);
    }

  /* All the arguments to this method become arguments to the
     underlying JNI function.  If we had to wrap object arguments in a
     special way, we would do that here.  */
  for (tem = method_args; tem != NULL_TREE; tem = DECL_CHAIN (tem))
    {
      int arg_bits = TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (tem)));
#ifdef PARM_BOUNDARY
      /* Round each argument's size up to the parameter-passing
         boundary; args_size is passed to the lookup routine below.  */
      arg_bits = (((arg_bits + PARM_BOUNDARY - 1) / PARM_BOUNDARY)
                  * PARM_BOUNDARY);
#endif
      args_size += (arg_bits / BITS_PER_UNIT);

      vec_safe_push (args, tem);
    }
  arg_types = TYPE_ARG_TYPES (TREE_TYPE (method));

  /* Argument types for static methods and the JNIEnv structure.
     FIXME: Write and use build_function_type_vec to avoid this.  */
  if (METHOD_STATIC (method))
    arg_types = tree_cons (NULL_TREE, object_ptr_type_node, arg_types);
  arg_types = tree_cons (NULL_TREE, ptr_type_node, arg_types);

  /* We call _Jv_LookupJNIMethod to find the actual underlying
     function pointer.  _Jv_LookupJNIMethod will throw the appropriate
     exception if this function is not found at runtime.  */
  method_sig = build_java_signature (TREE_TYPE (method));
  jniarg0 = klass;
  jniarg1 = build_utf8_ref (DECL_NAME (method));
  jniarg2 = build_utf8_ref (unmangle_classname
                            (IDENTIFIER_POINTER (method_sig),
                             IDENTIFIER_LENGTH (method_sig)));
  jniarg3 = build_int_cst (NULL_TREE, args_size);

  tem = build_function_type (TREE_TYPE (TREE_TYPE (method)), arg_types);

#ifdef MODIFY_JNI_METHOD_CALL
  tem = MODIFY_JNI_METHOD_CALL (tem);
#endif

  jni_func_type = build_pointer_type (tem);

  /* Use the actual function type, rather than a generic pointer type,
     such that this decl keeps the actual pointer type from being
     garbage-collected.  If it is, we end up using canonical types
     with different uids for equivalent function types, and this in
     turn causes utf8 identifiers and output order to vary.  */
  meth_var = build_decl (input_location,
                         VAR_DECL, get_identifier ("meth"), jni_func_type);
  TREE_STATIC (meth_var) = 1;
  TREE_PUBLIC (meth_var) = 0;
  DECL_EXTERNAL (meth_var) = 0;
  DECL_CONTEXT (meth_var) = method;
  DECL_ARTIFICIAL (meth_var) = 1;
  DECL_INITIAL (meth_var) = null_pointer_node;
  TREE_USED (meth_var) = 1;
  chainon (env_var, meth_var);
  build_result_decl (method);

  /* meth != NULL ? meth : (meth = lookup(...)) — lazy one-time
     resolution of the native function pointer.  */
  jnifunc = build3 (COND_EXPR, jni_func_type,
                    build2 (NE_EXPR, boolean_type_node,
                            meth_var, build_int_cst (TREE_TYPE (meth_var), 0)),
                    meth_var,
                    build2 (MODIFY_EXPR, jni_func_type, meth_var,
                            build1
                            (NOP_EXPR, jni_func_type,
                             build_call_nary (ptr_type_node,
                                              build_address_of
                                              (soft_lookupjnimethod_node),
                                              4,
                                              jniarg0, jniarg1,
                                              jniarg2, jniarg3))));

  /* Now we make the actual JNI call via the resulting function
     pointer.  */
  call = build_call_vec (TREE_TYPE (TREE_TYPE (method)), jnifunc, args);

  /* If the JNI call returned a result, capture it here.  If we had to
     unwrap JNI object results, we would do that here.  */
  if (res_var != NULL_TREE)
    {
      /* If the call returns an object, it may return a JNI weak
         reference, in which case we must unwrap it.  */
      if (! JPRIMITIVE_TYPE_P (TREE_TYPE (TREE_TYPE (method))))
        call = build_call_nary (TREE_TYPE (TREE_TYPE (method)),
                                build_address_of (soft_unwrapjni_node),
                                1, call);
      call = build2 (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (method)),
                     res_var, call);
    }

  TREE_SIDE_EFFECTS (call) = 1;

  body = build2 (COMPOUND_EXPR, void_type_node, body, call);
  TREE_SIDE_EFFECTS (body) = 1;

  /* Now free the environment we allocated.  */
  call = build_call_nary (ptr_type_node,
                          build_address_of (soft_jnipopsystemframe_node),
                          1, env_var);
  TREE_SIDE_EFFECTS (call) = 1;
  body = build2 (COMPOUND_EXPR, void_type_node, body, call);
  TREE_SIDE_EFFECTS (body) = 1;

  /* Finally, do the return.  */
  if (res_var != NULL_TREE)
    {
      tree drt;
      gcc_assert (DECL_RESULT (method));
      /* Make sure we copy the result variable to the actual
         result.  We use the type of the DECL_RESULT because it
         might be different from the return type of the function:
         it might be promoted.  */
      drt = TREE_TYPE (DECL_RESULT (method));
      if (drt != TREE_TYPE (res_var))
        res_var = build1 (CONVERT_EXPR, drt, res_var);
      res_var = build2 (MODIFY_EXPR, drt, DECL_RESULT (method), res_var);
      TREE_SIDE_EFFECTS (res_var) = 1;
    }

  body = build2 (COMPOUND_EXPR, void_type_node, body,
                 build1 (RETURN_EXPR, void_type_node, res_var));
  TREE_SIDE_EFFECTS (body) = 1;

  /* Prepend class initialization for static methods reachable from
     other classes.  */
  if (METHOD_STATIC (method)
      && (! METHOD_PRIVATE (method)
          || INNER_CLASS_P (DECL_CONTEXT (method))))
    {
      tree init = build_call_expr (soft_initclass_node, 1,
                                   klass);
      body = build2 (COMPOUND_EXPR, void_type_node, init, body);
      TREE_SIDE_EFFECTS (body) = 1;
    }

  bind = build3 (BIND_EXPR, void_type_node, BLOCK_VARS (block),
                 body, block);
  return bind;
}
2811 /* Given lvalue EXP, return a volatile expression that references the
2812 same object. */
2814 tree
2815 java_modify_addr_for_volatile (tree exp)
2817 tree exp_type = TREE_TYPE (exp);
2818 tree v_type
2819 = build_qualified_type (exp_type,
2820 TYPE_QUALS (exp_type) | TYPE_QUAL_VOLATILE);
2821 tree addr = build_fold_addr_expr (exp);
2822 v_type = build_pointer_type (v_type);
2823 addr = fold_convert (v_type, addr);
2824 exp = build_fold_indirect_ref (addr);
2825 return exp;
/* Expand an operation to extract from or store into a field.
   IS_STATIC is 1 iff the field is static.
   IS_PUTTING is 1 for putting into a field;  0 for getting from the field.
   FIELD_REF_INDEX is an index into the constant pool.

   Errors (missing field, signature mismatch) are diagnosed and, for a
   get, a typed zero is pushed so translation can continue.  Volatile
   fields are accessed through a volatile-qualified reference with
   memory barriers around the access.  */

static void
expand_java_field_op (int is_static, int is_putting, int field_ref_index)
{
  tree self_type
    = get_class_constant (current_jcf,
                          COMPONENT_REF_CLASS_INDEX (&current_jcf->cpool,
                                                     field_ref_index));
  const char *self_name
    = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (self_type)));
  tree field_name = COMPONENT_REF_NAME (&current_jcf->cpool, field_ref_index);
  tree field_signature = COMPONENT_REF_SIGNATURE (&current_jcf->cpool,
                                                  field_ref_index);
  tree field_type = get_type_from_signature (field_signature);
  /* For a put, the new value is on top of the stack; pop it first.  */
  tree new_value = is_putting ? pop_value (field_type) : NULL_TREE;
  tree field_ref;
  int is_error = 0;
  tree original_self_type = self_type;
  tree field_decl;
  tree modify_expr;

  if (! CLASS_LOADED_P (self_type))
    load_class (self_type, 1);
  field_decl = lookup_field (&self_type, field_name);
  if (field_decl == error_mark_node)
    {
      is_error = 1;
    }
  else if (field_decl == NULL_TREE)
    {
      if (! flag_verify_invocations)
        {
          /* BC compilation: fabricate the missing field so we can
             still emit the access.  */
          int flags = ACC_PUBLIC;
          if (is_static)
            flags |= ACC_STATIC;
          self_type = original_self_type;
          field_decl = add_field (original_self_type, field_name,
                                  field_type, flags);
          DECL_ARTIFICIAL (field_decl) = 1;
          DECL_IGNORED_P (field_decl) = 1;
#if 0
          /* FIXME: We should be pessimistic about volatility.  We
             don't know one way or another, but this is safe.
             However, doing this has bad effects on code quality.  We
             need to look at better ways to do this.  */
          TREE_THIS_VOLATILE (field_decl) = 1;
#endif
        }
      else
        {
          error ("missing field '%s' in '%s'",
                 IDENTIFIER_POINTER (field_name), self_name);
          is_error = 1;
        }
    }
  else if (build_java_signature (TREE_TYPE (field_decl)) != field_signature)
    {
      error ("mismatching signature for field '%s' in '%s'",
             IDENTIFIER_POINTER (field_name), self_name);
      is_error = 1;
    }
  /* For an instance field the objectref is still on the stack; pop it
     even on the error path so the stack stays balanced.  */
  field_ref = is_static ? NULL_TREE : pop_value (self_type);
  if (is_error)
    {
      if (! is_putting)
        push_value (convert (field_type, integer_zero_node));
      flush_quick_stack ();
      return;
    }

  field_ref = build_field_ref (field_ref, self_type, field_name);
  if (is_static
      && ! flag_indirect_dispatch)
    {
      /* Accessing a static field must trigger initialization of the
         class that actually declares it.  */
      tree context = DECL_CONTEXT (field_ref);
      if (context != self_type && CLASS_INTERFACE (TYPE_NAME (context)))
        field_ref = build_class_init (context, field_ref);
      else
        field_ref = build_class_init (self_type, field_ref);
    }
  if (is_putting)
    {
      flush_quick_stack ();
      if (FIELD_FINAL (field_decl))
        {
          if (DECL_CONTEXT (field_decl) != current_class)
            error ("assignment to final field %q+D not in field%'s class",
                   field_decl);
          /* We used to check for assignments to final fields not
             occurring in the class initializer or in a constructor
             here.  However, this constraint doesn't seem to be
             enforced by the JVM.  */
        }

      if (TREE_THIS_VOLATILE (field_decl))
        field_ref = java_modify_addr_for_volatile (field_ref);

      modify_expr = build2 (MODIFY_EXPR, TREE_TYPE (field_ref),
                            field_ref, new_value);

      /* Emit a full barrier before a volatile store.  */
      if (TREE_THIS_VOLATILE (field_decl))
        {
          tree sync = builtin_decl_explicit (BUILT_IN_SYNC_SYNCHRONIZE);
          java_add_stmt (build_call_expr (sync, 0));
        }

      java_add_stmt (modify_expr);
    }
  else
    {
      /* Load into a temporary so the (possibly volatile) read happens
         exactly once, then push the temporary.  */
      tree temp = build_decl (input_location,
                              VAR_DECL, NULL_TREE, TREE_TYPE (field_ref));
      java_add_local_var (temp);

      if (TREE_THIS_VOLATILE (field_decl))
        field_ref = java_modify_addr_for_volatile (field_ref);

      modify_expr
        = build2 (MODIFY_EXPR, TREE_TYPE (field_ref), temp, field_ref);
      java_add_stmt (modify_expr);

      /* Emit a full barrier after a volatile load.  */
      if (TREE_THIS_VOLATILE (field_decl))
        {
          tree sync = builtin_decl_explicit (BUILT_IN_SYNC_SYNCHRONIZE);
          java_add_stmt (build_call_expr (sync, 0));
        }

      push_value (temp);
    }
  TREE_THIS_VOLATILE (field_ref) = TREE_THIS_VOLATILE (field_decl);
}
2965 static void
2966 load_type_state (int pc)
2968 int i;
2969 tree vec = (*type_states)[pc];
2970 int cur_length = TREE_VEC_LENGTH (vec);
2971 stack_pointer = cur_length - DECL_MAX_LOCALS(current_function_decl);
2972 for (i = 0; i < cur_length; i++)
2973 type_map [i] = TREE_VEC_ELT (vec, i);
/* Go over METHOD's bytecode and note instruction starts in
   instruction_bits[].

   This is a pre-pass over the code attribute: it marks every
   instruction boundary with BCODE_INSTRUCTION_START and, via
   NOTE_LABEL/note_label, records every possible jump target.  The
   PRE_* macros below give each opcode kind just enough of a body to
   skip its immediate operands and note any branch offsets; javaop.def
   then expands one `case' per opcode.  */

void
note_instructions (JCF *jcf, tree method)
{
  int PC;
  unsigned char* byte_ops;
  long length = DECL_CODE_LENGTH (method);

  int saw_index;
  jint INT_temp;

#undef RET /* Defined by config/i386/i386.h */
#undef PTR
#define BCODE byte_ops
#define BYTE_type_node byte_type_node
#define SHORT_type_node short_type_node
#define INT_type_node int_type_node
#define LONG_type_node long_type_node
#define CHAR_type_node char_type_node
#define PTR_type_node ptr_type_node
#define FLOAT_type_node float_type_node
#define DOUBLE_type_node double_type_node
#define VOID_type_node void_type_node
#define CONST_INDEX_1 (saw_index = 1, IMMEDIATE_u1)
#define CONST_INDEX_2 (saw_index = 1, IMMEDIATE_u2)
#define VAR_INDEX_1 (saw_index = 1, IMMEDIATE_u1)
#define VAR_INDEX_2 (saw_index = 1, IMMEDIATE_u2)

#define CHECK_PC_IN_RANGE(PC) ((void)1) /* Already handled by verifier. */

  JCF_SEEK (jcf, DECL_CODE_OFFSET (method));
  byte_ops = jcf->read_ptr;
  /* (Re)allocate the per-PC bit array and the per-PC saved type
     states; both are indexed 0..length inclusive.  */
  instruction_bits = XRESIZEVAR (char, instruction_bits, length + 1);
  memset (instruction_bits, 0, length + 1);
  vec_alloc (type_states, length + 1);
  type_states->quick_grow_cleared (length + 1);

  /* This pass figures out which PC can be the targets of jumps. */
  for (PC = 0; PC < length;)
    {
      int oldpc = PC; /* PC at instruction start. */
      instruction_bits [PC] |= BCODE_INSTRUCTION_START;
      switch (byte_ops[PC++])
        {
#define JAVAOP(OPNAME, OPCODE, OPKIND, OPERAND_TYPE, OPERAND_VALUE) \
        case OPCODE: \
          PRE_##OPKIND(OPERAND_TYPE, OPERAND_VALUE); \
          break;

#define NOTE_LABEL(PC) note_label(oldpc, PC)

#define PRE_PUSHC(OPERAND_TYPE, OPERAND_VALUE) (void)(OPERAND_VALUE);
#define PRE_LOAD(OPERAND_TYPE, OPERAND_VALUE) (void)(OPERAND_VALUE);
#define PRE_STORE(OPERAND_TYPE, OPERAND_VALUE) (void)(OPERAND_VALUE);
#define PRE_STACK(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
#define PRE_UNOP(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
#define PRE_BINOP(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
#define PRE_CONVERT(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
#define PRE_CONVERT2(OPERAND_TYPE, OPERAND_VALUE) /* nothing */

#define PRE_SPECIAL(OPERAND_TYPE, INSTRUCTION) \
  PRE_SPECIAL_##INSTRUCTION(OPERAND_TYPE)
#define PRE_SPECIAL_IINC(OPERAND_TYPE) \
  ((void) IMMEDIATE_u1, (void) IMMEDIATE_s1)
#define PRE_SPECIAL_ENTER(IGNORE) /* nothing */
#define PRE_SPECIAL_EXIT(IGNORE) /* nothing */
#define PRE_SPECIAL_THROW(IGNORE) /* nothing */
#define PRE_SPECIAL_BREAK(IGNORE) /* nothing */

/* two forms of wide instructions */
#define PRE_SPECIAL_WIDE(IGNORE) \
  { \
    int modified_opcode = IMMEDIATE_u1; \
    if (modified_opcode == OPCODE_iinc) \
      { \
        (void) IMMEDIATE_u2;	/* indexbyte1 and indexbyte2 */ \
        (void) IMMEDIATE_s2;	/* constbyte1 and constbyte2 */ \
      } \
    else \
      { \
        (void) IMMEDIATE_u2;	/* indexbyte1 and indexbyte2 */ \
      } \
  }

#define PRE_IMPL(IGNORE1, IGNORE2) /* nothing */

#define PRE_MONITOR(OPERAND_TYPE, OPERAND_VALUE) /* nothing */

#define PRE_RETURN(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
#define PRE_ARRAY(OPERAND_TYPE, SUBOP) \
          PRE_ARRAY_##SUBOP(OPERAND_TYPE)
#define PRE_ARRAY_LOAD(TYPE) /* nothing */
#define PRE_ARRAY_STORE(TYPE) /* nothing */
#define PRE_ARRAY_LENGTH(TYPE) /* nothing */
#define PRE_ARRAY_NEW(TYPE) PRE_ARRAY_NEW_##TYPE
#define PRE_ARRAY_NEW_NUM ((void) IMMEDIATE_u1)
#define PRE_ARRAY_NEW_PTR ((void) IMMEDIATE_u2)
#define PRE_ARRAY_NEW_MULTI ((void) IMMEDIATE_u2, (void) IMMEDIATE_u1)

#define PRE_TEST(OPERAND_TYPE, OPERAND_VALUE) NOTE_LABEL (oldpc+IMMEDIATE_s2)
#define PRE_COND(OPERAND_TYPE, OPERAND_VALUE) NOTE_LABEL (oldpc+IMMEDIATE_s2)
#define PRE_BRANCH(OPERAND_TYPE, OPERAND_VALUE) \
  saw_index = 0;  INT_temp = (OPERAND_VALUE); \
  if (!saw_index)  NOTE_LABEL(oldpc + INT_temp);
#define PRE_JSR(OPERAND_TYPE, OPERAND_VALUE) \
  saw_index = 0;  INT_temp = (OPERAND_VALUE); \
  NOTE_LABEL (PC); \
  if (!saw_index)  NOTE_LABEL(oldpc + INT_temp);

#define PRE_RET(OPERAND_TYPE, OPERAND_VALUE)  (void)(OPERAND_VALUE)

#define PRE_SWITCH(OPERAND_TYPE, TABLE_OR_LOOKUP) \
  PC = (PC + 3) / 4 * 4; PRE_##TABLE_OR_LOOKUP##_SWITCH

#define PRE_LOOKUP_SWITCH \
  { jint default_offset = IMMEDIATE_s4;  jint npairs = IMMEDIATE_s4; \
    NOTE_LABEL (default_offset+oldpc); \
    if (npairs >= 0) \
      while (--npairs >= 0) { \
       jint match ATTRIBUTE_UNUSED = IMMEDIATE_s4; \
       jint offset = IMMEDIATE_s4; \
       NOTE_LABEL (offset+oldpc); } \
  }

#define PRE_TABLE_SWITCH \
  { jint default_offset = IMMEDIATE_s4; \
    jint low = IMMEDIATE_s4; jint high = IMMEDIATE_s4; \
    NOTE_LABEL (default_offset+oldpc); \
    if (low <= high) \
      while (low++ <= high) { \
       jint offset = IMMEDIATE_s4; \
       NOTE_LABEL (offset+oldpc); } \
  }

#define PRE_FIELD(MAYBE_STATIC, PUT_OR_GET) (void)(IMMEDIATE_u2);
#define PRE_OBJECT(MAYBE_STATIC, PUT_OR_GET) (void)(IMMEDIATE_u2);
#define PRE_INVOKE(MAYBE_STATIC, IS_INTERFACE) \
  (void)(IMMEDIATE_u2); \
  PC += 2 * IS_INTERFACE /* for invokeinterface */;

#include "javaop.def"
#undef JAVAOP
        }
    } /* for */
}
/* Translate the bytecode of METHOD, read from JCF, into GENERIC
   statements appended to the current function.  Unverified (dead)
   regions are overwritten with nops, and the line number table is
   replayed so each expanded statement carries a source location.  */
void
expand_byte_code (JCF *jcf, tree method)
  int PC;
  int i;
  const unsigned char *linenumber_pointer;
  int dead_code_index = -1;  /* PC where the current dead-code run began, or -1.  */
  unsigned char* byte_ops;
  long length = DECL_CODE_LENGTH (method);
  location_t max_location = input_location;

  stack_pointer = 0;
  JCF_SEEK (jcf, DECL_CODE_OFFSET (method));
  byte_ops = jcf->read_ptr;

  /* We make an initial pass of the line number table, to note
     which instructions have associated line number entries.  */
  linenumber_pointer = linenumber_table;
  for (i = 0; i < linenumber_count; i++)
      int pc = GET_u2 (linenumber_pointer);
      linenumber_pointer += 4;  /* Each entry is a (u2 pc, u2 line) pair.  */
      if (pc >= length)
        warning (0, "invalid PC in line number table");
      else
          /* A second entry for the same PC means multiple line numbers.  */
          if ((instruction_bits[pc] & BCODE_HAS_LINENUMBER) != 0)
            instruction_bits[pc] |= BCODE_HAS_MULTI_LINENUMBERS;
          instruction_bits[pc] |= BCODE_HAS_LINENUMBER;

  /* Give up on methods the bytecode verifier rejects.  */
  if (! verify_jvm_instructions_new (jcf, byte_ops, length))
    return;

  promote_arguments ();
  cache_this_class_ref (method);
  cache_cpool_data_ref ();

  /* Translate bytecodes.  */
  linenumber_pointer = linenumber_table;
  for (PC = 0; PC < length;)
      /* Emit a label (and reload the verifier's type state) at branch
         targets; the quick stack must be flushed across a join point.  */
      if ((instruction_bits [PC] & BCODE_TARGET) != 0 || PC == 0)
          tree label = lookup_label (PC);
          flush_quick_stack ();
          if ((instruction_bits [PC] & BCODE_TARGET) != 0)
            java_add_stmt (build1 (LABEL_EXPR, void_type_node, label));
          if ((instruction_bits[PC] & BCODE_VERIFIED) != 0)
            load_type_state (PC);

      if (! (instruction_bits [PC] & BCODE_VERIFIED))
          if (dead_code_index == -1)
              /* This is the start of a region of unreachable bytecodes.
                 They still need to be processed in order for EH ranges
                 to get handled correctly.  However, we can simply
                 replace these bytecodes with nops.  */
              dead_code_index = PC;

          /* Turn this bytecode into a nop.  */
          byte_ops[PC] = 0x0;
      else
          if (dead_code_index != -1)
              /* We've just reached the end of a region of dead code.  */
              if (extra_warnings)
                warning (0, "unreachable bytecode from %d to before %d",
                         dead_code_index, PC);
              dead_code_index = -1;

      /* Handle possible line number entry for this PC.

         This code handles out-of-order and multiple linenumbers per PC,
         but is optimized for the case of line numbers increasing
         monotonically with PC. */
      if ((instruction_bits[PC] & BCODE_HAS_LINENUMBER) != 0)
          /* Rewind to the table start unless the next entry is ours.  */
          if ((instruction_bits[PC] & BCODE_HAS_MULTI_LINENUMBERS) != 0
              || GET_u2 (linenumber_pointer) != PC)
            linenumber_pointer = linenumber_table;
          while (linenumber_pointer < linenumber_table + linenumber_count * 4)
              int pc = GET_u2 (linenumber_pointer);
              linenumber_pointer += 4;
              if (pc == PC)
                  int line = GET_u2 (linenumber_pointer - 2);
                  input_location = linemap_line_start (line_table, line, 1);
                  if (input_location > max_location)
                    max_location = input_location;
                  if (!(instruction_bits[PC] & BCODE_HAS_MULTI_LINENUMBERS))
                    break;

      maybe_pushlevels (PC);
      PC = process_jvm_instruction (PC, byte_ops, length);
      maybe_poplevels (PC);
    } /* for */

  uncache_this_class_ref (method);

  if (dead_code_index != -1)
      /* We've just reached the end of a region of dead code.  */
      if (extra_warnings)
        warning (0, "unreachable bytecode from %d to the end of the method",
                 dead_code_index);

  /* Remember the highest source location seen, for debug info.  */
  DECL_FUNCTION_LAST_LINE (method) = max_location;
3246 static void
3247 java_push_constant_from_pool (JCF *jcf, int index)
3249 tree c;
3250 if (JPOOL_TAG (jcf, index) == CONSTANT_String)
3252 tree name;
3253 name = get_name_constant (jcf, JPOOL_USHORT1 (jcf, index));
3254 index = alloc_name_constant (CONSTANT_String, name);
3255 c = build_ref_from_constant_pool (index);
3256 c = convert (promote_type (string_type_node), c);
3258 else if (JPOOL_TAG (jcf, index) == CONSTANT_Class
3259 || JPOOL_TAG (jcf, index) == CONSTANT_ResolvedClass)
3261 tree record = get_class_constant (jcf, index);
3262 c = build_class_ref (record);
3264 else
3265 c = get_constant (jcf, index);
3266 push_value (c);
3270 process_jvm_instruction (int PC, const unsigned char* byte_ops,
3271 long length ATTRIBUTE_UNUSED)
3273 const char *opname; /* Temporary ??? */
3274 int oldpc = PC; /* PC at instruction start. */
3276 /* If the instruction is at the beginning of an exception handler,
3277 replace the top of the stack with the thrown object reference. */
3278 if (instruction_bits [PC] & BCODE_EXCEPTION_TARGET)
3280 /* Note that the verifier will not emit a type map at all for
3281 dead exception handlers. In this case we just ignore the
3282 situation. */
3283 if ((instruction_bits[PC] & BCODE_VERIFIED) != 0)
3285 tree type = pop_type (promote_type (throwable_type_node));
3286 push_value (build_exception_object_ref (type));
3290 switch (byte_ops[PC++])
3292 #define JAVAOP(OPNAME, OPCODE, OPKIND, OPERAND_TYPE, OPERAND_VALUE) \
3293 case OPCODE: \
3294 opname = #OPNAME; \
3295 OPKIND(OPERAND_TYPE, OPERAND_VALUE); \
3296 break;
3298 #define RET(OPERAND_TYPE, OPERAND_VALUE) \
3300 int saw_index = 0; \
3301 int index = OPERAND_VALUE; \
3302 (void) saw_index; /* Avoid set but not used warning. */ \
3303 build_java_ret \
3304 (find_local_variable (index, return_address_type_node, oldpc)); \
3307 #define JSR(OPERAND_TYPE, OPERAND_VALUE) \
3309 /* OPERAND_VALUE may have side-effects on PC */ \
3310 int opvalue = OPERAND_VALUE; \
3311 build_java_jsr (oldpc + opvalue, PC); \
3314 /* Push a constant onto the stack. */
3315 #define PUSHC(OPERAND_TYPE, OPERAND_VALUE) \
3316 { int saw_index = 0; int ival = (OPERAND_VALUE); \
3317 if (saw_index) java_push_constant_from_pool (current_jcf, ival); \
3318 else expand_java_pushc (ival, OPERAND_TYPE##_type_node); }
3320 /* internal macro added for use by the WIDE case */
3321 #define LOAD_INTERNAL(OPTYPE, OPVALUE) \
3322 expand_load_internal (OPVALUE, type_map[OPVALUE], oldpc);
3324 /* Push local variable onto the opcode stack. */
3325 #define LOAD(OPERAND_TYPE, OPERAND_VALUE) \
3327 /* have to do this since OPERAND_VALUE may have side-effects */ \
3328 int opvalue = OPERAND_VALUE; \
3329 LOAD_INTERNAL(OPERAND_TYPE##_type_node, opvalue); \
3332 #define RETURN(OPERAND_TYPE, OPERAND_VALUE) \
3333 expand_java_return (OPERAND_TYPE##_type_node)
3335 #define REM_EXPR TRUNC_MOD_EXPR
3336 #define BINOP(OPERAND_TYPE, OPERAND_VALUE) \
3337 expand_java_binop (OPERAND_TYPE##_type_node, OPERAND_VALUE##_EXPR)
3339 #define FIELD(IS_STATIC, IS_PUT) \
3340 expand_java_field_op (IS_STATIC, IS_PUT, IMMEDIATE_u2)
3342 #define TEST(OPERAND_TYPE, CONDITION) \
3343 expand_test (CONDITION##_EXPR, OPERAND_TYPE##_type_node, oldpc+IMMEDIATE_s2)
3345 #define COND(OPERAND_TYPE, CONDITION) \
3346 expand_cond (CONDITION##_EXPR, OPERAND_TYPE##_type_node, oldpc+IMMEDIATE_s2)
3348 #define BRANCH(OPERAND_TYPE, OPERAND_VALUE) \
3349 BRANCH_##OPERAND_TYPE (OPERAND_VALUE)
3351 #define BRANCH_GOTO(OPERAND_VALUE) \
3352 expand_java_goto (oldpc + OPERAND_VALUE)
3354 #define BRANCH_CALL(OPERAND_VALUE) \
3355 expand_java_call (oldpc + OPERAND_VALUE, oldpc)
3357 #if 0
3358 #define BRANCH_RETURN(OPERAND_VALUE) \
3360 tree type = OPERAND_TYPE##_type_node; \
3361 tree value = find_local_variable (OPERAND_VALUE, type, oldpc); \
3362 expand_java_ret (value); \
3364 #endif
3366 #define NOT_IMPL(OPERAND_TYPE, OPERAND_VALUE) \
3367 fprintf (stderr, "%3d: %s ", oldpc, opname); \
3368 fprintf (stderr, "(not implemented)\n")
3369 #define NOT_IMPL1(OPERAND_VALUE) \
3370 fprintf (stderr, "%3d: %s ", oldpc, opname); \
3371 fprintf (stderr, "(not implemented)\n")
3373 #define BRANCH_RETURN(OPERAND_VALUE) NOT_IMPL1(OPERAND_VALUE)
3375 #define STACK(SUBOP, COUNT) STACK_##SUBOP (COUNT)
3377 #define STACK_POP(COUNT) java_stack_pop (COUNT)
3379 #define STACK_SWAP(COUNT) java_stack_swap()
3381 #define STACK_DUP(COUNT) java_stack_dup (COUNT, 0)
3382 #define STACK_DUPx1(COUNT) java_stack_dup (COUNT, 1)
3383 #define STACK_DUPx2(COUNT) java_stack_dup (COUNT, 2)
3385 #define SWITCH(OPERAND_TYPE, TABLE_OR_LOOKUP) \
3386 PC = (PC + 3) / 4 * 4; TABLE_OR_LOOKUP##_SWITCH
3388 #define LOOKUP_SWITCH \
3389 { jint default_offset = IMMEDIATE_s4; jint npairs = IMMEDIATE_s4; \
3390 tree selector = pop_value (INT_type_node); \
3391 tree switch_expr = expand_java_switch (selector, oldpc + default_offset); \
3392 while (--npairs >= 0) \
3394 jint match = IMMEDIATE_s4; jint offset = IMMEDIATE_s4; \
3395 expand_java_add_case (switch_expr, match, oldpc + offset); \
3399 #define TABLE_SWITCH \
3400 { jint default_offset = IMMEDIATE_s4; \
3401 jint low = IMMEDIATE_s4; jint high = IMMEDIATE_s4; \
3402 tree selector = pop_value (INT_type_node); \
3403 tree switch_expr = expand_java_switch (selector, oldpc + default_offset); \
3404 for (; low <= high; low++) \
3406 jint offset = IMMEDIATE_s4; \
3407 expand_java_add_case (switch_expr, low, oldpc + offset); \
3411 #define INVOKE(MAYBE_STATIC, IS_INTERFACE) \
3412 { int opcode = byte_ops[PC-1]; \
3413 int method_ref_index = IMMEDIATE_u2; \
3414 int nargs; \
3415 if (IS_INTERFACE) { nargs = IMMEDIATE_u1; (void) IMMEDIATE_u1; } \
3416 else nargs = -1; \
3417 expand_invoke (opcode, method_ref_index, nargs); \
3420 /* Handle new, checkcast, instanceof */
3421 #define OBJECT(TYPE, OP) \
3422 expand_java_##OP (get_class_constant (current_jcf, IMMEDIATE_u2))
3424 #define ARRAY(OPERAND_TYPE, SUBOP) ARRAY_##SUBOP(OPERAND_TYPE)
3426 #define ARRAY_LOAD(OPERAND_TYPE) \
3428 expand_java_arrayload( OPERAND_TYPE##_type_node ); \
3431 #define ARRAY_STORE(OPERAND_TYPE) \
3433 expand_java_arraystore( OPERAND_TYPE##_type_node ); \
3436 #define ARRAY_LENGTH(OPERAND_TYPE) expand_java_array_length();
3437 #define ARRAY_NEW(OPERAND_TYPE) ARRAY_NEW_##OPERAND_TYPE()
3438 #define ARRAY_NEW_PTR() \
3439 push_value (build_anewarray (get_class_constant (current_jcf, \
3440 IMMEDIATE_u2), \
3441 pop_value (int_type_node)));
3442 #define ARRAY_NEW_NUM() \
3444 int atype = IMMEDIATE_u1; \
3445 push_value (build_newarray (atype, pop_value (int_type_node)));\
3447 #define ARRAY_NEW_MULTI() \
3449 tree klass = get_class_constant (current_jcf, IMMEDIATE_u2 ); \
3450 int ndims = IMMEDIATE_u1; \
3451 expand_java_multianewarray( klass, ndims ); \
3454 #define UNOP(OPERAND_TYPE, OPERAND_VALUE) \
3455 push_value (fold_build1 (NEGATE_EXPR, OPERAND_TYPE##_type_node, \
3456 pop_value (OPERAND_TYPE##_type_node)));
3458 #define CONVERT2(FROM_TYPE, TO_TYPE) \
3460 push_value (build1 (NOP_EXPR, int_type_node, \
3461 (convert (TO_TYPE##_type_node, \
3462 pop_value (FROM_TYPE##_type_node))))); \
3465 #define CONVERT(FROM_TYPE, TO_TYPE) \
3467 push_value (convert (TO_TYPE##_type_node, \
3468 pop_value (FROM_TYPE##_type_node))); \
3471 /* internal macro added for use by the WIDE case
3472 Added TREE_TYPE (decl) assignment, apbianco */
3473 #define STORE_INTERNAL(OPTYPE, OPVALUE) \
3475 tree decl, value; \
3476 int index = OPVALUE; \
3477 tree type = OPTYPE; \
3478 value = pop_value (type); \
3479 type = TREE_TYPE (value); \
3480 decl = find_local_variable (index, type, oldpc); \
3481 set_local_type (index, type); \
3482 java_add_stmt (build2 (MODIFY_EXPR, type, decl, value)); \
3485 #define STORE(OPERAND_TYPE, OPERAND_VALUE) \
3487 /* have to do this since OPERAND_VALUE may have side-effects */ \
3488 int opvalue = OPERAND_VALUE; \
3489 STORE_INTERNAL(OPERAND_TYPE##_type_node, opvalue); \
3492 #define SPECIAL(OPERAND_TYPE, INSTRUCTION) \
3493 SPECIAL_##INSTRUCTION(OPERAND_TYPE)
3495 #define SPECIAL_ENTER(IGNORED) MONITOR_OPERATION (soft_monitorenter_node)
3496 #define SPECIAL_EXIT(IGNORED) MONITOR_OPERATION (soft_monitorexit_node)
3498 #define MONITOR_OPERATION(call) \
3500 tree o = pop_value (ptr_type_node); \
3501 tree c; \
3502 flush_quick_stack (); \
3503 c = build_java_monitor (call, o); \
3504 TREE_SIDE_EFFECTS (c) = 1; \
3505 java_add_stmt (c); \
3508 #define SPECIAL_IINC(IGNORED) \
3510 unsigned int local_var_index = IMMEDIATE_u1; \
3511 int ival = IMMEDIATE_s1; \
3512 expand_iinc(local_var_index, ival, oldpc); \
3515 #define SPECIAL_WIDE(IGNORED) \
3517 int modified_opcode = IMMEDIATE_u1; \
3518 unsigned int local_var_index = IMMEDIATE_u2; \
3519 switch (modified_opcode) \
3521 case OPCODE_iinc: \
3523 int ival = IMMEDIATE_s2; \
3524 expand_iinc (local_var_index, ival, oldpc); \
3525 break; \
3527 case OPCODE_iload: \
3528 case OPCODE_lload: \
3529 case OPCODE_fload: \
3530 case OPCODE_dload: \
3531 case OPCODE_aload: \
3533 /* duplicate code from LOAD macro */ \
3534 LOAD_INTERNAL(operand_type[modified_opcode], local_var_index); \
3535 break; \
3537 case OPCODE_istore: \
3538 case OPCODE_lstore: \
3539 case OPCODE_fstore: \
3540 case OPCODE_dstore: \
3541 case OPCODE_astore: \
3543 STORE_INTERNAL(operand_type[modified_opcode], local_var_index); \
3544 break; \
3546 default: \
3547 error ("unrecognized wide sub-instruction"); \
3551 #define SPECIAL_THROW(IGNORED) \
3552 build_java_athrow (pop_value (throwable_type_node))
3554 #define SPECIAL_BREAK NOT_IMPL1
3555 #define IMPL NOT_IMPL
3557 #include "javaop.def"
3558 #undef JAVAOP
3559 default:
3560 fprintf (stderr, "%3d: unknown(%3d)\n", oldpc, byte_ops[PC]);
3562 return PC;
3565 /* Return the opcode at PC in the code section pointed to by
3566 CODE_OFFSET. */
3568 static unsigned char
3569 peek_opcode_at_pc (JCF *jcf, int code_offset, int pc)
3571 unsigned char opcode;
3572 long absolute_offset = (long)JCF_TELL (jcf);
3574 JCF_SEEK (jcf, code_offset);
3575 opcode = jcf->read_ptr [pc];
3576 JCF_SEEK (jcf, absolute_offset);
3577 return opcode;
3580 /* Some bytecode compilers are emitting accurate LocalVariableTable
3581 attributes. Here's an example:
3583 PC <t>store_<n>
3584 PC+1 ...
3586 Attribute "LocalVariableTable"
3587 slot #<n>: ... (PC: PC+1 length: L)
3589 This is accurate because the local in slot <n> really exists after
3590 the opcode at PC is executed, hence from PC+1 to PC+1+L.
3592 This procedure recognizes this situation and extends the live range
3593 of the local in SLOT to START_PC-1 or START_PC-2 (depending on the
3594 length of the store instruction.)
3596 This function is used by `give_name_to_locals' so that a local's
3597 DECL features a DECL_LOCAL_START_PC such that the first related
3598 store operation will use DECL as a destination, not an unrelated
3599 temporary created for the occasion.
   This function relies on a global array (instruction_bits) that
   `note_instructions' should have allocated and filled properly.  */
3605 maybe_adjust_start_pc (struct JCF *jcf, int code_offset,
3606 int start_pc, int slot)
3608 int first, index, opcode;
3609 int pc, insn_pc;
3610 int wide_found = 0;
3612 if (!start_pc)
3613 return start_pc;
3615 first = index = -1;
3617 /* Find last previous instruction and remember it */
3618 for (pc = start_pc-1; pc; pc--)
3619 if (instruction_bits [pc] & BCODE_INSTRUCTION_START)
3620 break;
3621 insn_pc = pc;
3623 /* Retrieve the instruction, handle `wide'. */
3624 opcode = (int) peek_opcode_at_pc (jcf, code_offset, pc++);
3625 if (opcode == OPCODE_wide)
3627 wide_found = 1;
3628 opcode = (int) peek_opcode_at_pc (jcf, code_offset, pc++);
3631 switch (opcode)
3633 case OPCODE_astore_0:
3634 case OPCODE_astore_1:
3635 case OPCODE_astore_2:
3636 case OPCODE_astore_3:
3637 first = OPCODE_astore_0;
3638 break;
3640 case OPCODE_istore_0:
3641 case OPCODE_istore_1:
3642 case OPCODE_istore_2:
3643 case OPCODE_istore_3:
3644 first = OPCODE_istore_0;
3645 break;
3647 case OPCODE_lstore_0:
3648 case OPCODE_lstore_1:
3649 case OPCODE_lstore_2:
3650 case OPCODE_lstore_3:
3651 first = OPCODE_lstore_0;
3652 break;
3654 case OPCODE_fstore_0:
3655 case OPCODE_fstore_1:
3656 case OPCODE_fstore_2:
3657 case OPCODE_fstore_3:
3658 first = OPCODE_fstore_0;
3659 break;
3661 case OPCODE_dstore_0:
3662 case OPCODE_dstore_1:
3663 case OPCODE_dstore_2:
3664 case OPCODE_dstore_3:
3665 first = OPCODE_dstore_0;
3666 break;
3668 case OPCODE_astore:
3669 case OPCODE_istore:
3670 case OPCODE_lstore:
3671 case OPCODE_fstore:
3672 case OPCODE_dstore:
3673 index = peek_opcode_at_pc (jcf, code_offset, pc);
3674 if (wide_found)
3676 int other = peek_opcode_at_pc (jcf, code_offset, ++pc);
3677 index = (other << 8) + index;
3679 break;
3682 /* Now we decide: first >0 means we have a <t>store_<n>, index >0
3683 means we have a <t>store. */
3684 if ((first > 0 && opcode - first == slot) || (index > 0 && index == slot))
3685 start_pc = insn_pc;
3687 return start_pc;
3690 /* Build a node to represent empty statements and blocks. */
3692 tree
3693 build_java_empty_stmt (void)
3695 tree t = build_empty_stmt (input_location);
3696 return t;
3699 /* Promote all args of integral type before generating any code. */
3701 static void
3702 promote_arguments (void)
3704 int i;
3705 tree arg;
3706 for (arg = DECL_ARGUMENTS (current_function_decl), i = 0;
3707 arg != NULL_TREE; arg = DECL_CHAIN (arg), i++)
3709 tree arg_type = TREE_TYPE (arg);
3710 if (INTEGRAL_TYPE_P (arg_type)
3711 && TYPE_PRECISION (arg_type) < 32)
3713 tree copy = find_local_variable (i, integer_type_node, -1);
3714 java_add_stmt (build2 (MODIFY_EXPR, integer_type_node,
3715 copy,
3716 fold_convert (integer_type_node, arg)));
3718 if (TYPE_IS_WIDE (arg_type))
3719 i++;
3723 /* Create a local variable that points to the constant pool. */
3725 static void
3726 cache_cpool_data_ref (void)
3728 if (optimize)
3730 tree cpool;
3731 tree d = build_constant_data_ref (flag_indirect_classes);
3732 tree cpool_ptr = build_decl (input_location, VAR_DECL, NULL_TREE,
3733 build_pointer_type (TREE_TYPE (d)));
3734 java_add_local_var (cpool_ptr);
3735 TREE_CONSTANT (cpool_ptr) = 1;
3737 java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (cpool_ptr),
3738 cpool_ptr, build_address_of (d)));
3739 cpool = build1 (INDIRECT_REF, TREE_TYPE (d), cpool_ptr);
3740 TREE_THIS_NOTRAP (cpool) = 1;
3741 TYPE_CPOOL_DATA_REF (output_class) = cpool;
3745 #include "gt-java-expr.h"