1 /* Process expressions for the GNU compiler for the Java(TM) language.
2 Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004,
3 2005, 2006, 2007, 2008, 2010, 2011 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>.
21 Java and all Java-based marks are trademarks or registered trademarks
22 of Sun Microsystems, Inc. in the United States and other countries.
23 The Free Software Foundation is independent of Sun Microsystems, Inc. */
25 /* Hacked by Per Bothner <bothner@cygnus.com> February 1996. */
27 #include "config.h"
28 #include "system.h"
29 #include "coretypes.h"
30 #include "tm.h" /* For INT_TYPE_SIZE,
31 TARGET_VTABLE_USES_DESCRIPTORS,
32 BITS_PER_UNIT,
33 MODIFY_JNI_METHOD_CALL and
34 PARM_BOUNDARY. */
36 #include "tree.h"
37 #include "flags.h"
38 #include "java-tree.h"
39 #include "javaop.h"
40 #include "java-opcodes.h"
41 #include "jcf.h"
42 #include "java-except.h"
43 #include "parse.h"
44 #include "diagnostic-core.h"
45 #include "ggc.h"
46 #include "tree-iterator.h"
47 #include "target.h"
49 static void flush_quick_stack (void);
50 static void push_value (tree);
51 static tree pop_value (tree);
52 static void java_stack_swap (void);
53 static void java_stack_dup (int, int);
54 static void build_java_athrow (tree);
55 static void build_java_jsr (int, int);
56 static void build_java_ret (tree);
57 static void expand_java_multianewarray (tree, int);
58 static void expand_java_arraystore (tree);
59 static void expand_java_arrayload (tree);
60 static void expand_java_array_length (void);
61 static tree build_java_monitor (tree, tree);
62 static void expand_java_pushc (int, tree);
63 static void expand_java_return (tree);
64 static void expand_load_internal (int, tree, int);
65 static void expand_java_NEW (tree);
66 static void expand_java_INSTANCEOF (tree);
67 static void expand_java_CHECKCAST (tree);
68 static void expand_iinc (unsigned int, int, int);
69 static void expand_java_binop (tree, enum tree_code);
70 static void note_label (int, int);
71 static void expand_compare (enum tree_code, tree, tree, int);
72 static void expand_test (enum tree_code, tree, int);
73 static void expand_cond (enum tree_code, tree, int);
74 static void expand_java_goto (int);
75 static tree expand_java_switch (tree, int);
76 static void expand_java_add_case (tree, int, int);
77 static VEC(tree,gc) *pop_arguments (tree);
78 static void expand_invoke (int, int, int);
79 static void expand_java_field_op (int, int, int);
80 static void java_push_constant_from_pool (struct JCF *, int);
81 static void java_stack_pop (int);
82 static tree build_java_throw_out_of_bounds_exception (tree);
83 static tree build_java_check_indexed_type (tree, tree);
84 static unsigned char peek_opcode_at_pc (struct JCF *, int, int);
85 static void promote_arguments (void);
86 static void cache_cpool_data_ref (void);
88 static GTY(()) tree operand_type[59];
90 static GTY(()) tree methods_ident;
91 static GTY(()) tree ncode_ident;
92 tree dtable_ident = NULL_TREE;
94 /* Set to nonzero value in order to emit class initialization code
95 before static field references. */
96 int always_initialize_class_p = 0;
98 /* We store the stack state in two places:
99 Within a basic block, we use the quick_stack, which is a VEC of expression
100 nodes.
101 This is the top part of the stack; below that we use find_stack_slot.
102 At the end of a basic block, the quick_stack must be flushed
103 to the stack slot array (as handled by find_stack_slot).
104 Using quick_stack generates better code (especially when
105 compiled without optimization), because we do not have to
106 explicitly store and load trees to temporary variables.
108 If a variable is on the quick stack, it means the value of the variable
109 when the quick stack was last flushed. Conceptually, flush_quick_stack
110 saves all the quick_stack elements in parallel. However, that is
111 complicated, so it actually saves them (i.e. copies each stack value
112 to its home virtual register) starting from the low indexes. This allows a
113 quick_stack element at index i (counting from the bottom of the stack) to
114 reference the virtual registers for stack slots >= i, but not deeper ones.
115 This convention makes most operations easier. For example iadd works
116 even when the stack contains (reg[0], reg[1]): It results in the
117 stack containing (reg[0]+reg[1]), which is OK. However, some stack
118 operations are more complicated. For example dup given a stack
119 containing (reg[0]) would yield (reg[0], reg[0]), which would violate
120 the convention, since stack value 1 would refer to a register with
121 lower index (reg[0]), which flush_quick_stack does not safely handle.
122 So dup cannot just add an extra element to the quick_stack, but iadd can.
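/* (Added illustration, not part of the original comment.)  Suppose the
   quick_stack held, from bottom to top, (reg[1] + reg[0], reg[0]).  Slot 1
   refers to reg[0], a register with a lower index, so the low-to-high flush

       reg[0] = reg[1] + reg[0];    (slot 0: clobbers reg[0])
       reg[1] = reg[0];             (slot 1: now sees the new reg[0])

   would store the wrong value into reg[1].  The convention above rules out
   exactly this shape, which is why dup must flush instead of simply adding
   another element to the quick_stack.  */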
125 static GTY(()) VEC(tree,gc) *quick_stack;
127 /* The physical memory page size used in this computer. See
128 build_field_ref(). */
129 static GTY(()) tree page_size;
131 /* The stack pointer of the Java virtual machine.
132 This does include the size of the quick_stack. */
134 int stack_pointer;
136 const unsigned char *linenumber_table;
137 int linenumber_count;
139 /* Largest pc so far in this method that has been passed to lookup_label. */
140 int highest_label_pc_this_method = -1;
142 /* Base value for this method to add to pc to get generated label. */
143 int start_label_pc_this_method = 0;
145 void
146 init_expr_processing (void)
148 operand_type[21] = operand_type[54] = int_type_node;
149 operand_type[22] = operand_type[55] = long_type_node;
150 operand_type[23] = operand_type[56] = float_type_node;
151 operand_type[24] = operand_type[57] = double_type_node;
152 operand_type[25] = operand_type[58] = ptr_type_node;
155 tree
156 java_truthvalue_conversion (tree expr)
158 /* It is simpler and generates better code to have only TRUTH_*_EXPR
159 or comparison expressions as truth values at this level.
161 This function should normally be identity for Java. */
163 switch (TREE_CODE (expr))
165 case EQ_EXPR: case NE_EXPR: case UNEQ_EXPR: case LTGT_EXPR:
166 case LE_EXPR: case GE_EXPR: case LT_EXPR: case GT_EXPR:
167 case UNLE_EXPR: case UNGE_EXPR: case UNLT_EXPR: case UNGT_EXPR:
168 case ORDERED_EXPR: case UNORDERED_EXPR:
169 case TRUTH_ANDIF_EXPR:
170 case TRUTH_ORIF_EXPR:
171 case TRUTH_AND_EXPR:
172 case TRUTH_OR_EXPR:
173 case TRUTH_XOR_EXPR:
174 case TRUTH_NOT_EXPR:
175 case ERROR_MARK:
176 return expr;
178 case INTEGER_CST:
179 return integer_zerop (expr) ? boolean_false_node : boolean_true_node;
181 case REAL_CST:
182 return real_zerop (expr) ? boolean_false_node : boolean_true_node;
184 /* are these legal? XXX JH */
185 case NEGATE_EXPR:
186 case ABS_EXPR:
187 case FLOAT_EXPR:
188 /* These don't change whether an object is nonzero or zero. */
189 return java_truthvalue_conversion (TREE_OPERAND (expr, 0));
191 case COND_EXPR:
192 /* Distribute the conversion into the arms of a COND_EXPR. */
193 return fold_build3 (COND_EXPR, boolean_type_node, TREE_OPERAND (expr, 0),
194 java_truthvalue_conversion (TREE_OPERAND (expr, 1)),
195 java_truthvalue_conversion (TREE_OPERAND (expr, 2)));
197 case NOP_EXPR:
198 /* If this is widening the argument, we can ignore it. */
199 if (TYPE_PRECISION (TREE_TYPE (expr))
200 >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr, 0))))
201 return java_truthvalue_conversion (TREE_OPERAND (expr, 0));
202 /* fall through to default */
204 default:
205 return fold_build2 (NE_EXPR, boolean_type_node,
206 expr, boolean_false_node);
210 /* Save any stack slots that happen to be in the quick_stack into their
211 home virtual register slots.
213 The copy order is from low stack index to high, to support the invariant
214 that the expression for a slot may contain decls for stack slots with
215 higher (or the same) index, but not lower. */
217 static void
218 flush_quick_stack (void)
220 int stack_index = stack_pointer;
221 unsigned ix;
222 tree t;
224 /* Count the number of slots the quick stack is holding. */
225 for (ix = 0; VEC_iterate(tree, quick_stack, ix, t); ix++)
226 stack_index -= 1 + TYPE_IS_WIDE (TREE_TYPE (t));
228 for (ix = 0; VEC_iterate(tree, quick_stack, ix, t); ix++)
230 tree decl, type = TREE_TYPE (t);
232 decl = find_stack_slot (stack_index, type);
233 if (decl != t)
234 java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (t), decl, t));
235 stack_index += 1 + TYPE_IS_WIDE (type);
238 VEC_truncate (tree, quick_stack, 0);
241 /* Push TYPE on the type stack.
242 Return 1 on success, 0 on overflow. */
244 int
245 push_type_0 (tree type)
247 int n_words;
248 type = promote_type (type);
249 n_words = 1 + TYPE_IS_WIDE (type);
250 if (stack_pointer + n_words > DECL_MAX_STACK (current_function_decl))
251 return 0;
252 /* Allocate decl for this variable now, so we get a temporary that
253 survives the whole method. */
254 find_stack_slot (stack_pointer, type);
255 stack_type_map[stack_pointer++] = type;
256 n_words--;
257 while (--n_words >= 0)
258 stack_type_map[stack_pointer++] = TYPE_SECOND;
259 return 1;
262 void
263 push_type (tree type)
265 int r = push_type_0 (type);
266 gcc_assert (r);
269 static void
270 push_value (tree value)
272 tree type = TREE_TYPE (value);
273 if (TYPE_PRECISION (type) < 32 && INTEGRAL_TYPE_P (type))
275 type = promote_type (type);
276 value = convert (type, value);
278 push_type (type);
279 VEC_safe_push (tree, gc, quick_stack, value);
281 /* If the value has a side effect, then we need to evaluate it
282 whether or not the result is used. If the value ends up on the
283 quick stack and is then popped, this won't happen -- so we flush
284 the quick stack. It is safest to simply always flush, though,
285 since TREE_SIDE_EFFECTS doesn't capture COMPONENT_REF, and for
286 the latter we may need to strip conversions. */
287 flush_quick_stack ();
290 /* Pop a type from the type stack.
291 TYPE is the expected type. Return the actual type, which must be
292 convertible to TYPE.
293 On an error, *MESSAGEP is set to a freshly malloc'd error message. */
295 tree
296 pop_type_0 (tree type, char **messagep)
298 int n_words;
299 tree t;
300 *messagep = NULL;
301 if (TREE_CODE (type) == RECORD_TYPE)
302 type = promote_type (type);
303 n_words = 1 + TYPE_IS_WIDE (type);
304 if (stack_pointer < n_words)
306 *messagep = xstrdup ("stack underflow");
307 return type;
309 while (--n_words > 0)
311 if (stack_type_map[--stack_pointer] != void_type_node)
313 *messagep = xstrdup ("Invalid multi-word value on type stack");
314 return type;
317 t = stack_type_map[--stack_pointer];
318 if (type == NULL_TREE || t == type)
319 return t;
320 if (TREE_CODE (t) == TREE_LIST)
324 tree tt = TREE_PURPOSE (t);
325 if (! can_widen_reference_to (tt, type))
327 t = tt;
328 goto fail;
330 t = TREE_CHAIN (t);
332 while (t);
333 return t;
335 if (INTEGRAL_TYPE_P (type) && INTEGRAL_TYPE_P (t)
336 && TYPE_PRECISION (type) <= 32 && TYPE_PRECISION (t) <= 32)
337 return t;
338 if (TREE_CODE (type) == POINTER_TYPE && TREE_CODE (t) == POINTER_TYPE)
340 /* If the expected type we've been passed is object or ptr
341 (i.e. void*), the caller needs to know the real type. */
342 if (type == ptr_type_node || type == object_ptr_type_node)
343 return t;
345 /* Since the verifier has already run, we know that any
346 types we see will be compatible. In BC mode, this fact
347 may be checked at runtime, but if that is so then we can
348 assume its truth here as well. So, we always succeed
349 here, with the expected type. */
350 return type;
353 if (! flag_verify_invocations && flag_indirect_dispatch
354 && t == object_ptr_type_node)
356 if (type != ptr_type_node)
357 warning (0, "need to insert runtime check for %s",
358 xstrdup (lang_printable_name (type, 0)));
359 return type;
362 /* lang_printable_name uses a static buffer, so we must save the result
363 from calling it the first time. */
364 fail:
366 char *temp = xstrdup (lang_printable_name (type, 0));
367 /* If the stack contains a multi-word type, keep popping the stack until
368 the real type is found. */
369 while (t == void_type_node)
370 t = stack_type_map[--stack_pointer];
371 *messagep = concat ("expected type '", temp,
372 "' but stack contains '", lang_printable_name (t, 0),
373 "'", NULL);
374 free (temp);
376 return type;
379 /* Pop a type from the type stack.
380 TYPE is the expected type. Return the actual type, which must be
381 convertible to TYPE, otherwise call error. */
383 tree
384 pop_type (tree type)
386 char *message = NULL;
387 type = pop_type_0 (type, &message);
388 if (message != NULL)
390 error ("%s", message);
391 free (message);
393 return type;
397 /* Return true if two type assertions are equal. */
399 static int
400 type_assertion_eq (const void * k1_p, const void * k2_p)
402 const type_assertion k1 = *(const type_assertion *)k1_p;
403 const type_assertion k2 = *(const type_assertion *)k2_p;
404 return (k1.assertion_code == k2.assertion_code
405 && k1.op1 == k2.op1
406 && k1.op2 == k2.op2);
409 /* Hash a type assertion. */
411 static hashval_t
412 type_assertion_hash (const void *p)
414 const type_assertion *k_p = (const type_assertion *) p;
415 hashval_t hash = iterative_hash (&k_p->assertion_code, sizeof
416 k_p->assertion_code, 0);
418 switch (k_p->assertion_code)
420 case JV_ASSERT_TYPES_COMPATIBLE:
421 hash = iterative_hash (&TYPE_UID (k_p->op2), sizeof TYPE_UID (k_p->op2),
422 hash);
423 /* Fall through. */
425 case JV_ASSERT_IS_INSTANTIABLE:
426 hash = iterative_hash (&TYPE_UID (k_p->op1), sizeof TYPE_UID (k_p->op1),
427 hash);
428 /* Fall through. */
430 case JV_ASSERT_END_OF_TABLE:
431 break;
433 default:
434 gcc_unreachable ();
437 return hash;
440 /* Add an entry to the type assertion table for the given class.
441 KLASS is the class for which this assertion will be evaluated by the
442 runtime during loading/initialization.
443 ASSERTION_CODE is the 'opcode' or type of this assertion: see java-tree.h.
444 OP1 and OP2 are the operands. The tree type of these arguments may be
445 specific to each assertion_code. */
447 void
448 add_type_assertion (tree klass, int assertion_code, tree op1, tree op2)
450 htab_t assertions_htab;
451 type_assertion as;
452 void **as_pp;
454 assertions_htab = TYPE_ASSERTIONS (klass);
455 if (assertions_htab == NULL)
457 assertions_htab = htab_create_ggc (7, type_assertion_hash,
458 type_assertion_eq, NULL);
459 TYPE_ASSERTIONS (current_class) = assertions_htab;
462 as.assertion_code = assertion_code;
463 as.op1 = op1;
464 as.op2 = op2;
466 as_pp = htab_find_slot (assertions_htab, &as, INSERT);
468 /* Don't add the same assertion twice. */
469 if (*as_pp)
470 return;
472 *as_pp = ggc_alloc_type_assertion ();
473 **(type_assertion **)as_pp = as;
477 /* Return 1 if SOURCE_TYPE can be safely widened to TARGET_TYPE.
478 Handles array types and interfaces. */
480 int
481 can_widen_reference_to (tree source_type, tree target_type)
483 if (source_type == ptr_type_node || target_type == object_ptr_type_node)
484 return 1;
486 /* Get rid of pointers */
487 if (TREE_CODE (source_type) == POINTER_TYPE)
488 source_type = TREE_TYPE (source_type);
489 if (TREE_CODE (target_type) == POINTER_TYPE)
490 target_type = TREE_TYPE (target_type);
492 if (source_type == target_type)
493 return 1;
495 /* FIXME: This is very pessimistic, in that it checks everything,
496 even if we already know that the types are compatible. If we're
497 to support full Java class loader semantics, we need this.
498 However, we could do something more optimal. */
499 if (! flag_verify_invocations)
501 add_type_assertion (current_class, JV_ASSERT_TYPES_COMPATIBLE,
502 source_type, target_type);
504 if (!quiet_flag)
505 warning (0, "assert: %s is assign compatible with %s",
506 xstrdup (lang_printable_name (target_type, 0)),
507 xstrdup (lang_printable_name (source_type, 0)));
508 /* Punt everything to runtime. */
509 return 1;
512 if (TYPE_DUMMY (source_type) || TYPE_DUMMY (target_type))
514 return 1;
516 else
518 if (TYPE_ARRAY_P (source_type) || TYPE_ARRAY_P (target_type))
520 HOST_WIDE_INT source_length, target_length;
521 if (TYPE_ARRAY_P (source_type) != TYPE_ARRAY_P (target_type))
523 /* An array implements Cloneable and Serializable. */
524 tree name = DECL_NAME (TYPE_NAME (target_type));
525 return (name == java_lang_cloneable_identifier_node
526 || name == java_io_serializable_identifier_node);
528 target_length = java_array_type_length (target_type);
529 if (target_length >= 0)
531 source_length = java_array_type_length (source_type);
532 if (source_length != target_length)
533 return 0;
535 source_type = TYPE_ARRAY_ELEMENT (source_type);
536 target_type = TYPE_ARRAY_ELEMENT (target_type);
537 if (source_type == target_type)
538 return 1;
539 if (TREE_CODE (source_type) != POINTER_TYPE
540 || TREE_CODE (target_type) != POINTER_TYPE)
541 return 0;
542 return can_widen_reference_to (source_type, target_type);
544 else
546 int source_depth = class_depth (source_type);
547 int target_depth = class_depth (target_type);
549 if (TYPE_DUMMY (source_type) || TYPE_DUMMY (target_type))
551 if (! quiet_flag)
552 warning (0, "assert: %s is assign compatible with %s",
553 xstrdup (lang_printable_name (target_type, 0)),
554 xstrdup (lang_printable_name (source_type, 0)));
555 return 1;
558 /* class_depth can return a negative depth if an error occurred */
559 if (source_depth < 0 || target_depth < 0)
560 return 0;
562 if (CLASS_INTERFACE (TYPE_NAME (target_type)))
564 /* target_type is OK if source_type or source_type ancestors
565 implement target_type. We handle multiple sub-interfaces */
566 tree binfo, base_binfo;
567 int i;
569 for (binfo = TYPE_BINFO (source_type), i = 0;
570 BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
571 if (can_widen_reference_to
572 (BINFO_TYPE (base_binfo), target_type))
573 return 1;
575 if (!i)
576 return 0;
579 for ( ; source_depth > target_depth; source_depth--)
581 source_type
582 = BINFO_TYPE (BINFO_BASE_BINFO (TYPE_BINFO (source_type), 0));
584 return source_type == target_type;
589 static tree
590 pop_value (tree type)
592 type = pop_type (type);
593 if (VEC_length (tree, quick_stack) != 0)
594 return VEC_pop (tree, quick_stack);
595 else
596 return find_stack_slot (stack_pointer, promote_type (type));
600 /* Pop and discard the top COUNT stack slots. */
602 static void
603 java_stack_pop (int count)
605 while (count > 0)
607 tree type;
609 gcc_assert (stack_pointer != 0);
611 type = stack_type_map[stack_pointer - 1];
612 if (type == TYPE_SECOND)
614 count--;
615 gcc_assert (stack_pointer != 1 && count > 0);
617 type = stack_type_map[stack_pointer - 2];
619 pop_value (type);
620 count--;
624 /* Implement the 'swap' operator (to swap two top stack slots). */
626 static void
627 java_stack_swap (void)
629 tree type1, type2;
630 tree temp;
631 tree decl1, decl2;
633 if (stack_pointer < 2
634 || (type1 = stack_type_map[stack_pointer - 1]) == TYPE_SECOND
635 || (type2 = stack_type_map[stack_pointer - 2]) == TYPE_SECOND
636 || TYPE_IS_WIDE (type1) || TYPE_IS_WIDE (type2))
637 /* Bad stack swap. */
638 abort ();
641 flush_quick_stack ();
642 decl1 = find_stack_slot (stack_pointer - 1, type1);
643 decl2 = find_stack_slot (stack_pointer - 2, type2);
644 temp = build_decl (input_location, VAR_DECL, NULL_TREE, type1);
645 java_add_local_var (temp);
646 java_add_stmt (build2 (MODIFY_EXPR, type1, temp, decl1));
647 java_add_stmt (build2 (MODIFY_EXPR, type2,
648 find_stack_slot (stack_pointer - 1, type2),
649 decl2));
650 java_add_stmt (build2 (MODIFY_EXPR, type1,
651 find_stack_slot (stack_pointer - 2, type1),
652 temp));
653 stack_type_map[stack_pointer - 1] = type2;
654 stack_type_map[stack_pointer - 2] = type1;
657 static void
658 java_stack_dup (int size, int offset)
660 int low_index = stack_pointer - size - offset;
661 int dst_index;
662 if (low_index < 0)
663 error ("stack underflow - dup* operation");
665 flush_quick_stack ();
667 stack_pointer += size;
668 dst_index = stack_pointer;
670 for (dst_index = stack_pointer; --dst_index >= low_index; )
672 tree type;
673 int src_index = dst_index - size;
674 if (src_index < low_index)
675 src_index = dst_index + size + offset;
676 type = stack_type_map [src_index];
677 if (type == TYPE_SECOND)
679 /* Dup operation splits 64-bit number. */
680 gcc_assert (src_index > low_index);
682 stack_type_map[dst_index] = type;
683 src_index--; dst_index--;
684 type = stack_type_map[src_index];
685 gcc_assert (TYPE_IS_WIDE (type));
687 else
688 gcc_assert (! TYPE_IS_WIDE (type));
690 if (src_index != dst_index)
692 tree src_decl = find_stack_slot (src_index, type);
693 tree dst_decl = find_stack_slot (dst_index, type);
695 java_add_stmt
696 (build2 (MODIFY_EXPR, TREE_TYPE (dst_decl), dst_decl, src_decl));
697 stack_type_map[dst_index] = type;
702 /* Calls _Jv_Throw or _Jv_Sjlj_Throw. Discard the contents of the
703 value stack. */
705 static void
706 build_java_athrow (tree node)
708 tree call;
710 call = build_call_nary (void_type_node,
711 build_address_of (throw_node),
712 1, node);
713 TREE_SIDE_EFFECTS (call) = 1;
714 java_add_stmt (call);
715 java_stack_pop (stack_pointer);
718 /* Implementation for jsr/ret */
720 static void
721 build_java_jsr (int target_pc, int return_pc)
723 tree where = lookup_label (target_pc);
724 tree ret = lookup_label (return_pc);
725 tree ret_label = fold_build1 (ADDR_EXPR, return_address_type_node, ret);
726 push_value (ret_label);
727 flush_quick_stack ();
728 java_add_stmt (build1 (GOTO_EXPR, void_type_node, where));
730 /* Do not need to emit the label here. We noted the existence of the
731 label as a jump target in note_instructions; we'll emit the label
732 for real at the beginning of the expand_byte_code loop. */
735 static void
736 build_java_ret (tree location)
738 java_add_stmt (build1 (GOTO_EXPR, void_type_node, location));
741 /* Implementation of operations on array: new, load, store, length */
743 tree
744 decode_newarray_type (int atype)
746 switch (atype)
748 case 4: return boolean_type_node;
749 case 5: return char_type_node;
750 case 6: return float_type_node;
751 case 7: return double_type_node;
752 case 8: return byte_type_node;
753 case 9: return short_type_node;
754 case 10: return int_type_node;
755 case 11: return long_type_node;
756 default: return NULL_TREE;
760 /* Map primitive type to the code used by OPCODE_newarray. */
762 int
763 encode_newarray_type (tree type)
765 if (type == boolean_type_node)
766 return 4;
767 else if (type == char_type_node)
768 return 5;
769 else if (type == float_type_node)
770 return 6;
771 else if (type == double_type_node)
772 return 7;
773 else if (type == byte_type_node)
774 return 8;
775 else if (type == short_type_node)
776 return 9;
777 else if (type == int_type_node)
778 return 10;
779 else if (type == long_type_node)
780 return 11;
781 else
782 gcc_unreachable ();
785 /* Build a call to _Jv_ThrowBadArrayIndex(), the
786 ArrayIndexOutOfBoundsException exception handler. */
788 static tree
789 build_java_throw_out_of_bounds_exception (tree index)
791 tree node;
793 /* We need to build a COMPOUND_EXPR because _Jv_ThrowBadArrayIndex()
794 has void return type. We cannot just set the type of the CALL_EXPR below
795 to int_type_node because we would lose it during gimplification. */
796 gcc_assert (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (soft_badarrayindex_node))));
797 node = build_call_nary (void_type_node,
798 build_address_of (soft_badarrayindex_node),
799 1, index);
800 TREE_SIDE_EFFECTS (node) = 1;
802 node = build2 (COMPOUND_EXPR, int_type_node, node, integer_zero_node);
803 TREE_SIDE_EFFECTS (node) = 1; /* Allows expansion within ANDIF */
805 return (node);
808 /* Return the length of an array. Doesn't perform any checking on the nature
809 or value of the array NODE. May be used to implement some bytecodes. */
811 tree
812 build_java_array_length_access (tree node)
814 tree type = TREE_TYPE (node);
815 tree array_type = TREE_TYPE (type);
816 HOST_WIDE_INT length;
818 if (!is_array_type_p (type))
820 /* With the new verifier, we will see an ordinary pointer type
821 here. In this case, we just use an arbitrary array type. */
822 array_type = build_java_array_type (object_ptr_type_node, -1);
823 type = promote_type (array_type);
826 length = java_array_type_length (type);
827 if (length >= 0)
828 return build_int_cst (NULL_TREE, length);
830 node = build3 (COMPONENT_REF, int_type_node,
831 build_java_indirect_ref (array_type, node,
832 flag_check_references),
833 lookup_field (&array_type, get_identifier ("length")),
834 NULL_TREE);
835 IS_ARRAY_LENGTH_ACCESS (node) = 1;
836 return node;
839 /* Optionally checks a reference against the NULL pointer. ARG1: the
840 expr, ARG2: we should check the reference. Don't generate extra
841 checks if we're not generating code. */
843 tree
844 java_check_reference (tree expr, int check)
846 if (!flag_syntax_only && check)
848 expr = save_expr (expr);
849 expr = build3 (COND_EXPR, TREE_TYPE (expr),
850 build2 (EQ_EXPR, boolean_type_node,
851 expr, null_pointer_node),
852 build_call_nary (void_type_node,
853 build_address_of (soft_nullpointer_node),
854 0),
855 expr);
858 return expr;
861 /* Reference an object: just like an INDIRECT_REF, but with checking. */
863 tree
864 build_java_indirect_ref (tree type, tree expr, int check)
866 tree t;
867 t = java_check_reference (expr, check);
868 t = convert (build_pointer_type (type), t);
869 return build1 (INDIRECT_REF, type, t);
872 /* Implement array indexing (either as l-value or r-value).
873 Returns a tree for ARRAY[INDEX], assume TYPE is the element type.
874 Optionally performs bounds checking and/or test to NULL.
875 At this point, ARRAY should have been verified as an array. */
877 tree
878 build_java_arrayaccess (tree array, tree type, tree index)
880 tree node, throw_expr = NULL_TREE;
881 tree data_field;
882 tree ref;
883 tree array_type = TREE_TYPE (TREE_TYPE (array));
884 tree size_exp = fold_convert (sizetype, size_in_bytes (type));
886 if (!is_array_type_p (TREE_TYPE (array)))
888 /* With the new verifier, we will see an ordinary pointer type
889 here. In this case, we just use the correct array type. */
890 array_type = build_java_array_type (type, -1);
893 if (flag_bounds_check)
895 /* Generate:
896 * (unsigned jint) INDEX >= (unsigned jint) LEN
897 * && throw ArrayIndexOutOfBoundsException.
898 * Note this is equivalent to and more efficient than:
899 * INDEX < 0 || INDEX >= LEN && throw ... */
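/* Added note: the single unsigned comparison catches negative indexes too,
   because casting a negative jint to unsigned yields a value >= 2^31, which
   always exceeds any valid array length.  For example, with INDEX == -1 and
   LEN == 10:

       (unsigned jint) -1 == 0xffffffff >= (unsigned jint) 10

   so one test covers both the "too small" and "too large" cases.  */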
900 tree test;
901 tree len = convert (unsigned_int_type_node,
902 build_java_array_length_access (array));
903 test = fold_build2 (GE_EXPR, boolean_type_node,
904 convert (unsigned_int_type_node, index),
905 len);
906 if (! integer_zerop (test))
908 throw_expr
909 = build2 (TRUTH_ANDIF_EXPR, int_type_node, test,
910 build_java_throw_out_of_bounds_exception (index));
911 /* allows expansion within COMPOUND */
912 TREE_SIDE_EFFECTS( throw_expr ) = 1;
916 /* If checking bounds, wrap the index expr with a COMPOUND_EXPR in order
917 to have the bounds check evaluated first. */
918 if (throw_expr != NULL_TREE)
919 index = build2 (COMPOUND_EXPR, int_type_node, throw_expr, index);
921 data_field = lookup_field (&array_type, get_identifier ("data"));
923 ref = build3 (COMPONENT_REF, TREE_TYPE (data_field),
924 build_java_indirect_ref (array_type, array,
925 flag_check_references),
926 data_field, NULL_TREE);
928 /* Take the address of the data field and convert it to a pointer to
929 the element type. */
930 node = build1 (NOP_EXPR, build_pointer_type (type), build_address_of (ref));
932 /* Multiply the index by the size of an element to obtain a byte
933 offset. Convert the result to a pointer to the element type. */
934 index = build2 (MULT_EXPR, sizetype,
935 fold_convert (sizetype, index),
936 size_exp);
938 /* Sum the byte offset and the address of the data field. */
939 node = fold_build_pointer_plus (node, index);
941 /* Finally, return
943 *((&array->data) + index*size_exp)
946 return build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (node)), node);
949 /* Generate code to throw an ArrayStoreException if OBJECT is not assignable
950 (at runtime) to an element of ARRAY. A NOP_EXPR is returned if it can
951 determine that no check is required. */
953 tree
954 build_java_arraystore_check (tree array, tree object)
956 tree check, element_type, source;
957 tree array_type_p = TREE_TYPE (array);
958 tree object_type = TYPE_NAME (TREE_TYPE (TREE_TYPE (object)));
960 if (! flag_verify_invocations)
962 /* With the new verifier, we don't track precise types. FIXME:
963 performance regression here. */
964 element_type = TYPE_NAME (object_type_node);
966 else
968 gcc_assert (is_array_type_p (array_type_p));
970 /* Get the TYPE_DECL for ARRAY's element type. */
971 element_type
972 = TYPE_NAME (TREE_TYPE (TREE_TYPE (TREE_TYPE (array_type_p))));
975 gcc_assert (TREE_CODE (element_type) == TYPE_DECL
976 && TREE_CODE (object_type) == TYPE_DECL);
978 if (!flag_store_check)
979 return build1 (NOP_EXPR, array_type_p, array);
981 /* No check is needed if the element type is final. Also check that
982 element_type matches object_type, since in the bytecode
983 compilation case element_type may be the actual element type of
984 the array rather than its declared type. However, if we're doing
985 indirect dispatch, we can't do the `final' optimization. */
986 if (element_type == object_type
987 && ! flag_indirect_dispatch
988 && CLASS_FINAL (element_type))
989 return build1 (NOP_EXPR, array_type_p, array);
991 /* OBJECT might be wrapped by a SAVE_EXPR. */
992 if (TREE_CODE (object) == SAVE_EXPR)
993 source = TREE_OPERAND (object, 0);
994 else
995 source = object;
997 /* Avoid the check if OBJECT was just loaded from the same array. */
998 if (TREE_CODE (source) == ARRAY_REF)
1000 tree target;
1001 source = TREE_OPERAND (source, 0); /* COMPONENT_REF. */
1002 source = TREE_OPERAND (source, 0); /* INDIRECT_REF. */
1003 source = TREE_OPERAND (source, 0); /* Source array's DECL or SAVE_EXPR. */
1004 if (TREE_CODE (source) == SAVE_EXPR)
1005 source = TREE_OPERAND (source, 0);
1007 target = array;
1008 if (TREE_CODE (target) == SAVE_EXPR)
1009 target = TREE_OPERAND (target, 0);
1011 if (source == target)
1012 return build1 (NOP_EXPR, array_type_p, array);
1015 /* Build an invocation of _Jv_CheckArrayStore */
1016 check = build_call_nary (void_type_node,
1017 build_address_of (soft_checkarraystore_node),
1018 2, array, object);
1019 TREE_SIDE_EFFECTS (check) = 1;
1021 return check;
1024 /* Makes sure that INDEXED_TYPE is appropriate. If not, make it from
1025 ARRAY_NODE. This function is used to retrieve something less vague than
1026 a pointer type when indexing the first dimension of something like [[<t>.
1027 May return a corrected type, if necessary, otherwise INDEXED_TYPE is
1028 returned unchanged. */
1030 static tree
1031 build_java_check_indexed_type (tree array_node ATTRIBUTE_UNUSED,
1032 tree indexed_type)
1034 /* We used to check to see if ARRAY_NODE really had array type.
1035 However, with the new verifier, this is not necessary, as we know
1036 that the object will be an array of the appropriate type. */
1038 return indexed_type;
1041 /* newarray triggers a call to _Jv_NewPrimArray. This function should be
1042 called with an integer code (the type of array to create), and the length
1043 of the array to create. */
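/* Added example (not from the original sources): the bytecode sequence
   "bipush 5; newarray int" (atype code 10) reaches this function as
   build_newarray (10, <int constant 5>), which builds roughly

       _Jv_NewPrimArray (<class ref for int>, 5)

   since soft_newarray_node is the _Jv_NewPrimArray entry point and the
   class reference comes from build_class_ref on the primitive type.  */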
1045 tree
1046 build_newarray (int atype_value, tree length)
1048 tree type_arg;
1050 tree prim_type = decode_newarray_type (atype_value);
1051 tree type
1052 = build_java_array_type (prim_type,
1053 host_integerp (length, 0)
1054 ? tree_low_cst (length, 0) : -1);
1056 /* Pass a reference to the primitive type class and save the runtime
1057 some work. */
1058 type_arg = build_class_ref (prim_type);
1060 return build_call_nary (promote_type (type),
1061 build_address_of (soft_newarray_node),
1062 2, type_arg, length);
1065 /* Generates anewarray from a given CLASS_TYPE. Gets from the stack the size
1066 of the dimension. */
1068 tree
1069 build_anewarray (tree class_type, tree length)
1071 tree type
1072 = build_java_array_type (class_type,
1073 host_integerp (length, 0)
1074 ? tree_low_cst (length, 0) : -1);
1076 return build_call_nary (promote_type (type),
1077 build_address_of (soft_anewarray_node),
1078 3,
1079 length,
1080 build_class_ref (class_type),
1081 null_pointer_node);
1084 /* Return a node that evaluates 'new TYPE[LENGTH]'. */
1086 tree
1087 build_new_array (tree type, tree length)
1089 if (JPRIMITIVE_TYPE_P (type))
1090 return build_newarray (encode_newarray_type (type), length);
1091 else
1092 return build_anewarray (TREE_TYPE (type), length);
1095 /* Generates a call to _Jv_NewMultiArray. multianewarray expects a
1096 class pointer, a number of dimensions and the matching number of
1097 dimension sizes. The argument list is NULL terminated. */
1099 static void
1100 expand_java_multianewarray (tree class_type, int ndim)
1102 int i;
1103 VEC(tree,gc) *args = NULL;
1105 VEC_safe_grow (tree, gc, args, 3 + ndim);
1107 VEC_replace (tree, args, 0, build_class_ref (class_type));
1108 VEC_replace (tree, args, 1, build_int_cst (NULL_TREE, ndim));
1110 for(i = ndim - 1; i >= 0; i-- )
1111 VEC_replace (tree, args, (unsigned)(2 + i), pop_value (int_type_node));
1113 VEC_replace (tree, args, 2 + ndim, null_pointer_node);
1115 push_value (build_call_vec (promote_type (class_type),
1116 build_address_of (soft_multianewarray_node),
1117 args));
1120 /* ARRAY[INDEX] <- RHS. build_java_check_indexed_type makes sure that
1121 ARRAY is an array type. May expand some bound checking and NULL
1122 pointer checking. RHS_TYPE_NODE is the type of the value we are going to
1123 store. In the case of CHAR/BYTE/BOOLEAN/SHORT, the type popped off the
1124 stack is an INT. In those cases, we make the conversion.
1126 If ARRAY is a reference type, the assignment is checked at run-time
1127 to make sure that the RHS can be assigned to the array element
1128 type. It is not necessary to generate this code if ARRAY is final. */
1130 static void
1131 expand_java_arraystore (tree rhs_type_node)
1133 tree rhs_node = pop_value ((INTEGRAL_TYPE_P (rhs_type_node)
1134 && TYPE_PRECISION (rhs_type_node) <= 32) ?
1135 int_type_node : rhs_type_node);
1136 tree index = pop_value (int_type_node);
1137 tree array_type, array, temp, access;
1139 /* If we're processing an `aaload' we might as well just pick
1140 `Object'. */
1141 if (TREE_CODE (rhs_type_node) == POINTER_TYPE)
1143 array_type = build_java_array_type (object_ptr_type_node, -1);
1144 rhs_type_node = object_ptr_type_node;
1146 else
1147 array_type = build_java_array_type (rhs_type_node, -1);
1149 array = pop_value (array_type);
1150 array = build1 (NOP_EXPR, promote_type (array_type), array);
1152 rhs_type_node = build_java_check_indexed_type (array, rhs_type_node);
1154 flush_quick_stack ();
1156 index = save_expr (index);
1157 array = save_expr (array);
1159 /* We want to perform the bounds check (done by
1160 build_java_arrayaccess) before the type check (done by
1161 build_java_arraystore_check). So, we call build_java_arrayaccess
1162 -- which returns an ARRAY_REF lvalue -- and we then generate code
1163 to stash the address of that lvalue in a temp. Then we call
1164 build_java_arraystore_check, and finally we generate a
1165 MODIFY_EXPR to set the array element. */
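/* Added sketch of the statements generated below for "array[index] = rhs"
   with a reference element type (names are illustrative only):

       elt_ptr = &(*array).data[index];     (bounds and null checks happen here)
       _Jv_CheckArrayStore (array, rhs);    (element-type check second)
       *elt_ptr = rhs;                      (finally the store)

   which yields the required ordering of the two runtime checks.  */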
1167 access = build_java_arrayaccess (array, rhs_type_node, index);
1168 temp = build_decl (input_location, VAR_DECL, NULL_TREE,
1169 build_pointer_type (TREE_TYPE (access)));
1170 java_add_local_var (temp);
1171 java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (temp),
1172 temp,
1173 build_fold_addr_expr (access)));
1175 if (TREE_CODE (rhs_type_node) == POINTER_TYPE)
1177 tree check = build_java_arraystore_check (array, rhs_node);
1178 java_add_stmt (check);
1181 java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (access),
1182 build1 (INDIRECT_REF, TREE_TYPE (access), temp),
1183 rhs_node));
1186 /* Expand the evaluation of ARRAY[INDEX]. build_java_check_indexed_type makes
1187 sure that LHS is an array type. May expand some bound checking and NULL
1188 pointer checking.
1189 LHS_TYPE_NODE is the type of ARRAY[INDEX]. But in the case of CHAR/BYTE/
1190 BOOLEAN/SHORT, we push a promoted type back to the stack.
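/* For instance (added note): a "baload" pops a byte[] reference and an int
   index but, per the JVM stack model, pushes the loaded byte widened to int;
   the NOP_EXPR to int_type_node at the end of this function performs that
   promotion.  */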
1193 static void
1194 expand_java_arrayload (tree lhs_type_node)
1196 tree load_node;
1197 tree index_node = pop_value (int_type_node);
1198 tree array_type;
1199 tree array_node;
1201 /* If we're processing an `aaload' we might as well just pick
1202 `Object'. */
1203 if (TREE_CODE (lhs_type_node) == POINTER_TYPE)
1205 array_type = build_java_array_type (object_ptr_type_node, -1);
1206 lhs_type_node = object_ptr_type_node;
1208 else
1209 array_type = build_java_array_type (lhs_type_node, -1);
1210 array_node = pop_value (array_type);
1211 array_node = build1 (NOP_EXPR, promote_type (array_type), array_node);
1213 index_node = save_expr (index_node);
1214 array_node = save_expr (array_node);
1216 lhs_type_node = build_java_check_indexed_type (array_node,
1217 lhs_type_node);
1218 load_node = build_java_arrayaccess (array_node,
1219 lhs_type_node,
1220 index_node);
1221 if (INTEGRAL_TYPE_P (lhs_type_node) && TYPE_PRECISION (lhs_type_node) <= 32)
1222 load_node = fold_build1 (NOP_EXPR, int_type_node, load_node);
1223 push_value (load_node);
1226 /* Expands .length. Makes sure that we deal with an array and may expand
1227 a NULL check on the array object. */
1229 static void
1230 expand_java_array_length (void)
1232 tree array = pop_value (ptr_type_node);
1233 tree length = build_java_array_length_access (array);
1235 push_value (length);
1238 /* Emit code for the call to _Jv_Monitor{Enter,Exit}. CALL can be
1239 either soft_monitorenter_node or soft_monitorexit_node. */
1241 static tree
1242 build_java_monitor (tree call, tree object)
1244 return build_call_nary (void_type_node,
1245 build_address_of (call),
1246 1, object);
1249 /* Emit code for one of the PUSHC instructions. */
1251 static void
1252 expand_java_pushc (int ival, tree type)
1254 tree value;
1255 if (type == ptr_type_node && ival == 0)
1256 value = null_pointer_node;
1257 else if (type == int_type_node || type == long_type_node)
1258 value = build_int_cst (type, ival);
1259 else if (type == float_type_node || type == double_type_node)
1261 REAL_VALUE_TYPE x;
1262 REAL_VALUE_FROM_INT (x, ival, 0, TYPE_MODE (type));
1263 value = build_real (type, x);
1265 else
1266 gcc_unreachable ();
1268 push_value (value);
1271 static void
1272 expand_java_return (tree type)
1274 if (type == void_type_node)
1275 java_add_stmt (build1 (RETURN_EXPR, void_type_node, NULL));
1276 else
1278 tree retval = pop_value (type);
1279 tree res = DECL_RESULT (current_function_decl);
1280 retval = build2 (MODIFY_EXPR, TREE_TYPE (res), res, retval);
1282 /* Handle the situation where the native integer type is smaller
1283 than the JVM integer. It can happen for many cross compilers.
1284 The whole if expression just goes away if INT_TYPE_SIZE < 32
1285 is false. */
1286 if (INT_TYPE_SIZE < 32
1287 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (res)))
1288 < GET_MODE_SIZE (TYPE_MODE (type))))
1289 retval = build1(NOP_EXPR, TREE_TYPE(res), retval);
1291 TREE_SIDE_EFFECTS (retval) = 1;
1292 java_add_stmt (build1 (RETURN_EXPR, void_type_node, retval));
1296 static void
1297 expand_load_internal (int index, tree type, int pc)
1299 tree copy;
1300 tree var = find_local_variable (index, type, pc);
1302 /* Now VAR is the VAR_DECL (or PARM_DECL) that we are going to push
1303 on the stack. If there is an assignment to this VAR_DECL between
1304 the stack push and the use, then the wrong code could be
1305 generated. To avoid this we create a new local and copy our
1306 value into it. Then we push this new local on the stack.
1307 Hopefully this all gets optimized out. */
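/* Added example of the hazard described above: for the bytecode

       iload_0        (push local 0)
       iinc  0, 1     (local 0 += 1)
       iload_0
       iadd           (should compute old + new values of local 0)

   pushing the VAR_DECL for local 0 directly would let the iinc store leak
   into the first operand; copying into a fresh temporary first preserves
   the value as of the push.  */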
1308 copy = build_decl (input_location, VAR_DECL, NULL_TREE, type);
1309 if ((INTEGRAL_TYPE_P (type) || POINTER_TYPE_P (type))
1310 && TREE_TYPE (copy) != TREE_TYPE (var))
1311 var = convert (type, var);
1312 java_add_local_var (copy);
1313 java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (var), copy, var));
1315 push_value (copy);
1318 tree
1319 build_address_of (tree value)
1321 return build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (value)), value);
1324 bool
1325 class_has_finalize_method (tree type)
1327 tree super = CLASSTYPE_SUPER (type);
1329 if (super == NULL_TREE)
1330 return false; /* Every class with a real finalizer inherits */
1331 /* from java.lang.Object. */
1332 else
1333 return HAS_FINALIZER_P (type) || class_has_finalize_method (super);
1336 tree
1337 java_create_object (tree type)
1339 tree alloc_node = (class_has_finalize_method (type)
1340 ? alloc_object_node
1341 : alloc_no_finalizer_node);
1343 return build_call_nary (promote_type (type),
1344 build_address_of (alloc_node),
1345 1, build_class_ref (type));
1348 static void
1349 expand_java_NEW (tree type)
1351 tree alloc_node;
1353 alloc_node = (class_has_finalize_method (type) ? alloc_object_node
1354 : alloc_no_finalizer_node);
1355 if (! CLASS_LOADED_P (type))
1356 load_class (type, 1);
1357 safe_layout_class (type);
1358 push_value (build_call_nary (promote_type (type),
1359 build_address_of (alloc_node),
1360 1, build_class_ref (type)));
1363 /* This returns an expression which will extract the class of an
1364 object. */
1366 tree
1367 build_get_class (tree value)
1369 tree class_field = lookup_field (&dtable_type, get_identifier ("class"));
1370 tree vtable_field = lookup_field (&object_type_node,
1371 get_identifier ("vtable"));
1372 tree tmp = build3 (COMPONENT_REF, dtable_ptr_type,
1373 build_java_indirect_ref (object_type_node, value,
1374 flag_check_references),
1375 vtable_field, NULL_TREE);
1376 return build3 (COMPONENT_REF, class_ptr_type,
1377 build1 (INDIRECT_REF, dtable_type, tmp),
1378 class_field, NULL_TREE);
1381 /* This builds the tree representation of the `instanceof' operator.
1382 It tries various tricks to optimize this in cases where types are
1383 known. */
1385 tree
1386 build_instanceof (tree value, tree type)
1388 tree expr;
1389 tree itype = TREE_TYPE (TREE_TYPE (soft_instanceof_node));
1390 tree valtype = TREE_TYPE (TREE_TYPE (value));
1391 tree valclass = TYPE_NAME (valtype);
1392 tree klass;
1394 /* When compiling from bytecode, we need to ensure that TYPE has
1395 been loaded. */
1396 if (CLASS_P (type) && ! CLASS_LOADED_P (type))
1398 load_class (type, 1);
1399 safe_layout_class (type);
1400 if (! TYPE_SIZE (type) || TREE_CODE (TYPE_SIZE (type)) == ERROR_MARK)
1401 return error_mark_node;
1403 klass = TYPE_NAME (type);
1405 if (type == object_type_node || inherits_from_p (valtype, type))
1407 /* Anything except `null' is an instance of Object. Likewise,
1408 if the object is known to be an instance of the class, then
1409 we only need to check for `null'. */
1410 expr = build2 (NE_EXPR, itype, value, null_pointer_node);
1412 else if (flag_verify_invocations
1413 && ! TYPE_ARRAY_P (type)
1414 && ! TYPE_ARRAY_P (valtype)
1415 && DECL_P (klass) && DECL_P (valclass)
1416 && ! CLASS_INTERFACE (valclass)
1417 && ! CLASS_INTERFACE (klass)
1418 && ! inherits_from_p (type, valtype)
1419 && (CLASS_FINAL (klass)
1420 || ! inherits_from_p (valtype, type)))
1422 /* The classes are from different branches of the derivation
1423 tree, so we immediately know the answer. */
1424 expr = boolean_false_node;
1426 else if (DECL_P (klass) && CLASS_FINAL (klass))
1428 tree save = save_expr (value);
1429 expr = build3 (COND_EXPR, itype,
1430 build2 (NE_EXPR, boolean_type_node,
1431 save, null_pointer_node),
1432 build2 (EQ_EXPR, itype,
1433 build_get_class (save),
1434 build_class_ref (type)),
1435 boolean_false_node);
1437 else
1439 expr = build_call_nary (itype,
1440 build_address_of (soft_instanceof_node),
1441 2, value, build_class_ref (type));
1443 TREE_SIDE_EFFECTS (expr) = TREE_SIDE_EFFECTS (value);
1444 return expr;
1447 static void
1448 expand_java_INSTANCEOF (tree type)
1450 tree value = pop_value (object_ptr_type_node);
1451 value = build_instanceof (value, type);
1452 push_value (value);
1455 static void
1456 expand_java_CHECKCAST (tree type)
1458 tree value = pop_value (ptr_type_node);
1459 value = build_call_nary (promote_type (type),
1460 build_address_of (soft_checkcast_node),
1461 2, build_class_ref (type), value);
1462 push_value (value);
1465 static void
1466 expand_iinc (unsigned int local_var_index, int ival, int pc)
1468 tree local_var, res;
1469 tree constant_value;
1471 flush_quick_stack ();
1472 local_var = find_local_variable (local_var_index, int_type_node, pc);
1473 constant_value = build_int_cst (NULL_TREE, ival);
1474 res = fold_build2 (PLUS_EXPR, int_type_node, local_var, constant_value);
1475 java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (local_var), local_var, res));
1479 tree
1480 build_java_soft_divmod (enum tree_code op, tree type, tree op1, tree op2)
1482 tree call = NULL;
1483 tree arg1 = convert (type, op1);
1484 tree arg2 = convert (type, op2);
1486 if (type == int_type_node)
1488 switch (op)
1490 case TRUNC_DIV_EXPR:
1491 call = soft_idiv_node;
1492 break;
1493 case TRUNC_MOD_EXPR:
1494 call = soft_irem_node;
1495 break;
1496 default:
1497 break;
1500 else if (type == long_type_node)
1502 switch (op)
1504 case TRUNC_DIV_EXPR:
1505 call = soft_ldiv_node;
1506 break;
1507 case TRUNC_MOD_EXPR:
1508 call = soft_lrem_node;
1509 break;
1510 default:
1511 break;
1515 gcc_assert (call);
1516 call = build_call_nary (type, build_address_of (call), 2, arg1, arg2);
1517 return call;
1520 tree
1521 build_java_binop (enum tree_code op, tree type, tree arg1, tree arg2)
1523 tree mask;
1524 switch (op)
1526 case URSHIFT_EXPR:
1528 tree u_type = unsigned_type_for (type);
1529 arg1 = convert (u_type, arg1);
1530 arg1 = build_java_binop (RSHIFT_EXPR, u_type, arg1, arg2);
1531 return convert (type, arg1);
1533 case LSHIFT_EXPR:
1534 case RSHIFT_EXPR:
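/* Added note: Java defines shift distances modulo the operand width (JLS
   15.19), e.g. an int shifted by 35 is shifted by 3.  Masking arg2 below
   with TYPE_PRECISION - 1 (0x1f for int, 0x3f for long) implements exactly
   that, independent of what the target's shift instructions do with
   out-of-range counts.  */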
1535 mask = build_int_cst (NULL_TREE,
1536 TYPE_PRECISION (TREE_TYPE (arg1)) - 1);
1537 arg2 = fold_build2 (BIT_AND_EXPR, int_type_node, arg2, mask);
1538 break;
1540 case COMPARE_L_EXPR: /* arg1 > arg2 ? 1 : arg1 == arg2 ? 0 : -1 */
1541 case COMPARE_G_EXPR: /* arg1 < arg2 ? -1 : arg1 == arg2 ? 0 : 1 */
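/* Added note: the two variants differ only in their NaN behaviour for
   floating-point operands.  If either argument is NaN, both the >/< test
   and the == test below are false, so COMPARE_L_EXPR evaluates to -1 (the
   JVM fcmpl/dcmpl result) and COMPARE_G_EXPR to 1 (fcmpg/dcmpg).  */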
1542 arg1 = save_expr (arg1); arg2 = save_expr (arg2);
1544 tree ifexp1 = fold_build2 (op == COMPARE_L_EXPR ? GT_EXPR : LT_EXPR,
1545 boolean_type_node, arg1, arg2);
1546 tree ifexp2 = fold_build2 (EQ_EXPR, boolean_type_node, arg1, arg2);
1547 tree second_compare = fold_build3 (COND_EXPR, int_type_node,
1548 ifexp2, integer_zero_node,
1549 op == COMPARE_L_EXPR
1550 ? integer_minus_one_node
1551 : integer_one_node);
1552 return fold_build3 (COND_EXPR, int_type_node, ifexp1,
1553 op == COMPARE_L_EXPR ? integer_one_node
1554 : integer_minus_one_node,
1555 second_compare);
1557 case COMPARE_EXPR:
1558 arg1 = save_expr (arg1); arg2 = save_expr (arg2);
1560 tree ifexp1 = fold_build2 (LT_EXPR, boolean_type_node, arg1, arg2);
1561 tree ifexp2 = fold_build2 (GT_EXPR, boolean_type_node, arg1, arg2);
1562 tree second_compare = fold_build3 (COND_EXPR, int_type_node,
1563 ifexp2, integer_one_node,
1564 integer_zero_node);
1565 return fold_build3 (COND_EXPR, int_type_node,
1566 ifexp1, integer_minus_one_node, second_compare);
1568 case TRUNC_DIV_EXPR:
1569 case TRUNC_MOD_EXPR:
1570 if (TREE_CODE (type) == REAL_TYPE
1571 && op == TRUNC_MOD_EXPR)
1573 tree call;
1574 if (type != double_type_node)
1576 arg1 = convert (double_type_node, arg1);
1577 arg2 = convert (double_type_node, arg2);
1579 call = build_call_nary (double_type_node,
1580 build_address_of (soft_fmod_node),
1581 2, arg1, arg2);
1582 if (type != double_type_node)
1583 call = convert (type, call);
1584 return call;
1587 if (TREE_CODE (type) == INTEGER_TYPE
1588 && flag_use_divide_subroutine
1589 && ! flag_syntax_only)
1590 return build_java_soft_divmod (op, type, arg1, arg2);
1592 break;
1593 default: ;
1595 return fold_build2 (op, type, arg1, arg2);
1598 static void
1599 expand_java_binop (tree type, enum tree_code op)
1601 tree larg, rarg;
1602 tree ltype = type;
1603 tree rtype = type;
1604 switch (op)
1606 case LSHIFT_EXPR:
1607 case RSHIFT_EXPR:
1608 case URSHIFT_EXPR:
1609 rtype = int_type_node;
1610 rarg = pop_value (rtype);
1611 break;
1612 default:
1613 rarg = pop_value (rtype);
1615 larg = pop_value (ltype);
1616 push_value (build_java_binop (op, type, larg, rarg));
1619 /* Lookup the field named NAME in *TYPEP or its super classes.
1620 If not found, return NULL_TREE.
1621 (If the *TYPEP is not found, or if the field reference is
1622 ambiguous, return error_mark_node.)
1623 If found, return the FIELD_DECL, and set *TYPEP to the
1624 class containing the field. */
1626 tree
1627 lookup_field (tree *typep, tree name)
1629 if (CLASS_P (*typep) && !CLASS_LOADED_P (*typep))
1631 load_class (*typep, 1);
1632 safe_layout_class (*typep);
1633 if (!TYPE_SIZE (*typep) || TREE_CODE (TYPE_SIZE (*typep)) == ERROR_MARK)
1634 return error_mark_node;
1638 tree field, binfo, base_binfo;
1639 tree save_field;
1640 int i;
1642 for (field = TYPE_FIELDS (*typep); field; field = DECL_CHAIN (field))
1643 if (DECL_NAME (field) == name)
1644 return field;
1646 /* Process implemented interfaces. */
1647 save_field = NULL_TREE;
1648 for (binfo = TYPE_BINFO (*typep), i = 0;
1649 BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
1651 tree t = BINFO_TYPE (base_binfo);
1652 if ((field = lookup_field (&t, name)))
1654 if (save_field == field)
1655 continue;
1656 if (save_field == NULL_TREE)
1657 save_field = field;
1658 else
1660 tree i1 = DECL_CONTEXT (save_field);
1661 tree i2 = DECL_CONTEXT (field);
1662 error ("reference %qs is ambiguous: appears in interface %qs and interface %qs",
1663 IDENTIFIER_POINTER (name),
1664 IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (i1))),
1665 IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (i2))));
1666 return error_mark_node;
1671 if (save_field != NULL_TREE)
1672 return save_field;
1674 *typep = CLASSTYPE_SUPER (*typep);
1675 } while (*typep);
1676 return NULL_TREE;
1679 /* Look up the field named NAME in object SELF_VALUE,
1680 which has class SELF_CLASS (a non-handle RECORD_TYPE).
1681 SELF_VALUE is NULL_TREE if looking for a static field. */
1683 tree
1684 build_field_ref (tree self_value, tree self_class, tree name)
1686 tree base_class = self_class;
1687 tree field_decl = lookup_field (&base_class, name);
1688 if (field_decl == NULL_TREE)
1690 error ("field %qs not found", IDENTIFIER_POINTER (name));
1691 return error_mark_node;
1693 if (self_value == NULL_TREE)
1695 return build_static_field_ref (field_decl);
1697 else
1699 tree base_type = promote_type (base_class);
1701 /* CHECK is true if self_value is not the this pointer. */
1702 int check = (! (DECL_P (self_value)
1703 && DECL_NAME (self_value) == this_identifier_node));
1705 /* Determine whether a field offset from NULL will lie within
1706 Page 0: this is necessary on those GNU/Linux/BSD systems that
1707 trap SEGV to generate NullPointerExceptions.
1709 We assume that Page 0 will be mapped with NOPERM, and that
1710 memory may be allocated from any other page, so only field
1711 offsets < pagesize are guaranteed to trap. We also assume
1712 the smallest page size we'll encounter is 4k bytes. */
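/* Added worked example: with page 0 (assumed to cover at least 4k) mapped
   inaccessible, loading obj.f where f is at byte offset 8 can dereference
   obj directly; if obj is null the access faults inside page 0 and the SEGV
   handler turns it into a NullPointerException.  For an offset of, say,
   8192 the faulting address might land in valid memory, so the explicit
   null check is kept (CHECK remains set).  */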
1713 if (! flag_syntax_only && check && ! flag_check_references
1714 && ! flag_indirect_dispatch)
1716 tree field_offset = byte_position (field_decl);
1717 if (! page_size)
1718 page_size = size_int (4096);
1719 check = ! INT_CST_LT_UNSIGNED (field_offset, page_size);
1722 if (base_type != TREE_TYPE (self_value))
1723 self_value = fold_build1 (NOP_EXPR, base_type, self_value);
1724 if (! flag_syntax_only && flag_indirect_dispatch)
1726 tree otable_index
1727 = build_int_cst (NULL_TREE, get_symbol_table_index
1728 (field_decl, NULL_TREE,
1729 &TYPE_OTABLE_METHODS (output_class)));
1730 tree field_offset
1731 = build4 (ARRAY_REF, integer_type_node,
1732 TYPE_OTABLE_DECL (output_class), otable_index,
1733 NULL_TREE, NULL_TREE);
1734 tree address;
1736 if (DECL_CONTEXT (field_decl) != output_class)
1737 field_offset
1738 = build3 (COND_EXPR, TREE_TYPE (field_offset),
1739 build2 (EQ_EXPR, boolean_type_node,
1740 field_offset, integer_zero_node),
1741 build_call_nary (void_type_node,
1742 build_address_of (soft_nosuchfield_node),
1743 1, otable_index),
1744 field_offset);
1746 self_value = java_check_reference (self_value, check);
1747 address = fold_build_pointer_plus (self_value, field_offset);
1748 address = fold_convert (build_pointer_type (TREE_TYPE (field_decl)),
1749 address);
1750 return fold_build1 (INDIRECT_REF, TREE_TYPE (field_decl), address);
1753 self_value = build_java_indirect_ref (TREE_TYPE (TREE_TYPE (self_value)),
1754 self_value, check);
1755 return fold_build3 (COMPONENT_REF, TREE_TYPE (field_decl),
1756 self_value, field_decl, NULL_TREE);
1760 tree
1761 lookup_label (int pc)
1763 tree name;
1764 char buf[32];
1765 if (pc > highest_label_pc_this_method)
1766 highest_label_pc_this_method = pc;
1767 targetm.asm_out.generate_internal_label (buf, "LJpc=",
1768 start_label_pc_this_method + pc);
1769 name = get_identifier (buf);
1770 if (IDENTIFIER_LOCAL_VALUE (name))
1771 return IDENTIFIER_LOCAL_VALUE (name);
1772 else
1774 /* The type of the address of a label is return_address_type_node. */
1775 tree decl = create_label_decl (name);
1776 return pushdecl (decl);
1780 /* Generate a unique name for labels used by loops, switches, and
1781 try-catch-finally blocks, and for temporary variables. */
1783 tree
1784 generate_name (void)
1786 static int l_number = 0;
1787 char buff [32];
1788 targetm.asm_out.generate_internal_label (buff, "LJv", l_number);
1789 l_number++;
1790 return get_identifier (buff);
1793 tree
1794 create_label_decl (tree name)
1796 tree decl;
1797 decl = build_decl (input_location, LABEL_DECL, name,
1798 TREE_TYPE (return_address_type_node));
1799 DECL_CONTEXT (decl) = current_function_decl;
1800 DECL_IGNORED_P (decl) = 1;
1801 return decl;
1804 /* This maps a bytecode offset (PC) to various flags. */
1805 char *instruction_bits;
1807 /* This is a vector of type states for the current method. It is
1808 indexed by PC. Each element is a tree vector holding the type
1809 state at that PC. We only note type states at basic block
1810 boundaries. */
1811 VEC(tree, gc) *type_states;
1813 static void
1814 note_label (int current_pc ATTRIBUTE_UNUSED, int target_pc)
1816 lookup_label (target_pc);
1817 instruction_bits [target_pc] |= BCODE_JUMP_TARGET;
1820 /* Emit code to jump to TARGET_PC if VALUE1 CONDITION VALUE2,
1821 where CONDITION is one of the compare operators. */
1823 static void
1824 expand_compare (enum tree_code condition, tree value1, tree value2,
1825 int target_pc)
1827 tree target = lookup_label (target_pc);
1828 tree cond = fold_build2 (condition, boolean_type_node, value1, value2);
1829 java_add_stmt
1830 (build3 (COND_EXPR, void_type_node, java_truthvalue_conversion (cond),
1831 build1 (GOTO_EXPR, void_type_node, target),
1832 build_java_empty_stmt ()));
1835 /* Emit code for a TEST-type opcode. */
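/* Added example: "ifeq L" pops one int and branches to L when it equals 0;
   "ifnull L" pops a reference and branches when it is null.  Both come
   through here, with the implicit second operand (zero or null) supplied
   as VALUE2 below.  */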
1837 static void
1838 expand_test (enum tree_code condition, tree type, int target_pc)
1840 tree value1, value2;
1841 flush_quick_stack ();
1842 value1 = pop_value (type);
1843 value2 = (type == ptr_type_node) ? null_pointer_node : integer_zero_node;
1844 expand_compare (condition, value1, value2, target_pc);
1847 /* Emit code for a COND-type opcode. */
1849 static void
1850 expand_cond (enum tree_code condition, tree type, int target_pc)
1852 tree value1, value2;
1853 flush_quick_stack ();
1854 /* note: pop values in opposite order */
1855 value2 = pop_value (type);
1856 value1 = pop_value (type);
1857 /* Maybe should check value1 and value2 for type compatibility ??? */
1858 expand_compare (condition, value1, value2, target_pc);
1861 static void
1862 expand_java_goto (int target_pc)
1864 tree target_label = lookup_label (target_pc);
1865 flush_quick_stack ();
1866 java_add_stmt (build1 (GOTO_EXPR, void_type_node, target_label));
1869 static tree
1870 expand_java_switch (tree selector, int default_pc)
1872 tree switch_expr, x;
1874 flush_quick_stack ();
1875 switch_expr = build3 (SWITCH_EXPR, TREE_TYPE (selector), selector,
1876 NULL_TREE, NULL_TREE);
1877 java_add_stmt (switch_expr);
1879 x = build_case_label (NULL_TREE, NULL_TREE,
1880 create_artificial_label (input_location));
1881 append_to_statement_list (x, &SWITCH_BODY (switch_expr));
1883 x = build1 (GOTO_EXPR, void_type_node, lookup_label (default_pc));
1884 append_to_statement_list (x, &SWITCH_BODY (switch_expr));
1886 return switch_expr;
1889 static void
1890 expand_java_add_case (tree switch_expr, int match, int target_pc)
1892 tree value, x;
1894 value = build_int_cst (TREE_TYPE (switch_expr), match);
1896 x = build_case_label (value, NULL_TREE,
1897 create_artificial_label (input_location));
1898 append_to_statement_list (x, &SWITCH_BODY (switch_expr));
1900 x = build1 (GOTO_EXPR, void_type_node, lookup_label (target_pc));
1901 append_to_statement_list (x, &SWITCH_BODY (switch_expr));
1904 static VEC(tree,gc) *
1905 pop_arguments (tree method_type)
1907 function_args_iterator fnai;
1908 tree type;
1909 VEC(tree,gc) *args = NULL;
1910 int arity;
1912 FOREACH_FUNCTION_ARGS (method_type, type, fnai)
1914 /* XXX: leaky abstraction. */
1915 if (type == void_type_node)
1916 break;
1918 VEC_safe_push (tree, gc, args, type);
1921 arity = VEC_length (tree, args);
1923 while (arity--)
1925 tree arg = pop_value (VEC_index (tree, args, arity));
1927 /* We simply cast each argument to its proper type. This is
1928 needed since we lose type information coming out of the
1929 verifier. We also have to do this when we pop an integer
1930 type that must be promoted for the function call. */
1931 if (TREE_CODE (type) == POINTER_TYPE)
1932 arg = build1 (NOP_EXPR, type, arg);
1933 else if (targetm.calls.promote_prototypes (type)
1934 && TYPE_PRECISION (type) < TYPE_PRECISION (integer_type_node)
1935 && INTEGRAL_TYPE_P (type))
1936 arg = convert (integer_type_node, arg);
1938 VEC_replace (tree, args, arity, arg);
1941 return args;
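/* An illustrative walk-through of pop_arguments (the descriptor below is a
   made-up example): for a method type corresponding to the signature (IJ)V
   the iterator first collects the declared argument types

       args = { int, long }

   and the while loop then pops the operand stack from the top down, filling
   args[1] (the long) before args[0] (the int), so the returned vector ends
   up in declared, left-to-right argument order with each value cast or
   promoted to its declared type.  */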
1944 /* Attach to PTR (a block) the declaration found in ENTRY. */
1946 int
1947 attach_init_test_initialization_flags (void **entry, void *ptr)
1949 tree block = (tree)ptr;
1950 struct treetreehash_entry *ite = (struct treetreehash_entry *) *entry;
1952 if (block != error_mark_node)
1954 if (TREE_CODE (block) == BIND_EXPR)
1956 tree body = BIND_EXPR_BODY (block);
1957 DECL_CHAIN (ite->value) = BIND_EXPR_VARS (block);
1958 BIND_EXPR_VARS (block) = ite->value;
1959 body = build2 (COMPOUND_EXPR, void_type_node,
1960 build1 (DECL_EXPR, void_type_node, ite->value), body);
1961 BIND_EXPR_BODY (block) = body;
1963 else
1965 tree body = BLOCK_SUBBLOCKS (block);
1966 TREE_CHAIN (ite->value) = BLOCK_EXPR_DECLS (block);
1967 BLOCK_EXPR_DECLS (block) = ite->value;
1968 body = build2 (COMPOUND_EXPR, void_type_node,
1969 build1 (DECL_EXPR, void_type_node, ite->value), body);
1970 BLOCK_SUBBLOCKS (block) = body;
1974 return true;
1977 /* Build an expression to initialize the class CLAS.
1978 If EXPR is non-NULL, return an expression that first calls the
1979 initializer (if it is needed) and then evaluates EXPR. */
1981 tree
1982 build_class_init (tree clas, tree expr)
1984 tree init;
1986 /* An optimization: if CLAS is a superclass of the class we're
1987 compiling, we don't need to initialize it. However, if CLAS is
1988 an interface, it won't necessarily be initialized, even if we
1989 implement it. */
1990 if ((! CLASS_INTERFACE (TYPE_NAME (clas))
1991 && inherits_from_p (current_class, clas))
1992 || current_class == clas)
1993 return expr;
1995 if (always_initialize_class_p)
1997 init = build_call_nary (void_type_node,
1998 build_address_of (soft_initclass_node),
1999 1, build_class_ref (clas));
2000 TREE_SIDE_EFFECTS (init) = 1;
2002 else
2004 tree *init_test_decl;
2005 tree decl;
2006 init_test_decl = java_treetreehash_new
2007 (DECL_FUNCTION_INIT_TEST_TABLE (current_function_decl), clas);
2009 if (*init_test_decl == NULL)
2011 /* Build a declaration and mark it as a flag used to track
2012 static class initializations. */
2013 decl = build_decl (input_location, VAR_DECL, NULL_TREE,
2014 boolean_type_node);
2015 MAYBE_CREATE_VAR_LANG_DECL_SPECIFIC (decl);
2016 DECL_CONTEXT (decl) = current_function_decl;
2017 DECL_INITIAL (decl) = boolean_false_node;
2018 /* Don't emit any symbolic debugging info for this decl. */
2019 DECL_IGNORED_P (decl) = 1;
2020 *init_test_decl = decl;
2023 init = build_call_nary (void_type_node,
2024 build_address_of (soft_initclass_node),
2025 1, build_class_ref (clas));
2026 TREE_SIDE_EFFECTS (init) = 1;
2027 init = build3 (COND_EXPR, void_type_node,
2028 build2 (EQ_EXPR, boolean_type_node,
2029 *init_test_decl, boolean_false_node),
2030 init, integer_zero_node);
2031 TREE_SIDE_EFFECTS (init) = 1;
2032 init = build2 (COMPOUND_EXPR, TREE_TYPE (expr), init,
2033 build2 (MODIFY_EXPR, boolean_type_node,
2034 *init_test_decl, boolean_true_node));
2035 TREE_SIDE_EFFECTS (init) = 1;
2038 if (expr != NULL_TREE)
2040 expr = build2 (COMPOUND_EXPR, TREE_TYPE (expr), init, expr);
2041 TREE_SIDE_EFFECTS (expr) = 1;
2042 return expr;
2044 return init;
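/* A rough sketch of the expression built above in the common
   (!always_initialize_class_p) case.  The flag name below is invented; the
   real flag is an anonymous boolean VAR_DECL kept in
   DECL_FUNCTION_INIT_TEST_TABLE, and soft_initclass stands for the runtime
   initializer referenced by soft_initclass_node:

       if (init$flag == false)
         soft_initclass (&CLAS.class);
       init$flag = true;
       EXPR;

   Repeated references to the same class within one function share a single
   flag, so the runtime initializer is only invoked while the flag is still
   false and later references pay only for a cheap flag test.  */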
2049 /* Rewrite expensive calls that require stack unwinding at runtime to
2050 cheaper alternatives. The logic here performs these
2051 transformations:
2053 java.lang.Class.forName("foo") -> java.lang.Class.forName("foo", class$)
2054 java.lang.Class.getClassLoader() -> java.lang.Class.getClassLoader(class$)
2058 typedef struct
2060 const char *classname;
2061 const char *method;
2062 const char *signature;
2063 const char *new_classname;
2064 const char *new_signature;
2065 int flags;
2066 void (*rewrite_arglist) (VEC(tree,gc) **);
2067 } rewrite_rule;
2069 /* Add __builtin_return_address(0) to the end of an arglist. */
2072 static void
2073 rewrite_arglist_getcaller (VEC(tree,gc) **arglist)
2075 tree retaddr
2076 = build_call_expr (built_in_decls[BUILT_IN_RETURN_ADDRESS],
2077 1, integer_zero_node);
2079 DECL_UNINLINABLE (current_function_decl) = 1;
2081 VEC_safe_push (tree, gc, *arglist, retaddr);
2084 /* Add this.class to the end of an arglist. */
2086 static void
2087 rewrite_arglist_getclass (VEC(tree,gc) **arglist)
2089 VEC_safe_push (tree, gc, *arglist, build_class_ref (output_class));
2092 static rewrite_rule rules[] =
2093 {{"java.lang.Class", "getClassLoader", "()Ljava/lang/ClassLoader;",
2094 "java.lang.Class", "(Ljava/lang/Class;)Ljava/lang/ClassLoader;",
2095 ACC_FINAL|ACC_PRIVATE, rewrite_arglist_getclass},
2097 {"java.lang.Class", "forName", "(Ljava/lang/String;)Ljava/lang/Class;",
2098 "java.lang.Class", "(Ljava/lang/String;Ljava/lang/Class;)Ljava/lang/Class;",
2099 ACC_FINAL|ACC_PRIVATE|ACC_STATIC, rewrite_arglist_getclass},
2101 {"gnu.classpath.VMStackWalker", "getCallingClass", "()Ljava/lang/Class;",
2102 "gnu.classpath.VMStackWalker", "(Lgnu/gcj/RawData;)Ljava/lang/Class;",
2103 ACC_FINAL|ACC_PRIVATE|ACC_STATIC, rewrite_arglist_getcaller},
2105 {"gnu.classpath.VMStackWalker", "getCallingClassLoader",
2106 "()Ljava/lang/ClassLoader;",
2107 "gnu.classpath.VMStackWalker", "(Lgnu/gcj/RawData;)Ljava/lang/ClassLoader;",
2108 ACC_FINAL|ACC_PRIVATE|ACC_STATIC, rewrite_arglist_getcaller},
2110 {"gnu.java.lang.VMCPStringBuilder", "toString", "([CII)Ljava/lang/String;",
2111 "java.lang.String", "([CII)Ljava/lang/String;",
2112 ACC_FINAL|ACC_PRIVATE|ACC_STATIC, NULL},
2114 {NULL, NULL, NULL, NULL, NULL, 0, NULL}};
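/* An illustrative instance of the forName rule above (the class names are
   hypothetical): a call compiled from

       Class c = Class.forName ("Foo");

   inside class Bar becomes, after maybe_rewrite_invocation applies
   rewrite_arglist_getclass,

       Class c = Class.forName ("Foo", Bar.class);

   i.e. the compiler appends a reference to the class being compiled, so the
   runtime can tell which class loader to use without unwinding the stack to
   find its caller.  */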
2116 /* True if this method is special, i.e. it's a private method that
2117 should be exported from a DSO. */
2119 bool
2120 special_method_p (tree candidate_method)
2122 tree context = DECL_NAME (TYPE_NAME (DECL_CONTEXT (candidate_method)));
2123 tree method = DECL_NAME (candidate_method);
2124 rewrite_rule *p;
2126 for (p = rules; p->classname; p++)
2128 if (get_identifier (p->classname) == context
2129 && get_identifier (p->method) == method)
2130 return true;
2132 return false;
2135 /* Scan the rules list for replacements for *METHOD_P and replace the
2136 args accordingly. If the rewrite results in an access to a private
2137 method, update SPECIAL. */
2139 void
2140 maybe_rewrite_invocation (tree *method_p, VEC(tree,gc) **arg_list_p,
2141 tree *method_signature_p, tree *special)
2143 tree context = DECL_NAME (TYPE_NAME (DECL_CONTEXT (*method_p)));
2144 rewrite_rule *p;
2145 *special = NULL_TREE;
2147 for (p = rules; p->classname; p++)
2149 if (get_identifier (p->classname) == context)
2151 tree method = DECL_NAME (*method_p);
2152 if (get_identifier (p->method) == method
2153 && get_identifier (p->signature) == *method_signature_p)
2155 tree maybe_method;
2156 tree destination_class
2157 = lookup_class (get_identifier (p->new_classname));
2158 gcc_assert (destination_class);
2159 maybe_method
2160 = lookup_java_method (destination_class,
2161 method,
2162 get_identifier (p->new_signature));
2163 if (! maybe_method && ! flag_verify_invocations)
2165 maybe_method
2166 = add_method (destination_class, p->flags,
2167 method, get_identifier (p->new_signature));
2168 DECL_EXTERNAL (maybe_method) = 1;
2170 *method_p = maybe_method;
2171 gcc_assert (*method_p);
2172 if (p->rewrite_arglist)
2173 p->rewrite_arglist (arg_list_p);
2174 *method_signature_p = get_identifier (p->new_signature);
2175 *special = integer_one_node;
2177 break;
2185 tree
2186 build_known_method_ref (tree method, tree method_type ATTRIBUTE_UNUSED,
2187 tree self_type, tree method_signature ATTRIBUTE_UNUSED,
2188 VEC(tree,gc) *arg_list ATTRIBUTE_UNUSED, tree special)
2190 tree func;
2191 if (is_compiled_class (self_type))
2193 /* With indirect dispatch we have to use indirect calls for all
2194 publicly visible methods or gcc will use PLT indirections
2195 to reach them. We also have to use indirect dispatch for all
2196 external methods. */
2197 if (! flag_indirect_dispatch
2198 || (! DECL_EXTERNAL (method) && ! TREE_PUBLIC (method)))
2200 func = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (method)),
2201 method);
2203 else
2205 tree table_index
2206 = build_int_cst (NULL_TREE,
2207 (get_symbol_table_index
2208 (method, special,
2209 &TYPE_ATABLE_METHODS (output_class))));
2210 func
2211 = build4 (ARRAY_REF,
2212 TREE_TYPE (TREE_TYPE (TYPE_ATABLE_DECL (output_class))),
2213 TYPE_ATABLE_DECL (output_class), table_index,
2214 NULL_TREE, NULL_TREE);
2216 func = convert (method_ptr_type_node, func);
2218 else
2220 /* We don't know whether the method has been (statically) compiled.
2221 Compile this code to get a reference to the method's code:
2223 SELF_TYPE->methods[METHOD_INDEX].ncode
2227 int method_index = 0;
2228 tree meth, ref;
2230 /* The method might actually be declared in some superclass, so
2231 we have to use its class context, not the caller's notion of
2232 where the method is. */
2233 self_type = DECL_CONTEXT (method);
2234 ref = build_class_ref (self_type);
2235 ref = build1 (INDIRECT_REF, class_type_node, ref);
2236 if (ncode_ident == NULL_TREE)
2237 ncode_ident = get_identifier ("ncode");
2238 if (methods_ident == NULL_TREE)
2239 methods_ident = get_identifier ("methods");
2240 ref = build3 (COMPONENT_REF, method_ptr_type_node, ref,
2241 lookup_field (&class_type_node, methods_ident),
2242 NULL_TREE);
2243 for (meth = TYPE_METHODS (self_type);
2244 ; meth = DECL_CHAIN (meth))
2246 if (method == meth)
2247 break;
2248 if (meth == NULL_TREE)
2249 fatal_error ("method '%s' not found in class",
2250 IDENTIFIER_POINTER (DECL_NAME (method)));
2251 method_index++;
2253 method_index *= int_size_in_bytes (method_type_node);
2254 ref = fold_build_pointer_plus_hwi (ref, method_index);
2255 ref = build1 (INDIRECT_REF, method_type_node, ref);
2256 func = build3 (COMPONENT_REF, nativecode_ptr_type_node,
2257 ref, lookup_field (&method_type_node, ncode_ident),
2258 NULL_TREE);
2260 return func;
2263 tree
2264 invoke_build_dtable (int is_invoke_interface, VEC(tree,gc) *arg_list)
2266 tree dtable, objectref;
2267 tree saved = save_expr (VEC_index (tree, arg_list, 0));
2269 VEC_replace (tree, arg_list, 0, saved);
2271 /* If we're dealing with interfaces and if the objectref
2272 argument is an array then get the dispatch table of the class
2273 Object rather than the one from the objectref. */
2274 objectref = (is_invoke_interface
2275 && is_array_type_p (TREE_TYPE (saved))
2276 ? build_class_ref (object_type_node) : saved);
2278 if (dtable_ident == NULL_TREE)
2279 dtable_ident = get_identifier ("vtable");
2280 dtable = build_java_indirect_ref (object_type_node, objectref,
2281 flag_check_references);
2282 dtable = build3 (COMPONENT_REF, dtable_ptr_type, dtable,
2283 lookup_field (&object_type_node, dtable_ident), NULL_TREE);
2285 return dtable;
2288 /* Determine the index in SYMBOL_TABLE for a reference to the decl
2289 T. If this decl has not been seen before, it will be added to the
2290 [oa]table_methods. If it has, the existing table slot will be
2291 reused. */
2293 int
2294 get_symbol_table_index (tree t, tree special,
2295 VEC(method_entry,gc) **symbol_table)
2297 method_entry *e;
2298 unsigned i;
2300 FOR_EACH_VEC_ELT (method_entry, *symbol_table, i, e)
2301 if (t == e->method && special == e->special)
2302 goto done;
2304 e = VEC_safe_push (method_entry, gc, *symbol_table, NULL);
2305 e->method = t;
2306 e->special = special;
2308 done:
2309 return i + 1;
2312 tree
2313 build_invokevirtual (tree dtable, tree method, tree special)
2315 tree func;
2316 tree nativecode_ptr_ptr_type_node
2317 = build_pointer_type (nativecode_ptr_type_node);
2318 tree method_index;
2319 tree otable_index;
2321 if (flag_indirect_dispatch)
2323 gcc_assert (! CLASS_INTERFACE (TYPE_NAME (DECL_CONTEXT (method))));
2325 otable_index
2326 = build_int_cst (NULL_TREE, get_symbol_table_index
2327 (method, special,
2328 &TYPE_OTABLE_METHODS (output_class)));
2329 method_index = build4 (ARRAY_REF, integer_type_node,
2330 TYPE_OTABLE_DECL (output_class),
2331 otable_index, NULL_TREE, NULL_TREE);
2333 else
2335 /* We fetch the DECL_VINDEX field directly here, rather than
2336 using get_method_index(). DECL_VINDEX is the true offset
2337 from the vtable base to a method, regardless of any extra
2338 words inserted at the start of the vtable. */
2339 method_index = DECL_VINDEX (method);
2340 method_index = size_binop (MULT_EXPR, method_index,
2341 TYPE_SIZE_UNIT (nativecode_ptr_ptr_type_node));
2342 if (TARGET_VTABLE_USES_DESCRIPTORS)
2343 method_index = size_binop (MULT_EXPR, method_index,
2344 size_int (TARGET_VTABLE_USES_DESCRIPTORS));
2347 func = fold_build_pointer_plus (dtable, method_index);
2349 if (TARGET_VTABLE_USES_DESCRIPTORS)
2350 func = build1 (NOP_EXPR, nativecode_ptr_type_node, func);
2351 else
2353 func = fold_convert (nativecode_ptr_ptr_type_node, func);
2354 func = build1 (INDIRECT_REF, nativecode_ptr_type_node, func);
2357 return func;
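/* A condensed sketch of the value computed above when indirect dispatch is
   not in use and the target does not use vtable descriptors (the C cast is
   only illustrative):

       func = *(nativecode_ptr *) ((char *) dtable
                                   + DECL_VINDEX (method) * sizeof (void *));

   i.e. an ordinary vtable load: DECL_VINDEX gives the slot number, which is
   scaled by the size of a code pointer and added to the vtable address
   produced by invoke_build_dtable.  */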
2360 static GTY(()) tree class_ident;
2361 tree
2362 build_invokeinterface (tree dtable, tree method)
2364 tree interface;
2365 tree idx;
2367 /* We expand invokeinterface here. */
2369 if (class_ident == NULL_TREE)
2370 class_ident = get_identifier ("class");
2372 dtable = build_java_indirect_ref (dtable_type, dtable,
2373 flag_check_references);
2374 dtable = build3 (COMPONENT_REF, class_ptr_type, dtable,
2375 lookup_field (&dtable_type, class_ident), NULL_TREE);
2377 interface = DECL_CONTEXT (method);
2378 gcc_assert (CLASS_INTERFACE (TYPE_NAME (interface)));
2379 layout_class_methods (interface);
2381 if (flag_indirect_dispatch)
2383 int itable_index
2384 = 2 * (get_symbol_table_index
2385 (method, NULL_TREE, &TYPE_ITABLE_METHODS (output_class)));
2386 interface
2387 = build4 (ARRAY_REF,
2388 TREE_TYPE (TREE_TYPE (TYPE_ITABLE_DECL (output_class))),
2389 TYPE_ITABLE_DECL (output_class),
2390 build_int_cst (NULL_TREE, itable_index-1),
2391 NULL_TREE, NULL_TREE);
2392 idx
2393 = build4 (ARRAY_REF,
2394 TREE_TYPE (TREE_TYPE (TYPE_ITABLE_DECL (output_class))),
2395 TYPE_ITABLE_DECL (output_class),
2396 build_int_cst (NULL_TREE, itable_index),
2397 NULL_TREE, NULL_TREE);
2398 interface = convert (class_ptr_type, interface);
2399 idx = convert (integer_type_node, idx);
2401 else
2403 idx = build_int_cst (NULL_TREE,
2404 get_interface_method_index (method, interface));
2405 interface = build_class_ref (interface);
2408 return build_call_nary (ptr_type_node,
2409 build_address_of (soft_lookupinterfacemethod_node),
2410 3, dtable, interface, idx);
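/* Sketch of the expansion built above, with soft_lookupinterfacemethod
   standing in for the runtime helper referenced by
   soft_lookupinterfacemethod_node:

       func = soft_lookupinterfacemethod (objectref->vtable->class,
                                          interface, idx);

   where IDX is the method's index within the interface or, under
   -findirect-dispatch, INTERFACE and IDX are both loaded from adjacent
   slots of the itable so that they can be filled in at link time.  */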
2413 /* Expand one of the invoke_* opcodes.
2414 OPCODE is the specific opcode.
2415 METHOD_REF_INDEX is an index into the constant pool.
2416 NARGS is the number of arguments, or -1 if not specified. */
2418 static void
2419 expand_invoke (int opcode, int method_ref_index, int nargs ATTRIBUTE_UNUSED)
2421 tree method_signature
2422 = COMPONENT_REF_SIGNATURE(&current_jcf->cpool, method_ref_index);
2423 tree method_name = COMPONENT_REF_NAME (&current_jcf->cpool,
2424 method_ref_index);
2425 tree self_type
2426 = get_class_constant (current_jcf,
2427 COMPONENT_REF_CLASS_INDEX(&current_jcf->cpool,
2428 method_ref_index));
2429 const char *const self_name
2430 = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (self_type)));
2431 tree call, func, method, method_type;
2432 VEC(tree,gc) *arg_list;
2433 tree check = NULL_TREE;
2435 tree special = NULL_TREE;
2437 if (! CLASS_LOADED_P (self_type))
2439 load_class (self_type, 1);
2440 safe_layout_class (self_type);
2441 if (TREE_CODE (TYPE_SIZE (self_type)) == ERROR_MARK)
2442 fatal_error ("failed to find class '%s'", self_name);
2444 layout_class_methods (self_type);
2446 if (ID_INIT_P (method_name))
2447 method = lookup_java_constructor (self_type, method_signature);
2448 else
2449 method = lookup_java_method (self_type, method_name, method_signature);
2451 /* We've found a method in a class other than the one in which it
2452 was wanted. This can happen if, for instance, we're trying to
2453 compile invokespecial super.equals().
2454 FIXME: This is a kludge. Rather than nullifying the result, we
2455 should change lookup_java_method() so that it doesn't search the
2456 superclass chain when we're BC-compiling. */
2457 if (! flag_verify_invocations
2458 && method
2459 && ! TYPE_ARRAY_P (self_type)
2460 && self_type != DECL_CONTEXT (method))
2461 method = NULL_TREE;
2463 /* We've found a method in an interface, but this isn't an interface
2464 call. */
2465 if (opcode != OPCODE_invokeinterface
2466 && method
2467 && (CLASS_INTERFACE (TYPE_NAME (DECL_CONTEXT (method)))))
2468 method = NULL_TREE;
2470 /* We've found a non-interface method but we are making an
2471 interface call. This can happen if the interface overrides a
2472 method in Object. */
2473 if (! flag_verify_invocations
2474 && opcode == OPCODE_invokeinterface
2475 && method
2476 && ! CLASS_INTERFACE (TYPE_NAME (DECL_CONTEXT (method))))
2477 method = NULL_TREE;
2479 if (method == NULL_TREE)
2481 if (flag_verify_invocations || ! flag_indirect_dispatch)
2483 error ("class '%s' has no method named '%s' matching signature '%s'",
2484 self_name,
2485 IDENTIFIER_POINTER (method_name),
2486 IDENTIFIER_POINTER (method_signature));
2488 else
2490 int flags = ACC_PUBLIC;
2491 if (opcode == OPCODE_invokestatic)
2492 flags |= ACC_STATIC;
2493 if (opcode == OPCODE_invokeinterface)
2495 flags |= ACC_INTERFACE | ACC_ABSTRACT;
2496 CLASS_INTERFACE (TYPE_NAME (self_type)) = 1;
2498 method = add_method (self_type, flags, method_name,
2499 method_signature);
2500 DECL_ARTIFICIAL (method) = 1;
2501 METHOD_DUMMY (method) = 1;
2502 layout_class_method (self_type, NULL,
2503 method, NULL);
2507 /* invokestatic cannot invoke a non-static or abstract method, and the other invoke opcodes cannot invoke a static method. */
2508 if (method != NULL_TREE)
2510 if (opcode == OPCODE_invokestatic)
2512 if (!METHOD_STATIC (method))
2514 error ("invokestatic on non-static method");
2515 method = NULL_TREE;
2517 else if (METHOD_ABSTRACT (method))
2519 error ("invokestatic on abstract method");
2520 method = NULL_TREE;
2523 else
2525 if (METHOD_STATIC (method))
2527 error ("invoke[non-static] on static method");
2528 method = NULL_TREE;
2533 if (method == NULL_TREE)
2535 /* If we got here, we emitted an error message above. So we
2536 just pop the arguments, push a properly-typed zero, and
2537 continue. */
2538 method_type = get_type_from_signature (method_signature);
2539 pop_arguments (method_type);
2540 if (opcode != OPCODE_invokestatic)
2541 pop_type (self_type);
2542 method_type = promote_type (TREE_TYPE (method_type));
2543 push_value (convert (method_type, integer_zero_node));
2544 return;
2547 method_type = TREE_TYPE (method);
2548 arg_list = pop_arguments (method_type);
2549 flush_quick_stack ();
2551 maybe_rewrite_invocation (&method, &arg_list, &method_signature,
2552 &special);
2554 func = NULL_TREE;
2555 if (opcode == OPCODE_invokestatic)
2556 func = build_known_method_ref (method, method_type, self_type,
2557 method_signature, arg_list, special);
2558 else if (opcode == OPCODE_invokespecial
2559 || (opcode == OPCODE_invokevirtual
2560 && (METHOD_PRIVATE (method)
2561 || METHOD_FINAL (method)
2562 || CLASS_FINAL (TYPE_NAME (self_type)))))
2564 /* If the object for the method call is null, we throw an
2565 exception. We don't do this if the object is the current
2566 method's `this'. In other cases we just rely on an
2567 optimization pass to eliminate redundant checks. FIXME:
2568 Unfortunately there doesn't seem to be a way to determine
2569 what the current method is right now.
2570 We do omit the check if we're calling <init>. */
2571 /* We use a SAVE_EXPR here to make sure we only evaluate
2572 the new `self' expression once. */
2573 tree save_arg = save_expr (VEC_index (tree, arg_list, 0));
2574 VEC_replace (tree, arg_list, 0, save_arg);
2575 check = java_check_reference (save_arg, ! DECL_INIT_P (method));
2576 func = build_known_method_ref (method, method_type, self_type,
2577 method_signature, arg_list, special);
2579 else
2581 tree dtable = invoke_build_dtable (opcode == OPCODE_invokeinterface,
2582 arg_list);
2583 if (opcode == OPCODE_invokevirtual)
2584 func = build_invokevirtual (dtable, method, special);
2585 else
2586 func = build_invokeinterface (dtable, method);
2589 if (TREE_CODE (func) == ADDR_EXPR)
2590 TREE_TYPE (func) = build_pointer_type (method_type);
2591 else
2592 func = build1 (NOP_EXPR, build_pointer_type (method_type), func);
2594 call = build_call_vec (TREE_TYPE (method_type), func, arg_list);
2595 TREE_SIDE_EFFECTS (call) = 1;
2596 call = check_for_builtin (method, call);
2598 if (check != NULL_TREE)
2600 call = build2 (COMPOUND_EXPR, TREE_TYPE (call), check, call);
2601 TREE_SIDE_EFFECTS (call) = 1;
2604 if (TREE_CODE (TREE_TYPE (method_type)) == VOID_TYPE)
2605 java_add_stmt (call);
2606 else
2608 push_value (call);
2609 flush_quick_stack ();
2613 /* Create a stub which will be put into the vtable but which will call
2614 a JNI function. */
2616 tree
2617 build_jni_stub (tree method)
2619 tree jnifunc, call, body, method_sig, arg_types;
2620 tree jniarg0, jniarg1, jniarg2, jniarg3;
2621 tree jni_func_type, tem;
2622 tree env_var, res_var = NULL_TREE, block;
2623 tree method_args;
2624 tree meth_var;
2625 tree bind;
2626 VEC(tree,gc) *args = NULL;
2627 int args_size = 0;
2629 tree klass = DECL_CONTEXT (method);
2630 klass = build_class_ref (klass);
2632 gcc_assert (METHOD_NATIVE (method) && flag_jni);
2634 DECL_ARTIFICIAL (method) = 1;
2635 DECL_EXTERNAL (method) = 0;
2637 env_var = build_decl (input_location,
2638 VAR_DECL, get_identifier ("env"), ptr_type_node);
2639 DECL_CONTEXT (env_var) = method;
2641 if (TREE_TYPE (TREE_TYPE (method)) != void_type_node)
2643 res_var = build_decl (input_location, VAR_DECL, get_identifier ("res"),
2644 TREE_TYPE (TREE_TYPE (method)));
2645 DECL_CONTEXT (res_var) = method;
2646 DECL_CHAIN (env_var) = res_var;
2649 method_args = DECL_ARGUMENTS (method);
2650 block = build_block (env_var, NULL_TREE, method_args, NULL_TREE);
2651 TREE_SIDE_EFFECTS (block) = 1;
2653 /* Compute the local `env' by calling _Jv_GetJNIEnvNewFrame. */
2654 body = build2 (MODIFY_EXPR, ptr_type_node, env_var,
2655 build_call_nary (ptr_type_node,
2656 build_address_of (soft_getjnienvnewframe_node),
2657 1, klass));
2659 /* The JNIEnv structure is the first argument to the JNI function. */
2660 args_size += int_size_in_bytes (TREE_TYPE (env_var));
2661 VEC_safe_push (tree, gc, args, env_var);
2663 /* For a static method the second argument is the class. For a
2664 non-static method the second argument is `this'; that is already
2665 available in the argument list. */
2666 if (METHOD_STATIC (method))
2668 args_size += int_size_in_bytes (TREE_TYPE (klass));
2669 VEC_safe_push (tree, gc, args, klass);
2672 /* All the arguments to this method become arguments to the
2673 underlying JNI function. If we had to wrap object arguments in a
2674 special way, we would do that here. */
2675 for (tem = method_args; tem != NULL_TREE; tem = DECL_CHAIN (tem))
2677 int arg_bits = TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (tem)));
2678 #ifdef PARM_BOUNDARY
2679 arg_bits = (((arg_bits + PARM_BOUNDARY - 1) / PARM_BOUNDARY)
2680 * PARM_BOUNDARY);
2681 #endif
2682 args_size += (arg_bits / BITS_PER_UNIT);
2684 VEC_safe_push (tree, gc, args, tem);
2686 arg_types = TYPE_ARG_TYPES (TREE_TYPE (method));
2688 /* Argument types for static methods and the JNIEnv structure.
2689 FIXME: Write and use build_function_type_vec to avoid this. */
2690 if (METHOD_STATIC (method))
2691 arg_types = tree_cons (NULL_TREE, object_ptr_type_node, arg_types);
2692 arg_types = tree_cons (NULL_TREE, ptr_type_node, arg_types);
2694 /* We call _Jv_LookupJNIMethod to find the actual underlying
2695 function pointer. _Jv_LookupJNIMethod will throw the appropriate
2696 exception if this function is not found at runtime. */
2697 method_sig = build_java_signature (TREE_TYPE (method));
2698 jniarg0 = klass;
2699 jniarg1 = build_utf8_ref (DECL_NAME (method));
2700 jniarg2 = build_utf8_ref (unmangle_classname
2701 (IDENTIFIER_POINTER (method_sig),
2702 IDENTIFIER_LENGTH (method_sig)));
2703 jniarg3 = build_int_cst (NULL_TREE, args_size);
2705 tem = build_function_type (TREE_TYPE (TREE_TYPE (method)), arg_types);
2707 #ifdef MODIFY_JNI_METHOD_CALL
2708 tem = MODIFY_JNI_METHOD_CALL (tem);
2709 #endif
2711 jni_func_type = build_pointer_type (tem);
2713 /* Use the actual function type, rather than a generic pointer type,
2714 such that this decl keeps the actual pointer type from being
2715 garbage-collected. If it is, we end up using canonical types
2716 with different uids for equivalent function types, and this in
2717 turn causes utf8 identifiers and output order to vary. */
2718 meth_var = build_decl (input_location,
2719 VAR_DECL, get_identifier ("meth"), jni_func_type);
2720 TREE_STATIC (meth_var) = 1;
2721 TREE_PUBLIC (meth_var) = 0;
2722 DECL_EXTERNAL (meth_var) = 0;
2723 DECL_CONTEXT (meth_var) = method;
2724 DECL_ARTIFICIAL (meth_var) = 1;
2725 DECL_INITIAL (meth_var) = null_pointer_node;
2726 TREE_USED (meth_var) = 1;
2727 chainon (env_var, meth_var);
2728 build_result_decl (method);
2730 jnifunc = build3 (COND_EXPR, jni_func_type,
2731 build2 (NE_EXPR, boolean_type_node,
2732 meth_var, build_int_cst (TREE_TYPE (meth_var), 0)),
2733 meth_var,
2734 build2 (MODIFY_EXPR, jni_func_type, meth_var,
2735 build1
2736 (NOP_EXPR, jni_func_type,
2737 build_call_nary (ptr_type_node,
2738 build_address_of
2739 (soft_lookupjnimethod_node),
2740 4,
2741 jniarg0, jniarg1,
2742 jniarg2, jniarg3))));
2744 /* Now we make the actual JNI call via the resulting function
2745 pointer. */
2746 call = build_call_vec (TREE_TYPE (TREE_TYPE (method)), jnifunc, args);
2748 /* If the JNI call returned a result, capture it here. If we had to
2749 unwrap JNI object results, we would do that here. */
2750 if (res_var != NULL_TREE)
2752 /* If the call returns an object, it may return a JNI weak
2753 reference, in which case we must unwrap it. */
2754 if (! JPRIMITIVE_TYPE_P (TREE_TYPE (TREE_TYPE (method))))
2755 call = build_call_nary (TREE_TYPE (TREE_TYPE (method)),
2756 build_address_of (soft_unwrapjni_node),
2757 1, call);
2758 call = build2 (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (method)),
2759 res_var, call);
2762 TREE_SIDE_EFFECTS (call) = 1;
2764 body = build2 (COMPOUND_EXPR, void_type_node, body, call);
2765 TREE_SIDE_EFFECTS (body) = 1;
2767 /* Now free the environment we allocated. */
2768 call = build_call_nary (ptr_type_node,
2769 build_address_of (soft_jnipopsystemframe_node),
2770 1, env_var);
2771 TREE_SIDE_EFFECTS (call) = 1;
2772 body = build2 (COMPOUND_EXPR, void_type_node, body, call);
2773 TREE_SIDE_EFFECTS (body) = 1;
2775 /* Finally, do the return. */
2776 if (res_var != NULL_TREE)
2778 tree drt;
2779 gcc_assert (DECL_RESULT (method));
2780 /* Make sure we copy the result variable to the actual
2781 result. We use the type of the DECL_RESULT because it
2782 might be different from the return type of the function:
2783 it might be promoted. */
2784 drt = TREE_TYPE (DECL_RESULT (method));
2785 if (drt != TREE_TYPE (res_var))
2786 res_var = build1 (CONVERT_EXPR, drt, res_var);
2787 res_var = build2 (MODIFY_EXPR, drt, DECL_RESULT (method), res_var);
2788 TREE_SIDE_EFFECTS (res_var) = 1;
2791 body = build2 (COMPOUND_EXPR, void_type_node, body,
2792 build1 (RETURN_EXPR, void_type_node, res_var));
2793 TREE_SIDE_EFFECTS (body) = 1;
2795 /* Prepend class initialization for static methods reachable from
2796 other classes. */
2797 if (METHOD_STATIC (method)
2798 && (! METHOD_PRIVATE (method)
2799 || INNER_CLASS_P (DECL_CONTEXT (method))))
2801 tree init = build_call_expr (soft_initclass_node, 1,
2802 klass);
2803 body = build2 (COMPOUND_EXPR, void_type_node, init, body);
2804 TREE_SIDE_EFFECTS (body) = 1;
2807 bind = build3 (BIND_EXPR, void_type_node, BLOCK_VARS (block),
2808 body, block);
2809 return bind;
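/* A condensed, illustrative view of the stub assembled above for a static
   native method.  The soft_* names stand for the runtime entry points
   referenced through the corresponding *_node decls, and argument
   marshalling details are omitted:

       static jni_func_type meth;              // cached lookup result

       env = soft_getjnienvnewframe (&CLASS);
       if (meth == NULL)
         meth = soft_lookupjnimethod (&CLASS, name, signature, args_size);
       res = (*meth) (env, &CLASS, args...);   // &CLASS only for static
       soft_jnipopsystemframe (env);
       return res;

   For a method returning an object, the call result is additionally passed
   through the unwrap helper before being stored in RES.  */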
2813 /* Given lvalue EXP, return a volatile expression that references the
2814 same object. */
2816 tree
2817 java_modify_addr_for_volatile (tree exp)
2819 tree exp_type = TREE_TYPE (exp);
2820 tree v_type
2821 = build_qualified_type (exp_type,
2822 TYPE_QUALS (exp_type) | TYPE_QUAL_VOLATILE);
2823 tree addr = build_fold_addr_expr (exp);
2824 v_type = build_pointer_type (v_type);
2825 addr = fold_convert (v_type, addr);
2826 exp = build_fold_indirect_ref (addr);
2827 return exp;
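/* In plain C terms the transformation above is simply

       exp   ==>   *(volatile T *) &exp

   where T is EXP's original type, so subsequent loads and stores of the
   object go through a volatile-qualified lvalue and cannot be reordered or
   elided by later optimization passes.  */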
2831 /* Expand an operation to extract from or store into a field.
2832 IS_STATIC is 1 iff the field is static.
2833 IS_PUTTING is 1 for putting into a field; 0 for getting from the field.
2834 FIELD_REF_INDEX is an index into the constant pool. */
2836 static void
2837 expand_java_field_op (int is_static, int is_putting, int field_ref_index)
2839 tree self_type
2840 = get_class_constant (current_jcf,
2841 COMPONENT_REF_CLASS_INDEX (&current_jcf->cpool,
2842 field_ref_index));
2843 const char *self_name
2844 = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (self_type)));
2845 tree field_name = COMPONENT_REF_NAME (&current_jcf->cpool, field_ref_index);
2846 tree field_signature = COMPONENT_REF_SIGNATURE (&current_jcf->cpool,
2847 field_ref_index);
2848 tree field_type = get_type_from_signature (field_signature);
2849 tree new_value = is_putting ? pop_value (field_type) : NULL_TREE;
2850 tree field_ref;
2851 int is_error = 0;
2852 tree original_self_type = self_type;
2853 tree field_decl;
2854 tree modify_expr;
2856 if (! CLASS_LOADED_P (self_type))
2857 load_class (self_type, 1);
2858 field_decl = lookup_field (&self_type, field_name);
2859 if (field_decl == error_mark_node)
2861 is_error = 1;
2863 else if (field_decl == NULL_TREE)
2865 if (! flag_verify_invocations)
2867 int flags = ACC_PUBLIC;
2868 if (is_static)
2869 flags |= ACC_STATIC;
2870 self_type = original_self_type;
2871 field_decl = add_field (original_self_type, field_name,
2872 field_type, flags);
2873 DECL_ARTIFICIAL (field_decl) = 1;
2874 DECL_IGNORED_P (field_decl) = 1;
2875 #if 0
2876 /* FIXME: We should be pessimistic about volatility. We
2877 don't know one way or another, but this is safe.
2878 However, doing this has bad effects on code quality. We
2879 need to look at better ways to do this. */
2880 TREE_THIS_VOLATILE (field_decl) = 1;
2881 #endif
2883 else
2885 error ("missing field '%s' in '%s'",
2886 IDENTIFIER_POINTER (field_name), self_name);
2887 is_error = 1;
2890 else if (build_java_signature (TREE_TYPE (field_decl)) != field_signature)
2892 error ("mismatching signature for field '%s' in '%s'",
2893 IDENTIFIER_POINTER (field_name), self_name);
2894 is_error = 1;
2896 field_ref = is_static ? NULL_TREE : pop_value (self_type);
2897 if (is_error)
2899 if (! is_putting)
2900 push_value (convert (field_type, integer_zero_node));
2901 flush_quick_stack ();
2902 return;
2905 field_ref = build_field_ref (field_ref, self_type, field_name);
2906 if (is_static
2907 && ! flag_indirect_dispatch)
2909 tree context = DECL_CONTEXT (field_ref);
2910 if (context != self_type && CLASS_INTERFACE (TYPE_NAME (context)))
2911 field_ref = build_class_init (context, field_ref);
2912 else
2913 field_ref = build_class_init (self_type, field_ref);
2915 if (is_putting)
2917 flush_quick_stack ();
2918 if (FIELD_FINAL (field_decl))
2920 if (DECL_CONTEXT (field_decl) != current_class)
2921 error ("assignment to final field %q+D not in field%'s class",
2922 field_decl);
2923 /* We used to check for assignments to final fields not
2924 occurring in the class initializer or in a constructor
2925 here. However, this constraint doesn't seem to be
2926 enforced by the JVM. */
2929 if (TREE_THIS_VOLATILE (field_decl))
2930 field_ref = java_modify_addr_for_volatile (field_ref);
2932 modify_expr = build2 (MODIFY_EXPR, TREE_TYPE (field_ref),
2933 field_ref, new_value);
2935 if (TREE_THIS_VOLATILE (field_decl))
2936 java_add_stmt
2937 (build_call_expr (built_in_decls[BUILT_IN_SYNC_SYNCHRONIZE], 0));
2939 java_add_stmt (modify_expr);
2941 else
2943 tree temp = build_decl (input_location,
2944 VAR_DECL, NULL_TREE, TREE_TYPE (field_ref));
2945 java_add_local_var (temp);
2947 if (TREE_THIS_VOLATILE (field_decl))
2948 field_ref = java_modify_addr_for_volatile (field_ref);
2950 modify_expr
2951 = build2 (MODIFY_EXPR, TREE_TYPE (field_ref), temp, field_ref);
2952 java_add_stmt (modify_expr);
2954 if (TREE_THIS_VOLATILE (field_decl))
2955 java_add_stmt
2956 (build_call_expr (built_in_decls[BUILT_IN_SYNC_SYNCHRONIZE], 0));
2958 push_value (temp);
2960 TREE_THIS_VOLATILE (field_ref) = TREE_THIS_VOLATILE (field_decl);
2963 static void
2964 load_type_state (int pc)
2966 int i;
2967 tree vec = VEC_index (tree, type_states, pc);
2968 int cur_length = TREE_VEC_LENGTH (vec);
2969 stack_pointer = cur_length - DECL_MAX_LOCALS(current_function_decl);
2970 for (i = 0; i < cur_length; i++)
2971 type_map [i] = TREE_VEC_ELT (vec, i);
2974 /* Go over METHOD's bytecode and note instruction starts in
2975 instruction_bits[]. */
2977 void
2978 note_instructions (JCF *jcf, tree method)
2980 int PC;
2981 unsigned char* byte_ops;
2982 long length = DECL_CODE_LENGTH (method);
2984 int saw_index;
2985 jint INT_temp;
2987 #undef RET /* Defined by config/i386/i386.h */
2988 #undef PTR
2989 #define BCODE byte_ops
2990 #define BYTE_type_node byte_type_node
2991 #define SHORT_type_node short_type_node
2992 #define INT_type_node int_type_node
2993 #define LONG_type_node long_type_node
2994 #define CHAR_type_node char_type_node
2995 #define PTR_type_node ptr_type_node
2996 #define FLOAT_type_node float_type_node
2997 #define DOUBLE_type_node double_type_node
2998 #define VOID_type_node void_type_node
2999 #define CONST_INDEX_1 (saw_index = 1, IMMEDIATE_u1)
3000 #define CONST_INDEX_2 (saw_index = 1, IMMEDIATE_u2)
3001 #define VAR_INDEX_1 (saw_index = 1, IMMEDIATE_u1)
3002 #define VAR_INDEX_2 (saw_index = 1, IMMEDIATE_u2)
3004 #define CHECK_PC_IN_RANGE(PC) ((void)1) /* Already handled by verifier. */
3006 JCF_SEEK (jcf, DECL_CODE_OFFSET (method));
3007 byte_ops = jcf->read_ptr;
3008 instruction_bits = XRESIZEVAR (char, instruction_bits, length + 1);
3009 memset (instruction_bits, 0, length + 1);
3010 type_states = VEC_alloc (tree, gc, length + 1);
3011 VEC_safe_grow_cleared (tree, gc, type_states, length + 1);
3013 /* This pass figures out which PCs can be the targets of jumps. */
3014 for (PC = 0; PC < length;)
3016 int oldpc = PC; /* PC at instruction start. */
3017 instruction_bits [PC] |= BCODE_INSTRUCTION_START;
3018 switch (byte_ops[PC++])
3020 #define JAVAOP(OPNAME, OPCODE, OPKIND, OPERAND_TYPE, OPERAND_VALUE) \
3021 case OPCODE: \
3022 PRE_##OPKIND(OPERAND_TYPE, OPERAND_VALUE); \
3023 break;
3025 #define NOTE_LABEL(PC) note_label(oldpc, PC)
3027 #define PRE_PUSHC(OPERAND_TYPE, OPERAND_VALUE) (void)(OPERAND_VALUE);
3028 #define PRE_LOAD(OPERAND_TYPE, OPERAND_VALUE) (void)(OPERAND_VALUE);
3029 #define PRE_STORE(OPERAND_TYPE, OPERAND_VALUE) (void)(OPERAND_VALUE);
3030 #define PRE_STACK(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
3031 #define PRE_UNOP(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
3032 #define PRE_BINOP(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
3033 #define PRE_CONVERT(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
3034 #define PRE_CONVERT2(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
3036 #define PRE_SPECIAL(OPERAND_TYPE, INSTRUCTION) \
3037 PRE_SPECIAL_##INSTRUCTION(OPERAND_TYPE)
3038 #define PRE_SPECIAL_IINC(OPERAND_TYPE) \
3039 ((void) IMMEDIATE_u1, (void) IMMEDIATE_s1)
3040 #define PRE_SPECIAL_ENTER(IGNORE) /* nothing */
3041 #define PRE_SPECIAL_EXIT(IGNORE) /* nothing */
3042 #define PRE_SPECIAL_THROW(IGNORE) /* nothing */
3043 #define PRE_SPECIAL_BREAK(IGNORE) /* nothing */
3045 /* two forms of wide instructions */
3046 #define PRE_SPECIAL_WIDE(IGNORE) \
3048 int modified_opcode = IMMEDIATE_u1; \
3049 if (modified_opcode == OPCODE_iinc) \
3051 (void) IMMEDIATE_u2; /* indexbyte1 and indexbyte2 */ \
3052 (void) IMMEDIATE_s2; /* constbyte1 and constbyte2 */ \
3054 else \
3056 (void) IMMEDIATE_u2; /* indexbyte1 and indexbyte2 */ \
3060 #define PRE_IMPL(IGNORE1, IGNORE2) /* nothing */
3062 #define PRE_MONITOR(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
3064 #define PRE_RETURN(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
3065 #define PRE_ARRAY(OPERAND_TYPE, SUBOP) \
3066 PRE_ARRAY_##SUBOP(OPERAND_TYPE)
3067 #define PRE_ARRAY_LOAD(TYPE) /* nothing */
3068 #define PRE_ARRAY_STORE(TYPE) /* nothing */
3069 #define PRE_ARRAY_LENGTH(TYPE) /* nothing */
3070 #define PRE_ARRAY_NEW(TYPE) PRE_ARRAY_NEW_##TYPE
3071 #define PRE_ARRAY_NEW_NUM ((void) IMMEDIATE_u1)
3072 #define PRE_ARRAY_NEW_PTR ((void) IMMEDIATE_u2)
3073 #define PRE_ARRAY_NEW_MULTI ((void) IMMEDIATE_u2, (void) IMMEDIATE_u1)
3075 #define PRE_TEST(OPERAND_TYPE, OPERAND_VALUE) NOTE_LABEL (oldpc+IMMEDIATE_s2)
3076 #define PRE_COND(OPERAND_TYPE, OPERAND_VALUE) NOTE_LABEL (oldpc+IMMEDIATE_s2)
3077 #define PRE_BRANCH(OPERAND_TYPE, OPERAND_VALUE) \
3078 saw_index = 0; INT_temp = (OPERAND_VALUE); \
3079 if (!saw_index) NOTE_LABEL(oldpc + INT_temp);
3080 #define PRE_JSR(OPERAND_TYPE, OPERAND_VALUE) \
3081 saw_index = 0; INT_temp = (OPERAND_VALUE); \
3082 NOTE_LABEL (PC); \
3083 if (!saw_index) NOTE_LABEL(oldpc + INT_temp);
3085 #define PRE_RET(OPERAND_TYPE, OPERAND_VALUE) (void)(OPERAND_VALUE)
3087 #define PRE_SWITCH(OPERAND_TYPE, TABLE_OR_LOOKUP) \
3088 PC = (PC + 3) / 4 * 4; PRE_##TABLE_OR_LOOKUP##_SWITCH
3090 #define PRE_LOOKUP_SWITCH \
3091 { jint default_offset = IMMEDIATE_s4; jint npairs = IMMEDIATE_s4; \
3092 NOTE_LABEL (default_offset+oldpc); \
3093 if (npairs >= 0) \
3094 while (--npairs >= 0) { \
3095 jint match ATTRIBUTE_UNUSED = IMMEDIATE_s4; \
3096 jint offset = IMMEDIATE_s4; \
3097 NOTE_LABEL (offset+oldpc); } \
3100 #define PRE_TABLE_SWITCH \
3101 { jint default_offset = IMMEDIATE_s4; \
3102 jint low = IMMEDIATE_s4; jint high = IMMEDIATE_s4; \
3103 NOTE_LABEL (default_offset+oldpc); \
3104 if (low <= high) \
3105 while (low++ <= high) { \
3106 jint offset = IMMEDIATE_s4; \
3107 NOTE_LABEL (offset+oldpc); } \
3110 #define PRE_FIELD(MAYBE_STATIC, PUT_OR_GET) (void)(IMMEDIATE_u2);
3111 #define PRE_OBJECT(MAYBE_STATIC, PUT_OR_GET) (void)(IMMEDIATE_u2);
3112 #define PRE_INVOKE(MAYBE_STATIC, IS_INTERFACE) \
3113 (void)(IMMEDIATE_u2); \
3114 PC += 2 * IS_INTERFACE /* for invokeinterface */;
3116 #include "javaop.def"
3117 #undef JAVAOP
3119 } /* for */
3122 void
3123 expand_byte_code (JCF *jcf, tree method)
3125 int PC;
3126 int i;
3127 const unsigned char *linenumber_pointer;
3128 int dead_code_index = -1;
3129 unsigned char* byte_ops;
3130 long length = DECL_CODE_LENGTH (method);
3131 location_t max_location = input_location;
3133 stack_pointer = 0;
3134 JCF_SEEK (jcf, DECL_CODE_OFFSET (method));
3135 byte_ops = jcf->read_ptr;
3137 /* We make an initial pass of the line number table, to note
3138 which instructions have associated line number entries. */
3139 linenumber_pointer = linenumber_table;
3140 for (i = 0; i < linenumber_count; i++)
3142 int pc = GET_u2 (linenumber_pointer);
3143 linenumber_pointer += 4;
3144 if (pc >= length)
3145 warning (0, "invalid PC in line number table");
3146 else
3148 if ((instruction_bits[pc] & BCODE_HAS_LINENUMBER) != 0)
3149 instruction_bits[pc] |= BCODE_HAS_MULTI_LINENUMBERS;
3150 instruction_bits[pc] |= BCODE_HAS_LINENUMBER;
3154 if (! verify_jvm_instructions_new (jcf, byte_ops, length))
3155 return;
3157 promote_arguments ();
3158 cache_this_class_ref (method);
3159 cache_cpool_data_ref ();
3161 /* Translate bytecodes. */
3162 linenumber_pointer = linenumber_table;
3163 for (PC = 0; PC < length;)
3165 if ((instruction_bits [PC] & BCODE_TARGET) != 0 || PC == 0)
3167 tree label = lookup_label (PC);
3168 flush_quick_stack ();
3169 if ((instruction_bits [PC] & BCODE_TARGET) != 0)
3170 java_add_stmt (build1 (LABEL_EXPR, void_type_node, label));
3171 if ((instruction_bits[PC] & BCODE_VERIFIED) != 0)
3172 load_type_state (PC);
3175 if (! (instruction_bits [PC] & BCODE_VERIFIED))
3177 if (dead_code_index == -1)
3179 /* This is the start of a region of unreachable bytecodes.
3180 They still need to be processed in order for EH ranges
3181 to get handled correctly. However, we can simply
3182 replace these bytecodes with nops. */
3183 dead_code_index = PC;
3186 /* Turn this bytecode into a nop. */
3187 byte_ops[PC] = 0x0;
3189 else
3191 if (dead_code_index != -1)
3193 /* We've just reached the end of a region of dead code. */
3194 if (extra_warnings)
3195 warning (0, "unreachable bytecode from %d to before %d",
3196 dead_code_index, PC);
3197 dead_code_index = -1;
3201 /* Handle possible line number entry for this PC.
3203 This code handles out-of-order and multiple linenumbers per PC,
3204 but is optimized for the case of line numbers increasing
3205 monotonically with PC. */
3206 if ((instruction_bits[PC] & BCODE_HAS_LINENUMBER) != 0)
3208 if ((instruction_bits[PC] & BCODE_HAS_MULTI_LINENUMBERS) != 0
3209 || GET_u2 (linenumber_pointer) != PC)
3210 linenumber_pointer = linenumber_table;
3211 while (linenumber_pointer < linenumber_table + linenumber_count * 4)
3213 int pc = GET_u2 (linenumber_pointer);
3214 linenumber_pointer += 4;
3215 if (pc == PC)
3217 int line = GET_u2 (linenumber_pointer - 2);
3218 input_location = linemap_line_start (line_table, line, 1);
3219 if (input_location > max_location)
3220 max_location = input_location;
3221 if (!(instruction_bits[PC] & BCODE_HAS_MULTI_LINENUMBERS))
3222 break;
3226 maybe_pushlevels (PC);
3227 PC = process_jvm_instruction (PC, byte_ops, length);
3228 maybe_poplevels (PC);
3229 } /* for */
3231 uncache_this_class_ref (method);
3233 if (dead_code_index != -1)
3235 /* We've just reached the end of a region of dead code. */
3236 if (extra_warnings)
3237 warning (0, "unreachable bytecode from %d to the end of the method",
3238 dead_code_index);
3241 DECL_FUNCTION_LAST_LINE (method) = max_location;
3244 static void
3245 java_push_constant_from_pool (JCF *jcf, int index)
3247 tree c;
3248 if (JPOOL_TAG (jcf, index) == CONSTANT_String)
3250 tree name;
3251 name = get_name_constant (jcf, JPOOL_USHORT1 (jcf, index));
3252 index = alloc_name_constant (CONSTANT_String, name);
3253 c = build_ref_from_constant_pool (index);
3254 c = convert (promote_type (string_type_node), c);
3256 else if (JPOOL_TAG (jcf, index) == CONSTANT_Class
3257 || JPOOL_TAG (jcf, index) == CONSTANT_ResolvedClass)
3259 tree record = get_class_constant (jcf, index);
3260 c = build_class_ref (record);
3262 else
3263 c = get_constant (jcf, index);
3264 push_value (c);
3267 int
3268 process_jvm_instruction (int PC, const unsigned char* byte_ops,
3269 long length ATTRIBUTE_UNUSED)
3271 const char *opname; /* Temporary ??? */
3272 int oldpc = PC; /* PC at instruction start. */
3274 /* If the instruction is at the beginning of an exception handler,
3275 replace the top of the stack with the thrown object reference. */
3276 if (instruction_bits [PC] & BCODE_EXCEPTION_TARGET)
3278 /* Note that the verifier will not emit a type map at all for
3279 dead exception handlers. In this case we just ignore the
3280 situation. */
3281 if ((instruction_bits[PC] & BCODE_VERIFIED) != 0)
3283 tree type = pop_type (promote_type (throwable_type_node));
3284 push_value (build_exception_object_ref (type));
3288 switch (byte_ops[PC++])
3290 #define JAVAOP(OPNAME, OPCODE, OPKIND, OPERAND_TYPE, OPERAND_VALUE) \
3291 case OPCODE: \
3292 opname = #OPNAME; \
3293 OPKIND(OPERAND_TYPE, OPERAND_VALUE); \
3294 break;
3296 #define RET(OPERAND_TYPE, OPERAND_VALUE) \
3298 int saw_index = 0; \
3299 int index = OPERAND_VALUE; \
3300 (void) saw_index; /* Avoid set but not used warning. */ \
3301 build_java_ret \
3302 (find_local_variable (index, return_address_type_node, oldpc)); \
3305 #define JSR(OPERAND_TYPE, OPERAND_VALUE) \
3307 /* OPERAND_VALUE may have side-effects on PC */ \
3308 int opvalue = OPERAND_VALUE; \
3309 build_java_jsr (oldpc + opvalue, PC); \
3312 /* Push a constant onto the stack. */
3313 #define PUSHC(OPERAND_TYPE, OPERAND_VALUE) \
3314 { int saw_index = 0; int ival = (OPERAND_VALUE); \
3315 if (saw_index) java_push_constant_from_pool (current_jcf, ival); \
3316 else expand_java_pushc (ival, OPERAND_TYPE##_type_node); }
3318 /* internal macro added for use by the WIDE case */
3319 #define LOAD_INTERNAL(OPTYPE, OPVALUE) \
3320 expand_load_internal (OPVALUE, type_map[OPVALUE], oldpc);
3322 /* Push a local variable onto the operand stack. */
3323 #define LOAD(OPERAND_TYPE, OPERAND_VALUE) \
3325 /* have to do this since OPERAND_VALUE may have side-effects */ \
3326 int opvalue = OPERAND_VALUE; \
3327 LOAD_INTERNAL(OPERAND_TYPE##_type_node, opvalue); \
3330 #define RETURN(OPERAND_TYPE, OPERAND_VALUE) \
3331 expand_java_return (OPERAND_TYPE##_type_node)
3333 #define REM_EXPR TRUNC_MOD_EXPR
3334 #define BINOP(OPERAND_TYPE, OPERAND_VALUE) \
3335 expand_java_binop (OPERAND_TYPE##_type_node, OPERAND_VALUE##_EXPR)
3337 #define FIELD(IS_STATIC, IS_PUT) \
3338 expand_java_field_op (IS_STATIC, IS_PUT, IMMEDIATE_u2)
3340 #define TEST(OPERAND_TYPE, CONDITION) \
3341 expand_test (CONDITION##_EXPR, OPERAND_TYPE##_type_node, oldpc+IMMEDIATE_s2)
3343 #define COND(OPERAND_TYPE, CONDITION) \
3344 expand_cond (CONDITION##_EXPR, OPERAND_TYPE##_type_node, oldpc+IMMEDIATE_s2)
3346 #define BRANCH(OPERAND_TYPE, OPERAND_VALUE) \
3347 BRANCH_##OPERAND_TYPE (OPERAND_VALUE)
3349 #define BRANCH_GOTO(OPERAND_VALUE) \
3350 expand_java_goto (oldpc + OPERAND_VALUE)
3352 #define BRANCH_CALL(OPERAND_VALUE) \
3353 expand_java_call (oldpc + OPERAND_VALUE, oldpc)
3355 #if 0
3356 #define BRANCH_RETURN(OPERAND_VALUE) \
3358 tree type = OPERAND_TYPE##_type_node; \
3359 tree value = find_local_variable (OPERAND_VALUE, type, oldpc); \
3360 expand_java_ret (value); \
3362 #endif
3364 #define NOT_IMPL(OPERAND_TYPE, OPERAND_VALUE) \
3365 fprintf (stderr, "%3d: %s ", oldpc, opname); \
3366 fprintf (stderr, "(not implemented)\n")
3367 #define NOT_IMPL1(OPERAND_VALUE) \
3368 fprintf (stderr, "%3d: %s ", oldpc, opname); \
3369 fprintf (stderr, "(not implemented)\n")
3371 #define BRANCH_RETURN(OPERAND_VALUE) NOT_IMPL1(OPERAND_VALUE)
3373 #define STACK(SUBOP, COUNT) STACK_##SUBOP (COUNT)
3375 #define STACK_POP(COUNT) java_stack_pop (COUNT)
3377 #define STACK_SWAP(COUNT) java_stack_swap()
3379 #define STACK_DUP(COUNT) java_stack_dup (COUNT, 0)
3380 #define STACK_DUPx1(COUNT) java_stack_dup (COUNT, 1)
3381 #define STACK_DUPx2(COUNT) java_stack_dup (COUNT, 2)
3383 #define SWITCH(OPERAND_TYPE, TABLE_OR_LOOKUP) \
3384 PC = (PC + 3) / 4 * 4; TABLE_OR_LOOKUP##_SWITCH
3386 #define LOOKUP_SWITCH \
3387 { jint default_offset = IMMEDIATE_s4; jint npairs = IMMEDIATE_s4; \
3388 tree selector = pop_value (INT_type_node); \
3389 tree switch_expr = expand_java_switch (selector, oldpc + default_offset); \
3390 while (--npairs >= 0) \
3392 jint match = IMMEDIATE_s4; jint offset = IMMEDIATE_s4; \
3393 expand_java_add_case (switch_expr, match, oldpc + offset); \
3397 #define TABLE_SWITCH \
3398 { jint default_offset = IMMEDIATE_s4; \
3399 jint low = IMMEDIATE_s4; jint high = IMMEDIATE_s4; \
3400 tree selector = pop_value (INT_type_node); \
3401 tree switch_expr = expand_java_switch (selector, oldpc + default_offset); \
3402 for (; low <= high; low++) \
3404 jint offset = IMMEDIATE_s4; \
3405 expand_java_add_case (switch_expr, low, oldpc + offset); \
3409 #define INVOKE(MAYBE_STATIC, IS_INTERFACE) \
3410 { int opcode = byte_ops[PC-1]; \
3411 int method_ref_index = IMMEDIATE_u2; \
3412 int nargs; \
3413 if (IS_INTERFACE) { nargs = IMMEDIATE_u1; (void) IMMEDIATE_u1; } \
3414 else nargs = -1; \
3415 expand_invoke (opcode, method_ref_index, nargs); \
3418 /* Handle new, checkcast, instanceof */
3419 #define OBJECT(TYPE, OP) \
3420 expand_java_##OP (get_class_constant (current_jcf, IMMEDIATE_u2))
3422 #define ARRAY(OPERAND_TYPE, SUBOP) ARRAY_##SUBOP(OPERAND_TYPE)
3424 #define ARRAY_LOAD(OPERAND_TYPE) \
3426 expand_java_arrayload( OPERAND_TYPE##_type_node ); \
3429 #define ARRAY_STORE(OPERAND_TYPE) \
3431 expand_java_arraystore( OPERAND_TYPE##_type_node ); \
3434 #define ARRAY_LENGTH(OPERAND_TYPE) expand_java_array_length();
3435 #define ARRAY_NEW(OPERAND_TYPE) ARRAY_NEW_##OPERAND_TYPE()
3436 #define ARRAY_NEW_PTR() \
3437 push_value (build_anewarray (get_class_constant (current_jcf, \
3438 IMMEDIATE_u2), \
3439 pop_value (int_type_node)));
3440 #define ARRAY_NEW_NUM() \
3442 int atype = IMMEDIATE_u1; \
3443 push_value (build_newarray (atype, pop_value (int_type_node)));\
3445 #define ARRAY_NEW_MULTI() \
3447 tree klass = get_class_constant (current_jcf, IMMEDIATE_u2 ); \
3448 int ndims = IMMEDIATE_u1; \
3449 expand_java_multianewarray( klass, ndims ); \
3452 #define UNOP(OPERAND_TYPE, OPERAND_VALUE) \
3453 push_value (fold_build1 (NEGATE_EXPR, OPERAND_TYPE##_type_node, \
3454 pop_value (OPERAND_TYPE##_type_node)));
3456 #define CONVERT2(FROM_TYPE, TO_TYPE) \
3458 push_value (build1 (NOP_EXPR, int_type_node, \
3459 (convert (TO_TYPE##_type_node, \
3460 pop_value (FROM_TYPE##_type_node))))); \
3463 #define CONVERT(FROM_TYPE, TO_TYPE) \
3465 push_value (convert (TO_TYPE##_type_node, \
3466 pop_value (FROM_TYPE##_type_node))); \
3469 /* internal macro added for use by the WIDE case
3470 Added TREE_TYPE (decl) assignment, apbianco */
3471 #define STORE_INTERNAL(OPTYPE, OPVALUE) \
3473 tree decl, value; \
3474 int index = OPVALUE; \
3475 tree type = OPTYPE; \
3476 value = pop_value (type); \
3477 type = TREE_TYPE (value); \
3478 decl = find_local_variable (index, type, oldpc); \
3479 set_local_type (index, type); \
3480 java_add_stmt (build2 (MODIFY_EXPR, type, decl, value)); \
3483 #define STORE(OPERAND_TYPE, OPERAND_VALUE) \
3485 /* have to do this since OPERAND_VALUE may have side-effects */ \
3486 int opvalue = OPERAND_VALUE; \
3487 STORE_INTERNAL(OPERAND_TYPE##_type_node, opvalue); \
3490 #define SPECIAL(OPERAND_TYPE, INSTRUCTION) \
3491 SPECIAL_##INSTRUCTION(OPERAND_TYPE)
3493 #define SPECIAL_ENTER(IGNORED) MONITOR_OPERATION (soft_monitorenter_node)
3494 #define SPECIAL_EXIT(IGNORED) MONITOR_OPERATION (soft_monitorexit_node)
3496 #define MONITOR_OPERATION(call) \
3498 tree o = pop_value (ptr_type_node); \
3499 tree c; \
3500 flush_quick_stack (); \
3501 c = build_java_monitor (call, o); \
3502 TREE_SIDE_EFFECTS (c) = 1; \
3503 java_add_stmt (c); \
3506 #define SPECIAL_IINC(IGNORED) \
3508 unsigned int local_var_index = IMMEDIATE_u1; \
3509 int ival = IMMEDIATE_s1; \
3510 expand_iinc(local_var_index, ival, oldpc); \
3513 #define SPECIAL_WIDE(IGNORED) \
3515 int modified_opcode = IMMEDIATE_u1; \
3516 unsigned int local_var_index = IMMEDIATE_u2; \
3517 switch (modified_opcode) \
3519 case OPCODE_iinc: \
3521 int ival = IMMEDIATE_s2; \
3522 expand_iinc (local_var_index, ival, oldpc); \
3523 break; \
3525 case OPCODE_iload: \
3526 case OPCODE_lload: \
3527 case OPCODE_fload: \
3528 case OPCODE_dload: \
3529 case OPCODE_aload: \
3531 /* duplicate code from LOAD macro */ \
3532 LOAD_INTERNAL(operand_type[modified_opcode], local_var_index); \
3533 break; \
3535 case OPCODE_istore: \
3536 case OPCODE_lstore: \
3537 case OPCODE_fstore: \
3538 case OPCODE_dstore: \
3539 case OPCODE_astore: \
3541 STORE_INTERNAL(operand_type[modified_opcode], local_var_index); \
3542 break; \
3544 default: \
3545 error ("unrecognized wide sub-instruction"); \
3549 #define SPECIAL_THROW(IGNORED) \
3550 build_java_athrow (pop_value (throwable_type_node))
3552 #define SPECIAL_BREAK NOT_IMPL1
3553 #define IMPL NOT_IMPL
3555 #include "javaop.def"
3556 #undef JAVAOP
3557 default:
3558 fprintf (stderr, "%3d: unknown(%3d)\n", oldpc, byte_ops[PC]);
3560 return PC;
3563 /* Return the opcode at PC in the code section pointed to by
3564 CODE_OFFSET. */
3566 static unsigned char
3567 peek_opcode_at_pc (JCF *jcf, int code_offset, int pc)
3569 unsigned char opcode;
3570 long absolute_offset = (long)JCF_TELL (jcf);
3572 JCF_SEEK (jcf, code_offset);
3573 opcode = jcf->read_ptr [pc];
3574 JCF_SEEK (jcf, absolute_offset);
3575 return opcode;
3578 /* Some bytecode compilers are emitting accurate LocalVariableTable
3579 attributes. Here's an example:
3581 PC <t>store_<n>
3582 PC+1 ...
3584 Attribute "LocalVariableTable"
3585 slot #<n>: ... (PC: PC+1 length: L)
3587 This is accurate because the local in slot <n> really exists after
3588 the opcode at PC is executed, hence from PC+1 to PC+1+L.
3590 This procedure recognizes this situation and extends the live range
3591 of the local in SLOT to START_PC-1 or START_PC-2 (depending on the
3592 length of the store instruction).
3594 This function is used by `give_name_to_locals' so that a local's
3595 DECL features a DECL_LOCAL_START_PC such that the first related
3596 store operation will use DECL as a destination, not an unrelated
3597 temporary created for the occasion.
3599 This function uses a global (instruction_bits) that `note_instructions'
3600 should have allocated and filled properly. */
3602 int
3603 maybe_adjust_start_pc (struct JCF *jcf, int code_offset,
3604 int start_pc, int slot)
3606 int first, index, opcode;
3607 int pc, insn_pc;
3608 int wide_found = 0;
3610 if (!start_pc)
3611 return start_pc;
3613 first = index = -1;
3615 /* Find the last instruction before START_PC and remember it. */
3616 for (pc = start_pc-1; pc; pc--)
3617 if (instruction_bits [pc] & BCODE_INSTRUCTION_START)
3618 break;
3619 insn_pc = pc;
3621 /* Retrieve the instruction, handle `wide'. */
3622 opcode = (int) peek_opcode_at_pc (jcf, code_offset, pc++);
3623 if (opcode == OPCODE_wide)
3625 wide_found = 1;
3626 opcode = (int) peek_opcode_at_pc (jcf, code_offset, pc++);
3629 switch (opcode)
3631 case OPCODE_astore_0:
3632 case OPCODE_astore_1:
3633 case OPCODE_astore_2:
3634 case OPCODE_astore_3:
3635 first = OPCODE_astore_0;
3636 break;
3638 case OPCODE_istore_0:
3639 case OPCODE_istore_1:
3640 case OPCODE_istore_2:
3641 case OPCODE_istore_3:
3642 first = OPCODE_istore_0;
3643 break;
3645 case OPCODE_lstore_0:
3646 case OPCODE_lstore_1:
3647 case OPCODE_lstore_2:
3648 case OPCODE_lstore_3:
3649 first = OPCODE_lstore_0;
3650 break;
3652 case OPCODE_fstore_0:
3653 case OPCODE_fstore_1:
3654 case OPCODE_fstore_2:
3655 case OPCODE_fstore_3:
3656 first = OPCODE_fstore_0;
3657 break;
3659 case OPCODE_dstore_0:
3660 case OPCODE_dstore_1:
3661 case OPCODE_dstore_2:
3662 case OPCODE_dstore_3:
3663 first = OPCODE_dstore_0;
3664 break;
3666 case OPCODE_astore:
3667 case OPCODE_istore:
3668 case OPCODE_lstore:
3669 case OPCODE_fstore:
3670 case OPCODE_dstore:
3671 index = peek_opcode_at_pc (jcf, code_offset, pc);
3672 if (wide_found)
3674 int other = peek_opcode_at_pc (jcf, code_offset, ++pc);
3675 index = (other << 8) + index;
3677 break;
3680 /* Now we decide: first >0 means we have a <t>store_<n>, index >0
3681 means we have a <t>store. */
3682 if ((first > 0 && opcode - first == slot) || (index > 0 && index == slot))
3683 start_pc = insn_pc;
3685 return start_pc;
3688 /* Force the (direct) sub-operands of NODE to be evaluated in left-to-right
3689 order, as specified by the Java Language Specification.
3691 The problem is that while expand_expr will evaluate its sub-operands in
3692 left-to-right order, for variables it will just return an rtx (i.e.
3693 an lvalue) for the variable (rather than an rvalue). So it is possible
3694 that a later sub-operand will change the register, and when the
3695 actual operation is done, it will use the new value, when it should
3696 have used the original value.
3698 We fix this by using save_expr. This forces the sub-operand to be
3699 copied into a fresh virtual register.
3701 For method invocation, we modify the arguments so that a
3702 left-to-right order evaluation is performed. Saved expressions
3703 will, in CALL_EXPR order, be reused when the call will be expanded.
3705 We also promote outgoing args if needed. */
3707 tree
3708 force_evaluation_order (tree node)
3710 if (flag_syntax_only)
3711 return node;
3712 if (TREE_CODE (node) == CALL_EXPR
3713 || (TREE_CODE (node) == COMPOUND_EXPR
3714 && TREE_CODE (TREE_OPERAND (node, 0)) == CALL_EXPR
3715 && TREE_CODE (TREE_OPERAND (node, 1)) == SAVE_EXPR))
3717 tree call, cmp;
3718 int i, nargs;
3720 /* Account for wrapped around ctors. */
3721 if (TREE_CODE (node) == COMPOUND_EXPR)
3722 call = TREE_OPERAND (node, 0);
3723 else
3724 call = node;
3726 nargs = call_expr_nargs (call);
3728 /* This reverses the evaluation order. This is a desired effect. */
3729 for (i = 0, cmp = NULL_TREE; i < nargs; i++)
3731 tree arg = CALL_EXPR_ARG (call, i);
3732 /* Promote types smaller than integer. This is required by
3733 some ABIs. */
3734 tree type = TREE_TYPE (arg);
3735 tree saved;
3736 if (targetm.calls.promote_prototypes (type)
3737 && INTEGRAL_TYPE_P (type)
3738 && INT_CST_LT_UNSIGNED (TYPE_SIZE (type),
3739 TYPE_SIZE (integer_type_node)))
3740 arg = fold_convert (integer_type_node, arg);
3742 saved = save_expr (force_evaluation_order (arg));
3743 cmp = (cmp == NULL_TREE ? saved :
3744 build2 (COMPOUND_EXPR, void_type_node, cmp, saved));
3746 CALL_EXPR_ARG (call, i) = saved;
3749 if (cmp && TREE_CODE (cmp) == COMPOUND_EXPR)
3750 TREE_SIDE_EFFECTS (cmp) = 1;
3752 if (cmp)
3754 cmp = build2 (COMPOUND_EXPR, TREE_TYPE (node), cmp, node);
3755 if (TREE_TYPE (cmp) != void_type_node)
3756 cmp = save_expr (cmp);
3757 TREE_SIDE_EFFECTS (cmp) = 1;
3758 node = cmp;
3761 return node;
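/* A small illustrative case (the Java source is hypothetical): for

       f (i, i = 7);

   the JLS requires the first argument to see the old value of i.  By
   wrapping each argument in a SAVE_EXPR and chaining the saves with
   COMPOUND_EXPRs, as above, the arguments are forced to be evaluated left
   to right before the CALL_EXPR itself is expanded, whatever order the
   backend would otherwise choose.  */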
3764 /* Build a node to represent empty statements and blocks. */
3766 tree
3767 build_java_empty_stmt (void)
3769 tree t = build_empty_stmt (input_location);
3770 return t;
3773 /* Promote all args of integral type before generating any code. */
3775 static void
3776 promote_arguments (void)
3778 int i;
3779 tree arg;
3780 for (arg = DECL_ARGUMENTS (current_function_decl), i = 0;
3781 arg != NULL_TREE; arg = DECL_CHAIN (arg), i++)
3783 tree arg_type = TREE_TYPE (arg);
3784 if (INTEGRAL_TYPE_P (arg_type)
3785 && TYPE_PRECISION (arg_type) < 32)
3787 tree copy = find_local_variable (i, integer_type_node, -1);
3788 java_add_stmt (build2 (MODIFY_EXPR, integer_type_node,
3789 copy,
3790 fold_convert (integer_type_node, arg)));
3792 if (TYPE_IS_WIDE (arg_type))
3793 i++;
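/* Illustrative effect of the loop above (the local's name is invented): an
   incoming parameter such as a Java `short s' is copied once, at function
   entry, into the 32-bit local slot the verifier expects,

       local_i = (int) s;

   so all later loads of that slot operate on an int, matching the JVM rule
   that locals narrower than int are held as int.  */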
3797 /* Create a local variable that points to the constant pool. */
3799 static void
3800 cache_cpool_data_ref (void)
3802 if (optimize)
3804 tree cpool;
3805 tree d = build_constant_data_ref (flag_indirect_classes);
3806 tree cpool_ptr = build_decl (input_location, VAR_DECL, NULL_TREE,
3807 build_pointer_type (TREE_TYPE (d)));
3808 java_add_local_var (cpool_ptr);
3809 TREE_CONSTANT (cpool_ptr) = 1;
3811 java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (cpool_ptr),
3812 cpool_ptr, build_address_of (d)));
3813 cpool = build1 (INDIRECT_REF, TREE_TYPE (d), cpool_ptr);
3814 TREE_THIS_NOTRAP (cpool) = 1;
3815 TYPE_CPOOL_DATA_REF (output_class) = cpool;
3819 #include "gt-java-expr.h"