gcc/java/expr.c
1 /* Process expressions for the GNU compiler for the Java(TM) language.
2 Copyright (C) 1996-2014 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>.
20 Java and all Java-based marks are trademarks or registered trademarks
21 of Sun Microsystems, Inc. in the United States and other countries.
22 The Free Software Foundation is independent of Sun Microsystems, Inc. */
24 /* Hacked by Per Bothner <bothner@cygnus.com> February 1996. */
26 #include "config.h"
27 #include "system.h"
28 #include "coretypes.h"
29 #include "tm.h" /* For INT_TYPE_SIZE,
30 TARGET_VTABLE_USES_DESCRIPTORS,
31 BITS_PER_UNIT,
32 MODIFY_JNI_METHOD_CALL and
33 PARM_BOUNDARY. */
35 #include "tree.h"
36 #include "stringpool.h"
37 #include "stor-layout.h"
38 #include "flags.h"
39 #include "java-tree.h"
40 #include "javaop.h"
41 #include "java-opcodes.h"
42 #include "jcf.h"
43 #include "java-except.h"
44 #include "parse.h"
45 #include "diagnostic-core.h"
46 #include "ggc.h"
47 #include "tree-iterator.h"
48 #include "target.h"
49 #include "wide-int.h"
51 static void flush_quick_stack (void);
52 static void push_value (tree);
53 static tree pop_value (tree);
54 static void java_stack_swap (void);
55 static void java_stack_dup (int, int);
56 static void build_java_athrow (tree);
57 static void build_java_jsr (int, int);
58 static void build_java_ret (tree);
59 static void expand_java_multianewarray (tree, int);
60 static void expand_java_arraystore (tree);
61 static void expand_java_arrayload (tree);
62 static void expand_java_array_length (void);
63 static tree build_java_monitor (tree, tree);
64 static void expand_java_pushc (int, tree);
65 static void expand_java_return (tree);
66 static void expand_load_internal (int, tree, int);
67 static void expand_java_NEW (tree);
68 static void expand_java_INSTANCEOF (tree);
69 static void expand_java_CHECKCAST (tree);
70 static void expand_iinc (unsigned int, int, int);
71 static void expand_java_binop (tree, enum tree_code);
72 static void note_label (int, int);
73 static void expand_compare (enum tree_code, tree, tree, int);
74 static void expand_test (enum tree_code, tree, int);
75 static void expand_cond (enum tree_code, tree, int);
76 static void expand_java_goto (int);
77 static tree expand_java_switch (tree, int);
78 static void expand_java_add_case (tree, int, int);
79 static vec<tree, va_gc> *pop_arguments (tree);
80 static void expand_invoke (int, int, int);
81 static void expand_java_field_op (int, int, int);
82 static void java_push_constant_from_pool (struct JCF *, int);
83 static void java_stack_pop (int);
84 static tree build_java_throw_out_of_bounds_exception (tree);
85 static tree build_java_check_indexed_type (tree, tree);
86 static unsigned char peek_opcode_at_pc (struct JCF *, int, int);
87 static void promote_arguments (void);
88 static void cache_cpool_data_ref (void);
90 static GTY(()) tree operand_type[59];
92 static GTY(()) tree methods_ident;
93 static GTY(()) tree ncode_ident;
94 tree dtable_ident = NULL_TREE;
96 /* Set to nonzero value in order to emit class initialization code
97 before static field references. */
98 int always_initialize_class_p = 0;
100 /* We store the stack state in two places:
101 Within a basic block, we use the quick_stack, which is a vec of expression
102 nodes.
103 This is the top part of the stack; below that we use find_stack_slot.
104 At the end of a basic block, the quick_stack must be flushed
105 to the stack slot array (as handled by find_stack_slot).
106 Using quick_stack generates better code (especially when
107 compiled without optimization), because we do not have to
108 explicitly store and load trees to temporary variables.
110 If a variable is on the quick stack, it means the value of that variable
111 as of when the quick stack was last flushed. Conceptually, flush_quick_stack
112 saves all the quick_stack elements in parallel. However, that is
113 complicated, so it actually saves them (i.e. copies each stack value
114 to its home virtual register) starting from the low indexes. This allows a
115 quick_stack element at index i (counting from the bottom of the stack) to
116 reference the virtual registers for slots that are >= i, but not those that are deeper.
117 This convention makes most operations easier. For example iadd works
118 even when the stack contains (reg[0], reg[1]): It results in the
119 stack containing (reg[0]+reg[1]), which is OK. However, some stack
120 operations are more complicated. For example dup given a stack
121 containing (reg[0]) would yield (reg[0], reg[0]), which would violate
122 the convention, since stack value 1 would refer to a register with
123 lower index (reg[0]), which flush_quick_stack does not safely handle.
124 So dup cannot just add an extra element to the quick_stack, but iadd can.
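/* Editor's worked example (illustrative, not part of the original source):
   suppose the quick_stack holds the single expression (reg[0] + 1).
   Flushing copies low-to-high, so slot 0 is assigned reg[0] = reg[0] + 1;
   that is safe because the element at index 0 only reads registers with
   index >= 0.  If dup naively produced the quick_stack
   (reg[0] + 1, reg[0] + 1), flushing would first clobber reg[0] and then
   compute slot 1 from the already-updated reg[0], giving the wrong value.
   Hence java_stack_dup flushes the quick_stack before duplicating, while
   operations such as iadd need not.  */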
127 static GTY(()) vec<tree, va_gc> *quick_stack;
129 /* The physical memory page size used in this computer. See
130 build_field_ref(). */
131 static GTY(()) tree page_size;
133 /* The stack pointer of the Java virtual machine.
134 This does include the size of the quick_stack. */
136 int stack_pointer;
138 const unsigned char *linenumber_table;
139 int linenumber_count;
141 /* Largest pc so far in this method that has been passed to lookup_label. */
142 int highest_label_pc_this_method = -1;
144 /* Base value for this method to add to pc to get generated label. */
145 int start_label_pc_this_method = 0;
147 void
148 init_expr_processing (void)
150 operand_type[21] = operand_type[54] = int_type_node;
151 operand_type[22] = operand_type[55] = long_type_node;
152 operand_type[23] = operand_type[56] = float_type_node;
153 operand_type[24] = operand_type[57] = double_type_node;
154 operand_type[25] = operand_type[58] = ptr_type_node;
157 tree
158 java_truthvalue_conversion (tree expr)
160 /* It is simpler and generates better code to have only TRUTH_*_EXPR
161 or comparison expressions as truth values at this level.
163 This function should normally be identity for Java. */
165 switch (TREE_CODE (expr))
167 case EQ_EXPR: case NE_EXPR: case UNEQ_EXPR: case LTGT_EXPR:
168 case LE_EXPR: case GE_EXPR: case LT_EXPR: case GT_EXPR:
169 case UNLE_EXPR: case UNGE_EXPR: case UNLT_EXPR: case UNGT_EXPR:
170 case ORDERED_EXPR: case UNORDERED_EXPR:
171 case TRUTH_ANDIF_EXPR:
172 case TRUTH_ORIF_EXPR:
173 case TRUTH_AND_EXPR:
174 case TRUTH_OR_EXPR:
175 case TRUTH_XOR_EXPR:
176 case TRUTH_NOT_EXPR:
177 case ERROR_MARK:
178 return expr;
180 case INTEGER_CST:
181 return integer_zerop (expr) ? boolean_false_node : boolean_true_node;
183 case REAL_CST:
184 return real_zerop (expr) ? boolean_false_node : boolean_true_node;
186 /* are these legal? XXX JH */
187 case NEGATE_EXPR:
188 case ABS_EXPR:
189 case FLOAT_EXPR:
190 /* These don't change whether an object is nonzero or zero. */
191 return java_truthvalue_conversion (TREE_OPERAND (expr, 0));
193 case COND_EXPR:
194 /* Distribute the conversion into the arms of a COND_EXPR. */
195 return fold_build3 (COND_EXPR, boolean_type_node, TREE_OPERAND (expr, 0),
196 java_truthvalue_conversion (TREE_OPERAND (expr, 1)),
197 java_truthvalue_conversion (TREE_OPERAND (expr, 2)));
199 case NOP_EXPR:
200 /* If this is widening the argument, we can ignore it. */
201 if (TYPE_PRECISION (TREE_TYPE (expr))
202 >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr, 0))))
203 return java_truthvalue_conversion (TREE_OPERAND (expr, 0));
204 /* fall through to default */
206 default:
207 return fold_build2 (NE_EXPR, boolean_type_node,
208 expr, boolean_false_node);
212 /* Save any stack slots that happen to be in the quick_stack into their
213 home virtual register slots.
215 The copy order is from low stack index to high, to support the invariant
216 that the expression for a slot may contain decls for stack slots with
217 higher (or the same) index, but not lower. */
219 static void
220 flush_quick_stack (void)
222 int stack_index = stack_pointer;
223 unsigned ix;
224 tree t;
226 /* Count the number of slots the quick stack is holding. */
227 for (ix = 0; vec_safe_iterate (quick_stack, ix, &t); ix++)
228 stack_index -= 1 + TYPE_IS_WIDE (TREE_TYPE (t));
230 for (ix = 0; vec_safe_iterate (quick_stack, ix, &t); ix++)
232 tree decl, type = TREE_TYPE (t);
234 decl = find_stack_slot (stack_index, type);
235 if (decl != t)
236 java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (t), decl, t));
237 stack_index += 1 + TYPE_IS_WIDE (type);
240 vec_safe_truncate (quick_stack, 0);
243 /* Push TYPE on the type stack.
244 Return 1 on success, 0 on overflow. */
246 int
247 push_type_0 (tree type)
249 int n_words;
250 type = promote_type (type);
251 n_words = 1 + TYPE_IS_WIDE (type);
252 if (stack_pointer + n_words > DECL_MAX_STACK (current_function_decl))
253 return 0;
254 /* Allocate decl for this variable now, so we get a temporary that
255 survives the whole method. */
256 find_stack_slot (stack_pointer, type);
257 stack_type_map[stack_pointer++] = type;
258 n_words--;
259 while (--n_words >= 0)
260 stack_type_map[stack_pointer++] = TYPE_SECOND;
261 return 1;
264 void
265 push_type (tree type)
267 int r = push_type_0 (type);
268 gcc_assert (r);
271 static void
272 push_value (tree value)
274 tree type = TREE_TYPE (value);
275 if (TYPE_PRECISION (type) < 32 && INTEGRAL_TYPE_P (type))
277 type = promote_type (type);
278 value = convert (type, value);
280 push_type (type);
281 vec_safe_push (quick_stack, value);
283 /* If the value has a side effect, then we need to evaluate it
284 whether or not the result is used. If the value ends up on the
285 quick stack and is then popped, this won't happen -- so we flush
286 the quick stack. It is safest to simply always flush, though,
287 since TREE_SIDE_EFFECTS doesn't capture COMPONENT_REF, and for
288 the latter we may need to strip conversions. */
289 flush_quick_stack ();
292 /* Pop a type from the type stack.
293 TYPE is the expected type. Return the actual type, which must be
294 convertible to TYPE.
295 On an error, *MESSAGEP is set to a freshly malloc'd error message. */
297 tree
298 pop_type_0 (tree type, char **messagep)
300 int n_words;
301 tree t;
302 *messagep = NULL;
303 if (TREE_CODE (type) == RECORD_TYPE)
304 type = promote_type (type);
305 n_words = 1 + TYPE_IS_WIDE (type);
306 if (stack_pointer < n_words)
308 *messagep = xstrdup ("stack underflow");
309 return type;
311 while (--n_words > 0)
313 if (stack_type_map[--stack_pointer] != void_type_node)
315 *messagep = xstrdup ("Invalid multi-word value on type stack");
316 return type;
319 t = stack_type_map[--stack_pointer];
320 if (type == NULL_TREE || t == type)
321 return t;
322 if (TREE_CODE (t) == TREE_LIST)
326 tree tt = TREE_PURPOSE (t);
327 if (! can_widen_reference_to (tt, type))
329 t = tt;
330 goto fail;
332 t = TREE_CHAIN (t);
334 while (t);
335 return t;
337 if (INTEGRAL_TYPE_P (type) && INTEGRAL_TYPE_P (t)
338 && TYPE_PRECISION (type) <= 32 && TYPE_PRECISION (t) <= 32)
339 return t;
340 if (TREE_CODE (type) == POINTER_TYPE && TREE_CODE (t) == POINTER_TYPE)
342 /* If the expected type we've been passed is object or ptr
343 (i.e. void*), the caller needs to know the real type. */
344 if (type == ptr_type_node || type == object_ptr_type_node)
345 return t;
347 /* Since the verifier has already run, we know that any
348 types we see will be compatible. In BC mode, this fact
349 may be checked at runtime, but if that is so then we can
350 assume its truth here as well. So, we always succeed
351 here, with the expected type. */
352 return type;
355 if (! flag_verify_invocations && flag_indirect_dispatch
356 && t == object_ptr_type_node)
358 if (type != ptr_type_node)
359 warning (0, "need to insert runtime check for %s",
360 xstrdup (lang_printable_name (type, 0)));
361 return type;
364 /* lang_printable_name uses a static buffer, so we must save the result
365 from calling it the first time. */
366 fail:
368 char *temp = xstrdup (lang_printable_name (type, 0));
369 /* If the stack contains a multi-word type, keep popping the stack until
370 the real type is found. */
371 while (t == void_type_node)
372 t = stack_type_map[--stack_pointer];
373 *messagep = concat ("expected type '", temp,
374 "' but stack contains '", lang_printable_name (t, 0),
375 "'", NULL);
376 free (temp);
378 return type;
381 /* Pop a type from the type stack.
382 TYPE is the expected type. Return the actual type, which must be
383 convertible to TYPE, otherwise call error. */
385 tree
386 pop_type (tree type)
388 char *message = NULL;
389 type = pop_type_0 (type, &message);
390 if (message != NULL)
392 error ("%s", message);
393 free (message);
395 return type;
399 /* Return true if two type assertions are equal. */
401 static int
402 type_assertion_eq (const void * k1_p, const void * k2_p)
404 const type_assertion k1 = *(const type_assertion *)k1_p;
405 const type_assertion k2 = *(const type_assertion *)k2_p;
406 return (k1.assertion_code == k2.assertion_code
407 && k1.op1 == k2.op1
408 && k1.op2 == k2.op2);
411 /* Hash a type assertion. */
413 static hashval_t
414 type_assertion_hash (const void *p)
416 const type_assertion *k_p = (const type_assertion *) p;
417 hashval_t hash = iterative_hash (&k_p->assertion_code, sizeof
418 k_p->assertion_code, 0);
420 switch (k_p->assertion_code)
422 case JV_ASSERT_TYPES_COMPATIBLE:
423 hash = iterative_hash (&TYPE_UID (k_p->op2), sizeof TYPE_UID (k_p->op2),
424 hash);
425 /* Fall through. */
427 case JV_ASSERT_IS_INSTANTIABLE:
428 hash = iterative_hash (&TYPE_UID (k_p->op1), sizeof TYPE_UID (k_p->op1),
429 hash);
430 /* Fall through. */
432 case JV_ASSERT_END_OF_TABLE:
433 break;
435 default:
436 gcc_unreachable ();
439 return hash;
442 /* Add an entry to the type assertion table for the given class.
443 KLASS is the class for which this assertion will be evaluated by the
444 runtime during loading/initialization.
445 ASSERTION_CODE is the 'opcode' or type of this assertion: see java-tree.h.
446 OP1 and OP2 are the operands. The tree type of these arguments may be
447 specific to each assertion_code. */
449 void
450 add_type_assertion (tree klass, int assertion_code, tree op1, tree op2)
452 htab_t assertions_htab;
453 type_assertion as;
454 void **as_pp;
456 assertions_htab = TYPE_ASSERTIONS (klass);
457 if (assertions_htab == NULL)
459 assertions_htab = htab_create_ggc (7, type_assertion_hash,
460 type_assertion_eq, NULL);
461 TYPE_ASSERTIONS (current_class) = assertions_htab;
464 as.assertion_code = assertion_code;
465 as.op1 = op1;
466 as.op2 = op2;
468 as_pp = htab_find_slot (assertions_htab, &as, INSERT);
470 /* Don't add the same assertion twice. */
471 if (*as_pp)
472 return;
474 *as_pp = ggc_alloc<type_assertion> ();
475 **(type_assertion **)as_pp = as;
479 /* Return 1 if SOURCE_TYPE can be safely widened to TARGET_TYPE.
480 Handles array types and interfaces. */
482 int
483 can_widen_reference_to (tree source_type, tree target_type)
485 if (source_type == ptr_type_node || target_type == object_ptr_type_node)
486 return 1;
488 /* Get rid of pointers */
489 if (TREE_CODE (source_type) == POINTER_TYPE)
490 source_type = TREE_TYPE (source_type);
491 if (TREE_CODE (target_type) == POINTER_TYPE)
492 target_type = TREE_TYPE (target_type);
494 if (source_type == target_type)
495 return 1;
497 /* FIXME: This is very pessimistic, in that it checks everything,
498 even if we already know that the types are compatible. If we're
499 to support full Java class loader semantics, we need this.
500 However, we could do something more optimal. */
501 if (! flag_verify_invocations)
503 add_type_assertion (current_class, JV_ASSERT_TYPES_COMPATIBLE,
504 source_type, target_type);
506 if (!quiet_flag)
507 warning (0, "assert: %s is assign compatible with %s",
508 xstrdup (lang_printable_name (target_type, 0)),
509 xstrdup (lang_printable_name (source_type, 0)));
510 /* Punt everything to runtime. */
511 return 1;
514 if (TYPE_DUMMY (source_type) || TYPE_DUMMY (target_type))
516 return 1;
518 else
520 if (TYPE_ARRAY_P (source_type) || TYPE_ARRAY_P (target_type))
522 HOST_WIDE_INT source_length, target_length;
523 if (TYPE_ARRAY_P (source_type) != TYPE_ARRAY_P (target_type))
525 /* An array implements Cloneable and Serializable. */
526 tree name = DECL_NAME (TYPE_NAME (target_type));
527 return (name == java_lang_cloneable_identifier_node
528 || name == java_io_serializable_identifier_node);
530 target_length = java_array_type_length (target_type);
531 if (target_length >= 0)
533 source_length = java_array_type_length (source_type);
534 if (source_length != target_length)
535 return 0;
537 source_type = TYPE_ARRAY_ELEMENT (source_type);
538 target_type = TYPE_ARRAY_ELEMENT (target_type);
539 if (source_type == target_type)
540 return 1;
541 if (TREE_CODE (source_type) != POINTER_TYPE
542 || TREE_CODE (target_type) != POINTER_TYPE)
543 return 0;
544 return can_widen_reference_to (source_type, target_type);
546 else
548 int source_depth = class_depth (source_type);
549 int target_depth = class_depth (target_type);
551 if (TYPE_DUMMY (source_type) || TYPE_DUMMY (target_type))
553 if (! quiet_flag)
554 warning (0, "assert: %s is assign compatible with %s",
555 xstrdup (lang_printable_name (target_type, 0)),
556 xstrdup (lang_printable_name (source_type, 0)));
557 return 1;
560 /* class_depth can return a negative depth if an error occurred */
561 if (source_depth < 0 || target_depth < 0)
562 return 0;
564 if (CLASS_INTERFACE (TYPE_NAME (target_type)))
566 /* target_type is OK if source_type or one of its ancestors
567 implements target_type. We handle multiple sub-interfaces. */
568 tree binfo, base_binfo;
569 int i;
571 for (binfo = TYPE_BINFO (source_type), i = 0;
572 BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
573 if (can_widen_reference_to
574 (BINFO_TYPE (base_binfo), target_type))
575 return 1;
577 if (!i)
578 return 0;
581 for ( ; source_depth > target_depth; source_depth--)
583 source_type
584 = BINFO_TYPE (BINFO_BASE_BINFO (TYPE_BINFO (source_type), 0));
586 return source_type == target_type;
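/* Editor's note (illustrative, not part of the original source): examples
   of what the checks above accept -- `String' widens to `Object' by
   stripping superclass levels until the class depths match; `String[]'
   widens to `Cloneable' or `Serializable' because every array type
   implements both; and `String[]' widens to `Object[]' by recursing on
   the element types.  */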
591 static tree
592 pop_value (tree type)
594 type = pop_type (type);
595 if (vec_safe_length (quick_stack) != 0)
596 return quick_stack->pop ();
597 else
598 return find_stack_slot (stack_pointer, promote_type (type));
602 /* Pop and discard the top COUNT stack slots. */
604 static void
605 java_stack_pop (int count)
607 while (count > 0)
609 tree type;
611 gcc_assert (stack_pointer != 0);
613 type = stack_type_map[stack_pointer - 1];
614 if (type == TYPE_SECOND)
616 count--;
617 gcc_assert (stack_pointer != 1 && count > 0);
619 type = stack_type_map[stack_pointer - 2];
621 pop_value (type);
622 count--;
626 /* Implement the 'swap' operator (to swap the top two stack slots). */
628 static void
629 java_stack_swap (void)
631 tree type1, type2;
632 tree temp;
633 tree decl1, decl2;
635 if (stack_pointer < 2
636 || (type1 = stack_type_map[stack_pointer - 1]) == TYPE_SECOND
637 || (type2 = stack_type_map[stack_pointer - 2]) == TYPE_SECOND
638 || TYPE_IS_WIDE (type1) || TYPE_IS_WIDE (type2))
639 /* Bad stack swap. */
640 abort ();
643 flush_quick_stack ();
644 decl1 = find_stack_slot (stack_pointer - 1, type1);
645 decl2 = find_stack_slot (stack_pointer - 2, type2);
646 temp = build_decl (input_location, VAR_DECL, NULL_TREE, type1);
647 java_add_local_var (temp);
648 java_add_stmt (build2 (MODIFY_EXPR, type1, temp, decl1));
649 java_add_stmt (build2 (MODIFY_EXPR, type2,
650 find_stack_slot (stack_pointer - 1, type2),
651 decl2));
652 java_add_stmt (build2 (MODIFY_EXPR, type1,
653 find_stack_slot (stack_pointer - 2, type1),
654 temp));
655 stack_type_map[stack_pointer - 1] = type2;
656 stack_type_map[stack_pointer - 2] = type1;
659 static void
660 java_stack_dup (int size, int offset)
662 int low_index = stack_pointer - size - offset;
663 int dst_index;
664 if (low_index < 0)
665 error ("stack underflow - dup* operation");
667 flush_quick_stack ();
669 stack_pointer += size;
670 dst_index = stack_pointer;
672 for (dst_index = stack_pointer; --dst_index >= low_index; )
674 tree type;
675 int src_index = dst_index - size;
676 if (src_index < low_index)
677 src_index = dst_index + size + offset;
678 type = stack_type_map [src_index];
679 if (type == TYPE_SECOND)
681 /* Dup operation splits 64-bit number. */
682 gcc_assert (src_index > low_index);
684 stack_type_map[dst_index] = type;
685 src_index--; dst_index--;
686 type = stack_type_map[src_index];
687 gcc_assert (TYPE_IS_WIDE (type));
689 else
690 gcc_assert (! TYPE_IS_WIDE (type));
692 if (src_index != dst_index)
694 tree src_decl = find_stack_slot (src_index, type);
695 tree dst_decl = find_stack_slot (dst_index, type);
697 java_add_stmt
698 (build2 (MODIFY_EXPR, TREE_TYPE (dst_decl), dst_decl, src_decl));
699 stack_type_map[dst_index] = type;
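/* Editor's note (illustrative; the exact call sites are an assumption,
   the opcode semantics follow the JVM specification): SIZE is the number
   of stack words duplicated and OFFSET how far below the copies are
   inserted, so the dup family maps naturally as
       dup    -> (1, 0)   dup_x1  -> (1, 1)   dup_x2  -> (1, 2)
       dup2   -> (2, 0)   dup2_x1 -> (2, 1)   dup2_x2 -> (2, 2)
   e.g. dup_x1 on a stack (..., v2, v1) yields (..., v1, v2, v1).  */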
704 /* Calls _Jv_Throw or _Jv_Sjlj_Throw. Discard the contents of the
705 value stack. */
707 static void
708 build_java_athrow (tree node)
710 tree call;
712 call = build_call_nary (void_type_node,
713 build_address_of (throw_node),
714 1, node);
715 TREE_SIDE_EFFECTS (call) = 1;
716 java_add_stmt (call);
717 java_stack_pop (stack_pointer);
720 /* Implementation for jsr/ret */
722 static void
723 build_java_jsr (int target_pc, int return_pc)
725 tree where = lookup_label (target_pc);
726 tree ret = lookup_label (return_pc);
727 tree ret_label = fold_build1 (ADDR_EXPR, return_address_type_node, ret);
728 push_value (ret_label);
729 flush_quick_stack ();
730 java_add_stmt (build1 (GOTO_EXPR, void_type_node, where));
732 /* Do not need to emit the label here. We noted the existence of the
733 label as a jump target in note_instructions; we'll emit the label
734 for real at the beginning of the expand_byte_code loop. */
737 static void
738 build_java_ret (tree location)
740 java_add_stmt (build1 (GOTO_EXPR, void_type_node, location));
743 /* Implementation of operations on array: new, load, store, length */
745 tree
746 decode_newarray_type (int atype)
748 switch (atype)
750 case 4: return boolean_type_node;
751 case 5: return char_type_node;
752 case 6: return float_type_node;
753 case 7: return double_type_node;
754 case 8: return byte_type_node;
755 case 9: return short_type_node;
756 case 10: return int_type_node;
757 case 11: return long_type_node;
758 default: return NULL_TREE;
762 /* Map primitive type to the code used by OPCODE_newarray. */
764 int
765 encode_newarray_type (tree type)
767 if (type == boolean_type_node)
768 return 4;
769 else if (type == char_type_node)
770 return 5;
771 else if (type == float_type_node)
772 return 6;
773 else if (type == double_type_node)
774 return 7;
775 else if (type == byte_type_node)
776 return 8;
777 else if (type == short_type_node)
778 return 9;
779 else if (type == int_type_node)
780 return 10;
781 else if (type == long_type_node)
782 return 11;
783 else
784 gcc_unreachable ();
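/* Editor's note (illustrative, not part of the original source): the ATYPE
   codes used above are the operand values defined for the JVM `newarray'
   instruction -- T_BOOLEAN = 4, T_CHAR = 5, T_FLOAT = 6, T_DOUBLE = 7,
   T_BYTE = 8, T_SHORT = 9, T_INT = 10, T_LONG = 11 -- which is why
   decode_newarray_type and encode_newarray_type are exact inverses over
   this range.  */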
787 /* Build a call to _Jv_ThrowBadArrayIndex(), the
788 ArrayIndexOutOfBoundsException exception handler. */
790 static tree
791 build_java_throw_out_of_bounds_exception (tree index)
793 tree node;
795 /* We need to build a COMPOUND_EXPR because _Jv_ThrowBadArrayIndex()
796 has void return type. We cannot just set the type of the CALL_EXPR below
797 to int_type_node because we would lose it during gimplification. */
798 gcc_assert (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (soft_badarrayindex_node))));
799 node = build_call_nary (void_type_node,
800 build_address_of (soft_badarrayindex_node),
801 1, index);
802 TREE_SIDE_EFFECTS (node) = 1;
804 node = build2 (COMPOUND_EXPR, int_type_node, node, integer_zero_node);
805 TREE_SIDE_EFFECTS (node) = 1; /* Allows expansion within ANDIF */
807 return (node);
810 /* Return the length of an array. Doesn't perform any checking on the nature
811 or value of the array NODE. May be used to implement some bytecodes. */
813 tree
814 build_java_array_length_access (tree node)
816 tree type = TREE_TYPE (node);
817 tree array_type = TREE_TYPE (type);
818 HOST_WIDE_INT length;
820 if (!is_array_type_p (type))
822 /* With the new verifier, we will see an ordinary pointer type
823 here. In this case, we just use an arbitrary array type. */
824 array_type = build_java_array_type (object_ptr_type_node, -1);
825 type = promote_type (array_type);
828 length = java_array_type_length (type);
829 if (length >= 0)
830 return build_int_cst (NULL_TREE, length);
832 node = build3 (COMPONENT_REF, int_type_node,
833 build_java_indirect_ref (array_type, node,
834 flag_check_references),
835 lookup_field (&array_type, get_identifier ("length")),
836 NULL_TREE);
837 IS_ARRAY_LENGTH_ACCESS (node) = 1;
838 return node;
841 /* Optionally checks a reference against the NULL pointer. ARG1: the
842 expr, ARG2: whether we should check the reference. Don't generate extra
843 checks if we're not generating code. */
845 tree
846 java_check_reference (tree expr, int check)
848 if (!flag_syntax_only && check)
850 expr = save_expr (expr);
851 expr = build3 (COND_EXPR, TREE_TYPE (expr),
852 build2 (EQ_EXPR, boolean_type_node,
853 expr, null_pointer_node),
854 build_call_nary (void_type_node,
855 build_address_of (soft_nullpointer_node),
856 0),
857 expr);
860 return expr;
863 /* Reference an object: just like an INDIRECT_REF, but with checking. */
865 tree
866 build_java_indirect_ref (tree type, tree expr, int check)
868 tree t;
869 t = java_check_reference (expr, check);
870 t = convert (build_pointer_type (type), t);
871 return build1 (INDIRECT_REF, type, t);
874 /* Implement array indexing (either as l-value or r-value).
875 Returns a tree for ARRAY[INDEX], assuming TYPE is the element type.
876 Optionally performs bounds checking and/or a NULL test.
877 At this point, ARRAY should have been verified as an array. */
879 tree
880 build_java_arrayaccess (tree array, tree type, tree index)
882 tree node, throw_expr = NULL_TREE;
883 tree data_field;
884 tree ref;
885 tree array_type = TREE_TYPE (TREE_TYPE (array));
886 tree size_exp = fold_convert (sizetype, size_in_bytes (type));
888 if (!is_array_type_p (TREE_TYPE (array)))
890 /* With the new verifier, we will see an ordinary pointer type
891 here. In this case, we just use the correct array type. */
892 array_type = build_java_array_type (type, -1);
895 if (flag_bounds_check)
897 /* Generate:
898 * (unsigned jint) INDEX >= (unsigned jint) LEN
899 * && throw ArrayIndexOutOfBoundsException.
900 * Note this is equivalent to and more efficient than:
901 * INDEX < 0 || INDEX >= LEN && throw ... */
902 tree test;
903 tree len = convert (unsigned_int_type_node,
904 build_java_array_length_access (array));
905 test = fold_build2 (GE_EXPR, boolean_type_node,
906 convert (unsigned_int_type_node, index),
907 len);
908 if (! integer_zerop (test))
910 throw_expr
911 = build2 (TRUTH_ANDIF_EXPR, int_type_node, test,
912 build_java_throw_out_of_bounds_exception (index));
913 /* allows expansion within COMPOUND */
914 TREE_SIDE_EFFECTS( throw_expr ) = 1;
918 /* If checking bounds, wrap the index expr with a COMPOUND_EXPR in order
919 to have the bounds check evaluated first. */
920 if (throw_expr != NULL_TREE)
921 index = build2 (COMPOUND_EXPR, int_type_node, throw_expr, index);
923 data_field = lookup_field (&array_type, get_identifier ("data"));
925 ref = build3 (COMPONENT_REF, TREE_TYPE (data_field),
926 build_java_indirect_ref (array_type, array,
927 flag_check_references),
928 data_field, NULL_TREE);
930 /* Take the address of the data field and convert it to a pointer to
931 the element type. */
932 node = build1 (NOP_EXPR, build_pointer_type (type), build_address_of (ref));
934 /* Multiply the index by the size of an element to obtain a byte
935 offset. Convert the result to a pointer to the element type. */
936 index = build2 (MULT_EXPR, sizetype,
937 fold_convert (sizetype, index),
938 size_exp);
940 /* Sum the byte offset and the address of the data field. */
941 node = fold_build_pointer_plus (node, index);
943 /* Finally, return
945 *((&array->data) + index*size_exp)
948 return build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (node)), node);
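/* Editor's sketch (not part of the original source): the single unsigned
   comparison generated above subsumes the two signed tests
   `index < 0 || index >= len', because a negative jint reinterpreted as
   unsigned becomes at least 0x80000000 and therefore compares >= any
   legal array length.  A standalone illustration in plain C: */

static int
example_java_index_out_of_bounds (int index, int len)
{
  /* One compare instead of two; LEN is assumed non-negative.  */
  return (unsigned int) index >= (unsigned int) len;
}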
951 /* Generate code to throw an ArrayStoreException if OBJECT is not assignable
952 (at runtime) to an element of ARRAY. A NOP_EXPR is returned if it can
953 determine that no check is required. */
955 tree
956 build_java_arraystore_check (tree array, tree object)
958 tree check, element_type, source;
959 tree array_type_p = TREE_TYPE (array);
960 tree object_type = TYPE_NAME (TREE_TYPE (TREE_TYPE (object)));
962 if (! flag_verify_invocations)
964 /* With the new verifier, we don't track precise types. FIXME:
965 performance regression here. */
966 element_type = TYPE_NAME (object_type_node);
968 else
970 gcc_assert (is_array_type_p (array_type_p));
972 /* Get the TYPE_DECL for ARRAY's element type. */
973 element_type
974 = TYPE_NAME (TREE_TYPE (TREE_TYPE (TREE_TYPE (array_type_p))));
977 gcc_assert (TREE_CODE (element_type) == TYPE_DECL
978 && TREE_CODE (object_type) == TYPE_DECL);
980 if (!flag_store_check)
981 return build1 (NOP_EXPR, array_type_p, array);
983 /* No check is needed if the element type is final. Also check that
984 element_type matches object_type, since in the bytecode
985 compilation case element_type may be the actual element type of
986 the array rather than its declared type. However, if we're doing
987 indirect dispatch, we can't do the `final' optimization. */
988 if (element_type == object_type
989 && ! flag_indirect_dispatch
990 && CLASS_FINAL (element_type))
991 return build1 (NOP_EXPR, array_type_p, array);
993 /* OBJECT might be wrapped by a SAVE_EXPR. */
994 if (TREE_CODE (object) == SAVE_EXPR)
995 source = TREE_OPERAND (object, 0);
996 else
997 source = object;
999 /* Avoid the check if OBJECT was just loaded from the same array. */
1000 if (TREE_CODE (source) == ARRAY_REF)
1002 tree target;
1003 source = TREE_OPERAND (source, 0); /* COMPONENT_REF. */
1004 source = TREE_OPERAND (source, 0); /* INDIRECT_REF. */
1005 source = TREE_OPERAND (source, 0); /* Source array's DECL or SAVE_EXPR. */
1006 if (TREE_CODE (source) == SAVE_EXPR)
1007 source = TREE_OPERAND (source, 0);
1009 target = array;
1010 if (TREE_CODE (target) == SAVE_EXPR)
1011 target = TREE_OPERAND (target, 0);
1013 if (source == target)
1014 return build1 (NOP_EXPR, array_type_p, array);
1017 /* Build an invocation of _Jv_CheckArrayStore */
1018 check = build_call_nary (void_type_node,
1019 build_address_of (soft_checkarraystore_node),
1020 2, array, object);
1021 TREE_SIDE_EFFECTS (check) = 1;
1023 return check;
1026 /* Makes sure that INDEXED_TYPE is appropriate. If not, make it from
1027 ARRAY_NODE. This function is used to retrieve something less vague than
1028 a pointer type when indexing the first dimension of something like [[<t>.
1029 May return a corrected type, if necessary, otherwise INDEXED_TYPE is
1030 returned unchanged. */
1032 static tree
1033 build_java_check_indexed_type (tree array_node ATTRIBUTE_UNUSED,
1034 tree indexed_type)
1036 /* We used to check to see if ARRAY_NODE really had array type.
1037 However, with the new verifier, this is not necessary, as we know
1038 that the object will be an array of the appropriate type. */
1040 return indexed_type;
1043 /* newarray triggers a call to _Jv_NewPrimArray. This function should be
1044 called with an integer code (the type of array to create), and the length
1045 of the array to create. */
1047 tree
1048 build_newarray (int atype_value, tree length)
1050 tree type_arg;
1052 tree prim_type = decode_newarray_type (atype_value);
1053 tree type
1054 = build_java_array_type (prim_type,
1055 tree_fits_shwi_p (length)
1056 ? tree_to_shwi (length) : -1);
1058 /* Pass a reference to the primitive type class and save the runtime
1059 some work. */
1060 type_arg = build_class_ref (prim_type);
1062 return build_call_nary (promote_type (type),
1063 build_address_of (soft_newarray_node),
1064 2, type_arg, length);
1067 /* Generates anewarray from a given CLASS_TYPE. Gets from the stack the size
1068 of the dimension. */
1070 tree
1071 build_anewarray (tree class_type, tree length)
1073 tree type
1074 = build_java_array_type (class_type,
1075 tree_fits_shwi_p (length)
1076 ? tree_to_shwi (length) : -1);
1078 return build_call_nary (promote_type (type),
1079 build_address_of (soft_anewarray_node),
1080 3,
1081 length,
1082 build_class_ref (class_type),
1083 null_pointer_node);
1086 /* Return a node that evaluates 'new TYPE[LENGTH]'. */
1088 tree
1089 build_new_array (tree type, tree length)
1091 if (JPRIMITIVE_TYPE_P (type))
1092 return build_newarray (encode_newarray_type (type), length);
1093 else
1094 return build_anewarray (TREE_TYPE (type), length);
1097 /* Generates a call to _Jv_NewMultiArray. multianewarray expects a
1098 class pointer, the number of dimensions, and the matching number of
1099 dimension sizes. The argument list is NULL terminated. */
1101 static void
1102 expand_java_multianewarray (tree class_type, int ndim)
1104 int i;
1105 vec<tree, va_gc> *args = NULL;
1107 vec_safe_grow (args, 3 + ndim);
1109 (*args)[0] = build_class_ref (class_type);
1110 (*args)[1] = build_int_cst (NULL_TREE, ndim);
1112 for(i = ndim - 1; i >= 0; i-- )
1113 (*args)[(unsigned)(2 + i)] = pop_value (int_type_node);
1115 (*args)[2 + ndim] = null_pointer_node;
1117 push_value (build_call_vec (promote_type (class_type),
1118 build_address_of (soft_multianewarray_node),
1119 args));
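/* Editor's note (illustrative, not part of the original source): for
   `new int[2][3]' (ndim == 2) the code above builds the argument vector
       args[0] = class ref for the array class      args[2] = 2  (outer dim)
       args[1] = 2  (ndim)                          args[3] = 3  (inner dim)
       args[4] = null_pointer_node  (terminator)
   since the dimensions are popped innermost-first and stored from the
   high index downwards.  */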
1122 /* ARRAY[INDEX] <- RHS. build_java_check_indexed_type makes sure that
1123 ARRAY is an array type. May expand some bound checking and NULL
1124 pointer checking. RHS_TYPE_NODE is the type we are going to store. In the
1125 case of CHAR/BYTE/BOOLEAN/SHORT, the type popped off the stack is an
1126 INT; in those cases, we make the conversion.
1128 If ARRAY is a reference type, the assignment is checked at run-time
1129 to make sure that the RHS can be assigned to the array element
1130 type. It is not necessary to generate this code if ARRAY is final. */
1132 static void
1133 expand_java_arraystore (tree rhs_type_node)
1135 tree rhs_node = pop_value ((INTEGRAL_TYPE_P (rhs_type_node)
1136 && TYPE_PRECISION (rhs_type_node) <= 32) ?
1137 int_type_node : rhs_type_node);
1138 tree index = pop_value (int_type_node);
1139 tree array_type, array, temp, access;
1141 /* If we're processing an `aastore' we might as well just pick
1142 `Object'. */
1143 if (TREE_CODE (rhs_type_node) == POINTER_TYPE)
1145 array_type = build_java_array_type (object_ptr_type_node, -1);
1146 rhs_type_node = object_ptr_type_node;
1148 else
1149 array_type = build_java_array_type (rhs_type_node, -1);
1151 array = pop_value (array_type);
1152 array = build1 (NOP_EXPR, promote_type (array_type), array);
1154 rhs_type_node = build_java_check_indexed_type (array, rhs_type_node);
1156 flush_quick_stack ();
1158 index = save_expr (index);
1159 array = save_expr (array);
1161 /* We want to perform the bounds check (done by
1162 build_java_arrayaccess) before the type check (done by
1163 build_java_arraystore_check). So, we call build_java_arrayaccess
1164 -- which returns an ARRAY_REF lvalue -- and we then generate code
1165 to stash the address of that lvalue in a temp. Then we call
1166 build_java_arraystore_check, and finally we generate a
1167 MODIFY_EXPR to set the array element. */
1169 access = build_java_arrayaccess (array, rhs_type_node, index);
1170 temp = build_decl (input_location, VAR_DECL, NULL_TREE,
1171 build_pointer_type (TREE_TYPE (access)));
1172 java_add_local_var (temp);
1173 java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (temp),
1174 temp,
1175 build_fold_addr_expr (access)));
1177 if (TREE_CODE (rhs_type_node) == POINTER_TYPE)
1179 tree check = build_java_arraystore_check (array, rhs_node);
1180 java_add_stmt (check);
1183 java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (access),
1184 build1 (INDIRECT_REF, TREE_TYPE (access), temp),
1185 rhs_node));
1188 /* Expand the evaluation of ARRAY[INDEX]. build_java_check_indexed_type makes
1189 sure that LHS is an array type. May expand some bound checking and NULL
1190 pointer checking.
1191 LHS_TYPE_NODE is the type of ARRAY[INDEX]. But in the case of CHAR/BYTE/
1192 BOOLEAN/SHORT, we push a promoted type back to the stack.
1195 static void
1196 expand_java_arrayload (tree lhs_type_node)
1198 tree load_node;
1199 tree index_node = pop_value (int_type_node);
1200 tree array_type;
1201 tree array_node;
1203 /* If we're processing an `aaload' we might as well just pick
1204 `Object'. */
1205 if (TREE_CODE (lhs_type_node) == POINTER_TYPE)
1207 array_type = build_java_array_type (object_ptr_type_node, -1);
1208 lhs_type_node = object_ptr_type_node;
1210 else
1211 array_type = build_java_array_type (lhs_type_node, -1);
1212 array_node = pop_value (array_type);
1213 array_node = build1 (NOP_EXPR, promote_type (array_type), array_node);
1215 index_node = save_expr (index_node);
1216 array_node = save_expr (array_node);
1218 lhs_type_node = build_java_check_indexed_type (array_node,
1219 lhs_type_node);
1220 load_node = build_java_arrayaccess (array_node,
1221 lhs_type_node,
1222 index_node);
1223 if (INTEGRAL_TYPE_P (lhs_type_node) && TYPE_PRECISION (lhs_type_node) <= 32)
1224 load_node = fold_build1 (NOP_EXPR, int_type_node, load_node);
1225 push_value (load_node);
1228 /* Expands .length. Makes sure that we deal with an array and may expand
1229 a NULL check on the array object. */
1231 static void
1232 expand_java_array_length (void)
1234 tree array = pop_value (ptr_type_node);
1235 tree length = build_java_array_length_access (array);
1237 push_value (length);
1240 /* Emit code for the call to _Jv_Monitor{Enter,Exit}. CALL can be
1241 either soft_monitorenter_node or soft_monitorexit_node. */
1243 static tree
1244 build_java_monitor (tree call, tree object)
1246 return build_call_nary (void_type_node,
1247 build_address_of (call),
1248 1, object);
1251 /* Emit code for one of the PUSHC instructions. */
1253 static void
1254 expand_java_pushc (int ival, tree type)
1256 tree value;
1257 if (type == ptr_type_node && ival == 0)
1258 value = null_pointer_node;
1259 else if (type == int_type_node || type == long_type_node)
1260 value = build_int_cst (type, ival);
1261 else if (type == float_type_node || type == double_type_node)
1263 REAL_VALUE_TYPE x;
1264 real_from_integer (&x, TYPE_MODE (type), ival, SIGNED);
1265 value = build_real (type, x);
1267 else
1268 gcc_unreachable ();
1270 push_value (value);
1273 static void
1274 expand_java_return (tree type)
1276 if (type == void_type_node)
1277 java_add_stmt (build1 (RETURN_EXPR, void_type_node, NULL));
1278 else
1280 tree retval = pop_value (type);
1281 tree res = DECL_RESULT (current_function_decl);
1282 retval = build2 (MODIFY_EXPR, TREE_TYPE (res), res, retval);
1284 /* Handle the situation where the native integer type is smaller
1285 than the JVM integer. It can happen for many cross compilers.
1286 The whole if expression just goes away if INT_TYPE_SIZE < 32
1287 is false. */
1288 if (INT_TYPE_SIZE < 32
1289 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (res)))
1290 < GET_MODE_SIZE (TYPE_MODE (type))))
1291 retval = build1(NOP_EXPR, TREE_TYPE(res), retval);
1293 TREE_SIDE_EFFECTS (retval) = 1;
1294 java_add_stmt (build1 (RETURN_EXPR, void_type_node, retval));
1298 static void
1299 expand_load_internal (int index, tree type, int pc)
1301 tree copy;
1302 tree var = find_local_variable (index, type, pc);
1304 /* Now VAR is the VAR_DECL (or PARM_DECL) that we are going to push
1305 on the stack. If there is an assignment to this VAR_DECL between
1306 the stack push and the use, then the wrong code could be
1307 generated. To avoid this we create a new local and copy our
1308 value into it. Then we push this new local on the stack.
1309 Hopefully this all gets optimized out. */
1310 copy = build_decl (input_location, VAR_DECL, NULL_TREE, type);
1311 if ((INTEGRAL_TYPE_P (type) || POINTER_TYPE_P (type))
1312 && TREE_TYPE (copy) != TREE_TYPE (var))
1313 var = convert (type, var);
1314 java_add_local_var (copy);
1315 java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (var), copy, var));
1317 push_value (copy);
1320 tree
1321 build_address_of (tree value)
1323 return build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (value)), value);
1326 bool
1327 class_has_finalize_method (tree type)
1329 tree super = CLASSTYPE_SUPER (type);
1331 if (super == NULL_TREE)
1332 return false; /* Every class with a real finalizer inherits */
1333 /* from java.lang.Object. */
1334 else
1335 return HAS_FINALIZER_P (type) || class_has_finalize_method (super);
1338 tree
1339 java_create_object (tree type)
1341 tree alloc_node = (class_has_finalize_method (type)
1342 ? alloc_object_node
1343 : alloc_no_finalizer_node);
1345 return build_call_nary (promote_type (type),
1346 build_address_of (alloc_node),
1347 1, build_class_ref (type));
1350 static void
1351 expand_java_NEW (tree type)
1353 tree alloc_node;
1355 alloc_node = (class_has_finalize_method (type) ? alloc_object_node
1356 : alloc_no_finalizer_node);
1357 if (! CLASS_LOADED_P (type))
1358 load_class (type, 1);
1359 safe_layout_class (type);
1360 push_value (build_call_nary (promote_type (type),
1361 build_address_of (alloc_node),
1362 1, build_class_ref (type)));
1365 /* This returns an expression which will extract the class of an
1366 object. */
1368 tree
1369 build_get_class (tree value)
1371 tree class_field = lookup_field (&dtable_type, get_identifier ("class"));
1372 tree vtable_field = lookup_field (&object_type_node,
1373 get_identifier ("vtable"));
1374 tree tmp = build3 (COMPONENT_REF, dtable_ptr_type,
1375 build_java_indirect_ref (object_type_node, value,
1376 flag_check_references),
1377 vtable_field, NULL_TREE);
1378 return build3 (COMPONENT_REF, class_ptr_type,
1379 build1 (INDIRECT_REF, dtable_type, tmp),
1380 class_field, NULL_TREE);
1383 /* This builds the tree representation of the `instanceof' operator.
1384 It tries various tricks to optimize this in cases where types are
1385 known. */
1387 tree
1388 build_instanceof (tree value, tree type)
1390 tree expr;
1391 tree itype = TREE_TYPE (TREE_TYPE (soft_instanceof_node));
1392 tree valtype = TREE_TYPE (TREE_TYPE (value));
1393 tree valclass = TYPE_NAME (valtype);
1394 tree klass;
1396 /* When compiling from bytecode, we need to ensure that TYPE has
1397 been loaded. */
1398 if (CLASS_P (type) && ! CLASS_LOADED_P (type))
1400 load_class (type, 1);
1401 safe_layout_class (type);
1402 if (! TYPE_SIZE (type) || TREE_CODE (TYPE_SIZE (type)) == ERROR_MARK)
1403 return error_mark_node;
1405 klass = TYPE_NAME (type);
1407 if (type == object_type_node || inherits_from_p (valtype, type))
1409 /* Anything except `null' is an instance of Object. Likewise,
1410 if the object is known to be an instance of the class, then
1411 we only need to check for `null'. */
1412 expr = build2 (NE_EXPR, itype, value, null_pointer_node);
1414 else if (flag_verify_invocations
1415 && ! TYPE_ARRAY_P (type)
1416 && ! TYPE_ARRAY_P (valtype)
1417 && DECL_P (klass) && DECL_P (valclass)
1418 && ! CLASS_INTERFACE (valclass)
1419 && ! CLASS_INTERFACE (klass)
1420 && ! inherits_from_p (type, valtype)
1421 && (CLASS_FINAL (klass)
1422 || ! inherits_from_p (valtype, type)))
1424 /* The classes are from different branches of the derivation
1425 tree, so we immediately know the answer. */
1426 expr = boolean_false_node;
1428 else if (DECL_P (klass) && CLASS_FINAL (klass))
1430 tree save = save_expr (value);
1431 expr = build3 (COND_EXPR, itype,
1432 build2 (NE_EXPR, boolean_type_node,
1433 save, null_pointer_node),
1434 build2 (EQ_EXPR, itype,
1435 build_get_class (save),
1436 build_class_ref (type)),
1437 boolean_false_node);
1439 else
1441 expr = build_call_nary (itype,
1442 build_address_of (soft_instanceof_node),
1443 2, value, build_class_ref (type));
1445 TREE_SIDE_EFFECTS (expr) = TREE_SIDE_EFFECTS (value);
1446 return expr;
1449 static void
1450 expand_java_INSTANCEOF (tree type)
1452 tree value = pop_value (object_ptr_type_node);
1453 value = build_instanceof (value, type);
1454 push_value (value);
1457 static void
1458 expand_java_CHECKCAST (tree type)
1460 tree value = pop_value (ptr_type_node);
1461 value = build_call_nary (promote_type (type),
1462 build_address_of (soft_checkcast_node),
1463 2, build_class_ref (type), value);
1464 push_value (value);
1467 static void
1468 expand_iinc (unsigned int local_var_index, int ival, int pc)
1470 tree local_var, res;
1471 tree constant_value;
1473 flush_quick_stack ();
1474 local_var = find_local_variable (local_var_index, int_type_node, pc);
1475 constant_value = build_int_cst (NULL_TREE, ival);
1476 res = fold_build2 (PLUS_EXPR, int_type_node, local_var, constant_value);
1477 java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (local_var), local_var, res));
1481 tree
1482 build_java_soft_divmod (enum tree_code op, tree type, tree op1, tree op2)
1484 tree call = NULL;
1485 tree arg1 = convert (type, op1);
1486 tree arg2 = convert (type, op2);
1488 if (type == int_type_node)
1490 switch (op)
1492 case TRUNC_DIV_EXPR:
1493 call = soft_idiv_node;
1494 break;
1495 case TRUNC_MOD_EXPR:
1496 call = soft_irem_node;
1497 break;
1498 default:
1499 break;
1502 else if (type == long_type_node)
1504 switch (op)
1506 case TRUNC_DIV_EXPR:
1507 call = soft_ldiv_node;
1508 break;
1509 case TRUNC_MOD_EXPR:
1510 call = soft_lrem_node;
1511 break;
1512 default:
1513 break;
1517 gcc_assert (call);
1518 call = build_call_nary (type, build_address_of (call), 2, arg1, arg2);
1519 return call;
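/* Editor's sketch (not part of the original source, and not the actual
   libgcj routine): the soft div/rem helpers exist because Java integer
   division differs from a bare hardware divide -- dividing by zero must
   throw ArithmeticException, and INT_MIN / -1 must wrap to INT_MIN rather
   than trap on overflow.  A semantic sketch of the 32-bit divide helper;
   example_throw_arithmetic_exception is a hypothetical stand-in for the
   runtime's throw routine: */

extern void example_throw_arithmetic_exception (void);  /* hypothetical */

static int
example_soft_idiv (int num, int den)
{
  if (den == 0)
    example_throw_arithmetic_exception ();
  if (num == (-2147483647 - 1) && den == -1)
    return num;  /* Overflow case mandated by the JLS: result == dividend.  */
  return num / den;
}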
1522 tree
1523 build_java_binop (enum tree_code op, tree type, tree arg1, tree arg2)
1525 tree mask;
1526 switch (op)
1528 case URSHIFT_EXPR:
1530 tree u_type = unsigned_type_for (type);
1531 arg1 = convert (u_type, arg1);
1532 arg1 = build_java_binop (RSHIFT_EXPR, u_type, arg1, arg2);
1533 return convert (type, arg1);
1535 case LSHIFT_EXPR:
1536 case RSHIFT_EXPR:
1537 mask = build_int_cst (int_type_node,
1538 TYPE_PRECISION (TREE_TYPE (arg1)) - 1);
1539 arg2 = fold_build2 (BIT_AND_EXPR, int_type_node, arg2, mask);
1540 break;
1542 case COMPARE_L_EXPR: /* arg1 > arg2 ? 1 : arg1 == arg2 ? 0 : -1 */
1543 case COMPARE_G_EXPR: /* arg1 < arg2 ? -1 : arg1 == arg2 ? 0 : 1 */
1544 arg1 = save_expr (arg1); arg2 = save_expr (arg2);
1546 tree ifexp1 = fold_build2 (op == COMPARE_L_EXPR ? GT_EXPR : LT_EXPR,
1547 boolean_type_node, arg1, arg2);
1548 tree ifexp2 = fold_build2 (EQ_EXPR, boolean_type_node, arg1, arg2);
1549 tree second_compare = fold_build3 (COND_EXPR, int_type_node,
1550 ifexp2, integer_zero_node,
1551 op == COMPARE_L_EXPR
1552 ? integer_minus_one_node
1553 : integer_one_node);
1554 return fold_build3 (COND_EXPR, int_type_node, ifexp1,
1555 op == COMPARE_L_EXPR ? integer_one_node
1556 : integer_minus_one_node,
1557 second_compare);
1559 case COMPARE_EXPR:
1560 arg1 = save_expr (arg1); arg2 = save_expr (arg2);
1562 tree ifexp1 = fold_build2 (LT_EXPR, boolean_type_node, arg1, arg2);
1563 tree ifexp2 = fold_build2 (GT_EXPR, boolean_type_node, arg1, arg2);
1564 tree second_compare = fold_build3 (COND_EXPR, int_type_node,
1565 ifexp2, integer_one_node,
1566 integer_zero_node);
1567 return fold_build3 (COND_EXPR, int_type_node,
1568 ifexp1, integer_minus_one_node, second_compare);
1570 case TRUNC_DIV_EXPR:
1571 case TRUNC_MOD_EXPR:
1572 if (TREE_CODE (type) == REAL_TYPE
1573 && op == TRUNC_MOD_EXPR)
1575 tree call;
1576 if (type != double_type_node)
1578 arg1 = convert (double_type_node, arg1);
1579 arg2 = convert (double_type_node, arg2);
1581 call = build_call_nary (double_type_node,
1582 build_address_of (soft_fmod_node),
1583 2, arg1, arg2);
1584 if (type != double_type_node)
1585 call = convert (type, call);
1586 return call;
1589 if (TREE_CODE (type) == INTEGER_TYPE
1590 && flag_use_divide_subroutine
1591 && ! flag_syntax_only)
1592 return build_java_soft_divmod (op, type, arg1, arg2);
1594 break;
1595 default: ;
1597 return fold_build2 (op, type, arg1, arg2);
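/* Editor's sketch (not part of the original source): COMPARE_L_EXPR and
   COMPARE_G_EXPR differ only in how an unordered (NaN) operand falls out,
   which is exactly the fcmpl/fcmpg (and dcmpl/dcmpg) distinction in the
   JVM: the `l' form yields -1 for NaN, the `g' form yields 1.  A plain-C
   illustration of the COND_EXPR nests built above: */

static int
example_fcmpl (float a, float b)
{
  return a > b ? 1 : (a == b ? 0 : -1);  /* NaN falls through to -1.  */
}

static int
example_fcmpg (float a, float b)
{
  return a < b ? -1 : (a == b ? 0 : 1);  /* NaN falls through to 1.  */
}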
1600 static void
1601 expand_java_binop (tree type, enum tree_code op)
1603 tree larg, rarg;
1604 tree ltype = type;
1605 tree rtype = type;
1606 switch (op)
1608 case LSHIFT_EXPR:
1609 case RSHIFT_EXPR:
1610 case URSHIFT_EXPR:
1611 rtype = int_type_node;
1612 rarg = pop_value (rtype);
1613 break;
1614 default:
1615 rarg = pop_value (rtype);
1617 larg = pop_value (ltype);
1618 push_value (build_java_binop (op, type, larg, rarg));
1621 /* Look up the field named NAME in *TYPEP or its superclasses.
1622 If not found, return NULL_TREE.
1623 (If the *TYPEP is not found, or if the field reference is
1624 ambiguous, return error_mark_node.)
1625 If found, return the FIELD_DECL, and set *TYPEP to the
1626 class containing the field. */
1628 tree
1629 lookup_field (tree *typep, tree name)
1631 if (CLASS_P (*typep) && !CLASS_LOADED_P (*typep))
1633 load_class (*typep, 1);
1634 safe_layout_class (*typep);
1635 if (!TYPE_SIZE (*typep) || TREE_CODE (TYPE_SIZE (*typep)) == ERROR_MARK)
1636 return error_mark_node;
1640 tree field, binfo, base_binfo;
1641 tree save_field;
1642 int i;
1644 for (field = TYPE_FIELDS (*typep); field; field = DECL_CHAIN (field))
1645 if (DECL_NAME (field) == name)
1646 return field;
1648 /* Process implemented interfaces. */
1649 save_field = NULL_TREE;
1650 for (binfo = TYPE_BINFO (*typep), i = 0;
1651 BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
1653 tree t = BINFO_TYPE (base_binfo);
1654 if ((field = lookup_field (&t, name)))
1656 if (save_field == field)
1657 continue;
1658 if (save_field == NULL_TREE)
1659 save_field = field;
1660 else
1662 tree i1 = DECL_CONTEXT (save_field);
1663 tree i2 = DECL_CONTEXT (field);
1664 error ("reference %qs is ambiguous: appears in interface %qs and interface %qs",
1665 IDENTIFIER_POINTER (name),
1666 IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (i1))),
1667 IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (i2))));
1668 return error_mark_node;
1673 if (save_field != NULL_TREE)
1674 return save_field;
1676 *typep = CLASSTYPE_SUPER (*typep);
1677 } while (*typep);
1678 return NULL_TREE;
1681 /* Look up the field named NAME in object SELF_VALUE,
1682 which has class SELF_CLASS (a non-handle RECORD_TYPE).
1683 SELF_VALUE is NULL_TREE if looking for a static field. */
1685 tree
1686 build_field_ref (tree self_value, tree self_class, tree name)
1688 tree base_class = self_class;
1689 tree field_decl = lookup_field (&base_class, name);
1690 if (field_decl == NULL_TREE)
1692 error ("field %qs not found", IDENTIFIER_POINTER (name));
1693 return error_mark_node;
1695 if (self_value == NULL_TREE)
1697 return build_static_field_ref (field_decl);
1699 else
1701 tree base_type = promote_type (base_class);
1703 /* CHECK is true if self_value is not the this pointer. */
1704 int check = (! (DECL_P (self_value)
1705 && DECL_NAME (self_value) == this_identifier_node));
1707 /* Determine whether a field offset from NULL will lie within
1708 Page 0: this is necessary on those GNU/Linux/BSD systems that
1709 trap SEGV to generate NullPointerExceptions.
1711 We assume that Page 0 will be mapped with NOPERM, and that
1712 memory may be allocated from any other page, so only field
1713 offsets < pagesize are guaranteed to trap. We also assume
1714 the smallest page size we'll encounter is 4k bytes. */
1715 if (! flag_syntax_only && check && ! flag_check_references
1716 && ! flag_indirect_dispatch)
1718 tree field_offset = byte_position (field_decl);
1719 if (! page_size)
1720 page_size = size_int (4096);
1721 check = !tree_int_cst_lt (field_offset, page_size);
1724 if (base_type != TREE_TYPE (self_value))
1725 self_value = fold_build1 (NOP_EXPR, base_type, self_value);
1726 if (! flag_syntax_only && flag_indirect_dispatch)
1728 tree otable_index
1729 = build_int_cst (NULL_TREE, get_symbol_table_index
1730 (field_decl, NULL_TREE,
1731 &TYPE_OTABLE_METHODS (output_class)));
1732 tree field_offset
1733 = build4 (ARRAY_REF, integer_type_node,
1734 TYPE_OTABLE_DECL (output_class), otable_index,
1735 NULL_TREE, NULL_TREE);
1736 tree address;
1738 if (DECL_CONTEXT (field_decl) != output_class)
1739 field_offset
1740 = build3 (COND_EXPR, TREE_TYPE (field_offset),
1741 build2 (EQ_EXPR, boolean_type_node,
1742 field_offset, integer_zero_node),
1743 build_call_nary (void_type_node,
1744 build_address_of (soft_nosuchfield_node),
1745 1, otable_index),
1746 field_offset);
1748 self_value = java_check_reference (self_value, check);
1749 address = fold_build_pointer_plus (self_value, field_offset);
1750 address = fold_convert (build_pointer_type (TREE_TYPE (field_decl)),
1751 address);
1752 return fold_build1 (INDIRECT_REF, TREE_TYPE (field_decl), address);
1755 self_value = build_java_indirect_ref (TREE_TYPE (TREE_TYPE (self_value)),
1756 self_value, check);
1757 return fold_build3 (COMPONENT_REF, TREE_TYPE (field_decl),
1758 self_value, field_decl, NULL_TREE);
1762 tree
1763 lookup_label (int pc)
1765 tree name;
1766 char buf[32];
1767 if (pc > highest_label_pc_this_method)
1768 highest_label_pc_this_method = pc;
1769 targetm.asm_out.generate_internal_label (buf, "LJpc=",
1770 start_label_pc_this_method + pc);
1771 name = get_identifier (buf);
1772 if (IDENTIFIER_LOCAL_VALUE (name))
1773 return IDENTIFIER_LOCAL_VALUE (name);
1774 else
1776 /* The type of the address of a label is return_address_type_node. */
1777 tree decl = create_label_decl (name);
1778 return pushdecl (decl);
1782 /* Generate a unique name for labels used by loops, switches, and
1783 try-catch-finally blocks, and for temporary variables. */
1785 tree
1786 generate_name (void)
1788 static int l_number = 0;
1789 char buff [32];
1790 targetm.asm_out.generate_internal_label (buff, "LJv", l_number);
1791 l_number++;
1792 return get_identifier (buff);
1795 tree
1796 create_label_decl (tree name)
1798 tree decl;
1799 decl = build_decl (input_location, LABEL_DECL, name,
1800 TREE_TYPE (return_address_type_node));
1801 DECL_CONTEXT (decl) = current_function_decl;
1802 DECL_IGNORED_P (decl) = 1;
1803 return decl;
1806 /* This maps a bytecode offset (PC) to various flags. */
1807 char *instruction_bits;
1809 /* This is a vector of type states for the current method. It is
1810 indexed by PC. Each element is a tree vector holding the type
1811 state at that PC. We only note type states at basic block
1812 boundaries. */
1813 vec<tree, va_gc> *type_states;
1815 static void
1816 note_label (int current_pc ATTRIBUTE_UNUSED, int target_pc)
1818 lookup_label (target_pc);
1819 instruction_bits [target_pc] |= BCODE_JUMP_TARGET;
1822 /* Emit code to jump to TARGET_PC if VALUE1 CONDITION VALUE2,
1823 where CONDITION is one of the compare operators. */
1825 static void
1826 expand_compare (enum tree_code condition, tree value1, tree value2,
1827 int target_pc)
1829 tree target = lookup_label (target_pc);
1830 tree cond = fold_build2 (condition, boolean_type_node, value1, value2);
1831 java_add_stmt
1832 (build3 (COND_EXPR, void_type_node, java_truthvalue_conversion (cond),
1833 build1 (GOTO_EXPR, void_type_node, target),
1834 build_java_empty_stmt ()));
1837 /* Emit code for a TEST-type opcode. */
1839 static void
1840 expand_test (enum tree_code condition, tree type, int target_pc)
1842 tree value1, value2;
1843 flush_quick_stack ();
1844 value1 = pop_value (type);
1845 value2 = (type == ptr_type_node) ? null_pointer_node : integer_zero_node;
1846 expand_compare (condition, value1, value2, target_pc);
1849 /* Emit code for a COND-type opcode. */
1851 static void
1852 expand_cond (enum tree_code condition, tree type, int target_pc)
1854 tree value1, value2;
1855 flush_quick_stack ();
1856 /* note: pop values in opposite order */
1857 value2 = pop_value (type);
1858 value1 = pop_value (type);
1859 /* Maybe should check value1 and value2 for type compatibility ??? */
1860 expand_compare (condition, value1, value2, target_pc);
1863 static void
1864 expand_java_goto (int target_pc)
1866 tree target_label = lookup_label (target_pc);
1867 flush_quick_stack ();
1868 java_add_stmt (build1 (GOTO_EXPR, void_type_node, target_label));
1871 static tree
1872 expand_java_switch (tree selector, int default_pc)
1874 tree switch_expr, x;
1876 flush_quick_stack ();
1877 switch_expr = build3 (SWITCH_EXPR, TREE_TYPE (selector), selector,
1878 NULL_TREE, NULL_TREE);
1879 java_add_stmt (switch_expr);
1881 x = build_case_label (NULL_TREE, NULL_TREE,
1882 create_artificial_label (input_location));
1883 append_to_statement_list (x, &SWITCH_BODY (switch_expr));
1885 x = build1 (GOTO_EXPR, void_type_node, lookup_label (default_pc));
1886 append_to_statement_list (x, &SWITCH_BODY (switch_expr));
1888 return switch_expr;
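/* Append to SWITCH_EXPR a case that jumps to TARGET_PC when the selector
   equals MATCH. */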
1891 static void
1892 expand_java_add_case (tree switch_expr, int match, int target_pc)
1894 tree value, x;
1896 value = build_int_cst (TREE_TYPE (switch_expr), match);
1898 x = build_case_label (value, NULL_TREE,
1899 create_artificial_label (input_location));
1900 append_to_statement_list (x, &SWITCH_BODY (switch_expr));
1902 x = build1 (GOTO_EXPR, void_type_node, lookup_label (target_pc));
1903 append_to_statement_list (x, &SWITCH_BODY (switch_expr));
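/* Pop the arguments of a call of type METHOD_TYPE from the stack,
   converting each one to its declared parameter type, and return them
   in declaration order. */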
1906 static vec<tree, va_gc> *
1907 pop_arguments (tree method_type)
1909 function_args_iterator fnai;
1910 tree type;
1911 vec<tree, va_gc> *args = NULL;
1912 int arity;
1914 FOREACH_FUNCTION_ARGS (method_type, type, fnai)
1916 /* XXX: leaky abstraction. */
1917 if (type == void_type_node)
1918 break;
1920 vec_safe_push (args, type);
1923 arity = vec_safe_length (args);
1925 while (arity--)
1927 tree arg = pop_value ((*args)[arity]);
1929 /* We simply cast each argument to its proper type. This is
1930 needed since we lose type information coming out of the
1931 verifier. We also have to do this when we pop an integer
1932 type that must be promoted for the function call. */
1933 if (TREE_CODE (type) == POINTER_TYPE)
1934 arg = build1 (NOP_EXPR, type, arg);
1935 else if (targetm.calls.promote_prototypes (type)
1936 && TYPE_PRECISION (type) < TYPE_PRECISION (integer_type_node)
1937 && INTEGRAL_TYPE_P (type))
1938 arg = convert (integer_type_node, arg);
1940 (*args)[arity] = arg;
1943 return args;
1946 /* Attach to PTR (a block) the declaration found in ENTRY. */
1948 int
1949 attach_init_test_initialization_flags (void **entry, void *ptr)
1951 tree block = (tree)ptr;
1952 struct treetreehash_entry *ite = (struct treetreehash_entry *) *entry;
1954 if (block != error_mark_node)
1956 if (TREE_CODE (block) == BIND_EXPR)
1958 tree body = BIND_EXPR_BODY (block);
1959 DECL_CHAIN (ite->value) = BIND_EXPR_VARS (block);
1960 BIND_EXPR_VARS (block) = ite->value;
1961 body = build2 (COMPOUND_EXPR, void_type_node,
1962 build1 (DECL_EXPR, void_type_node, ite->value), body);
1963 BIND_EXPR_BODY (block) = body;
1965 else
1967 tree body = BLOCK_SUBBLOCKS (block);
1968 TREE_CHAIN (ite->value) = BLOCK_EXPR_DECLS (block);
1969 BLOCK_EXPR_DECLS (block) = ite->value;
1970 body = build2 (COMPOUND_EXPR, void_type_node,
1971 build1 (DECL_EXPR, void_type_node, ite->value), body);
1972 BLOCK_SUBBLOCKS (block) = body;
1976 return true;
1979 /* Build an expression to initialize the class CLAS.
1980 If EXPR is non-NULL, return an expression that first calls the
1981 initializer (if it is needed) and then evaluates EXPR. */
1983 tree
1984 build_class_init (tree clas, tree expr)
1986 tree init;
1988 /* An optimization: if CLAS is a superclass of the class we're
1989 compiling, we don't need to initialize it. However, if CLAS is
1990 an interface, it won't necessarily be initialized, even if we
1991 implement it. */
1992 if ((! CLASS_INTERFACE (TYPE_NAME (clas))
1993 && inherits_from_p (current_class, clas))
1994 || current_class == clas)
1995 return expr;
1997 if (always_initialize_class_p)
1999 init = build_call_nary (void_type_node,
2000 build_address_of (soft_initclass_node),
2001 1, build_class_ref (clas));
2002 TREE_SIDE_EFFECTS (init) = 1;
2004 else
2006 tree *init_test_decl;
2007 tree decl;
2008 init_test_decl = java_treetreehash_new
2009 (DECL_FUNCTION_INIT_TEST_TABLE (current_function_decl), clas);
2011 if (*init_test_decl == NULL)
2013 /* Build a declaration and mark it as a flag used to track
2014 static class initializations. */
2015 decl = build_decl (input_location, VAR_DECL, NULL_TREE,
2016 boolean_type_node);
2017 MAYBE_CREATE_VAR_LANG_DECL_SPECIFIC (decl);
2018 DECL_CONTEXT (decl) = current_function_decl;
2019 DECL_INITIAL (decl) = boolean_false_node;
2020 /* Don't emit any symbolic debugging info for this decl. */
2021 DECL_IGNORED_P (decl) = 1;
2022 *init_test_decl = decl;
2025 init = build_call_nary (void_type_node,
2026 build_address_of (soft_initclass_node),
2027 1, build_class_ref (clas));
2028 TREE_SIDE_EFFECTS (init) = 1;
2029 init = build3 (COND_EXPR, void_type_node,
2030 build2 (EQ_EXPR, boolean_type_node,
2031 *init_test_decl, boolean_false_node),
2032 init, integer_zero_node);
2033 TREE_SIDE_EFFECTS (init) = 1;
2034 init = build2 (COMPOUND_EXPR, TREE_TYPE (expr), init,
2035 build2 (MODIFY_EXPR, boolean_type_node,
2036 *init_test_decl, boolean_true_node));
2037 TREE_SIDE_EFFECTS (init) = 1;
2040 if (expr != NULL_TREE)
2042 expr = build2 (COMPOUND_EXPR, TREE_TYPE (expr), init, expr);
2043 TREE_SIDE_EFFECTS (expr) = 1;
2044 return expr;
2046 return init;
2051 /* Rewrite expensive calls that require stack unwinding at runtime to
2052 cheaper alternatives. The logic here performs these
2053 transformations:
2055 java.lang.Class.forName("foo") -> java.lang.Class.forName("foo", class$)
2056 java.lang.Class.getClassLoader() -> java.lang.Class.getClassLoader(class$) */
2060 typedef struct
2062 const char *classname;
2063 const char *method;
2064 const char *signature;
2065 const char *new_classname;
2066 const char *new_signature;
2067 int flags;
2068 void (*rewrite_arglist) (vec<tree, va_gc> **);
2069 } rewrite_rule;
2071 /* Add __builtin_return_address(0) to the end of an arglist. */
2074 static void
2075 rewrite_arglist_getcaller (vec<tree, va_gc> **arglist)
2077 tree retaddr
2078 = build_call_expr (builtin_decl_explicit (BUILT_IN_RETURN_ADDRESS),
2079 1, integer_zero_node);
2081 DECL_UNINLINABLE (current_function_decl) = 1;
2083 vec_safe_push (*arglist, retaddr);
2086 /* Add this.class to the end of an arglist. */
2088 static void
2089 rewrite_arglist_getclass (vec<tree, va_gc> **arglist)
2091 vec_safe_push (*arglist, build_class_ref (output_class));
2094 static rewrite_rule rules[] =
2095 {{"java.lang.Class", "getClassLoader", "()Ljava/lang/ClassLoader;",
2096 "java.lang.Class", "(Ljava/lang/Class;)Ljava/lang/ClassLoader;",
2097 ACC_FINAL|ACC_PRIVATE, rewrite_arglist_getclass},
2099 {"java.lang.Class", "forName", "(Ljava/lang/String;)Ljava/lang/Class;",
2100 "java.lang.Class", "(Ljava/lang/String;Ljava/lang/Class;)Ljava/lang/Class;",
2101 ACC_FINAL|ACC_PRIVATE|ACC_STATIC, rewrite_arglist_getclass},
2103 {"gnu.classpath.VMStackWalker", "getCallingClass", "()Ljava/lang/Class;",
2104 "gnu.classpath.VMStackWalker", "(Lgnu/gcj/RawData;)Ljava/lang/Class;",
2105 ACC_FINAL|ACC_PRIVATE|ACC_STATIC, rewrite_arglist_getcaller},
2107 {"gnu.classpath.VMStackWalker", "getCallingClassLoader",
2108 "()Ljava/lang/ClassLoader;",
2109 "gnu.classpath.VMStackWalker", "(Lgnu/gcj/RawData;)Ljava/lang/ClassLoader;",
2110 ACC_FINAL|ACC_PRIVATE|ACC_STATIC, rewrite_arglist_getcaller},
2112 {"gnu.java.lang.VMCPStringBuilder", "toString", "([CII)Ljava/lang/String;",
2113 "java.lang.String", "([CII)Ljava/lang/String;",
2114 ACC_FINAL|ACC_PRIVATE|ACC_STATIC, NULL},
2116 {NULL, NULL, NULL, NULL, NULL, 0, NULL}};
2118 /* True if this method is special, i.e. it's a private method that
2119 should be exported from a DSO. */
2121 bool
2122 special_method_p (tree candidate_method)
2124 tree context = DECL_NAME (TYPE_NAME (DECL_CONTEXT (candidate_method)));
2125 tree method = DECL_NAME (candidate_method);
2126 rewrite_rule *p;
2128 for (p = rules; p->classname; p++)
2130 if (get_identifier (p->classname) == context
2131 && get_identifier (p->method) == method)
2132 return true;
2134 return false;
2137 /* Scan the rules list for replacements for *METHOD_P and replace the
2138 args accordingly. If the rewrite results in an access to a private
2139 method, update SPECIAL. */
2141 void
2142 maybe_rewrite_invocation (tree *method_p, vec<tree, va_gc> **arg_list_p,
2143 tree *method_signature_p, tree *special)
2145 tree context = DECL_NAME (TYPE_NAME (DECL_CONTEXT (*method_p)));
2146 rewrite_rule *p;
2147 *special = NULL_TREE;
2149 for (p = rules; p->classname; p++)
2151 if (get_identifier (p->classname) == context)
2153 tree method = DECL_NAME (*method_p);
2154 if (get_identifier (p->method) == method
2155 && get_identifier (p->signature) == *method_signature_p)
2157 tree maybe_method;
2158 tree destination_class
2159 = lookup_class (get_identifier (p->new_classname));
2160 gcc_assert (destination_class);
2161 maybe_method
2162 = lookup_java_method (destination_class,
2163 method,
2164 get_identifier (p->new_signature));
2165 if (! maybe_method && ! flag_verify_invocations)
2167 maybe_method
2168 = add_method (destination_class, p->flags,
2169 method, get_identifier (p->new_signature));
2170 DECL_EXTERNAL (maybe_method) = 1;
2172 *method_p = maybe_method;
2173 gcc_assert (*method_p);
2174 if (p->rewrite_arglist)
2175 p->rewrite_arglist (arg_list_p);
2176 *method_signature_p = get_identifier (p->new_signature);
2177 *special = integer_one_node;
2179 break;
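/* Build a reference to the code of METHOD, which is known at compile
   time.  If SELF_TYPE is compiled here, take the method's address
   directly (or through the atable under indirect dispatch); otherwise
   load the code pointer from the class's method table at run time. */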
2187 tree
2188 build_known_method_ref (tree method, tree method_type ATTRIBUTE_UNUSED,
2189 tree self_type, tree method_signature ATTRIBUTE_UNUSED,
2190 vec<tree, va_gc> *arg_list ATTRIBUTE_UNUSED, tree special)
2192 tree func;
2193 if (is_compiled_class (self_type))
2195 /* With indirect dispatch we have to use indirect calls for all
2196 publicly visible methods or gcc will use PLT indirections
2197 to reach them. We also have to use indirect dispatch for all
2198 external methods. */
2199 if (! flag_indirect_dispatch
2200 || (! DECL_EXTERNAL (method) && ! TREE_PUBLIC (method)))
2202 func = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (method)),
2203 method);
2205 else
2207 tree table_index
2208 = build_int_cst (NULL_TREE,
2209 (get_symbol_table_index
2210 (method, special,
2211 &TYPE_ATABLE_METHODS (output_class))));
2212 func
2213 = build4 (ARRAY_REF,
2214 TREE_TYPE (TREE_TYPE (TYPE_ATABLE_DECL (output_class))),
2215 TYPE_ATABLE_DECL (output_class), table_index,
2216 NULL_TREE, NULL_TREE);
2218 func = convert (method_ptr_type_node, func);
2220 else
2222 /* We don't know whether the method has been (statically) compiled.
2223 Compile this code to get a reference to the method's code:
2225 SELF_TYPE->methods[METHOD_INDEX].ncode */
2229 int method_index = 0;
2230 tree meth, ref;
2232 /* The method might actually be declared in some superclass, so
2233 we have to use its class context, not the caller's notion of
2234 where the method is. */
2235 self_type = DECL_CONTEXT (method);
2236 ref = build_class_ref (self_type);
2237 ref = build1 (INDIRECT_REF, class_type_node, ref);
2238 if (ncode_ident == NULL_TREE)
2239 ncode_ident = get_identifier ("ncode");
2240 if (methods_ident == NULL_TREE)
2241 methods_ident = get_identifier ("methods");
2242 ref = build3 (COMPONENT_REF, method_ptr_type_node, ref,
2243 lookup_field (&class_type_node, methods_ident),
2244 NULL_TREE);
2245 for (meth = TYPE_METHODS (self_type);
2246 ; meth = DECL_CHAIN (meth))
2248 if (method == meth)
2249 break;
2250 if (meth == NULL_TREE)
2251 fatal_error ("method '%s' not found in class",
2252 IDENTIFIER_POINTER (DECL_NAME (method)));
2253 method_index++;
2255 method_index *= int_size_in_bytes (method_type_node);
2256 ref = fold_build_pointer_plus_hwi (ref, method_index);
2257 ref = build1 (INDIRECT_REF, method_type_node, ref);
2258 func = build3 (COMPONENT_REF, nativecode_ptr_type_node,
2259 ref, lookup_field (&method_type_node, ncode_ident),
2260 NULL_TREE);
2262 return func;
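/* Build an expression for the vtable of the receiver, which is the
   first element of ARG_LIST.  For an interface call on an array type,
   the vtable of java.lang.Object is used instead. */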
2265 tree
2266 invoke_build_dtable (int is_invoke_interface, vec<tree, va_gc> *arg_list)
2268 tree dtable, objectref;
2269 tree saved = save_expr ((*arg_list)[0]);
2271 (*arg_list)[0] = saved;
2273 /* If we're dealing with interfaces and if the objectref
2274 argument is an array then get the dispatch table of the class
2275 Object rather than the one from the objectref. */
2276 objectref = (is_invoke_interface
2277 && is_array_type_p (TREE_TYPE (saved))
2278 ? build_class_ref (object_type_node) : saved);
2280 if (dtable_ident == NULL_TREE)
2281 dtable_ident = get_identifier ("vtable");
2282 dtable = build_java_indirect_ref (object_type_node, objectref,
2283 flag_check_references);
2284 dtable = build3 (COMPONENT_REF, dtable_ptr_type, dtable,
2285 lookup_field (&object_type_node, dtable_ident), NULL_TREE);
2287 return dtable;
2290 /* Determine the index in SYMBOL_TABLE for a reference to the decl
2291 T. If this decl has not been seen before, it will be added to the
2292 [oa]table_methods. If it has, the existing table slot will be
2293 reused. */
2295 int
2296 get_symbol_table_index (tree t, tree special,
2297 vec<method_entry, va_gc> **symbol_table)
2299 method_entry *e;
2300 unsigned i;
2301 method_entry elem = {t, special};
2303 FOR_EACH_VEC_SAFE_ELT (*symbol_table, i, e)
2304 if (t == e->method && special == e->special)
2305 goto done;
2307 vec_safe_push (*symbol_table, elem);
2309 done:
2310 return i + 1;
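/* Build an expression that fetches METHOD's code pointer from DTABLE,
   the receiver's vtable.  With indirect dispatch the vtable offset is
   loaded from the otable; otherwise it comes from DECL_VINDEX. */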
2313 tree
2314 build_invokevirtual (tree dtable, tree method, tree special)
2316 tree func;
2317 tree nativecode_ptr_ptr_type_node
2318 = build_pointer_type (nativecode_ptr_type_node);
2319 tree method_index;
2320 tree otable_index;
2322 if (flag_indirect_dispatch)
2324 gcc_assert (! CLASS_INTERFACE (TYPE_NAME (DECL_CONTEXT (method))));
2326 otable_index
2327 = build_int_cst (NULL_TREE, get_symbol_table_index
2328 (method, special,
2329 &TYPE_OTABLE_METHODS (output_class)));
2330 method_index = build4 (ARRAY_REF, integer_type_node,
2331 TYPE_OTABLE_DECL (output_class),
2332 otable_index, NULL_TREE, NULL_TREE);
2334 else
2336 /* We fetch the DECL_VINDEX field directly here, rather than
2337 using get_method_index(). DECL_VINDEX is the true offset
2338 from the vtable base to a method, regardless of any extra
2339 words inserted at the start of the vtable. */
2340 method_index = DECL_VINDEX (method);
2341 method_index = size_binop (MULT_EXPR, method_index,
2342 TYPE_SIZE_UNIT (nativecode_ptr_ptr_type_node));
2343 if (TARGET_VTABLE_USES_DESCRIPTORS)
2344 method_index = size_binop (MULT_EXPR, method_index,
2345 size_int (TARGET_VTABLE_USES_DESCRIPTORS));
2348 func = fold_build_pointer_plus (dtable, method_index);
2350 if (TARGET_VTABLE_USES_DESCRIPTORS)
2351 func = build1 (NOP_EXPR, nativecode_ptr_type_node, func);
2352 else
2354 func = fold_convert (nativecode_ptr_ptr_type_node, func);
2355 func = build1 (INDIRECT_REF, nativecode_ptr_type_node, func);
2358 return func;
2361 static GTY(()) tree class_ident;
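/* Build a run-time lookup of the interface METHOD through the
   receiver's dispatch table DTABLE, yielding the method's code pointer. */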
2362 tree
2363 build_invokeinterface (tree dtable, tree method)
2365 tree interface;
2366 tree idx;
2368 /* We expand invokeinterface here. */
2370 if (class_ident == NULL_TREE)
2371 class_ident = get_identifier ("class");
2373 dtable = build_java_indirect_ref (dtable_type, dtable,
2374 flag_check_references);
2375 dtable = build3 (COMPONENT_REF, class_ptr_type, dtable,
2376 lookup_field (&dtable_type, class_ident), NULL_TREE);
2378 interface = DECL_CONTEXT (method);
2379 gcc_assert (CLASS_INTERFACE (TYPE_NAME (interface)));
2380 layout_class_methods (interface);
2382 if (flag_indirect_dispatch)
2384 int itable_index
2385 = 2 * (get_symbol_table_index
2386 (method, NULL_TREE, &TYPE_ITABLE_METHODS (output_class)));
2387 interface
2388 = build4 (ARRAY_REF,
2389 TREE_TYPE (TREE_TYPE (TYPE_ITABLE_DECL (output_class))),
2390 TYPE_ITABLE_DECL (output_class),
2391 build_int_cst (NULL_TREE, itable_index-1),
2392 NULL_TREE, NULL_TREE);
2393 idx
2394 = build4 (ARRAY_REF,
2395 TREE_TYPE (TREE_TYPE (TYPE_ITABLE_DECL (output_class))),
2396 TYPE_ITABLE_DECL (output_class),
2397 build_int_cst (NULL_TREE, itable_index),
2398 NULL_TREE, NULL_TREE);
2399 interface = convert (class_ptr_type, interface);
2400 idx = convert (integer_type_node, idx);
2402 else
2404 idx = build_int_cst (NULL_TREE,
2405 get_interface_method_index (method, interface));
2406 interface = build_class_ref (interface);
2409 return build_call_nary (ptr_type_node,
2410 build_address_of (soft_lookupinterfacemethod_node),
2411 3, dtable, interface, idx);
2414 /* Expand one of the invoke_* opcodes.
2415 OPCODE is the specific opcode.
2416 METHOD_REF_INDEX is an index into the constant pool.
2417 NARGS is the number of arguments, or -1 if not specified. */
2419 static void
2420 expand_invoke (int opcode, int method_ref_index, int nargs ATTRIBUTE_UNUSED)
2422 tree method_signature
2423 = COMPONENT_REF_SIGNATURE(&current_jcf->cpool, method_ref_index);
2424 tree method_name = COMPONENT_REF_NAME (&current_jcf->cpool,
2425 method_ref_index);
2426 tree self_type
2427 = get_class_constant (current_jcf,
2428 COMPONENT_REF_CLASS_INDEX(&current_jcf->cpool,
2429 method_ref_index));
2430 const char *const self_name
2431 = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (self_type)));
2432 tree call, func, method, method_type;
2433 vec<tree, va_gc> *arg_list;
2434 tree check = NULL_TREE;
2436 tree special = NULL_TREE;
2438 if (! CLASS_LOADED_P (self_type))
2440 load_class (self_type, 1);
2441 safe_layout_class (self_type);
2442 if (TREE_CODE (TYPE_SIZE (self_type)) == ERROR_MARK)
2443 fatal_error ("failed to find class '%s'", self_name);
2445 layout_class_methods (self_type);
2447 if (ID_INIT_P (method_name))
2448 method = lookup_java_constructor (self_type, method_signature);
2449 else
2450 method = lookup_java_method (self_type, method_name, method_signature);
2452 /* We've found a method in a class other than the one in which it
2453 was wanted. This can happen if, for instance, we're trying to
2454 compile invokespecial super.equals().
2455 FIXME: This is a kludge. Rather than nullifying the result, we
2456 should change lookup_java_method() so that it doesn't search the
2457 superclass chain when we're BC-compiling. */
2458 if (! flag_verify_invocations
2459 && method
2460 && ! TYPE_ARRAY_P (self_type)
2461 && self_type != DECL_CONTEXT (method))
2462 method = NULL_TREE;
2464 /* We've found a method in an interface, but this isn't an interface
2465 call. */
2466 if (opcode != OPCODE_invokeinterface
2467 && method
2468 && (CLASS_INTERFACE (TYPE_NAME (DECL_CONTEXT (method)))))
2469 method = NULL_TREE;
2471 /* We've found a non-interface method but we are making an
2472 interface call. This can happen if the interface overrides a
2473 method in Object. */
2474 if (! flag_verify_invocations
2475 && opcode == OPCODE_invokeinterface
2476 && method
2477 && ! CLASS_INTERFACE (TYPE_NAME (DECL_CONTEXT (method))))
2478 method = NULL_TREE;
2480 if (method == NULL_TREE)
2482 if (flag_verify_invocations || ! flag_indirect_dispatch)
2484 error ("class '%s' has no method named '%s' matching signature '%s'",
2485 self_name,
2486 IDENTIFIER_POINTER (method_name),
2487 IDENTIFIER_POINTER (method_signature));
2489 else
2491 int flags = ACC_PUBLIC;
2492 if (opcode == OPCODE_invokestatic)
2493 flags |= ACC_STATIC;
2494 if (opcode == OPCODE_invokeinterface)
2496 flags |= ACC_INTERFACE | ACC_ABSTRACT;
2497 CLASS_INTERFACE (TYPE_NAME (self_type)) = 1;
2499 method = add_method (self_type, flags, method_name,
2500 method_signature);
2501 DECL_ARTIFICIAL (method) = 1;
2502 METHOD_DUMMY (method) = 1;
2503 layout_class_method (self_type, NULL,
2504 method, NULL);
2508 /* invokestatic can't be used on a non-static or abstract method; other invokes can't be used on a static method. */
2509 if (method != NULL_TREE)
2511 if (opcode == OPCODE_invokestatic)
2513 if (!METHOD_STATIC (method))
2515 error ("invokestatic on non static method");
2516 method = NULL_TREE;
2518 else if (METHOD_ABSTRACT (method))
2520 error ("invokestatic on abstract method");
2521 method = NULL_TREE;
2524 else
2526 if (METHOD_STATIC (method))
2528 error ("invoke[non-static] on static method");
2529 method = NULL_TREE;
2534 if (method == NULL_TREE)
2536 /* If we got here, we emitted an error message above. So we
2537 just pop the arguments, push a properly-typed zero, and
2538 continue. */
2539 method_type = get_type_from_signature (method_signature);
2540 pop_arguments (method_type);
2541 if (opcode != OPCODE_invokestatic)
2542 pop_type (self_type);
2543 method_type = promote_type (TREE_TYPE (method_type));
2544 push_value (convert (method_type, integer_zero_node));
2545 return;
2548 arg_list = pop_arguments (TREE_TYPE (method));
2549 flush_quick_stack ();
2551 maybe_rewrite_invocation (&method, &arg_list, &method_signature,
2552 &special);
2553 method_type = TREE_TYPE (method);
2555 func = NULL_TREE;
2556 if (opcode == OPCODE_invokestatic)
2557 func = build_known_method_ref (method, method_type, self_type,
2558 method_signature, arg_list, special);
2559 else if (opcode == OPCODE_invokespecial
2560 || (opcode == OPCODE_invokevirtual
2561 && (METHOD_PRIVATE (method)
2562 || METHOD_FINAL (method)
2563 || CLASS_FINAL (TYPE_NAME (self_type)))))
2565 /* If the object for the method call is null, we throw an
2566 exception. We don't do this if the object is the current
2567 method's `this'. In other cases we just rely on an
2568 optimization pass to eliminate redundant checks. FIXME:
2569 Unfortunately there doesn't seem to be a way to determine
2570 what the current method is right now.
2571 We do omit the check if we're calling <init>. */
2572 /* We use a SAVE_EXPR here to make sure we only evaluate
2573 the new `self' expression once. */
2574 tree save_arg = save_expr ((*arg_list)[0]);
2575 (*arg_list)[0] = save_arg;
2576 check = java_check_reference (save_arg, ! DECL_INIT_P (method));
2577 func = build_known_method_ref (method, method_type, self_type,
2578 method_signature, arg_list, special);
2580 else
2582 tree dtable = invoke_build_dtable (opcode == OPCODE_invokeinterface,
2583 arg_list);
2584 if (opcode == OPCODE_invokevirtual)
2585 func = build_invokevirtual (dtable, method, special);
2586 else
2587 func = build_invokeinterface (dtable, method);
2590 if (TREE_CODE (func) == ADDR_EXPR)
2591 TREE_TYPE (func) = build_pointer_type (method_type);
2592 else
2593 func = build1 (NOP_EXPR, build_pointer_type (method_type), func);
2595 call = build_call_vec (TREE_TYPE (method_type), func, arg_list);
2596 TREE_SIDE_EFFECTS (call) = 1;
2597 call = check_for_builtin (method, call);
2599 if (check != NULL_TREE)
2601 call = build2 (COMPOUND_EXPR, TREE_TYPE (call), check, call);
2602 TREE_SIDE_EFFECTS (call) = 1;
2605 if (TREE_CODE (TREE_TYPE (method_type)) == VOID_TYPE)
2606 java_add_stmt (call);
2607 else
2609 push_value (call);
2610 flush_quick_stack ();
2614 /* Create a stub which will be put into the vtable but which will call
2615 a JNI function. */
2617 tree
2618 build_jni_stub (tree method)
2620 tree jnifunc, call, body, method_sig, arg_types;
2621 tree jniarg0, jniarg1, jniarg2, jniarg3;
2622 tree jni_func_type, tem;
2623 tree env_var, res_var = NULL_TREE, block;
2624 tree method_args;
2625 tree meth_var;
2626 tree bind;
2627 vec<tree, va_gc> *args = NULL;
2628 int args_size = 0;
2630 tree klass = DECL_CONTEXT (method);
2631 klass = build_class_ref (klass);
2633 gcc_assert (METHOD_NATIVE (method) && flag_jni);
2635 DECL_ARTIFICIAL (method) = 1;
2636 DECL_EXTERNAL (method) = 0;
2638 env_var = build_decl (input_location,
2639 VAR_DECL, get_identifier ("env"), ptr_type_node);
2640 DECL_CONTEXT (env_var) = method;
2642 if (TREE_TYPE (TREE_TYPE (method)) != void_type_node)
2644 res_var = build_decl (input_location, VAR_DECL, get_identifier ("res"),
2645 TREE_TYPE (TREE_TYPE (method)));
2646 DECL_CONTEXT (res_var) = method;
2647 DECL_CHAIN (env_var) = res_var;
2650 method_args = DECL_ARGUMENTS (method);
2651 block = build_block (env_var, NULL_TREE, method_args, NULL_TREE);
2652 TREE_SIDE_EFFECTS (block) = 1;
2654 /* Compute the local `env' by calling _Jv_GetJNIEnvNewFrame. */
2655 body = build2 (MODIFY_EXPR, ptr_type_node, env_var,
2656 build_call_nary (ptr_type_node,
2657 build_address_of (soft_getjnienvnewframe_node),
2658 1, klass));
2660 /* The JNIEnv structure is the first argument to the JNI function. */
2661 args_size += int_size_in_bytes (TREE_TYPE (env_var));
2662 vec_safe_push (args, env_var);
2664 /* For a static method the second argument is the class. For a
2665 non-static method the second argument is `this'; that is already
2666 available in the argument list. */
2667 if (METHOD_STATIC (method))
2669 args_size += int_size_in_bytes (TREE_TYPE (klass));
2670 vec_safe_push (args, klass);
2673 /* All the arguments to this method become arguments to the
2674 underlying JNI function. If we had to wrap object arguments in a
2675 special way, we would do that here. */
2676 for (tem = method_args; tem != NULL_TREE; tem = DECL_CHAIN (tem))
2678 int arg_bits = TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (tem)));
2679 #ifdef PARM_BOUNDARY
2680 arg_bits = (((arg_bits + PARM_BOUNDARY - 1) / PARM_BOUNDARY)
2681 * PARM_BOUNDARY);
2682 #endif
2683 args_size += (arg_bits / BITS_PER_UNIT);
2685 vec_safe_push (args, tem);
2687 arg_types = TYPE_ARG_TYPES (TREE_TYPE (method));
2689 /* Argument types for static methods and the JNIEnv structure.
2690 FIXME: Write and use build_function_type_vec to avoid this. */
2691 if (METHOD_STATIC (method))
2692 arg_types = tree_cons (NULL_TREE, object_ptr_type_node, arg_types);
2693 arg_types = tree_cons (NULL_TREE, ptr_type_node, arg_types);
2695 /* We call _Jv_LookupJNIMethod to find the actual underlying
2696 function pointer. _Jv_LookupJNIMethod will throw the appropriate
2697 exception if this function is not found at runtime. */
2698 method_sig = build_java_signature (TREE_TYPE (method));
2699 jniarg0 = klass;
2700 jniarg1 = build_utf8_ref (DECL_NAME (method));
2701 jniarg2 = build_utf8_ref (unmangle_classname
2702 (IDENTIFIER_POINTER (method_sig),
2703 IDENTIFIER_LENGTH (method_sig)));
2704 jniarg3 = build_int_cst (NULL_TREE, args_size);
2706 tem = build_function_type (TREE_TYPE (TREE_TYPE (method)), arg_types);
2708 #ifdef MODIFY_JNI_METHOD_CALL
2709 tem = MODIFY_JNI_METHOD_CALL (tem);
2710 #endif
2712 jni_func_type = build_pointer_type (tem);
2714 /* Use the actual function type, rather than a generic pointer type,
2715 such that this decl keeps the actual pointer type from being
2716 garbage-collected. If it is, we end up using canonical types
2717 with different uids for equivalent function types, and this in
2718 turn causes utf8 identifiers and output order to vary. */
2719 meth_var = build_decl (input_location,
2720 VAR_DECL, get_identifier ("meth"), jni_func_type);
2721 TREE_STATIC (meth_var) = 1;
2722 TREE_PUBLIC (meth_var) = 0;
2723 DECL_EXTERNAL (meth_var) = 0;
2724 DECL_CONTEXT (meth_var) = method;
2725 DECL_ARTIFICIAL (meth_var) = 1;
2726 DECL_INITIAL (meth_var) = null_pointer_node;
2727 TREE_USED (meth_var) = 1;
2728 chainon (env_var, meth_var);
2729 build_result_decl (method);
2731 jnifunc = build3 (COND_EXPR, jni_func_type,
2732 build2 (NE_EXPR, boolean_type_node,
2733 meth_var, build_int_cst (TREE_TYPE (meth_var), 0)),
2734 meth_var,
2735 build2 (MODIFY_EXPR, jni_func_type, meth_var,
2736 build1
2737 (NOP_EXPR, jni_func_type,
2738 build_call_nary (ptr_type_node,
2739 build_address_of
2740 (soft_lookupjnimethod_node),
2741 4,
2742 jniarg0, jniarg1,
2743 jniarg2, jniarg3))));
2745 /* Now we make the actual JNI call via the resulting function
2746 pointer. */
2747 call = build_call_vec (TREE_TYPE (TREE_TYPE (method)), jnifunc, args);
2749 /* If the JNI call returned a result, capture it here. If we had to
2750 unwrap JNI object results, we would do that here. */
2751 if (res_var != NULL_TREE)
2753 /* If the call returns an object, it may return a JNI weak
2754 reference, in which case we must unwrap it. */
2755 if (! JPRIMITIVE_TYPE_P (TREE_TYPE (TREE_TYPE (method))))
2756 call = build_call_nary (TREE_TYPE (TREE_TYPE (method)),
2757 build_address_of (soft_unwrapjni_node),
2758 1, call);
2759 call = build2 (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (method)),
2760 res_var, call);
2763 TREE_SIDE_EFFECTS (call) = 1;
2765 body = build2 (COMPOUND_EXPR, void_type_node, body, call);
2766 TREE_SIDE_EFFECTS (body) = 1;
2768 /* Now free the environment we allocated. */
2769 call = build_call_nary (ptr_type_node,
2770 build_address_of (soft_jnipopsystemframe_node),
2771 1, env_var);
2772 TREE_SIDE_EFFECTS (call) = 1;
2773 body = build2 (COMPOUND_EXPR, void_type_node, body, call);
2774 TREE_SIDE_EFFECTS (body) = 1;
2776 /* Finally, do the return. */
2777 if (res_var != NULL_TREE)
2779 tree drt;
2780 gcc_assert (DECL_RESULT (method));
2781 /* Make sure we copy the result variable to the actual
2782 result. We use the type of the DECL_RESULT because it
2783 might be different from the return type of the function:
2784 it might be promoted. */
2785 drt = TREE_TYPE (DECL_RESULT (method));
2786 if (drt != TREE_TYPE (res_var))
2787 res_var = build1 (CONVERT_EXPR, drt, res_var);
2788 res_var = build2 (MODIFY_EXPR, drt, DECL_RESULT (method), res_var);
2789 TREE_SIDE_EFFECTS (res_var) = 1;
2792 body = build2 (COMPOUND_EXPR, void_type_node, body,
2793 build1 (RETURN_EXPR, void_type_node, res_var));
2794 TREE_SIDE_EFFECTS (body) = 1;
2796 /* Prepend class initialization for static methods reachable from
2797 other classes. */
2798 if (METHOD_STATIC (method)
2799 && (! METHOD_PRIVATE (method)
2800 || INNER_CLASS_P (DECL_CONTEXT (method))))
2802 tree init = build_call_expr (soft_initclass_node, 1,
2803 klass);
2804 body = build2 (COMPOUND_EXPR, void_type_node, init, body);
2805 TREE_SIDE_EFFECTS (body) = 1;
2808 bind = build3 (BIND_EXPR, void_type_node, BLOCK_VARS (block),
2809 body, block);
2810 return bind;
2814 /* Given lvalue EXP, return a volatile expression that references the
2815 same object. */
2817 tree
2818 java_modify_addr_for_volatile (tree exp)
2820 tree exp_type = TREE_TYPE (exp);
2821 tree v_type
2822 = build_qualified_type (exp_type,
2823 TYPE_QUALS (exp_type) | TYPE_QUAL_VOLATILE);
2824 tree addr = build_fold_addr_expr (exp);
2825 v_type = build_pointer_type (v_type);
2826 addr = fold_convert (v_type, addr);
2827 exp = build_fold_indirect_ref (addr);
2828 return exp;
2832 /* Expand an operation to extract from or store into a field.
2833 IS_STATIC is 1 iff the field is static.
2834 IS_PUTTING is 1 for putting into a field; 0 for getting from the field.
2835 FIELD_REF_INDEX is an index into the constant pool. */
2837 static void
2838 expand_java_field_op (int is_static, int is_putting, int field_ref_index)
2840 tree self_type
2841 = get_class_constant (current_jcf,
2842 COMPONENT_REF_CLASS_INDEX (&current_jcf->cpool,
2843 field_ref_index));
2844 const char *self_name
2845 = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (self_type)));
2846 tree field_name = COMPONENT_REF_NAME (&current_jcf->cpool, field_ref_index);
2847 tree field_signature = COMPONENT_REF_SIGNATURE (&current_jcf->cpool,
2848 field_ref_index);
2849 tree field_type = get_type_from_signature (field_signature);
2850 tree new_value = is_putting ? pop_value (field_type) : NULL_TREE;
2851 tree field_ref;
2852 int is_error = 0;
2853 tree original_self_type = self_type;
2854 tree field_decl;
2855 tree modify_expr;
2857 if (! CLASS_LOADED_P (self_type))
2858 load_class (self_type, 1);
2859 field_decl = lookup_field (&self_type, field_name);
2860 if (field_decl == error_mark_node)
2862 is_error = 1;
2864 else if (field_decl == NULL_TREE)
2866 if (! flag_verify_invocations)
2868 int flags = ACC_PUBLIC;
2869 if (is_static)
2870 flags |= ACC_STATIC;
2871 self_type = original_self_type;
2872 field_decl = add_field (original_self_type, field_name,
2873 field_type, flags);
2874 DECL_ARTIFICIAL (field_decl) = 1;
2875 DECL_IGNORED_P (field_decl) = 1;
2876 #if 0
2877 /* FIXME: We should be pessimistic about volatility. We
2878 don't know one way or another, but this is safe.
2879 However, doing this has bad effects on code quality. We
2880 need to look at better ways to do this. */
2881 TREE_THIS_VOLATILE (field_decl) = 1;
2882 #endif
2884 else
2886 error ("missing field '%s' in '%s'",
2887 IDENTIFIER_POINTER (field_name), self_name);
2888 is_error = 1;
2891 else if (build_java_signature (TREE_TYPE (field_decl)) != field_signature)
2893 error ("mismatching signature for field '%s' in '%s'",
2894 IDENTIFIER_POINTER (field_name), self_name);
2895 is_error = 1;
2897 field_ref = is_static ? NULL_TREE : pop_value (self_type);
2898 if (is_error)
2900 if (! is_putting)
2901 push_value (convert (field_type, integer_zero_node));
2902 flush_quick_stack ();
2903 return;
2906 field_ref = build_field_ref (field_ref, self_type, field_name);
2907 if (is_static
2908 && ! flag_indirect_dispatch)
2910 tree context = DECL_CONTEXT (field_ref);
2911 if (context != self_type && CLASS_INTERFACE (TYPE_NAME (context)))
2912 field_ref = build_class_init (context, field_ref);
2913 else
2914 field_ref = build_class_init (self_type, field_ref);
2916 if (is_putting)
2918 flush_quick_stack ();
2919 if (FIELD_FINAL (field_decl))
2921 if (DECL_CONTEXT (field_decl) != current_class)
2922 error ("assignment to final field %q+D not in field%'s class",
2923 field_decl);
2924 /* We used to check for assignments to final fields not
2925 occurring in the class initializer or in a constructor
2926 here. However, this constraint doesn't seem to be
2927 enforced by the JVM. */
2930 if (TREE_THIS_VOLATILE (field_decl))
2931 field_ref = java_modify_addr_for_volatile (field_ref);
2933 modify_expr = build2 (MODIFY_EXPR, TREE_TYPE (field_ref),
2934 field_ref, new_value);
2936 if (TREE_THIS_VOLATILE (field_decl))
2938 tree sync = builtin_decl_explicit (BUILT_IN_SYNC_SYNCHRONIZE);
2939 java_add_stmt (build_call_expr (sync, 0));
2942 java_add_stmt (modify_expr);
2944 else
2946 tree temp = build_decl (input_location,
2947 VAR_DECL, NULL_TREE, TREE_TYPE (field_ref));
2948 java_add_local_var (temp);
2950 if (TREE_THIS_VOLATILE (field_decl))
2951 field_ref = java_modify_addr_for_volatile (field_ref);
2953 modify_expr
2954 = build2 (MODIFY_EXPR, TREE_TYPE (field_ref), temp, field_ref);
2955 java_add_stmt (modify_expr);
2957 if (TREE_THIS_VOLATILE (field_decl))
2959 tree sync = builtin_decl_explicit (BUILT_IN_SYNC_SYNCHRONIZE);
2960 java_add_stmt (build_call_expr (sync, 0));
2963 push_value (temp);
2965 TREE_THIS_VOLATILE (field_ref) = TREE_THIS_VOLATILE (field_decl);
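/* Restore type_map and the stack pointer from the type state recorded
   for bytecode offset PC. */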
2968 static void
2969 load_type_state (int pc)
2971 int i;
2972 tree vec = (*type_states)[pc];
2973 int cur_length = TREE_VEC_LENGTH (vec);
2974 stack_pointer = cur_length - DECL_MAX_LOCALS(current_function_decl);
2975 for (i = 0; i < cur_length; i++)
2976 type_map [i] = TREE_VEC_ELT (vec, i);
2979 /* Go over METHOD's bytecode and note instruction starts in
2980 instruction_bits[]. */
2982 void
2983 note_instructions (JCF *jcf, tree method)
2985 int PC;
2986 unsigned char* byte_ops;
2987 long length = DECL_CODE_LENGTH (method);
2989 int saw_index;
2990 jint INT_temp;
2992 #undef RET /* Defined by config/i386/i386.h */
2993 #undef PTR
2994 #define BCODE byte_ops
2995 #define BYTE_type_node byte_type_node
2996 #define SHORT_type_node short_type_node
2997 #define INT_type_node int_type_node
2998 #define LONG_type_node long_type_node
2999 #define CHAR_type_node char_type_node
3000 #define PTR_type_node ptr_type_node
3001 #define FLOAT_type_node float_type_node
3002 #define DOUBLE_type_node double_type_node
3003 #define VOID_type_node void_type_node
3004 #define CONST_INDEX_1 (saw_index = 1, IMMEDIATE_u1)
3005 #define CONST_INDEX_2 (saw_index = 1, IMMEDIATE_u2)
3006 #define VAR_INDEX_1 (saw_index = 1, IMMEDIATE_u1)
3007 #define VAR_INDEX_2 (saw_index = 1, IMMEDIATE_u2)
3009 #define CHECK_PC_IN_RANGE(PC) ((void)1) /* Already handled by verifier. */
3011 JCF_SEEK (jcf, DECL_CODE_OFFSET (method));
3012 byte_ops = jcf->read_ptr;
3013 instruction_bits = XRESIZEVAR (char, instruction_bits, length + 1);
3014 memset (instruction_bits, 0, length + 1);
3015 vec_alloc (type_states, length + 1);
3016 type_states->quick_grow_cleared (length + 1);
3018 /* This pass figures out which PCs can be the targets of jumps. */
3019 for (PC = 0; PC < length;)
3021 int oldpc = PC; /* PC at instruction start. */
3022 instruction_bits [PC] |= BCODE_INSTRUCTION_START;
3023 switch (byte_ops[PC++])
3025 #define JAVAOP(OPNAME, OPCODE, OPKIND, OPERAND_TYPE, OPERAND_VALUE) \
3026 case OPCODE: \
3027 PRE_##OPKIND(OPERAND_TYPE, OPERAND_VALUE); \
3028 break;
3030 #define NOTE_LABEL(PC) note_label(oldpc, PC)
3032 #define PRE_PUSHC(OPERAND_TYPE, OPERAND_VALUE) (void)(OPERAND_VALUE);
3033 #define PRE_LOAD(OPERAND_TYPE, OPERAND_VALUE) (void)(OPERAND_VALUE);
3034 #define PRE_STORE(OPERAND_TYPE, OPERAND_VALUE) (void)(OPERAND_VALUE);
3035 #define PRE_STACK(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
3036 #define PRE_UNOP(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
3037 #define PRE_BINOP(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
3038 #define PRE_CONVERT(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
3039 #define PRE_CONVERT2(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
3041 #define PRE_SPECIAL(OPERAND_TYPE, INSTRUCTION) \
3042 PRE_SPECIAL_##INSTRUCTION(OPERAND_TYPE)
3043 #define PRE_SPECIAL_IINC(OPERAND_TYPE) \
3044 ((void) IMMEDIATE_u1, (void) IMMEDIATE_s1)
3045 #define PRE_SPECIAL_ENTER(IGNORE) /* nothing */
3046 #define PRE_SPECIAL_EXIT(IGNORE) /* nothing */
3047 #define PRE_SPECIAL_THROW(IGNORE) /* nothing */
3048 #define PRE_SPECIAL_BREAK(IGNORE) /* nothing */
3050 /* two forms of wide instructions */
3051 #define PRE_SPECIAL_WIDE(IGNORE) \
3053 int modified_opcode = IMMEDIATE_u1; \
3054 if (modified_opcode == OPCODE_iinc) \
3056 (void) IMMEDIATE_u2; /* indexbyte1 and indexbyte2 */ \
3057 (void) IMMEDIATE_s2; /* constbyte1 and constbyte2 */ \
3059 else \
3061 (void) IMMEDIATE_u2; /* indexbyte1 and indexbyte2 */ \
3065 #define PRE_IMPL(IGNORE1, IGNORE2) /* nothing */
3067 #define PRE_MONITOR(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
3069 #define PRE_RETURN(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
3070 #define PRE_ARRAY(OPERAND_TYPE, SUBOP) \
3071 PRE_ARRAY_##SUBOP(OPERAND_TYPE)
3072 #define PRE_ARRAY_LOAD(TYPE) /* nothing */
3073 #define PRE_ARRAY_STORE(TYPE) /* nothing */
3074 #define PRE_ARRAY_LENGTH(TYPE) /* nothing */
3075 #define PRE_ARRAY_NEW(TYPE) PRE_ARRAY_NEW_##TYPE
3076 #define PRE_ARRAY_NEW_NUM ((void) IMMEDIATE_u1)
3077 #define PRE_ARRAY_NEW_PTR ((void) IMMEDIATE_u2)
3078 #define PRE_ARRAY_NEW_MULTI ((void) IMMEDIATE_u2, (void) IMMEDIATE_u1)
3080 #define PRE_TEST(OPERAND_TYPE, OPERAND_VALUE) NOTE_LABEL (oldpc+IMMEDIATE_s2)
3081 #define PRE_COND(OPERAND_TYPE, OPERAND_VALUE) NOTE_LABEL (oldpc+IMMEDIATE_s2)
3082 #define PRE_BRANCH(OPERAND_TYPE, OPERAND_VALUE) \
3083 saw_index = 0; INT_temp = (OPERAND_VALUE); \
3084 if (!saw_index) NOTE_LABEL(oldpc + INT_temp);
3085 #define PRE_JSR(OPERAND_TYPE, OPERAND_VALUE) \
3086 saw_index = 0; INT_temp = (OPERAND_VALUE); \
3087 NOTE_LABEL (PC); \
3088 if (!saw_index) NOTE_LABEL(oldpc + INT_temp);
3090 #define PRE_RET(OPERAND_TYPE, OPERAND_VALUE) (void)(OPERAND_VALUE)
3092 #define PRE_SWITCH(OPERAND_TYPE, TABLE_OR_LOOKUP) \
3093 PC = (PC + 3) / 4 * 4; PRE_##TABLE_OR_LOOKUP##_SWITCH
3095 #define PRE_LOOKUP_SWITCH \
3096 { jint default_offset = IMMEDIATE_s4; jint npairs = IMMEDIATE_s4; \
3097 NOTE_LABEL (default_offset+oldpc); \
3098 if (npairs >= 0) \
3099 while (--npairs >= 0) { \
3100 jint match ATTRIBUTE_UNUSED = IMMEDIATE_s4; \
3101 jint offset = IMMEDIATE_s4; \
3102 NOTE_LABEL (offset+oldpc); } \
3105 #define PRE_TABLE_SWITCH \
3106 { jint default_offset = IMMEDIATE_s4; \
3107 jint low = IMMEDIATE_s4; jint high = IMMEDIATE_s4; \
3108 NOTE_LABEL (default_offset+oldpc); \
3109 if (low <= high) \
3110 while (low++ <= high) { \
3111 jint offset = IMMEDIATE_s4; \
3112 NOTE_LABEL (offset+oldpc); } \
3115 #define PRE_FIELD(MAYBE_STATIC, PUT_OR_GET) (void)(IMMEDIATE_u2);
3116 #define PRE_OBJECT(MAYBE_STATIC, PUT_OR_GET) (void)(IMMEDIATE_u2);
3117 #define PRE_INVOKE(MAYBE_STATIC, IS_INTERFACE) \
3118 (void)(IMMEDIATE_u2); \
3119 PC += 2 * IS_INTERFACE /* for invokeinterface */;
3121 #include "javaop.def"
3122 #undef JAVAOP
3124 } /* for */
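/* Translate METHOD's bytecode from JCF into trees, emitting labels,
   line number information, and statements for each reachable
   instruction; unreachable bytecodes are turned into nops. */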
3127 void
3128 expand_byte_code (JCF *jcf, tree method)
3130 int PC;
3131 int i;
3132 const unsigned char *linenumber_pointer;
3133 int dead_code_index = -1;
3134 unsigned char* byte_ops;
3135 long length = DECL_CODE_LENGTH (method);
3136 location_t max_location = input_location;
3138 stack_pointer = 0;
3139 JCF_SEEK (jcf, DECL_CODE_OFFSET (method));
3140 byte_ops = jcf->read_ptr;
3142 /* We make an initial pass of the line number table, to note
3143 which instructions have associated line number entries. */
3144 linenumber_pointer = linenumber_table;
3145 for (i = 0; i < linenumber_count; i++)
3147 int pc = GET_u2 (linenumber_pointer);
3148 linenumber_pointer += 4;
3149 if (pc >= length)
3150 warning (0, "invalid PC in line number table");
3151 else
3153 if ((instruction_bits[pc] & BCODE_HAS_LINENUMBER) != 0)
3154 instruction_bits[pc] |= BCODE_HAS_MULTI_LINENUMBERS;
3155 instruction_bits[pc] |= BCODE_HAS_LINENUMBER;
3159 if (! verify_jvm_instructions_new (jcf, byte_ops, length))
3160 return;
3162 promote_arguments ();
3163 cache_this_class_ref (method);
3164 cache_cpool_data_ref ();
3166 /* Translate bytecodes. */
3167 linenumber_pointer = linenumber_table;
3168 for (PC = 0; PC < length;)
3170 if ((instruction_bits [PC] & BCODE_TARGET) != 0 || PC == 0)
3172 tree label = lookup_label (PC);
3173 flush_quick_stack ();
3174 if ((instruction_bits [PC] & BCODE_TARGET) != 0)
3175 java_add_stmt (build1 (LABEL_EXPR, void_type_node, label));
3176 if ((instruction_bits[PC] & BCODE_VERIFIED) != 0)
3177 load_type_state (PC);
3180 if (! (instruction_bits [PC] & BCODE_VERIFIED))
3182 if (dead_code_index == -1)
3184 /* This is the start of a region of unreachable bytecodes.
3185 They still need to be processed in order for EH ranges
3186 to get handled correctly. However, we can simply
3187 replace these bytecodes with nops. */
3188 dead_code_index = PC;
3191 /* Turn this bytecode into a nop. */
3192 byte_ops[PC] = 0x0;
3194 else
3196 if (dead_code_index != -1)
3198 /* We've just reached the end of a region of dead code. */
3199 if (extra_warnings)
3200 warning (0, "unreachable bytecode from %d to before %d",
3201 dead_code_index, PC);
3202 dead_code_index = -1;
3206 /* Handle possible line number entry for this PC.
3208 This code handles out-of-order and multiple line numbers per PC,
3209 but is optimized for the case of line numbers increasing
3210 monotonically with PC. */
3211 if ((instruction_bits[PC] & BCODE_HAS_LINENUMBER) != 0)
3213 if ((instruction_bits[PC] & BCODE_HAS_MULTI_LINENUMBERS) != 0
3214 || GET_u2 (linenumber_pointer) != PC)
3215 linenumber_pointer = linenumber_table;
3216 while (linenumber_pointer < linenumber_table + linenumber_count * 4)
3218 int pc = GET_u2 (linenumber_pointer);
3219 linenumber_pointer += 4;
3220 if (pc == PC)
3222 int line = GET_u2 (linenumber_pointer - 2);
3223 input_location = linemap_line_start (line_table, line, 1);
3224 if (input_location > max_location)
3225 max_location = input_location;
3226 if (!(instruction_bits[PC] & BCODE_HAS_MULTI_LINENUMBERS))
3227 break;
3231 maybe_pushlevels (PC);
3232 PC = process_jvm_instruction (PC, byte_ops, length);
3233 maybe_poplevels (PC);
3234 } /* for */
3236 uncache_this_class_ref (method);
3238 if (dead_code_index != -1)
3240 /* We've just reached the end of a region of dead code. */
3241 if (extra_warnings)
3242 warning (0, "unreachable bytecode from %d to the end of the method",
3243 dead_code_index);
3246 DECL_FUNCTION_LAST_LINE (method) = max_location;
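/* Push the value of constant pool entry INDEX of JCF onto the quick
   stack, building String or Class references where necessary. */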
3249 static void
3250 java_push_constant_from_pool (JCF *jcf, int index)
3252 tree c;
3253 if (JPOOL_TAG (jcf, index) == CONSTANT_String)
3255 tree name;
3256 name = get_name_constant (jcf, JPOOL_USHORT1 (jcf, index));
3257 index = alloc_name_constant (CONSTANT_String, name);
3258 c = build_ref_from_constant_pool (index);
3259 c = convert (promote_type (string_type_node), c);
3261 else if (JPOOL_TAG (jcf, index) == CONSTANT_Class
3262 || JPOOL_TAG (jcf, index) == CONSTANT_ResolvedClass)
3264 tree record = get_class_constant (jcf, index);
3265 c = build_class_ref (record);
3267 else
3268 c = get_constant (jcf, index);
3269 push_value (c);
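/* Expand the single bytecode instruction at PC in BYTE_OPS and return
   the PC of the following instruction. */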
3272 int
3273 process_jvm_instruction (int PC, const unsigned char* byte_ops,
3274 long length ATTRIBUTE_UNUSED)
3276 const char *opname; /* Temporary ??? */
3277 int oldpc = PC; /* PC at instruction start. */
3279 /* If the instruction is at the beginning of an exception handler,
3280 replace the top of the stack with the thrown object reference. */
3281 if (instruction_bits [PC] & BCODE_EXCEPTION_TARGET)
3283 /* Note that the verifier will not emit a type map at all for
3284 dead exception handlers. In this case we just ignore the
3285 situation. */
3286 if ((instruction_bits[PC] & BCODE_VERIFIED) != 0)
3288 tree type = pop_type (promote_type (throwable_type_node));
3289 push_value (build_exception_object_ref (type));
3293 switch (byte_ops[PC++])
3295 #define JAVAOP(OPNAME, OPCODE, OPKIND, OPERAND_TYPE, OPERAND_VALUE) \
3296 case OPCODE: \
3297 opname = #OPNAME; \
3298 OPKIND(OPERAND_TYPE, OPERAND_VALUE); \
3299 break;
3301 #define RET(OPERAND_TYPE, OPERAND_VALUE) \
3303 int saw_index = 0; \
3304 int index = OPERAND_VALUE; \
3305 (void) saw_index; /* Avoid set but not used warning. */ \
3306 build_java_ret \
3307 (find_local_variable (index, return_address_type_node, oldpc)); \
3310 #define JSR(OPERAND_TYPE, OPERAND_VALUE) \
3312 /* OPERAND_VALUE may have side-effects on PC */ \
3313 int opvalue = OPERAND_VALUE; \
3314 build_java_jsr (oldpc + opvalue, PC); \
3317 /* Push a constant onto the stack. */
3318 #define PUSHC(OPERAND_TYPE, OPERAND_VALUE) \
3319 { int saw_index = 0; int ival = (OPERAND_VALUE); \
3320 if (saw_index) java_push_constant_from_pool (current_jcf, ival); \
3321 else expand_java_pushc (ival, OPERAND_TYPE##_type_node); }
3323 /* internal macro added for use by the WIDE case */
3324 #define LOAD_INTERNAL(OPTYPE, OPVALUE) \
3325 expand_load_internal (OPVALUE, type_map[OPVALUE], oldpc);
3327 /* Push local variable onto the opcode stack. */
3328 #define LOAD(OPERAND_TYPE, OPERAND_VALUE) \
3330 /* have to do this since OPERAND_VALUE may have side-effects */ \
3331 int opvalue = OPERAND_VALUE; \
3332 LOAD_INTERNAL(OPERAND_TYPE##_type_node, opvalue); \
3335 #define RETURN(OPERAND_TYPE, OPERAND_VALUE) \
3336 expand_java_return (OPERAND_TYPE##_type_node)
3338 #define REM_EXPR TRUNC_MOD_EXPR
3339 #define BINOP(OPERAND_TYPE, OPERAND_VALUE) \
3340 expand_java_binop (OPERAND_TYPE##_type_node, OPERAND_VALUE##_EXPR)
3342 #define FIELD(IS_STATIC, IS_PUT) \
3343 expand_java_field_op (IS_STATIC, IS_PUT, IMMEDIATE_u2)
3345 #define TEST(OPERAND_TYPE, CONDITION) \
3346 expand_test (CONDITION##_EXPR, OPERAND_TYPE##_type_node, oldpc+IMMEDIATE_s2)
3348 #define COND(OPERAND_TYPE, CONDITION) \
3349 expand_cond (CONDITION##_EXPR, OPERAND_TYPE##_type_node, oldpc+IMMEDIATE_s2)
3351 #define BRANCH(OPERAND_TYPE, OPERAND_VALUE) \
3352 BRANCH_##OPERAND_TYPE (OPERAND_VALUE)
3354 #define BRANCH_GOTO(OPERAND_VALUE) \
3355 expand_java_goto (oldpc + OPERAND_VALUE)
3357 #define BRANCH_CALL(OPERAND_VALUE) \
3358 expand_java_call (oldpc + OPERAND_VALUE, oldpc)
3360 #if 0
3361 #define BRANCH_RETURN(OPERAND_VALUE) \
3363 tree type = OPERAND_TYPE##_type_node; \
3364 tree value = find_local_variable (OPERAND_VALUE, type, oldpc); \
3365 expand_java_ret (value); \
3367 #endif
3369 #define NOT_IMPL(OPERAND_TYPE, OPERAND_VALUE) \
3370 fprintf (stderr, "%3d: %s ", oldpc, opname); \
3371 fprintf (stderr, "(not implemented)\n")
3372 #define NOT_IMPL1(OPERAND_VALUE) \
3373 fprintf (stderr, "%3d: %s ", oldpc, opname); \
3374 fprintf (stderr, "(not implemented)\n")
3376 #define BRANCH_RETURN(OPERAND_VALUE) NOT_IMPL1(OPERAND_VALUE)
3378 #define STACK(SUBOP, COUNT) STACK_##SUBOP (COUNT)
3380 #define STACK_POP(COUNT) java_stack_pop (COUNT)
3382 #define STACK_SWAP(COUNT) java_stack_swap()
3384 #define STACK_DUP(COUNT) java_stack_dup (COUNT, 0)
3385 #define STACK_DUPx1(COUNT) java_stack_dup (COUNT, 1)
3386 #define STACK_DUPx2(COUNT) java_stack_dup (COUNT, 2)
3388 #define SWITCH(OPERAND_TYPE, TABLE_OR_LOOKUP) \
3389 PC = (PC + 3) / 4 * 4; TABLE_OR_LOOKUP##_SWITCH
3391 #define LOOKUP_SWITCH \
3392 { jint default_offset = IMMEDIATE_s4; jint npairs = IMMEDIATE_s4; \
3393 tree selector = pop_value (INT_type_node); \
3394 tree switch_expr = expand_java_switch (selector, oldpc + default_offset); \
3395 while (--npairs >= 0) \
3397 jint match = IMMEDIATE_s4; jint offset = IMMEDIATE_s4; \
3398 expand_java_add_case (switch_expr, match, oldpc + offset); \
3402 #define TABLE_SWITCH \
3403 { jint default_offset = IMMEDIATE_s4; \
3404 jint low = IMMEDIATE_s4; jint high = IMMEDIATE_s4; \
3405 tree selector = pop_value (INT_type_node); \
3406 tree switch_expr = expand_java_switch (selector, oldpc + default_offset); \
3407 for (; low <= high; low++) \
3409 jint offset = IMMEDIATE_s4; \
3410 expand_java_add_case (switch_expr, low, oldpc + offset); \
3414 #define INVOKE(MAYBE_STATIC, IS_INTERFACE) \
3415 { int opcode = byte_ops[PC-1]; \
3416 int method_ref_index = IMMEDIATE_u2; \
3417 int nargs; \
3418 if (IS_INTERFACE) { nargs = IMMEDIATE_u1; (void) IMMEDIATE_u1; } \
3419 else nargs = -1; \
3420 expand_invoke (opcode, method_ref_index, nargs); \
3423 /* Handle new, checkcast, instanceof */
3424 #define OBJECT(TYPE, OP) \
3425 expand_java_##OP (get_class_constant (current_jcf, IMMEDIATE_u2))
3427 #define ARRAY(OPERAND_TYPE, SUBOP) ARRAY_##SUBOP(OPERAND_TYPE)
3429 #define ARRAY_LOAD(OPERAND_TYPE) \
3431 expand_java_arrayload( OPERAND_TYPE##_type_node ); \
3434 #define ARRAY_STORE(OPERAND_TYPE) \
3436 expand_java_arraystore( OPERAND_TYPE##_type_node ); \
3439 #define ARRAY_LENGTH(OPERAND_TYPE) expand_java_array_length();
3440 #define ARRAY_NEW(OPERAND_TYPE) ARRAY_NEW_##OPERAND_TYPE()
3441 #define ARRAY_NEW_PTR() \
3442 push_value (build_anewarray (get_class_constant (current_jcf, \
3443 IMMEDIATE_u2), \
3444 pop_value (int_type_node)));
3445 #define ARRAY_NEW_NUM() \
3447 int atype = IMMEDIATE_u1; \
3448 push_value (build_newarray (atype, pop_value (int_type_node)));\
3450 #define ARRAY_NEW_MULTI() \
3452 tree klass = get_class_constant (current_jcf, IMMEDIATE_u2 ); \
3453 int ndims = IMMEDIATE_u1; \
3454 expand_java_multianewarray( klass, ndims ); \
3457 #define UNOP(OPERAND_TYPE, OPERAND_VALUE) \
3458 push_value (fold_build1 (NEGATE_EXPR, OPERAND_TYPE##_type_node, \
3459 pop_value (OPERAND_TYPE##_type_node)));
3461 #define CONVERT2(FROM_TYPE, TO_TYPE) \
3463 push_value (build1 (NOP_EXPR, int_type_node, \
3464 (convert (TO_TYPE##_type_node, \
3465 pop_value (FROM_TYPE##_type_node))))); \
3468 #define CONVERT(FROM_TYPE, TO_TYPE) \
3470 push_value (convert (TO_TYPE##_type_node, \
3471 pop_value (FROM_TYPE##_type_node))); \
3474 /* internal macro added for use by the WIDE case
3475 Added TREE_TYPE (decl) assignment, apbianco */
3476 #define STORE_INTERNAL(OPTYPE, OPVALUE) \
3478 tree decl, value; \
3479 int index = OPVALUE; \
3480 tree type = OPTYPE; \
3481 value = pop_value (type); \
3482 type = TREE_TYPE (value); \
3483 decl = find_local_variable (index, type, oldpc); \
3484 set_local_type (index, type); \
3485 java_add_stmt (build2 (MODIFY_EXPR, type, decl, value)); \
3488 #define STORE(OPERAND_TYPE, OPERAND_VALUE) \
3490 /* have to do this since OPERAND_VALUE may have side-effects */ \
3491 int opvalue = OPERAND_VALUE; \
3492 STORE_INTERNAL(OPERAND_TYPE##_type_node, opvalue); \
3495 #define SPECIAL(OPERAND_TYPE, INSTRUCTION) \
3496 SPECIAL_##INSTRUCTION(OPERAND_TYPE)
3498 #define SPECIAL_ENTER(IGNORED) MONITOR_OPERATION (soft_monitorenter_node)
3499 #define SPECIAL_EXIT(IGNORED) MONITOR_OPERATION (soft_monitorexit_node)
3501 #define MONITOR_OPERATION(call) \
3503 tree o = pop_value (ptr_type_node); \
3504 tree c; \
3505 flush_quick_stack (); \
3506 c = build_java_monitor (call, o); \
3507 TREE_SIDE_EFFECTS (c) = 1; \
3508 java_add_stmt (c); \
3511 #define SPECIAL_IINC(IGNORED) \
3513 unsigned int local_var_index = IMMEDIATE_u1; \
3514 int ival = IMMEDIATE_s1; \
3515 expand_iinc(local_var_index, ival, oldpc); \
3518 #define SPECIAL_WIDE(IGNORED) \
3520 int modified_opcode = IMMEDIATE_u1; \
3521 unsigned int local_var_index = IMMEDIATE_u2; \
3522 switch (modified_opcode) \
3524 case OPCODE_iinc: \
3526 int ival = IMMEDIATE_s2; \
3527 expand_iinc (local_var_index, ival, oldpc); \
3528 break; \
3530 case OPCODE_iload: \
3531 case OPCODE_lload: \
3532 case OPCODE_fload: \
3533 case OPCODE_dload: \
3534 case OPCODE_aload: \
3536 /* duplicate code from LOAD macro */ \
3537 LOAD_INTERNAL(operand_type[modified_opcode], local_var_index); \
3538 break; \
3540 case OPCODE_istore: \
3541 case OPCODE_lstore: \
3542 case OPCODE_fstore: \
3543 case OPCODE_dstore: \
3544 case OPCODE_astore: \
3546 STORE_INTERNAL(operand_type[modified_opcode], local_var_index); \
3547 break; \
3549 default: \
3550 error ("unrecognized wide sub-instruction"); \
3554 #define SPECIAL_THROW(IGNORED) \
3555 build_java_athrow (pop_value (throwable_type_node))
3557 #define SPECIAL_BREAK NOT_IMPL1
3558 #define IMPL NOT_IMPL
3560 #include "javaop.def"
3561 #undef JAVAOP
3562 default:
3563 fprintf (stderr, "%3d: unknown(%3d)\n", oldpc, byte_ops[PC]);
3565 return PC;
3568 /* Return the opcode at PC in the code section pointed to by
3569 CODE_OFFSET. */
3571 static unsigned char
3572 peek_opcode_at_pc (JCF *jcf, int code_offset, int pc)
3574 unsigned char opcode;
3575 long absolute_offset = (long)JCF_TELL (jcf);
3577 JCF_SEEK (jcf, code_offset);
3578 opcode = jcf->read_ptr [pc];
3579 JCF_SEEK (jcf, absolute_offset);
3580 return opcode;
3583 /* Some bytecode compilers are emitting accurate LocalVariableTable
3584 attributes. Here's an example:
3586 PC <t>store_<n>
3587 PC+1 ...
3589 Attribute "LocalVariableTable"
3590 slot #<n>: ... (PC: PC+1 length: L)
3592 This is accurate because the local in slot <n> really exists after
3593 the opcode at PC is executed, hence from PC+1 to PC+1+L.
3595 This procedure recognizes this situation and extends the live range
3596 of the local in SLOT to START_PC-1 or START_PC-2 (depending on the
3597 length of the store instruction).
3599 This function is used by `give_name_to_locals' so that a local's
3600 DECL features a DECL_LOCAL_START_PC such that the first related
3601 store operation will use DECL as a destination, not an unrelated
3602 temporary created for the occasion.
3604 This function uses a global (instruction_bits) that `note_instructions'
3605 should have allocated and filled properly. */
3607 int
3608 maybe_adjust_start_pc (struct JCF *jcf, int code_offset,
3609 int start_pc, int slot)
3611 int first, index, opcode;
3612 int pc, insn_pc;
3613 int wide_found = 0;
3615 if (!start_pc)
3616 return start_pc;
3618 first = index = -1;
3620 /* Find the start of the instruction that precedes START_PC and remember it. */
3621 for (pc = start_pc-1; pc; pc--)
3622 if (instruction_bits [pc] & BCODE_INSTRUCTION_START)
3623 break;
3624 insn_pc = pc;
3626 /* Retrieve the instruction, handle `wide'. */
3627 opcode = (int) peek_opcode_at_pc (jcf, code_offset, pc++);
3628 if (opcode == OPCODE_wide)
3630 wide_found = 1;
3631 opcode = (int) peek_opcode_at_pc (jcf, code_offset, pc++);
3634 switch (opcode)
3636 case OPCODE_astore_0:
3637 case OPCODE_astore_1:
3638 case OPCODE_astore_2:
3639 case OPCODE_astore_3:
3640 first = OPCODE_astore_0;
3641 break;
3643 case OPCODE_istore_0:
3644 case OPCODE_istore_1:
3645 case OPCODE_istore_2:
3646 case OPCODE_istore_3:
3647 first = OPCODE_istore_0;
3648 break;
3650 case OPCODE_lstore_0:
3651 case OPCODE_lstore_1:
3652 case OPCODE_lstore_2:
3653 case OPCODE_lstore_3:
3654 first = OPCODE_lstore_0;
3655 break;
3657 case OPCODE_fstore_0:
3658 case OPCODE_fstore_1:
3659 case OPCODE_fstore_2:
3660 case OPCODE_fstore_3:
3661 first = OPCODE_fstore_0;
3662 break;
3664 case OPCODE_dstore_0:
3665 case OPCODE_dstore_1:
3666 case OPCODE_dstore_2:
3667 case OPCODE_dstore_3:
3668 first = OPCODE_dstore_0;
3669 break;
3671 case OPCODE_astore:
3672 case OPCODE_istore:
3673 case OPCODE_lstore:
3674 case OPCODE_fstore:
3675 case OPCODE_dstore:
3676 index = peek_opcode_at_pc (jcf, code_offset, pc);
3677 if (wide_found)
3679 int other = peek_opcode_at_pc (jcf, code_offset, ++pc);
3680 index = (other << 8) + index;
3682 break;
3685 /* Now we decide: first >0 means we have a <t>store_<n>, index >0
3686 means we have a <t>store. */
3687 if ((first > 0 && opcode - first == slot) || (index > 0 && index == slot))
3688 start_pc = insn_pc;
3690 return start_pc;
3693 /* Build a node to represent empty statements and blocks. */
3695 tree
3696 build_java_empty_stmt (void)
3698 tree t = build_empty_stmt (input_location);
3699 return t;
3702 /* Promote all args of integral type before generating any code. */
3704 static void
3705 promote_arguments (void)
3707 int i;
3708 tree arg;
3709 for (arg = DECL_ARGUMENTS (current_function_decl), i = 0;
3710 arg != NULL_TREE; arg = DECL_CHAIN (arg), i++)
3712 tree arg_type = TREE_TYPE (arg);
3713 if (INTEGRAL_TYPE_P (arg_type)
3714 && TYPE_PRECISION (arg_type) < 32)
3716 tree copy = find_local_variable (i, integer_type_node, -1);
3717 java_add_stmt (build2 (MODIFY_EXPR, integer_type_node,
3718 copy,
3719 fold_convert (integer_type_node, arg)));
3721 if (TYPE_IS_WIDE (arg_type))
3722 i++;
3726 /* Create a local variable that points to the constant pool. */
3728 static void
3729 cache_cpool_data_ref (void)
3731 if (optimize)
3733 tree cpool;
3734 tree d = build_constant_data_ref (flag_indirect_classes);
3735 tree cpool_ptr = build_decl (input_location, VAR_DECL, NULL_TREE,
3736 build_pointer_type (TREE_TYPE (d)));
3737 java_add_local_var (cpool_ptr);
3738 TREE_CONSTANT (cpool_ptr) = 1;
3740 java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (cpool_ptr),
3741 cpool_ptr, build_address_of (d)));
3742 cpool = build1 (INDIRECT_REF, TREE_TYPE (d), cpool_ptr);
3743 TREE_THIS_NOTRAP (cpool) = 1;
3744 TYPE_CPOOL_DATA_REF (output_class) = cpool;
3748 #include "gt-java-expr.h"