[official-gcc.git] / gcc / java / expr.c
1 /* Process expressions for the GNU compiler for the Java(TM) language.
2 Copyright (C) 1996-2014 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>.
20 Java and all Java-based marks are trademarks or registered trademarks
21 of Sun Microsystems, Inc. in the United States and other countries.
22 The Free Software Foundation is independent of Sun Microsystems, Inc. */
24 /* Hacked by Per Bothner <bothner@cygnus.com> February 1996. */
26 #include "config.h"
27 #include "system.h"
28 #include "coretypes.h"
29 #include "tm.h" /* For INT_TYPE_SIZE,
30 TARGET_VTABLE_USES_DESCRIPTORS,
31 BITS_PER_UNIT,
32 MODIFY_JNI_METHOD_CALL and
33 PARM_BOUNDARY. */
35 #include "tree.h"
36 #include "stringpool.h"
37 #include "stor-layout.h"
38 #include "flags.h"
39 #include "java-tree.h"
40 #include "javaop.h"
41 #include "java-opcodes.h"
42 #include "jcf.h"
43 #include "java-except.h"
44 #include "parse.h"
45 #include "diagnostic-core.h"
46 #include "ggc.h"
47 #include "tree-iterator.h"
48 #include "target.h"
49 #include "wide-int.h"
51 static void flush_quick_stack (void);
52 static void push_value (tree);
53 static tree pop_value (tree);
54 static void java_stack_swap (void);
55 static void java_stack_dup (int, int);
56 static void build_java_athrow (tree);
57 static void build_java_jsr (int, int);
58 static void build_java_ret (tree);
59 static void expand_java_multianewarray (tree, int);
60 static void expand_java_arraystore (tree);
61 static void expand_java_arrayload (tree);
62 static void expand_java_array_length (void);
63 static tree build_java_monitor (tree, tree);
64 static void expand_java_pushc (int, tree);
65 static void expand_java_return (tree);
66 static void expand_load_internal (int, tree, int);
67 static void expand_java_NEW (tree);
68 static void expand_java_INSTANCEOF (tree);
69 static void expand_java_CHECKCAST (tree);
70 static void expand_iinc (unsigned int, int, int);
71 static void expand_java_binop (tree, enum tree_code);
72 static void note_label (int, int);
73 static void expand_compare (enum tree_code, tree, tree, int);
74 static void expand_test (enum tree_code, tree, int);
75 static void expand_cond (enum tree_code, tree, int);
76 static void expand_java_goto (int);
77 static tree expand_java_switch (tree, int);
78 static void expand_java_add_case (tree, int, int);
79 static vec<tree, va_gc> *pop_arguments (tree);
80 static void expand_invoke (int, int, int);
81 static void expand_java_field_op (int, int, int);
82 static void java_push_constant_from_pool (struct JCF *, int);
83 static void java_stack_pop (int);
84 static tree build_java_throw_out_of_bounds_exception (tree);
85 static tree build_java_check_indexed_type (tree, tree);
86 static unsigned char peek_opcode_at_pc (struct JCF *, int, int);
87 static void promote_arguments (void);
88 static void cache_cpool_data_ref (void);
90 static GTY(()) tree operand_type[59];
92 static GTY(()) tree methods_ident;
93 static GTY(()) tree ncode_ident;
94 tree dtable_ident = NULL_TREE;
96 /* Set to nonzero value in order to emit class initialization code
97 before static field references. */
98 int always_initialize_class_p = 0;
100 /* We store the stack state in two places:
101 Within a basic block, we use the quick_stack, which is a vec of expression
102 nodes.
103 This is the top part of the stack; below that we use find_stack_slot.
104 At the end of a basic block, the quick_stack must be flushed
105 to the stack slot array (as handled by find_stack_slot).
106 Using quick_stack generates better code (especially when
107 compiled without optimization), because we do not have to
108 explicitly store and load trees to temporary variables.
110 If a variable is on the quick stack, it means the value of the variable
111 when the quick stack was last flushed. Conceptually, flush_quick_stack
112 saves all the quick_stack elements in parallel. However, that is
113 complicated, so it actually saves them (i.e. copies each stack value
114 to its home virtual register) starting from low indexes. This allows a quick_stack
115 element at index i (counting from the bottom of the stack) to reference
116 the virtual registers of slots that are >= i, but not those that are deeper.
117 This convention makes most operations easier. For example iadd works
118 even when the stack contains (reg[0], reg[1]): It results in the
119 stack containing (reg[0]+reg[1]), which is OK. However, some stack
120 operations are more complicated. For example dup given a stack
121 containing (reg[0]) would yield (reg[0], reg[0]), which would violate
122 the convention, since stack value 1 would refer to a register with
123 lower index (reg[0]), which flush_quick_stack does not safely handle.
124 So dup cannot just add an extra element to the quick_stack, but iadd can.
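/* Worked example of the flush order (illustrative, not from the original
   source): if the quick stack holds (reg[1] + reg[2], reg[2]), flushing
   low-to-high first stores reg[0] = reg[1] + reg[2] and then
   reg[1] = reg[2].  The second store still reads the original reg[2],
   because each earlier store only wrote a slot with a lower index than
   anything still pending.  */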
127 static GTY(()) vec<tree, va_gc> *quick_stack;
129 /* The physical memory page size used in this computer. See
130 build_field_ref(). */
131 static GTY(()) tree page_size;
133 /* The stack pointer of the Java virtual machine.
134 This does include the size of the quick_stack. */
136 int stack_pointer;
138 const unsigned char *linenumber_table;
139 int linenumber_count;
141 /* Largest pc so far in this method that has been passed to lookup_label. */
142 int highest_label_pc_this_method = -1;
144 /* Base value for this method to add to pc to get generated label. */
145 int start_label_pc_this_method = 0;
147 void
148 init_expr_processing (void)
150 operand_type[21] = operand_type[54] = int_type_node;
151 operand_type[22] = operand_type[55] = long_type_node;
152 operand_type[23] = operand_type[56] = float_type_node;
153 operand_type[24] = operand_type[57] = double_type_node;
154 operand_type[25] = operand_type[58] = ptr_type_node;
157 tree
158 java_truthvalue_conversion (tree expr)
160 /* It is simpler and generates better code to have only TRUTH_*_EXPR
161 or comparison expressions as truth values at this level.
163 This function should normally be identity for Java. */
165 switch (TREE_CODE (expr))
167 case EQ_EXPR: case NE_EXPR: case UNEQ_EXPR: case LTGT_EXPR:
168 case LE_EXPR: case GE_EXPR: case LT_EXPR: case GT_EXPR:
169 case UNLE_EXPR: case UNGE_EXPR: case UNLT_EXPR: case UNGT_EXPR:
170 case ORDERED_EXPR: case UNORDERED_EXPR:
171 case TRUTH_ANDIF_EXPR:
172 case TRUTH_ORIF_EXPR:
173 case TRUTH_AND_EXPR:
174 case TRUTH_OR_EXPR:
175 case TRUTH_XOR_EXPR:
176 case TRUTH_NOT_EXPR:
177 case ERROR_MARK:
178 return expr;
180 case INTEGER_CST:
181 return integer_zerop (expr) ? boolean_false_node : boolean_true_node;
183 case REAL_CST:
184 return real_zerop (expr) ? boolean_false_node : boolean_true_node;
186 /* are these legal? XXX JH */
187 case NEGATE_EXPR:
188 case ABS_EXPR:
189 case FLOAT_EXPR:
190 /* These don't change whether an object is nonzero or zero. */
191 return java_truthvalue_conversion (TREE_OPERAND (expr, 0));
193 case COND_EXPR:
194 /* Distribute the conversion into the arms of a COND_EXPR. */
195 return fold_build3 (COND_EXPR, boolean_type_node, TREE_OPERAND (expr, 0),
196 java_truthvalue_conversion (TREE_OPERAND (expr, 1)),
197 java_truthvalue_conversion (TREE_OPERAND (expr, 2)));
199 case NOP_EXPR:
200 /* If this is widening the argument, we can ignore it. */
201 if (TYPE_PRECISION (TREE_TYPE (expr))
202 >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr, 0))))
203 return java_truthvalue_conversion (TREE_OPERAND (expr, 0));
204 /* fall through to default */
206 default:
207 return fold_build2 (NE_EXPR, boolean_type_node,
208 expr, boolean_false_node);
212 /* Save any stack slots that happen to be in the quick_stack into their
213 home virtual register slots.
215 The copy order is from low stack index to high, to support the invariant
216 that the expression for a slot may contain decls for stack slots with
217 higher (or the same) index, but not lower. */
219 static void
220 flush_quick_stack (void)
222 int stack_index = stack_pointer;
223 unsigned ix;
224 tree t;
226 /* Count the number of slots the quick stack is holding. */
227 for (ix = 0; vec_safe_iterate (quick_stack, ix, &t); ix++)
228 stack_index -= 1 + TYPE_IS_WIDE (TREE_TYPE (t));
230 for (ix = 0; vec_safe_iterate (quick_stack, ix, &t); ix++)
232 tree decl, type = TREE_TYPE (t);
234 decl = find_stack_slot (stack_index, type);
235 if (decl != t)
236 java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (t), decl, t));
237 stack_index += 1 + TYPE_IS_WIDE (type);
240 vec_safe_truncate (quick_stack, 0);
243 /* Push TYPE on the type stack.
244 Return true on success, 0 on overflow. */
247 push_type_0 (tree type)
249 int n_words;
250 type = promote_type (type);
251 n_words = 1 + TYPE_IS_WIDE (type);
252 if (stack_pointer + n_words > DECL_MAX_STACK (current_function_decl))
253 return 0;
254 /* Allocate decl for this variable now, so we get a temporary that
255 survives the whole method. */
256 find_stack_slot (stack_pointer, type);
257 stack_type_map[stack_pointer++] = type;
258 n_words--;
259 while (--n_words >= 0)
260 stack_type_map[stack_pointer++] = TYPE_SECOND;
261 return 1;
264 void
265 push_type (tree type)
267 int r = push_type_0 (type);
268 gcc_assert (r);
271 static void
272 push_value (tree value)
274 tree type = TREE_TYPE (value);
275 if (TYPE_PRECISION (type) < 32 && INTEGRAL_TYPE_P (type))
277 type = promote_type (type);
278 value = convert (type, value);
280 push_type (type);
281 vec_safe_push (quick_stack, value);
283 /* If the value has a side effect, then we need to evaluate it
284 whether or not the result is used. If the value ends up on the
285 quick stack and is then popped, this won't happen -- so we flush
286 the quick stack. It is safest to simply always flush, though,
287 since TREE_SIDE_EFFECTS doesn't capture COMPONENT_REF, and for
288 the latter we may need to strip conversions. */
289 flush_quick_stack ();
292 /* Pop a type from the type stack.
293 TYPE is the expected type. Return the actual type, which must be
294 convertible to TYPE.
295 On an error, *MESSAGEP is set to a freshly malloc'd error message. */
297 tree
298 pop_type_0 (tree type, char **messagep)
300 int n_words;
301 tree t;
302 *messagep = NULL;
303 if (TREE_CODE (type) == RECORD_TYPE)
304 type = promote_type (type);
305 n_words = 1 + TYPE_IS_WIDE (type);
306 if (stack_pointer < n_words)
308 *messagep = xstrdup ("stack underflow");
309 return type;
311 while (--n_words > 0)
313 if (stack_type_map[--stack_pointer] != void_type_node)
315 *messagep = xstrdup ("Invalid multi-word value on type stack");
316 return type;
319 t = stack_type_map[--stack_pointer];
320 if (type == NULL_TREE || t == type)
321 return t;
322 if (TREE_CODE (t) == TREE_LIST)
326 tree tt = TREE_PURPOSE (t);
327 if (! can_widen_reference_to (tt, type))
329 t = tt;
330 goto fail;
332 t = TREE_CHAIN (t);
334 while (t);
335 return t;
337 if (INTEGRAL_TYPE_P (type) && INTEGRAL_TYPE_P (t)
338 && TYPE_PRECISION (type) <= 32 && TYPE_PRECISION (t) <= 32)
339 return t;
340 if (TREE_CODE (type) == POINTER_TYPE && TREE_CODE (t) == POINTER_TYPE)
342 /* If the expected type we've been passed is object or ptr
343 (i.e. void*), the caller needs to know the real type. */
344 if (type == ptr_type_node || type == object_ptr_type_node)
345 return t;
347 /* Since the verifier has already run, we know that any
348 types we see will be compatible. In BC mode, this fact
349 may be checked at runtime, but if that is so then we can
350 assume its truth here as well. So, we always succeed
351 here, with the expected type. */
352 return type;
355 if (! flag_verify_invocations && flag_indirect_dispatch
356 && t == object_ptr_type_node)
358 if (type != ptr_type_node)
359 warning (0, "need to insert runtime check for %s",
360 xstrdup (lang_printable_name (type, 0)));
361 return type;
364 /* lang_printable_name uses a static buffer, so we must save the result
365 from calling it the first time. */
366 fail:
368 char *temp = xstrdup (lang_printable_name (type, 0));
369 /* If the stack contains a multi-word type, keep popping the stack until
370 the real type is found. */
371 while (t == void_type_node)
372 t = stack_type_map[--stack_pointer];
373 *messagep = concat ("expected type '", temp,
374 "' but stack contains '", lang_printable_name (t, 0),
375 "'", NULL);
376 free (temp);
378 return type;
381 /* Pop a type from the type stack.
382 TYPE is the expected type. Return the actual type, which must be
383 convertible to TYPE, otherwise call error. */
385 tree
386 pop_type (tree type)
388 char *message = NULL;
389 type = pop_type_0 (type, &message);
390 if (message != NULL)
392 error ("%s", message);
393 free (message);
395 return type;
399 /* Return true if two type assertions are equal. */
401 bool
402 type_assertion_hasher::equal (type_assertion *k1, type_assertion *k2)
404 return (k1->assertion_code == k2->assertion_code
405 && k1->op1 == k2->op1
406 && k1->op2 == k2->op2);
409 /* Hash a type assertion. */
411 hashval_t
412 type_assertion_hasher::hash (type_assertion *k_p)
414 hashval_t hash = iterative_hash (&k_p->assertion_code, sizeof
415 k_p->assertion_code, 0);
417 switch (k_p->assertion_code)
419 case JV_ASSERT_TYPES_COMPATIBLE:
420 hash = iterative_hash (&TYPE_UID (k_p->op2), sizeof TYPE_UID (k_p->op2),
421 hash);
422 /* Fall through. */
424 case JV_ASSERT_IS_INSTANTIABLE:
425 hash = iterative_hash (&TYPE_UID (k_p->op1), sizeof TYPE_UID (k_p->op1),
426 hash);
427 /* Fall through. */
429 case JV_ASSERT_END_OF_TABLE:
430 break;
432 default:
433 gcc_unreachable ();
436 return hash;
439 /* Add an entry to the type assertion table for the given class.
440 KLASS is the class for which this assertion will be evaluated by the
441 runtime during loading/initialization.
442 ASSERTION_CODE is the 'opcode' or type of this assertion: see java-tree.h.
443 OP1 and OP2 are the operands. The tree type of these arguments may be
444 specific to each assertion_code. */
446 void
447 add_type_assertion (tree klass, int assertion_code, tree op1, tree op2)
449 hash_table<type_assertion_hasher> *assertions_htab;
450 type_assertion as;
451 type_assertion **as_pp;
453 assertions_htab = TYPE_ASSERTIONS (klass);
454 if (assertions_htab == NULL)
456 assertions_htab = hash_table<type_assertion_hasher>::create_ggc (7);
457 TYPE_ASSERTIONS (current_class) = assertions_htab;
460 as.assertion_code = assertion_code;
461 as.op1 = op1;
462 as.op2 = op2;
464 as_pp = assertions_htab->find_slot (&as, INSERT);
466 /* Don't add the same assertion twice. */
467 if (*as_pp)
468 return;
470 *as_pp = ggc_alloc<type_assertion> ();
471 **as_pp = as;
475 /* Return 1 if SOURCE_TYPE can be safely widened to TARGET_TYPE.
476 Handles array types and interfaces. */
479 can_widen_reference_to (tree source_type, tree target_type)
481 if (source_type == ptr_type_node || target_type == object_ptr_type_node)
482 return 1;
484 /* Get rid of pointers */
485 if (TREE_CODE (source_type) == POINTER_TYPE)
486 source_type = TREE_TYPE (source_type);
487 if (TREE_CODE (target_type) == POINTER_TYPE)
488 target_type = TREE_TYPE (target_type);
490 if (source_type == target_type)
491 return 1;
493 /* FIXME: This is very pessimistic, in that it checks everything,
494 even if we already know that the types are compatible. If we're
495 to support full Java class loader semantics, we need this.
496 However, we could do something more optimal. */
497 if (! flag_verify_invocations)
499 add_type_assertion (current_class, JV_ASSERT_TYPES_COMPATIBLE,
500 source_type, target_type);
502 if (!quiet_flag)
503 warning (0, "assert: %s is assign compatible with %s",
504 xstrdup (lang_printable_name (target_type, 0)),
505 xstrdup (lang_printable_name (source_type, 0)));
506 /* Punt everything to runtime. */
507 return 1;
510 if (TYPE_DUMMY (source_type) || TYPE_DUMMY (target_type))
512 return 1;
514 else
516 if (TYPE_ARRAY_P (source_type) || TYPE_ARRAY_P (target_type))
518 HOST_WIDE_INT source_length, target_length;
519 if (TYPE_ARRAY_P (source_type) != TYPE_ARRAY_P (target_type))
521 /* An array implements Cloneable and Serializable. */
522 tree name = DECL_NAME (TYPE_NAME (target_type));
523 return (name == java_lang_cloneable_identifier_node
524 || name == java_io_serializable_identifier_node);
526 target_length = java_array_type_length (target_type);
527 if (target_length >= 0)
529 source_length = java_array_type_length (source_type);
530 if (source_length != target_length)
531 return 0;
533 source_type = TYPE_ARRAY_ELEMENT (source_type);
534 target_type = TYPE_ARRAY_ELEMENT (target_type);
535 if (source_type == target_type)
536 return 1;
537 if (TREE_CODE (source_type) != POINTER_TYPE
538 || TREE_CODE (target_type) != POINTER_TYPE)
539 return 0;
540 return can_widen_reference_to (source_type, target_type);
542 else
544 int source_depth = class_depth (source_type);
545 int target_depth = class_depth (target_type);
547 if (TYPE_DUMMY (source_type) || TYPE_DUMMY (target_type))
549 if (! quiet_flag)
550 warning (0, "assert: %s is assign compatible with %s",
551 xstrdup (lang_printable_name (target_type, 0)),
552 xstrdup (lang_printable_name (source_type, 0)));
553 return 1;
556 /* class_depth can return a negative depth if an error occurred */
557 if (source_depth < 0 || target_depth < 0)
558 return 0;
560 if (CLASS_INTERFACE (TYPE_NAME (target_type)))
562 /* target_type is OK if source_type or one of source_type's ancestors
563 implements target_type. We handle multiple sub-interfaces. */
564 tree binfo, base_binfo;
565 int i;
567 for (binfo = TYPE_BINFO (source_type), i = 0;
568 BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
569 if (can_widen_reference_to
570 (BINFO_TYPE (base_binfo), target_type))
571 return 1;
573 if (!i)
574 return 0;
577 for ( ; source_depth > target_depth; source_depth--)
579 source_type
580 = BINFO_TYPE (BINFO_BASE_BINFO (TYPE_BINFO (source_type), 0));
582 return source_type == target_type;
587 static tree
588 pop_value (tree type)
590 type = pop_type (type);
591 if (vec_safe_length (quick_stack) != 0)
592 return quick_stack->pop ();
593 else
594 return find_stack_slot (stack_pointer, promote_type (type));
598 /* Pop and discard the top COUNT stack slots. */
600 static void
601 java_stack_pop (int count)
603 while (count > 0)
605 tree type;
607 gcc_assert (stack_pointer != 0);
609 type = stack_type_map[stack_pointer - 1];
610 if (type == TYPE_SECOND)
612 count--;
613 gcc_assert (stack_pointer != 1 && count > 0);
615 type = stack_type_map[stack_pointer - 2];
617 pop_value (type);
618 count--;
622 /* Implement the 'swap' operator (to swap two top stack slots). */
624 static void
625 java_stack_swap (void)
627 tree type1, type2;
628 tree temp;
629 tree decl1, decl2;
631 if (stack_pointer < 2
632 || (type1 = stack_type_map[stack_pointer - 1]) == TYPE_SECOND
633 || (type2 = stack_type_map[stack_pointer - 2]) == TYPE_SECOND
634 || TYPE_IS_WIDE (type1) || TYPE_IS_WIDE (type2))
635 /* Bad stack swap. */
636 abort ();
637 /* Bad stack swap. */
639 flush_quick_stack ();
640 decl1 = find_stack_slot (stack_pointer - 1, type1);
641 decl2 = find_stack_slot (stack_pointer - 2, type2);
642 temp = build_decl (input_location, VAR_DECL, NULL_TREE, type1);
643 java_add_local_var (temp);
644 java_add_stmt (build2 (MODIFY_EXPR, type1, temp, decl1));
645 java_add_stmt (build2 (MODIFY_EXPR, type2,
646 find_stack_slot (stack_pointer - 1, type2),
647 decl2));
648 java_add_stmt (build2 (MODIFY_EXPR, type1,
649 find_stack_slot (stack_pointer - 2, type1),
650 temp));
651 stack_type_map[stack_pointer - 1] = type2;
652 stack_type_map[stack_pointer - 2] = type1;
655 static void
656 java_stack_dup (int size, int offset)
658 int low_index = stack_pointer - size - offset;
659 int dst_index;
660 if (low_index < 0)
661 error ("stack underflow - dup* operation");
663 flush_quick_stack ();
665 stack_pointer += size;
666 dst_index = stack_pointer;
668 for (dst_index = stack_pointer; --dst_index >= low_index; )
670 tree type;
671 int src_index = dst_index - size;
672 if (src_index < low_index)
673 src_index = dst_index + size + offset;
674 type = stack_type_map [src_index];
675 if (type == TYPE_SECOND)
677 /* Dup operation splits 64-bit number. */
678 gcc_assert (src_index > low_index);
680 stack_type_map[dst_index] = type;
681 src_index--; dst_index--;
682 type = stack_type_map[src_index];
683 gcc_assert (TYPE_IS_WIDE (type));
685 else
686 gcc_assert (! TYPE_IS_WIDE (type));
688 if (src_index != dst_index)
690 tree src_decl = find_stack_slot (src_index, type);
691 tree dst_decl = find_stack_slot (dst_index, type);
693 java_add_stmt
694 (build2 (MODIFY_EXPR, TREE_TYPE (dst_decl), dst_decl, src_decl));
695 stack_type_map[dst_index] = type;
700 /* Calls _Jv_Throw or _Jv_Sjlj_Throw. Discard the contents of the
701 value stack. */
703 static void
704 build_java_athrow (tree node)
706 tree call;
708 call = build_call_nary (void_type_node,
709 build_address_of (throw_node),
710 1, node);
711 TREE_SIDE_EFFECTS (call) = 1;
712 java_add_stmt (call);
713 java_stack_pop (stack_pointer);
716 /* Implementation for jsr/ret */
718 static void
719 build_java_jsr (int target_pc, int return_pc)
721 tree where = lookup_label (target_pc);
722 tree ret = lookup_label (return_pc);
723 tree ret_label = fold_build1 (ADDR_EXPR, return_address_type_node, ret);
724 push_value (ret_label);
725 flush_quick_stack ();
726 java_add_stmt (build1 (GOTO_EXPR, void_type_node, where));
728 /* Do not need to emit the label here. We noted the existence of the
729 label as a jump target in note_instructions; we'll emit the label
730 for real at the beginning of the expand_byte_code loop. */
733 static void
734 build_java_ret (tree location)
736 java_add_stmt (build1 (GOTO_EXPR, void_type_node, location));
739 /* Implementation of operations on array: new, load, store, length */
741 tree
742 decode_newarray_type (int atype)
744 switch (atype)
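/* These codes are the JVM newarray `atype' operand values,
   T_BOOLEAN = 4 through T_LONG = 11.  */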
746 case 4: return boolean_type_node;
747 case 5: return char_type_node;
748 case 6: return float_type_node;
749 case 7: return double_type_node;
750 case 8: return byte_type_node;
751 case 9: return short_type_node;
752 case 10: return int_type_node;
753 case 11: return long_type_node;
754 default: return NULL_TREE;
758 /* Map primitive type to the code used by OPCODE_newarray. */
761 encode_newarray_type (tree type)
763 if (type == boolean_type_node)
764 return 4;
765 else if (type == char_type_node)
766 return 5;
767 else if (type == float_type_node)
768 return 6;
769 else if (type == double_type_node)
770 return 7;
771 else if (type == byte_type_node)
772 return 8;
773 else if (type == short_type_node)
774 return 9;
775 else if (type == int_type_node)
776 return 10;
777 else if (type == long_type_node)
778 return 11;
779 else
780 gcc_unreachable ();
783 /* Build a call to _Jv_ThrowBadArrayIndex(), the
784 ArrayIndexOutOfBoundsException exception handler. */
786 static tree
787 build_java_throw_out_of_bounds_exception (tree index)
789 tree node;
791 /* We need to build a COMPOUND_EXPR because _Jv_ThrowBadArrayIndex()
792 has void return type. We cannot just set the type of the CALL_EXPR below
793 to int_type_node because we would lose it during gimplification. */
794 gcc_assert (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (soft_badarrayindex_node))));
795 node = build_call_nary (void_type_node,
796 build_address_of (soft_badarrayindex_node),
797 1, index);
798 TREE_SIDE_EFFECTS (node) = 1;
800 node = build2 (COMPOUND_EXPR, int_type_node, node, integer_zero_node);
801 TREE_SIDE_EFFECTS (node) = 1; /* Allows expansion within ANDIF */
803 return (node);
806 /* Return the length of an array. Doesn't perform any checking on the nature
807 or value of the array NODE. May be used to implement some bytecodes. */
809 tree
810 build_java_array_length_access (tree node)
812 tree type = TREE_TYPE (node);
813 tree array_type = TREE_TYPE (type);
814 HOST_WIDE_INT length;
816 if (!is_array_type_p (type))
818 /* With the new verifier, we will see an ordinary pointer type
819 here. In this case, we just use an arbitrary array type. */
820 array_type = build_java_array_type (object_ptr_type_node, -1);
821 type = promote_type (array_type);
824 length = java_array_type_length (type);
825 if (length >= 0)
826 return build_int_cst (NULL_TREE, length);
828 node = build3 (COMPONENT_REF, int_type_node,
829 build_java_indirect_ref (array_type, node,
830 flag_check_references),
831 lookup_field (&array_type, get_identifier ("length")),
832 NULL_TREE);
833 IS_ARRAY_LENGTH_ACCESS (node) = 1;
834 return node;
837 /* Optionally checks a reference against the NULL pointer. EXPR: the
838 expression, CHECK: whether we should check the reference. Don't generate extra
839 checks if we're not generating code. */
841 tree
842 java_check_reference (tree expr, int check)
844 if (!flag_syntax_only && check)
846 expr = save_expr (expr);
847 expr = build3 (COND_EXPR, TREE_TYPE (expr),
848 build2 (EQ_EXPR, boolean_type_node,
849 expr, null_pointer_node),
850 build_call_nary (void_type_node,
851 build_address_of (soft_nullpointer_node),
853 expr);
856 return expr;
859 /* Reference an object: just like an INDIRECT_REF, but with checking. */
861 tree
862 build_java_indirect_ref (tree type, tree expr, int check)
864 tree t;
865 t = java_check_reference (expr, check);
866 t = convert (build_pointer_type (type), t);
867 return build1 (INDIRECT_REF, type, t);
870 /* Implement array indexing (either as l-value or r-value).
871 Returns a tree for ARRAY[INDEX], assuming TYPE is the element type.
872 Optionally performs bounds checking and/or a test against NULL.
873 At this point, ARRAY should have been verified as an array. */
875 tree
876 build_java_arrayaccess (tree array, tree type, tree index)
878 tree node, throw_expr = NULL_TREE;
879 tree data_field;
880 tree ref;
881 tree array_type = TREE_TYPE (TREE_TYPE (array));
882 tree size_exp = fold_convert (sizetype, size_in_bytes (type));
884 if (!is_array_type_p (TREE_TYPE (array)))
886 /* With the new verifier, we will see an ordinary pointer type
887 here. In this case, we just use the correct array type. */
888 array_type = build_java_array_type (type, -1);
891 if (flag_bounds_check)
893 /* Generate:
894 * (unsigned jint) INDEX >= (unsigned jint) LEN
895 * && throw ArrayIndexOutOfBoundsException.
896 * Note this is equivalent to and more efficient than:
897 * INDEX < 0 || INDEX >= LEN && throw ... */
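/* Worked example (illustrative): array lengths are non-negative, so a
   negative INDEX cast to unsigned becomes a value >= 2^31, which is always
   >= LEN; e.g. INDEX == -1 compares as 0xffffffff.  The single unsigned
   comparison therefore covers both the negative-index and the
   index-too-large cases with one test.  */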
898 tree test;
899 tree len = convert (unsigned_int_type_node,
900 build_java_array_length_access (array));
901 test = fold_build2 (GE_EXPR, boolean_type_node,
902 convert (unsigned_int_type_node, index),
903 len);
904 if (! integer_zerop (test))
906 throw_expr
907 = build2 (TRUTH_ANDIF_EXPR, int_type_node, test,
908 build_java_throw_out_of_bounds_exception (index));
909 /* allows expansion within COMPOUND */
910 TREE_SIDE_EFFECTS( throw_expr ) = 1;
914 /* If checking bounds, wrap the index expr with a COMPOUND_EXPR in order
915 to have the bounds check evaluated first. */
916 if (throw_expr != NULL_TREE)
917 index = build2 (COMPOUND_EXPR, int_type_node, throw_expr, index);
919 data_field = lookup_field (&array_type, get_identifier ("data"));
921 ref = build3 (COMPONENT_REF, TREE_TYPE (data_field),
922 build_java_indirect_ref (array_type, array,
923 flag_check_references),
924 data_field, NULL_TREE);
926 /* Take the address of the data field and convert it to a pointer to
927 the element type. */
928 node = build1 (NOP_EXPR, build_pointer_type (type), build_address_of (ref));
930 /* Multiply the index by the size of an element to obtain a byte
931 offset. Convert the result to a pointer to the element type. */
932 index = build2 (MULT_EXPR, sizetype,
933 fold_convert (sizetype, index),
934 size_exp);
936 /* Sum the byte offset and the address of the data field. */
937 node = fold_build_pointer_plus (node, index);
939 /* Finally, return
941 *((&array->data) + index*size_exp)
944 return build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (node)), node);
947 /* Generate code to throw an ArrayStoreException if OBJECT is not assignable
948 (at runtime) to an element of ARRAY. A NOP_EXPR is returned if it can
949 determine that no check is required. */
951 tree
952 build_java_arraystore_check (tree array, tree object)
954 tree check, element_type, source;
955 tree array_type_p = TREE_TYPE (array);
956 tree object_type = TYPE_NAME (TREE_TYPE (TREE_TYPE (object)));
958 if (! flag_verify_invocations)
960 /* With the new verifier, we don't track precise types. FIXME:
961 performance regression here. */
962 element_type = TYPE_NAME (object_type_node);
964 else
966 gcc_assert (is_array_type_p (array_type_p));
968 /* Get the TYPE_DECL for ARRAY's element type. */
969 element_type
970 = TYPE_NAME (TREE_TYPE (TREE_TYPE (TREE_TYPE (array_type_p))));
973 gcc_assert (TREE_CODE (element_type) == TYPE_DECL
974 && TREE_CODE (object_type) == TYPE_DECL);
976 if (!flag_store_check)
977 return build1 (NOP_EXPR, array_type_p, array);
979 /* No check is needed if the element type is final. Also check that
980 element_type matches object_type, since in the bytecode
981 compilation case element_type may be the actual element type of
982 the array rather than its declared type. However, if we're doing
983 indirect dispatch, we can't do the `final' optimization. */
984 if (element_type == object_type
985 && ! flag_indirect_dispatch
986 && CLASS_FINAL (element_type))
987 return build1 (NOP_EXPR, array_type_p, array);
989 /* OBJECT might be wrapped by a SAVE_EXPR. */
990 if (TREE_CODE (object) == SAVE_EXPR)
991 source = TREE_OPERAND (object, 0);
992 else
993 source = object;
995 /* Avoid the check if OBJECT was just loaded from the same array. */
996 if (TREE_CODE (source) == ARRAY_REF)
998 tree target;
999 source = TREE_OPERAND (source, 0); /* COMPONENT_REF. */
1000 source = TREE_OPERAND (source, 0); /* INDIRECT_REF. */
1001 source = TREE_OPERAND (source, 0); /* Source array's DECL or SAVE_EXPR. */
1002 if (TREE_CODE (source) == SAVE_EXPR)
1003 source = TREE_OPERAND (source, 0);
1005 target = array;
1006 if (TREE_CODE (target) == SAVE_EXPR)
1007 target = TREE_OPERAND (target, 0);
1009 if (source == target)
1010 return build1 (NOP_EXPR, array_type_p, array);
1013 /* Build an invocation of _Jv_CheckArrayStore */
1014 check = build_call_nary (void_type_node,
1015 build_address_of (soft_checkarraystore_node),
1016 2, array, object);
1017 TREE_SIDE_EFFECTS (check) = 1;
1019 return check;
1022 /* Makes sure that INDEXED_TYPE is appropriate. If not, make it from
1023 ARRAY_NODE. This function is used to retrieve something less vague than
1024 a pointer type when indexing the first dimension of something like [[<t>.
1025 May return a corrected type, if necessary, otherwise INDEXED_TYPE is
1026 returned unchanged. */
1028 static tree
1029 build_java_check_indexed_type (tree array_node ATTRIBUTE_UNUSED,
1030 tree indexed_type)
1032 /* We used to check to see if ARRAY_NODE really had array type.
1033 However, with the new verifier, this is not necessary, as we know
1034 that the object will be an array of the appropriate type. */
1036 return indexed_type;
1039 /* newarray triggers a call to _Jv_NewPrimArray. This function should be
1040 called with an integer code (the type of array to create), and the length
1041 of the array to create. */
1043 tree
1044 build_newarray (int atype_value, tree length)
1046 tree type_arg;
1048 tree prim_type = decode_newarray_type (atype_value);
1049 tree type
1050 = build_java_array_type (prim_type,
1051 tree_fits_shwi_p (length)
1052 ? tree_to_shwi (length) : -1);
1054 /* Pass a reference to the primitive type class and save the runtime
1055 some work. */
1056 type_arg = build_class_ref (prim_type);
1058 return build_call_nary (promote_type (type),
1059 build_address_of (soft_newarray_node),
1060 2, type_arg, length);
1063 /* Generates anewarray from a given CLASS_TYPE. Gets from the stack the size
1064 of the dimension. */
1066 tree
1067 build_anewarray (tree class_type, tree length)
1069 tree type
1070 = build_java_array_type (class_type,
1071 tree_fits_shwi_p (length)
1072 ? tree_to_shwi (length) : -1);
1074 return build_call_nary (promote_type (type),
1075 build_address_of (soft_anewarray_node),
1077 length,
1078 build_class_ref (class_type),
1079 null_pointer_node);
1082 /* Return a node that evaluates 'new TYPE[LENGTH]'. */
1084 tree
1085 build_new_array (tree type, tree length)
1087 if (JPRIMITIVE_TYPE_P (type))
1088 return build_newarray (encode_newarray_type (type), length);
1089 else
1090 return build_anewarray (TREE_TYPE (type), length);
1093 /* Generates a call to _Jv_NewMultiArray. multianewarray expects a
1094 class pointer, the number of dimensions, and that many
1095 dimension sizes. The argument list is NULL terminated. */
1097 static void
1098 expand_java_multianewarray (tree class_type, int ndim)
1100 int i;
1101 vec<tree, va_gc> *args = NULL;
1103 vec_safe_grow (args, 3 + ndim);
1105 (*args)[0] = build_class_ref (class_type);
1106 (*args)[1] = build_int_cst (NULL_TREE, ndim);
1108 for(i = ndim - 1; i >= 0; i-- )
1109 (*args)[(unsigned)(2 + i)] = pop_value (int_type_node);
1111 (*args)[2 + ndim] = null_pointer_node;
1113 push_value (build_call_vec (promote_type (class_type),
1114 build_address_of (soft_multianewarray_node),
1115 args));
1118 /* ARRAY[INDEX] <- RHS. build_java_check_indexed_type makes sure that
1119 ARRAY is an array type. May expand some bound checking and NULL
1120 pointer checking. RHS_TYPE_NODE is the type of the value we are going to store. In the case
1121 of CHAR/BYTE/BOOLEAN/SHORT, the type popped off the stack is an
1122 INT. In those cases, we make the conversion.
1124 If ARRAY is a reference type, the assignment is checked at run-time
1125 to make sure that the RHS can be assigned to the array element
1126 type. It is not necessary to generate this code if ARRAY is final. */
1128 static void
1129 expand_java_arraystore (tree rhs_type_node)
1131 tree rhs_node = pop_value ((INTEGRAL_TYPE_P (rhs_type_node)
1132 && TYPE_PRECISION (rhs_type_node) <= 32) ?
1133 int_type_node : rhs_type_node);
1134 tree index = pop_value (int_type_node);
1135 tree array_type, array, temp, access;
1137 /* If we're processing an `aaload' we might as well just pick
1138 `Object'. */
1139 if (TREE_CODE (rhs_type_node) == POINTER_TYPE)
1141 array_type = build_java_array_type (object_ptr_type_node, -1);
1142 rhs_type_node = object_ptr_type_node;
1144 else
1145 array_type = build_java_array_type (rhs_type_node, -1);
1147 array = pop_value (array_type);
1148 array = build1 (NOP_EXPR, promote_type (array_type), array);
1150 rhs_type_node = build_java_check_indexed_type (array, rhs_type_node);
1152 flush_quick_stack ();
1154 index = save_expr (index);
1155 array = save_expr (array);
1157 /* We want to perform the bounds check (done by
1158 build_java_arrayaccess) before the type check (done by
1159 build_java_arraystore_check). So, we call build_java_arrayaccess
1160 -- which returns an ARRAY_REF lvalue -- and we then generate code
1161 to stash the address of that lvalue in a temp. Then we call
1162 build_java_arraystore_check, and finally we generate a
1163 MODIFY_EXPR to set the array element. */
1165 access = build_java_arrayaccess (array, rhs_type_node, index);
1166 temp = build_decl (input_location, VAR_DECL, NULL_TREE,
1167 build_pointer_type (TREE_TYPE (access)));
1168 java_add_local_var (temp);
1169 java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (temp),
1170 temp,
1171 build_fold_addr_expr (access)));
1173 if (TREE_CODE (rhs_type_node) == POINTER_TYPE)
1175 tree check = build_java_arraystore_check (array, rhs_node);
1176 java_add_stmt (check);
1179 java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (access),
1180 build1 (INDIRECT_REF, TREE_TYPE (access), temp),
1181 rhs_node));
1184 /* Expand the evaluation of ARRAY[INDEX]. build_java_check_indexed_type makes
1185 sure that LHS is an array type. May expand some bound checking and NULL
1186 pointer checking.
1187 LHS_TYPE_NODE is the type of ARRAY[INDEX]. But in the case of CHAR/BYTE/
1188 BOOLEAN/SHORT, we push a promoted type back to the stack.
1191 static void
1192 expand_java_arrayload (tree lhs_type_node)
1194 tree load_node;
1195 tree index_node = pop_value (int_type_node);
1196 tree array_type;
1197 tree array_node;
1199 /* If we're processing an `aaload' we might as well just pick
1200 `Object'. */
1201 if (TREE_CODE (lhs_type_node) == POINTER_TYPE)
1203 array_type = build_java_array_type (object_ptr_type_node, -1);
1204 lhs_type_node = object_ptr_type_node;
1206 else
1207 array_type = build_java_array_type (lhs_type_node, -1);
1208 array_node = pop_value (array_type);
1209 array_node = build1 (NOP_EXPR, promote_type (array_type), array_node);
1211 index_node = save_expr (index_node);
1212 array_node = save_expr (array_node);
1214 lhs_type_node = build_java_check_indexed_type (array_node,
1215 lhs_type_node);
1216 load_node = build_java_arrayaccess (array_node,
1217 lhs_type_node,
1218 index_node);
1219 if (INTEGRAL_TYPE_P (lhs_type_node) && TYPE_PRECISION (lhs_type_node) <= 32)
1220 load_node = fold_build1 (NOP_EXPR, int_type_node, load_node);
1221 push_value (load_node);
1224 /* Expands .length. Makes sure that we deal with an array and may expand
1225 a NULL check on the array object. */
1227 static void
1228 expand_java_array_length (void)
1230 tree array = pop_value (ptr_type_node);
1231 tree length = build_java_array_length_access (array);
1233 push_value (length);
1236 /* Emit code for the call to _Jv_Monitor{Enter,Exit}. CALL can be
1237 either soft_monitorenter_node or soft_monitorexit_node. */
1239 static tree
1240 build_java_monitor (tree call, tree object)
1242 return build_call_nary (void_type_node,
1243 build_address_of (call),
1244 1, object);
1247 /* Emit code for one of the PUSHC instructions. */
1249 static void
1250 expand_java_pushc (int ival, tree type)
1252 tree value;
1253 if (type == ptr_type_node && ival == 0)
1254 value = null_pointer_node;
1255 else if (type == int_type_node || type == long_type_node)
1256 value = build_int_cst (type, ival);
1257 else if (type == float_type_node || type == double_type_node)
1259 REAL_VALUE_TYPE x;
1260 real_from_integer (&x, TYPE_MODE (type), ival, SIGNED);
1261 value = build_real (type, x);
1263 else
1264 gcc_unreachable ();
1266 push_value (value);
1269 static void
1270 expand_java_return (tree type)
1272 if (type == void_type_node)
1273 java_add_stmt (build1 (RETURN_EXPR, void_type_node, NULL));
1274 else
1276 tree retval = pop_value (type);
1277 tree res = DECL_RESULT (current_function_decl);
1278 retval = build2 (MODIFY_EXPR, TREE_TYPE (res), res, retval);
1280 /* Handle the situation where the native integer type is smaller
1281 than the JVM integer. It can happen for many cross compilers.
1282 The whole if expression just goes away if INT_TYPE_SIZE < 32
1283 is false. */
1284 if (INT_TYPE_SIZE < 32
1285 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (res)))
1286 < GET_MODE_SIZE (TYPE_MODE (type))))
1287 retval = build1(NOP_EXPR, TREE_TYPE(res), retval);
1289 TREE_SIDE_EFFECTS (retval) = 1;
1290 java_add_stmt (build1 (RETURN_EXPR, void_type_node, retval));
1294 static void
1295 expand_load_internal (int index, tree type, int pc)
1297 tree copy;
1298 tree var = find_local_variable (index, type, pc);
1300 /* Now VAR is the VAR_DECL (or PARM_DECL) that we are going to push
1301 on the stack. If there is an assignment to this VAR_DECL between
1302 the stack push and the use, then the wrong code could be
1303 generated. To avoid this we create a new local and copy our
1304 value into it. Then we push this new local on the stack.
1305 Hopefully this all gets optimized out. */
1306 copy = build_decl (input_location, VAR_DECL, NULL_TREE, type);
1307 if ((INTEGRAL_TYPE_P (type) || POINTER_TYPE_P (type))
1308 && TREE_TYPE (copy) != TREE_TYPE (var))
1309 var = convert (type, var);
1310 java_add_local_var (copy);
1311 java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (var), copy, var));
1313 push_value (copy);
1316 tree
1317 build_address_of (tree value)
1319 return build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (value)), value);
1322 bool
1323 class_has_finalize_method (tree type)
1325 tree super = CLASSTYPE_SUPER (type);
1327 if (super == NULL_TREE)
1328 return false; /* Every class with a real finalizer inherits */
1329 /* from java.lang.Object. */
1330 else
1331 return HAS_FINALIZER_P (type) || class_has_finalize_method (super);
1334 tree
1335 java_create_object (tree type)
1337 tree alloc_node = (class_has_finalize_method (type)
1338 ? alloc_object_node
1339 : alloc_no_finalizer_node);
1341 return build_call_nary (promote_type (type),
1342 build_address_of (alloc_node),
1343 1, build_class_ref (type));
1346 static void
1347 expand_java_NEW (tree type)
1349 tree alloc_node;
1351 alloc_node = (class_has_finalize_method (type) ? alloc_object_node
1352 : alloc_no_finalizer_node);
1353 if (! CLASS_LOADED_P (type))
1354 load_class (type, 1);
1355 safe_layout_class (type);
1356 push_value (build_call_nary (promote_type (type),
1357 build_address_of (alloc_node),
1358 1, build_class_ref (type)));
1361 /* This returns an expression which will extract the class of an
1362 object. */
1364 tree
1365 build_get_class (tree value)
1367 tree class_field = lookup_field (&dtable_type, get_identifier ("class"));
1368 tree vtable_field = lookup_field (&object_type_node,
1369 get_identifier ("vtable"));
1370 tree tmp = build3 (COMPONENT_REF, dtable_ptr_type,
1371 build_java_indirect_ref (object_type_node, value,
1372 flag_check_references),
1373 vtable_field, NULL_TREE);
1374 return build3 (COMPONENT_REF, class_ptr_type,
1375 build1 (INDIRECT_REF, dtable_type, tmp),
1376 class_field, NULL_TREE);
1379 /* This builds the tree representation of the `instanceof' operator.
1380 It tries various tricks to optimize this in cases where types are
1381 known. */
1383 tree
1384 build_instanceof (tree value, tree type)
1386 tree expr;
1387 tree itype = TREE_TYPE (TREE_TYPE (soft_instanceof_node));
1388 tree valtype = TREE_TYPE (TREE_TYPE (value));
1389 tree valclass = TYPE_NAME (valtype);
1390 tree klass;
1392 /* When compiling from bytecode, we need to ensure that TYPE has
1393 been loaded. */
1394 if (CLASS_P (type) && ! CLASS_LOADED_P (type))
1396 load_class (type, 1);
1397 safe_layout_class (type);
1398 if (! TYPE_SIZE (type) || TREE_CODE (TYPE_SIZE (type)) == ERROR_MARK)
1399 return error_mark_node;
1401 klass = TYPE_NAME (type);
1403 if (type == object_type_node || inherits_from_p (valtype, type))
1405 /* Anything except `null' is an instance of Object. Likewise,
1406 if the object is known to be an instance of the class, then
1407 we only need to check for `null'. */
1408 expr = build2 (NE_EXPR, itype, value, null_pointer_node);
1410 else if (flag_verify_invocations
1411 && ! TYPE_ARRAY_P (type)
1412 && ! TYPE_ARRAY_P (valtype)
1413 && DECL_P (klass) && DECL_P (valclass)
1414 && ! CLASS_INTERFACE (valclass)
1415 && ! CLASS_INTERFACE (klass)
1416 && ! inherits_from_p (type, valtype)
1417 && (CLASS_FINAL (klass)
1418 || ! inherits_from_p (valtype, type)))
1420 /* The classes are from different branches of the derivation
1421 tree, so we immediately know the answer. */
1422 expr = boolean_false_node;
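/* Note: when KLASS is final no subclasses exist, so `value instanceof
   KLASS' can be decided by comparing the object's class pointer with
   KLASS directly (guarded by a null check), instead of calling the
   generic soft_instanceof runtime helper used in the fallback below.  */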
1424 else if (DECL_P (klass) && CLASS_FINAL (klass))
1426 tree save = save_expr (value);
1427 expr = build3 (COND_EXPR, itype,
1428 build2 (NE_EXPR, boolean_type_node,
1429 save, null_pointer_node),
1430 build2 (EQ_EXPR, itype,
1431 build_get_class (save),
1432 build_class_ref (type)),
1433 boolean_false_node);
1435 else
1437 expr = build_call_nary (itype,
1438 build_address_of (soft_instanceof_node),
1439 2, value, build_class_ref (type));
1441 TREE_SIDE_EFFECTS (expr) = TREE_SIDE_EFFECTS (value);
1442 return expr;
1445 static void
1446 expand_java_INSTANCEOF (tree type)
1448 tree value = pop_value (object_ptr_type_node);
1449 value = build_instanceof (value, type);
1450 push_value (value);
1453 static void
1454 expand_java_CHECKCAST (tree type)
1456 tree value = pop_value (ptr_type_node);
1457 value = build_call_nary (promote_type (type),
1458 build_address_of (soft_checkcast_node),
1459 2, build_class_ref (type), value);
1460 push_value (value);
1463 static void
1464 expand_iinc (unsigned int local_var_index, int ival, int pc)
1466 tree local_var, res;
1467 tree constant_value;
1469 flush_quick_stack ();
1470 local_var = find_local_variable (local_var_index, int_type_node, pc);
1471 constant_value = build_int_cst (NULL_TREE, ival);
1472 res = fold_build2 (PLUS_EXPR, int_type_node, local_var, constant_value);
1473 java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (local_var), local_var, res));
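/* Note on the runtime division helpers built below: routing integer
   TRUNC_DIV_EXPR/TRUNC_MOD_EXPR through the soft_idiv/soft_irem (and
   soft_ldiv/soft_lrem) entry points lets the runtime preserve Java
   semantics that a plain hardware division may not give, i.e. throwing
   ArithmeticException on division by zero and yielding INT_MIN for
   INT_MIN / -1 rather than trapping.  */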
1477 tree
1478 build_java_soft_divmod (enum tree_code op, tree type, tree op1, tree op2)
1480 tree call = NULL;
1481 tree arg1 = convert (type, op1);
1482 tree arg2 = convert (type, op2);
1484 if (type == int_type_node)
1486 switch (op)
1488 case TRUNC_DIV_EXPR:
1489 call = soft_idiv_node;
1490 break;
1491 case TRUNC_MOD_EXPR:
1492 call = soft_irem_node;
1493 break;
1494 default:
1495 break;
1498 else if (type == long_type_node)
1500 switch (op)
1502 case TRUNC_DIV_EXPR:
1503 call = soft_ldiv_node;
1504 break;
1505 case TRUNC_MOD_EXPR:
1506 call = soft_lrem_node;
1507 break;
1508 default:
1509 break;
1513 gcc_assert (call);
1514 call = build_call_nary (type, build_address_of (call), 2, arg1, arg2);
1515 return call;
1518 tree
1519 build_java_binop (enum tree_code op, tree type, tree arg1, tree arg2)
1521 tree mask;
1522 switch (op)
1524 case URSHIFT_EXPR:
1526 tree u_type = unsigned_type_for (type);
1527 arg1 = convert (u_type, arg1);
1528 arg1 = build_java_binop (RSHIFT_EXPR, u_type, arg1, arg2);
1529 return convert (type, arg1);
1531 case LSHIFT_EXPR:
1532 case RSHIFT_EXPR:
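/* Note: the JVM defines shift counts modulo the operand width (only the
   low 5 bits are used for int shifts, the low 6 bits for long), so e.g. a
   Java (i << 33) on an int behaves as (i << 1).  Masking ARG2 with
   TYPE_PRECISION (arg1) - 1 reproduces that behaviour.  */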
1533 mask = build_int_cst (int_type_node,
1534 TYPE_PRECISION (TREE_TYPE (arg1)) - 1);
1535 arg2 = fold_build2 (BIT_AND_EXPR, int_type_node, arg2, mask);
1536 break;
1538 case COMPARE_L_EXPR: /* arg1 > arg2 ? 1 : arg1 == arg2 ? 0 : -1 */
1539 case COMPARE_G_EXPR: /* arg1 < arg2 ? -1 : arg1 == arg2 ? 0 : 1 */
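/* Note: the two variants differ only for an unordered (NaN) comparison.
   With COMPARE_L_EXPR both the GT and EQ tests fail, giving -1; with
   COMPARE_G_EXPR both the LT and EQ tests fail, giving 1.  This matches
   the JVM fcmpl/dcmpl and fcmpg/dcmpg instructions.  */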
1540 arg1 = save_expr (arg1); arg2 = save_expr (arg2);
1542 tree ifexp1 = fold_build2 (op == COMPARE_L_EXPR ? GT_EXPR : LT_EXPR,
1543 boolean_type_node, arg1, arg2);
1544 tree ifexp2 = fold_build2 (EQ_EXPR, boolean_type_node, arg1, arg2);
1545 tree second_compare = fold_build3 (COND_EXPR, int_type_node,
1546 ifexp2, integer_zero_node,
1547 op == COMPARE_L_EXPR
1548 ? integer_minus_one_node
1549 : integer_one_node);
1550 return fold_build3 (COND_EXPR, int_type_node, ifexp1,
1551 op == COMPARE_L_EXPR ? integer_one_node
1552 : integer_minus_one_node,
1553 second_compare);
1555 case COMPARE_EXPR:
1556 arg1 = save_expr (arg1); arg2 = save_expr (arg2);
1558 tree ifexp1 = fold_build2 (LT_EXPR, boolean_type_node, arg1, arg2);
1559 tree ifexp2 = fold_build2 (GT_EXPR, boolean_type_node, arg1, arg2);
1560 tree second_compare = fold_build3 (COND_EXPR, int_type_node,
1561 ifexp2, integer_one_node,
1562 integer_zero_node);
1563 return fold_build3 (COND_EXPR, int_type_node,
1564 ifexp1, integer_minus_one_node, second_compare);
1566 case TRUNC_DIV_EXPR:
1567 case TRUNC_MOD_EXPR:
1568 if (TREE_CODE (type) == REAL_TYPE
1569 && op == TRUNC_MOD_EXPR)
1571 tree call;
1572 if (type != double_type_node)
1574 arg1 = convert (double_type_node, arg1);
1575 arg2 = convert (double_type_node, arg2);
1577 call = build_call_nary (double_type_node,
1578 build_address_of (soft_fmod_node),
1579 2, arg1, arg2);
1580 if (type != double_type_node)
1581 call = convert (type, call);
1582 return call;
1585 if (TREE_CODE (type) == INTEGER_TYPE
1586 && flag_use_divide_subroutine
1587 && ! flag_syntax_only)
1588 return build_java_soft_divmod (op, type, arg1, arg2);
1590 break;
1591 default: ;
1593 return fold_build2 (op, type, arg1, arg2);
1596 static void
1597 expand_java_binop (tree type, enum tree_code op)
1599 tree larg, rarg;
1600 tree ltype = type;
1601 tree rtype = type;
1602 switch (op)
1604 case LSHIFT_EXPR:
1605 case RSHIFT_EXPR:
1606 case URSHIFT_EXPR:
1607 rtype = int_type_node;
1608 rarg = pop_value (rtype);
1609 break;
1610 default:
1611 rarg = pop_value (rtype);
1613 larg = pop_value (ltype);
1614 push_value (build_java_binop (op, type, larg, rarg));
1617 /* Lookup the field named NAME in *TYPEP or its super classes.
1618 If not found, return NULL_TREE.
1619 (If the *TYPEP is not found, or if the field reference is
1620 ambiguous, return error_mark_node.)
1621 If found, return the FIELD_DECL, and set *TYPEP to the
1622 class containing the field. */
1624 tree
1625 lookup_field (tree *typep, tree name)
1627 if (CLASS_P (*typep) && !CLASS_LOADED_P (*typep))
1629 load_class (*typep, 1);
1630 safe_layout_class (*typep);
1631 if (!TYPE_SIZE (*typep) || TREE_CODE (TYPE_SIZE (*typep)) == ERROR_MARK)
1632 return error_mark_node;
1636 tree field, binfo, base_binfo;
1637 tree save_field;
1638 int i;
1640 for (field = TYPE_FIELDS (*typep); field; field = DECL_CHAIN (field))
1641 if (DECL_NAME (field) == name)
1642 return field;
1644 /* Process implemented interfaces. */
1645 save_field = NULL_TREE;
1646 for (binfo = TYPE_BINFO (*typep), i = 0;
1647 BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
1649 tree t = BINFO_TYPE (base_binfo);
1650 if ((field = lookup_field (&t, name)))
1652 if (save_field == field)
1653 continue;
1654 if (save_field == NULL_TREE)
1655 save_field = field;
1656 else
1658 tree i1 = DECL_CONTEXT (save_field);
1659 tree i2 = DECL_CONTEXT (field);
1660 error ("reference %qs is ambiguous: appears in interface %qs and interface %qs",
1661 IDENTIFIER_POINTER (name),
1662 IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (i1))),
1663 IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (i2))));
1664 return error_mark_node;
1669 if (save_field != NULL_TREE)
1670 return save_field;
1672 *typep = CLASSTYPE_SUPER (*typep);
1673 } while (*typep);
1674 return NULL_TREE;
1677 /* Look up the field named NAME in object SELF_VALUE,
1678 which has class SELF_CLASS (a non-handle RECORD_TYPE).
1679 SELF_VALUE is NULL_TREE if looking for a static field. */
1681 tree
1682 build_field_ref (tree self_value, tree self_class, tree name)
1684 tree base_class = self_class;
1685 tree field_decl = lookup_field (&base_class, name);
1686 if (field_decl == NULL_TREE)
1688 error ("field %qs not found", IDENTIFIER_POINTER (name));
1689 return error_mark_node;
1691 if (self_value == NULL_TREE)
1693 return build_static_field_ref (field_decl);
1695 else
1697 tree base_type = promote_type (base_class);
1699 /* CHECK is true if self_value is not the this pointer. */
1700 int check = (! (DECL_P (self_value)
1701 && DECL_NAME (self_value) == this_identifier_node));
1703 /* Determine whether a field offset from NULL will lie within
1704 Page 0: this is necessary on those GNU/Linux/BSD systems that
1705 trap SEGV to generate NullPointerExceptions.
1707 We assume that Page 0 will be mapped with NOPERM, and that
1708 memory may be allocated from any other page, so only field
1709 offsets < pagesize are guaranteed to trap. We also assume
1710 the smallest page size we'll encounter is 4k bytes. */
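/* Illustrative example: with a 4096-byte page, a load through a null
   `this' at field offset 40 touches page 0 and faults, so the SEGV
   handler can raise the NullPointerException; a field at offset 8192
   could land in a mapped page, so for offsets >= page_size the explicit
   check below is kept.  */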
1711 if (! flag_syntax_only && check && ! flag_check_references
1712 && ! flag_indirect_dispatch)
1714 tree field_offset = byte_position (field_decl);
1715 if (! page_size)
1716 page_size = size_int (4096);
1717 check = !tree_int_cst_lt (field_offset, page_size);
1720 if (base_type != TREE_TYPE (self_value))
1721 self_value = fold_build1 (NOP_EXPR, base_type, self_value);
1722 if (! flag_syntax_only && flag_indirect_dispatch)
1724 tree otable_index
1725 = build_int_cst (NULL_TREE, get_symbol_table_index
1726 (field_decl, NULL_TREE,
1727 &TYPE_OTABLE_METHODS (output_class)));
1728 tree field_offset
1729 = build4 (ARRAY_REF, integer_type_node,
1730 TYPE_OTABLE_DECL (output_class), otable_index,
1731 NULL_TREE, NULL_TREE);
1732 tree address;
1734 if (DECL_CONTEXT (field_decl) != output_class)
1735 field_offset
1736 = build3 (COND_EXPR, TREE_TYPE (field_offset),
1737 build2 (EQ_EXPR, boolean_type_node,
1738 field_offset, integer_zero_node),
1739 build_call_nary (void_type_node,
1740 build_address_of (soft_nosuchfield_node),
1741 1, otable_index),
1742 field_offset);
1744 self_value = java_check_reference (self_value, check);
1745 address = fold_build_pointer_plus (self_value, field_offset);
1746 address = fold_convert (build_pointer_type (TREE_TYPE (field_decl)),
1747 address);
1748 return fold_build1 (INDIRECT_REF, TREE_TYPE (field_decl), address);
1751 self_value = build_java_indirect_ref (TREE_TYPE (TREE_TYPE (self_value)),
1752 self_value, check);
1753 return fold_build3 (COMPONENT_REF, TREE_TYPE (field_decl),
1754 self_value, field_decl, NULL_TREE);
1758 tree
1759 lookup_label (int pc)
1761 tree name;
1762 char buf[32];
1763 if (pc > highest_label_pc_this_method)
1764 highest_label_pc_this_method = pc;
1765 targetm.asm_out.generate_internal_label (buf, "LJpc=",
1766 start_label_pc_this_method + pc);
1767 name = get_identifier (buf);
1768 if (IDENTIFIER_LOCAL_VALUE (name))
1769 return IDENTIFIER_LOCAL_VALUE (name);
1770 else
1772 /* The type of the address of a label is return_address_type_node. */
1773 tree decl = create_label_decl (name);
1774 return pushdecl (decl);
1778 /* Generate a unique name for the purpose of loop and switch labels,
1779 try-catch-finally block labels, and temporary variables. */
1781 tree
1782 generate_name (void)
1784 static int l_number = 0;
1785 char buff [32];
1786 targetm.asm_out.generate_internal_label (buff, "LJv", l_number);
1787 l_number++;
1788 return get_identifier (buff);
1791 tree
1792 create_label_decl (tree name)
1794 tree decl;
1795 decl = build_decl (input_location, LABEL_DECL, name,
1796 TREE_TYPE (return_address_type_node));
1797 DECL_CONTEXT (decl) = current_function_decl;
1798 DECL_IGNORED_P (decl) = 1;
1799 return decl;
1802 /* This maps a bytecode offset (PC) to various flags. */
1803 char *instruction_bits;
1805 /* This is a vector of type states for the current method. It is
1806 indexed by PC. Each element is a tree vector holding the type
1807 state at that PC. We only note type states at basic block
1808 boundaries. */
1809 vec<tree, va_gc> *type_states;
1811 static void
1812 note_label (int current_pc ATTRIBUTE_UNUSED, int target_pc)
1814 lookup_label (target_pc);
1815 instruction_bits [target_pc] |= BCODE_JUMP_TARGET;
1818 /* Emit code to jump to TARGET_PC if VALUE1 CONDITION VALUE2,
1819 where CONDITION is one of the compare operators. */
1821 static void
1822 expand_compare (enum tree_code condition, tree value1, tree value2,
1823 int target_pc)
1825 tree target = lookup_label (target_pc);
1826 tree cond = fold_build2 (condition, boolean_type_node, value1, value2);
1827 java_add_stmt
1828 (build3 (COND_EXPR, void_type_node, java_truthvalue_conversion (cond),
1829 build1 (GOTO_EXPR, void_type_node, target),
1830 build_java_empty_stmt ()));
1833 /* Emit code for a TEST-type opcode. */
1835 static void
1836 expand_test (enum tree_code condition, tree type, int target_pc)
1838 tree value1, value2;
1839 flush_quick_stack ();
1840 value1 = pop_value (type);
1841 value2 = (type == ptr_type_node) ? null_pointer_node : integer_zero_node;
1842 expand_compare (condition, value1, value2, target_pc);
1845 /* Emit code for a COND-type opcode. */
1847 static void
1848 expand_cond (enum tree_code condition, tree type, int target_pc)
1850 tree value1, value2;
1851 flush_quick_stack ();
1852 /* note: pop values in opposite order */
1853 value2 = pop_value (type);
1854 value1 = pop_value (type);
1855 /* Maybe should check value1 and value2 for type compatibility ??? */
1856 expand_compare (condition, value1, value2, target_pc);
1859 static void
1860 expand_java_goto (int target_pc)
1862 tree target_label = lookup_label (target_pc);
1863 flush_quick_stack ();
1864 java_add_stmt (build1 (GOTO_EXPR, void_type_node, target_label));
1867 static tree
1868 expand_java_switch (tree selector, int default_pc)
1870 tree switch_expr, x;
1872 flush_quick_stack ();
1873 switch_expr = build3 (SWITCH_EXPR, TREE_TYPE (selector), selector,
1874 NULL_TREE, NULL_TREE);
1875 java_add_stmt (switch_expr);
1877 x = build_case_label (NULL_TREE, NULL_TREE,
1878 create_artificial_label (input_location));
1879 append_to_statement_list (x, &SWITCH_BODY (switch_expr));
1881 x = build1 (GOTO_EXPR, void_type_node, lookup_label (default_pc));
1882 append_to_statement_list (x, &SWITCH_BODY (switch_expr));
1884 return switch_expr;
1887 static void
1888 expand_java_add_case (tree switch_expr, int match, int target_pc)
1890 tree value, x;
1892 value = build_int_cst (TREE_TYPE (switch_expr), match);
1894 x = build_case_label (value, NULL_TREE,
1895 create_artificial_label (input_location));
1896 append_to_statement_list (x, &SWITCH_BODY (switch_expr));
1898 x = build1 (GOTO_EXPR, void_type_node, lookup_label (target_pc));
1899 append_to_statement_list (x, &SWITCH_BODY (switch_expr));
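/* Pop the arguments of a call to a method of type METHOD_TYPE off the
   stack and return them as a vector in declaration (left-to-right) order.
   Pointer arguments are re-cast to their declared types, and small
   integral arguments are promoted to int where the target's prototype
   rules require it.  */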
1902 static vec<tree, va_gc> *
1903 pop_arguments (tree method_type)
1905 function_args_iterator fnai;
1906 tree type;
1907 vec<tree, va_gc> *args = NULL;
1908 int arity;
1910 FOREACH_FUNCTION_ARGS (method_type, type, fnai)
1912 /* XXX: leaky abstraction. */
1913 if (type == void_type_node)
1914 break;
1916 vec_safe_push (args, type);
1919 arity = vec_safe_length (args);
1921 while (arity--)
1923 tree arg = pop_value ((*args)[arity]);
1925 /* We simply cast each argument to its proper type. This is
1926 needed since we lose type information coming out of the
1927 verifier. We also have to do this when we pop an integer
1928 type that must be promoted for the function call. */
1929 if (TREE_CODE (type) == POINTER_TYPE)
1930 arg = build1 (NOP_EXPR, type, arg);
1931 else if (targetm.calls.promote_prototypes (type)
1932 && TYPE_PRECISION (type) < TYPE_PRECISION (integer_type_node)
1933 && INTEGRAL_TYPE_P (type))
1934 arg = convert (integer_type_node, arg);
1936 (*args)[arity] = arg;
1939 return args;
1942 /* Attach to BLOCK the declaration found in *SLOT. */
1945 attach_init_test_initialization_flags (treetreehash_entry **slot, tree block)
1947 treetreehash_entry *ite = *slot;
1949 if (block != error_mark_node)
1951 if (TREE_CODE (block) == BIND_EXPR)
1953 tree body = BIND_EXPR_BODY (block);
1954 DECL_CHAIN (ite->value) = BIND_EXPR_VARS (block);
1955 BIND_EXPR_VARS (block) = ite->value;
1956 body = build2 (COMPOUND_EXPR, void_type_node,
1957 build1 (DECL_EXPR, void_type_node, ite->value), body);
1958 BIND_EXPR_BODY (block) = body;
1960 else
1962 tree body = BLOCK_SUBBLOCKS (block);
1963 TREE_CHAIN (ite->value) = BLOCK_EXPR_DECLS (block);
1964 BLOCK_EXPR_DECLS (block) = ite->value;
1965 body = build2 (COMPOUND_EXPR, void_type_node,
1966 build1 (DECL_EXPR, void_type_node, ite->value), body);
1967 BLOCK_SUBBLOCKS (block) = body;
1971 return true;
1974 /* Build an expression to initialize the class CLAS.
1975 If EXPR is non-NULL, return an expression that first calls the initializer
1976 (if it is needed) and then evaluates EXPR. */
1978 tree
1979 build_class_init (tree clas, tree expr)
1981 tree init;
1983 /* An optimization: if CLAS is a superclass of the class we're
1984 compiling, we don't need to initialize it. However, if CLAS is
1985 an interface, it won't necessarily be initialized, even if we
1986 implement it. */
1987 if ((! CLASS_INTERFACE (TYPE_NAME (clas))
1988 && inherits_from_p (current_class, clas))
1989 || current_class == clas)
1990 return expr;
1992 if (always_initialize_class_p)
1994 init = build_call_nary (void_type_node,
1995 build_address_of (soft_initclass_node),
1996 1, build_class_ref (clas));
1997 TREE_SIDE_EFFECTS (init) = 1;
1999 else
2001 tree *init_test_decl;
2002 tree decl;
2003 init_test_decl = java_treetreehash_new
2004 (DECL_FUNCTION_INIT_TEST_TABLE (current_function_decl), clas);
2006 if (*init_test_decl == NULL)
2008 /* Build a declaration and mark it as a flag used to track
2009 static class initializations. */
2010 decl = build_decl (input_location, VAR_DECL, NULL_TREE,
2011 boolean_type_node);
2012 MAYBE_CREATE_VAR_LANG_DECL_SPECIFIC (decl);
2013 DECL_CONTEXT (decl) = current_function_decl;
2014 DECL_INITIAL (decl) = boolean_false_node;
2015 /* Don't emit any symbolic debugging info for this decl. */
2016 DECL_IGNORED_P (decl) = 1;
2017 *init_test_decl = decl;
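/* Using the per-function flag looked up (or created) above, emit code
   equivalent to
       if (flag == false)
         <call soft_initclass_node on CLAS>;
       flag = true;
   so the runtime initializer is invoked at most once per flag.  */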
2020 init = build_call_nary (void_type_node,
2021 build_address_of (soft_initclass_node),
2022 1, build_class_ref (clas));
2023 TREE_SIDE_EFFECTS (init) = 1;
2024 init = build3 (COND_EXPR, void_type_node,
2025 build2 (EQ_EXPR, boolean_type_node,
2026 *init_test_decl, boolean_false_node),
2027 init, integer_zero_node);
2028 TREE_SIDE_EFFECTS (init) = 1;
2029 init = build2 (COMPOUND_EXPR, TREE_TYPE (expr), init,
2030 build2 (MODIFY_EXPR, boolean_type_node,
2031 *init_test_decl, boolean_true_node));
2032 TREE_SIDE_EFFECTS (init) = 1;
2035 if (expr != NULL_TREE)
2037 expr = build2 (COMPOUND_EXPR, TREE_TYPE (expr), init, expr);
2038 TREE_SIDE_EFFECTS (expr) = 1;
2039 return expr;
2041 return init;
2046 /* Rewrite expensive calls that require stack unwinding at runtime to
2047 cheaper alternatives. The logic here performs these
2048 transformations:
2050 java.lang.Class.forName("foo") -> java.lang.Class.forName("foo", class$)
2051 java.lang.Class.getClassLoader() -> java.lang.Class.getClassLoader(class$)
2055 typedef struct
2057 const char *classname;
2058 const char *method;
2059 const char *signature;
2060 const char *new_classname;
2061 const char *new_signature;
2062 int flags;
2063 void (*rewrite_arglist) (vec<tree, va_gc> **);
2064 } rewrite_rule;
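/* A rule matches a call site by the defining class name, the method name
   and its signature.  The call is redirected to NEW_CLASSNAME with
   NEW_SIGNATURE; REWRITE_ARGLIST, if non-null, appends the extra argument,
   and FLAGS supplies the access flags used if the replacement method has
   to be created (see maybe_rewrite_invocation below).  */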
2066 /* Add __builtin_return_address(0) to the end of an arglist. */
2069 static void
2070 rewrite_arglist_getcaller (vec<tree, va_gc> **arglist)
2072 tree retaddr
2073 = build_call_expr (builtin_decl_explicit (BUILT_IN_RETURN_ADDRESS),
2074 1, integer_zero_node);
2076 DECL_UNINLINABLE (current_function_decl) = 1;
2078 vec_safe_push (*arglist, retaddr);
2081 /* Add this.class to the end of an arglist. */
2083 static void
2084 rewrite_arglist_getclass (vec<tree, va_gc> **arglist)
2086 vec_safe_push (*arglist, build_class_ref (output_class));
2089 static rewrite_rule rules[] =
2090 {{"java.lang.Class", "getClassLoader", "()Ljava/lang/ClassLoader;",
2091 "java.lang.Class", "(Ljava/lang/Class;)Ljava/lang/ClassLoader;",
2092 ACC_FINAL|ACC_PRIVATE, rewrite_arglist_getclass},
2094 {"java.lang.Class", "forName", "(Ljava/lang/String;)Ljava/lang/Class;",
2095 "java.lang.Class", "(Ljava/lang/String;Ljava/lang/Class;)Ljava/lang/Class;",
2096 ACC_FINAL|ACC_PRIVATE|ACC_STATIC, rewrite_arglist_getclass},
2098 {"gnu.classpath.VMStackWalker", "getCallingClass", "()Ljava/lang/Class;",
2099 "gnu.classpath.VMStackWalker", "(Lgnu/gcj/RawData;)Ljava/lang/Class;",
2100 ACC_FINAL|ACC_PRIVATE|ACC_STATIC, rewrite_arglist_getcaller},
2102 {"gnu.classpath.VMStackWalker", "getCallingClassLoader",
2103 "()Ljava/lang/ClassLoader;",
2104 "gnu.classpath.VMStackWalker", "(Lgnu/gcj/RawData;)Ljava/lang/ClassLoader;",
2105 ACC_FINAL|ACC_PRIVATE|ACC_STATIC, rewrite_arglist_getcaller},
2107 {"gnu.java.lang.VMCPStringBuilder", "toString", "([CII)Ljava/lang/String;",
2108 "java.lang.String", "([CII)Ljava/lang/String;",
2109 ACC_FINAL|ACC_PRIVATE|ACC_STATIC, NULL},
2111 {NULL, NULL, NULL, NULL, NULL, 0, NULL}};
2113 /* True if this method is special, i.e. it's a private method that
2114 should be exported from a DSO. */
2116 bool
2117 special_method_p (tree candidate_method)
2119 tree context = DECL_NAME (TYPE_NAME (DECL_CONTEXT (candidate_method)));
2120 tree method = DECL_NAME (candidate_method);
2121 rewrite_rule *p;
2123 for (p = rules; p->classname; p++)
2125 if (get_identifier (p->classname) == context
2126 && get_identifier (p->method) == method)
2127 return true;
2129 return false;
2132 /* Scan the rules list for replacements for *METHOD_P and replace the
2133 args accordingly. If the rewrite results in an access to a private
2134 method, update SPECIAL. */
2136 void
2137 maybe_rewrite_invocation (tree *method_p, vec<tree, va_gc> **arg_list_p,
2138 tree *method_signature_p, tree *special)
2140 tree context = DECL_NAME (TYPE_NAME (DECL_CONTEXT (*method_p)));
2141 rewrite_rule *p;
2142 *special = NULL_TREE;
2144 for (p = rules; p->classname; p++)
2146 if (get_identifier (p->classname) == context)
2148 tree method = DECL_NAME (*method_p);
2149 if (get_identifier (p->method) == method
2150 && get_identifier (p->signature) == *method_signature_p)
2152 tree maybe_method;
2153 tree destination_class
2154 = lookup_class (get_identifier (p->new_classname));
2155 gcc_assert (destination_class);
2156 maybe_method
2157 = lookup_java_method (destination_class,
2158 method,
2159 get_identifier (p->new_signature));
2160 if (! maybe_method && ! flag_verify_invocations)
2162 maybe_method
2163 = add_method (destination_class, p->flags,
2164 method, get_identifier (p->new_signature));
2165 DECL_EXTERNAL (maybe_method) = 1;
2167 *method_p = maybe_method;
2168 gcc_assert (*method_p);
2169 if (p->rewrite_arglist)
2170 p->rewrite_arglist (arg_list_p);
2171 *method_signature_p = get_identifier (p->new_signature);
2172 *special = integer_one_node;
2174 break;
2182 tree
2183 build_known_method_ref (tree method, tree method_type ATTRIBUTE_UNUSED,
2184 tree self_type, tree method_signature ATTRIBUTE_UNUSED,
2185 vec<tree, va_gc> *arg_list ATTRIBUTE_UNUSED, tree special)
2187 tree func;
2188 if (is_compiled_class (self_type))
2190 /* With indirect dispatch we have to use indirect calls for all
2191 publicly visible methods or gcc will use PLT indirections
2192 to reach them. We also have to use indirect dispatch for all
2193 external methods. */
2194 if (! flag_indirect_dispatch
2195 || (! DECL_EXTERNAL (method) && ! TREE_PUBLIC (method)))
2197 func = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (method)),
2198 method);
2200 else
2202 tree table_index
2203 = build_int_cst (NULL_TREE,
2204 (get_symbol_table_index
2205 (method, special,
2206 &TYPE_ATABLE_METHODS (output_class))));
2207 func
2208 = build4 (ARRAY_REF,
2209 TREE_TYPE (TREE_TYPE (TYPE_ATABLE_DECL (output_class))),
2210 TYPE_ATABLE_DECL (output_class), table_index,
2211 NULL_TREE, NULL_TREE);
2213 func = convert (method_ptr_type_node, func);
2215 else
2217 /* We don't know whether the method has been (statically) compiled.
2218 Compile this code to get a reference to the method's code:
2220 SELF_TYPE->methods[METHOD_INDEX].ncode
2224 int method_index = 0;
2225 tree meth, ref;
2227 /* The method might actually be declared in some superclass, so
2228 we have to use its class context, not the caller's notion of
2229 where the method is. */
2230 self_type = DECL_CONTEXT (method);
2231 ref = build_class_ref (self_type);
2232 ref = build1 (INDIRECT_REF, class_type_node, ref);
2233 if (ncode_ident == NULL_TREE)
2234 ncode_ident = get_identifier ("ncode");
2235 if (methods_ident == NULL_TREE)
2236 methods_ident = get_identifier ("methods");
2237 ref = build3 (COMPONENT_REF, method_ptr_type_node, ref,
2238 lookup_field (&class_type_node, methods_ident),
2239 NULL_TREE);
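/* Walk the class's method list counting entries until METHOD is found;
   the byte offset into the methods array is that count multiplied by the
   size of one method record (method_type_node).  */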
2240 for (meth = TYPE_METHODS (self_type);
2241 ; meth = DECL_CHAIN (meth))
2243 if (method == meth)
2244 break;
2245 if (meth == NULL_TREE)
2246 fatal_error ("method '%s' not found in class",
2247 IDENTIFIER_POINTER (DECL_NAME (method)));
2248 method_index++;
2250 method_index *= int_size_in_bytes (method_type_node);
2251 ref = fold_build_pointer_plus_hwi (ref, method_index);
2252 ref = build1 (INDIRECT_REF, method_type_node, ref);
2253 func = build3 (COMPONENT_REF, nativecode_ptr_type_node,
2254 ref, lookup_field (&method_type_node, ncode_ident),
2255 NULL_TREE);
2257 return func;
2260 tree
2261 invoke_build_dtable (int is_invoke_interface, vec<tree, va_gc> *arg_list)
2263 tree dtable, objectref;
2264 tree saved = save_expr ((*arg_list)[0]);
2266 (*arg_list)[0] = saved;
2268 /* If we're dealing with interfaces and if the objectref
2269 argument is an array then get the dispatch table of the class
2270 Object rather than the one from the objectref. */
2271 objectref = (is_invoke_interface
2272 && is_array_type_p (TREE_TYPE (saved))
2273 ? build_class_ref (object_type_node) : saved);
2275 if (dtable_ident == NULL_TREE)
2276 dtable_ident = get_identifier ("vtable");
2277 dtable = build_java_indirect_ref (object_type_node, objectref,
2278 flag_check_references);
2279 dtable = build3 (COMPONENT_REF, dtable_ptr_type, dtable,
2280 lookup_field (&object_type_node, dtable_ident), NULL_TREE);
2282 return dtable;
2285 /* Determine the index in SYMBOL_TABLE for a reference to the decl
2286 T. If this decl has not been seen before, it will be added to the
2287 [oa]table_methods. If it has, the existing table slot will be
2288 reused. */
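/* The returned index is 1-based: after the search loop, I is either the
   position of the matching entry or, once the new entry has been pushed,
   the position of that new entry, and we return I + 1.  */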
2291 get_symbol_table_index (tree t, tree special,
2292 vec<method_entry, va_gc> **symbol_table)
2294 method_entry *e;
2295 unsigned i;
2296 method_entry elem = {t, special};
2298 FOR_EACH_VEC_SAFE_ELT (*symbol_table, i, e)
2299 if (t == e->method && special == e->special)
2300 goto done;
2302 vec_safe_push (*symbol_table, elem);
2304 done:
2305 return i + 1;
2308 tree
2309 build_invokevirtual (tree dtable, tree method, tree special)
2311 tree func;
2312 tree nativecode_ptr_ptr_type_node
2313 = build_pointer_type (nativecode_ptr_type_node);
2314 tree method_index;
2315 tree otable_index;
2317 if (flag_indirect_dispatch)
2319 gcc_assert (! CLASS_INTERFACE (TYPE_NAME (DECL_CONTEXT (method))));
2321 otable_index
2322 = build_int_cst (NULL_TREE, get_symbol_table_index
2323 (method, special,
2324 &TYPE_OTABLE_METHODS (output_class)));
2325 method_index = build4 (ARRAY_REF, integer_type_node,
2326 TYPE_OTABLE_DECL (output_class),
2327 otable_index, NULL_TREE, NULL_TREE);
2329 else
2331 /* We fetch the DECL_VINDEX field directly here, rather than
2332 using get_method_index(). DECL_VINDEX is the true offset
2333 from the vtable base to a method, regardless of any extra
2334 words inserted at the start of the vtable. */
2335 method_index = DECL_VINDEX (method);
2336 method_index = size_binop (MULT_EXPR, method_index,
2337 TYPE_SIZE_UNIT (nativecode_ptr_ptr_type_node));
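/* On targets whose vtables hold function descriptors, each slot occupies
   TARGET_VTABLE_USES_DESCRIPTORS words, so scale the byte offset again;
   the descriptor's address is then used directly (see the NOP_EXPR below)
   rather than loading a code pointer from the slot.  */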
2338 if (TARGET_VTABLE_USES_DESCRIPTORS)
2339 method_index = size_binop (MULT_EXPR, method_index,
2340 size_int (TARGET_VTABLE_USES_DESCRIPTORS));
2343 func = fold_build_pointer_plus (dtable, method_index);
2345 if (TARGET_VTABLE_USES_DESCRIPTORS)
2346 func = build1 (NOP_EXPR, nativecode_ptr_type_node, func);
2347 else
2349 func = fold_convert (nativecode_ptr_ptr_type_node, func);
2350 func = build1 (INDIRECT_REF, nativecode_ptr_type_node, func);
2353 return func;
2356 static GTY(()) tree class_ident;
2357 tree
2358 build_invokeinterface (tree dtable, tree method)
2360 tree interface;
2361 tree idx;
2363 /* We expand invokeinterface here. */
2365 if (class_ident == NULL_TREE)
2366 class_ident = get_identifier ("class");
2368 dtable = build_java_indirect_ref (dtable_type, dtable,
2369 flag_check_references);
2370 dtable = build3 (COMPONENT_REF, class_ptr_type, dtable,
2371 lookup_field (&dtable_type, class_ident), NULL_TREE);
2373 interface = DECL_CONTEXT (method);
2374 gcc_assert (CLASS_INTERFACE (TYPE_NAME (interface)));
2375 layout_class_methods (interface);
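/* With indirect dispatch, each itable entry occupies two consecutive
   slots: the interface class at 2*K-1 and the method index at 2*K, where
   K is the 1-based value returned by get_symbol_table_index.  */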
2377 if (flag_indirect_dispatch)
2379 int itable_index
2380 = 2 * (get_symbol_table_index
2381 (method, NULL_TREE, &TYPE_ITABLE_METHODS (output_class)));
2382 interface
2383 = build4 (ARRAY_REF,
2384 TREE_TYPE (TREE_TYPE (TYPE_ITABLE_DECL (output_class))),
2385 TYPE_ITABLE_DECL (output_class),
2386 build_int_cst (NULL_TREE, itable_index-1),
2387 NULL_TREE, NULL_TREE);
2388 idx
2389 = build4 (ARRAY_REF,
2390 TREE_TYPE (TREE_TYPE (TYPE_ITABLE_DECL (output_class))),
2391 TYPE_ITABLE_DECL (output_class),
2392 build_int_cst (NULL_TREE, itable_index),
2393 NULL_TREE, NULL_TREE);
2394 interface = convert (class_ptr_type, interface);
2395 idx = convert (integer_type_node, idx);
2397 else
2399 idx = build_int_cst (NULL_TREE,
2400 get_interface_method_index (method, interface));
2401 interface = build_class_ref (interface);
2404 return build_call_nary (ptr_type_node,
2405 build_address_of (soft_lookupinterfacemethod_node),
2406 3, dtable, interface, idx);
2409 /* Expand one of the invoke_* opcodes.
2410 OPCODE is the specific opcode.
2411 METHOD_REF_INDEX is an index into the constant pool.
2412 NARGS is the number of arguments, or -1 if not specified. */
2414 static void
2415 expand_invoke (int opcode, int method_ref_index, int nargs ATTRIBUTE_UNUSED)
2417 tree method_signature
2418 = COMPONENT_REF_SIGNATURE(&current_jcf->cpool, method_ref_index);
2419 tree method_name = COMPONENT_REF_NAME (&current_jcf->cpool,
2420 method_ref_index);
2421 tree self_type
2422 = get_class_constant (current_jcf,
2423 COMPONENT_REF_CLASS_INDEX(&current_jcf->cpool,
2424 method_ref_index));
2425 const char *const self_name
2426 = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (self_type)));
2427 tree call, func, method, method_type;
2428 vec<tree, va_gc> *arg_list;
2429 tree check = NULL_TREE;
2431 tree special = NULL_TREE;
2433 if (! CLASS_LOADED_P (self_type))
2435 load_class (self_type, 1);
2436 safe_layout_class (self_type);
2437 if (TREE_CODE (TYPE_SIZE (self_type)) == ERROR_MARK)
2438 fatal_error ("failed to find class '%s'", self_name);
2440 layout_class_methods (self_type);
2442 if (ID_INIT_P (method_name))
2443 method = lookup_java_constructor (self_type, method_signature);
2444 else
2445 method = lookup_java_method (self_type, method_name, method_signature);
2447 /* We've found a method in a class other than the one in which it
2448 was wanted. This can happen if, for instance, we're trying to
2449 compile invokespecial super.equals().
2450 FIXME: This is a kludge. Rather than nullifying the result, we
2451 should change lookup_java_method() so that it doesn't search the
2452 superclass chain when we're BC-compiling. */
2453 if (! flag_verify_invocations
2454 && method
2455 && ! TYPE_ARRAY_P (self_type)
2456 && self_type != DECL_CONTEXT (method))
2457 method = NULL_TREE;
2459 /* We've found a method in an interface, but this isn't an interface
2460 call. */
2461 if (opcode != OPCODE_invokeinterface
2462 && method
2463 && (CLASS_INTERFACE (TYPE_NAME (DECL_CONTEXT (method)))))
2464 method = NULL_TREE;
2466 /* We've found a non-interface method but we are making an
2467 interface call. This can happen if the interface overrides a
2468 method in Object. */
2469 if (! flag_verify_invocations
2470 && opcode == OPCODE_invokeinterface
2471 && method
2472 && ! CLASS_INTERFACE (TYPE_NAME (DECL_CONTEXT (method))))
2473 method = NULL_TREE;
2475 if (method == NULL_TREE)
2477 if (flag_verify_invocations || ! flag_indirect_dispatch)
2479 error ("class '%s' has no method named '%s' matching signature '%s'",
2480 self_name,
2481 IDENTIFIER_POINTER (method_name),
2482 IDENTIFIER_POINTER (method_signature));
2484 else
2486 int flags = ACC_PUBLIC;
2487 if (opcode == OPCODE_invokestatic)
2488 flags |= ACC_STATIC;
2489 if (opcode == OPCODE_invokeinterface)
2491 flags |= ACC_INTERFACE | ACC_ABSTRACT;
2492 CLASS_INTERFACE (TYPE_NAME (self_type)) = 1;
2494 method = add_method (self_type, flags, method_name,
2495 method_signature);
2496 DECL_ARTIFICIAL (method) = 1;
2497 METHOD_DUMMY (method) = 1;
2498 layout_class_method (self_type, NULL,
2499 method, NULL);
2503 /* Check that invokestatic targets a static, non-abstract method and that the other invoke opcodes do not target a static method. */
2504 if (method != NULL_TREE)
2506 if (opcode == OPCODE_invokestatic)
2508 if (!METHOD_STATIC (method))
2510 error ("invokestatic on non static method");
2511 method = NULL_TREE;
2513 else if (METHOD_ABSTRACT (method))
2515 error ("invokestatic on abstract method");
2516 method = NULL_TREE;
2519 else
2521 if (METHOD_STATIC (method))
2523 error ("invoke[non-static] on static method");
2524 method = NULL_TREE;
2529 if (method == NULL_TREE)
2531 /* If we got here, we emitted an error message above. So we
2532 just pop the arguments, push a properly-typed zero, and
2533 continue. */
2534 method_type = get_type_from_signature (method_signature);
2535 pop_arguments (method_type);
2536 if (opcode != OPCODE_invokestatic)
2537 pop_type (self_type);
2538 method_type = promote_type (TREE_TYPE (method_type));
2539 push_value (convert (method_type, integer_zero_node));
2540 return;
2543 arg_list = pop_arguments (TREE_TYPE (method));
2544 flush_quick_stack ();
2546 maybe_rewrite_invocation (&method, &arg_list, &method_signature,
2547 &special);
2548 method_type = TREE_TYPE (method);
2550 func = NULL_TREE;
2551 if (opcode == OPCODE_invokestatic)
2552 func = build_known_method_ref (method, method_type, self_type,
2553 method_signature, arg_list, special);
2554 else if (opcode == OPCODE_invokespecial
2555 || (opcode == OPCODE_invokevirtual
2556 && (METHOD_PRIVATE (method)
2557 || METHOD_FINAL (method)
2558 || CLASS_FINAL (TYPE_NAME (self_type)))))
2560 /* If the object for the method call is null, we throw an
2561 exception. We don't do this if the object is the current
2562 method's `this'. In other cases we just rely on an
2563 optimization pass to eliminate redundant checks. FIXME:
2564 Unfortunately there doesn't seem to be a way to determine
2565 what the current method is right now.
2566 We do omit the check if we're calling <init>. */
2567 /* We use a SAVE_EXPR here to make sure we only evaluate
2568 the new `self' expression once. */
2569 tree save_arg = save_expr ((*arg_list)[0]);
2570 (*arg_list)[0] = save_arg;
2571 check = java_check_reference (save_arg, ! DECL_INIT_P (method));
2572 func = build_known_method_ref (method, method_type, self_type,
2573 method_signature, arg_list, special);
2575 else
2577 tree dtable = invoke_build_dtable (opcode == OPCODE_invokeinterface,
2578 arg_list);
2579 if (opcode == OPCODE_invokevirtual)
2580 func = build_invokevirtual (dtable, method, special);
2581 else
2582 func = build_invokeinterface (dtable, method);
2585 if (TREE_CODE (func) == ADDR_EXPR)
2586 TREE_TYPE (func) = build_pointer_type (method_type);
2587 else
2588 func = build1 (NOP_EXPR, build_pointer_type (method_type), func);
2590 call = build_call_vec (TREE_TYPE (method_type), func, arg_list);
2591 TREE_SIDE_EFFECTS (call) = 1;
2592 call = check_for_builtin (method, call);
2594 if (check != NULL_TREE)
2596 call = build2 (COMPOUND_EXPR, TREE_TYPE (call), check, call);
2597 TREE_SIDE_EFFECTS (call) = 1;
2600 if (TREE_CODE (TREE_TYPE (method_type)) == VOID_TYPE)
2601 java_add_stmt (call);
2602 else
2604 push_value (call);
2605 flush_quick_stack ();
2609 /* Create a stub which will be put into the vtable but which will call
2610 a JNI function. */
2612 tree
2613 build_jni_stub (tree method)
2615 tree jnifunc, call, body, method_sig, arg_types;
2616 tree jniarg0, jniarg1, jniarg2, jniarg3;
2617 tree jni_func_type, tem;
2618 tree env_var, res_var = NULL_TREE, block;
2619 tree method_args;
2620 tree meth_var;
2621 tree bind;
2622 vec<tree, va_gc> *args = NULL;
2623 int args_size = 0;
2625 tree klass = DECL_CONTEXT (method);
2626 klass = build_class_ref (klass);
2628 gcc_assert (METHOD_NATIVE (method) && flag_jni);
2630 DECL_ARTIFICIAL (method) = 1;
2631 DECL_EXTERNAL (method) = 0;
2633 env_var = build_decl (input_location,
2634 VAR_DECL, get_identifier ("env"), ptr_type_node);
2635 DECL_CONTEXT (env_var) = method;
2637 if (TREE_TYPE (TREE_TYPE (method)) != void_type_node)
2639 res_var = build_decl (input_location, VAR_DECL, get_identifier ("res"),
2640 TREE_TYPE (TREE_TYPE (method)));
2641 DECL_CONTEXT (res_var) = method;
2642 DECL_CHAIN (env_var) = res_var;
2645 method_args = DECL_ARGUMENTS (method);
2646 block = build_block (env_var, NULL_TREE, method_args, NULL_TREE);
2647 TREE_SIDE_EFFECTS (block) = 1;
2649 /* Compute the local `env' by calling _Jv_GetJNIEnvNewFrame. */
2650 body = build2 (MODIFY_EXPR, ptr_type_node, env_var,
2651 build_call_nary (ptr_type_node,
2652 build_address_of (soft_getjnienvnewframe_node),
2653 1, klass));
2655 /* The JNIEnv structure is the first argument to the JNI function. */
2656 args_size += int_size_in_bytes (TREE_TYPE (env_var));
2657 vec_safe_push (args, env_var);
2659 /* For a static method the second argument is the class. For a
2660 non-static method the second argument is `this'; that is already
2661 available in the argument list. */
2662 if (METHOD_STATIC (method))
2664 args_size += int_size_in_bytes (TREE_TYPE (klass));
2665 vec_safe_push (args, klass);
2668 /* All the arguments to this method become arguments to the
2669 underlying JNI function. If we had to wrap object arguments in a
2670 special way, we would do that here. */
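/* ARGS_SIZE accumulates the argument sizes in bytes, each declared
   argument rounded up to PARM_BOUNDARY; it becomes the last argument
   (JNIARG3) of the _Jv_LookupJNIMethod call built below.  */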
2671 for (tem = method_args; tem != NULL_TREE; tem = DECL_CHAIN (tem))
2673 int arg_bits = TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (tem)));
2674 #ifdef PARM_BOUNDARY
2675 arg_bits = (((arg_bits + PARM_BOUNDARY - 1) / PARM_BOUNDARY)
2676 * PARM_BOUNDARY);
2677 #endif
2678 args_size += (arg_bits / BITS_PER_UNIT);
2680 vec_safe_push (args, tem);
2682 arg_types = TYPE_ARG_TYPES (TREE_TYPE (method));
2684 /* Argument types for static methods and the JNIEnv structure.
2685 FIXME: Write and use build_function_type_vec to avoid this. */
2686 if (METHOD_STATIC (method))
2687 arg_types = tree_cons (NULL_TREE, object_ptr_type_node, arg_types);
2688 arg_types = tree_cons (NULL_TREE, ptr_type_node, arg_types);
2690 /* We call _Jv_LookupJNIMethod to find the actual underlying
2691 function pointer. _Jv_LookupJNIMethod will throw the appropriate
2692 exception if this function is not found at runtime. */
2693 method_sig = build_java_signature (TREE_TYPE (method));
2694 jniarg0 = klass;
2695 jniarg1 = build_utf8_ref (DECL_NAME (method));
2696 jniarg2 = build_utf8_ref (unmangle_classname
2697 (IDENTIFIER_POINTER (method_sig),
2698 IDENTIFIER_LENGTH (method_sig)));
2699 jniarg3 = build_int_cst (NULL_TREE, args_size);
2701 tem = build_function_type (TREE_TYPE (TREE_TYPE (method)), arg_types);
2703 #ifdef MODIFY_JNI_METHOD_CALL
2704 tem = MODIFY_JNI_METHOD_CALL (tem);
2705 #endif
2707 jni_func_type = build_pointer_type (tem);
2709 /* Use the actual function type, rather than a generic pointer type,
2710 such that this decl keeps the actual pointer type from being
2711 garbage-collected. If it is, we end up using canonical types
2712 with different uids for equivalent function types, and this in
2713 turn causes utf8 identifiers and output order to vary. */
2714 meth_var = build_decl (input_location,
2715 VAR_DECL, get_identifier ("meth"), jni_func_type);
2716 TREE_STATIC (meth_var) = 1;
2717 TREE_PUBLIC (meth_var) = 0;
2718 DECL_EXTERNAL (meth_var) = 0;
2719 DECL_CONTEXT (meth_var) = method;
2720 DECL_ARTIFICIAL (meth_var) = 1;
2721 DECL_INITIAL (meth_var) = null_pointer_node;
2722 TREE_USED (meth_var) = 1;
2723 chainon (env_var, meth_var);
2724 build_result_decl (method);
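/* JNIFUNC caches the lookup in METH_VAR: if METH_VAR is already non-null
   it is used directly, otherwise _Jv_LookupJNIMethod is called and its
   result is stored into METH_VAR before being used.  */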
2726 jnifunc = build3 (COND_EXPR, jni_func_type,
2727 build2 (NE_EXPR, boolean_type_node,
2728 meth_var, build_int_cst (TREE_TYPE (meth_var), 0)),
2729 meth_var,
2730 build2 (MODIFY_EXPR, jni_func_type, meth_var,
2731 build1
2732 (NOP_EXPR, jni_func_type,
2733 build_call_nary (ptr_type_node,
2734 build_address_of
2735 (soft_lookupjnimethod_node),
2737 jniarg0, jniarg1,
2738 jniarg2, jniarg3))));
2740 /* Now we make the actual JNI call via the resulting function
2741 pointer. */
2742 call = build_call_vec (TREE_TYPE (TREE_TYPE (method)), jnifunc, args);
2744 /* If the JNI call returned a result, capture it here. If we had to
2745 unwrap JNI object results, we would do that here. */
2746 if (res_var != NULL_TREE)
2748 /* If the call returns an object, it may return a JNI weak
2749 reference, in which case we must unwrap it. */
2750 if (! JPRIMITIVE_TYPE_P (TREE_TYPE (TREE_TYPE (method))))
2751 call = build_call_nary (TREE_TYPE (TREE_TYPE (method)),
2752 build_address_of (soft_unwrapjni_node),
2753 1, call);
2754 call = build2 (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (method)),
2755 res_var, call);
2758 TREE_SIDE_EFFECTS (call) = 1;
2760 body = build2 (COMPOUND_EXPR, void_type_node, body, call);
2761 TREE_SIDE_EFFECTS (body) = 1;
2763 /* Now free the environment we allocated. */
2764 call = build_call_nary (ptr_type_node,
2765 build_address_of (soft_jnipopsystemframe_node),
2766 1, env_var);
2767 TREE_SIDE_EFFECTS (call) = 1;
2768 body = build2 (COMPOUND_EXPR, void_type_node, body, call);
2769 TREE_SIDE_EFFECTS (body) = 1;
2771 /* Finally, do the return. */
2772 if (res_var != NULL_TREE)
2774 tree drt;
2775 gcc_assert (DECL_RESULT (method));
2776 /* Make sure we copy the result variable to the actual
2777 result. We use the type of the DECL_RESULT because it
2778 might be different from the return type of the function:
2779 it might be promoted. */
2780 drt = TREE_TYPE (DECL_RESULT (method));
2781 if (drt != TREE_TYPE (res_var))
2782 res_var = build1 (CONVERT_EXPR, drt, res_var);
2783 res_var = build2 (MODIFY_EXPR, drt, DECL_RESULT (method), res_var);
2784 TREE_SIDE_EFFECTS (res_var) = 1;
2787 body = build2 (COMPOUND_EXPR, void_type_node, body,
2788 build1 (RETURN_EXPR, void_type_node, res_var));
2789 TREE_SIDE_EFFECTS (body) = 1;
2791 /* Prepend class initialization for static methods reachable from
2792 other classes. */
2793 if (METHOD_STATIC (method)
2794 && (! METHOD_PRIVATE (method)
2795 || INNER_CLASS_P (DECL_CONTEXT (method))))
2797 tree init = build_call_expr (soft_initclass_node, 1,
2798 klass);
2799 body = build2 (COMPOUND_EXPR, void_type_node, init, body);
2800 TREE_SIDE_EFFECTS (body) = 1;
2803 bind = build3 (BIND_EXPR, void_type_node, BLOCK_VARS (block),
2804 body, block);
2805 return bind;
2809 /* Given lvalue EXP, return a volatile expression that references the
2810 same object. */
2812 tree
2813 java_modify_addr_for_volatile (tree exp)
2815 tree exp_type = TREE_TYPE (exp);
2816 tree v_type
2817 = build_qualified_type (exp_type,
2818 TYPE_QUALS (exp_type) | TYPE_QUAL_VOLATILE);
2819 tree addr = build_fold_addr_expr (exp);
2820 v_type = build_pointer_type (v_type);
2821 addr = fold_convert (v_type, addr);
2822 exp = build_fold_indirect_ref (addr);
2823 return exp;
2827 /* Expand an operation to extract from or store into a field.
2828 IS_STATIC is 1 iff the field is static.
2829 IS_PUTTING is 1 for putting into a field; 0 for getting from the field.
2830 FIELD_REF_INDEX is an index into the constant pool. */
2832 static void
2833 expand_java_field_op (int is_static, int is_putting, int field_ref_index)
2835 tree self_type
2836 = get_class_constant (current_jcf,
2837 COMPONENT_REF_CLASS_INDEX (&current_jcf->cpool,
2838 field_ref_index));
2839 const char *self_name
2840 = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (self_type)));
2841 tree field_name = COMPONENT_REF_NAME (&current_jcf->cpool, field_ref_index);
2842 tree field_signature = COMPONENT_REF_SIGNATURE (&current_jcf->cpool,
2843 field_ref_index);
2844 tree field_type = get_type_from_signature (field_signature);
2845 tree new_value = is_putting ? pop_value (field_type) : NULL_TREE;
2846 tree field_ref;
2847 int is_error = 0;
2848 tree original_self_type = self_type;
2849 tree field_decl;
2850 tree modify_expr;
2852 if (! CLASS_LOADED_P (self_type))
2853 load_class (self_type, 1);
2854 field_decl = lookup_field (&self_type, field_name);
2855 if (field_decl == error_mark_node)
2857 is_error = 1;
2859 else if (field_decl == NULL_TREE)
2861 if (! flag_verify_invocations)
2863 int flags = ACC_PUBLIC;
2864 if (is_static)
2865 flags |= ACC_STATIC;
2866 self_type = original_self_type;
2867 field_decl = add_field (original_self_type, field_name,
2868 field_type, flags);
2869 DECL_ARTIFICIAL (field_decl) = 1;
2870 DECL_IGNORED_P (field_decl) = 1;
2871 #if 0
2872 /* FIXME: We should be pessimistic about volatility. We
2873 don't know one way or another, but this is safe.
2874 However, doing this has bad effects on code quality. We
2875 need to look at better ways to do this. */
2876 TREE_THIS_VOLATILE (field_decl) = 1;
2877 #endif
2879 else
2881 error ("missing field '%s' in '%s'",
2882 IDENTIFIER_POINTER (field_name), self_name);
2883 is_error = 1;
2886 else if (build_java_signature (TREE_TYPE (field_decl)) != field_signature)
2888 error ("mismatching signature for field '%s' in '%s'",
2889 IDENTIFIER_POINTER (field_name), self_name);
2890 is_error = 1;
2892 field_ref = is_static ? NULL_TREE : pop_value (self_type);
2893 if (is_error)
2895 if (! is_putting)
2896 push_value (convert (field_type, integer_zero_node));
2897 flush_quick_stack ();
2898 return;
2901 field_ref = build_field_ref (field_ref, self_type, field_name);
2902 if (is_static
2903 && ! flag_indirect_dispatch)
2905 tree context = DECL_CONTEXT (field_ref);
2906 if (context != self_type && CLASS_INTERFACE (TYPE_NAME (context)))
2907 field_ref = build_class_init (context, field_ref);
2908 else
2909 field_ref = build_class_init (self_type, field_ref);
2911 if (is_putting)
2913 flush_quick_stack ();
2914 if (FIELD_FINAL (field_decl))
2916 if (DECL_CONTEXT (field_decl) != current_class)
2917 error ("assignment to final field %q+D not in field%'s class",
2918 field_decl);
2919 /* We used to check for assignments to final fields not
2920 occurring in the class initializer or in a constructor
2921 here. However, this constraint doesn't seem to be
2922 enforced by the JVM. */
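/* For a volatile field, go through a volatile-qualified reference and emit
   a full barrier (__sync_synchronize) before the store; the load path
   below instead emits the barrier after the value has been copied out.  */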
2925 if (TREE_THIS_VOLATILE (field_decl))
2926 field_ref = java_modify_addr_for_volatile (field_ref);
2928 modify_expr = build2 (MODIFY_EXPR, TREE_TYPE (field_ref),
2929 field_ref, new_value);
2931 if (TREE_THIS_VOLATILE (field_decl))
2933 tree sync = builtin_decl_explicit (BUILT_IN_SYNC_SYNCHRONIZE);
2934 java_add_stmt (build_call_expr (sync, 0));
2937 java_add_stmt (modify_expr);
2939 else
2941 tree temp = build_decl (input_location,
2942 VAR_DECL, NULL_TREE, TREE_TYPE (field_ref));
2943 java_add_local_var (temp);
2945 if (TREE_THIS_VOLATILE (field_decl))
2946 field_ref = java_modify_addr_for_volatile (field_ref);
2948 modify_expr
2949 = build2 (MODIFY_EXPR, TREE_TYPE (field_ref), temp, field_ref);
2950 java_add_stmt (modify_expr);
2952 if (TREE_THIS_VOLATILE (field_decl))
2954 tree sync = builtin_decl_explicit (BUILT_IN_SYNC_SYNCHRONIZE);
2955 java_add_stmt (build_call_expr (sync, 0));
2958 push_value (temp);
2960 TREE_THIS_VOLATILE (field_ref) = TREE_THIS_VOLATILE (field_decl);
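/* Restore the verifier's type snapshot for PC: the saved TREE_VEC holds
   the types of the locals followed by the operand stack, so the stack
   depth is the vector length minus the method's DECL_MAX_LOCALS.  */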
2963 static void
2964 load_type_state (int pc)
2966 int i;
2967 tree vec = (*type_states)[pc];
2968 int cur_length = TREE_VEC_LENGTH (vec);
2969 stack_pointer = cur_length - DECL_MAX_LOCALS(current_function_decl);
2970 for (i = 0; i < cur_length; i++)
2971 type_map [i] = TREE_VEC_ELT (vec, i);
2974 /* Go over METHOD's bytecode and note instruction starts in
2975 instruction_bits[]. */
2977 void
2978 note_instructions (JCF *jcf, tree method)
2980 int PC;
2981 unsigned char* byte_ops;
2982 long length = DECL_CODE_LENGTH (method);
2984 int saw_index;
2985 jint INT_temp;
2987 #undef RET /* Defined by config/i386/i386.h */
2988 #undef PTR
2989 #define BCODE byte_ops
2990 #define BYTE_type_node byte_type_node
2991 #define SHORT_type_node short_type_node
2992 #define INT_type_node int_type_node
2993 #define LONG_type_node long_type_node
2994 #define CHAR_type_node char_type_node
2995 #define PTR_type_node ptr_type_node
2996 #define FLOAT_type_node float_type_node
2997 #define DOUBLE_type_node double_type_node
2998 #define VOID_type_node void_type_node
2999 #define CONST_INDEX_1 (saw_index = 1, IMMEDIATE_u1)
3000 #define CONST_INDEX_2 (saw_index = 1, IMMEDIATE_u2)
3001 #define VAR_INDEX_1 (saw_index = 1, IMMEDIATE_u1)
3002 #define VAR_INDEX_2 (saw_index = 1, IMMEDIATE_u2)
3004 #define CHECK_PC_IN_RANGE(PC) ((void)1) /* Already handled by verifier. */
3006 JCF_SEEK (jcf, DECL_CODE_OFFSET (method));
3007 byte_ops = jcf->read_ptr;
3008 instruction_bits = XRESIZEVAR (char, instruction_bits, length + 1);
3009 memset (instruction_bits, 0, length + 1);
3010 vec_alloc (type_states, length + 1);
3011 type_states->quick_grow_cleared (length + 1);
3013 /* This pass figures out which PCs can be the targets of jumps. */
3014 for (PC = 0; PC < length;)
3016 int oldpc = PC; /* PC at instruction start. */
3017 instruction_bits [PC] |= BCODE_INSTRUCTION_START;
3018 switch (byte_ops[PC++])
3020 #define JAVAOP(OPNAME, OPCODE, OPKIND, OPERAND_TYPE, OPERAND_VALUE) \
3021 case OPCODE: \
3022 PRE_##OPKIND(OPERAND_TYPE, OPERAND_VALUE); \
3023 break;
3025 #define NOTE_LABEL(PC) note_label(oldpc, PC)
3027 #define PRE_PUSHC(OPERAND_TYPE, OPERAND_VALUE) (void)(OPERAND_VALUE);
3028 #define PRE_LOAD(OPERAND_TYPE, OPERAND_VALUE) (void)(OPERAND_VALUE);
3029 #define PRE_STORE(OPERAND_TYPE, OPERAND_VALUE) (void)(OPERAND_VALUE);
3030 #define PRE_STACK(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
3031 #define PRE_UNOP(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
3032 #define PRE_BINOP(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
3033 #define PRE_CONVERT(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
3034 #define PRE_CONVERT2(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
3036 #define PRE_SPECIAL(OPERAND_TYPE, INSTRUCTION) \
3037 PRE_SPECIAL_##INSTRUCTION(OPERAND_TYPE)
3038 #define PRE_SPECIAL_IINC(OPERAND_TYPE) \
3039 ((void) IMMEDIATE_u1, (void) IMMEDIATE_s1)
3040 #define PRE_SPECIAL_ENTER(IGNORE) /* nothing */
3041 #define PRE_SPECIAL_EXIT(IGNORE) /* nothing */
3042 #define PRE_SPECIAL_THROW(IGNORE) /* nothing */
3043 #define PRE_SPECIAL_BREAK(IGNORE) /* nothing */
3045 /* two forms of wide instructions */
3046 #define PRE_SPECIAL_WIDE(IGNORE) \
3048 int modified_opcode = IMMEDIATE_u1; \
3049 if (modified_opcode == OPCODE_iinc) \
3051 (void) IMMEDIATE_u2; /* indexbyte1 and indexbyte2 */ \
3052 (void) IMMEDIATE_s2; /* constbyte1 and constbyte2 */ \
3054 else \
3056 (void) IMMEDIATE_u2; /* indexbyte1 and indexbyte2 */ \
3060 #define PRE_IMPL(IGNORE1, IGNORE2) /* nothing */
3062 #define PRE_MONITOR(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
3064 #define PRE_RETURN(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
3065 #define PRE_ARRAY(OPERAND_TYPE, SUBOP) \
3066 PRE_ARRAY_##SUBOP(OPERAND_TYPE)
3067 #define PRE_ARRAY_LOAD(TYPE) /* nothing */
3068 #define PRE_ARRAY_STORE(TYPE) /* nothing */
3069 #define PRE_ARRAY_LENGTH(TYPE) /* nothing */
3070 #define PRE_ARRAY_NEW(TYPE) PRE_ARRAY_NEW_##TYPE
3071 #define PRE_ARRAY_NEW_NUM ((void) IMMEDIATE_u1)
3072 #define PRE_ARRAY_NEW_PTR ((void) IMMEDIATE_u2)
3073 #define PRE_ARRAY_NEW_MULTI ((void) IMMEDIATE_u2, (void) IMMEDIATE_u1)
3075 #define PRE_TEST(OPERAND_TYPE, OPERAND_VALUE) NOTE_LABEL (oldpc+IMMEDIATE_s2)
3076 #define PRE_COND(OPERAND_TYPE, OPERAND_VALUE) NOTE_LABEL (oldpc+IMMEDIATE_s2)
3077 #define PRE_BRANCH(OPERAND_TYPE, OPERAND_VALUE) \
3078 saw_index = 0; INT_temp = (OPERAND_VALUE); \
3079 if (!saw_index) NOTE_LABEL(oldpc + INT_temp);
3080 #define PRE_JSR(OPERAND_TYPE, OPERAND_VALUE) \
3081 saw_index = 0; INT_temp = (OPERAND_VALUE); \
3082 NOTE_LABEL (PC); \
3083 if (!saw_index) NOTE_LABEL(oldpc + INT_temp);
3085 #define PRE_RET(OPERAND_TYPE, OPERAND_VALUE) (void)(OPERAND_VALUE)
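/* tableswitch and lookupswitch pad their operands to a 4-byte boundary
   from the start of the code, so round PC up before reading the 32-bit
   operands that follow.  */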
3087 #define PRE_SWITCH(OPERAND_TYPE, TABLE_OR_LOOKUP) \
3088 PC = (PC + 3) / 4 * 4; PRE_##TABLE_OR_LOOKUP##_SWITCH
3090 #define PRE_LOOKUP_SWITCH \
3091 { jint default_offset = IMMEDIATE_s4; jint npairs = IMMEDIATE_s4; \
3092 NOTE_LABEL (default_offset+oldpc); \
3093 if (npairs >= 0) \
3094 while (--npairs >= 0) { \
3095 jint match ATTRIBUTE_UNUSED = IMMEDIATE_s4; \
3096 jint offset = IMMEDIATE_s4; \
3097 NOTE_LABEL (offset+oldpc); } \
3100 #define PRE_TABLE_SWITCH \
3101 { jint default_offset = IMMEDIATE_s4; \
3102 jint low = IMMEDIATE_s4; jint high = IMMEDIATE_s4; \
3103 NOTE_LABEL (default_offset+oldpc); \
3104 if (low <= high) \
3105 while (low++ <= high) { \
3106 jint offset = IMMEDIATE_s4; \
3107 NOTE_LABEL (offset+oldpc); } \
3110 #define PRE_FIELD(MAYBE_STATIC, PUT_OR_GET) (void)(IMMEDIATE_u2);
3111 #define PRE_OBJECT(MAYBE_STATIC, PUT_OR_GET) (void)(IMMEDIATE_u2);
3112 #define PRE_INVOKE(MAYBE_STATIC, IS_INTERFACE) \
3113 (void)(IMMEDIATE_u2); \
3114 PC += 2 * IS_INTERFACE /* for invokeinterface */;
3116 #include "javaop.def"
3117 #undef JAVAOP
3119 } /* for */
3122 void
3123 expand_byte_code (JCF *jcf, tree method)
3125 int PC;
3126 int i;
3127 const unsigned char *linenumber_pointer;
3128 int dead_code_index = -1;
3129 unsigned char* byte_ops;
3130 long length = DECL_CODE_LENGTH (method);
3131 location_t max_location = input_location;
3133 stack_pointer = 0;
3134 JCF_SEEK (jcf, DECL_CODE_OFFSET (method));
3135 byte_ops = jcf->read_ptr;
3137 /* We make an initial pass of the line number table, to note
3138 which instructions have associated line number entries. */
3139 linenumber_pointer = linenumber_table;
3140 for (i = 0; i < linenumber_count; i++)
3142 int pc = GET_u2 (linenumber_pointer);
3143 linenumber_pointer += 4;
3144 if (pc >= length)
3145 warning (0, "invalid PC in line number table");
3146 else
3148 if ((instruction_bits[pc] & BCODE_HAS_LINENUMBER) != 0)
3149 instruction_bits[pc] |= BCODE_HAS_MULTI_LINENUMBERS;
3150 instruction_bits[pc] |= BCODE_HAS_LINENUMBER;
3154 if (! verify_jvm_instructions_new (jcf, byte_ops, length))
3155 return;
3157 promote_arguments ();
3158 cache_this_class_ref (method);
3159 cache_cpool_data_ref ();
3161 /* Translate bytecodes. */
3162 linenumber_pointer = linenumber_table;
3163 for (PC = 0; PC < length;)
3165 if ((instruction_bits [PC] & BCODE_TARGET) != 0 || PC == 0)
3167 tree label = lookup_label (PC);
3168 flush_quick_stack ();
3169 if ((instruction_bits [PC] & BCODE_TARGET) != 0)
3170 java_add_stmt (build1 (LABEL_EXPR, void_type_node, label));
3171 if ((instruction_bits[PC] & BCODE_VERIFIED) != 0)
3172 load_type_state (PC);
3175 if (! (instruction_bits [PC] & BCODE_VERIFIED))
3177 if (dead_code_index == -1)
3179 /* This is the start of a region of unreachable bytecodes.
3180 They still need to be processed in order for EH ranges
3181 to get handled correctly. However, we can simply
3182 replace these bytecodes with nops. */
3183 dead_code_index = PC;
3186 /* Turn this bytecode into a nop. */
3187 byte_ops[PC] = 0x0;
3189 else
3191 if (dead_code_index != -1)
3193 /* We've just reached the end of a region of dead code. */
3194 if (extra_warnings)
3195 warning (0, "unreachable bytecode from %d to before %d",
3196 dead_code_index, PC);
3197 dead_code_index = -1;
3201 /* Handle possible line number entry for this PC.
3203 This code handles out-of-order and multiple line numbers per PC,
3204 but is optimized for the case of line numbers increasing
3205 monotonically with PC. */
3206 if ((instruction_bits[PC] & BCODE_HAS_LINENUMBER) != 0)
3208 if ((instruction_bits[PC] & BCODE_HAS_MULTI_LINENUMBERS) != 0
3209 || GET_u2 (linenumber_pointer) != PC)
3210 linenumber_pointer = linenumber_table;
3211 while (linenumber_pointer < linenumber_table + linenumber_count * 4)
3213 int pc = GET_u2 (linenumber_pointer);
3214 linenumber_pointer += 4;
3215 if (pc == PC)
3217 int line = GET_u2 (linenumber_pointer - 2);
3218 input_location = linemap_line_start (line_table, line, 1);
3219 if (input_location > max_location)
3220 max_location = input_location;
3221 if (!(instruction_bits[PC] & BCODE_HAS_MULTI_LINENUMBERS))
3222 break;
3226 maybe_pushlevels (PC);
3227 PC = process_jvm_instruction (PC, byte_ops, length);
3228 maybe_poplevels (PC);
3229 } /* for */
3231 uncache_this_class_ref (method);
3233 if (dead_code_index != -1)
3235 /* We've just reached the end of a region of dead code. */
3236 if (extra_warnings)
3237 warning (0, "unreachable bytecode from %d to the end of the method",
3238 dead_code_index);
3241 DECL_FUNCTION_LAST_LINE (method) = max_location;
3244 static void
3245 java_push_constant_from_pool (JCF *jcf, int index)
3247 tree c;
3248 if (JPOOL_TAG (jcf, index) == CONSTANT_String)
3250 tree name;
3251 name = get_name_constant (jcf, JPOOL_USHORT1 (jcf, index));
3252 index = alloc_name_constant (CONSTANT_String, name);
3253 c = build_ref_from_constant_pool (index);
3254 c = convert (promote_type (string_type_node), c);
3256 else if (JPOOL_TAG (jcf, index) == CONSTANT_Class
3257 || JPOOL_TAG (jcf, index) == CONSTANT_ResolvedClass)
3259 tree record = get_class_constant (jcf, index);
3260 c = build_class_ref (record);
3262 else
3263 c = get_constant (jcf, index);
3264 push_value (c);
3268 process_jvm_instruction (int PC, const unsigned char* byte_ops,
3269 long length ATTRIBUTE_UNUSED)
3271 const char *opname; /* Temporary ??? */
3272 int oldpc = PC; /* PC at instruction start. */
3274 /* If the instruction is at the beginning of an exception handler,
3275 replace the top of the stack with the thrown object reference. */
3276 if (instruction_bits [PC] & BCODE_EXCEPTION_TARGET)
3278 /* Note that the verifier will not emit a type map at all for
3279 dead exception handlers. In this case we just ignore the
3280 situation. */
3281 if ((instruction_bits[PC] & BCODE_VERIFIED) != 0)
3283 tree type = pop_type (promote_type (throwable_type_node));
3284 push_value (build_exception_object_ref (type));
3288 switch (byte_ops[PC++])
3290 #define JAVAOP(OPNAME, OPCODE, OPKIND, OPERAND_TYPE, OPERAND_VALUE) \
3291 case OPCODE: \
3292 opname = #OPNAME; \
3293 OPKIND(OPERAND_TYPE, OPERAND_VALUE); \
3294 break;
3296 #define RET(OPERAND_TYPE, OPERAND_VALUE) \
3298 int saw_index = 0; \
3299 int index = OPERAND_VALUE; \
3300 (void) saw_index; /* Avoid set but not used warning. */ \
3301 build_java_ret \
3302 (find_local_variable (index, return_address_type_node, oldpc)); \
3305 #define JSR(OPERAND_TYPE, OPERAND_VALUE) \
3307 /* OPERAND_VALUE may have side-effects on PC */ \
3308 int opvalue = OPERAND_VALUE; \
3309 build_java_jsr (oldpc + opvalue, PC); \
3312 /* Push a constant onto the stack. */
3313 #define PUSHC(OPERAND_TYPE, OPERAND_VALUE) \
3314 { int saw_index = 0; int ival = (OPERAND_VALUE); \
3315 if (saw_index) java_push_constant_from_pool (current_jcf, ival); \
3316 else expand_java_pushc (ival, OPERAND_TYPE##_type_node); }
3318 /* internal macro added for use by the WIDE case */
3319 #define LOAD_INTERNAL(OPTYPE, OPVALUE) \
3320 expand_load_internal (OPVALUE, type_map[OPVALUE], oldpc);
3322 /* Push a local variable onto the operand stack. */
3323 #define LOAD(OPERAND_TYPE, OPERAND_VALUE) \
3325 /* have to do this since OPERAND_VALUE may have side-effects */ \
3326 int opvalue = OPERAND_VALUE; \
3327 LOAD_INTERNAL(OPERAND_TYPE##_type_node, opvalue); \
3330 #define RETURN(OPERAND_TYPE, OPERAND_VALUE) \
3331 expand_java_return (OPERAND_TYPE##_type_node)
3333 #define REM_EXPR TRUNC_MOD_EXPR
3334 #define BINOP(OPERAND_TYPE, OPERAND_VALUE) \
3335 expand_java_binop (OPERAND_TYPE##_type_node, OPERAND_VALUE##_EXPR)
3337 #define FIELD(IS_STATIC, IS_PUT) \
3338 expand_java_field_op (IS_STATIC, IS_PUT, IMMEDIATE_u2)
3340 #define TEST(OPERAND_TYPE, CONDITION) \
3341 expand_test (CONDITION##_EXPR, OPERAND_TYPE##_type_node, oldpc+IMMEDIATE_s2)
3343 #define COND(OPERAND_TYPE, CONDITION) \
3344 expand_cond (CONDITION##_EXPR, OPERAND_TYPE##_type_node, oldpc+IMMEDIATE_s2)
3346 #define BRANCH(OPERAND_TYPE, OPERAND_VALUE) \
3347 BRANCH_##OPERAND_TYPE (OPERAND_VALUE)
3349 #define BRANCH_GOTO(OPERAND_VALUE) \
3350 expand_java_goto (oldpc + OPERAND_VALUE)
3352 #define BRANCH_CALL(OPERAND_VALUE) \
3353 expand_java_call (oldpc + OPERAND_VALUE, oldpc)
3355 #if 0
3356 #define BRANCH_RETURN(OPERAND_VALUE) \
3358 tree type = OPERAND_TYPE##_type_node; \
3359 tree value = find_local_variable (OPERAND_VALUE, type, oldpc); \
3360 expand_java_ret (value); \
3362 #endif
3364 #define NOT_IMPL(OPERAND_TYPE, OPERAND_VALUE) \
3365 fprintf (stderr, "%3d: %s ", oldpc, opname); \
3366 fprintf (stderr, "(not implemented)\n")
3367 #define NOT_IMPL1(OPERAND_VALUE) \
3368 fprintf (stderr, "%3d: %s ", oldpc, opname); \
3369 fprintf (stderr, "(not implemented)\n")
3371 #define BRANCH_RETURN(OPERAND_VALUE) NOT_IMPL1(OPERAND_VALUE)
3373 #define STACK(SUBOP, COUNT) STACK_##SUBOP (COUNT)
3375 #define STACK_POP(COUNT) java_stack_pop (COUNT)
3377 #define STACK_SWAP(COUNT) java_stack_swap()
3379 #define STACK_DUP(COUNT) java_stack_dup (COUNT, 0)
3380 #define STACK_DUPx1(COUNT) java_stack_dup (COUNT, 1)
3381 #define STACK_DUPx2(COUNT) java_stack_dup (COUNT, 2)
3383 #define SWITCH(OPERAND_TYPE, TABLE_OR_LOOKUP) \
3384 PC = (PC + 3) / 4 * 4; TABLE_OR_LOOKUP##_SWITCH
3386 #define LOOKUP_SWITCH \
3387 { jint default_offset = IMMEDIATE_s4; jint npairs = IMMEDIATE_s4; \
3388 tree selector = pop_value (INT_type_node); \
3389 tree switch_expr = expand_java_switch (selector, oldpc + default_offset); \
3390 while (--npairs >= 0) \
3392 jint match = IMMEDIATE_s4; jint offset = IMMEDIATE_s4; \
3393 expand_java_add_case (switch_expr, match, oldpc + offset); \
3397 #define TABLE_SWITCH \
3398 { jint default_offset = IMMEDIATE_s4; \
3399 jint low = IMMEDIATE_s4; jint high = IMMEDIATE_s4; \
3400 tree selector = pop_value (INT_type_node); \
3401 tree switch_expr = expand_java_switch (selector, oldpc + default_offset); \
3402 for (; low <= high; low++) \
3404 jint offset = IMMEDIATE_s4; \
3405 expand_java_add_case (switch_expr, low, oldpc + offset); \
3409 #define INVOKE(MAYBE_STATIC, IS_INTERFACE) \
3410 { int opcode = byte_ops[PC-1]; \
3411 int method_ref_index = IMMEDIATE_u2; \
3412 int nargs; \
3413 if (IS_INTERFACE) { nargs = IMMEDIATE_u1; (void) IMMEDIATE_u1; } \
3414 else nargs = -1; \
3415 expand_invoke (opcode, method_ref_index, nargs); \
3418 /* Handle new, checkcast, instanceof */
3419 #define OBJECT(TYPE, OP) \
3420 expand_java_##OP (get_class_constant (current_jcf, IMMEDIATE_u2))
3422 #define ARRAY(OPERAND_TYPE, SUBOP) ARRAY_##SUBOP(OPERAND_TYPE)
3424 #define ARRAY_LOAD(OPERAND_TYPE) \
3426 expand_java_arrayload( OPERAND_TYPE##_type_node ); \
3429 #define ARRAY_STORE(OPERAND_TYPE) \
3431 expand_java_arraystore( OPERAND_TYPE##_type_node ); \
3434 #define ARRAY_LENGTH(OPERAND_TYPE) expand_java_array_length();
3435 #define ARRAY_NEW(OPERAND_TYPE) ARRAY_NEW_##OPERAND_TYPE()
3436 #define ARRAY_NEW_PTR() \
3437 push_value (build_anewarray (get_class_constant (current_jcf, \
3438 IMMEDIATE_u2), \
3439 pop_value (int_type_node)));
3440 #define ARRAY_NEW_NUM() \
3442 int atype = IMMEDIATE_u1; \
3443 push_value (build_newarray (atype, pop_value (int_type_node)));\
3445 #define ARRAY_NEW_MULTI() \
3447 tree klass = get_class_constant (current_jcf, IMMEDIATE_u2 ); \
3448 int ndims = IMMEDIATE_u1; \
3449 expand_java_multianewarray( klass, ndims ); \
3452 #define UNOP(OPERAND_TYPE, OPERAND_VALUE) \
3453 push_value (fold_build1 (NEGATE_EXPR, OPERAND_TYPE##_type_node, \
3454 pop_value (OPERAND_TYPE##_type_node)));
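/* Convert the popped FROM_TYPE value to TO_TYPE, then view the (possibly
   truncated) result as an int again, since sub-int values are kept as
   ints on the operand stack.  */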
3456 #define CONVERT2(FROM_TYPE, TO_TYPE) \
3458 push_value (build1 (NOP_EXPR, int_type_node, \
3459 (convert (TO_TYPE##_type_node, \
3460 pop_value (FROM_TYPE##_type_node))))); \
3463 #define CONVERT(FROM_TYPE, TO_TYPE) \
3465 push_value (convert (TO_TYPE##_type_node, \
3466 pop_value (FROM_TYPE##_type_node))); \
3469 /* internal macro added for use by the WIDE case
3470 Added TREE_TYPE (decl) assignment, apbianco */
3471 #define STORE_INTERNAL(OPTYPE, OPVALUE) \
3473 tree decl, value; \
3474 int index = OPVALUE; \
3475 tree type = OPTYPE; \
3476 value = pop_value (type); \
3477 type = TREE_TYPE (value); \
3478 decl = find_local_variable (index, type, oldpc); \
3479 set_local_type (index, type); \
3480 java_add_stmt (build2 (MODIFY_EXPR, type, decl, value)); \
3483 #define STORE(OPERAND_TYPE, OPERAND_VALUE) \
3485 /* have to do this since OPERAND_VALUE may have side-effects */ \
3486 int opvalue = OPERAND_VALUE; \
3487 STORE_INTERNAL(OPERAND_TYPE##_type_node, opvalue); \
3490 #define SPECIAL(OPERAND_TYPE, INSTRUCTION) \
3491 SPECIAL_##INSTRUCTION(OPERAND_TYPE)
3493 #define SPECIAL_ENTER(IGNORED) MONITOR_OPERATION (soft_monitorenter_node)
3494 #define SPECIAL_EXIT(IGNORED) MONITOR_OPERATION (soft_monitorexit_node)
3496 #define MONITOR_OPERATION(call) \
3498 tree o = pop_value (ptr_type_node); \
3499 tree c; \
3500 flush_quick_stack (); \
3501 c = build_java_monitor (call, o); \
3502 TREE_SIDE_EFFECTS (c) = 1; \
3503 java_add_stmt (c); \
3506 #define SPECIAL_IINC(IGNORED) \
3508 unsigned int local_var_index = IMMEDIATE_u1; \
3509 int ival = IMMEDIATE_s1; \
3510 expand_iinc(local_var_index, ival, oldpc); \
3513 #define SPECIAL_WIDE(IGNORED) \
3515 int modified_opcode = IMMEDIATE_u1; \
3516 unsigned int local_var_index = IMMEDIATE_u2; \
3517 switch (modified_opcode) \
3519 case OPCODE_iinc: \
3521 int ival = IMMEDIATE_s2; \
3522 expand_iinc (local_var_index, ival, oldpc); \
3523 break; \
3525 case OPCODE_iload: \
3526 case OPCODE_lload: \
3527 case OPCODE_fload: \
3528 case OPCODE_dload: \
3529 case OPCODE_aload: \
3531 /* duplicate code from LOAD macro */ \
3532 LOAD_INTERNAL(operand_type[modified_opcode], local_var_index); \
3533 break; \
3535 case OPCODE_istore: \
3536 case OPCODE_lstore: \
3537 case OPCODE_fstore: \
3538 case OPCODE_dstore: \
3539 case OPCODE_astore: \
3541 STORE_INTERNAL(operand_type[modified_opcode], local_var_index); \
3542 break; \
3544 default: \
3545 error ("unrecognized wide sub-instruction"); \
3549 #define SPECIAL_THROW(IGNORED) \
3550 build_java_athrow (pop_value (throwable_type_node))
3552 #define SPECIAL_BREAK NOT_IMPL1
3553 #define IMPL NOT_IMPL
3555 #include "javaop.def"
3556 #undef JAVAOP
3557 default:
3558 fprintf (stderr, "%3d: unknown(%3d)\n", oldpc, byte_ops[PC]);
3560 return PC;
3563 /* Return the opcode at PC in the code section that starts at offset
3564 CODE_OFFSET. */
3566 static unsigned char
3567 peek_opcode_at_pc (JCF *jcf, int code_offset, int pc)
3569 unsigned char opcode;
3570 long absolute_offset = (long)JCF_TELL (jcf);
3572 JCF_SEEK (jcf, code_offset);
3573 opcode = jcf->read_ptr [pc];
3574 JCF_SEEK (jcf, absolute_offset);
3575 return opcode;
3578 /* Some bytecode compilers are emitting accurate LocalVariableTable
3579 attributes. Here's an example:
3581 PC <t>store_<n>
3582 PC+1 ...
3584 Attribute "LocalVariableTable"
3585 slot #<n>: ... (PC: PC+1 length: L)
3587 This is accurate because the local in slot <n> really exists after
3588 the opcode at PC is executed, hence from PC+1 to PC+1+L.
3590 This procedure recognizes this situation and extends the live range
3591 of the local in SLOT to START_PC-1 or START_PC-2 (depending on the
3592 length of the store instruction).
3594 This function is used by `give_name_to_locals' so that a local's
3595 DECL features a DECL_LOCAL_START_PC such that the first related
3596 store operation will use DECL as a destination, not an unrelated
3597 temporary created for the occasion.
3599 This function uses a global (instruction_bits) that `note_instructions'
3600 should have allocated and filled properly. */
3603 maybe_adjust_start_pc (struct JCF *jcf, int code_offset,
3604 int start_pc, int slot)
3606 int first, index, opcode;
3607 int pc, insn_pc;
3608 int wide_found = 0;
3610 if (!start_pc)
3611 return start_pc;
3613 first = index = -1;
3615 /* Find the last previous instruction and remember it. */
3616 for (pc = start_pc-1; pc; pc--)
3617 if (instruction_bits [pc] & BCODE_INSTRUCTION_START)
3618 break;
3619 insn_pc = pc;
3621 /* Retrieve the instruction, handle `wide'. */
3622 opcode = (int) peek_opcode_at_pc (jcf, code_offset, pc++);
3623 if (opcode == OPCODE_wide)
3625 wide_found = 1;
3626 opcode = (int) peek_opcode_at_pc (jcf, code_offset, pc++);
3629 switch (opcode)
3631 case OPCODE_astore_0:
3632 case OPCODE_astore_1:
3633 case OPCODE_astore_2:
3634 case OPCODE_astore_3:
3635 first = OPCODE_astore_0;
3636 break;
3638 case OPCODE_istore_0:
3639 case OPCODE_istore_1:
3640 case OPCODE_istore_2:
3641 case OPCODE_istore_3:
3642 first = OPCODE_istore_0;
3643 break;
3645 case OPCODE_lstore_0:
3646 case OPCODE_lstore_1:
3647 case OPCODE_lstore_2:
3648 case OPCODE_lstore_3:
3649 first = OPCODE_lstore_0;
3650 break;
3652 case OPCODE_fstore_0:
3653 case OPCODE_fstore_1:
3654 case OPCODE_fstore_2:
3655 case OPCODE_fstore_3:
3656 first = OPCODE_fstore_0;
3657 break;
3659 case OPCODE_dstore_0:
3660 case OPCODE_dstore_1:
3661 case OPCODE_dstore_2:
3662 case OPCODE_dstore_3:
3663 first = OPCODE_dstore_0;
3664 break;
3666 case OPCODE_astore:
3667 case OPCODE_istore:
3668 case OPCODE_lstore:
3669 case OPCODE_fstore:
3670 case OPCODE_dstore:
3671 index = peek_opcode_at_pc (jcf, code_offset, pc);
3672 if (wide_found)
3674 int other = peek_opcode_at_pc (jcf, code_offset, ++pc);
3675 index = (other << 8) + index;
3677 break;
3680 /* Now we decide: first >0 means we have a <t>store_<n>, index >0
3681 means we have a <t>store. */
3682 if ((first > 0 && opcode - first == slot) || (index > 0 && index == slot))
3683 start_pc = insn_pc;
3685 return start_pc;
3688 /* Build a node to represent empty statements and blocks. */
3690 tree
3691 build_java_empty_stmt (void)
3693 tree t = build_empty_stmt (input_location);
3694 return t;
3697 /* Promote all args of integral type before generating any code. */
3699 static void
3700 promote_arguments (void)
3702 int i;
3703 tree arg;
3704 for (arg = DECL_ARGUMENTS (current_function_decl), i = 0;
3705 arg != NULL_TREE; arg = DECL_CHAIN (arg), i++)
3707 tree arg_type = TREE_TYPE (arg);
3708 if (INTEGRAL_TYPE_P (arg_type)
3709 && TYPE_PRECISION (arg_type) < 32)
3711 tree copy = find_local_variable (i, integer_type_node, -1);
3712 java_add_stmt (build2 (MODIFY_EXPR, integer_type_node,
3713 copy,
3714 fold_convert (integer_type_node, arg)));
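/* A long or double argument occupies two local-variable slots, so skip
   the second slot's index.  */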
3716 if (TYPE_IS_WIDE (arg_type))
3717 i++;
3721 /* Create a local variable that points to the constant pool. */
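/* When optimizing, the address of the constant-pool data is loaded into a
   new local once, and an INDIRECT_REF through that local is recorded in
   TYPE_CPOOL_DATA_REF so that later constant-pool references can reuse
   the cached pointer.  */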
3723 static void
3724 cache_cpool_data_ref (void)
3726 if (optimize)
3728 tree cpool;
3729 tree d = build_constant_data_ref (flag_indirect_classes);
3730 tree cpool_ptr = build_decl (input_location, VAR_DECL, NULL_TREE,
3731 build_pointer_type (TREE_TYPE (d)));
3732 java_add_local_var (cpool_ptr);
3733 TREE_CONSTANT (cpool_ptr) = 1;
3735 java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (cpool_ptr),
3736 cpool_ptr, build_address_of (d)));
3737 cpool = build1 (INDIRECT_REF, TREE_TYPE (d), cpool_ptr);
3738 TREE_THIS_NOTRAP (cpool) = 1;
3739 TYPE_CPOOL_DATA_REF (output_class) = cpool;
3743 #include "gt-java-expr.h"