1 /* Process expressions for the GNU compiler for the Java(TM) language.
2 Copyright (C) 1996-2015 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>.
20 Java and all Java-based marks are trademarks or registered trademarks
21 of Sun Microsystems, Inc. in the United States and other countries.
22 The Free Software Foundation is independent of Sun Microsystems, Inc. */
24 /* Hacked by Per Bothner <bothner@cygnus.com> February 1996. */
28 #include "coretypes.h"
32 #include "fold-const.h"
33 #include "stringpool.h"
34 #include "stor-layout.h"
36 #include "java-tree.h"
38 #include "java-opcodes.h"
40 #include "java-except.h"
42 #include "diagnostic-core.h"
43 #include "tree-iterator.h"
/* Forward declarations for the static helpers defined later in this file.
   NOTE(review): this chunk was extracted with hard line wraps and fused
   line numbers; a few prototypes present in the original file appear to
   be missing from this view (gaps in the embedded numbering).  */
static void flush_quick_stack (void);
static void push_value (tree);
static tree pop_value (tree);
static void java_stack_swap (void);
static void java_stack_dup (int, int);
static void build_java_athrow (tree);
static void build_java_jsr (int, int);
static void build_java_ret (tree);
static void expand_java_multianewarray (tree, int);
static void expand_java_arraystore (tree);
static void expand_java_arrayload (tree);
static void expand_java_array_length (void);
static tree build_java_monitor (tree, tree);
static void expand_java_pushc (int, tree);
static void expand_java_return (tree);
static void expand_load_internal (int, tree, int);
static void expand_java_NEW (tree);
static void expand_java_INSTANCEOF (tree);
static void expand_java_CHECKCAST (tree);
static void expand_iinc (unsigned int, int, int);
static void expand_java_binop (tree, enum tree_code);
static void note_label (int, int);
static void expand_compare (enum tree_code, tree, tree, int);
static void expand_test (enum tree_code, tree, int);
static void expand_cond (enum tree_code, tree, int);
static void expand_java_goto (int);
static tree expand_java_switch (tree, int);
static void expand_java_add_case (tree, int, int);
static vec<tree, va_gc> *pop_arguments (tree);
static void expand_invoke (int, int, int);
static void expand_java_field_op (int, int, int);
static void java_push_constant_from_pool (struct JCF *, int);
static void java_stack_pop (int);
static tree build_java_throw_out_of_bounds_exception (tree);
static tree build_java_check_indexed_type (tree, tree);
static unsigned char peek_opcode_at_pc (struct JCF *, int, int);
static void promote_arguments (void);
static void cache_cpool_data_ref (void);
/* Per-opcode operand type table, indexed by bytecode opcode number
   (filled in by init_expr_processing below).  */
static GTY(()) tree operand_type[59];

/* Cached identifier nodes used when building field/method references.  */
static GTY(()) tree methods_ident;
static GTY(()) tree ncode_ident;
tree dtable_ident = NULL_TREE;

/* Set to nonzero value in order to emit class initialization code
   before static field references.  */
int always_initialize_class_p = 0;

/* We store the stack state in two places:
   Within a basic block, we use the quick_stack, which is a vec of expression
   This is the top part of the stack;  below that we use find_stack_slot.
   At the end of a basic block, the quick_stack must be flushed
   to the stack slot array (as handled by find_stack_slot).
   Using quick_stack generates better code (especially when
   compiled without optimization), because we do not have to
   explicitly store and load trees to temporary variables.

   If a variable is on the quick stack, it means the value of variable
   when the quick stack was last flushed.  Conceptually, flush_quick_stack
   saves all the quick_stack elements in parallel.  However, that is
   complicated, so it actually saves them (i.e. copies each stack value
   to its home virtual register) from low indexes.  This allows a quick_stack
   element at index i (counting from the bottom of the stack) to reference
   slot virtuals for register that are >= i, but not those that are deeper.
   This convention makes most operations easier.  For example iadd works
   even when the stack contains (reg[0], reg[1]):  It results in the
   stack containing (reg[0]+reg[1]), which is OK.  However, some stack
   operations are more complicated.  For example dup given a stack
   containing (reg[0]) would yield (reg[0], reg[0]), which would violate
   the convention, since stack value 1 would refer to a register with
   lower index (reg[0]), which flush_quick_stack does not safely handle.
   So dup cannot just add an extra element to the quick_stack, but iadd can.  */

static GTY(()) vec<tree, va_gc> *quick_stack;

/* The physical memory page size used in this computer.  See
   build_field_ref().  */
static GTY(()) tree page_size;

/* The stack pointer of the Java virtual machine.
   This does include the size of the quick_stack.  */
/* NOTE(review): the variable definition that belongs with the comment
   above (the JVM stack pointer) was dropped by the extraction — only the
   comment survives in this view.  */

const unsigned char *linenumber_table;
int linenumber_count;

/* Largest pc so far in this method that has been passed to lookup_label. */
int highest_label_pc_this_method = -1;

/* Base value for this method to add to pc to get generated label. */
int start_label_pc_this_method = 0;
143 init_expr_processing (void)
145 operand_type
[21] = operand_type
[54] = int_type_node
;
146 operand_type
[22] = operand_type
[55] = long_type_node
;
147 operand_type
[23] = operand_type
[56] = float_type_node
;
148 operand_type
[24] = operand_type
[57] = double_type_node
;
149 operand_type
[25] = operand_type
[58] = ptr_type_node
;
153 java_truthvalue_conversion (tree expr
)
155 /* It is simpler and generates better code to have only TRUTH_*_EXPR
156 or comparison expressions as truth values at this level.
158 This function should normally be identity for Java. */
160 switch (TREE_CODE (expr
))
162 case EQ_EXPR
: case NE_EXPR
: case UNEQ_EXPR
: case LTGT_EXPR
:
163 case LE_EXPR
: case GE_EXPR
: case LT_EXPR
: case GT_EXPR
:
164 case UNLE_EXPR
: case UNGE_EXPR
: case UNLT_EXPR
: case UNGT_EXPR
:
165 case ORDERED_EXPR
: case UNORDERED_EXPR
:
166 case TRUTH_ANDIF_EXPR
:
167 case TRUTH_ORIF_EXPR
:
176 return integer_zerop (expr
) ? boolean_false_node
: boolean_true_node
;
179 return real_zerop (expr
) ? boolean_false_node
: boolean_true_node
;
181 /* are these legal? XXX JH */
185 /* These don't change whether an object is nonzero or zero. */
186 return java_truthvalue_conversion (TREE_OPERAND (expr
, 0));
189 /* Distribute the conversion into the arms of a COND_EXPR. */
190 return fold_build3 (COND_EXPR
, boolean_type_node
, TREE_OPERAND (expr
, 0),
191 java_truthvalue_conversion (TREE_OPERAND (expr
, 1)),
192 java_truthvalue_conversion (TREE_OPERAND (expr
, 2)));
195 /* If this is widening the argument, we can ignore it. */
196 if (TYPE_PRECISION (TREE_TYPE (expr
))
197 >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr
, 0))))
198 return java_truthvalue_conversion (TREE_OPERAND (expr
, 0));
199 /* fall through to default */
202 return fold_build2 (NE_EXPR
, boolean_type_node
,
203 expr
, boolean_false_node
);
207 /* Save any stack slots that happen to be in the quick_stack into their
208 home virtual register slots.
210 The copy order is from low stack index to high, to support the invariant
211 that the expression for a slot may contain decls for stack slots with
212 higher (or the same) index, but not lower. */
215 flush_quick_stack (void)
217 int stack_index
= stack_pointer
;
221 /* Count the number of slots the quick stack is holding. */
222 for (ix
= 0; vec_safe_iterate (quick_stack
, ix
, &t
); ix
++)
223 stack_index
-= 1 + TYPE_IS_WIDE (TREE_TYPE (t
));
225 for (ix
= 0; vec_safe_iterate (quick_stack
, ix
, &t
); ix
++)
227 tree decl
, type
= TREE_TYPE (t
);
229 decl
= find_stack_slot (stack_index
, type
);
231 java_add_stmt (build2 (MODIFY_EXPR
, TREE_TYPE (t
), decl
, t
));
232 stack_index
+= 1 + TYPE_IS_WIDE (type
);
235 vec_safe_truncate (quick_stack
, 0);
238 /* Push TYPE on the type stack.
239 Return true on success, 0 on overflow. */
242 push_type_0 (tree type
)
245 type
= promote_type (type
);
246 n_words
= 1 + TYPE_IS_WIDE (type
);
247 if (stack_pointer
+ n_words
> DECL_MAX_STACK (current_function_decl
))
249 /* Allocate decl for this variable now, so we get a temporary that
250 survives the whole method. */
251 find_stack_slot (stack_pointer
, type
);
252 stack_type_map
[stack_pointer
++] = type
;
254 while (--n_words
>= 0)
255 stack_type_map
[stack_pointer
++] = TYPE_SECOND
;
260 push_type (tree type
)
262 int r
= push_type_0 (type
);
267 push_value (tree value
)
269 tree type
= TREE_TYPE (value
);
270 if (TYPE_PRECISION (type
) < 32 && INTEGRAL_TYPE_P (type
))
272 type
= promote_type (type
);
273 value
= convert (type
, value
);
276 vec_safe_push (quick_stack
, value
);
278 /* If the value has a side effect, then we need to evaluate it
279 whether or not the result is used. If the value ends up on the
280 quick stack and is then popped, this won't happen -- so we flush
281 the quick stack. It is safest to simply always flush, though,
282 since TREE_SIDE_EFFECTS doesn't capture COMPONENT_REF, and for
283 the latter we may need to strip conversions. */
284 flush_quick_stack ();
287 /* Pop a type from the type stack.
288 TYPE is the expected type. Return the actual type, which must be
290 On an error, *MESSAGEP is set to a freshly malloc'd error message. */
293 pop_type_0 (tree type
, char **messagep
)
298 if (TREE_CODE (type
) == RECORD_TYPE
)
299 type
= promote_type (type
);
300 n_words
= 1 + TYPE_IS_WIDE (type
);
301 if (stack_pointer
< n_words
)
303 *messagep
= xstrdup ("stack underflow");
306 while (--n_words
> 0)
308 if (stack_type_map
[--stack_pointer
] != void_type_node
)
310 *messagep
= xstrdup ("Invalid multi-word value on type stack");
314 t
= stack_type_map
[--stack_pointer
];
315 if (type
== NULL_TREE
|| t
== type
)
317 if (TREE_CODE (t
) == TREE_LIST
)
321 tree tt
= TREE_PURPOSE (t
);
322 if (! can_widen_reference_to (tt
, type
))
332 if (INTEGRAL_TYPE_P (type
) && INTEGRAL_TYPE_P (t
)
333 && TYPE_PRECISION (type
) <= 32 && TYPE_PRECISION (t
) <= 32)
335 if (TREE_CODE (type
) == POINTER_TYPE
&& TREE_CODE (t
) == POINTER_TYPE
)
337 /* If the expected type we've been passed is object or ptr
338 (i.e. void*), the caller needs to know the real type. */
339 if (type
== ptr_type_node
|| type
== object_ptr_type_node
)
342 /* Since the verifier has already run, we know that any
343 types we see will be compatible. In BC mode, this fact
344 may be checked at runtime, but if that is so then we can
345 assume its truth here as well. So, we always succeed
346 here, with the expected type. */
350 if (! flag_verify_invocations
&& flag_indirect_dispatch
351 && t
== object_ptr_type_node
)
353 if (type
!= ptr_type_node
)
354 warning (0, "need to insert runtime check for %s",
355 xstrdup (lang_printable_name (type
, 0)));
359 /* lang_printable_name uses a static buffer, so we must save the result
360 from calling it the first time. */
363 char *temp
= xstrdup (lang_printable_name (type
, 0));
364 /* If the stack contains a multi-word type, keep popping the stack until
365 the real type is found. */
366 while (t
== void_type_node
)
367 t
= stack_type_map
[--stack_pointer
];
368 *messagep
= concat ("expected type '", temp
,
369 "' but stack contains '", lang_printable_name (t
, 0),
376 /* Pop a type from the type stack.
377 TYPE is the expected type. Return the actual type, which must be
378 convertible to TYPE, otherwise call error. */
383 char *message
= NULL
;
384 type
= pop_type_0 (type
, &message
);
387 error ("%s", message
);
394 /* Return true if two type assertions are equal. */
397 type_assertion_hasher::equal (type_assertion
*k1
, type_assertion
*k2
)
399 return (k1
->assertion_code
== k2
->assertion_code
400 && k1
->op1
== k2
->op1
401 && k1
->op2
== k2
->op2
);
404 /* Hash a type assertion. */
407 type_assertion_hasher::hash (type_assertion
*k_p
)
409 hashval_t hash
= iterative_hash (&k_p
->assertion_code
, sizeof
410 k_p
->assertion_code
, 0);
412 switch (k_p
->assertion_code
)
414 case JV_ASSERT_TYPES_COMPATIBLE
:
415 hash
= iterative_hash (&TYPE_UID (k_p
->op2
), sizeof TYPE_UID (k_p
->op2
),
419 case JV_ASSERT_IS_INSTANTIABLE
:
420 hash
= iterative_hash (&TYPE_UID (k_p
->op1
), sizeof TYPE_UID (k_p
->op1
),
424 case JV_ASSERT_END_OF_TABLE
:
434 /* Add an entry to the type assertion table for the given class.
435 KLASS is the class for which this assertion will be evaluated by the
436 runtime during loading/initialization.
437 ASSERTION_CODE is the 'opcode' or type of this assertion: see java-tree.h.
438 OP1 and OP2 are the operands. The tree type of these arguments may be
439 specific to each assertion_code. */
442 add_type_assertion (tree klass
, int assertion_code
, tree op1
, tree op2
)
444 hash_table
<type_assertion_hasher
> *assertions_htab
;
446 type_assertion
**as_pp
;
448 assertions_htab
= TYPE_ASSERTIONS (klass
);
449 if (assertions_htab
== NULL
)
451 assertions_htab
= hash_table
<type_assertion_hasher
>::create_ggc (7);
452 TYPE_ASSERTIONS (current_class
) = assertions_htab
;
455 as
.assertion_code
= assertion_code
;
459 as_pp
= assertions_htab
->find_slot (&as
, INSERT
);
461 /* Don't add the same assertion twice. */
465 *as_pp
= ggc_alloc
<type_assertion
> ();
470 /* Return 1 if SOURCE_TYPE can be safely widened to TARGET_TYPE.
471 Handles array types and interfaces. */
474 can_widen_reference_to (tree source_type
, tree target_type
)
476 if (source_type
== ptr_type_node
|| target_type
== object_ptr_type_node
)
479 /* Get rid of pointers */
480 if (TREE_CODE (source_type
) == POINTER_TYPE
)
481 source_type
= TREE_TYPE (source_type
);
482 if (TREE_CODE (target_type
) == POINTER_TYPE
)
483 target_type
= TREE_TYPE (target_type
);
485 if (source_type
== target_type
)
488 /* FIXME: This is very pessimistic, in that it checks everything,
489 even if we already know that the types are compatible. If we're
490 to support full Java class loader semantics, we need this.
491 However, we could do something more optimal. */
492 if (! flag_verify_invocations
)
494 add_type_assertion (current_class
, JV_ASSERT_TYPES_COMPATIBLE
,
495 source_type
, target_type
);
498 warning (0, "assert: %s is assign compatible with %s",
499 xstrdup (lang_printable_name (target_type
, 0)),
500 xstrdup (lang_printable_name (source_type
, 0)));
501 /* Punt everything to runtime. */
505 if (TYPE_DUMMY (source_type
) || TYPE_DUMMY (target_type
))
511 if (TYPE_ARRAY_P (source_type
) || TYPE_ARRAY_P (target_type
))
513 HOST_WIDE_INT source_length
, target_length
;
514 if (TYPE_ARRAY_P (source_type
) != TYPE_ARRAY_P (target_type
))
516 /* An array implements Cloneable and Serializable. */
517 tree name
= DECL_NAME (TYPE_NAME (target_type
));
518 return (name
== java_lang_cloneable_identifier_node
519 || name
== java_io_serializable_identifier_node
);
521 target_length
= java_array_type_length (target_type
);
522 if (target_length
>= 0)
524 source_length
= java_array_type_length (source_type
);
525 if (source_length
!= target_length
)
528 source_type
= TYPE_ARRAY_ELEMENT (source_type
);
529 target_type
= TYPE_ARRAY_ELEMENT (target_type
);
530 if (source_type
== target_type
)
532 if (TREE_CODE (source_type
) != POINTER_TYPE
533 || TREE_CODE (target_type
) != POINTER_TYPE
)
535 return can_widen_reference_to (source_type
, target_type
);
539 int source_depth
= class_depth (source_type
);
540 int target_depth
= class_depth (target_type
);
542 if (TYPE_DUMMY (source_type
) || TYPE_DUMMY (target_type
))
545 warning (0, "assert: %s is assign compatible with %s",
546 xstrdup (lang_printable_name (target_type
, 0)),
547 xstrdup (lang_printable_name (source_type
, 0)));
551 /* class_depth can return a negative depth if an error occurred */
552 if (source_depth
< 0 || target_depth
< 0)
555 if (CLASS_INTERFACE (TYPE_NAME (target_type
)))
557 /* target_type is OK if source_type or source_type ancestors
558 implement target_type. We handle multiple sub-interfaces */
559 tree binfo
, base_binfo
;
562 for (binfo
= TYPE_BINFO (source_type
), i
= 0;
563 BINFO_BASE_ITERATE (binfo
, i
, base_binfo
); i
++)
564 if (can_widen_reference_to
565 (BINFO_TYPE (base_binfo
), target_type
))
572 for ( ; source_depth
> target_depth
; source_depth
--)
575 = BINFO_TYPE (BINFO_BASE_BINFO (TYPE_BINFO (source_type
), 0));
577 return source_type
== target_type
;
583 pop_value (tree type
)
585 type
= pop_type (type
);
586 if (vec_safe_length (quick_stack
) != 0)
587 return quick_stack
->pop ();
589 return find_stack_slot (stack_pointer
, promote_type (type
));
593 /* Pop and discard the top COUNT stack slots. */
596 java_stack_pop (int count
)
602 gcc_assert (stack_pointer
!= 0);
604 type
= stack_type_map
[stack_pointer
- 1];
605 if (type
== TYPE_SECOND
)
608 gcc_assert (stack_pointer
!= 1 && count
> 0);
610 type
= stack_type_map
[stack_pointer
- 2];
617 /* Implement the 'swap' operator (to swap two top stack slots). */
620 java_stack_swap (void)
626 if (stack_pointer
< 2
627 || (type1
= stack_type_map
[stack_pointer
- 1]) == TYPE_SECOND
628 || (type2
= stack_type_map
[stack_pointer
- 2]) == TYPE_SECOND
629 || TYPE_IS_WIDE (type1
) || TYPE_IS_WIDE (type2
))
630 /* Bad stack swap. */
632 /* Bad stack swap. */
634 flush_quick_stack ();
635 decl1
= find_stack_slot (stack_pointer
- 1, type1
);
636 decl2
= find_stack_slot (stack_pointer
- 2, type2
);
637 temp
= build_decl (input_location
, VAR_DECL
, NULL_TREE
, type1
);
638 java_add_local_var (temp
);
639 java_add_stmt (build2 (MODIFY_EXPR
, type1
, temp
, decl1
));
640 java_add_stmt (build2 (MODIFY_EXPR
, type2
,
641 find_stack_slot (stack_pointer
- 1, type2
),
643 java_add_stmt (build2 (MODIFY_EXPR
, type1
,
644 find_stack_slot (stack_pointer
- 2, type1
),
646 stack_type_map
[stack_pointer
- 1] = type2
;
647 stack_type_map
[stack_pointer
- 2] = type1
;
651 java_stack_dup (int size
, int offset
)
653 int low_index
= stack_pointer
- size
- offset
;
656 error ("stack underflow - dup* operation");
658 flush_quick_stack ();
660 stack_pointer
+= size
;
661 dst_index
= stack_pointer
;
663 for (dst_index
= stack_pointer
; --dst_index
>= low_index
; )
666 int src_index
= dst_index
- size
;
667 if (src_index
< low_index
)
668 src_index
= dst_index
+ size
+ offset
;
669 type
= stack_type_map
[src_index
];
670 if (type
== TYPE_SECOND
)
672 /* Dup operation splits 64-bit number. */
673 gcc_assert (src_index
> low_index
);
675 stack_type_map
[dst_index
] = type
;
676 src_index
--; dst_index
--;
677 type
= stack_type_map
[src_index
];
678 gcc_assert (TYPE_IS_WIDE (type
));
681 gcc_assert (! TYPE_IS_WIDE (type
));
683 if (src_index
!= dst_index
)
685 tree src_decl
= find_stack_slot (src_index
, type
);
686 tree dst_decl
= find_stack_slot (dst_index
, type
);
689 (build2 (MODIFY_EXPR
, TREE_TYPE (dst_decl
), dst_decl
, src_decl
));
690 stack_type_map
[dst_index
] = type
;
695 /* Calls _Jv_Throw or _Jv_Sjlj_Throw. Discard the contents of the
699 build_java_athrow (tree node
)
703 call
= build_call_nary (void_type_node
,
704 build_address_of (throw_node
),
706 TREE_SIDE_EFFECTS (call
) = 1;
707 java_add_stmt (call
);
708 java_stack_pop (stack_pointer
);
711 /* Implementation for jsr/ret */
714 build_java_jsr (int target_pc
, int return_pc
)
716 tree where
= lookup_label (target_pc
);
717 tree ret
= lookup_label (return_pc
);
718 tree ret_label
= fold_build1 (ADDR_EXPR
, return_address_type_node
, ret
);
719 push_value (ret_label
);
720 flush_quick_stack ();
721 java_add_stmt (build1 (GOTO_EXPR
, void_type_node
, where
));
723 /* Do not need to emit the label here. We noted the existence of the
724 label as a jump target in note_instructions; we'll emit the label
725 for real at the beginning of the expand_byte_code loop. */
729 build_java_ret (tree location
)
731 java_add_stmt (build1 (GOTO_EXPR
, void_type_node
, location
));
734 /* Implementation of operations on array: new, load, store, length */
737 decode_newarray_type (int atype
)
741 case 4: return boolean_type_node
;
742 case 5: return char_type_node
;
743 case 6: return float_type_node
;
744 case 7: return double_type_node
;
745 case 8: return byte_type_node
;
746 case 9: return short_type_node
;
747 case 10: return int_type_node
;
748 case 11: return long_type_node
;
749 default: return NULL_TREE
;
753 /* Map primitive type to the code used by OPCODE_newarray. */
756 encode_newarray_type (tree type
)
758 if (type
== boolean_type_node
)
760 else if (type
== char_type_node
)
762 else if (type
== float_type_node
)
764 else if (type
== double_type_node
)
766 else if (type
== byte_type_node
)
768 else if (type
== short_type_node
)
770 else if (type
== int_type_node
)
772 else if (type
== long_type_node
)
778 /* Build a call to _Jv_ThrowBadArrayIndex(), the
779 ArrayIndexOfBoundsException exception handler. */
782 build_java_throw_out_of_bounds_exception (tree index
)
786 /* We need to build a COMPOUND_EXPR because _Jv_ThrowBadArrayIndex()
787 has void return type. We cannot just set the type of the CALL_EXPR below
788 to int_type_node because we would lose it during gimplification. */
789 gcc_assert (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (soft_badarrayindex_node
))));
790 node
= build_call_nary (void_type_node
,
791 build_address_of (soft_badarrayindex_node
),
793 TREE_SIDE_EFFECTS (node
) = 1;
795 node
= build2 (COMPOUND_EXPR
, int_type_node
, node
, integer_zero_node
);
796 TREE_SIDE_EFFECTS (node
) = 1; /* Allows expansion within ANDIF */
801 /* Return the length of an array. Doesn't perform any checking on the nature
802 or value of the array NODE. May be used to implement some bytecodes. */
805 build_java_array_length_access (tree node
)
807 tree type
= TREE_TYPE (node
);
808 tree array_type
= TREE_TYPE (type
);
809 HOST_WIDE_INT length
;
811 if (!is_array_type_p (type
))
813 /* With the new verifier, we will see an ordinary pointer type
814 here. In this case, we just use an arbitrary array type. */
815 array_type
= build_java_array_type (object_ptr_type_node
, -1);
816 type
= promote_type (array_type
);
819 length
= java_array_type_length (type
);
821 return build_int_cst (NULL_TREE
, length
);
823 node
= build3 (COMPONENT_REF
, int_type_node
,
824 build_java_indirect_ref (array_type
, node
,
825 flag_check_references
),
826 lookup_field (&array_type
, get_identifier ("length")),
828 IS_ARRAY_LENGTH_ACCESS (node
) = 1;
832 /* Optionally checks a reference against the NULL pointer. ARG1: the
833 expr, ARG2: we should check the reference. Don't generate extra
834 checks if we're not generating code. */
837 java_check_reference (tree expr
, int check
)
839 if (!flag_syntax_only
&& check
)
841 expr
= save_expr (expr
);
842 expr
= build3 (COND_EXPR
, TREE_TYPE (expr
),
843 build2 (EQ_EXPR
, boolean_type_node
,
844 expr
, null_pointer_node
),
845 build_call_nary (void_type_node
,
846 build_address_of (soft_nullpointer_node
),
854 /* Reference an object: just like an INDIRECT_REF, but with checking. */
857 build_java_indirect_ref (tree type
, tree expr
, int check
)
860 t
= java_check_reference (expr
, check
);
861 t
= convert (build_pointer_type (type
), t
);
862 return build1 (INDIRECT_REF
, type
, t
);
865 /* Implement array indexing (either as l-value or r-value).
866 Returns a tree for ARRAY[INDEX], assume TYPE is the element type.
867 Optionally performs bounds checking and/or test to NULL.
868 At this point, ARRAY should have been verified as an array. */
871 build_java_arrayaccess (tree array
, tree type
, tree index
)
873 tree node
, throw_expr
= NULL_TREE
;
876 tree array_type
= TREE_TYPE (TREE_TYPE (array
));
877 tree size_exp
= fold_convert (sizetype
, size_in_bytes (type
));
879 if (!is_array_type_p (TREE_TYPE (array
)))
881 /* With the new verifier, we will see an ordinary pointer type
882 here. In this case, we just use the correct array type. */
883 array_type
= build_java_array_type (type
, -1);
886 if (flag_bounds_check
)
889 * (unsigned jint) INDEX >= (unsigned jint) LEN
890 * && throw ArrayIndexOutOfBoundsException.
891 * Note this is equivalent to and more efficient than:
892 * INDEX < 0 || INDEX >= LEN && throw ... */
894 tree len
= convert (unsigned_int_type_node
,
895 build_java_array_length_access (array
));
896 test
= fold_build2 (GE_EXPR
, boolean_type_node
,
897 convert (unsigned_int_type_node
, index
),
899 if (! integer_zerop (test
))
902 = build2 (TRUTH_ANDIF_EXPR
, int_type_node
, test
,
903 build_java_throw_out_of_bounds_exception (index
));
904 /* allows expansion within COMPOUND */
905 TREE_SIDE_EFFECTS( throw_expr
) = 1;
909 /* If checking bounds, wrap the index expr with a COMPOUND_EXPR in order
910 to have the bounds check evaluated first. */
911 if (throw_expr
!= NULL_TREE
)
912 index
= build2 (COMPOUND_EXPR
, int_type_node
, throw_expr
, index
);
914 data_field
= lookup_field (&array_type
, get_identifier ("data"));
916 ref
= build3 (COMPONENT_REF
, TREE_TYPE (data_field
),
917 build_java_indirect_ref (array_type
, array
,
918 flag_check_references
),
919 data_field
, NULL_TREE
);
921 /* Take the address of the data field and convert it to a pointer to
923 node
= build1 (NOP_EXPR
, build_pointer_type (type
), build_address_of (ref
));
925 /* Multiply the index by the size of an element to obtain a byte
926 offset. Convert the result to a pointer to the element type. */
927 index
= build2 (MULT_EXPR
, sizetype
,
928 fold_convert (sizetype
, index
),
931 /* Sum the byte offset and the address of the data field. */
932 node
= fold_build_pointer_plus (node
, index
);
936 *((&array->data) + index*size_exp)
939 return build1 (INDIRECT_REF
, TREE_TYPE (TREE_TYPE (node
)), node
);
942 /* Generate code to throw an ArrayStoreException if OBJECT is not assignable
943 (at runtime) to an element of ARRAY. A NOP_EXPR is returned if it can
944 determine that no check is required. */
947 build_java_arraystore_check (tree array
, tree object
)
949 tree check
, element_type
, source
;
950 tree array_type_p
= TREE_TYPE (array
);
951 tree object_type
= TYPE_NAME (TREE_TYPE (TREE_TYPE (object
)));
953 if (! flag_verify_invocations
)
955 /* With the new verifier, we don't track precise types. FIXME:
956 performance regression here. */
957 element_type
= TYPE_NAME (object_type_node
);
961 gcc_assert (is_array_type_p (array_type_p
));
963 /* Get the TYPE_DECL for ARRAY's element type. */
965 = TYPE_NAME (TREE_TYPE (TREE_TYPE (TREE_TYPE (array_type_p
))));
968 gcc_assert (TREE_CODE (element_type
) == TYPE_DECL
969 && TREE_CODE (object_type
) == TYPE_DECL
);
971 if (!flag_store_check
)
972 return build1 (NOP_EXPR
, array_type_p
, array
);
974 /* No check is needed if the element type is final. Also check that
975 element_type matches object_type, since in the bytecode
976 compilation case element_type may be the actual element type of
977 the array rather than its declared type. However, if we're doing
978 indirect dispatch, we can't do the `final' optimization. */
979 if (element_type
== object_type
980 && ! flag_indirect_dispatch
981 && CLASS_FINAL (element_type
))
982 return build1 (NOP_EXPR
, array_type_p
, array
);
984 /* OBJECT might be wrapped by a SAVE_EXPR. */
985 if (TREE_CODE (object
) == SAVE_EXPR
)
986 source
= TREE_OPERAND (object
, 0);
990 /* Avoid the check if OBJECT was just loaded from the same array. */
991 if (TREE_CODE (source
) == ARRAY_REF
)
994 source
= TREE_OPERAND (source
, 0); /* COMPONENT_REF. */
995 source
= TREE_OPERAND (source
, 0); /* INDIRECT_REF. */
996 source
= TREE_OPERAND (source
, 0); /* Source array's DECL or SAVE_EXPR. */
997 if (TREE_CODE (source
) == SAVE_EXPR
)
998 source
= TREE_OPERAND (source
, 0);
1001 if (TREE_CODE (target
) == SAVE_EXPR
)
1002 target
= TREE_OPERAND (target
, 0);
1004 if (source
== target
)
1005 return build1 (NOP_EXPR
, array_type_p
, array
);
1008 /* Build an invocation of _Jv_CheckArrayStore */
1009 check
= build_call_nary (void_type_node
,
1010 build_address_of (soft_checkarraystore_node
),
1012 TREE_SIDE_EFFECTS (check
) = 1;
1017 /* Makes sure that INDEXED_TYPE is appropriate. If not, make it from
1018 ARRAY_NODE. This function is used to retrieve something less vague than
1019 a pointer type when indexing the first dimension of something like [[<t>.
1020 May return a corrected type, if necessary, otherwise INDEXED_TYPE is
1021 return unchanged. */
1024 build_java_check_indexed_type (tree array_node ATTRIBUTE_UNUSED
,
1027 /* We used to check to see if ARRAY_NODE really had array type.
1028 However, with the new verifier, this is not necessary, as we know
1029 that the object will be an array of the appropriate type. */
1031 return indexed_type
;
1034 /* newarray triggers a call to _Jv_NewPrimArray. This function should be
1035 called with an integer code (the type of array to create), and the length
1036 of the array to create. */
1039 build_newarray (int atype_value
, tree length
)
1043 tree prim_type
= decode_newarray_type (atype_value
);
1045 = build_java_array_type (prim_type
,
1046 tree_fits_shwi_p (length
)
1047 ? tree_to_shwi (length
) : -1);
1049 /* Pass a reference to the primitive type class and save the runtime
1051 type_arg
= build_class_ref (prim_type
);
1053 return build_call_nary (promote_type (type
),
1054 build_address_of (soft_newarray_node
),
1055 2, type_arg
, length
);
1058 /* Generates anewarray from a given CLASS_TYPE. Gets from the stack the size
1059 of the dimension. */
1062 build_anewarray (tree class_type
, tree length
)
1065 = build_java_array_type (class_type
,
1066 tree_fits_shwi_p (length
)
1067 ? tree_to_shwi (length
) : -1);
1069 return build_call_nary (promote_type (type
),
1070 build_address_of (soft_anewarray_node
),
1073 build_class_ref (class_type
),
1077 /* Return a node the evaluates 'new TYPE[LENGTH]'. */
1080 build_new_array (tree type
, tree length
)
1082 if (JPRIMITIVE_TYPE_P (type
))
1083 return build_newarray (encode_newarray_type (type
), length
);
1085 return build_anewarray (TREE_TYPE (type
), length
);
1088 /* Generates a call to _Jv_NewMultiArray. multianewarray expects a
1089 class pointer, a number of dimensions and the matching number of
1090 dimensions. The argument list is NULL terminated. */
1093 expand_java_multianewarray (tree class_type
, int ndim
)
1096 vec
<tree
, va_gc
> *args
= NULL
;
1098 vec_safe_grow (args
, 3 + ndim
);
1100 (*args
)[0] = build_class_ref (class_type
);
1101 (*args
)[1] = build_int_cst (NULL_TREE
, ndim
);
1103 for(i
= ndim
- 1; i
>= 0; i
-- )
1104 (*args
)[(unsigned)(2 + i
)] = pop_value (int_type_node
);
1106 (*args
)[2 + ndim
] = null_pointer_node
;
1108 push_value (build_call_vec (promote_type (class_type
),
1109 build_address_of (soft_multianewarray_node
),
1113 /* ARRAY[INDEX] <- RHS. build_java_check_indexed_type makes sure that
1114 ARRAY is an array type. May expand some bound checking and NULL
1115 pointer checking. RHS_TYPE_NODE we are going to store. In the case
1116 of the CHAR/BYTE/BOOLEAN SHORT, the type popped of the stack is an
1117 INT. In those cases, we make the conversion.
1119 if ARRAy is a reference type, the assignment is checked at run-time
1120 to make sure that the RHS can be assigned to the array element
1121 type. It is not necessary to generate this code if ARRAY is final. */
1124 expand_java_arraystore (tree rhs_type_node
)
1126 tree rhs_node
= pop_value ((INTEGRAL_TYPE_P (rhs_type_node
)
1127 && TYPE_PRECISION (rhs_type_node
) <= 32) ?
1128 int_type_node
: rhs_type_node
);
1129 tree index
= pop_value (int_type_node
);
1130 tree array_type
, array
, temp
, access
;
1132 /* If we're processing an `aaload' we might as well just pick
1134 if (TREE_CODE (rhs_type_node
) == POINTER_TYPE
)
1136 array_type
= build_java_array_type (object_ptr_type_node
, -1);
1137 rhs_type_node
= object_ptr_type_node
;
1140 array_type
= build_java_array_type (rhs_type_node
, -1);
1142 array
= pop_value (array_type
);
1143 array
= build1 (NOP_EXPR
, promote_type (array_type
), array
);
1145 rhs_type_node
= build_java_check_indexed_type (array
, rhs_type_node
);
1147 flush_quick_stack ();
1149 index
= save_expr (index
);
1150 array
= save_expr (array
);
1152 /* We want to perform the bounds check (done by
1153 build_java_arrayaccess) before the type check (done by
1154 build_java_arraystore_check). So, we call build_java_arrayaccess
1155 -- which returns an ARRAY_REF lvalue -- and we then generate code
1156 to stash the address of that lvalue in a temp. Then we call
1157 build_java_arraystore_check, and finally we generate a
1158 MODIFY_EXPR to set the array element. */
1160 access
= build_java_arrayaccess (array
, rhs_type_node
, index
);
1161 temp
= build_decl (input_location
, VAR_DECL
, NULL_TREE
,
1162 build_pointer_type (TREE_TYPE (access
)));
1163 java_add_local_var (temp
);
1164 java_add_stmt (build2 (MODIFY_EXPR
, TREE_TYPE (temp
),
1166 build_fold_addr_expr (access
)));
1168 if (TREE_CODE (rhs_type_node
) == POINTER_TYPE
)
1170 tree check
= build_java_arraystore_check (array
, rhs_node
);
1171 java_add_stmt (check
);
1174 java_add_stmt (build2 (MODIFY_EXPR
, TREE_TYPE (access
),
1175 build1 (INDIRECT_REF
, TREE_TYPE (access
), temp
),
1179 /* Expand the evaluation of ARRAY[INDEX]. build_java_check_indexed_type makes
1180 sure that LHS is an array type. May expand some bound checking and NULL
1182 LHS_TYPE_NODE is the type of ARRAY[INDEX]. But in the case of CHAR/BYTE/
1183 BOOLEAN/SHORT, we push a promoted type back to the stack.
1187 expand_java_arrayload (tree lhs_type_node
)
1190 tree index_node
= pop_value (int_type_node
);
1194 /* If we're processing an `aaload' we might as well just pick
1196 if (TREE_CODE (lhs_type_node
) == POINTER_TYPE
)
1198 array_type
= build_java_array_type (object_ptr_type_node
, -1);
1199 lhs_type_node
= object_ptr_type_node
;
1202 array_type
= build_java_array_type (lhs_type_node
, -1);
1203 array_node
= pop_value (array_type
);
1204 array_node
= build1 (NOP_EXPR
, promote_type (array_type
), array_node
);
1206 index_node
= save_expr (index_node
);
1207 array_node
= save_expr (array_node
);
1209 lhs_type_node
= build_java_check_indexed_type (array_node
,
1211 load_node
= build_java_arrayaccess (array_node
,
1214 if (INTEGRAL_TYPE_P (lhs_type_node
) && TYPE_PRECISION (lhs_type_node
) <= 32)
1215 load_node
= fold_build1 (NOP_EXPR
, int_type_node
, load_node
);
1216 push_value (load_node
);
1219 /* Expands .length. Makes sure that we deal with and array and may expand
1220 a NULL check on the array object. */
1223 expand_java_array_length (void)
1225 tree array
= pop_value (ptr_type_node
);
1226 tree length
= build_java_array_length_access (array
);
1228 push_value (length
);
1231 /* Emit code for the call to _Jv_Monitor{Enter,Exit}. CALL can be
1232 either soft_monitorenter_node or soft_monitorexit_node. */
1235 build_java_monitor (tree call
, tree object
)
1237 return build_call_nary (void_type_node
,
1238 build_address_of (call
),
1242 /* Emit code for one of the PUSHC instructions. */
1245 expand_java_pushc (int ival
, tree type
)
1248 if (type
== ptr_type_node
&& ival
== 0)
1249 value
= null_pointer_node
;
1250 else if (type
== int_type_node
|| type
== long_type_node
)
1251 value
= build_int_cst (type
, ival
);
1252 else if (type
== float_type_node
|| type
== double_type_node
)
1255 real_from_integer (&x
, TYPE_MODE (type
), ival
, SIGNED
);
1256 value
= build_real (type
, x
);
1265 expand_java_return (tree type
)
1267 if (type
== void_type_node
)
1268 java_add_stmt (build1 (RETURN_EXPR
, void_type_node
, NULL
));
1271 tree retval
= pop_value (type
);
1272 tree res
= DECL_RESULT (current_function_decl
);
1273 retval
= build2 (MODIFY_EXPR
, TREE_TYPE (res
), res
, retval
);
1275 /* Handle the situation where the native integer type is smaller
1276 than the JVM integer. It can happen for many cross compilers.
1277 The whole if expression just goes away if INT_TYPE_SIZE < 32
1279 if (INT_TYPE_SIZE
< 32
1280 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (res
)))
1281 < GET_MODE_SIZE (TYPE_MODE (type
))))
1282 retval
= build1(NOP_EXPR
, TREE_TYPE(res
), retval
);
1284 TREE_SIDE_EFFECTS (retval
) = 1;
1285 java_add_stmt (build1 (RETURN_EXPR
, void_type_node
, retval
));
1290 expand_load_internal (int index
, tree type
, int pc
)
1293 tree var
= find_local_variable (index
, type
, pc
);
1295 /* Now VAR is the VAR_DECL (or PARM_DECL) that we are going to push
1296 on the stack. If there is an assignment to this VAR_DECL between
1297 the stack push and the use, then the wrong code could be
1298 generated. To avoid this we create a new local and copy our
1299 value into it. Then we push this new local on the stack.
1300 Hopefully this all gets optimized out. */
1301 copy
= build_decl (input_location
, VAR_DECL
, NULL_TREE
, type
);
1302 if ((INTEGRAL_TYPE_P (type
) || POINTER_TYPE_P (type
))
1303 && TREE_TYPE (copy
) != TREE_TYPE (var
))
1304 var
= convert (type
, var
);
1305 java_add_local_var (copy
);
1306 java_add_stmt (build2 (MODIFY_EXPR
, TREE_TYPE (var
), copy
, var
));
1312 build_address_of (tree value
)
1314 return build1 (ADDR_EXPR
, build_pointer_type (TREE_TYPE (value
)), value
);
1318 class_has_finalize_method (tree type
)
1320 tree super
= CLASSTYPE_SUPER (type
);
1322 if (super
== NULL_TREE
)
1323 return false; /* Every class with a real finalizer inherits */
1324 /* from java.lang.Object. */
1326 return HAS_FINALIZER_P (type
) || class_has_finalize_method (super
);
1330 java_create_object (tree type
)
1332 tree alloc_node
= (class_has_finalize_method (type
)
1334 : alloc_no_finalizer_node
);
1336 return build_call_nary (promote_type (type
),
1337 build_address_of (alloc_node
),
1338 1, build_class_ref (type
));
1342 expand_java_NEW (tree type
)
1346 alloc_node
= (class_has_finalize_method (type
) ? alloc_object_node
1347 : alloc_no_finalizer_node
);
1348 if (! CLASS_LOADED_P (type
))
1349 load_class (type
, 1);
1350 safe_layout_class (type
);
1351 push_value (build_call_nary (promote_type (type
),
1352 build_address_of (alloc_node
),
1353 1, build_class_ref (type
)));
1356 /* This returns an expression which will extract the class of an
1360 build_get_class (tree value
)
1362 tree class_field
= lookup_field (&dtable_type
, get_identifier ("class"));
1363 tree vtable_field
= lookup_field (&object_type_node
,
1364 get_identifier ("vtable"));
1365 tree tmp
= build3 (COMPONENT_REF
, dtable_ptr_type
,
1366 build_java_indirect_ref (object_type_node
, value
,
1367 flag_check_references
),
1368 vtable_field
, NULL_TREE
);
1369 return build3 (COMPONENT_REF
, class_ptr_type
,
1370 build1 (INDIRECT_REF
, dtable_type
, tmp
),
1371 class_field
, NULL_TREE
);
1374 /* This builds the tree representation of the `instanceof' operator.
1375 It tries various tricks to optimize this in cases where types are
1379 build_instanceof (tree value
, tree type
)
1382 tree itype
= TREE_TYPE (TREE_TYPE (soft_instanceof_node
));
1383 tree valtype
= TREE_TYPE (TREE_TYPE (value
));
1384 tree valclass
= TYPE_NAME (valtype
);
1387 /* When compiling from bytecode, we need to ensure that TYPE has
1389 if (CLASS_P (type
) && ! CLASS_LOADED_P (type
))
1391 load_class (type
, 1);
1392 safe_layout_class (type
);
1393 if (! TYPE_SIZE (type
) || TREE_CODE (TYPE_SIZE (type
)) == ERROR_MARK
)
1394 return error_mark_node
;
1396 klass
= TYPE_NAME (type
);
1398 if (type
== object_type_node
|| inherits_from_p (valtype
, type
))
1400 /* Anything except `null' is an instance of Object. Likewise,
1401 if the object is known to be an instance of the class, then
1402 we only need to check for `null'. */
1403 expr
= build2 (NE_EXPR
, itype
, value
, null_pointer_node
);
1405 else if (flag_verify_invocations
1406 && ! TYPE_ARRAY_P (type
)
1407 && ! TYPE_ARRAY_P (valtype
)
1408 && DECL_P (klass
) && DECL_P (valclass
)
1409 && ! CLASS_INTERFACE (valclass
)
1410 && ! CLASS_INTERFACE (klass
)
1411 && ! inherits_from_p (type
, valtype
)
1412 && (CLASS_FINAL (klass
)
1413 || ! inherits_from_p (valtype
, type
)))
1415 /* The classes are from different branches of the derivation
1416 tree, so we immediately know the answer. */
1417 expr
= boolean_false_node
;
1419 else if (DECL_P (klass
) && CLASS_FINAL (klass
))
1421 tree save
= save_expr (value
);
1422 expr
= build3 (COND_EXPR
, itype
,
1423 build2 (NE_EXPR
, boolean_type_node
,
1424 save
, null_pointer_node
),
1425 build2 (EQ_EXPR
, itype
,
1426 build_get_class (save
),
1427 build_class_ref (type
)),
1428 boolean_false_node
);
1432 expr
= build_call_nary (itype
,
1433 build_address_of (soft_instanceof_node
),
1434 2, value
, build_class_ref (type
));
1436 TREE_SIDE_EFFECTS (expr
) = TREE_SIDE_EFFECTS (value
);
1441 expand_java_INSTANCEOF (tree type
)
1443 tree value
= pop_value (object_ptr_type_node
);
1444 value
= build_instanceof (value
, type
);
1449 expand_java_CHECKCAST (tree type
)
1451 tree value
= pop_value (ptr_type_node
);
1452 value
= build_call_nary (promote_type (type
),
1453 build_address_of (soft_checkcast_node
),
1454 2, build_class_ref (type
), value
);
1459 expand_iinc (unsigned int local_var_index
, int ival
, int pc
)
1461 tree local_var
, res
;
1462 tree constant_value
;
1464 flush_quick_stack ();
1465 local_var
= find_local_variable (local_var_index
, int_type_node
, pc
);
1466 constant_value
= build_int_cst (NULL_TREE
, ival
);
1467 res
= fold_build2 (PLUS_EXPR
, int_type_node
, local_var
, constant_value
);
1468 java_add_stmt (build2 (MODIFY_EXPR
, TREE_TYPE (local_var
), local_var
, res
));
1473 build_java_soft_divmod (enum tree_code op
, tree type
, tree op1
, tree op2
)
1476 tree arg1
= convert (type
, op1
);
1477 tree arg2
= convert (type
, op2
);
1479 if (type
== int_type_node
)
1483 case TRUNC_DIV_EXPR
:
1484 call
= soft_idiv_node
;
1486 case TRUNC_MOD_EXPR
:
1487 call
= soft_irem_node
;
1493 else if (type
== long_type_node
)
1497 case TRUNC_DIV_EXPR
:
1498 call
= soft_ldiv_node
;
1500 case TRUNC_MOD_EXPR
:
1501 call
= soft_lrem_node
;
1509 call
= build_call_nary (type
, build_address_of (call
), 2, arg1
, arg2
);
1514 build_java_binop (enum tree_code op
, tree type
, tree arg1
, tree arg2
)
1521 tree u_type
= unsigned_type_for (type
);
1522 arg1
= convert (u_type
, arg1
);
1523 arg1
= build_java_binop (RSHIFT_EXPR
, u_type
, arg1
, arg2
);
1524 return convert (type
, arg1
);
1528 mask
= build_int_cst (int_type_node
,
1529 TYPE_PRECISION (TREE_TYPE (arg1
)) - 1);
1530 arg2
= fold_build2 (BIT_AND_EXPR
, int_type_node
, arg2
, mask
);
1533 case COMPARE_L_EXPR
: /* arg1 > arg2 ? 1 : arg1 == arg2 ? 0 : -1 */
1534 case COMPARE_G_EXPR
: /* arg1 < arg2 ? -1 : arg1 == arg2 ? 0 : 1 */
1535 arg1
= save_expr (arg1
); arg2
= save_expr (arg2
);
1537 tree ifexp1
= fold_build2 (op
== COMPARE_L_EXPR
? GT_EXPR
: LT_EXPR
,
1538 boolean_type_node
, arg1
, arg2
);
1539 tree ifexp2
= fold_build2 (EQ_EXPR
, boolean_type_node
, arg1
, arg2
);
1540 tree second_compare
= fold_build3 (COND_EXPR
, int_type_node
,
1541 ifexp2
, integer_zero_node
,
1542 op
== COMPARE_L_EXPR
1543 ? integer_minus_one_node
1544 : integer_one_node
);
1545 return fold_build3 (COND_EXPR
, int_type_node
, ifexp1
,
1546 op
== COMPARE_L_EXPR
? integer_one_node
1547 : integer_minus_one_node
,
1551 arg1
= save_expr (arg1
); arg2
= save_expr (arg2
);
1553 tree ifexp1
= fold_build2 (LT_EXPR
, boolean_type_node
, arg1
, arg2
);
1554 tree ifexp2
= fold_build2 (GT_EXPR
, boolean_type_node
, arg1
, arg2
);
1555 tree second_compare
= fold_build3 (COND_EXPR
, int_type_node
,
1556 ifexp2
, integer_one_node
,
1558 return fold_build3 (COND_EXPR
, int_type_node
,
1559 ifexp1
, integer_minus_one_node
, second_compare
);
1561 case TRUNC_DIV_EXPR
:
1562 case TRUNC_MOD_EXPR
:
1563 if (TREE_CODE (type
) == REAL_TYPE
1564 && op
== TRUNC_MOD_EXPR
)
1567 if (type
!= double_type_node
)
1569 arg1
= convert (double_type_node
, arg1
);
1570 arg2
= convert (double_type_node
, arg2
);
1572 call
= build_call_nary (double_type_node
,
1573 build_address_of (soft_fmod_node
),
1575 if (type
!= double_type_node
)
1576 call
= convert (type
, call
);
1580 if (TREE_CODE (type
) == INTEGER_TYPE
1581 && flag_use_divide_subroutine
1582 && ! flag_syntax_only
)
1583 return build_java_soft_divmod (op
, type
, arg1
, arg2
);
1588 return fold_build2 (op
, type
, arg1
, arg2
);
1592 expand_java_binop (tree type
, enum tree_code op
)
1602 rtype
= int_type_node
;
1603 rarg
= pop_value (rtype
);
1606 rarg
= pop_value (rtype
);
1608 larg
= pop_value (ltype
);
1609 push_value (build_java_binop (op
, type
, larg
, rarg
));
1612 /* Lookup the field named NAME in *TYPEP or its super classes.
1613 If not found, return NULL_TREE.
1614 (If the *TYPEP is not found, or if the field reference is
1615 ambiguous, return error_mark_node.)
1616 If found, return the FIELD_DECL, and set *TYPEP to the
1617 class containing the field. */
1620 lookup_field (tree
*typep
, tree name
)
1622 if (CLASS_P (*typep
) && !CLASS_LOADED_P (*typep
))
1624 load_class (*typep
, 1);
1625 safe_layout_class (*typep
);
1626 if (!TYPE_SIZE (*typep
) || TREE_CODE (TYPE_SIZE (*typep
)) == ERROR_MARK
)
1627 return error_mark_node
;
1631 tree field
, binfo
, base_binfo
;
1635 for (field
= TYPE_FIELDS (*typep
); field
; field
= DECL_CHAIN (field
))
1636 if (DECL_NAME (field
) == name
)
1639 /* Process implemented interfaces. */
1640 save_field
= NULL_TREE
;
1641 for (binfo
= TYPE_BINFO (*typep
), i
= 0;
1642 BINFO_BASE_ITERATE (binfo
, i
, base_binfo
); i
++)
1644 tree t
= BINFO_TYPE (base_binfo
);
1645 if ((field
= lookup_field (&t
, name
)))
1647 if (save_field
== field
)
1649 if (save_field
== NULL_TREE
)
1653 tree i1
= DECL_CONTEXT (save_field
);
1654 tree i2
= DECL_CONTEXT (field
);
1655 error ("reference %qs is ambiguous: appears in interface %qs and interface %qs",
1656 IDENTIFIER_POINTER (name
),
1657 IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (i1
))),
1658 IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (i2
))));
1659 return error_mark_node
;
1664 if (save_field
!= NULL_TREE
)
1667 *typep
= CLASSTYPE_SUPER (*typep
);
1672 /* Look up the field named NAME in object SELF_VALUE,
1673 which has class SELF_CLASS (a non-handle RECORD_TYPE).
1674 SELF_VALUE is NULL_TREE if looking for a static field. */
1677 build_field_ref (tree self_value
, tree self_class
, tree name
)
1679 tree base_class
= self_class
;
1680 tree field_decl
= lookup_field (&base_class
, name
);
1681 if (field_decl
== NULL_TREE
)
1683 error ("field %qs not found", IDENTIFIER_POINTER (name
));
1684 return error_mark_node
;
1686 if (self_value
== NULL_TREE
)
1688 return build_static_field_ref (field_decl
);
1692 tree base_type
= promote_type (base_class
);
1694 /* CHECK is true if self_value is not the this pointer. */
1695 int check
= (! (DECL_P (self_value
)
1696 && DECL_NAME (self_value
) == this_identifier_node
));
1698 /* Determine whether a field offset from NULL will lie within
1699 Page 0: this is necessary on those GNU/Linux/BSD systems that
1700 trap SEGV to generate NullPointerExceptions.
1702 We assume that Page 0 will be mapped with NOPERM, and that
1703 memory may be allocated from any other page, so only field
1704 offsets < pagesize are guaranteed to trap. We also assume
1705 the smallest page size we'll encounter is 4k bytes. */
1706 if (! flag_syntax_only
&& check
&& ! flag_check_references
1707 && ! flag_indirect_dispatch
)
1709 tree field_offset
= byte_position (field_decl
);
1711 page_size
= size_int (4096);
1712 check
= !tree_int_cst_lt (field_offset
, page_size
);
1715 if (base_type
!= TREE_TYPE (self_value
))
1716 self_value
= fold_build1 (NOP_EXPR
, base_type
, self_value
);
1717 if (! flag_syntax_only
&& flag_indirect_dispatch
)
1720 = build_int_cst (NULL_TREE
, get_symbol_table_index
1721 (field_decl
, NULL_TREE
,
1722 &TYPE_OTABLE_METHODS (output_class
)));
1724 = build4 (ARRAY_REF
, integer_type_node
,
1725 TYPE_OTABLE_DECL (output_class
), otable_index
,
1726 NULL_TREE
, NULL_TREE
);
1729 if (DECL_CONTEXT (field_decl
) != output_class
)
1731 = build3 (COND_EXPR
, TREE_TYPE (field_offset
),
1732 build2 (EQ_EXPR
, boolean_type_node
,
1733 field_offset
, integer_zero_node
),
1734 build_call_nary (void_type_node
,
1735 build_address_of (soft_nosuchfield_node
),
1739 self_value
= java_check_reference (self_value
, check
);
1740 address
= fold_build_pointer_plus (self_value
, field_offset
);
1741 address
= fold_convert (build_pointer_type (TREE_TYPE (field_decl
)),
1743 return fold_build1 (INDIRECT_REF
, TREE_TYPE (field_decl
), address
);
1746 self_value
= build_java_indirect_ref (TREE_TYPE (TREE_TYPE (self_value
)),
1748 return fold_build3 (COMPONENT_REF
, TREE_TYPE (field_decl
),
1749 self_value
, field_decl
, NULL_TREE
);
1754 lookup_label (int pc
)
1758 if (pc
> highest_label_pc_this_method
)
1759 highest_label_pc_this_method
= pc
;
1760 targetm
.asm_out
.generate_internal_label (buf
, "LJpc=",
1761 start_label_pc_this_method
+ pc
);
1762 name
= get_identifier (buf
);
1763 if (IDENTIFIER_LOCAL_VALUE (name
))
1764 return IDENTIFIER_LOCAL_VALUE (name
);
1767 /* The type of the address of a label is return_address_type_node. */
1768 tree decl
= create_label_decl (name
);
1769 return pushdecl (decl
);
1773 /* Generate a unique name for the purpose of loops and switches
1774 labels, and try-catch-finally blocks label or temporary variables. */
1777 generate_name (void)
1779 static int l_number
= 0;
1781 targetm
.asm_out
.generate_internal_label (buff
, "LJv", l_number
);
1783 return get_identifier (buff
);
1787 create_label_decl (tree name
)
1790 decl
= build_decl (input_location
, LABEL_DECL
, name
,
1791 TREE_TYPE (return_address_type_node
));
1792 DECL_CONTEXT (decl
) = current_function_decl
;
1793 DECL_IGNORED_P (decl
) = 1;
1797 /* This maps a bytecode offset (PC) to various flags. */
1798 char *instruction_bits
;
1800 /* This is a vector of type states for the current method. It is
1801 indexed by PC. Each element is a tree vector holding the type
1802 state at that PC. We only note type states at basic block
1804 vec
<tree
, va_gc
> *type_states
;
1807 note_label (int current_pc ATTRIBUTE_UNUSED
, int target_pc
)
1809 lookup_label (target_pc
);
1810 instruction_bits
[target_pc
] |= BCODE_JUMP_TARGET
;
1813 /* Emit code to jump to TARGET_PC if VALUE1 CONDITION VALUE2,
1814 where CONDITION is one of one the compare operators. */
1817 expand_compare (enum tree_code condition
, tree value1
, tree value2
,
1820 tree target
= lookup_label (target_pc
);
1821 tree cond
= fold_build2 (condition
, boolean_type_node
, value1
, value2
);
1823 (build3 (COND_EXPR
, void_type_node
, java_truthvalue_conversion (cond
),
1824 build1 (GOTO_EXPR
, void_type_node
, target
),
1825 build_java_empty_stmt ()));
1828 /* Emit code for a TEST-type opcode. */
1831 expand_test (enum tree_code condition
, tree type
, int target_pc
)
1833 tree value1
, value2
;
1834 flush_quick_stack ();
1835 value1
= pop_value (type
);
1836 value2
= (type
== ptr_type_node
) ? null_pointer_node
: integer_zero_node
;
1837 expand_compare (condition
, value1
, value2
, target_pc
);
1840 /* Emit code for a COND-type opcode. */
1843 expand_cond (enum tree_code condition
, tree type
, int target_pc
)
1845 tree value1
, value2
;
1846 flush_quick_stack ();
1847 /* note: pop values in opposite order */
1848 value2
= pop_value (type
);
1849 value1
= pop_value (type
);
1850 /* Maybe should check value1 and value2 for type compatibility ??? */
1851 expand_compare (condition
, value1
, value2
, target_pc
);
1855 expand_java_goto (int target_pc
)
1857 tree target_label
= lookup_label (target_pc
);
1858 flush_quick_stack ();
1859 java_add_stmt (build1 (GOTO_EXPR
, void_type_node
, target_label
));
1863 expand_java_switch (tree selector
, int default_pc
)
1865 tree switch_expr
, x
;
1867 flush_quick_stack ();
1868 switch_expr
= build3 (SWITCH_EXPR
, TREE_TYPE (selector
), selector
,
1869 NULL_TREE
, NULL_TREE
);
1870 java_add_stmt (switch_expr
);
1872 x
= build_case_label (NULL_TREE
, NULL_TREE
,
1873 create_artificial_label (input_location
));
1874 append_to_statement_list (x
, &SWITCH_BODY (switch_expr
));
1876 x
= build1 (GOTO_EXPR
, void_type_node
, lookup_label (default_pc
));
1877 append_to_statement_list (x
, &SWITCH_BODY (switch_expr
));
1883 expand_java_add_case (tree switch_expr
, int match
, int target_pc
)
1887 value
= build_int_cst (TREE_TYPE (switch_expr
), match
);
1889 x
= build_case_label (value
, NULL_TREE
,
1890 create_artificial_label (input_location
));
1891 append_to_statement_list (x
, &SWITCH_BODY (switch_expr
));
1893 x
= build1 (GOTO_EXPR
, void_type_node
, lookup_label (target_pc
));
1894 append_to_statement_list (x
, &SWITCH_BODY (switch_expr
));
1897 static vec
<tree
, va_gc
> *
1898 pop_arguments (tree method_type
)
1900 function_args_iterator fnai
;
1902 vec
<tree
, va_gc
> *args
= NULL
;
1905 FOREACH_FUNCTION_ARGS (method_type
, type
, fnai
)
1907 /* XXX: leaky abstraction. */
1908 if (type
== void_type_node
)
1911 vec_safe_push (args
, type
);
1914 arity
= vec_safe_length (args
);
1918 tree arg
= pop_value ((*args
)[arity
]);
1920 /* We simply cast each argument to its proper type. This is
1921 needed since we lose type information coming out of the
1922 verifier. We also have to do this when we pop an integer
1923 type that must be promoted for the function call. */
1924 if (TREE_CODE (type
) == POINTER_TYPE
)
1925 arg
= build1 (NOP_EXPR
, type
, arg
);
1926 else if (targetm
.calls
.promote_prototypes (type
)
1927 && TYPE_PRECISION (type
) < TYPE_PRECISION (integer_type_node
)
1928 && INTEGRAL_TYPE_P (type
))
1929 arg
= convert (integer_type_node
, arg
);
1931 (*args
)[arity
] = arg
;
1937 /* Attach to PTR (a block) the declaration found in ENTRY. */
1940 attach_init_test_initialization_flags (treetreehash_entry
**slot
, tree block
)
1942 treetreehash_entry
*ite
= *slot
;
1944 if (block
!= error_mark_node
)
1946 if (TREE_CODE (block
) == BIND_EXPR
)
1948 tree body
= BIND_EXPR_BODY (block
);
1949 DECL_CHAIN (ite
->value
) = BIND_EXPR_VARS (block
);
1950 BIND_EXPR_VARS (block
) = ite
->value
;
1951 body
= build2 (COMPOUND_EXPR
, void_type_node
,
1952 build1 (DECL_EXPR
, void_type_node
, ite
->value
), body
);
1953 BIND_EXPR_BODY (block
) = body
;
1957 tree body
= BLOCK_SUBBLOCKS (block
);
1958 TREE_CHAIN (ite
->value
) = BLOCK_EXPR_DECLS (block
);
1959 BLOCK_EXPR_DECLS (block
) = ite
->value
;
1960 body
= build2 (COMPOUND_EXPR
, void_type_node
,
1961 build1 (DECL_EXPR
, void_type_node
, ite
->value
), body
);
1962 BLOCK_SUBBLOCKS (block
) = body
;
1969 /* Build an expression to initialize the class CLAS.
1970 if EXPR is non-NULL, returns an expression to first call the initializer
1971 (if it is needed) and then calls EXPR. */
1974 build_class_init (tree clas
, tree expr
)
1978 /* An optimization: if CLAS is a superclass of the class we're
1979 compiling, we don't need to initialize it. However, if CLAS is
1980 an interface, it won't necessarily be initialized, even if we
1982 if ((! CLASS_INTERFACE (TYPE_NAME (clas
))
1983 && inherits_from_p (current_class
, clas
))
1984 || current_class
== clas
)
1987 if (always_initialize_class_p
)
1989 init
= build_call_nary (void_type_node
,
1990 build_address_of (soft_initclass_node
),
1991 1, build_class_ref (clas
));
1992 TREE_SIDE_EFFECTS (init
) = 1;
1996 tree
*init_test_decl
;
1998 init_test_decl
= java_treetreehash_new
1999 (DECL_FUNCTION_INIT_TEST_TABLE (current_function_decl
), clas
);
2001 if (*init_test_decl
== NULL
)
2003 /* Build a declaration and mark it as a flag used to track
2004 static class initializations. */
2005 decl
= build_decl (input_location
, VAR_DECL
, NULL_TREE
,
2007 MAYBE_CREATE_VAR_LANG_DECL_SPECIFIC (decl
);
2008 DECL_CONTEXT (decl
) = current_function_decl
;
2009 DECL_INITIAL (decl
) = boolean_false_node
;
2010 /* Don't emit any symbolic debugging info for this decl. */
2011 DECL_IGNORED_P (decl
) = 1;
2012 *init_test_decl
= decl
;
2015 init
= build_call_nary (void_type_node
,
2016 build_address_of (soft_initclass_node
),
2017 1, build_class_ref (clas
));
2018 TREE_SIDE_EFFECTS (init
) = 1;
2019 init
= build3 (COND_EXPR
, void_type_node
,
2020 build2 (EQ_EXPR
, boolean_type_node
,
2021 *init_test_decl
, boolean_false_node
),
2022 init
, integer_zero_node
);
2023 TREE_SIDE_EFFECTS (init
) = 1;
2024 init
= build2 (COMPOUND_EXPR
, TREE_TYPE (expr
), init
,
2025 build2 (MODIFY_EXPR
, boolean_type_node
,
2026 *init_test_decl
, boolean_true_node
));
2027 TREE_SIDE_EFFECTS (init
) = 1;
2030 if (expr
!= NULL_TREE
)
2032 expr
= build2 (COMPOUND_EXPR
, TREE_TYPE (expr
), init
, expr
);
2033 TREE_SIDE_EFFECTS (expr
) = 1;
2041 /* Rewrite expensive calls that require stack unwinding at runtime to
2042 cheaper alternatives. The logic here performs these
2045 java.lang.Class.forName("foo") -> java.lang.Class.forName("foo", class$)
2046 java.lang.Class.getClassLoader() -> java.lang.Class.getClassLoader(class$)
2052 const char *classname
;
2054 const char *signature
;
2055 const char *new_classname
;
2056 const char *new_signature
;
2058 void (*rewrite_arglist
) (vec
<tree
, va_gc
> **);
2061 /* Add __builtin_return_address(0) to the end of an arglist. */
2065 rewrite_arglist_getcaller (vec
<tree
, va_gc
> **arglist
)
2068 = build_call_expr (builtin_decl_explicit (BUILT_IN_RETURN_ADDRESS
),
2069 1, integer_zero_node
);
2071 DECL_UNINLINABLE (current_function_decl
) = 1;
2073 vec_safe_push (*arglist
, retaddr
);
2076 /* Add this.class to the end of an arglist. */
2079 rewrite_arglist_getclass (vec
<tree
, va_gc
> **arglist
)
2081 vec_safe_push (*arglist
, build_class_ref (output_class
));
2084 static rewrite_rule rules
[] =
2085 {{"java.lang.Class", "getClassLoader", "()Ljava/lang/ClassLoader;",
2086 "java.lang.Class", "(Ljava/lang/Class;)Ljava/lang/ClassLoader;",
2087 ACC_FINAL
|ACC_PRIVATE
, rewrite_arglist_getclass
},
2089 {"java.lang.Class", "forName", "(Ljava/lang/String;)Ljava/lang/Class;",
2090 "java.lang.Class", "(Ljava/lang/String;Ljava/lang/Class;)Ljava/lang/Class;",
2091 ACC_FINAL
|ACC_PRIVATE
|ACC_STATIC
, rewrite_arglist_getclass
},
2093 {"gnu.classpath.VMStackWalker", "getCallingClass", "()Ljava/lang/Class;",
2094 "gnu.classpath.VMStackWalker", "(Lgnu/gcj/RawData;)Ljava/lang/Class;",
2095 ACC_FINAL
|ACC_PRIVATE
|ACC_STATIC
, rewrite_arglist_getcaller
},
2097 {"gnu.classpath.VMStackWalker", "getCallingClassLoader",
2098 "()Ljava/lang/ClassLoader;",
2099 "gnu.classpath.VMStackWalker", "(Lgnu/gcj/RawData;)Ljava/lang/ClassLoader;",
2100 ACC_FINAL
|ACC_PRIVATE
|ACC_STATIC
, rewrite_arglist_getcaller
},
2102 {"gnu.java.lang.VMCPStringBuilder", "toString", "([CII)Ljava/lang/String;",
2103 "java.lang.String", "([CII)Ljava/lang/String;",
2104 ACC_FINAL
|ACC_PRIVATE
|ACC_STATIC
, NULL
},
2106 {NULL
, NULL
, NULL
, NULL
, NULL
, 0, NULL
}};
2108 /* True if this method is special, i.e. it's a private method that
2109 should be exported from a DSO. */
2112 special_method_p (tree candidate_method
)
2114 tree context
= DECL_NAME (TYPE_NAME (DECL_CONTEXT (candidate_method
)));
2115 tree method
= DECL_NAME (candidate_method
);
2118 for (p
= rules
; p
->classname
; p
++)
2120 if (get_identifier (p
->classname
) == context
2121 && get_identifier (p
->method
) == method
)
2127 /* Scan the rules list for replacements for *METHOD_P and replace the
2128 args accordingly. If the rewrite results in an access to a private
2129 method, update SPECIAL.*/
2132 maybe_rewrite_invocation (tree
*method_p
, vec
<tree
, va_gc
> **arg_list_p
,
2133 tree
*method_signature_p
, tree
*special
)
2135 tree context
= DECL_NAME (TYPE_NAME (DECL_CONTEXT (*method_p
)));
2137 *special
= NULL_TREE
;
2139 for (p
= rules
; p
->classname
; p
++)
2141 if (get_identifier (p
->classname
) == context
)
2143 tree method
= DECL_NAME (*method_p
);
2144 if (get_identifier (p
->method
) == method
2145 && get_identifier (p
->signature
) == *method_signature_p
)
2148 tree destination_class
2149 = lookup_class (get_identifier (p
->new_classname
));
2150 gcc_assert (destination_class
);
2152 = lookup_java_method (destination_class
,
2154 get_identifier (p
->new_signature
));
2155 if (! maybe_method
&& ! flag_verify_invocations
)
2158 = add_method (destination_class
, p
->flags
,
2159 method
, get_identifier (p
->new_signature
));
2160 DECL_EXTERNAL (maybe_method
) = 1;
2162 *method_p
= maybe_method
;
2163 gcc_assert (*method_p
);
2164 if (p
->rewrite_arglist
)
2165 p
->rewrite_arglist (arg_list_p
);
2166 *method_signature_p
= get_identifier (p
->new_signature
);
2167 *special
= integer_one_node
;
2178 build_known_method_ref (tree method
, tree method_type ATTRIBUTE_UNUSED
,
2179 tree self_type
, tree method_signature ATTRIBUTE_UNUSED
,
2180 vec
<tree
, va_gc
> *arg_list ATTRIBUTE_UNUSED
, tree special
)
2183 if (is_compiled_class (self_type
))
2185 /* With indirect dispatch we have to use indirect calls for all
2186 publicly visible methods or gcc will use PLT indirections
2187 to reach them. We also have to use indirect dispatch for all
2188 external methods. */
2189 if (! flag_indirect_dispatch
2190 || (! DECL_EXTERNAL (method
) && ! TREE_PUBLIC (method
)))
2192 func
= build1 (ADDR_EXPR
, build_pointer_type (TREE_TYPE (method
)),
2198 = build_int_cst (NULL_TREE
,
2199 (get_symbol_table_index
2201 &TYPE_ATABLE_METHODS (output_class
))));
2203 = build4 (ARRAY_REF
,
2204 TREE_TYPE (TREE_TYPE (TYPE_ATABLE_DECL (output_class
))),
2205 TYPE_ATABLE_DECL (output_class
), table_index
,
2206 NULL_TREE
, NULL_TREE
);
2208 func
= convert (method_ptr_type_node
, func
);
2212 /* We don't know whether the method has been (statically) compiled.
2213 Compile this code to get a reference to the method's code:
2215 SELF_TYPE->methods[METHOD_INDEX].ncode
2219 int method_index
= 0;
2222 /* The method might actually be declared in some superclass, so
2223 we have to use its class context, not the caller's notion of
2224 where the method is. */
2225 self_type
= DECL_CONTEXT (method
);
2226 ref
= build_class_ref (self_type
);
2227 ref
= build1 (INDIRECT_REF
, class_type_node
, ref
);
2228 if (ncode_ident
== NULL_TREE
)
2229 ncode_ident
= get_identifier ("ncode");
2230 if (methods_ident
== NULL_TREE
)
2231 methods_ident
= get_identifier ("methods");
2232 ref
= build3 (COMPONENT_REF
, method_ptr_type_node
, ref
,
2233 lookup_field (&class_type_node
, methods_ident
),
2235 for (meth
= TYPE_METHODS (self_type
);
2236 ; meth
= DECL_CHAIN (meth
))
2240 if (meth
== NULL_TREE
)
2241 fatal_error (input_location
, "method '%s' not found in class",
2242 IDENTIFIER_POINTER (DECL_NAME (method
)));
2245 method_index
*= int_size_in_bytes (method_type_node
);
2246 ref
= fold_build_pointer_plus_hwi (ref
, method_index
);
2247 ref
= build1 (INDIRECT_REF
, method_type_node
, ref
);
2248 func
= build3 (COMPONENT_REF
, nativecode_ptr_type_node
,
2249 ref
, lookup_field (&method_type_node
, ncode_ident
),
2256 invoke_build_dtable (int is_invoke_interface
, vec
<tree
, va_gc
> *arg_list
)
2258 tree dtable
, objectref
;
2259 tree saved
= save_expr ((*arg_list
)[0]);
2261 (*arg_list
)[0] = saved
;
2263 /* If we're dealing with interfaces and if the objectref
2264 argument is an array then get the dispatch table of the class
2265 Object rather than the one from the objectref. */
2266 objectref
= (is_invoke_interface
2267 && is_array_type_p (TREE_TYPE (saved
))
2268 ? build_class_ref (object_type_node
) : saved
);
2270 if (dtable_ident
== NULL_TREE
)
2271 dtable_ident
= get_identifier ("vtable");
2272 dtable
= build_java_indirect_ref (object_type_node
, objectref
,
2273 flag_check_references
);
2274 dtable
= build3 (COMPONENT_REF
, dtable_ptr_type
, dtable
,
2275 lookup_field (&object_type_node
, dtable_ident
), NULL_TREE
);
/* get_symbol_table_index: look up (T, SPECIAL) in *SYMBOL_TABLE and
   return its slot, appending a new method_entry when not present.
   NOTE(review): garbled extraction -- the index bookkeeping, the early
   return inside the loop, and the final return are on missing lines;
   callers visible below (build_invokevirtual/build_invokeinterface)
   use the result as a table index, presumably 1-based -- confirm
   against the full source.  */
2280 /* Determine the index in SYMBOL_TABLE for a reference to the decl
2281 T. If this decl has not been seen before, it will be added to the
2282 [oa]table_methods. If it has, the existing table slot will be
2286 get_symbol_table_index (tree t
, tree special
,
2287 vec
<method_entry
, va_gc
> **symbol_table
)
2291 method_entry elem
= {t
, special
};
/* Linear scan for an existing entry with the same method and the same
   "special" marker.  */
2293 FOR_EACH_VEC_SAFE_ELT (*symbol_table
, i
, e
)
2294 if (t
== e
->method
&& special
== e
->special
)
/* Not found: append a fresh entry.  */
2297 vec_safe_push (*symbol_table
, elem
);
/* build_invokevirtual: given the receiver's vtable pointer DTABLE and
   the target METHOD, build a tree that fetches the native-code entry
   point for a virtual call.  Under -findirect-dispatch the method
   index is read from the otable at runtime; otherwise DECL_VINDEX is
   used directly.  NOTE(review): garbled extraction -- some lines
   (including the final return of FUNC) are missing; kept byte-for-byte.  */
2304 build_invokevirtual (tree dtable
, tree method
, tree special
)
2307 tree nativecode_ptr_ptr_type_node
2308 = build_pointer_type (nativecode_ptr_type_node
);
2312 if (flag_indirect_dispatch
)
/* Indirect dispatch never applies to interface methods.  */
2314 gcc_assert (! CLASS_INTERFACE (TYPE_NAME (DECL_CONTEXT (method
))));
2317 = build_int_cst (NULL_TREE
, get_symbol_table_index
2319 &TYPE_OTABLE_METHODS (output_class
)));
/* Load the runtime-resolved vtable offset out of the otable.  */
2320 method_index
= build4 (ARRAY_REF
, integer_type_node
,
2321 TYPE_OTABLE_DECL (output_class
),
2322 otable_index
, NULL_TREE
, NULL_TREE
);
2326 /* We fetch the DECL_VINDEX field directly here, rather than
2327 using get_method_index(). DECL_VINDEX is the true offset
2328 from the vtable base to a method, regardless of any extra
2329 words inserted at the start of the vtable. */
2330 method_index
= DECL_VINDEX (method
);
/* Scale the slot number into a byte offset.  */
2331 method_index
= size_binop (MULT_EXPR
, method_index
,
2332 TYPE_SIZE_UNIT (nativecode_ptr_ptr_type_node
));
2333 if (TARGET_VTABLE_USES_DESCRIPTORS
)
2334 method_index
= size_binop (MULT_EXPR
, method_index
,
2335 size_int (TARGET_VTABLE_USES_DESCRIPTORS
));
2338 func
= fold_build_pointer_plus (dtable
, method_index
);
2340 if (TARGET_VTABLE_USES_DESCRIPTORS
)
2341 func
= build1 (NOP_EXPR
, nativecode_ptr_type_node
, func
);
/* Otherwise dereference the vtable slot to get the code pointer.  */
2344 func
= fold_convert (nativecode_ptr_ptr_type_node
, func
);
2345 func
= build1 (INDIRECT_REF
, nativecode_ptr_type_node
, func
);
/* Cached identifier for the vtable's "class" field; interned lazily
   below and preserved across GC by GTY.  */
2351 static GTY(()) tree class_ident
;
/* build_invokeinterface: expand an invokeinterface call.  DTABLE is
   the receiver's vtable pointer; the class is read out of it and the
   target entry point located either through the itable
   (-findirect-dispatch) or through the runtime helper
   _Jv_LookupInterfaceMethodIdx.  NOTE(review): garbled extraction --
   local declarations and some intervening lines are missing; kept
   byte-for-byte.  */
2353 build_invokeinterface (tree dtable
, tree method
)
2358 /* We expand invokeinterface here. */
2360 if (class_ident
== NULL_TREE
)
2361 class_ident
= get_identifier ("class");
/* Fetch the Class object out of the receiver's vtable.  */
2363 dtable
= build_java_indirect_ref (dtable_type
, dtable
,
2364 flag_check_references
);
2365 dtable
= build3 (COMPONENT_REF
, class_ptr_type
, dtable
,
2366 lookup_field (&dtable_type
, class_ident
), NULL_TREE
);
2368 interface
= DECL_CONTEXT (method
);
2369 gcc_assert (CLASS_INTERFACE (TYPE_NAME (interface
)));
2370 layout_class_methods (interface
);
2372 if (flag_indirect_dispatch
)
/* itable entries come in (interface, index) pairs, hence the factor
   of two; itable_index-1 / itable_index address the pair members.  */
2375 = 2 * (get_symbol_table_index
2376 (method
, NULL_TREE
, &TYPE_ITABLE_METHODS (output_class
)));
2378 = build4 (ARRAY_REF
,
2379 TREE_TYPE (TREE_TYPE (TYPE_ITABLE_DECL (output_class
))),
2380 TYPE_ITABLE_DECL (output_class
),
2381 build_int_cst (NULL_TREE
, itable_index
-1),
2382 NULL_TREE
, NULL_TREE
);
2384 = build4 (ARRAY_REF
,
2385 TREE_TYPE (TREE_TYPE (TYPE_ITABLE_DECL (output_class
))),
2386 TYPE_ITABLE_DECL (output_class
),
2387 build_int_cst (NULL_TREE
, itable_index
),
2388 NULL_TREE
, NULL_TREE
);
2389 interface
= convert (class_ptr_type
, interface
);
2390 idx
= convert (integer_type_node
, idx
);
/* Non-indirect case: compute the method's index in the interface at
   compile time and reference the interface's Class object.  */
2394 idx
= build_int_cst (NULL_TREE
,
2395 get_interface_method_index (method
, interface
));
2396 interface
= build_class_ref (interface
);
/* Call the runtime lookup helper with (dtable, interface, idx).  */
2399 return build_call_nary (ptr_type_node
,
2400 build_address_of (soft_lookupinterfacemethod_node
),
2401 3, dtable
, interface
, idx
);
/* expand_invoke: translate one of the JVM invoke* bytecodes into a
   GENERIC call expression, popping arguments off the quick stack and
   pushing any non-void result.  NOTE(review): garbled extraction --
   many lines are missing (braces, else branches, some statements) and
   "¤" below appears to be mojibake for "&curr" (i.e. "&current_jcf");
   the fragment is kept byte-for-byte as found.  */
2404 /* Expand one of the invoke_* opcodes.
2405 OPCODE is the specific opcode.
2406 METHOD_REF_INDEX is an index into the constant pool.
2407 NARGS is the number of arguments, or -1 if not specified. */
2410 expand_invoke (int opcode
, int method_ref_index
, int nargs ATTRIBUTE_UNUSED
)
/* Pull the method's name/signature/class out of the constant pool.  */
2412 tree method_signature
2413 = COMPONENT_REF_SIGNATURE(¤t_jcf
->cpool
, method_ref_index
);
2414 tree method_name
= COMPONENT_REF_NAME (¤t_jcf
->cpool
,
2417 = get_class_constant (current_jcf
,
2418 COMPONENT_REF_CLASS_INDEX(¤t_jcf
->cpool
,
2420 const char *const self_name
2421 = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (self_type
)));
2422 tree call
, func
, method
, method_type
;
2423 vec
<tree
, va_gc
> *arg_list
;
2424 tree check
= NULL_TREE
;
2426 tree special
= NULL_TREE
;
/* Make sure the target class is loaded and laid out before lookup.  */
2428 if (! CLASS_LOADED_P (self_type
))
2430 load_class (self_type
, 1);
2431 safe_layout_class (self_type
);
2432 if (TREE_CODE (TYPE_SIZE (self_type
)) == ERROR_MARK
)
2433 fatal_error (input_location
, "failed to find class '%s'", self_name
);
2435 layout_class_methods (self_type
);
/* Constructors are looked up by signature only; other methods by
   name and signature.  */
2437 if (ID_INIT_P (method_name
))
2438 method
= lookup_java_constructor (self_type
, method_signature
);
2440 method
= lookup_java_method (self_type
, method_name
, method_signature
);
2442 /* We've found a method in a class other than the one in which it
2443 was wanted. This can happen if, for instance, we're trying to
2444 compile invokespecial super.equals().
2445 FIXME: This is a kludge. Rather than nullifying the result, we
2446 should change lookup_java_method() so that it doesn't search the
2447 superclass chain when we're BC-compiling. */
2448 if (! flag_verify_invocations
2450 && ! TYPE_ARRAY_P (self_type
)
2451 && self_type
!= DECL_CONTEXT (method
))
2454 /* We've found a method in an interface, but this isn't an interface
2456 if (opcode
!= OPCODE_invokeinterface
2458 && (CLASS_INTERFACE (TYPE_NAME (DECL_CONTEXT (method
)))))
2461 /* We've found a non-interface method but we are making an
2462 interface call. This can happen if the interface overrides a
2463 method in Object. */
2464 if (! flag_verify_invocations
2465 && opcode
== OPCODE_invokeinterface
2467 && ! CLASS_INTERFACE (TYPE_NAME (DECL_CONTEXT (method
))))
/* Lookup failed: either report an error, or (when BC-compiling
   without verification) fabricate a dummy method decl so code
   generation can proceed.  */
2470 if (method
== NULL_TREE
)
2472 if (flag_verify_invocations
|| ! flag_indirect_dispatch
)
2474 error ("class '%s' has no method named '%s' matching signature '%s'",
2476 IDENTIFIER_POINTER (method_name
),
2477 IDENTIFIER_POINTER (method_signature
));
2481 int flags
= ACC_PUBLIC
;
2482 if (opcode
== OPCODE_invokestatic
)
2483 flags
|= ACC_STATIC
;
2484 if (opcode
== OPCODE_invokeinterface
)
2486 flags
|= ACC_INTERFACE
| ACC_ABSTRACT
;
2487 CLASS_INTERFACE (TYPE_NAME (self_type
)) = 1;
2489 method
= add_method (self_type
, flags
, method_name
,
2491 DECL_ARTIFICIAL (method
) = 1;
2492 METHOD_DUMMY (method
) = 1;
2493 layout_class_method (self_type
, NULL
,
2498 /* Invoke static can't invoke static/abstract method */
2499 if (method
!= NULL_TREE
)
2501 if (opcode
== OPCODE_invokestatic
)
2503 if (!METHOD_STATIC (method
))
2505 error ("invokestatic on non static method");
2508 else if (METHOD_ABSTRACT (method
))
2510 error ("invokestatic on abstract method");
2516 if (METHOD_STATIC (method
))
2518 error ("invoke[non-static] on static method");
/* After an error, keep the operand stack consistent: pop the
   arguments (and receiver), push a typed zero, and bail out.  */
2524 if (method
== NULL_TREE
)
2526 /* If we got here, we emitted an error message above. So we
2527 just pop the arguments, push a properly-typed zero, and
2529 method_type
= get_type_from_signature (method_signature
);
2530 pop_arguments (method_type
);
2531 if (opcode
!= OPCODE_invokestatic
)
2532 pop_type (self_type
);
2533 method_type
= promote_type (TREE_TYPE (method_type
));
2534 push_value (convert (method_type
, integer_zero_node
));
2538 arg_list
= pop_arguments (TREE_TYPE (method
));
2539 flush_quick_stack ();
2541 maybe_rewrite_invocation (&method
, &arg_list
, &method_signature
,
2543 method_type
= TREE_TYPE (method
);
/* Choose the dispatch strategy: direct call for static, special and
   effectively-final virtual calls; vtable/itable dispatch otherwise.  */
2546 if (opcode
== OPCODE_invokestatic
)
2547 func
= build_known_method_ref (method
, method_type
, self_type
,
2548 method_signature
, arg_list
, special
);
2549 else if (opcode
== OPCODE_invokespecial
2550 || (opcode
== OPCODE_invokevirtual
2551 && (METHOD_PRIVATE (method
)
2552 || METHOD_FINAL (method
)
2553 || CLASS_FINAL (TYPE_NAME (self_type
)))))
2555 /* If the object for the method call is null, we throw an
2556 exception. We don't do this if the object is the current
2557 method's `this'. In other cases we just rely on an
2558 optimization pass to eliminate redundant checks. FIXME:
2559 Unfortunately there doesn't seem to be a way to determine
2560 what the current method is right now.
2561 We do omit the check if we're calling <init>. */
2562 /* We use a SAVE_EXPR here to make sure we only evaluate
2563 the new `self' expression once. */
2564 tree save_arg
= save_expr ((*arg_list
)[0]);
2565 (*arg_list
)[0] = save_arg
;
2566 check
= java_check_reference (save_arg
, ! DECL_INIT_P (method
));
2567 func
= build_known_method_ref (method
, method_type
, self_type
,
2568 method_signature
, arg_list
, special
);
2572 tree dtable
= invoke_build_dtable (opcode
== OPCODE_invokeinterface
,
2574 if (opcode
== OPCODE_invokevirtual
)
2575 func
= build_invokevirtual (dtable
, method
, special
);
2577 func
= build_invokeinterface (dtable
, method
);
2580 if (TREE_CODE (func
) == ADDR_EXPR
)
2581 TREE_TYPE (func
) = build_pointer_type (method_type
);
2583 func
= build1 (NOP_EXPR
, build_pointer_type (method_type
), func
);
/* Build the call; mark it side-effecting so it is never dropped.  */
2585 call
= build_call_vec (TREE_TYPE (method_type
), func
, arg_list
);
2586 TREE_SIDE_EFFECTS (call
) = 1;
2587 call
= check_for_builtin (method
, call
);
/* Sequence the null check (if any) before the call itself.  */
2589 if (check
!= NULL_TREE
)
2591 call
= build2 (COMPOUND_EXPR
, TREE_TYPE (call
), check
, call
);
2592 TREE_SIDE_EFFECTS (call
) = 1;
2595 if (TREE_CODE (TREE_TYPE (method_type
)) == VOID_TYPE
)
2596 java_add_stmt (call
);
2600 flush_quick_stack ();
/* build_jni_stub: synthesize the body of a native METHOD as a stub
   that sets up a JNI frame, lazily resolves the target JNI function
   through _Jv_LookupJNIMethod (caching it in a static "meth"
   variable), makes the call, unwraps weak-reference object results,
   pops the JNI frame, and returns the (possibly promoted) result.
   NOTE(review): garbled extraction -- many lines are missing
   (declarations of args_size/method_args/jniarg0/bind, call argument
   lists, braces, final return); kept byte-for-byte as found.  */
2604 /* Create a stub which will be put into the vtable but which will call
2608 build_jni_stub (tree method
)
2610 tree jnifunc
, call
, body
, method_sig
, arg_types
;
2611 tree jniarg0
, jniarg1
, jniarg2
, jniarg3
;
2612 tree jni_func_type
, tem
;
2613 tree env_var
, res_var
= NULL_TREE
, block
;
2617 vec
<tree
, va_gc
> *args
= NULL
;
2620 tree klass
= DECL_CONTEXT (method
);
2621 klass
= build_class_ref (klass
);
2623 gcc_assert (METHOD_NATIVE (method
) && flag_jni
);
/* This method body is compiler-generated and defined here.  */
2625 DECL_ARTIFICIAL (method
) = 1;
2626 DECL_EXTERNAL (method
) = 0;
/* Local holding the JNIEnv* for this frame.  */
2628 env_var
= build_decl (input_location
,
2629 VAR_DECL
, get_identifier ("env"), ptr_type_node
);
2630 DECL_CONTEXT (env_var
) = method
;
/* For non-void methods, a local to capture the JNI call's result.  */
2632 if (TREE_TYPE (TREE_TYPE (method
)) != void_type_node
)
2634 res_var
= build_decl (input_location
, VAR_DECL
, get_identifier ("res"),
2635 TREE_TYPE (TREE_TYPE (method
)));
2636 DECL_CONTEXT (res_var
) = method
;
2637 DECL_CHAIN (env_var
) = res_var
;
2640 method_args
= DECL_ARGUMENTS (method
);
2641 block
= build_block (env_var
, NULL_TREE
, method_args
, NULL_TREE
);
2642 TREE_SIDE_EFFECTS (block
) = 1;
2644 /* Compute the local `env' by calling _Jv_GetJNIEnvNewFrame. */
2645 body
= build2 (MODIFY_EXPR
, ptr_type_node
, env_var
,
2646 build_call_nary (ptr_type_node
,
2647 build_address_of (soft_getjnienvnewframe_node
),
2650 /* The JNIEnv structure is the first argument to the JNI function. */
2651 args_size
+= int_size_in_bytes (TREE_TYPE (env_var
));
2652 vec_safe_push (args
, env_var
);
2654 /* For a static method the second argument is the class. For a
2655 non-static method the second argument is `this'; that is already
2656 available in the argument list. */
2657 if (METHOD_STATIC (method
))
2659 args_size
+= int_size_in_bytes (TREE_TYPE (klass
));
2660 vec_safe_push (args
, klass
);
2663 /* All the arguments to this method become arguments to the
2664 underlying JNI function. If we had to wrap object arguments in a
2665 special way, we would do that here. */
2666 for (tem
= method_args
; tem
!= NULL_TREE
; tem
= DECL_CHAIN (tem
))
2668 int arg_bits
= TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (tem
)));
/* Round each argument up to the target's parameter alignment when
   accumulating the total argument byte count.  */
2669 #ifdef PARM_BOUNDARY
2670 arg_bits
= (((arg_bits
+ PARM_BOUNDARY
- 1) / PARM_BOUNDARY
)
2673 args_size
+= (arg_bits
/ BITS_PER_UNIT
);
2675 vec_safe_push (args
, tem
);
2677 arg_types
= TYPE_ARG_TYPES (TREE_TYPE (method
));
2679 /* Argument types for static methods and the JNIEnv structure.
2680 FIXME: Write and use build_function_type_vec to avoid this. */
2681 if (METHOD_STATIC (method
))
2682 arg_types
= tree_cons (NULL_TREE
, object_ptr_type_node
, arg_types
);
2683 arg_types
= tree_cons (NULL_TREE
, ptr_type_node
, arg_types
);
2685 /* We call _Jv_LookupJNIMethod to find the actual underlying
2686 function pointer. _Jv_LookupJNIMethod will throw the appropriate
2687 exception if this function is not found at runtime. */
2688 method_sig
= build_java_signature (TREE_TYPE (method
));
/* Lookup arguments: method name, mangled signature, argument size.  */
2690 jniarg1
= build_utf8_ref (DECL_NAME (method
));
2691 jniarg2
= build_utf8_ref (unmangle_classname
2692 (IDENTIFIER_POINTER (method_sig
),
2693 IDENTIFIER_LENGTH (method_sig
)));
2694 jniarg3
= build_int_cst (NULL_TREE
, args_size
);
2696 tem
= build_function_type (TREE_TYPE (TREE_TYPE (method
)), arg_types
);
/* Allow the target to adjust the JNI function type (e.g. calling
   convention) via this hook.  */
2698 #ifdef MODIFY_JNI_METHOD_CALL
2699 tem
= MODIFY_JNI_METHOD_CALL (tem
);
2702 jni_func_type
= build_pointer_type (tem
);
2704 /* Use the actual function type, rather than a generic pointer type,
2705 such that this decl keeps the actual pointer type from being
2706 garbage-collected. If it is, we end up using canonical types
2707 with different uids for equivalent function types, and this in
2708 turn causes utf8 identifiers and output order to vary. */
2709 meth_var
= build_decl (input_location
,
2710 VAR_DECL
, get_identifier ("meth"), jni_func_type
);
2711 TREE_STATIC (meth_var
) = 1;
2712 TREE_PUBLIC (meth_var
) = 0;
2713 DECL_EXTERNAL (meth_var
) = 0;
2714 DECL_CONTEXT (meth_var
) = method
;
2715 DECL_ARTIFICIAL (meth_var
) = 1;
2716 DECL_INITIAL (meth_var
) = null_pointer_node
;
2717 TREE_USED (meth_var
) = 1;
2718 chainon (env_var
, meth_var
);
2719 build_result_decl (method
);
/* jnifunc = (meth != NULL) ? meth : (meth = lookup(...)) -- a lazy,
   cached resolution of the native entry point.  */
2721 jnifunc
= build3 (COND_EXPR
, jni_func_type
,
2722 build2 (NE_EXPR
, boolean_type_node
,
2723 meth_var
, build_int_cst (TREE_TYPE (meth_var
), 0)),
2725 build2 (MODIFY_EXPR
, jni_func_type
, meth_var
,
2727 (NOP_EXPR
, jni_func_type
,
2728 build_call_nary (ptr_type_node
,
2730 (soft_lookupjnimethod_node
),
2733 jniarg2
, jniarg3
))));
2735 /* Now we make the actual JNI call via the resulting function
2737 call
= build_call_vec (TREE_TYPE (TREE_TYPE (method
)), jnifunc
, args
);
2739 /* If the JNI call returned a result, capture it here. If we had to
2740 unwrap JNI object results, we would do that here. */
2741 if (res_var
!= NULL_TREE
)
2743 /* If the call returns an object, it may return a JNI weak
2744 reference, in which case we must unwrap it. */
2745 if (! JPRIMITIVE_TYPE_P (TREE_TYPE (TREE_TYPE (method
))))
2746 call
= build_call_nary (TREE_TYPE (TREE_TYPE (method
)),
2747 build_address_of (soft_unwrapjni_node
),
2749 call
= build2 (MODIFY_EXPR
, TREE_TYPE (TREE_TYPE (method
)),
2753 TREE_SIDE_EFFECTS (call
) = 1;
2755 body
= build2 (COMPOUND_EXPR
, void_type_node
, body
, call
);
2756 TREE_SIDE_EFFECTS (body
) = 1;
2758 /* Now free the environment we allocated. */
2759 call
= build_call_nary (ptr_type_node
,
2760 build_address_of (soft_jnipopsystemframe_node
),
2762 TREE_SIDE_EFFECTS (call
) = 1;
2763 body
= build2 (COMPOUND_EXPR
, void_type_node
, body
, call
);
2764 TREE_SIDE_EFFECTS (body
) = 1;
2766 /* Finally, do the return. */
2767 if (res_var
!= NULL_TREE
)
2770 gcc_assert (DECL_RESULT (method
));
2771 /* Make sure we copy the result variable to the actual
2772 result. We use the type of the DECL_RESULT because it
2773 might be different from the return type of the function:
2774 it might be promoted. */
2775 drt
= TREE_TYPE (DECL_RESULT (method
));
2776 if (drt
!= TREE_TYPE (res_var
))
2777 res_var
= build1 (CONVERT_EXPR
, drt
, res_var
);
2778 res_var
= build2 (MODIFY_EXPR
, drt
, DECL_RESULT (method
), res_var
);
2779 TREE_SIDE_EFFECTS (res_var
) = 1;
2782 body
= build2 (COMPOUND_EXPR
, void_type_node
, body
,
2783 build1 (RETURN_EXPR
, void_type_node
, res_var
));
2784 TREE_SIDE_EFFECTS (body
) = 1;
2786 /* Prepend class initialization for static methods reachable from
2788 if (METHOD_STATIC (method
)
2789 && (! METHOD_PRIVATE (method
)
2790 || INNER_CLASS_P (DECL_CONTEXT (method
))))
2792 tree init
= build_call_expr (soft_initclass_node
, 1,
2794 body
= build2 (COMPOUND_EXPR
, void_type_node
, init
, body
);
2795 TREE_SIDE_EFFECTS (body
) = 1;
/* Wrap everything in a BIND_EXPR over the block's variables.  */
2798 bind
= build3 (BIND_EXPR
, void_type_node
, BLOCK_VARS (block
),
/* java_modify_addr_for_volatile: rebuild lvalue EXP as a dereference
   of its address cast to a pointer-to-volatile-qualified type, so that
   accesses through the result are treated as volatile.
   NOTE(review): garbled extraction -- the declaration line for v_type
   and the final return are missing; kept byte-for-byte.  */
2804 /* Given lvalue EXP, return a volatile expression that references the
2808 java_modify_addr_for_volatile (tree exp
)
2810 tree exp_type
= TREE_TYPE (exp
);
/* Add TYPE_QUAL_VOLATILE to the existing qualifiers of EXP's type.  */
2812 = build_qualified_type (exp_type
,
2813 TYPE_QUALS (exp_type
) | TYPE_QUAL_VOLATILE
);
2814 tree addr
= build_fold_addr_expr (exp
);
/* Take the address, convert it to volatile-T*, and dereference.  */
2815 v_type
= build_pointer_type (v_type
);
2816 addr
= fold_convert (v_type
, addr
);
2817 exp
= build_fold_indirect_ref (addr
);
/* expand_java_field_op: expand getfield/putfield/getstatic/putstatic.
   Looks up the field in the constant pool, emits the access (with
   class initialization and volatile barriers where needed), and keeps
   the operand stack consistent on error.  NOTE(review): garbled
   extraction -- many lines are missing (declarations of field_decl/
   field_ref/modify_expr, braces, some conditions) and "¤" appears
   to be mojibake for "&curr" ("&current_jcf"); kept byte-for-byte.  */
2822 /* Expand an operation to extract from or store into a field.
2823 IS_STATIC is 1 iff the field is static.
2824 IS_PUTTING is 1 for putting into a field; 0 for getting from the field.
2825 FIELD_REF_INDEX is an index into the constant pool. */
2828 expand_java_field_op (int is_static
, int is_putting
, int field_ref_index
)
/* Resolve the declaring class and the field's name/signature/type
   from the constant pool; for a put, pop the value to store now.  */
2831 = get_class_constant (current_jcf
,
2832 COMPONENT_REF_CLASS_INDEX (¤t_jcf
->cpool
,
2834 const char *self_name
2835 = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (self_type
)));
2836 tree field_name
= COMPONENT_REF_NAME (¤t_jcf
->cpool
, field_ref_index
);
2837 tree field_signature
= COMPONENT_REF_SIGNATURE (¤t_jcf
->cpool
,
2839 tree field_type
= get_type_from_signature (field_signature
);
2840 tree new_value
= is_putting
? pop_value (field_type
) : NULL_TREE
;
2843 tree original_self_type
= self_type
;
2847 if (! CLASS_LOADED_P (self_type
))
2848 load_class (self_type
, 1);
2849 field_decl
= lookup_field (&self_type
, field_name
);
2850 if (field_decl
== error_mark_node
)
2854 else if (field_decl
== NULL_TREE
)
/* Field not found: when BC-compiling without verification,
   fabricate a dummy field decl so compilation can continue.  */
2856 if (! flag_verify_invocations
)
2858 int flags
= ACC_PUBLIC
;
2860 flags
|= ACC_STATIC
;
2861 self_type
= original_self_type
;
2862 field_decl
= add_field (original_self_type
, field_name
,
2864 DECL_ARTIFICIAL (field_decl
) = 1;
2865 DECL_IGNORED_P (field_decl
) = 1;
2867 /* FIXME: We should be pessimistic about volatility. We
2868 don't know one way or another, but this is safe.
2869 However, doing this has bad effects on code quality. We
2870 need to look at better ways to do this. */
2871 TREE_THIS_VOLATILE (field_decl
) = 1;
2876 error ("missing field '%s' in '%s'",
2877 IDENTIFIER_POINTER (field_name
), self_name
);
2881 else if (build_java_signature (TREE_TYPE (field_decl
)) != field_signature
)
2883 error ("mismatching signature for field '%s' in '%s'",
2884 IDENTIFIER_POINTER (field_name
), self_name
);
/* Instance accesses pop the object reference; static ones do not.  */
2887 field_ref
= is_static
? NULL_TREE
: pop_value (self_type
);
2891 push_value (convert (field_type
, integer_zero_node
));
2892 flush_quick_stack ();
2896 field_ref
= build_field_ref (field_ref
, self_type
, field_name
);
/* Ensure the declaring class is initialized before a static access
   (unless indirect dispatch handles that at runtime).  */
2898 && ! flag_indirect_dispatch
)
2900 tree context
= DECL_CONTEXT (field_ref
);
2901 if (context
!= self_type
&& CLASS_INTERFACE (TYPE_NAME (context
)))
2902 field_ref
= build_class_init (context
, field_ref
);
2904 field_ref
= build_class_init (self_type
, field_ref
);
/* Put path: check final-field assignment, add volatile handling,
   and emit the store followed by a synchronization barrier.  */
2908 flush_quick_stack ();
2909 if (FIELD_FINAL (field_decl
))
2911 if (DECL_CONTEXT (field_decl
) != current_class
)
2912 error ("assignment to final field %q+D not in field%'s class",
2914 /* We used to check for assignments to final fields not
2915 occurring in the class initializer or in a constructor
2916 here. However, this constraint doesn't seem to be
2917 enforced by the JVM. */
2920 if (TREE_THIS_VOLATILE (field_decl
))
2921 field_ref
= java_modify_addr_for_volatile (field_ref
);
2923 modify_expr
= build2 (MODIFY_EXPR
, TREE_TYPE (field_ref
),
2924 field_ref
, new_value
);
2926 if (TREE_THIS_VOLATILE (field_decl
))
2928 tree sync
= builtin_decl_explicit (BUILT_IN_SYNC_SYNCHRONIZE
);
2929 java_add_stmt (build_call_expr (sync
, 0));
2932 java_add_stmt (modify_expr
);
/* Get path: read the field into a temporary (with volatile handling
   and a barrier when the field is volatile).  */
2936 tree temp
= build_decl (input_location
,
2937 VAR_DECL
, NULL_TREE
, TREE_TYPE (field_ref
));
2938 java_add_local_var (temp
);
2940 if (TREE_THIS_VOLATILE (field_decl
))
2941 field_ref
= java_modify_addr_for_volatile (field_ref
);
2944 = build2 (MODIFY_EXPR
, TREE_TYPE (field_ref
), temp
, field_ref
);
2945 java_add_stmt (modify_expr
);
2947 if (TREE_THIS_VOLATILE (field_decl
))
2949 tree sync
= builtin_decl_explicit (BUILT_IN_SYNC_SYNCHRONIZE
);
2950 java_add_stmt (build_call_expr (sync
, 0));
2955 TREE_THIS_VOLATILE (field_ref
) = TREE_THIS_VOLATILE (field_decl
);
/* load_type_state: restore the verifier's recorded type state for
   bytecode offset PC into the global type_map, and reset
   stack_pointer accordingly (the saved TREE_VEC holds locals followed
   by stack slots, so the stack depth is the vector length minus the
   method's max locals).  NOTE(review): garbled extraction -- the
   return type, the declaration of i, and braces are missing lines;
   kept byte-for-byte.  */
2959 load_type_state (int pc
)
2962 tree vec
= (*type_states
)[pc
];
2963 int cur_length
= TREE_VEC_LENGTH (vec
);
2964 stack_pointer
= cur_length
- DECL_MAX_LOCALS(current_function_decl
);
2965 for (i
= 0; i
< cur_length
; i
++)
2966 type_map
[i
] = TREE_VEC_ELT (vec
, i
);
/* note_instructions: first pass over METHOD's bytecode.  Marks each
   instruction start in instruction_bits[] and records every branch
   target via NOTE_LABEL so labels can be emitted later.  The PRE_*
   macros below give each opcode kind a "skip operands / note labels
   only" expansion, then javaop.def is included to generate the big
   switch.  NOTE(review): garbled extraction -- several lines
   (declarations of PC/saw_index/INT_temp, braces, parts of macro
   bodies) are missing; kept byte-for-byte as found.  */
2969 /* Go over METHOD's bytecode and note instruction starts in
2970 instruction_bits[]. */
2973 note_instructions (JCF
*jcf
, tree method
)
2976 unsigned char* byte_ops
;
2977 long length
= DECL_CODE_LENGTH (method
);
2982 #undef RET /* Defined by config/i386/i386.h */
/* Type-node aliases used by the operand macros in javaop.def.  */
2984 #define BCODE byte_ops
2985 #define BYTE_type_node byte_type_node
2986 #define SHORT_type_node short_type_node
2987 #define INT_type_node int_type_node
2988 #define LONG_type_node long_type_node
2989 #define CHAR_type_node char_type_node
2990 #define PTR_type_node ptr_type_node
2991 #define FLOAT_type_node float_type_node
2992 #define DOUBLE_type_node double_type_node
2993 #define VOID_type_node void_type_node
2994 #define CONST_INDEX_1 (saw_index = 1, IMMEDIATE_u1)
2995 #define CONST_INDEX_2 (saw_index = 1, IMMEDIATE_u2)
2996 #define VAR_INDEX_1 (saw_index = 1, IMMEDIATE_u1)
2997 #define VAR_INDEX_2 (saw_index = 1, IMMEDIATE_u2)
2999 #define CHECK_PC_IN_RANGE(PC) ((void)1) /* Already handled by verifier. */
/* Read the method's bytecode and size the per-PC bookkeeping arrays.  */
3001 JCF_SEEK (jcf
, DECL_CODE_OFFSET (method
));
3002 byte_ops
= jcf
->read_ptr
;
3003 instruction_bits
= XRESIZEVAR (char, instruction_bits
, length
+ 1);
3004 memset (instruction_bits
, 0, length
+ 1);
3005 vec_alloc (type_states
, length
+ 1);
3006 type_states
->quick_grow_cleared (length
+ 1);
3008 /* This pass figures out which PC can be the targets of jumps. */
3009 for (PC
= 0; PC
< length
;)
3011 int oldpc
= PC
; /* PC at instruction start. */
3012 instruction_bits
[PC
] |= BCODE_INSTRUCTION_START
;
3013 switch (byte_ops
[PC
++])
3015 #define JAVAOP(OPNAME, OPCODE, OPKIND, OPERAND_TYPE, OPERAND_VALUE) \
3017 PRE_##OPKIND(OPERAND_TYPE, OPERAND_VALUE); \
3020 #define NOTE_LABEL(PC) note_label(oldpc, PC)
/* For this pass, most opcode kinds only need to consume their
   immediate operands; no code is generated.  */
3022 #define PRE_PUSHC(OPERAND_TYPE, OPERAND_VALUE) (void)(OPERAND_VALUE);
3023 #define PRE_LOAD(OPERAND_TYPE, OPERAND_VALUE) (void)(OPERAND_VALUE);
3024 #define PRE_STORE(OPERAND_TYPE, OPERAND_VALUE) (void)(OPERAND_VALUE);
3025 #define PRE_STACK(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
3026 #define PRE_UNOP(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
3027 #define PRE_BINOP(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
3028 #define PRE_CONVERT(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
3029 #define PRE_CONVERT2(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
3031 #define PRE_SPECIAL(OPERAND_TYPE, INSTRUCTION) \
3032 PRE_SPECIAL_##INSTRUCTION(OPERAND_TYPE)
3033 #define PRE_SPECIAL_IINC(OPERAND_TYPE) \
3034 ((void) IMMEDIATE_u1, (void) IMMEDIATE_s1)
3035 #define PRE_SPECIAL_ENTER(IGNORE) /* nothing */
3036 #define PRE_SPECIAL_EXIT(IGNORE) /* nothing */
3037 #define PRE_SPECIAL_THROW(IGNORE) /* nothing */
3038 #define PRE_SPECIAL_BREAK(IGNORE) /* nothing */
3040 /* two forms of wide instructions */
3041 #define PRE_SPECIAL_WIDE(IGNORE) \
3043 int modified_opcode = IMMEDIATE_u1; \
3044 if (modified_opcode == OPCODE_iinc) \
3046 (void) IMMEDIATE_u2; /* indexbyte1 and indexbyte2 */ \
3047 (void) IMMEDIATE_s2; /* constbyte1 and constbyte2 */ \
3051 (void) IMMEDIATE_u2; /* indexbyte1 and indexbyte2 */ \
3055 #define PRE_IMPL(IGNORE1, IGNORE2) /* nothing */
3057 #define PRE_MONITOR(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
3059 #define PRE_RETURN(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
3060 #define PRE_ARRAY(OPERAND_TYPE, SUBOP) \
3061 PRE_ARRAY_##SUBOP(OPERAND_TYPE)
3062 #define PRE_ARRAY_LOAD(TYPE) /* nothing */
3063 #define PRE_ARRAY_STORE(TYPE) /* nothing */
3064 #define PRE_ARRAY_LENGTH(TYPE) /* nothing */
3065 #define PRE_ARRAY_NEW(TYPE) PRE_ARRAY_NEW_##TYPE
3066 #define PRE_ARRAY_NEW_NUM ((void) IMMEDIATE_u1)
3067 #define PRE_ARRAY_NEW_PTR ((void) IMMEDIATE_u2)
3068 #define PRE_ARRAY_NEW_MULTI ((void) IMMEDIATE_u2, (void) IMMEDIATE_u1)
/* Branch-like opcodes record their targets via NOTE_LABEL.  */
3070 #define PRE_TEST(OPERAND_TYPE, OPERAND_VALUE) NOTE_LABEL (oldpc+IMMEDIATE_s2)
3071 #define PRE_COND(OPERAND_TYPE, OPERAND_VALUE) NOTE_LABEL (oldpc+IMMEDIATE_s2)
3072 #define PRE_BRANCH(OPERAND_TYPE, OPERAND_VALUE) \
3073 saw_index = 0; INT_temp = (OPERAND_VALUE); \
3074 if (!saw_index) NOTE_LABEL(oldpc + INT_temp);
3075 #define PRE_JSR(OPERAND_TYPE, OPERAND_VALUE) \
3076 saw_index = 0; INT_temp = (OPERAND_VALUE); \
3078 if (!saw_index) NOTE_LABEL(oldpc + INT_temp);
3080 #define PRE_RET(OPERAND_TYPE, OPERAND_VALUE) (void)(OPERAND_VALUE)
/* Switch opcodes: align PC to 4 bytes, then walk the jump table and
   note every case target plus the default.  */
3082 #define PRE_SWITCH(OPERAND_TYPE, TABLE_OR_LOOKUP) \
3083 PC = (PC + 3) / 4 * 4; PRE_##TABLE_OR_LOOKUP##_SWITCH
3085 #define PRE_LOOKUP_SWITCH \
3086 { jint default_offset = IMMEDIATE_s4; jint npairs = IMMEDIATE_s4; \
3087 NOTE_LABEL (default_offset+oldpc); \
3089 while (--npairs >= 0) { \
3090 jint match ATTRIBUTE_UNUSED = IMMEDIATE_s4; \
3091 jint offset = IMMEDIATE_s4; \
3092 NOTE_LABEL (offset+oldpc); } \
3095 #define PRE_TABLE_SWITCH \
3096 { jint default_offset = IMMEDIATE_s4; \
3097 jint low = IMMEDIATE_s4; jint high = IMMEDIATE_s4; \
3098 NOTE_LABEL (default_offset+oldpc); \
3100 while (low++ <= high) { \
3101 jint offset = IMMEDIATE_s4; \
3102 NOTE_LABEL (offset+oldpc); } \
3105 #define PRE_FIELD(MAYBE_STATIC, PUT_OR_GET) (void)(IMMEDIATE_u2);
3106 #define PRE_OBJECT(MAYBE_STATIC, PUT_OR_GET) (void)(IMMEDIATE_u2);
3107 #define PRE_INVOKE(MAYBE_STATIC, IS_INTERFACE) \
3108 (void)(IMMEDIATE_u2); \
3109 PC += 2 * IS_INTERFACE /* for invokeinterface */;
3111 #include "javaop.def"
/* expand_byte_code: main bytecode-to-GENERIC translation loop for
   METHOD.  Pre-scans the line number table, runs the verifier, then
   walks the bytecode emitting labels at branch targets, restoring
   verifier type state, nop-ing unverified (dead) regions, tracking
   source line numbers, and dispatching each instruction through
   process_jvm_instruction.  NOTE(review): garbled extraction -- some
   lines (declarations of i/PC, braces, a few statements) are missing;
   kept byte-for-byte as found.  */
3118 expand_byte_code (JCF
*jcf
, tree method
)
3122 const unsigned char *linenumber_pointer
;
3123 int dead_code_index
= -1;
3124 unsigned char* byte_ops
;
3125 long length
= DECL_CODE_LENGTH (method
);
3126 location_t max_location
= input_location
;
3129 JCF_SEEK (jcf
, DECL_CODE_OFFSET (method
));
3130 byte_ops
= jcf
->read_ptr
;
3132 /* We make an initial pass of the line number table, to note
3133 which instructions have associated line number entries. */
3134 linenumber_pointer
= linenumber_table
;
3135 for (i
= 0; i
< linenumber_count
; i
++)
3137 int pc
= GET_u2 (linenumber_pointer
);
3138 linenumber_pointer
+= 4;
3140 warning (0, "invalid PC in line number table");
/* Remember whether a PC has one or several line-number entries.  */
3143 if ((instruction_bits
[pc
] & BCODE_HAS_LINENUMBER
) != 0)
3144 instruction_bits
[pc
] |= BCODE_HAS_MULTI_LINENUMBERS
;
3145 instruction_bits
[pc
] |= BCODE_HAS_LINENUMBER
;
/* Bail out if the method fails bytecode verification.  */
3149 if (! verify_jvm_instructions_new (jcf
, byte_ops
, length
))
3152 promote_arguments ();
3153 cache_this_class_ref (method
);
3154 cache_cpool_data_ref ();
3156 /* Translate bytecodes. */
3157 linenumber_pointer
= linenumber_table
;
3158 for (PC
= 0; PC
< length
;)
/* Emit a label and reload the verifier's type state at each
   branch target (and at PC 0).  */
3160 if ((instruction_bits
[PC
] & BCODE_TARGET
) != 0 || PC
== 0)
3162 tree label
= lookup_label (PC
);
3163 flush_quick_stack ();
3164 if ((instruction_bits
[PC
] & BCODE_TARGET
) != 0)
3165 java_add_stmt (build1 (LABEL_EXPR
, void_type_node
, label
));
3166 if ((instruction_bits
[PC
] & BCODE_VERIFIED
) != 0)
3167 load_type_state (PC
);
3170 if (! (instruction_bits
[PC
] & BCODE_VERIFIED
))
3172 if (dead_code_index
== -1)
3174 /* This is the start of a region of unreachable bytecodes.
3175 They still need to be processed in order for EH ranges
3176 to get handled correctly. However, we can simply
3177 replace these bytecodes with nops. */
3178 dead_code_index
= PC
;
3181 /* Turn this bytecode into a nop. */
3186 if (dead_code_index
!= -1)
3188 /* We've just reached the end of a region of dead code. */
3190 warning (0, "unreachable bytecode from %d to before %d",
3191 dead_code_index
, PC
);
3192 dead_code_index
= -1;
3196 /* Handle possible line number entry for this PC.
3198 This code handles out-of-order and multiple linenumbers per PC,
3199 but is optimized for the case of line numbers increasing
3200 monotonically with PC. */
3201 if ((instruction_bits
[PC
] & BCODE_HAS_LINENUMBER
) != 0)
3203 if ((instruction_bits
[PC
] & BCODE_HAS_MULTI_LINENUMBERS
) != 0
3204 || GET_u2 (linenumber_pointer
) != PC
)
3205 linenumber_pointer
= linenumber_table
;
3206 while (linenumber_pointer
< linenumber_table
+ linenumber_count
* 4)
3208 int pc
= GET_u2 (linenumber_pointer
);
3209 linenumber_pointer
+= 4;
3212 int line
= GET_u2 (linenumber_pointer
- 2);
3213 input_location
= linemap_line_start (line_table
, line
, 1);
3214 if (input_location
> max_location
)
3215 max_location
= input_location
;
3216 if (!(instruction_bits
[PC
] & BCODE_HAS_MULTI_LINENUMBERS
))
/* Translate one instruction, maintaining scope nesting.  */
3221 maybe_pushlevels (PC
);
3222 PC
= process_jvm_instruction (PC
, byte_ops
, length
);
3223 maybe_poplevels (PC
);
3226 uncache_this_class_ref (method
);
3228 if (dead_code_index
!= -1)
3230 /* We've just reached the end of a region of dead code. */
3232 warning (0, "unreachable bytecode from %d to the end of the method",
3236 DECL_FUNCTION_LAST_LINE (method
) = max_location
;
/* java_push_constant_from_pool: expand ldc/ldc_w/ldc2_w -- push the
   constant-pool entry INDEX of JCF onto the quick stack, handling
   String and Class constants specially and falling back to
   get_constant for the numeric tags.  NOTE(review): garbled
   extraction -- the declarations of c/name, braces, and the final
   push_value are on missing lines; kept byte-for-byte.  */
3240 java_push_constant_from_pool (JCF
*jcf
, int index
)
/* String constants become references into this unit's constant data.  */
3243 if (JPOOL_TAG (jcf
, index
) == CONSTANT_String
)
3246 name
= get_name_constant (jcf
, JPOOL_USHORT1 (jcf
, index
));
3247 index
= alloc_name_constant (CONSTANT_String
, name
);
3248 c
= build_ref_from_constant_pool (index
);
3249 c
= convert (promote_type (string_type_node
), c
);
/* Class constants become a reference to the Class object.  */
3251 else if (JPOOL_TAG (jcf
, index
) == CONSTANT_Class
3252 || JPOOL_TAG (jcf
, index
) == CONSTANT_ResolvedClass
)
3254 tree record
= get_class_constant (jcf
, index
);
3255 c
= build_class_ref (record
);
3258 c
= get_constant (jcf
, index
);
3263 process_jvm_instruction (int PC
, const unsigned char* byte_ops
,
3264 long length ATTRIBUTE_UNUSED
)
3266 const char *opname
; /* Temporary ??? */
3267 int oldpc
= PC
; /* PC at instruction start. */
3269 /* If the instruction is at the beginning of an exception handler,
3270 replace the top of the stack with the thrown object reference. */
3271 if (instruction_bits
[PC
] & BCODE_EXCEPTION_TARGET
)
3273 /* Note that the verifier will not emit a type map at all for
3274 dead exception handlers. In this case we just ignore the
3276 if ((instruction_bits
[PC
] & BCODE_VERIFIED
) != 0)
3278 tree type
= pop_type (promote_type (throwable_type_node
));
3279 push_value (build_exception_object_ref (type
));
3283 switch (byte_ops
[PC
++])
/* Opcode-expansion macros for process_jvm_instruction: "javaop.def" is
   included further down with these definitions in place, so each JAVAOP
   entry expands to the code translating one JVM bytecode into GENERIC
   trees.  NOTE(review): this region was mangled in extraction -- several
   macro continuation lines, braces, and preprocessor guards appear to be
   missing; verify against the pristine gcc/java/expr.c.  */
3285 #define JAVAOP(OPNAME, OPCODE, OPKIND, OPERAND_TYPE, OPERAND_VALUE) \
3288 OPKIND(OPERAND_TYPE, OPERAND_VALUE); \
3291 #define RET(OPERAND_TYPE, OPERAND_VALUE) \
3293 int saw_index = 0; \
3294 int index = OPERAND_VALUE; \
3295 (void) saw_index; /* Avoid set but not used warning. */ \
3297 (find_local_variable (index, return_address_type_node, oldpc)); \
3300 #define JSR(OPERAND_TYPE, OPERAND_VALUE) \
3302 /* OPERAND_VALUE may have side-effects on PC */ \
3303 int opvalue = OPERAND_VALUE; \
3304 build_java_jsr (oldpc + opvalue, PC); \
3307 /* Push a constant onto the stack. */
3308 #define PUSHC(OPERAND_TYPE, OPERAND_VALUE) \
3309 { int saw_index = 0; int ival = (OPERAND_VALUE); \
3310 if (saw_index) java_push_constant_from_pool (current_jcf, ival); \
3311 else expand_java_pushc (ival, OPERAND_TYPE##_type_node); }
3313 /* internal macro added for use by the WIDE case */
3314 #define LOAD_INTERNAL(OPTYPE, OPVALUE) \
3315 expand_load_internal (OPVALUE, type_map[OPVALUE], oldpc);
3317 /* Push local variable onto the opcode stack. */
3318 #define LOAD(OPERAND_TYPE, OPERAND_VALUE) \
3320 /* have to do this since OPERAND_VALUE may have side-effects */ \
3321 int opvalue = OPERAND_VALUE; \
3322 LOAD_INTERNAL(OPERAND_TYPE##_type_node, opvalue); \
3325 #define RETURN(OPERAND_TYPE, OPERAND_VALUE) \
3326 expand_java_return (OPERAND_TYPE##_type_node)
/* Java's `%' on integers truncates toward zero, i.e. TRUNC_MOD_EXPR,
   so irem/lrem map onto that tree code via BINOP below.  */
3328 #define REM_EXPR TRUNC_MOD_EXPR
3329 #define BINOP(OPERAND_TYPE, OPERAND_VALUE) \
3330 expand_java_binop (OPERAND_TYPE##_type_node, OPERAND_VALUE##_EXPR)
/* getfield/putfield/getstatic/putstatic; IMMEDIATE_u2 reads the
   constant-pool index of the field reference from the bytecode.  */
3332 #define FIELD(IS_STATIC, IS_PUT) \
3333 expand_java_field_op (IS_STATIC, IS_PUT, IMMEDIATE_u2)
/* TEST compares one stack operand against zero/null, COND compares two
   stack operands; both branch to oldpc + a signed 16-bit offset.  */
3335 #define TEST(OPERAND_TYPE, CONDITION) \
3336 expand_test (CONDITION##_EXPR, OPERAND_TYPE##_type_node, oldpc+IMMEDIATE_s2)
3338 #define COND(OPERAND_TYPE, CONDITION) \
3339 expand_cond (CONDITION##_EXPR, OPERAND_TYPE##_type_node, oldpc+IMMEDIATE_s2)
3341 #define BRANCH(OPERAND_TYPE, OPERAND_VALUE) \
3342 BRANCH_##OPERAND_TYPE (OPERAND_VALUE)
3344 #define BRANCH_GOTO(OPERAND_VALUE) \
3345 expand_java_goto (oldpc + OPERAND_VALUE)
3347 #define BRANCH_CALL(OPERAND_VALUE) \
3348 expand_java_call (oldpc + OPERAND_VALUE, oldpc)
3351 #define BRANCH_RETURN(OPERAND_VALUE) \
3353 tree type = OPERAND_TYPE##_type_node; \
3354 tree value = find_local_variable (OPERAND_VALUE, type, oldpc); \
3355 expand_java_ret (value); \
3359 #define NOT_IMPL(OPERAND_TYPE, OPERAND_VALUE) \
3360 fprintf (stderr, "%3d: %s ", oldpc, opname); \
3361 fprintf (stderr, "(not implemented)\n")
3362 #define NOT_IMPL1(OPERAND_VALUE) \
3363 fprintf (stderr, "%3d: %s ", oldpc, opname); \
3364 fprintf (stderr, "(not implemented)\n")
/* NOTE(review): BRANCH_RETURN is defined a second time here; in the
   original source the two definitions sit on opposite sides of a
   preprocessor conditional that was lost in extraction -- confirm.  */
3366 #define BRANCH_RETURN(OPERAND_VALUE) NOT_IMPL1(OPERAND_VALUE)
/* Operand-stack manipulation opcodes (pop/pop2, swap, dup family).
   The second argument of java_stack_dup is the depth to dup past.  */
3368 #define STACK(SUBOP, COUNT) STACK_##SUBOP (COUNT)
3370 #define STACK_POP(COUNT) java_stack_pop (COUNT)
3372 #define STACK_SWAP(COUNT) java_stack_swap()
3374 #define STACK_DUP(COUNT) java_stack_dup (COUNT, 0)
3375 #define STACK_DUPx1(COUNT) java_stack_dup (COUNT, 1)
3376 #define STACK_DUPx2(COUNT) java_stack_dup (COUNT, 2)
/* tableswitch/lookupswitch: per the JVM spec their operands start on a
   4-byte boundary, hence PC is rounded up before the s4 immediates.  */
3378 #define SWITCH(OPERAND_TYPE, TABLE_OR_LOOKUP) \
3379 PC = (PC + 3) / 4 * 4; TABLE_OR_LOOKUP##_SWITCH
3381 #define LOOKUP_SWITCH \
3382 { jint default_offset = IMMEDIATE_s4; jint npairs = IMMEDIATE_s4; \
3383 tree selector = pop_value (INT_type_node); \
3384 tree switch_expr = expand_java_switch (selector, oldpc + default_offset); \
3385 while (--npairs >= 0) \
3387 jint match = IMMEDIATE_s4; jint offset = IMMEDIATE_s4; \
3388 expand_java_add_case (switch_expr, match, oldpc + offset); \
3392 #define TABLE_SWITCH \
3393 { jint default_offset = IMMEDIATE_s4; \
3394 jint low = IMMEDIATE_s4; jint high = IMMEDIATE_s4; \
3395 tree selector = pop_value (INT_type_node); \
3396 tree switch_expr = expand_java_switch (selector, oldpc + default_offset); \
3397 for (; low <= high; low++) \
3399 jint offset = IMMEDIATE_s4; \
3400 expand_java_add_case (switch_expr, low, oldpc + offset); \
3404 #define INVOKE(MAYBE_STATIC, IS_INTERFACE) \
3405 { int opcode = byte_ops[PC-1]; \
3406 int method_ref_index = IMMEDIATE_u2; \
3408 if (IS_INTERFACE) { nargs = IMMEDIATE_u1; (void) IMMEDIATE_u1; } \
3410 expand_invoke (opcode, method_ref_index, nargs); \
3413 /* Handle new, checkcast, instanceof */
3414 #define OBJECT(TYPE, OP) \
3415 expand_java_##OP (get_class_constant (current_jcf, IMMEDIATE_u2))
/* Array opcodes: element load/store, arraylength, and the three
   allocation forms (anewarray, newarray, multianewarray).  */
3417 #define ARRAY(OPERAND_TYPE, SUBOP) ARRAY_##SUBOP(OPERAND_TYPE)
3419 #define ARRAY_LOAD(OPERAND_TYPE) \
3421 expand_java_arrayload( OPERAND_TYPE##_type_node ); \
3424 #define ARRAY_STORE(OPERAND_TYPE) \
3426 expand_java_arraystore( OPERAND_TYPE##_type_node ); \
3429 #define ARRAY_LENGTH(OPERAND_TYPE) expand_java_array_length();
3430 #define ARRAY_NEW(OPERAND_TYPE) ARRAY_NEW_##OPERAND_TYPE()
3431 #define ARRAY_NEW_PTR() \
3432 push_value (build_anewarray (get_class_constant (current_jcf, \
3434 pop_value (int_type_node)));
3435 #define ARRAY_NEW_NUM() \
3437 int atype = IMMEDIATE_u1; \
3438 push_value (build_newarray (atype, pop_value (int_type_node)));\
3440 #define ARRAY_NEW_MULTI() \
3442 tree klass = get_class_constant (current_jcf, IMMEDIATE_u2 ); \
3443 int ndims = IMMEDIATE_u1; \
3444 expand_java_multianewarray( klass, ndims ); \
3447 #define UNOP(OPERAND_TYPE, OPERAND_VALUE) \
3448 push_value (fold_build1 (NEGATE_EXPR, OPERAND_TYPE##_type_node, \
3449 pop_value (OPERAND_TYPE##_type_node)));
/* CONVERT2 (narrowing int conversions such as i2b/i2c/i2s) pushes an
   int-typed result wrapped in a NOP_EXPR; CONVERT pushes the target
   type directly.  */
3451 #define CONVERT2(FROM_TYPE, TO_TYPE) \
3453 push_value (build1 (NOP_EXPR, int_type_node, \
3454 (convert (TO_TYPE##_type_node, \
3455 pop_value (FROM_TYPE##_type_node))))); \
3458 #define CONVERT(FROM_TYPE, TO_TYPE) \
3460 push_value (convert (TO_TYPE##_type_node, \
3461 pop_value (FROM_TYPE##_type_node))); \
3464 /* internal macro added for use by the WIDE case
3465 Added TREE_TYPE (decl) assignment, apbianco */
3466 #define STORE_INTERNAL(OPTYPE, OPVALUE) \
3469 int index = OPVALUE; \
3470 tree type = OPTYPE; \
3471 value = pop_value (type); \
3472 type = TREE_TYPE (value); \
3473 decl = find_local_variable (index, type, oldpc); \
3474 set_local_type (index, type); \
3475 java_add_stmt (build2 (MODIFY_EXPR, type, decl, value)); \
3478 #define STORE(OPERAND_TYPE, OPERAND_VALUE) \
3480 /* have to do this since OPERAND_VALUE may have side-effects */ \
3481 int opvalue = OPERAND_VALUE; \
3482 STORE_INTERNAL(OPERAND_TYPE##_type_node, opvalue); \
3485 #define SPECIAL(OPERAND_TYPE, INSTRUCTION) \
3486 SPECIAL_##INSTRUCTION(OPERAND_TYPE)
/* monitorenter/monitorexit lower to calls on the soft runtime entry
   points, built by build_java_monitor after flushing the quick stack.  */
3488 #define SPECIAL_ENTER(IGNORED) MONITOR_OPERATION (soft_monitorenter_node)
3489 #define SPECIAL_EXIT(IGNORED) MONITOR_OPERATION (soft_monitorexit_node)
3491 #define MONITOR_OPERATION(call) \
3493 tree o = pop_value (ptr_type_node); \
3495 flush_quick_stack (); \
3496 c = build_java_monitor (call, o); \
3497 TREE_SIDE_EFFECTS (c) = 1; \
3498 java_add_stmt (c); \
3501 #define SPECIAL_IINC(IGNORED) \
3503 unsigned int local_var_index = IMMEDIATE_u1; \
3504 int ival = IMMEDIATE_s1; \
3505 expand_iinc(local_var_index, ival, oldpc); \
3508 #define SPECIAL_WIDE(IGNORED) \
3510 int modified_opcode = IMMEDIATE_u1; \
3511 unsigned int local_var_index = IMMEDIATE_u2; \
3512 switch (modified_opcode) \
3516 int ival = IMMEDIATE_s2; \
3517 expand_iinc (local_var_index, ival, oldpc); \
3520 case OPCODE_iload: \
3521 case OPCODE_lload: \
3522 case OPCODE_fload: \
3523 case OPCODE_dload: \
3524 case OPCODE_aload: \
3526 /* duplicate code from LOAD macro */ \
3527 LOAD_INTERNAL(operand_type[modified_opcode], local_var_index); \
3530 case OPCODE_istore: \
3531 case OPCODE_lstore: \
3532 case OPCODE_fstore: \
3533 case OPCODE_dstore: \
3534 case OPCODE_astore: \
3536 STORE_INTERNAL(operand_type[modified_opcode], local_var_index); \
3540 error ("unrecognized wide sub-instruction"); \
3544 #define SPECIAL_THROW(IGNORED) \
3545 build_java_athrow (pop_value (throwable_type_node))
/* Unimplemented/diagnostic fallbacks used by javaop.def entries.  */
3547 #define SPECIAL_BREAK NOT_IMPL1
3548 #define IMPL NOT_IMPL
3550 #include "javaop.def"
3553 fprintf (stderr
, "%3d: unknown(%3d)\n", oldpc
, byte_ops
[PC
]);
3558 /* Return the opcode at PC in the code section pointed to by
3561 static unsigned char
3562 peek_opcode_at_pc (JCF
*jcf
, int code_offset
, int pc
)
3564 unsigned char opcode
;
3565 long absolute_offset
= (long)JCF_TELL (jcf
);
3567 JCF_SEEK (jcf
, code_offset
);
3568 opcode
= jcf
->read_ptr
[pc
];
3569 JCF_SEEK (jcf
, absolute_offset
);
3573 /* Some bytecode compilers are emitting accurate LocalVariableTable
3574 attributes. Here's an example:
3579 Attribute "LocalVariableTable"
3580 slot #<n>: ... (PC: PC+1 length: L)
3582 This is accurate because the local in slot <n> really exists after
3583 the opcode at PC is executed, hence from PC+1 to PC+1+L.
3585 This procedure recognizes this situation and extends the live range
3586 of the local in SLOT to START_PC-1 or START_PC-2 (depending on the
3587 length of the store instruction.)
3589 This function is used by `give_name_to_locals' so that a local's
3590 DECL features a DECL_LOCAL_START_PC such that the first related
3591 store operation will use DECL as a destination, not an unrelated
3592 temporary created for the occasion.
3594 This function uses a global (instruction_bits) `note_instructions' should
3595 have allocated and filled properly. */
/* NOTE(review): this function body was mangled in extraction -- the
   return-type line, the switch opener, the `break' statements, the
   braces around the `wide' handling, and the final consequent of the
   closing `if' are missing.  Comments below annotate the surviving
   fragments; verify against the pristine gcc/java/expr.c before use.  */
3598 maybe_adjust_start_pc (struct JCF
*jcf
, int code_offset
,
3599 int start_pc
, int slot
)
3601 int first
, index
, opcode
;
/* Walk backwards from START_PC to the nearest opcode boundary, as
   recorded per-PC in the global instruction_bits bitmap filled by
   note_instructions.  */
3610 /* Find last previous instruction and remember it */
3611 for (pc
= start_pc
-1; pc
; pc
--)
3612 if (instruction_bits
[pc
] & BCODE_INSTRUCTION_START
)
3616 /* Retrieve the instruction, handle `wide'. */
3617 opcode
= (int) peek_opcode_at_pc (jcf
, code_offset
, pc
++);
/* A `wide' prefix means the real opcode is the next byte and the
   slot operand below is two bytes instead of one.  */
3618 if (opcode
== OPCODE_wide
)
3621 opcode
= (int) peek_opcode_at_pc (jcf
, code_offset
, pc
++);
/* Map each <t>store_<n> family to its first member, so that
   (opcode - first) recovers the implicit slot number <n> in the
   comparison at the end of the function.  */
3626 case OPCODE_astore_0
:
3627 case OPCODE_astore_1
:
3628 case OPCODE_astore_2
:
3629 case OPCODE_astore_3
:
3630 first
= OPCODE_astore_0
;
3633 case OPCODE_istore_0
:
3634 case OPCODE_istore_1
:
3635 case OPCODE_istore_2
:
3636 case OPCODE_istore_3
:
3637 first
= OPCODE_istore_0
;
3640 case OPCODE_lstore_0
:
3641 case OPCODE_lstore_1
:
3642 case OPCODE_lstore_2
:
3643 case OPCODE_lstore_3
:
3644 first
= OPCODE_lstore_0
;
3647 case OPCODE_fstore_0
:
3648 case OPCODE_fstore_1
:
3649 case OPCODE_fstore_2
:
3650 case OPCODE_fstore_3
:
3651 first
= OPCODE_fstore_0
;
3654 case OPCODE_dstore_0
:
3655 case OPCODE_dstore_1
:
3656 case OPCODE_dstore_2
:
3657 case OPCODE_dstore_3
:
3658 first
= OPCODE_dstore_0
;
/* Plain <t>store: the slot index is an explicit operand -- one byte,
   extended to two bytes (high byte second) when the instruction was
   prefixed by `wide' (see the OPCODE_wide check above).  */
3666 index
= peek_opcode_at_pc (jcf
, code_offset
, pc
);
3669 int other
= peek_opcode_at_pc (jcf
, code_offset
, ++pc
);
3670 index
= (other
<< 8) + index
;
3675 /* Now we decide: first >0 means we have a <t>store_<n>, index >0
3676 means we have a <t>store. */
/* If either form of the store targets SLOT, the local's live range is
   extended back over the store instruction (per the comment preceding
   this function).  The consequent of this `if' was lost in extraction.  */
3677 if ((first
> 0 && opcode
- first
== slot
) || (index
> 0 && index
== slot
))
3683 /* Build a node to represent empty statements and blocks. */
3686 build_java_empty_stmt (void)
3688 tree t
= build_empty_stmt (input_location
);
3692 /* Promote all args of integral type before generating any code. */
3695 promote_arguments (void)
3699 for (arg
= DECL_ARGUMENTS (current_function_decl
), i
= 0;
3700 arg
!= NULL_TREE
; arg
= DECL_CHAIN (arg
), i
++)
3702 tree arg_type
= TREE_TYPE (arg
);
3703 if (INTEGRAL_TYPE_P (arg_type
)
3704 && TYPE_PRECISION (arg_type
) < 32)
3706 tree copy
= find_local_variable (i
, integer_type_node
, -1);
3707 java_add_stmt (build2 (MODIFY_EXPR
, integer_type_node
,
3709 fold_convert (integer_type_node
, arg
)));
3711 if (TYPE_IS_WIDE (arg_type
))
3716 /* Create a local variable that points to the constant pool. */
3719 cache_cpool_data_ref (void)
3724 tree d
= build_constant_data_ref (flag_indirect_classes
);
3725 tree cpool_ptr
= build_decl (input_location
, VAR_DECL
, NULL_TREE
,
3726 build_pointer_type (TREE_TYPE (d
)));
3727 java_add_local_var (cpool_ptr
);
3728 TREE_CONSTANT (cpool_ptr
) = 1;
3730 java_add_stmt (build2 (MODIFY_EXPR
, TREE_TYPE (cpool_ptr
),
3731 cpool_ptr
, build_address_of (d
)));
3732 cpool
= build1 (INDIRECT_REF
, TREE_TYPE (d
), cpool_ptr
);
3733 TREE_THIS_NOTRAP (cpool
) = 1;
3734 TYPE_CPOOL_DATA_REF (output_class
) = cpool
;
3738 #include "gt-java-expr.h"