/* Process expressions for the GNU compiler for the Java(TM) language.
   Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004,
   2005, 2006, 2007 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to
the Free Software Foundation, 51 Franklin Street, Fifth Floor,
Boston, MA 02110-1301, USA.

Java and all Java-based marks are trademarks or registered trademarks
of Sun Microsystems, Inc. in the United States and other countries.
The Free Software Foundation is independent of Sun Microsystems, Inc.  */

/* Hacked by Per Bothner <bothner@cygnus.com> February 1996. */
#include "coretypes.h"
#include "java-tree.h"
#include "java-opcodes.h"
#include "java-except.h"
#include "tree-gimple.h"
static void flush_quick_stack (void);
static void push_value (tree);
static tree pop_value (tree);
static void java_stack_swap (void);
static void java_stack_dup (int, int);
static void build_java_athrow (tree);
static void build_java_jsr (int, int);
static void build_java_ret (tree);
static void expand_java_multianewarray (tree, int);
static void expand_java_arraystore (tree);
static void expand_java_arrayload (tree);
static void expand_java_array_length (void);
static tree build_java_monitor (tree, tree);
static void expand_java_pushc (int, tree);
static void expand_java_return (tree);
static void expand_load_internal (int, tree, int);
static void expand_java_NEW (tree);
static void expand_java_INSTANCEOF (tree);
static void expand_java_CHECKCAST (tree);
static void expand_iinc (unsigned int, int, int);
static void expand_java_binop (tree, enum tree_code);
static void note_label (int, int);
static void expand_compare (enum tree_code, tree, tree, int);
static void expand_test (enum tree_code, tree, int);
static void expand_cond (enum tree_code, tree, int);
static void expand_java_goto (int);
static tree expand_java_switch (tree, int);
static void expand_java_add_case (tree, int, int);
static void expand_java_call (int, int);
static void expand_java_ret (tree);
static tree pop_arguments (tree);
static void expand_invoke (int, int, int);
static void expand_java_field_op (int, int, int);
static void java_push_constant_from_pool (struct JCF *, int);
static void java_stack_pop (int);
static tree build_java_throw_out_of_bounds_exception (tree);
static tree build_java_check_indexed_type (tree, tree);
static unsigned char peek_opcode_at_pc (struct JCF *, int, int);
static void promote_arguments (void);
static GTY(()) tree operand_type[59];

static GTY(()) tree methods_ident;
static GTY(()) tree ncode_ident;
tree dtable_ident = NULL_TREE;

/* Set to nonzero value in order to emit class initialization code
   before static field references.  */
int always_initialize_class_p = 0;
/* We store the stack state in two places:
   Within a basic block, we use the quick_stack, which is a
   pushdown list (TREE_LISTs) of expression nodes.
   This is the top part of the stack; below that we use find_stack_slot.
   At the end of a basic block, the quick_stack must be flushed
   to the stack slot array (as handled by find_stack_slot).
   Using quick_stack generates better code (especially when
   compiled without optimization), because we do not have to
   explicitly store and load trees to temporary variables.

   If a variable is on the quick stack, it means the value of the variable
   when the quick stack was last flushed.  Conceptually, flush_quick_stack
   saves all the quick_stack elements in parallel.  However, that is
   complicated, so it actually saves them (i.e. copies each stack value
   to its home virtual register) from low indexes.  This allows a quick_stack
   element at index i (counting from the bottom of the stack) to reference
   the slot virtual registers that are >= i, but not those that are deeper.
   This convention makes most operations easier.  For example iadd works
   even when the stack contains (reg[0], reg[1]):  It results in the
   stack containing (reg[0]+reg[1]), which is OK.  However, some stack
   operations are more complicated.  For example dup given a stack
   containing (reg[0]) would yield (reg[0], reg[0]), which would violate
   the convention, since stack value 1 would refer to a register with a
   lower index (reg[0]), which flush_quick_stack does not safely handle.
   So dup cannot just add an extra element to the quick_stack, but iadd can.
*/

static GTY(()) tree quick_stack;
/* A free-list of unused permanent TREE_LIST nodes.  */
static GTY((deletable)) tree tree_list_free_list;

/* The physical memory page size used in this computer.  See
   build_field_ref().  */
static GTY(()) tree page_size;

/* The stack pointer of the Java virtual machine.
   This does include the size of the quick_stack.  */
int stack_pointer;

const unsigned char *linenumber_table;
int linenumber_count;

/* Largest pc so far in this method that has been passed to lookup_label.  */
int highest_label_pc_this_method = -1;

/* Base value for this method to add to pc to get generated label.  */
int start_label_pc_this_method = 0;
void
init_expr_processing (void)
{
  operand_type[21] = operand_type[54] = int_type_node;
  operand_type[22] = operand_type[55] = long_type_node;
  operand_type[23] = operand_type[56] = float_type_node;
  operand_type[24] = operand_type[57] = double_type_node;
  operand_type[25] = operand_type[58] = ptr_type_node;
}
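
/* Editorial note (illustrative): the indices above are the JVM opcode
   numbers of the untyped load/store families -- 21-25 are iload, lload,
   fload, dload and aload, and 54-58 are istore through astore.  See
   java-opcodes.h for the authoritative table.  */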

tree
java_truthvalue_conversion (tree expr)
{
  /* It is simpler and generates better code to have only TRUTH_*_EXPR
     or comparison expressions as truth values at this level.

     This function should normally be identity for Java.  */

  switch (TREE_CODE (expr))
    {
    case EQ_EXPR:   case NE_EXPR:   case UNEQ_EXPR: case LTGT_EXPR:
    case LE_EXPR:   case GE_EXPR:   case LT_EXPR:   case GT_EXPR:
    case UNLE_EXPR: case UNGE_EXPR: case UNLT_EXPR: case UNGT_EXPR:
    case ORDERED_EXPR: case UNORDERED_EXPR:
    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
    case TRUTH_NOT_EXPR:
    case ERROR_MARK:
      return expr;

    case INTEGER_CST:
      return integer_zerop (expr) ? boolean_false_node : boolean_true_node;

    case REAL_CST:
      return real_zerop (expr) ? boolean_false_node : boolean_true_node;

    /* are these legal? XXX JH */
    case NEGATE_EXPR:
    case ABS_EXPR:
    case FLOAT_EXPR:
      /* These don't change whether an object is nonzero or zero.  */
      return java_truthvalue_conversion (TREE_OPERAND (expr, 0));

    case COND_EXPR:
      /* Distribute the conversion into the arms of a COND_EXPR.  */
      return fold_build3 (COND_EXPR, boolean_type_node, TREE_OPERAND (expr, 0),
			  java_truthvalue_conversion (TREE_OPERAND (expr, 1)),
			  java_truthvalue_conversion (TREE_OPERAND (expr, 2)));

    case NOP_EXPR:
      /* If this is widening the argument, we can ignore it.  */
      if (TYPE_PRECISION (TREE_TYPE (expr))
	  >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr, 0))))
	return java_truthvalue_conversion (TREE_OPERAND (expr, 0));
      /* fall through to default */

    default:
      return fold_build2 (NE_EXPR, boolean_type_node,
			  expr, boolean_false_node);
    }
}

/* Save any stack slots that happen to be in the quick_stack into their
   home virtual register slots.

   The copy order is from low stack index to high, to support the invariant
   that the expression for a slot may contain decls for stack slots with
   higher (or the same) index, but not lower.  */

static void
flush_quick_stack (void)
{
  int stack_index = stack_pointer;
  tree prev, cur, next;

  /* First reverse the quick_stack, and count the number of slots it has.  */
  for (cur = quick_stack, prev = NULL_TREE; cur != NULL_TREE; cur = next)
    {
      next = TREE_CHAIN (cur);
      TREE_CHAIN (cur) = prev;
      prev = cur;
      stack_index -= 1 + TYPE_IS_WIDE (TREE_TYPE (TREE_VALUE (cur)));
    }
  quick_stack = prev;

  while (quick_stack != NULL_TREE)
    {
      tree decl;
      tree node = quick_stack, type;
      quick_stack = TREE_CHAIN (node);
      TREE_CHAIN (node) = tree_list_free_list;
      tree_list_free_list = node;
      node = TREE_VALUE (node);
      type = TREE_TYPE (node);

      decl = find_stack_slot (stack_index, type);
      if (decl != node)
	java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (node), decl, node));
      stack_index += 1 + TYPE_IS_WIDE (type);
    }
}
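
/* Illustration (editorial, not from the original sources): if the
   quick_stack currently holds the expressions (a+b, c) for stack slots
   0 and 1, the flush emits roughly

       slot0 = a + b;
       slot1 = c;

   in that order, so an expression for slot i may still refer to the
   decls of slots >= i, per the invariant described above.  */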

/* Push TYPE on the type stack.
   Return true on success, 0 on overflow.  */

int
push_type_0 (tree type)
{
  int n_words;
  type = promote_type (type);
  n_words = 1 + TYPE_IS_WIDE (type);
  if (stack_pointer + n_words > DECL_MAX_STACK (current_function_decl))
    return 0;
  /* Allocate decl for this variable now, so we get a temporary that
     survives the whole method. */
  find_stack_slot (stack_pointer, type);
  stack_type_map[stack_pointer++] = type;
  n_words--;
  while (--n_words >= 0)
    stack_type_map[stack_pointer++] = TYPE_SECOND;
  return 1;
}

void
push_type (tree type)
{
  int r = push_type_0 (type);
  gcc_assert (r);
}

static void
push_value (tree value)
{
  tree type = TREE_TYPE (value);
  if (TYPE_PRECISION (type) < 32 && INTEGRAL_TYPE_P (type))
    {
      type = promote_type (type);
      value = convert (type, value);
    }
  push_type (type);
  if (tree_list_free_list == NULL_TREE)
    quick_stack = tree_cons (NULL_TREE, value, quick_stack);
  else
    {
      tree node = tree_list_free_list;
      tree_list_free_list = TREE_CHAIN (tree_list_free_list);
      TREE_VALUE (node) = value;
      TREE_CHAIN (node) = quick_stack;
      quick_stack = node;
    }
  /* If the value has a side effect, then we need to evaluate it
     whether or not the result is used.  If the value ends up on the
     quick stack and is then popped, this won't happen -- so we flush
     the quick stack.  It is safest to simply always flush, though,
     since TREE_SIDE_EFFECTS doesn't capture COMPONENT_REF, and for
     the latter we may need to strip conversions.  */
  flush_quick_stack ();
}

/* Pop a type from the type stack.
   TYPE is the expected type.  Return the actual type, which must be
   convertible to TYPE.
   On an error, *MESSAGEP is set to a freshly malloc'd error message.  */

tree
pop_type_0 (tree type, char **messagep)
{
  int n_words;
  tree t;

  if (TREE_CODE (type) == RECORD_TYPE)
    type = promote_type (type);
  n_words = 1 + TYPE_IS_WIDE (type);
  if (stack_pointer < n_words)
    {
      *messagep = xstrdup ("stack underflow");
      return type;
    }
  while (--n_words > 0)
    {
      if (stack_type_map[--stack_pointer] != void_type_node)
	{
	  *messagep = xstrdup ("Invalid multi-word value on type stack");
	  return type;
	}
    }
  t = stack_type_map[--stack_pointer];
  if (type == NULL_TREE || t == type)
    return t;

  if (TREE_CODE (t) == TREE_LIST)
    {
      do
	{
	  tree tt = TREE_PURPOSE (t);
	  if (! can_widen_reference_to (tt, type))
	    {
	      t = tt;
	      goto fail;
	    }
	  t = TREE_CHAIN (t);
	}
      while (t);
      return t;
    }

  if (INTEGRAL_TYPE_P (type) && INTEGRAL_TYPE_P (t)
      && TYPE_PRECISION (type) <= 32 && TYPE_PRECISION (t) <= 32)
    return t;

  if (TREE_CODE (type) == POINTER_TYPE && TREE_CODE (t) == POINTER_TYPE)
    {
      /* If the expected type we've been passed is object or ptr
	 (i.e. void*), the caller needs to know the real type.  */
      if (type == ptr_type_node || type == object_ptr_type_node)
	return t;

      /* Since the verifier has already run, we know that any
	 types we see will be compatible.  In BC mode, this fact
	 may be checked at runtime, but if that is so then we can
	 assume its truth here as well.  So, we always succeed
	 here, with the expected type.  */
      return type;
    }

  if (! flag_verify_invocations && flag_indirect_dispatch
      && t == object_ptr_type_node)
    {
      if (type != ptr_type_node)
	warning (0, "need to insert runtime check for %s",
		 xstrdup (lang_printable_name (type, 0)));
      return type;
    }

  /* lang_printable_name uses a static buffer, so we must save the result
     from calling it the first time.  */
 fail:
  {
    char *temp = xstrdup (lang_printable_name (type, 0));
    /* If the stack contains a multi-word type, keep popping the stack until
       the real type is found.  */
    while (t == void_type_node)
      t = stack_type_map[--stack_pointer];
    *messagep = concat ("expected type '", temp,
			"' but stack contains '", lang_printable_name (t, 0),
			"'", NULL);
    free (temp);
  }
  return type;
}

/* Pop a type from the type stack.
   TYPE is the expected type.  Return the actual type, which must be
   convertible to TYPE, otherwise call error.  */

tree
pop_type (tree type)
{
  char *message = NULL;
  type = pop_type_0 (type, &message);
  if (message != NULL)
    {
      error ("%s", message);
      free (message);
    }
  return type;
}

/* Return true if two type assertions are equal.  */

static int
type_assertion_eq (const void * k1_p, const void * k2_p)
{
  type_assertion k1 = *(type_assertion *)k1_p;
  type_assertion k2 = *(type_assertion *)k2_p;
  return (k1.assertion_code == k2.assertion_code
	  && k1.op1 == k2.op1
	  && k1.op2 == k2.op2);
}

/* Hash a type assertion.  */

static hashval_t
type_assertion_hash (const void *p)
{
  const type_assertion *k_p = p;
  hashval_t hash = iterative_hash (&k_p->assertion_code, sizeof
				   k_p->assertion_code, 0);
  hash = iterative_hash (&k_p->op1, sizeof k_p->op1, hash);
  return iterative_hash (&k_p->op2, sizeof k_p->op2, hash);
}

/* Add an entry to the type assertion table for the given class.
   CLASS is the class for which this assertion will be evaluated by the
   runtime during loading/initialization.
   ASSERTION_CODE is the 'opcode' or type of this assertion: see java-tree.h.
   OP1 and OP2 are the operands. The tree type of these arguments may be
   specific to each assertion_code. */

void
add_type_assertion (tree class, int assertion_code, tree op1, tree op2)
{
  htab_t assertions_htab;
  type_assertion as;
  void **as_pp;

  assertions_htab = TYPE_ASSERTIONS (class);
  if (assertions_htab == NULL)
    {
      assertions_htab = htab_create_ggc (7, type_assertion_hash,
					 type_assertion_eq, NULL);
      TYPE_ASSERTIONS (current_class) = assertions_htab;
    }

  as.assertion_code = assertion_code;
  as.op1 = op1;
  as.op2 = op2;

  as_pp = htab_find_slot (assertions_htab, &as, INSERT);

  /* Don't add the same assertion twice.  */
  if (*as_pp)
    return;

  *as_pp = ggc_alloc (sizeof (type_assertion));
  **(type_assertion **)as_pp = as;
}

/* Return 1 if SOURCE_TYPE can be safely widened to TARGET_TYPE.
   Handles array types and interfaces.  */

int
can_widen_reference_to (tree source_type, tree target_type)
{
  if (source_type == ptr_type_node || target_type == object_ptr_type_node)
    return 1;

  /* Get rid of pointers  */
  if (TREE_CODE (source_type) == POINTER_TYPE)
    source_type = TREE_TYPE (source_type);
  if (TREE_CODE (target_type) == POINTER_TYPE)
    target_type = TREE_TYPE (target_type);

  if (source_type == target_type)
    return 1;

  /* FIXME: This is very pessimistic, in that it checks everything,
     even if we already know that the types are compatible.  If we're
     to support full Java class loader semantics, we need this.
     However, we could do something more optimal.  */
  if (! flag_verify_invocations)
    {
      add_type_assertion (current_class, JV_ASSERT_TYPES_COMPATIBLE,
			  source_type, target_type);

      if (!quiet_flag)
	warning (0, "assert: %s is assign compatible with %s",
		 xstrdup (lang_printable_name (target_type, 0)),
		 xstrdup (lang_printable_name (source_type, 0)));
      /* Punt everything to runtime.  */
      return 1;
    }

  if (TYPE_DUMMY (source_type) || TYPE_DUMMY (target_type))
    return 1;
  else
    {
      if (TYPE_ARRAY_P (source_type) || TYPE_ARRAY_P (target_type))
	{
	  HOST_WIDE_INT source_length, target_length;
	  if (TYPE_ARRAY_P (source_type) != TYPE_ARRAY_P (target_type))
	    {
	      /* An array implements Cloneable and Serializable.  */
	      tree name = DECL_NAME (TYPE_NAME (target_type));
	      return (name == java_lang_cloneable_identifier_node
		      || name == java_io_serializable_identifier_node);
	    }
	  target_length = java_array_type_length (target_type);
	  if (target_length >= 0)
	    {
	      source_length = java_array_type_length (source_type);
	      if (source_length != target_length)
		return 0;
	    }
	  source_type = TYPE_ARRAY_ELEMENT (source_type);
	  target_type = TYPE_ARRAY_ELEMENT (target_type);
	  if (source_type == target_type)
	    return 1;
	  if (TREE_CODE (source_type) != POINTER_TYPE
	      || TREE_CODE (target_type) != POINTER_TYPE)
	    return 0;
	  return can_widen_reference_to (source_type, target_type);
	}
      else
	{
	  int source_depth = class_depth (source_type);
	  int target_depth = class_depth (target_type);

	  if (TYPE_DUMMY (source_type) || TYPE_DUMMY (target_type))
	    {
	      if (! quiet_flag)
		warning (0, "assert: %s is assign compatible with %s",
			 xstrdup (lang_printable_name (target_type, 0)),
			 xstrdup (lang_printable_name (source_type, 0)));
	      return 1;
	    }

	  /* class_depth can return a negative depth if an error occurred */
	  if (source_depth < 0 || target_depth < 0)
	    return 0;

	  if (CLASS_INTERFACE (TYPE_NAME (target_type)))
	    {
	      /* target_type is OK if source_type or source_type ancestors
		 implement target_type.  We handle multiple sub-interfaces  */
	      tree binfo, base_binfo;
	      int i;

	      for (binfo = TYPE_BINFO (source_type), i = 0;
		   BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
		if (can_widen_reference_to
		    (BINFO_TYPE (base_binfo), target_type))
		  return 1;

	      if (!i)
		return 0;
	    }

	  for ( ; source_depth > target_depth;  source_depth--)
	    {
	      source_type
		= BINFO_TYPE (BINFO_BASE_BINFO (TYPE_BINFO (source_type), 0));
	    }
	  return source_type == target_type;
	}
    }
}
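
/* Illustrative examples of the cases above (editorial note):
   - String[] widens to Object[]: both sides are arrays, so the element
     types String and Object are compared recursively.
   - int[] widens to Cloneable or java.io.Serializable, the two
     interfaces every array implements.
   - Without verified invocations (BC mode), the answer is simply
     recorded as a JV_ASSERT_TYPES_COMPATIBLE assertion and left for
     the runtime to check.  */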

static tree
pop_value (tree type)
{
  type = pop_type (type);
  if (quick_stack)
    {
      tree node = quick_stack;
      quick_stack = TREE_CHAIN (quick_stack);
      TREE_CHAIN (node) = tree_list_free_list;
      tree_list_free_list = node;
      node = TREE_VALUE (node);
      return node;
    }
  else
    return find_stack_slot (stack_pointer, promote_type (type));
}

/* Pop and discard the top COUNT stack slots.  */

static void
java_stack_pop (int count)
{
  while (count > 0)
    {
      tree type, val;

      gcc_assert (stack_pointer != 0);

      type = stack_type_map[stack_pointer - 1];
      if (type == TYPE_SECOND)
	{
	  count--;
	  gcc_assert (stack_pointer != 1 && count > 0);

	  type = stack_type_map[stack_pointer - 2];
	}
      val = pop_value (type);
      count--;
    }
}

/* Implement the 'swap' operator (to swap two top stack slots).  */

static void
java_stack_swap (void)
{
  tree type1, type2;
  tree temp;
  tree decl1, decl2;

  if (stack_pointer < 2
      || (type1 = stack_type_map[stack_pointer - 1]) == TYPE_UNKNOWN
      || (type2 = stack_type_map[stack_pointer - 2]) == TYPE_UNKNOWN
      || type1 == TYPE_SECOND || type2 == TYPE_SECOND
      || TYPE_IS_WIDE (type1) || TYPE_IS_WIDE (type2))
    /* Bad stack swap.  */
    abort ();

  flush_quick_stack ();
  decl1 = find_stack_slot (stack_pointer - 1, type1);
  decl2 = find_stack_slot (stack_pointer - 2, type2);
  temp = build_decl (VAR_DECL, NULL_TREE, type1);
  java_add_local_var (temp);
  java_add_stmt (build2 (MODIFY_EXPR, type1, temp, decl1));
  java_add_stmt (build2 (MODIFY_EXPR, type2,
			 find_stack_slot (stack_pointer - 1, type2),
			 decl2));
  java_add_stmt (build2 (MODIFY_EXPR, type1,
			 find_stack_slot (stack_pointer - 2, type1),
			 temp));
  stack_type_map[stack_pointer - 1] = type2;
  stack_type_map[stack_pointer - 2] = type1;
}

static void
java_stack_dup (int size, int offset)
{
  int low_index = stack_pointer - size - offset;
  int dst_index;
  if (low_index < 0)
    error ("stack underflow - dup* operation");

  flush_quick_stack ();

  stack_pointer += size;
  dst_index = stack_pointer;

  for (dst_index = stack_pointer;  --dst_index >= low_index; )
    {
      tree type;
      int src_index = dst_index - size;
      if (src_index < low_index)
	src_index = dst_index + size + offset;
      type = stack_type_map[src_index];
      if (type == TYPE_SECOND)
	{
	  /* Dup operation splits 64-bit number.  */
	  gcc_assert (src_index > low_index);

	  stack_type_map[dst_index] = type;
	  src_index--;  dst_index--;
	  type = stack_type_map[src_index];
	  gcc_assert (TYPE_IS_WIDE (type));
	}
      else
	gcc_assert (! TYPE_IS_WIDE (type));

      if (src_index != dst_index)
	{
	  tree src_decl = find_stack_slot (src_index, type);
	  tree dst_decl = find_stack_slot (dst_index, type);

	  java_add_stmt
	    (build2 (MODIFY_EXPR, TREE_TYPE (dst_decl), dst_decl, src_decl));
	  stack_type_map[dst_index] = type;
	}
    }
}
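
/* Worked example (editorial, illustrative): dup_x1 arrives here with
   SIZE = 1, OFFSET = 1.  With ..., v2, v1 on the stack (v1 on top),
   LOW_INDEX is v2's slot and the copy loop leaves ..., v1, v2, v1:
   each destination slot is filled from the slot SIZE below it, and the
   source that would fall below LOW_INDEX wraps around to the slot that
   now holds the old top-of-stack value.  */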

/* Calls _Jv_Throw or _Jv_Sjlj_Throw.  Discard the contents of the
   value stack. */

static void
build_java_athrow (tree node)
{
  tree call;

  call = build3 (CALL_EXPR,
		 void_type_node,
		 build_address_of (throw_node),
		 build_tree_list (NULL_TREE, node),
		 NULL_TREE);
  TREE_SIDE_EFFECTS (call) = 1;
  java_add_stmt (call);
  java_stack_pop (stack_pointer);
}

/* Implementation for jsr/ret */

static void
build_java_jsr (int target_pc, int return_pc)
{
  tree where =  lookup_label (target_pc);
  tree ret = lookup_label (return_pc);
  tree ret_label = fold_build1 (ADDR_EXPR, return_address_type_node, ret);
  push_value (ret_label);
  flush_quick_stack ();
  java_add_stmt (build1 (GOTO_EXPR, void_type_node, where));

  /* Do not need to emit the label here.  We noted the existence of the
     label as a jump target in note_instructions; we'll emit the label
     for real at the beginning of the expand_byte_code loop.  */
}

static void
build_java_ret (tree location)
{
  java_add_stmt (build1 (GOTO_EXPR, void_type_node, location));
}

/* Implementation of operations on array: new, load, store, length */

tree
decode_newarray_type (int atype)
{
  switch (atype)
    {
    case 4:  return boolean_type_node;
    case 5:  return char_type_node;
    case 6:  return float_type_node;
    case 7:  return double_type_node;
    case 8:  return byte_type_node;
    case 9:  return short_type_node;
    case 10: return int_type_node;
    case 11: return long_type_node;
    default: return NULL_TREE;
    }
}

/* Map primitive type to the code used by OPCODE_newarray. */

int
encode_newarray_type (tree type)
{
  if (type == boolean_type_node)
    return 4;
  else if (type == char_type_node)
    return 5;
  else if (type == float_type_node)
    return 6;
  else if (type == double_type_node)
    return 7;
  else if (type == byte_type_node)
    return 8;
  else if (type == short_type_node)
    return 9;
  else if (type == int_type_node)
    return 10;
  else if (type == long_type_node)
    return 11;
  else
    gcc_unreachable ();
}
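
/* Editorial note (illustrative): codes 4..11 are the JVM `newarray'
   atype constants T_BOOLEAN, T_CHAR, T_FLOAT, T_DOUBLE, T_BYTE,
   T_SHORT, T_INT and T_LONG, so e.g. the bytecode `newarray 10'
   allocates an int[].  */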

/* Build a call to _Jv_ThrowBadArrayIndex(), the
   ArrayIndexOutOfBoundsException exception handler.  */

static tree
build_java_throw_out_of_bounds_exception (tree index)
{
  tree node = build3 (CALL_EXPR, int_type_node,
		      build_address_of (soft_badarrayindex_node),
		      build_tree_list (NULL_TREE, index), NULL_TREE);
  TREE_SIDE_EFFECTS (node) = 1;	/* Allows expansion within ANDIF */
  return (node);
}

/* Return the length of an array. Doesn't perform any checking on the nature
   or value of the array NODE. May be used to implement some bytecodes.  */

tree
build_java_array_length_access (tree node)
{
  tree type = TREE_TYPE (node);
  tree array_type = TREE_TYPE (type);
  HOST_WIDE_INT length;

  if (!is_array_type_p (type))
    {
      /* With the new verifier, we will see an ordinary pointer type
	 here.  In this case, we just use an arbitrary array type.  */
      array_type = build_java_array_type (object_ptr_type_node, -1);
      type = promote_type (array_type);
    }

  length = java_array_type_length (type);
  if (length >= 0)
    return build_int_cst (NULL_TREE, length);

  node = build3 (COMPONENT_REF, int_type_node,
		 build_java_indirect_ref (array_type, node,
					  flag_check_references),
		 lookup_field (&array_type, get_identifier ("length")),
		 NULL_TREE);
  IS_ARRAY_LENGTH_ACCESS (node) = 1;
  return node;
}

/* Optionally checks a reference against the NULL pointer.  ARG1: the
   expr, ARG2: we should check the reference.  Don't generate extra
   checks if we're not generating code.  */

tree
java_check_reference (tree expr, int check)
{
  if (!flag_syntax_only && check)
    {
      expr = save_expr (expr);
      expr = build3 (COND_EXPR, TREE_TYPE (expr),
		     build2 (EQ_EXPR, boolean_type_node,
			     expr, null_pointer_node),
		     build3 (CALL_EXPR, void_type_node,
			     build_address_of (soft_nullpointer_node),
			     NULL_TREE, NULL_TREE),
		     expr);
    }

  return expr;
}

/* Reference an object: just like an INDIRECT_REF, but with checking.  */

tree
build_java_indirect_ref (tree type, tree expr, int check)
{
  tree t;
  t = java_check_reference (expr, check);
  t = convert (build_pointer_type (type), t);
  return build1 (INDIRECT_REF, type, t);
}

/* Implement array indexing (either as l-value or r-value).
   Returns a tree for ARRAY[INDEX], assume TYPE is the element type.
   Optionally performs bounds checking and/or test to NULL.
   At this point, ARRAY should have been verified as an array.  */

tree
build_java_arrayaccess (tree array, tree type, tree index)
{
  tree node, throw = NULL_TREE;
  tree data_field;
  tree ref;
  tree array_type = TREE_TYPE (TREE_TYPE (array));
  tree size_exp = fold_convert (sizetype, size_in_bytes (type));

  if (!is_array_type_p (TREE_TYPE (array)))
    {
      /* With the new verifier, we will see an ordinary pointer type
	 here.  In this case, we just use the correct array type.  */
      array_type = build_java_array_type (type, -1);
    }

  if (flag_bounds_check)
    {
      /* Generate:
       * (unsigned jint) INDEX >= (unsigned jint) LEN
       *    && throw ArrayIndexOutOfBoundsException.
       * Note this is equivalent to and more efficient than:
       * INDEX < 0 || INDEX >= LEN && throw ... */
      tree test;
      tree len = convert (unsigned_int_type_node,
			  build_java_array_length_access (array));
      test = fold_build2 (GE_EXPR, boolean_type_node,
			  convert (unsigned_int_type_node, index),
			  len);
      if (! integer_zerop (test))
	{
	  throw = build2 (TRUTH_ANDIF_EXPR, int_type_node, test,
			  build_java_throw_out_of_bounds_exception (index));
	  /* allows expansion within COMPOUND */
	  TREE_SIDE_EFFECTS( throw ) = 1;
	}
    }

  /* If checking bounds, wrap the index expr with a COMPOUND_EXPR in order
     to have the bounds check evaluated first. */
  if (throw != NULL_TREE)
    index = build2 (COMPOUND_EXPR, int_type_node, throw, index);

  data_field = lookup_field (&array_type, get_identifier ("data"));

  ref = build3 (COMPONENT_REF, TREE_TYPE (data_field),
		build_java_indirect_ref (array_type, array,
					 flag_check_references),
		data_field, NULL_TREE);

  /* Take the address of the data field and convert it to a pointer to
     the element type.  */
  node = build1 (NOP_EXPR, build_pointer_type (type), build_address_of (ref));

  /* Multiply the index by the size of an element to obtain a byte
     offset.  Convert the result to a pointer to the element type.  */
  index = fold_convert (TREE_TYPE (node),
			build2 (MULT_EXPR, sizetype,
				fold_convert (sizetype, index),
				size_exp));

  /* Sum the byte offset and the address of the data field.  */
  node = fold_build2 (PLUS_EXPR, TREE_TYPE (node), node, index);

  /* Finally, return

     *((&array->data) + index*size_exp)

  */
  return build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (node)), node);
}
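
/* Concrete sketch (editorial, illustrative): for an int[] element
   access a[i] with bounds checking enabled, the tree built above
   corresponds roughly to

     (unsigned) i >= (unsigned) a->length && _Jv_ThrowBadArrayIndex (i),
     *(jint *) ((char *) &a->data + i * 4)

   where the comma denotes the COMPOUND_EXPR that forces the bounds
   test to be evaluated before the element address is formed.  */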

/* Generate code to throw an ArrayStoreException if OBJECT is not assignable
   (at runtime) to an element of ARRAY.  A NOP_EXPR is returned if it can
   determine that no check is required.  */

tree
build_java_arraystore_check (tree array, tree object)
{
  tree check, element_type, source;
  tree array_type_p = TREE_TYPE (array);
  tree object_type = TYPE_NAME (TREE_TYPE (TREE_TYPE (object)));

  if (! flag_verify_invocations)
    {
      /* With the new verifier, we don't track precise types.  FIXME:
	 performance regression here.  */
      element_type = TYPE_NAME (object_type_node);
    }
  else
    {
      gcc_assert (is_array_type_p (array_type_p));

      /* Get the TYPE_DECL for ARRAY's element type. */
      element_type
	= TYPE_NAME (TREE_TYPE (TREE_TYPE (TREE_TYPE (array_type_p))));
    }

  gcc_assert (TREE_CODE (element_type) == TYPE_DECL
	      && TREE_CODE (object_type) == TYPE_DECL);

  if (!flag_store_check)
    return build1 (NOP_EXPR, array_type_p, array);

  /* No check is needed if the element type is final.  Also check that
     element_type matches object_type, since in the bytecode
     compilation case element_type may be the actual element type of
     the array rather than its declared type.  However, if we're doing
     indirect dispatch, we can't do the `final' optimization.  */
  if (element_type == object_type
      && ! flag_indirect_dispatch
      && CLASS_FINAL (element_type))
    return build1 (NOP_EXPR, array_type_p, array);

  /* OBJECT might be wrapped by a SAVE_EXPR. */
  if (TREE_CODE (object) == SAVE_EXPR)
    source = TREE_OPERAND (object, 0);
  else
    source = object;

  /* Avoid the check if OBJECT was just loaded from the same array. */
  if (TREE_CODE (source) == ARRAY_REF)
    {
      tree target;
      source = TREE_OPERAND (source, 0); /* COMPONENT_REF. */
      source = TREE_OPERAND (source, 0); /* INDIRECT_REF. */
      source = TREE_OPERAND (source, 0); /* Source array's DECL or SAVE_EXPR. */
      if (TREE_CODE (source) == SAVE_EXPR)
	source = TREE_OPERAND (source, 0);

      target = array;
      if (TREE_CODE (target) == SAVE_EXPR)
	target = TREE_OPERAND (target, 0);

      if (source == target)
	return build1 (NOP_EXPR, array_type_p, array);
    }

  /* Build an invocation of _Jv_CheckArrayStore */
  check = build3 (CALL_EXPR, void_type_node,
		  build_address_of (soft_checkarraystore_node),
		  tree_cons (NULL_TREE, array,
			     build_tree_list (NULL_TREE, object)),
		  NULL_TREE);
  TREE_SIDE_EFFECTS (check) = 1;

  return check;
}

/* Makes sure that INDEXED_TYPE is appropriate. If not, make it from
   ARRAY_NODE. This function is used to retrieve something less vague than
   a pointer type when indexing the first dimension of something like [[<t>.
   May return a corrected type, if necessary, otherwise INDEXED_TYPE is
   returned unchanged.  */

static tree
build_java_check_indexed_type (tree array_node ATTRIBUTE_UNUSED,
			       tree indexed_type)
{
  /* We used to check to see if ARRAY_NODE really had array type.
     However, with the new verifier, this is not necessary, as we know
     that the object will be an array of the appropriate type.  */

  return indexed_type;
}

/* newarray triggers a call to _Jv_NewPrimArray. This function should be
   called with an integer code (the type of array to create), and the length
   of the array to create.  */

tree
build_newarray (int atype_value, tree length)
{
  tree type_arg;

  tree prim_type = decode_newarray_type (atype_value);
  tree type
    = build_java_array_type (prim_type,
			     host_integerp (length, 0) == INTEGER_CST
			     ? tree_low_cst (length, 0) : -1);

  /* If compiling to native, pass a reference to the primitive type class
     and save the runtime some work. However, the bytecode generator
     expects to find the type_code int here. */
  if (flag_emit_class_files)
    type_arg = build_int_cst (NULL_TREE, atype_value);
  else
    type_arg = build_class_ref (prim_type);

  return build3 (CALL_EXPR, promote_type (type),
		 build_address_of (soft_newarray_node),
		 tree_cons (NULL_TREE,
			    type_arg,
			    build_tree_list (NULL_TREE, length)),
		 NULL_TREE);
}

/* Generates anewarray from a given CLASS_TYPE. Gets from the stack the size
   of the dimension. */

tree
build_anewarray (tree class_type, tree length)
{
  tree type
    = build_java_array_type (class_type,
			     host_integerp (length, 0)
			     ? tree_low_cst (length, 0) : -1);

  return build3 (CALL_EXPR, promote_type (type),
		 build_address_of (soft_anewarray_node),
		 tree_cons (NULL_TREE, length,
			    tree_cons (NULL_TREE, build_class_ref (class_type),
				       build_tree_list (NULL_TREE,
							null_pointer_node))),
		 NULL_TREE);
}

/* Return a node that evaluates 'new TYPE[LENGTH]'. */

tree
build_new_array (tree type, tree length)
{
  if (JPRIMITIVE_TYPE_P (type))
    return build_newarray (encode_newarray_type (type), length);
  else
    return build_anewarray (TREE_TYPE (type), length);
}

/* Generates a call to _Jv_NewMultiArray. multianewarray expects a
   class pointer, a number of dimensions and the matching number of
   dimensions. The argument list is NULL terminated.  */

static void
expand_java_multianewarray (tree class_type, int ndim)
{
  int i;
  tree args = build_tree_list( NULL_TREE, null_pointer_node );

  for( i = 0; i < ndim; i++ )
    args = tree_cons (NULL_TREE, pop_value (int_type_node), args);

  push_value (build3 (CALL_EXPR,
		      promote_type (class_type),
		      build_address_of (soft_multianewarray_node),
		      tree_cons (NULL_TREE, build_class_ref (class_type),
				 tree_cons (NULL_TREE,
					    build_int_cst (NULL_TREE, ndim),
					    args)),
		      NULL_TREE));
}

/*  ARRAY[INDEX] <- RHS. build_java_check_indexed_type makes sure that
    ARRAY is an array type. May expand some bound checking and NULL
    pointer checking. RHS_TYPE_NODE we are going to store. In the case
    of CHAR/BYTE/BOOLEAN/SHORT, the type popped off the stack is an
    INT. In those cases, we make the conversion.

    If ARRAY is a reference type, the assignment is checked at run-time
    to make sure that the RHS can be assigned to the array element
    type. It is not necessary to generate this code if ARRAY is final.  */

static void
expand_java_arraystore (tree rhs_type_node)
{
  tree rhs_node    = pop_value ((INTEGRAL_TYPE_P (rhs_type_node)
				 && TYPE_PRECISION (rhs_type_node) <= 32) ?
				 int_type_node : rhs_type_node);
  tree index = pop_value (int_type_node);
  tree array_type, array, temp, access;

  /* If we're processing an `aaload' we might as well just pick
     `Object'.  */
  if (TREE_CODE (rhs_type_node) == POINTER_TYPE)
    {
      array_type = build_java_array_type (object_ptr_type_node, -1);
      rhs_type_node = object_ptr_type_node;
    }
  else
    array_type = build_java_array_type (rhs_type_node, -1);

  array = pop_value (array_type);
  array = build1 (NOP_EXPR, promote_type (array_type), array);

  rhs_type_node    = build_java_check_indexed_type (array, rhs_type_node);

  flush_quick_stack ();

  index = save_expr (index);
  array = save_expr (array);

  /* We want to perform the bounds check (done by
     build_java_arrayaccess) before the type check (done by
     build_java_arraystore_check).  So, we call build_java_arrayaccess
     -- which returns an ARRAY_REF lvalue -- and we then generate code
     to stash the address of that lvalue in a temp.  Then we call
     build_java_arraystore_check, and finally we generate a
     MODIFY_EXPR to set the array element.  */

  access = build_java_arrayaccess (array, rhs_type_node, index);
  temp = build_decl (VAR_DECL, NULL_TREE,
		     build_pointer_type (TREE_TYPE (access)));
  java_add_local_var (temp);
  java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (temp),
			 temp,
			 build_fold_addr_expr (access)));

  if (TREE_CODE (rhs_type_node) == POINTER_TYPE)
    {
      tree check = build_java_arraystore_check (array, rhs_node);
      java_add_stmt (check);
    }

  java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (access),
			 build1 (INDIRECT_REF, TREE_TYPE (access), temp),
			 rhs_node));
}

/* Expand the evaluation of ARRAY[INDEX]. build_java_check_indexed_type makes
   sure that LHS is an array type. May expand some bound checking and NULL
   pointer checking.
   LHS_TYPE_NODE is the type of ARRAY[INDEX]. But in the case of CHAR/BYTE/
   BOOLEAN/SHORT, we push a promoted type back to the stack.
*/

static void
expand_java_arrayload (tree lhs_type_node)
{
  tree load_node;
  tree index_node = pop_value (int_type_node);
  tree array_type;
  tree array_node;

  /* If we're processing an `aaload' we might as well just pick
     `Object'.  */
  if (TREE_CODE (lhs_type_node) == POINTER_TYPE)
    {
      array_type = build_java_array_type (object_ptr_type_node, -1);
      lhs_type_node = object_ptr_type_node;
    }
  else
    array_type = build_java_array_type (lhs_type_node, -1);
  array_node = pop_value (array_type);
  array_node = build1 (NOP_EXPR, promote_type (array_type), array_node);

  index_node = save_expr (index_node);
  array_node = save_expr (array_node);

  lhs_type_node = build_java_check_indexed_type (array_node,
						 lhs_type_node);
  load_node = build_java_arrayaccess (array_node,
				      lhs_type_node,
				      index_node);
  if (INTEGRAL_TYPE_P (lhs_type_node) && TYPE_PRECISION (lhs_type_node) <= 32)
    load_node = fold_build1 (NOP_EXPR, int_type_node, load_node);
  push_value (load_node);
}

/* Expands .length. Makes sure that we deal with an array and may expand
   a NULL check on the array object.  */

static void
expand_java_array_length (void)
{
  tree array  = pop_value (ptr_type_node);
  tree length = build_java_array_length_access (array);

  push_value (length);
}

/* Emit code for the call to _Jv_Monitor{Enter,Exit}. CALL can be
   either soft_monitorenter_node or soft_monitorexit_node.  */

static tree
build_java_monitor (tree call, tree object)
{
  return build3 (CALL_EXPR,
		 void_type_node,
		 build_address_of (call),
		 build_tree_list (NULL_TREE, object),
		 NULL_TREE);
}

/* Emit code for one of the PUSHC instructions. */

static void
expand_java_pushc (int ival, tree type)
{
  tree value;
  if (type == ptr_type_node && ival == 0)
    value = null_pointer_node;
  else if (type == int_type_node || type == long_type_node)
    value = build_int_cst (type, ival);
  else if (type == float_type_node || type == double_type_node)
    {
      REAL_VALUE_TYPE x;
      REAL_VALUE_FROM_INT (x, ival, 0, TYPE_MODE (type));
      value = build_real (type, x);
    }
  else
    gcc_unreachable ();

  push_value (value);
}

static void
expand_java_return (tree type)
{
  if (type == void_type_node)
    java_add_stmt (build1 (RETURN_EXPR, void_type_node, NULL));
  else
    {
      tree retval = pop_value (type);
      tree res = DECL_RESULT (current_function_decl);
      retval = build2 (MODIFY_EXPR, TREE_TYPE (res), res, retval);

      /* Handle the situation where the native integer type is smaller
	 than the JVM integer. It can happen for many cross compilers.
	 The whole if expression just goes away if INT_TYPE_SIZE < 32
	 is false. */
      if (INT_TYPE_SIZE < 32
	  && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (res)))
	      < GET_MODE_SIZE (TYPE_MODE (type))))
	retval = build1(NOP_EXPR, TREE_TYPE(res), retval);

      TREE_SIDE_EFFECTS (retval) = 1;
      java_add_stmt (build1 (RETURN_EXPR, TREE_TYPE (retval), retval));
    }
}

static void
expand_load_internal (int index, tree type, int pc)
{
  tree copy;
  tree var = find_local_variable (index, type, pc);

  /* Now VAR is the VAR_DECL (or PARM_DECL) that we are going to push
     on the stack.  If there is an assignment to this VAR_DECL between
     the stack push and the use, then the wrong code could be
     generated.  To avoid this we create a new local and copy our
     value into it.  Then we push this new local on the stack.
     Hopefully this all gets optimized out.  */
  copy = build_decl (VAR_DECL, NULL_TREE, type);
  if ((INTEGRAL_TYPE_P (type) || POINTER_TYPE_P (type))
      && TREE_TYPE (copy) != TREE_TYPE (var))
    var = convert (type, var);
  java_add_local_var (copy);
  java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (var), copy, var));

  push_value (copy);
}

tree
build_address_of (tree value)
{
  return build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (value)), value);
}

bool
class_has_finalize_method (tree type)
{
  tree super = CLASSTYPE_SUPER (type);

  if (super == NULL_TREE)
    return false;	/* Every class with a real finalizer inherits	*/
			/* from java.lang.Object.			*/
  else
    return HAS_FINALIZER_P (type) || class_has_finalize_method (super);
}

tree
java_create_object (tree type)
{
  tree alloc_node = (class_has_finalize_method (type)
		     ? alloc_object_node
		     : alloc_no_finalizer_node);

  return build3 (CALL_EXPR, promote_type (type),
		 build_address_of (alloc_node),
		 build_tree_list (NULL_TREE, build_class_ref (type)),
		 NULL_TREE);
}
)
1368 alloc_node
= (class_has_finalize_method (type
) ? alloc_object_node
1369 : alloc_no_finalizer_node
);
1370 if (! CLASS_LOADED_P (type
))
1371 load_class (type
, 1);
1372 safe_layout_class (type
);
1373 push_value (build3 (CALL_EXPR
, promote_type (type
),
1374 build_address_of (alloc_node
),
1375 build_tree_list (NULL_TREE
, build_class_ref (type
)),

/* This returns an expression which will extract the class of an
   object.  */

tree
build_get_class (tree value)
{
  tree class_field = lookup_field (&dtable_type, get_identifier ("class"));
  tree vtable_field = lookup_field (&object_type_node,
				    get_identifier ("vtable"));
  tree tmp = build3 (COMPONENT_REF, dtable_ptr_type,
		     build_java_indirect_ref (object_type_node, value,
					      flag_check_references),
		     vtable_field, NULL_TREE);
  return build3 (COMPONENT_REF, class_ptr_type,
		 build1 (INDIRECT_REF, dtable_type, tmp),
		 class_field, NULL_TREE);
}

/* This builds the tree representation of the `instanceof' operator.
   It tries various tricks to optimize this in cases where types are
   known.  */

tree
build_instanceof (tree value, tree type)
{
  tree expr;
  tree itype = TREE_TYPE (TREE_TYPE (soft_instanceof_node));
  tree valtype = TREE_TYPE (TREE_TYPE (value));
  tree valclass = TYPE_NAME (valtype);
  tree klass;

  /* When compiling from bytecode, we need to ensure that TYPE has
     been loaded.  */
  if (CLASS_P (type) && ! CLASS_LOADED_P (type))
    {
      load_class (type, 1);
      safe_layout_class (type);
      if (! TYPE_SIZE (type) || TREE_CODE (TYPE_SIZE (type)) == ERROR_MARK)
	return error_mark_node;
    }
  klass = TYPE_NAME (type);

  if (type == object_type_node || inherits_from_p (valtype, type))
    {
      /* Anything except `null' is an instance of Object.  Likewise,
	 if the object is known to be an instance of the class, then
	 we only need to check for `null'.  */
      expr = build2 (NE_EXPR, itype, value, null_pointer_node);
    }
  else if (flag_verify_invocations
	   && ! TYPE_ARRAY_P (type)
	   && ! TYPE_ARRAY_P (valtype)
	   && DECL_P (klass) && DECL_P (valclass)
	   && ! CLASS_INTERFACE (valclass)
	   && ! CLASS_INTERFACE (klass)
	   && ! inherits_from_p (type, valtype)
	   && (CLASS_FINAL (klass)
	       || ! inherits_from_p (valtype, type)))
    {
      /* The classes are from different branches of the derivation
	 tree, so we immediately know the answer.  */
      expr = boolean_false_node;
    }
  else if (DECL_P (klass) && CLASS_FINAL (klass))
    {
      tree save = save_expr (value);
      expr = build3 (COND_EXPR, itype,
		     build2 (NE_EXPR, boolean_type_node,
			     save, null_pointer_node),
		     build2 (EQ_EXPR, itype,
			     build_get_class (save),
			     build_class_ref (type)),
		     boolean_false_node);
    }
  else
    {
      expr = build3 (CALL_EXPR, itype,
		     build_address_of (soft_instanceof_node),
		     tree_cons (NULL_TREE, value,
				build_tree_list (NULL_TREE,
						 build_class_ref (type))),
		     NULL_TREE);
    }
  TREE_SIDE_EFFECTS (expr) = TREE_SIDE_EFFECTS (value);
  return expr;
}
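
/* Illustration of the shortcuts above (editorial note): for
   `x instanceof String', String is a final class, so the test compiles
   to a null check plus a direct comparison of x's class (read through
   its vtable by build_get_class) against String.class; only the
   general case falls back to the soft_instanceof_node runtime call.  */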

static void
expand_java_INSTANCEOF (tree type)
{
  tree value = pop_value (object_ptr_type_node);
  value = build_instanceof (value, type);
  push_value (value);
}

static void
expand_java_CHECKCAST (tree type)
{
  tree value = pop_value (ptr_type_node);
  value = build3 (CALL_EXPR, promote_type (type),
		  build_address_of (soft_checkcast_node),
		  tree_cons (NULL_TREE, build_class_ref (type),
			     build_tree_list (NULL_TREE, value)),
		  NULL_TREE);
  push_value (value);
}
, int ival
, int pc
)
1489 tree local_var
, res
;
1490 tree constant_value
;
1492 flush_quick_stack ();
1493 local_var
= find_local_variable (local_var_index
, int_type_node
, pc
);
1494 constant_value
= build_int_cst (NULL_TREE
, ival
);
1495 res
= fold_build2 (PLUS_EXPR
, int_type_node
, local_var
, constant_value
);
1496 java_add_stmt (build2 (MODIFY_EXPR
, TREE_TYPE (local_var
), local_var
, res
));
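
/* Example (editorial, illustrative): the bytecode `iinc 3, -1' reaches
   here as LOCAL_VAR_INDEX == 3 and IVAL == -1, and expands to the
   equivalent of `local#3 = local#3 + (-1)' on the variable returned by
   find_local_variable.  */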

static tree
build_java_soft_divmod (enum tree_code op, tree type, tree op1, tree op2)
{
  tree call = NULL;
  tree arg1 = convert (type, op1);
  tree arg2 = convert (type, op2);

  if (type == int_type_node)
    {
      switch (op)
	{
	case TRUNC_DIV_EXPR:
	  call = soft_idiv_node;
	  break;
	case TRUNC_MOD_EXPR:
	  call = soft_irem_node;
	  break;
	default:
	  break;
	}
    }
  else if (type == long_type_node)
    {
      switch (op)
	{
	case TRUNC_DIV_EXPR:
	  call = soft_ldiv_node;
	  break;
	case TRUNC_MOD_EXPR:
	  call = soft_lrem_node;
	  break;
	default:
	  break;
	}
    }

  gcc_assert (call);
  call = build3 (CALL_EXPR, type,
		 build_address_of (call),
		 tree_cons (NULL_TREE, arg1,
			    build_tree_list (NULL_TREE, arg2)),
		 NULL_TREE);

  return call;
}

tree
build_java_binop (enum tree_code op, tree type, tree arg1, tree arg2)
{
  tree mask;
  switch (op)
    {
    case URSHIFT_EXPR:
      {
	tree u_type = java_unsigned_type (type);
	arg1 = convert (u_type, arg1);
	arg1 = build_java_binop (RSHIFT_EXPR, u_type, arg1, arg2);
	return convert (type, arg1);
      }
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
      mask = build_int_cst (NULL_TREE,
			    TYPE_PRECISION (TREE_TYPE (arg1)) - 1);
      arg2 = fold_build2 (BIT_AND_EXPR, int_type_node, arg2, mask);
      break;

    case COMPARE_L_EXPR:  /* arg1 > arg2 ?  1 : arg1 == arg2 ? 0 : -1 */
    case COMPARE_G_EXPR:  /* arg1 < arg2 ? -1 : arg1 == arg2 ? 0 :  1 */
      arg1 = save_expr (arg1);  arg2 = save_expr (arg2);
      {
	tree ifexp1 = fold_build2 (op == COMPARE_L_EXPR ? GT_EXPR : LT_EXPR,
				   boolean_type_node, arg1, arg2);
	tree ifexp2 = fold_build2 (EQ_EXPR, boolean_type_node, arg1, arg2);
	tree second_compare = fold_build3 (COND_EXPR, int_type_node,
					   ifexp2, integer_zero_node,
					   op == COMPARE_L_EXPR
					   ? integer_minus_one_node
					   : integer_one_node);
	return fold_build3 (COND_EXPR, int_type_node, ifexp1,
			    op == COMPARE_L_EXPR ? integer_one_node
			    : integer_minus_one_node,
			    second_compare);
      }
    case COMPARE_EXPR:
      arg1 = save_expr (arg1);  arg2 = save_expr (arg2);
      {
	tree ifexp1 = fold_build2 (LT_EXPR, boolean_type_node, arg1, arg2);
	tree ifexp2 = fold_build2 (GT_EXPR, boolean_type_node, arg1, arg2);
	tree second_compare = fold_build3 (COND_EXPR, int_type_node,
					   ifexp2, integer_one_node,
					   integer_zero_node);
	return fold_build3 (COND_EXPR, int_type_node,
			    ifexp1, integer_minus_one_node, second_compare);
      }
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:
      if (TREE_CODE (type) == REAL_TYPE
	  && op == TRUNC_MOD_EXPR)
	{
	  tree call;
	  if (type != double_type_node)
	    {
	      arg1 = convert (double_type_node, arg1);
	      arg2 = convert (double_type_node, arg2);
	    }
	  call = build3 (CALL_EXPR, double_type_node,
			 build_address_of (soft_fmod_node),
			 tree_cons (NULL_TREE, arg1,
				    build_tree_list (NULL_TREE, arg2)),
			 NULL_TREE);
	  if (type != double_type_node)
	    call = convert (type, call);
	  return call;
	}

      if (TREE_CODE (type) == INTEGER_TYPE
	  && flag_use_divide_subroutine
	  && ! flag_syntax_only)
	return build_java_soft_divmod (op, type, arg1, arg2);
      break;
    default:  ;
    }
  return fold_build2 (op, type, arg1, arg2);
}
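
/* Editorial note (illustrative): COMPARE_L_EXPR and COMPARE_G_EXPR
   implement the JVM fcmpl/dcmpl and fcmpg/dcmpg semantics.  When
   either operand is NaN all ordered comparisons are false, so the L
   variant above yields -1 and the G variant yields 1, matching the
   bytecode definitions; COMPARE_EXPR corresponds to lcmp.  */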

static void
expand_java_binop (tree type, enum tree_code op)
{
  tree larg, rarg;
  tree ltype = type;
  tree rtype = type;
  switch (op)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case URSHIFT_EXPR:
      rtype = int_type_node;
      rarg = pop_value (rtype);
      break;
    default:
      rarg = pop_value (rtype);
    }
  larg = pop_value (ltype);
  push_value (build_java_binop (op, type, larg, rarg));
}

/* Lookup the field named NAME in *TYPEP or its super classes.
   If not found, return NULL_TREE.
   (If the *TYPEP is not found, or if the field reference is
   ambiguous, return error_mark_node.)
   If found, return the FIELD_DECL, and set *TYPEP to the
   class containing the field. */

tree
lookup_field (tree *typep, tree name)
{
  if (CLASS_P (*typep) && !CLASS_LOADED_P (*typep))
    {
      load_class (*typep, 1);
      safe_layout_class (*typep);
      if (!TYPE_SIZE (*typep) || TREE_CODE (TYPE_SIZE (*typep)) == ERROR_MARK)
	return error_mark_node;
    }
  do
    {
      tree field, binfo, base_binfo;
      tree save_field;
      int i;

      for (field = TYPE_FIELDS (*typep); field; field = TREE_CHAIN (field))
	if (DECL_NAME (field) == name)
	  return field;

      /* Process implemented interfaces. */
      save_field = NULL_TREE;
      for (binfo = TYPE_BINFO (*typep), i = 0;
	   BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
	{
	  tree t = BINFO_TYPE (base_binfo);
	  if ((field = lookup_field (&t, name)))
	    {
	      if (save_field == field)
		continue;
	      if (save_field == NULL_TREE)
		save_field = field;
	      else
		{
		  tree i1 = DECL_CONTEXT (save_field);
		  tree i2 = DECL_CONTEXT (field);
		  error ("reference %qs is ambiguous: appears in interface %qs and interface %qs",
			 IDENTIFIER_POINTER (name),
			 IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (i1))),
			 IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (i2))));
		  return error_mark_node;
		}
	    }
	}

      if (save_field != NULL_TREE)
	return save_field;

      *typep = CLASSTYPE_SUPER (*typep);
    } while (*typep);
  return NULL_TREE;
}

/* Look up the field named NAME in object SELF_VALUE,
   which has class SELF_CLASS (a non-handle RECORD_TYPE).
   SELF_VALUE is NULL_TREE if looking for a static field. */

tree
build_field_ref (tree self_value, tree self_class, tree name)
{
  tree base_class = self_class;
  tree field_decl = lookup_field (&base_class, name);
  if (field_decl == NULL_TREE)
    {
      error ("field %qs not found", IDENTIFIER_POINTER (name));
      return error_mark_node;
    }
  if (self_value == NULL_TREE)
    {
      return build_static_field_ref (field_decl);
    }
  else
    {
      tree base_type = promote_type (base_class);

      /* CHECK is true if self_value is not the this pointer.  */
      int check = (! (DECL_P (self_value)
		      && DECL_NAME (self_value) == this_identifier_node));

      /* Determine whether a field offset from NULL will lie within
	 Page 0: this is necessary on those GNU/Linux/BSD systems that
	 trap SEGV to generate NullPointerExceptions.

	 We assume that Page 0 will be mapped with NOPERM, and that
	 memory may be allocated from any other page, so only field
	 offsets < pagesize are guaranteed to trap.  We also assume
	 the smallest page size we'll encounter is 4k bytes.  */
      if (! flag_syntax_only && check && ! flag_check_references
	  && ! flag_indirect_dispatch)
	{
	  tree field_offset = byte_position (field_decl);
	  if (! page_size)
	    page_size = size_int (4096);
	  check = ! INT_CST_LT_UNSIGNED (field_offset, page_size);
	}

      if (base_type != TREE_TYPE (self_value))
	self_value = fold_build1 (NOP_EXPR, base_type, self_value);
      if (! flag_syntax_only && flag_indirect_dispatch)
	{
	  tree otable_index
	    = build_int_cst (NULL_TREE, get_symbol_table_index
			     (field_decl, NULL_TREE,
			      &TYPE_OTABLE_METHODS (output_class)));
	  tree field_offset
	    = build4 (ARRAY_REF, integer_type_node,
		      TYPE_OTABLE_DECL (output_class), otable_index,
		      NULL_TREE, NULL_TREE);
	  tree address;

	  if (DECL_CONTEXT (field_decl) != output_class)
	    field_offset
	      = build3 (COND_EXPR, TREE_TYPE (field_offset),
			build2 (EQ_EXPR, boolean_type_node,
				field_offset, integer_zero_node),
			build3 (CALL_EXPR, void_type_node,
				build_address_of (soft_nosuchfield_node),
				build_tree_list (NULL_TREE, otable_index),
				NULL_TREE),
			field_offset);

	  field_offset = fold (convert (sizetype, field_offset));
	  self_value = java_check_reference (self_value, check);
	  address
	    = fold_build2 (PLUS_EXPR,
			   build_pointer_type (TREE_TYPE (field_decl)),
			   self_value, field_offset);
	  return fold_build1 (INDIRECT_REF, TREE_TYPE (field_decl), address);
	}

      self_value = build_java_indirect_ref (TREE_TYPE (TREE_TYPE (self_value)),
					    self_value, check);
      return fold_build3 (COMPONENT_REF, TREE_TYPE (field_decl),
			  self_value, field_decl, NULL_TREE);
    }
}
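
/* Illustration of the page-size heuristic above (editorial note): with
   the assumed 4k page, a field at offset 8 needs no explicit null
   check, since dereferencing a null `this' would fault inside page 0;
   a field at offset 5000 is not guaranteed to trap, so CHECK stays set
   and java_check_reference emits the explicit test.  */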

tree
lookup_label (int pc)
{
  tree name;
  char buf[32];
  if (pc > highest_label_pc_this_method)
    highest_label_pc_this_method = pc;
  ASM_GENERATE_INTERNAL_LABEL(buf, "LJpc=", start_label_pc_this_method + pc);
  name = get_identifier (buf);
  if (IDENTIFIER_LOCAL_VALUE (name))
    return IDENTIFIER_LOCAL_VALUE (name);
  else
    {
      /* The type of the address of a label is return_address_type_node. */
      tree decl = create_label_decl (name);
      LABEL_PC (decl) = pc;
      return pushdecl (decl);
    }
}

/* Generate a unique name for the purpose of loops and switches
   labels, and try-catch-finally blocks label or temporary variables.  */

tree
generate_name (void)
{
  static int l_number = 0;
  char buff[32];

  ASM_GENERATE_INTERNAL_LABEL(buff, "LJv", l_number);
  l_number++;
  return get_identifier (buff);
}

tree
create_label_decl (tree name)
{
  tree decl;
  decl = build_decl (LABEL_DECL, name,
		     TREE_TYPE (return_address_type_node));
  DECL_CONTEXT (decl) = current_function_decl;
  DECL_IGNORED_P (decl) = 1;
  return decl;
}

/* This maps a bytecode offset (PC) to various flags.  */
char *instruction_bits;

static void
note_label (int current_pc ATTRIBUTE_UNUSED, int target_pc)
{
  lookup_label (target_pc);
  instruction_bits[target_pc] |= BCODE_JUMP_TARGET;
}

/* Emit code to jump to TARGET_PC if VALUE1 CONDITION VALUE2,
   where CONDITION is one of the compare operators. */

static void
expand_compare (enum tree_code condition, tree value1, tree value2,
		int target_pc)
{
  tree target = lookup_label (target_pc);
  tree cond = fold_build2 (condition, boolean_type_node, value1, value2);
  java_add_stmt
    (build3 (COND_EXPR, void_type_node, java_truthvalue_conversion (cond),
	     build1 (GOTO_EXPR, void_type_node, target),
	     build_java_empty_stmt ()));
}

/* Emit code for a TEST-type opcode. */

static void
expand_test (enum tree_code condition, tree type, int target_pc)
{
  tree value1, value2;
  flush_quick_stack ();
  value1 = pop_value (type);
  value2 = (type == ptr_type_node) ? null_pointer_node : integer_zero_node;
  expand_compare (condition, value1, value2, target_pc);
}

/* Emit code for a COND-type opcode. */

static void
expand_cond (enum tree_code condition, tree type, int target_pc)
{
  tree value1, value2;
  flush_quick_stack ();
  /* note: pop values in opposite order */
  value2 = pop_value (type);
  value1 = pop_value (type);
  /* Maybe should check value1 and value2 for type compatibility ??? */
  expand_compare (condition, value1, value2, target_pc);
}

static void
expand_java_goto (int target_pc)
{
  tree target_label = lookup_label (target_pc);
  flush_quick_stack ();
  java_add_stmt (build1 (GOTO_EXPR, void_type_node, target_label));
}

static tree
expand_java_switch (tree selector, int default_pc)
{
  tree switch_expr, x;

  flush_quick_stack ();
  switch_expr = build3 (SWITCH_EXPR, TREE_TYPE (selector), selector,
			NULL_TREE, NULL_TREE);
  java_add_stmt (switch_expr);

  x = build3 (CASE_LABEL_EXPR, void_type_node, NULL_TREE, NULL_TREE,
	      create_artificial_label ());
  append_to_statement_list (x, &SWITCH_BODY (switch_expr));

  x = build1 (GOTO_EXPR, void_type_node, lookup_label (default_pc));
  append_to_statement_list (x, &SWITCH_BODY (switch_expr));

  return switch_expr;
}

static void
expand_java_add_case (tree switch_expr, int match, int target_pc)
{
  tree value, x;

  value = build_int_cst (TREE_TYPE (switch_expr), match);

  x = build3 (CASE_LABEL_EXPR, void_type_node, value, NULL_TREE,
	      create_artificial_label ());
  append_to_statement_list (x, &SWITCH_BODY (switch_expr));

  x = build1 (GOTO_EXPR, void_type_node, lookup_label (target_pc));
  append_to_statement_list (x, &SWITCH_BODY (switch_expr));
}
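
/* Illustration (editorial note): a tableswitch or lookupswitch
   bytecode becomes one SWITCH_EXPR whose body is a flat sequence of
   CASE_LABEL_EXPR/GOTO_EXPR pairs -- expand_java_switch emits the
   default pair, and expand_java_add_case appends one pair per
   (match, target_pc) entry of the instruction.  */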

static tree
pop_arguments (tree arg_types)
{
  if (arg_types == end_params_node)
    return NULL_TREE;
  if (TREE_CODE (arg_types) == TREE_LIST)
    {
      tree tail = pop_arguments (TREE_CHAIN (arg_types));
      tree type = TREE_VALUE (arg_types);
      tree arg = pop_value (type);

      /* We simply cast each argument to its proper type.  This is
	 needed since we lose type information coming out of the
	 verifier.  We also have to do this when we pop an integer
	 type that must be promoted for the function call.  */
      if (TREE_CODE (type) == POINTER_TYPE)
	arg = build1 (NOP_EXPR, type, arg);
      else if (targetm.calls.promote_prototypes (type)
	       && TYPE_PRECISION (type) < TYPE_PRECISION (integer_type_node)
	       && INTEGRAL_TYPE_P (type))
	arg = convert (integer_type_node, arg);
      return tree_cons (NULL_TREE, arg, tail);
    }
  gcc_unreachable ();
}

/* Attach to PTR (a block) the declaration found in ENTRY. */

int
attach_init_test_initialization_flags (void **entry, void *ptr)
{
  tree block = (tree)ptr;
  struct treetreehash_entry *ite = (struct treetreehash_entry *) *entry;

  if (block != error_mark_node)
    {
      if (TREE_CODE (block) == BIND_EXPR)
	{
	  tree body = BIND_EXPR_BODY (block);
	  TREE_CHAIN (ite->value) = BIND_EXPR_VARS (block);
	  BIND_EXPR_VARS (block) = ite->value;
	  body = build2 (COMPOUND_EXPR, void_type_node,
			 build1 (DECL_EXPR, void_type_node, ite->value), body);
	  BIND_EXPR_BODY (block) = body;
	}
      else
	{
	  tree body = BLOCK_SUBBLOCKS (block);
	  TREE_CHAIN (ite->value) = BLOCK_EXPR_DECLS (block);
	  BLOCK_EXPR_DECLS (block) = ite->value;
	  body = build2 (COMPOUND_EXPR, void_type_node,
			 build1 (DECL_EXPR, void_type_node, ite->value), body);
	  BLOCK_SUBBLOCKS (block) = body;
	}
    }
  return true;
}
/* Build an expression to initialize the class CLAS.
   If EXPR is non-NULL, returns an expression to first call the initializer
   (if it is needed) and then calls EXPR.  */

tree
build_class_init (tree clas, tree expr)
{
  tree init;

  /* An optimization: if CLAS is a superclass of the class we're
     compiling, we don't need to initialize it.  However, if CLAS is
     an interface, it won't necessarily be initialized, even if we
     implement it.  */
  if ((! CLASS_INTERFACE (TYPE_NAME (clas))
       && inherits_from_p (current_class, clas))
      || current_class == clas)
    return expr;

  if (always_initialize_class_p)
    {
      init = build3 (CALL_EXPR, void_type_node,
                     build_address_of (soft_initclass_node),
                     build_tree_list (NULL_TREE, build_class_ref (clas)),
                     NULL_TREE);
      TREE_SIDE_EFFECTS (init) = 1;
    }
  else
    {
      tree *init_test_decl;
      tree decl;
      init_test_decl = java_treetreehash_new
        (DECL_FUNCTION_INIT_TEST_TABLE (current_function_decl), clas);

      if (*init_test_decl == NULL)
        {
          /* Build a declaration and mark it as a flag used to track
             static class initializations. */
          decl = build_decl (VAR_DECL, NULL_TREE,
                             boolean_type_node);
          MAYBE_CREATE_VAR_LANG_DECL_SPECIFIC (decl);
          LOCAL_CLASS_INITIALIZATION_FLAG (decl) = 1;
          DECL_CONTEXT (decl) = current_function_decl;
          DECL_FUNCTION_INIT_TEST_CLASS (decl) = clas;
          /* Tell the check-init code to ignore this decl when not
             optimizing class initialization. */
          if (!STATIC_CLASS_INIT_OPT_P ())
            DECL_BIT_INDEX (decl) = -1;
          DECL_INITIAL (decl) = boolean_false_node;
          /* Don't emit any symbolic debugging info for this decl.  */
          DECL_IGNORED_P (decl) = 1;
          *init_test_decl = decl;
        }

      init = build3 (CALL_EXPR, void_type_node,
                     build_address_of (soft_initclass_node),
                     build_tree_list (NULL_TREE, build_class_ref (clas)),
                     NULL_TREE);
      TREE_SIDE_EFFECTS (init) = 1;
      init = build3 (COND_EXPR, void_type_node,
                     build2 (EQ_EXPR, boolean_type_node,
                             *init_test_decl, boolean_false_node),
                     init, integer_zero_node);
      TREE_SIDE_EFFECTS (init) = 1;
      init = build2 (COMPOUND_EXPR, TREE_TYPE (expr), init,
                     build2 (MODIFY_EXPR, boolean_type_node,
                             *init_test_decl, boolean_true_node));
      TREE_SIDE_EFFECTS (init) = 1;
    }

  if (expr != NULL_TREE)
    {
      expr = build2 (COMPOUND_EXPR, TREE_TYPE (expr), init, expr);
      TREE_SIDE_EFFECTS (expr) = 1;
      return expr;
    }
  return init;
}
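/* Rough sketch (illustration only) of the tree built above when the
   init-test optimization is used; init$flag is a stand-in name for the
   per-function flag declared above:

       if (init$flag == false)
         <call soft_initclass_node> (&CLAS.class);
       init$flag = true;
       <EXPR>;

   With always_initialize_class_p the guard and flag are omitted and the
   class-initialization call is emitted unconditionally.  */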
/* Rewrite expensive calls that require stack unwinding at runtime to
   cheaper alternatives.  The logic here performs these
   transformations:

   java.lang.Class.forName("foo") -> java.lang.Class.forName("foo", class$)
   java.lang.Class.getClassLoader() -> java.lang.Class.getClassLoader(class$)
*/

typedef struct
{
  const char *classname;
  const char *method;
  const char *signature;
  const char *new_signature;
  int flags;
  tree (*rewrite_arglist) (tree arglist);
} rewrite_rule;

/* Add __builtin_return_address(0) to the end of an arglist.  */

static tree
rewrite_arglist_getcaller (tree arglist)
{
  tree retaddr
    = (build_function_call_expr
       (built_in_decls[BUILT_IN_RETURN_ADDRESS],
        build_tree_list (NULL_TREE, integer_zero_node)));

  DECL_INLINE (current_function_decl) = 0;

  return chainon (arglist,
                  tree_cons (NULL_TREE, retaddr,
                             NULL_TREE));
}

/* Add this.class to the end of an arglist.  */

static tree
rewrite_arglist_getclass (tree arglist)
{
  return chainon (arglist,
                  tree_cons (NULL_TREE, build_class_ref (output_class),
                             NULL_TREE));
}

static rewrite_rule rules[] =
  {{"java.lang.Class", "getClassLoader", "()Ljava/lang/ClassLoader;",
    "(Ljava/lang/Class;)Ljava/lang/ClassLoader;",
    ACC_FINAL|ACC_PRIVATE, rewrite_arglist_getclass},
   {"java.lang.Class", "forName", "(Ljava/lang/String;)Ljava/lang/Class;",
    "(Ljava/lang/String;Ljava/lang/Class;)Ljava/lang/Class;",
    ACC_FINAL|ACC_PRIVATE|ACC_STATIC, rewrite_arglist_getclass},
   {"gnu.classpath.VMStackWalker", "getCallingClass", "()Ljava/lang/Class;",
    "(Lgnu/gcj/RawData;)Ljava/lang/Class;",
    ACC_FINAL|ACC_PRIVATE|ACC_STATIC, rewrite_arglist_getcaller},
   {"gnu.classpath.VMStackWalker", "getCallingClassLoader",
    "()Ljava/lang/ClassLoader;",
    "(Lgnu/gcj/RawData;)Ljava/lang/ClassLoader;",
    ACC_FINAL|ACC_PRIVATE|ACC_STATIC, rewrite_arglist_getcaller},

   {NULL, NULL, NULL, NULL, 0, NULL}};
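/* Effect of the table above in Java terms (illustration only):

       Class.forName ("foo")
   is compiled as if it were
       Class.forName ("foo", class$)      with class$ = the caller's class

   and VMStackWalker.getCallingClass () gains a trailing
   __builtin_return_address (0) argument, so the runtime can identify the
   caller without unwinding the stack.  */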
/* Scan the rules list for replacements for *METHOD_P and replace the
   args accordingly.  If the rewrite results in an access to a private
   method, update SPECIAL.  */

void
maybe_rewrite_invocation (tree *method_p, tree *arg_list_p,
                          tree *method_signature_p, tree *special)
{
  tree context = DECL_NAME (TYPE_NAME (DECL_CONTEXT (*method_p)));
  rewrite_rule *p;
  *special = NULL_TREE;

  for (p = rules; p->classname; p++)
    {
      if (get_identifier (p->classname) == context)
        {
          tree method = DECL_NAME (*method_p);
          if (get_identifier (p->method) == method
              && get_identifier (p->signature) == *method_signature_p)
            {
              tree maybe_method
                = lookup_java_method (DECL_CONTEXT (*method_p),
                                      method,
                                      get_identifier (p->new_signature));
              if (! maybe_method && ! flag_verify_invocations)
                {
                  maybe_method
                    = add_method (DECL_CONTEXT (*method_p), p->flags,
                                  method, get_identifier (p->new_signature));
                  DECL_EXTERNAL (maybe_method) = 1;
                }
              *method_p = maybe_method;
              gcc_assert (*method_p);
              *arg_list_p = p->rewrite_arglist (*arg_list_p);
              *method_signature_p = get_identifier (p->new_signature);
              *special = integer_one_node;

              break;
            }
        }
    }
}
tree
build_known_method_ref (tree method, tree method_type ATTRIBUTE_UNUSED,
                        tree self_type, tree method_signature ATTRIBUTE_UNUSED,
                        tree arg_list ATTRIBUTE_UNUSED, tree special)
{
  tree func;
  if (is_compiled_class (self_type))
    {
      /* With indirect dispatch we have to use indirect calls for all
         publicly visible methods or gcc will use PLT indirections
         to reach them.  We also have to use indirect dispatch for all
         external methods.  */
      if (! flag_indirect_dispatch
          || (! DECL_EXTERNAL (method) && ! TREE_PUBLIC (method)))
        {
          func = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (method)),
                         method);
        }
      else
        {
          tree table_index
            = build_int_cst (NULL_TREE,
                             (get_symbol_table_index
                              (method, special,
                               &TYPE_ATABLE_METHODS (output_class))));
          func
            = build4 (ARRAY_REF,
                      TREE_TYPE (TREE_TYPE (TYPE_ATABLE_DECL (output_class))),
                      TYPE_ATABLE_DECL (output_class), table_index,
                      NULL_TREE, NULL_TREE);
        }
      func = convert (method_ptr_type_node, func);
    }
  else
    {
      /* We don't know whether the method has been (statically) compiled.
         Compile this code to get a reference to the method's code:

         SELF_TYPE->methods[METHOD_INDEX].ncode

      */

      int method_index = 0;
      tree meth, ref;

      /* The method might actually be declared in some superclass, so
         we have to use its class context, not the caller's notion of
         where the method is.  */
      self_type = DECL_CONTEXT (method);
      ref = build_class_ref (self_type);
      ref = build1 (INDIRECT_REF, class_type_node, ref);
      if (ncode_ident == NULL_TREE)
        ncode_ident = get_identifier ("ncode");
      if (methods_ident == NULL_TREE)
        methods_ident = get_identifier ("methods");
      ref = build3 (COMPONENT_REF, method_ptr_type_node, ref,
                    lookup_field (&class_type_node, methods_ident),
                    NULL_TREE);
      for (meth = TYPE_METHODS (self_type);
           ; meth = TREE_CHAIN (meth))
        {
          if (method == meth)
            break;
          if (meth == NULL_TREE)
            fatal_error ("method '%s' not found in class",
                         IDENTIFIER_POINTER (DECL_NAME (method)));
          method_index++;
        }
      method_index *= int_size_in_bytes (method_type_node);
      ref = fold_build2 (PLUS_EXPR, method_ptr_type_node,
                         ref, build_int_cst (NULL_TREE, method_index));
      ref = build1 (INDIRECT_REF, method_type_node, ref);
      func = build3 (COMPONENT_REF, nativecode_ptr_type_node,
                     ref, lookup_field (&method_type_node, ncode_ident),
                     NULL_TREE);
    }
  return func;
}
tree
invoke_build_dtable (int is_invoke_interface, tree arg_list)
{
  tree dtable, objectref;

  TREE_VALUE (arg_list) = save_expr (TREE_VALUE (arg_list));

  /* If we're dealing with interfaces and if the objectref
     argument is an array then get the dispatch table of the class
     Object rather than the one from the objectref.  */
  objectref = (is_invoke_interface
               && is_array_type_p (TREE_TYPE (TREE_VALUE (arg_list)))
               ? build_class_ref (object_type_node) : TREE_VALUE (arg_list));

  if (dtable_ident == NULL_TREE)
    dtable_ident = get_identifier ("vtable");
  dtable = build_java_indirect_ref (object_type_node, objectref,
                                    flag_check_references);
  dtable = build3 (COMPONENT_REF, dtable_ptr_type, dtable,
                   lookup_field (&object_type_node, dtable_ident), NULL_TREE);

  return dtable;
}
/* Determine the index in SYMBOL_TABLE for a reference to the decl
   T.  If this decl has not been seen before, it will be added to the
   [oa]table_methods.  If it has, the existing table slot will be
   reused.  */

int
get_symbol_table_index (tree t, tree special,
                        tree *symbol_table)
{
  int i = 1;
  tree method_list;

  if (*symbol_table == NULL_TREE)
    {
      *symbol_table = build_tree_list (special, t);
      return 1;
    }

  method_list = *symbol_table;

  while (1)
    {
      tree value = TREE_VALUE (method_list);
      tree purpose = TREE_PURPOSE (method_list);
      if (value == t && purpose == special)
        return i;
      i++;
      if (TREE_CHAIN (method_list) == NULL_TREE)
        break;
      else
        method_list = TREE_CHAIN (method_list);
    }

  TREE_CHAIN (method_list) = build_tree_list (special, t);
  return i;
}
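/* Usage note (sketch): the first distinct (T, SPECIAL) pair recorded in a
   table gets index 1, the next index 2, and so on; asking again for a pair
   that is already present returns its existing slot, so each referenced
   symbol occupies exactly one entry of the generated table.  */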
tree
build_invokevirtual (tree dtable, tree method, tree special)
{
  tree func;
  tree nativecode_ptr_ptr_type_node
    = build_pointer_type (nativecode_ptr_type_node);
  tree method_index;
  tree otable_index;

  if (flag_indirect_dispatch)
    {
      gcc_assert (! CLASS_INTERFACE (TYPE_NAME (DECL_CONTEXT (method))));

      otable_index
        = build_int_cst (NULL_TREE, get_symbol_table_index
                         (method, special,
                          &TYPE_OTABLE_METHODS (output_class)));
      method_index = build4 (ARRAY_REF, integer_type_node,
                             TYPE_OTABLE_DECL (output_class),
                             otable_index, NULL_TREE, NULL_TREE);
    }
  else
    {
      /* We fetch the DECL_VINDEX field directly here, rather than
         using get_method_index().  DECL_VINDEX is the true offset
         from the vtable base to a method, regardless of any extra
         words inserted at the start of the vtable.  */
      method_index = DECL_VINDEX (method);
      method_index = size_binop (MULT_EXPR, method_index,
                                 TYPE_SIZE_UNIT (nativecode_ptr_ptr_type_node));
      if (TARGET_VTABLE_USES_DESCRIPTORS)
        method_index = size_binop (MULT_EXPR, method_index,
                                   size_int (TARGET_VTABLE_USES_DESCRIPTORS));
    }

  func = fold_build2 (PLUS_EXPR, nativecode_ptr_ptr_type_node, dtable,
                      convert (nativecode_ptr_ptr_type_node, method_index));

  if (TARGET_VTABLE_USES_DESCRIPTORS)
    func = build1 (NOP_EXPR, nativecode_ptr_type_node, func);
  else
    func = build1 (INDIRECT_REF, nativecode_ptr_type_node, func);

  return func;
}
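/* What the non-indirect branch above computes, written as plain C
   (illustration only):

       void **slot = (void **) ((char *) dtable + vindex * sizeof (void *));
       func = *slot;

   with vindex = DECL_VINDEX (method).  Under -findirect-dispatch the byte
   offset is instead fetched at run time from the class's otable.  */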
static GTY(()) tree class_ident;

tree
build_invokeinterface (tree dtable, tree method)
{
  tree lookup_arg;
  tree interface;
  tree idx;

  /* We expand invokeinterface here.  */

  if (class_ident == NULL_TREE)
    class_ident = get_identifier ("class");

  dtable = build_java_indirect_ref (dtable_type, dtable,
                                    flag_check_references);
  dtable = build3 (COMPONENT_REF, class_ptr_type, dtable,
                   lookup_field (&dtable_type, class_ident), NULL_TREE);

  interface = DECL_CONTEXT (method);
  gcc_assert (CLASS_INTERFACE (TYPE_NAME (interface)));
  layout_class_methods (interface);

  if (flag_indirect_dispatch)
    {
      int itable_index
        = 2 * (get_symbol_table_index
               (method, NULL_TREE, &TYPE_ITABLE_METHODS (output_class)));
      interface
        = build4 (ARRAY_REF,
                  TREE_TYPE (TREE_TYPE (TYPE_ITABLE_DECL (output_class))),
                  TYPE_ITABLE_DECL (output_class),
                  build_int_cst (NULL_TREE, itable_index-1),
                  NULL_TREE, NULL_TREE);
      idx
        = build4 (ARRAY_REF,
                  TREE_TYPE (TREE_TYPE (TYPE_ITABLE_DECL (output_class))),
                  TYPE_ITABLE_DECL (output_class),
                  build_int_cst (NULL_TREE, itable_index),
                  NULL_TREE, NULL_TREE);
      interface = convert (class_ptr_type, interface);
      idx = convert (integer_type_node, idx);
    }
  else
    {
      idx = build_int_cst (NULL_TREE,
                           get_interface_method_index (method, interface));
      interface = build_class_ref (interface);
    }

  lookup_arg = tree_cons (NULL_TREE, dtable,
                          tree_cons (NULL_TREE, interface,
                                     build_tree_list (NULL_TREE, idx)));

  return build3 (CALL_EXPR, ptr_type_node,
                 build_address_of (soft_lookupinterfacemethod_node),
                 lookup_arg, NULL_TREE);
}
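/* The tree returned above amounts to a runtime lookup of roughly this
   shape (sketch only; lookup_interface_method is a stand-in name for the
   helper bound to soft_lookupinterfacemethod_node):

       code = lookup_interface_method (objectref->vtable->class,
                                       interface, idx);

   where idx is a compile-time interface method index or, under
   -findirect-dispatch, a value read from the itable at run time.  */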
/* Expand one of the invoke_* opcodes.
   OPCODE is the specific opcode.
   METHOD_REF_INDEX is an index into the constant pool.
   NARGS is the number of arguments, or -1 if not specified.  */

static void
expand_invoke (int opcode, int method_ref_index, int nargs ATTRIBUTE_UNUSED)
{
  tree method_signature
    = COMPONENT_REF_SIGNATURE (&current_jcf->cpool, method_ref_index);
  tree method_name = COMPONENT_REF_NAME (&current_jcf->cpool,
                                         method_ref_index);
  tree self_type
    = get_class_constant (current_jcf,
                          COMPONENT_REF_CLASS_INDEX (&current_jcf->cpool,
                                                     method_ref_index));
  const char *const self_name
    = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (self_type)));
  tree call, func, method, arg_list, method_type;
  tree check = NULL_TREE;

  tree special = NULL_TREE;

  if (! CLASS_LOADED_P (self_type))
    {
      load_class (self_type, 1);
      safe_layout_class (self_type);
      if (TREE_CODE (TYPE_SIZE (self_type)) == ERROR_MARK)
        fatal_error ("failed to find class '%s'", self_name);
    }
  layout_class_methods (self_type);

  if (ID_INIT_P (method_name))
    method = lookup_java_constructor (self_type, method_signature);
  else
    method = lookup_java_method (self_type, method_name, method_signature);

  /* We've found a method in a class other than the one in which it
     was wanted.  This can happen if, for instance, we're trying to
     compile invokespecial super.equals().
     FIXME: This is a kludge.  Rather than nullifying the result, we
     should change lookup_java_method() so that it doesn't search the
     superclass chain when we're BC-compiling.  */
  if (! flag_verify_invocations
      && method
      && ! TYPE_ARRAY_P (self_type)
      && self_type != DECL_CONTEXT (method))
    method = NULL_TREE;

  /* We've found a method in an interface, but this isn't an interface
     call.  */
  if (opcode != OPCODE_invokeinterface
      && method
      && (CLASS_INTERFACE (TYPE_NAME (DECL_CONTEXT (method)))))
    method = NULL_TREE;

  /* We've found a non-interface method but we are making an
     interface call.  This can happen if the interface overrides a
     method in Object.  */
  if (! flag_verify_invocations
      && opcode == OPCODE_invokeinterface
      && method
      && ! CLASS_INTERFACE (TYPE_NAME (DECL_CONTEXT (method))))
    method = NULL_TREE;

  if (method == NULL_TREE)
    {
      if (flag_verify_invocations || ! flag_indirect_dispatch)
        {
          error ("class '%s' has no method named '%s' matching signature '%s'",
                 self_name,
                 IDENTIFIER_POINTER (method_name),
                 IDENTIFIER_POINTER (method_signature));
        }
      else
        {
          int flags = ACC_PUBLIC;
          if (opcode == OPCODE_invokestatic)
            flags |= ACC_STATIC;
          if (opcode == OPCODE_invokeinterface)
            {
              flags |= ACC_INTERFACE | ACC_ABSTRACT;
              CLASS_INTERFACE (TYPE_NAME (self_type)) = 1;
            }
          method = add_method (self_type, flags, method_name,
                               method_signature);
          DECL_ARTIFICIAL (method) = 1;
          METHOD_DUMMY (method) = 1;
          layout_class_method (self_type, NULL,
                               method, NULL);
        }
    }

  /* invokestatic can't be used on an instance or abstract method, and
     the other invoke opcodes can't be used on a static method.  */
  if (method != NULL_TREE)
    {
      if (opcode == OPCODE_invokestatic)
        {
          if (!METHOD_STATIC (method))
            {
              error ("invokestatic on non static method");
              method = NULL_TREE;
            }
          else if (METHOD_ABSTRACT (method))
            {
              error ("invokestatic on abstract method");
              method = NULL_TREE;
            }
        }
      else
        {
          if (METHOD_STATIC (method))
            {
              error ("invoke[non-static] on static method");
              method = NULL_TREE;
            }
        }
    }

  if (method == NULL_TREE)
    {
      /* If we got here, we emitted an error message above.  So we
         just pop the arguments, push a properly-typed zero, and
         continue.  */
      method_type = get_type_from_signature (method_signature);
      pop_arguments (TYPE_ARG_TYPES (method_type));
      if (opcode != OPCODE_invokestatic)
        pop_type (self_type);
      method_type = promote_type (TREE_TYPE (method_type));
      push_value (convert (method_type, integer_zero_node));
      return;
    }

  method_type = TREE_TYPE (method);
  arg_list = pop_arguments (TYPE_ARG_TYPES (method_type));
  flush_quick_stack ();

  maybe_rewrite_invocation (&method, &arg_list, &method_signature,
                            &special);

  if (opcode == OPCODE_invokestatic)
    func = build_known_method_ref (method, method_type, self_type,
                                   method_signature, arg_list, special);
  else if (opcode == OPCODE_invokespecial
           || (opcode == OPCODE_invokevirtual
               && (METHOD_PRIVATE (method)
                   || METHOD_FINAL (method)
                   || CLASS_FINAL (TYPE_NAME (self_type)))))
    {
      /* If the object for the method call is null, we throw an
         exception.  We don't do this if the object is the current
         method's `this'.  In other cases we just rely on an
         optimization pass to eliminate redundant checks.  FIXME:
         Unfortunately there doesn't seem to be a way to determine
         what the current method is right now.
         We do omit the check if we're calling <init>.  */
      /* We use a SAVE_EXPR here to make sure we only evaluate
         the new `self' expression once.  */
      tree save_arg = save_expr (TREE_VALUE (arg_list));
      TREE_VALUE (arg_list) = save_arg;
      check = java_check_reference (save_arg, ! DECL_INIT_P (method));
      func = build_known_method_ref (method, method_type, self_type,
                                     method_signature, arg_list, special);
    }
  else
    {
      tree dtable = invoke_build_dtable (opcode == OPCODE_invokeinterface,
                                         arg_list);
      if (opcode == OPCODE_invokevirtual)
        func = build_invokevirtual (dtable, method, special);
      else
        func = build_invokeinterface (dtable, method);
    }

  if (TREE_CODE (func) == ADDR_EXPR)
    TREE_TYPE (func) = build_pointer_type (method_type);
  else
    func = build1 (NOP_EXPR, build_pointer_type (method_type), func);

  call = build3 (CALL_EXPR, TREE_TYPE (method_type),
                 func, arg_list, NULL_TREE);
  TREE_SIDE_EFFECTS (call) = 1;
  call = check_for_builtin (method, call);

  if (check != NULL_TREE)
    {
      call = build2 (COMPOUND_EXPR, TREE_TYPE (call), check, call);
      TREE_SIDE_EFFECTS (call) = 1;
    }

  if (TREE_CODE (TREE_TYPE (method_type)) == VOID_TYPE)
    java_add_stmt (call);
  else
    {
      push_value (call);
      flush_quick_stack ();
    }
}
/* Create a stub which will be put into the vtable but which will call
   a JNI function.  */

tree
build_jni_stub (tree method)
{
  tree jnifunc, call, args, body, lookup_arg, method_sig, arg_types;
  tree jni_func_type, tem;
  tree env_var, res_var = NULL_TREE, block;
  tree method_args, res_type;
  tree meth_var;
  tree bind;

  int args_size = 0;

  tree klass = DECL_CONTEXT (method);
  int from_class = ! CLASS_FROM_SOURCE_P (klass);
  klass = build_class_ref (klass);

  gcc_assert (METHOD_NATIVE (method) && flag_jni);

  DECL_ARTIFICIAL (method) = 1;
  DECL_EXTERNAL (method) = 0;

  env_var = build_decl (VAR_DECL, get_identifier ("env"), ptr_type_node);
  DECL_CONTEXT (env_var) = method;

  if (TREE_TYPE (TREE_TYPE (method)) != void_type_node)
    {
      res_var = build_decl (VAR_DECL, get_identifier ("res"),
                            TREE_TYPE (TREE_TYPE (method)));
      DECL_CONTEXT (res_var) = method;
      TREE_CHAIN (env_var) = res_var;
    }

  meth_var = build_decl (VAR_DECL, get_identifier ("meth"), ptr_type_node);
  TREE_STATIC (meth_var) = 1;
  TREE_PUBLIC (meth_var) = 0;
  DECL_EXTERNAL (meth_var) = 0;
  DECL_CONTEXT (meth_var) = method;
  DECL_ARTIFICIAL (meth_var) = 1;
  DECL_INITIAL (meth_var) = null_pointer_node;
  TREE_USED (meth_var) = 1;
  chainon (env_var, meth_var);
  build_result_decl (method);

  /* One strange way that the front ends are different is that they
     store arguments differently.  */
  if (from_class)
    method_args = DECL_ARGUMENTS (method);
  else
    method_args = BLOCK_EXPR_DECLS (DECL_FUNCTION_BODY (method));
  block = build_block (env_var, NULL_TREE, method_args, NULL_TREE);
  TREE_SIDE_EFFECTS (block) = 1;
  /* When compiling from source we don't set the type of the block,
     because that will prevent patch_return from ever being run.  */
  if (from_class)
    TREE_TYPE (block) = TREE_TYPE (TREE_TYPE (method));

  /* Compute the local `env' by calling _Jv_GetJNIEnvNewFrame.  */
  body = build2 (MODIFY_EXPR, ptr_type_node, env_var,
                 build3 (CALL_EXPR, ptr_type_node,
                         build_address_of (soft_getjnienvnewframe_node),
                         build_tree_list (NULL_TREE, klass),
                         NULL_TREE));
  CAN_COMPLETE_NORMALLY (body) = 1;

  /* All the arguments to this method become arguments to the
     underlying JNI function.  If we had to wrap object arguments in a
     special way, we would do that here.  */
  args = NULL_TREE;
  for (tem = method_args; tem != NULL_TREE; tem = TREE_CHAIN (tem))
    {
      int arg_bits = TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (tem)));
#ifdef PARM_BOUNDARY
      arg_bits = (((arg_bits + PARM_BOUNDARY - 1) / PARM_BOUNDARY)
                  * PARM_BOUNDARY);
#endif
      args_size += (arg_bits / BITS_PER_UNIT);

      args = tree_cons (NULL_TREE, tem, args);
    }
  args = nreverse (args);
  arg_types = TYPE_ARG_TYPES (TREE_TYPE (method));

  /* For a static method the second argument is the class.  For a
     non-static method the second argument is `this'; that is already
     available in the argument list.  */
  if (METHOD_STATIC (method))
    {
      args_size += int_size_in_bytes (TREE_TYPE (klass));
      args = tree_cons (NULL_TREE, klass, args);
      arg_types = tree_cons (NULL_TREE, object_ptr_type_node, arg_types);
    }

  /* The JNIEnv structure is the first argument to the JNI function.  */
  args_size += int_size_in_bytes (TREE_TYPE (env_var));
  args = tree_cons (NULL_TREE, env_var, args);
  arg_types = tree_cons (NULL_TREE, ptr_type_node, arg_types);

  /* We call _Jv_LookupJNIMethod to find the actual underlying
     function pointer.  _Jv_LookupJNIMethod will throw the appropriate
     exception if this function is not found at runtime.  */
  tem = build_tree_list (NULL_TREE, build_int_cst (NULL_TREE, args_size));
  method_sig = build_java_signature (TREE_TYPE (method));
  lookup_arg = tree_cons (NULL_TREE,
                          build_utf8_ref (unmangle_classname
                                          (IDENTIFIER_POINTER (method_sig),
                                           IDENTIFIER_LENGTH (method_sig))),
                          tem);
  tem = DECL_NAME (method);
  lookup_arg
    = tree_cons (NULL_TREE, klass,
                 tree_cons (NULL_TREE, build_utf8_ref (tem), lookup_arg));

  tem = build_function_type (TREE_TYPE (TREE_TYPE (method)), arg_types);

#ifdef MODIFY_JNI_METHOD_CALL
  tem = MODIFY_JNI_METHOD_CALL (tem);
#endif

  jni_func_type = build_pointer_type (tem);

  jnifunc = build3 (COND_EXPR, ptr_type_node,
                    meth_var, meth_var,
                    build2 (MODIFY_EXPR, ptr_type_node, meth_var,
                            build3 (CALL_EXPR, ptr_type_node,
                                    build_address_of
                                      (soft_lookupjnimethod_node),
                                    lookup_arg, NULL_TREE)));

  /* Now we make the actual JNI call via the resulting function
     pointer.  */
  call = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (method)),
                 build1 (NOP_EXPR, jni_func_type, jnifunc),
                 args, NULL_TREE);

  /* If the JNI call returned a result, capture it here.  If we had to
     unwrap JNI object results, we would do that here.  */
  if (res_var != NULL_TREE)
    {
      /* If the call returns an object, it may return a JNI weak
         reference, in which case we must unwrap it.  */
      if (! JPRIMITIVE_TYPE_P (TREE_TYPE (TREE_TYPE (method))))
        call = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (method)),
                       build_address_of (soft_unwrapjni_node),
                       build_tree_list (NULL_TREE, call),
                       NULL_TREE);
      call = build2 (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (method)),
                     res_var, call);
    }

  TREE_SIDE_EFFECTS (call) = 1;
  CAN_COMPLETE_NORMALLY (call) = 1;

  body = build2 (COMPOUND_EXPR, void_type_node, body, call);
  TREE_SIDE_EFFECTS (body) = 1;

  /* Now free the environment we allocated.  */
  call = build3 (CALL_EXPR, ptr_type_node,
                 build_address_of (soft_jnipopsystemframe_node),
                 build_tree_list (NULL_TREE, env_var),
                 NULL_TREE);
  TREE_SIDE_EFFECTS (call) = 1;
  CAN_COMPLETE_NORMALLY (call) = 1;
  body = build2 (COMPOUND_EXPR, void_type_node, body, call);
  TREE_SIDE_EFFECTS (body) = 1;

  /* Finally, do the return.  */
  res_type = void_type_node;
  if (res_var != NULL_TREE)
    {
      tree drt;
      gcc_assert (DECL_RESULT (method));
      /* Make sure we copy the result variable to the actual
         result.  We use the type of the DECL_RESULT because it
         might be different from the return type of the function:
         it might be promoted.  */
      drt = TREE_TYPE (DECL_RESULT (method));
      if (drt != TREE_TYPE (res_var))
        res_var = build1 (CONVERT_EXPR, drt, res_var);
      res_var = build2 (MODIFY_EXPR, drt, DECL_RESULT (method), res_var);
      TREE_SIDE_EFFECTS (res_var) = 1;
    }

  body = build2 (COMPOUND_EXPR, void_type_node, body,
                 build1 (RETURN_EXPR, res_type, res_var));
  TREE_SIDE_EFFECTS (body) = 1;

  bind = build3 (BIND_EXPR, void_type_node, BLOCK_VARS (block),
                 body, block);
  return bind;
}
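/* Shape of the stub built above, as pseudo-C (illustration only; `meth'
   caches the looked-up function pointer, and the runtime entry points are
   the soft_* nodes referenced in the code):

       env = _Jv_GetJNIEnvNewFrame (klass);
       if (meth == NULL)
         meth = _Jv_LookupJNIMethod (klass, name, signature, args_size);
       res = (*meth) (env, <klass if static>, original arguments...);
       <pop the JNI frame via soft_jnipopsystemframe_node> (env);
       return res;
*/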
/* Given lvalue EXP, return a volatile expression that references the
   same object.  */

tree
java_modify_addr_for_volatile (tree exp)
{
  tree exp_type = TREE_TYPE (exp);
  tree v_type
    = build_qualified_type (exp_type,
                            TYPE_QUALS (exp_type) | TYPE_QUAL_VOLATILE);
  tree addr = build_fold_addr_expr (exp);
  v_type = build_pointer_type (v_type);
  addr = fold_convert (v_type, addr);
  exp = build_fold_indirect_ref (addr);
  return exp;
}
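/* In effect the access EXP of type T is rewritten as *(volatile T *)&EXP,
   so subsequent loads and stores through the returned tree keep their
   ordering (illustration of the transformation only).  */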
/* Expand an operation to extract from or store into a field.
   IS_STATIC is 1 iff the field is static.
   IS_PUTTING is 1 for putting into a field;  0 for getting from the field.
   FIELD_REF_INDEX is an index into the constant pool.  */

static void
expand_java_field_op (int is_static, int is_putting, int field_ref_index)
{
  tree self_type
    = get_class_constant (current_jcf,
                          COMPONENT_REF_CLASS_INDEX (&current_jcf->cpool,
                                                     field_ref_index));
  const char *self_name
    = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (self_type)));
  tree field_name = COMPONENT_REF_NAME (&current_jcf->cpool, field_ref_index);
  tree field_signature = COMPONENT_REF_SIGNATURE (&current_jcf->cpool,
                                                  field_ref_index);
  tree field_type = get_type_from_signature (field_signature);
  tree new_value = is_putting ? pop_value (field_type) : NULL_TREE;
  tree field_ref;
  tree field_decl;
  tree modify_expr;
  int is_error = 0;
  tree original_self_type = self_type;

  if (! CLASS_LOADED_P (self_type))
    load_class (self_type, 1);
  field_decl = lookup_field (&self_type, field_name);
  if (field_decl == error_mark_node)
    {
      is_error = 1;
    }
  else if (field_decl == NULL_TREE)
    {
      if (! flag_verify_invocations)
        {
          int flags = ACC_PUBLIC;
          if (is_static)
            flags |= ACC_STATIC;
          self_type = original_self_type;
          field_decl = add_field (original_self_type, field_name,
                                  field_type, flags);
          DECL_ARTIFICIAL (field_decl) = 1;
          DECL_IGNORED_P (field_decl) = 1;
          /* FIXME: We should be pessimistic about volatility.  We
             don't know one way or another, but this is safe.
             However, doing this has bad effects on code quality.  We
             need to look at better ways to do this.  */
          TREE_THIS_VOLATILE (field_decl) = 1;
        }
      else
        {
          error ("missing field '%s' in '%s'",
                 IDENTIFIER_POINTER (field_name), self_name);
          is_error = 1;
        }
    }
  else if (build_java_signature (TREE_TYPE (field_decl)) != field_signature)
    {
      error ("mismatching signature for field '%s' in '%s'",
             IDENTIFIER_POINTER (field_name), self_name);
      is_error = 1;
    }
  field_ref = is_static ? NULL_TREE : pop_value (self_type);
  if (is_error)
    {
      if (! is_putting)
        push_value (convert (field_type, integer_zero_node));
      flush_quick_stack ();
      return;
    }

  field_ref = build_field_ref (field_ref, self_type, field_name);
  if (is_static
      && ! flag_indirect_dispatch)
    {
      tree context = DECL_CONTEXT (field_ref);
      if (context != self_type && CLASS_INTERFACE (TYPE_NAME (context)))
        field_ref = build_class_init (context, field_ref);
      else
        field_ref = build_class_init (self_type, field_ref);
    }
  if (is_putting)
    {
      flush_quick_stack ();
      if (FIELD_FINAL (field_decl))
        {
          if (DECL_CONTEXT (field_decl) != current_class)
            error ("assignment to final field %q+D not in field's class",
                   field_decl);
          /* We used to check for assignments to final fields not
             occurring in the class initializer or in a constructor
             here.  However, this constraint doesn't seem to be
             enforced by the JVM.  */
        }

      if (TREE_THIS_VOLATILE (field_decl))
        field_ref = java_modify_addr_for_volatile (field_ref);

      modify_expr = build2 (MODIFY_EXPR, TREE_TYPE (field_ref),
                            field_ref, new_value);

      if (TREE_THIS_VOLATILE (field_decl))
        java_add_stmt
          (build3
           (CALL_EXPR, void_type_node,
            build_address_of (built_in_decls[BUILT_IN_SYNCHRONIZE]),
            NULL_TREE, NULL_TREE));

      java_add_stmt (modify_expr);
    }
  else
    {
      tree temp = build_decl (VAR_DECL, NULL_TREE, TREE_TYPE (field_ref));
      java_add_local_var (temp);

      if (TREE_THIS_VOLATILE (field_decl))
        field_ref = java_modify_addr_for_volatile (field_ref);

      modify_expr
        = build2 (MODIFY_EXPR, TREE_TYPE (field_ref), temp, field_ref);
      java_add_stmt (modify_expr);

      if (TREE_THIS_VOLATILE (field_decl))
        java_add_stmt
          (build3
           (CALL_EXPR, void_type_node,
            build_address_of (built_in_decls[BUILT_IN_SYNCHRONIZE]),
            NULL_TREE, NULL_TREE));

      push_value (temp);
    }
  TREE_THIS_VOLATILE (field_ref) = TREE_THIS_VOLATILE (field_decl);
}
static void
load_type_state (tree label)
{
  int i;
  tree vec = LABEL_TYPE_STATE (label);
  int cur_length = TREE_VEC_LENGTH (vec);
  stack_pointer = cur_length - DECL_MAX_LOCALS (current_function_decl);
  for (i = 0; i < cur_length; i++)
    type_map[i] = TREE_VEC_ELT (vec, i);
}
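/* Worked example (illustration only): with DECL_MAX_LOCALS = 3 and a saved
   type vector of length 5, the first three entries describe the locals and
   the remaining two the operand stack, so stack_pointer becomes 5 - 3 = 2.  */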
/* Go over METHOD's bytecode and note instruction starts in
   instruction_bits[].  */

void
note_instructions (JCF *jcf, tree method)
{
  int PC;
  unsigned char* byte_ops;
  long length = DECL_CODE_LENGTH (method);

  int saw_index;
  jint INT_temp;

#undef RET /* Defined by config/i386/i386.h */
#define BCODE byte_ops
#define BYTE_type_node byte_type_node
#define SHORT_type_node short_type_node
#define INT_type_node int_type_node
#define LONG_type_node long_type_node
#define CHAR_type_node char_type_node
#define PTR_type_node ptr_type_node
#define FLOAT_type_node float_type_node
#define DOUBLE_type_node double_type_node
#define VOID_type_node void_type_node
#define CONST_INDEX_1 (saw_index = 1, IMMEDIATE_u1)
#define CONST_INDEX_2 (saw_index = 1, IMMEDIATE_u2)
#define VAR_INDEX_1 (saw_index = 1, IMMEDIATE_u1)
#define VAR_INDEX_2 (saw_index = 1, IMMEDIATE_u2)

#define CHECK_PC_IN_RANGE(PC) ((void)1) /* Already handled by verifier. */

  JCF_SEEK (jcf, DECL_CODE_OFFSET (method));
  byte_ops = jcf->read_ptr;
  instruction_bits = xrealloc (instruction_bits, length + 1);
  memset (instruction_bits, 0, length + 1);

  /* This pass figures out which PC can be the targets of jumps. */
  for (PC = 0; PC < length;)
    {
      int oldpc = PC; /* PC at instruction start. */
      instruction_bits[PC] |= BCODE_INSTRUCTION_START;
      switch (byte_ops[PC++])
        {
#define JAVAOP(OPNAME, OPCODE, OPKIND, OPERAND_TYPE, OPERAND_VALUE) \
        case OPCODE: \
          PRE_##OPKIND(OPERAND_TYPE, OPERAND_VALUE); \
          break;

#define NOTE_LABEL(PC) note_label(oldpc, PC)

#define PRE_PUSHC(OPERAND_TYPE, OPERAND_VALUE) (void)(OPERAND_VALUE);
#define PRE_LOAD(OPERAND_TYPE, OPERAND_VALUE) (void)(OPERAND_VALUE);
#define PRE_STORE(OPERAND_TYPE, OPERAND_VALUE) (void)(OPERAND_VALUE);
#define PRE_STACK(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
#define PRE_UNOP(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
#define PRE_BINOP(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
#define PRE_CONVERT(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
#define PRE_CONVERT2(OPERAND_TYPE, OPERAND_VALUE) /* nothing */

#define PRE_SPECIAL(OPERAND_TYPE, INSTRUCTION) \
  PRE_SPECIAL_##INSTRUCTION(OPERAND_TYPE)
#define PRE_SPECIAL_IINC(OPERAND_TYPE) \
  ((void) IMMEDIATE_u1, (void) IMMEDIATE_s1)
#define PRE_SPECIAL_ENTER(IGNORE) /* nothing */
#define PRE_SPECIAL_EXIT(IGNORE) /* nothing */
#define PRE_SPECIAL_THROW(IGNORE) /* nothing */
#define PRE_SPECIAL_BREAK(IGNORE) /* nothing */

/* two forms of wide instructions */
#define PRE_SPECIAL_WIDE(IGNORE) \
  { \
    int modified_opcode = IMMEDIATE_u1; \
    if (modified_opcode == OPCODE_iinc) \
      { \
        (void) IMMEDIATE_u2;  /* indexbyte1 and indexbyte2 */ \
        (void) IMMEDIATE_s2;  /* constbyte1 and constbyte2 */ \
      } \
    else \
      { \
        (void) IMMEDIATE_u2;  /* indexbyte1 and indexbyte2 */ \
      } \
  }

#define PRE_IMPL(IGNORE1, IGNORE2) /* nothing */

#define PRE_MONITOR(OPERAND_TYPE, OPERAND_VALUE) /* nothing */

#define PRE_RETURN(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
#define PRE_ARRAY(OPERAND_TYPE, SUBOP) \
          PRE_ARRAY_##SUBOP(OPERAND_TYPE)
#define PRE_ARRAY_LOAD(TYPE) /* nothing */
#define PRE_ARRAY_STORE(TYPE) /* nothing */
#define PRE_ARRAY_LENGTH(TYPE) /* nothing */
#define PRE_ARRAY_NEW(TYPE) PRE_ARRAY_NEW_##TYPE
#define PRE_ARRAY_NEW_NUM ((void) IMMEDIATE_u1)
#define PRE_ARRAY_NEW_PTR ((void) IMMEDIATE_u2)
#define PRE_ARRAY_NEW_MULTI ((void) IMMEDIATE_u2, (void) IMMEDIATE_u1)

#define PRE_TEST(OPERAND_TYPE, OPERAND_VALUE) NOTE_LABEL (oldpc+IMMEDIATE_s2)
#define PRE_COND(OPERAND_TYPE, OPERAND_VALUE) NOTE_LABEL (oldpc+IMMEDIATE_s2)
#define PRE_BRANCH(OPERAND_TYPE, OPERAND_VALUE) \
  saw_index = 0;  INT_temp = (OPERAND_VALUE); \
  if (!saw_index)  NOTE_LABEL(oldpc + INT_temp);
#define PRE_JSR(OPERAND_TYPE, OPERAND_VALUE) \
  saw_index = 0;  INT_temp = (OPERAND_VALUE); \
  NOTE_LABEL (PC); \
  if (!saw_index)  NOTE_LABEL(oldpc + INT_temp);

#define PRE_RET(OPERAND_TYPE, OPERAND_VALUE)  (void)(OPERAND_VALUE)

#define PRE_SWITCH(OPERAND_TYPE, TABLE_OR_LOOKUP) \
  PC = (PC + 3) / 4 * 4; PRE_##TABLE_OR_LOOKUP##_SWITCH

#define PRE_LOOKUP_SWITCH \
  { jint default_offset = IMMEDIATE_s4;  jint npairs = IMMEDIATE_s4; \
    NOTE_LABEL (default_offset+oldpc); \
    if (npairs >= 0) \
      while (--npairs >= 0) { \
        jint match ATTRIBUTE_UNUSED = IMMEDIATE_s4; \
        jint offset = IMMEDIATE_s4; \
        NOTE_LABEL (offset+oldpc); } \
  }

#define PRE_TABLE_SWITCH \
  { jint default_offset = IMMEDIATE_s4; \
    jint low = IMMEDIATE_s4; jint high = IMMEDIATE_s4; \
    NOTE_LABEL (default_offset+oldpc); \
    if (low <= high) \
      while (low++ <= high) { \
        jint offset = IMMEDIATE_s4; \
        NOTE_LABEL (offset+oldpc); } \
  }

#define PRE_FIELD(MAYBE_STATIC, PUT_OR_GET) (void)(IMMEDIATE_u2);
#define PRE_OBJECT(MAYBE_STATIC, PUT_OR_GET) (void)(IMMEDIATE_u2);
#define PRE_INVOKE(MAYBE_STATIC, IS_INTERFACE) \
  (void)(IMMEDIATE_u2); \
  PC += 2 * IS_INTERFACE /* for invokeinterface */;

#include "javaop.def"
#undef JAVAOP
        }
    } /* for */
}
void
expand_byte_code (JCF *jcf, tree method)
{
  int PC;
  int i;
  const unsigned char *linenumber_pointer;
  int dead_code_index = -1;
  unsigned char* byte_ops;
  long length = DECL_CODE_LENGTH (method);

  JCF_SEEK (jcf, DECL_CODE_OFFSET (method));
  byte_ops = jcf->read_ptr;

  /* We make an initial pass of the line number table, to note
     which instructions have associated line number entries.  */
  linenumber_pointer = linenumber_table;
  for (i = 0; i < linenumber_count; i++)
    {
      int pc = GET_u2 (linenumber_pointer);
      linenumber_pointer += 4;
      if (pc >= length)
        warning (0, "invalid PC in line number table");
      else
        {
          if ((instruction_bits[pc] & BCODE_HAS_LINENUMBER) != 0)
            instruction_bits[pc] |= BCODE_HAS_MULTI_LINENUMBERS;
          instruction_bits[pc] |= BCODE_HAS_LINENUMBER;
        }
    }

  if (! verify_jvm_instructions_new (jcf, byte_ops, length))
    return;

  promote_arguments ();

  /* Translate bytecodes.  */
  linenumber_pointer = linenumber_table;
  for (PC = 0; PC < length;)
    {
      if ((instruction_bits[PC] & BCODE_TARGET) != 0 || PC == 0)
        {
          tree label = lookup_label (PC);
          flush_quick_stack ();
          if ((instruction_bits[PC] & BCODE_TARGET) != 0)
            java_add_stmt (build1 (LABEL_EXPR, void_type_node, label));
          if (LABEL_VERIFIED (label) || PC == 0)
            load_type_state (label);
        }

      if (! (instruction_bits[PC] & BCODE_VERIFIED))
        {
          if (dead_code_index == -1)
            {
              /* This is the start of a region of unreachable bytecodes.
                 They still need to be processed in order for EH ranges
                 to get handled correctly.  However, we can simply
                 replace these bytecodes with nops.  */
              dead_code_index = PC;
            }

          /* Turn this bytecode into a nop.  */
          byte_ops[PC] = 0x0;
        }
      else
        {
          if (dead_code_index != -1)
            {
              /* We've just reached the end of a region of dead code.  */
              if (extra_warnings)
                warning (0, "unreachable bytecode from %d to before %d",
                         dead_code_index, PC);
              dead_code_index = -1;
            }
        }

      /* Handle possible line number entry for this PC.

         This code handles out-of-order and multiple linenumbers per PC,
         but is optimized for the case of line numbers increasing
         monotonically with PC. */
      if ((instruction_bits[PC] & BCODE_HAS_LINENUMBER) != 0)
        {
          if ((instruction_bits[PC] & BCODE_HAS_MULTI_LINENUMBERS) != 0
              || GET_u2 (linenumber_pointer) != PC)
            linenumber_pointer = linenumber_table;
          while (linenumber_pointer < linenumber_table + linenumber_count * 4)
            {
              int pc = GET_u2 (linenumber_pointer);
              linenumber_pointer += 4;
              if (pc == PC)
                {
                  int line = GET_u2 (linenumber_pointer - 2);
#ifdef USE_MAPPED_LOCATION
                  input_location = linemap_line_start (&line_table, line, 1);
#else
                  input_location.line = line;
#endif
                  if (!(instruction_bits[PC] & BCODE_HAS_MULTI_LINENUMBERS))
                    break;
                }
            }
        }
      maybe_pushlevels (PC);
      PC = process_jvm_instruction (PC, byte_ops, length);
      maybe_poplevels (PC);
    } /* for */

  if (dead_code_index != -1)
    {
      /* We've just reached the end of a region of dead code.  */
      if (extra_warnings)
        warning (0, "unreachable bytecode from %d to the end of the method",
                 dead_code_index);
    }
}
static void
java_push_constant_from_pool (JCF *jcf, int index)
{
  tree c;
  if (JPOOL_TAG (jcf, index) == CONSTANT_String)
    {
      tree name;
      name = get_name_constant (jcf, JPOOL_USHORT1 (jcf, index));
      index = alloc_name_constant (CONSTANT_String, name);
      c = build_ref_from_constant_pool (index);
      c = convert (promote_type (string_type_node), c);
    }
  else if (JPOOL_TAG (jcf, index) == CONSTANT_Class
           || JPOOL_TAG (jcf, index) == CONSTANT_ResolvedClass)
    {
      tree record = get_class_constant (jcf, index);
      c = build_class_ref (record);
    }
  else
    c = get_constant (jcf, index);
  push_value (c);
}
int
process_jvm_instruction (int PC, const unsigned char* byte_ops,
                         long length ATTRIBUTE_UNUSED)
{
  const char *opname; /* Temporary ??? */
  int oldpc = PC; /* PC at instruction start. */

  /* If the instruction is at the beginning of an exception handler,
     replace the top of the stack with the thrown object reference.  */
  if (instruction_bits[PC] & BCODE_EXCEPTION_TARGET)
    {
      /* Note that the verifier will not emit a type map at all for
         dead exception handlers.  In this case we just ignore the
         situation.  */
      if ((instruction_bits[PC] & BCODE_VERIFIED) != 0)
        {
          tree type = pop_type (promote_type (throwable_type_node));
          push_value (build_exception_object_ref (type));
        }
    }

  switch (byte_ops[PC++])
    {
#define JAVAOP(OPNAME, OPCODE, OPKIND, OPERAND_TYPE, OPERAND_VALUE) \
    case OPCODE: \
      opname = #OPNAME; \
      OPKIND(OPERAND_TYPE, OPERAND_VALUE); \
      break;

#define RET(OPERAND_TYPE, OPERAND_VALUE) \
  { \
    int saw_index = 0; \
    int index = OPERAND_VALUE; \
    build_java_ret \
      (find_local_variable (index, return_address_type_node, oldpc)); \
  }

#define JSR(OPERAND_TYPE, OPERAND_VALUE) \
  { \
    /* OPERAND_VALUE may have side-effects on PC */ \
    int opvalue = OPERAND_VALUE; \
    build_java_jsr (oldpc + opvalue, PC); \
  }

/* Push a constant onto the stack. */
#define PUSHC(OPERAND_TYPE, OPERAND_VALUE) \
  { int saw_index = 0;  int ival = (OPERAND_VALUE); \
    if (saw_index) java_push_constant_from_pool (current_jcf, ival); \
    else expand_java_pushc (ival, OPERAND_TYPE##_type_node); }

/* internal macro added for use by the WIDE case */
#define LOAD_INTERNAL(OPTYPE, OPVALUE) \
  expand_load_internal (OPVALUE, type_map[OPVALUE], oldpc);

/* Push local variable onto the opcode stack. */
#define LOAD(OPERAND_TYPE, OPERAND_VALUE) \
  { \
    /* have to do this since OPERAND_VALUE may have side-effects */ \
    int opvalue = OPERAND_VALUE; \
    LOAD_INTERNAL(OPERAND_TYPE##_type_node, opvalue); \
  }

#define RETURN(OPERAND_TYPE, OPERAND_VALUE) \
  expand_java_return (OPERAND_TYPE##_type_node)

#define REM_EXPR TRUNC_MOD_EXPR
#define BINOP(OPERAND_TYPE, OPERAND_VALUE) \
  expand_java_binop (OPERAND_TYPE##_type_node, OPERAND_VALUE##_EXPR)

#define FIELD(IS_STATIC, IS_PUT) \
  expand_java_field_op (IS_STATIC, IS_PUT, IMMEDIATE_u2)

#define TEST(OPERAND_TYPE, CONDITION) \
  expand_test (CONDITION##_EXPR, OPERAND_TYPE##_type_node, oldpc+IMMEDIATE_s2)

#define COND(OPERAND_TYPE, CONDITION) \
  expand_cond (CONDITION##_EXPR, OPERAND_TYPE##_type_node, oldpc+IMMEDIATE_s2)

#define BRANCH(OPERAND_TYPE, OPERAND_VALUE) \
  BRANCH_##OPERAND_TYPE (OPERAND_VALUE)

#define BRANCH_GOTO(OPERAND_VALUE) \
  expand_java_goto (oldpc + OPERAND_VALUE)

#define BRANCH_CALL(OPERAND_VALUE) \
  expand_java_call (oldpc + OPERAND_VALUE, oldpc)

#if 0
#define BRANCH_RETURN(OPERAND_VALUE) \
  { \
    tree type = OPERAND_TYPE##_type_node; \
    tree value = find_local_variable (OPERAND_VALUE, type, oldpc); \
    expand_java_ret (value); \
  }
#endif

#define NOT_IMPL(OPERAND_TYPE, OPERAND_VALUE) \
          fprintf (stderr, "%3d: %s ", oldpc, opname); \
          fprintf (stderr, "(not implemented)\n")
#define NOT_IMPL1(OPERAND_VALUE) \
          fprintf (stderr, "%3d: %s ", oldpc, opname); \
          fprintf (stderr, "(not implemented)\n")

#define BRANCH_RETURN(OPERAND_VALUE) NOT_IMPL1(OPERAND_VALUE)

#define STACK(SUBOP, COUNT) STACK_##SUBOP (COUNT)

#define STACK_POP(COUNT) java_stack_pop (COUNT)

#define STACK_SWAP(COUNT) java_stack_swap()

#define STACK_DUP(COUNT) java_stack_dup (COUNT, 0)
#define STACK_DUPx1(COUNT) java_stack_dup (COUNT, 1)
#define STACK_DUPx2(COUNT) java_stack_dup (COUNT, 2)

#define SWITCH(OPERAND_TYPE, TABLE_OR_LOOKUP) \
  PC = (PC + 3) / 4 * 4; TABLE_OR_LOOKUP##_SWITCH

#define LOOKUP_SWITCH \
  { jint default_offset = IMMEDIATE_s4;  jint npairs = IMMEDIATE_s4; \
    tree selector = pop_value (INT_type_node); \
    tree switch_expr = expand_java_switch (selector, oldpc + default_offset); \
    while (--npairs >= 0) \
      { \
        jint match = IMMEDIATE_s4; jint offset = IMMEDIATE_s4; \
        expand_java_add_case (switch_expr, match, oldpc + offset); \
      } \
  }

#define TABLE_SWITCH \
  { jint default_offset = IMMEDIATE_s4; \
    jint low = IMMEDIATE_s4; jint high = IMMEDIATE_s4; \
    tree selector = pop_value (INT_type_node); \
    tree switch_expr = expand_java_switch (selector, oldpc + default_offset); \
    for (; low <= high; low++) \
      { \
        jint offset = IMMEDIATE_s4; \
        expand_java_add_case (switch_expr, low, oldpc + offset); \
      } \
  }

#define INVOKE(MAYBE_STATIC, IS_INTERFACE) \
  { int opcode = byte_ops[PC-1]; \
    int method_ref_index = IMMEDIATE_u2; \
    int nargs; \
    if (IS_INTERFACE) { nargs = IMMEDIATE_u1;  (void) IMMEDIATE_u1; } \
    else nargs = -1; \
    expand_invoke (opcode, method_ref_index, nargs); \
  }

/* Handle new, checkcast, instanceof */
#define OBJECT(TYPE, OP) \
  expand_java_##OP (get_class_constant (current_jcf, IMMEDIATE_u2))

#define ARRAY(OPERAND_TYPE, SUBOP) ARRAY_##SUBOP(OPERAND_TYPE)

#define ARRAY_LOAD(OPERAND_TYPE) \
  { \
    expand_java_arrayload( OPERAND_TYPE##_type_node ); \
  }

#define ARRAY_STORE(OPERAND_TYPE) \
  { \
    expand_java_arraystore( OPERAND_TYPE##_type_node ); \
  }

#define ARRAY_LENGTH(OPERAND_TYPE) expand_java_array_length();
#define ARRAY_NEW(OPERAND_TYPE) ARRAY_NEW_##OPERAND_TYPE()
#define ARRAY_NEW_PTR() \
    push_value (build_anewarray (get_class_constant (current_jcf, \
                                                     IMMEDIATE_u2), \
                                 pop_value (int_type_node)));
#define ARRAY_NEW_NUM() \
  { \
    int atype = IMMEDIATE_u1; \
    push_value (build_newarray (atype, pop_value (int_type_node)));\
  }
#define ARRAY_NEW_MULTI() \
  { \
    tree class = get_class_constant (current_jcf, IMMEDIATE_u2 ); \
    int  ndims = IMMEDIATE_u1; \
    expand_java_multianewarray( class, ndims ); \
  }

#define UNOP(OPERAND_TYPE, OPERAND_VALUE) \
  push_value (fold_build1 (NEGATE_EXPR, OPERAND_TYPE##_type_node, \
                           pop_value (OPERAND_TYPE##_type_node)));

#define CONVERT2(FROM_TYPE, TO_TYPE) \
  { \
    push_value (build1 (NOP_EXPR, int_type_node, \
                        (convert (TO_TYPE##_type_node, \
                                  pop_value (FROM_TYPE##_type_node))))); \
  }

#define CONVERT(FROM_TYPE, TO_TYPE) \
  { \
    push_value (convert (TO_TYPE##_type_node, \
                         pop_value (FROM_TYPE##_type_node))); \
  }

/* internal macro added for use by the WIDE case
   Added TREE_TYPE (decl) assignment, apbianco  */
#define STORE_INTERNAL(OPTYPE, OPVALUE) \
  { \
    tree decl, value; \
    int index = OPVALUE; \
    tree type = OPTYPE; \
    value = pop_value (type); \
    type = TREE_TYPE (value); \
    decl = find_local_variable (index, type, oldpc); \
    set_local_type (index, type); \
    java_add_stmt (build2 (MODIFY_EXPR, type, decl, value)); \
  }

#define STORE(OPERAND_TYPE, OPERAND_VALUE) \
  { \
    /* have to do this since OPERAND_VALUE may have side-effects */ \
    int opvalue = OPERAND_VALUE; \
    STORE_INTERNAL(OPERAND_TYPE##_type_node, opvalue); \
  }

#define SPECIAL(OPERAND_TYPE, INSTRUCTION) \
  SPECIAL_##INSTRUCTION(OPERAND_TYPE)

#define SPECIAL_ENTER(IGNORED) MONITOR_OPERATION (soft_monitorenter_node)
#define SPECIAL_EXIT(IGNORED)  MONITOR_OPERATION (soft_monitorexit_node)

#define MONITOR_OPERATION(call) \
  { \
    tree o = pop_value (ptr_type_node); \
    tree c; \
    flush_quick_stack (); \
    c = build_java_monitor (call, o); \
    TREE_SIDE_EFFECTS (c) = 1; \
    java_add_stmt (c); \
  }

#define SPECIAL_IINC(IGNORED) \
  { \
    unsigned int local_var_index = IMMEDIATE_u1; \
    int ival = IMMEDIATE_s1; \
    expand_iinc(local_var_index, ival, oldpc); \
  }

#define SPECIAL_WIDE(IGNORED) \
  { \
    int modified_opcode = IMMEDIATE_u1; \
    unsigned int local_var_index = IMMEDIATE_u2; \
    switch (modified_opcode) \
      { \
      case OPCODE_iinc: \
        { \
          int ival = IMMEDIATE_s2; \
          expand_iinc (local_var_index, ival, oldpc); \
          break; \
        } \
      case OPCODE_iload: \
      case OPCODE_lload: \
      case OPCODE_fload: \
      case OPCODE_dload: \
      case OPCODE_aload: \
        { \
          /* duplicate code from LOAD macro */ \
          LOAD_INTERNAL(operand_type[modified_opcode], local_var_index); \
          break; \
        } \
      case OPCODE_istore: \
      case OPCODE_lstore: \
      case OPCODE_fstore: \
      case OPCODE_dstore: \
      case OPCODE_astore: \
        { \
          STORE_INTERNAL(operand_type[modified_opcode], local_var_index); \
          break; \
        } \
      default: \
        error ("unrecognized wide sub-instruction"); \
      } \
  }

#define SPECIAL_THROW(IGNORED) \
  build_java_athrow (pop_value (throwable_type_node))

#define SPECIAL_BREAK NOT_IMPL1
#define IMPL          NOT_IMPL

#include "javaop.def"
#undef JAVAOP

    default:
      fprintf (stderr, "%3d: unknown(%3d)\n", oldpc, byte_ops[PC]);
    }

  return PC;
}
/* Return the opcode at PC in the code section pointed to by
   CODE_OFFSET.  */

static unsigned char
peek_opcode_at_pc (JCF *jcf, int code_offset, int pc)
{
  unsigned char opcode;
  long absolute_offset = (long)JCF_TELL (jcf);

  JCF_SEEK (jcf, code_offset);
  opcode = jcf->read_ptr[pc];
  JCF_SEEK (jcf, absolute_offset);
  return opcode;
}
/* Some bytecode compilers are emitting accurate LocalVariableTable
   attributes.  Here's an example:

     PC   <t>store_<n>
     PC+1 ...

     Attribute "LocalVariableTable"
     slot #<n>: ... (PC: PC+1 length: L)

   This is accurate because the local in slot <n> really exists after
   the opcode at PC is executed, hence from PC+1 to PC+1+L.

   This procedure recognizes this situation and extends the live range
   of the local in SLOT to START_PC-1 or START_PC-2 (depending on the
   length of the store instruction.)

   This function is used by `give_name_to_locals' so that a local's
   DECL features a DECL_LOCAL_START_PC such that the first related
   store operation will use DECL as a destination, not an unrelated
   temporary created for the occasion.

   This function uses a global (instruction_bits) that `note_instructions'
   should have allocated and filled properly.  */

int
maybe_adjust_start_pc (struct JCF *jcf, int code_offset,
                       int start_pc, int slot)
{
  int first, index, opcode;
  int pc, insn_pc;
  int wide_found = 0;

  if (!start_pc)
    return start_pc;

  first = index = -1;

  /* Find last previous instruction and remember it */
  for (pc = start_pc-1; pc; pc--)
    if (instruction_bits[pc] & BCODE_INSTRUCTION_START)
      break;
  insn_pc = pc;

  /* Retrieve the instruction, handle `wide'. */
  opcode = (int) peek_opcode_at_pc (jcf, code_offset, pc++);
  if (opcode == OPCODE_wide)
    {
      wide_found = 1;
      opcode = (int) peek_opcode_at_pc (jcf, code_offset, pc++);
    }

  switch (opcode)
    {
    case OPCODE_astore_0:
    case OPCODE_astore_1:
    case OPCODE_astore_2:
    case OPCODE_astore_3:
      first = OPCODE_astore_0;
      break;

    case OPCODE_istore_0:
    case OPCODE_istore_1:
    case OPCODE_istore_2:
    case OPCODE_istore_3:
      first = OPCODE_istore_0;
      break;

    case OPCODE_lstore_0:
    case OPCODE_lstore_1:
    case OPCODE_lstore_2:
    case OPCODE_lstore_3:
      first = OPCODE_lstore_0;
      break;

    case OPCODE_fstore_0:
    case OPCODE_fstore_1:
    case OPCODE_fstore_2:
    case OPCODE_fstore_3:
      first = OPCODE_fstore_0;
      break;

    case OPCODE_dstore_0:
    case OPCODE_dstore_1:
    case OPCODE_dstore_2:
    case OPCODE_dstore_3:
      first = OPCODE_dstore_0;
      break;

    case OPCODE_astore:
    case OPCODE_istore:
    case OPCODE_lstore:
    case OPCODE_fstore:
    case OPCODE_dstore:
      index = peek_opcode_at_pc (jcf, code_offset, pc);
      if (wide_found)
        {
          int other = peek_opcode_at_pc (jcf, code_offset, ++pc);
          index = (other << 8) + index;
        }
      break;
    }

  /* Now we decide: first >0 means we have a <t>store_<n>, index >0
     means we have a <t>store. */
  if ((first > 0 && opcode - first == slot)
      || (index > 0 && index == slot))
    start_pc = insn_pc;

  return start_pc;
}
/* Force the (direct) sub-operands of NODE to be evaluated in left-to-right
   order, as specified by the Java Language Specification.

   The problem is that while expand_expr will evaluate its sub-operands in
   left-to-right order, for variables it will just return an rtx (i.e.
   an lvalue) for the variable (rather than an rvalue).  So it is possible
   that a later sub-operand will change the register, and when the
   actual operation is done, it will use the new value, when it should
   have used the original value.

   We fix this by using save_expr.  This forces the sub-operand to be
   copied into a fresh virtual register.

   For method invocation, we modify the arguments so that a
   left-to-right order evaluation is performed.  Saved expressions
   will, in CALL_EXPR order, be reused when the call will be expanded.

   We also promote outgoing args if needed.  */

tree
force_evaluation_order (tree node)
{
  if (flag_syntax_only)
    return node;
  if (TREE_CODE (node) == CALL_EXPR
      || (TREE_CODE (node) == COMPOUND_EXPR
          && TREE_CODE (TREE_OPERAND (node, 0)) == CALL_EXPR
          && TREE_CODE (TREE_OPERAND (node, 1)) == SAVE_EXPR))
    {
      tree arg, cmp;

      arg = node;

      /* Position arg properly, account for wrapped around ctors. */
      if (TREE_CODE (node) == COMPOUND_EXPR)
        arg = TREE_OPERAND (node, 0);

      arg = TREE_OPERAND (arg, 1);

      /* An empty argument list is ok, just ignore it.  */
      if (arg == NULL_TREE)
        return node;

      /* Not having a list of arguments here is an error. */
      gcc_assert (TREE_CODE (arg) == TREE_LIST);

      /* This reverses the evaluation order. This is a desired effect. */
      for (cmp = NULL_TREE; arg; arg = TREE_CHAIN (arg))
        {
          /* Promote types smaller than integer.  This is required by
             the ABI.  */
          tree type = TREE_TYPE (TREE_VALUE (arg));
          tree saved;
          if (targetm.calls.promote_prototypes (type)
              && INTEGRAL_TYPE_P (type)
              && INT_CST_LT_UNSIGNED (TYPE_SIZE (type),
                                      TYPE_SIZE (integer_type_node)))
            TREE_VALUE (arg) = fold_convert (integer_type_node, TREE_VALUE (arg));

          saved = save_expr (force_evaluation_order (TREE_VALUE (arg)));
          cmp = (cmp == NULL_TREE ? saved :
                 build2 (COMPOUND_EXPR, void_type_node, cmp, saved));
          TREE_VALUE (arg) = saved;
        }

      if (cmp && TREE_CODE (cmp) == COMPOUND_EXPR)
        TREE_SIDE_EFFECTS (cmp) = 1;

      if (cmp)
        {
          cmp = build2 (COMPOUND_EXPR, TREE_TYPE (node), cmp, node);
          if (TREE_TYPE (cmp) != void_type_node)
            cmp = save_expr (cmp);
          CAN_COMPLETE_NORMALLY (cmp) = CAN_COMPLETE_NORMALLY (node);
          TREE_SIDE_EFFECTS (cmp) = 1;
          node = cmp;
        }
    }
  return node;
}
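/* Sketch of the transformation (illustration only): a call f (a (), b ())
   becomes, in tree terms,

       (t1 = a (), t2 = b (), f (t1, t2))

   where t1 and t2 stand for the SAVE_EXPRs created in the loop above; the
   COMPOUND_EXPR chain pins the arguments' evaluation left to right before
   the CALL_EXPR itself is expanded.  */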
/* EXPR_WITH_FILE_LOCATION are used to keep track of the exact
   location where an expression or an identifier were encountered. It
   is necessary for languages where the frontend parser will handle
   recursively more than one file (Java is one of them).  */

tree
build_expr_wfl (tree node,
#ifdef USE_MAPPED_LOCATION
                source_location location
#else
                const char *file, int line, int col
#endif
)
{
  tree wfl;

#ifdef USE_MAPPED_LOCATION
  wfl = make_node (EXPR_WITH_FILE_LOCATION);
  SET_EXPR_LOCATION (wfl, location);
#else
  static const char *last_file = 0;
  static tree last_filenode = NULL_TREE;

  wfl = make_node (EXPR_WITH_FILE_LOCATION);

  EXPR_WFL_SET_LINECOL (wfl, line, col);
  if (file != last_file)
    {
      last_file = file;
      last_filenode = file ? get_identifier (file) : NULL_TREE;
    }
  EXPR_WFL_FILENAME_NODE (wfl) = last_filenode;
#endif
  EXPR_WFL_NODE (wfl) = node;
  if (node)
    {
      if (!TYPE_P (node))
        TREE_SIDE_EFFECTS (wfl) = TREE_SIDE_EFFECTS (node);
      TREE_TYPE (wfl) = TREE_TYPE (node);
    }

  return wfl;
}

#ifdef USE_MAPPED_LOCATION
tree
expr_add_location (tree node, source_location location, bool statement)
{
  tree wfl;
#if 0
  /* FIXME.  This optimization causes failures in code that expects an
     EXPR_WITH_FILE_LOCATION.  E.g. in resolve_qualified_expression_name.  */
  if (node && ! (statement && flag_emit_class_files))
    {
      source_location node_loc = EXPR_LOCATION (node);
      if (node_loc == location || location == UNKNOWN_LOCATION)
        return node;
      if (node_loc == UNKNOWN_LOCATION
          && IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (TREE_CODE (node))))
        {
          SET_EXPR_LOCATION (node, location);
          return node;
        }
    }
#endif
  wfl = make_node (EXPR_WITH_FILE_LOCATION);
  SET_EXPR_LOCATION (wfl, location);
  EXPR_WFL_NODE (wfl) = node;
  if (statement && debug_info_level != DINFO_LEVEL_NONE)
    EXPR_WFL_EMIT_LINE_NOTE (wfl) = 1;
  if (node)
    {
      if (!TYPE_P (node))
        TREE_SIDE_EFFECTS (wfl) = TREE_SIDE_EFFECTS (node);
      TREE_TYPE (wfl) = TREE_TYPE (node);
    }

  return wfl;
}
#endif
/* Build a node to represent empty statements and blocks. */

tree
build_java_empty_stmt (void)
{
  tree t = build_empty_stmt ();
  CAN_COMPLETE_NORMALLY (t) = 1;
  return t;
}
3859 /* Promote all args of integral type before generating any code. */
3862 promote_arguments (void)
3866 for (arg
= DECL_ARGUMENTS (current_function_decl
), i
= 0;
3867 arg
!= NULL_TREE
; arg
= TREE_CHAIN (arg
), i
++)
3869 tree arg_type
= TREE_TYPE (arg
);
3870 if (INTEGRAL_TYPE_P (arg_type
)
3871 && TYPE_PRECISION (arg_type
) < 32)
3873 tree copy
= find_local_variable (i
, integer_type_node
, -1);
3874 java_add_stmt (build2 (MODIFY_EXPR
, integer_type_node
,
3876 fold_convert (integer_type_node
, arg
)));
3878 if (TYPE_IS_WIDE (arg_type
))
3883 #include "gt-java-expr.h"