1 /* Write out a Java(TM) class file.
2 Copyright (C) 1998, 1999, 2000, 2001, 2002, 2003, 2004
3 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA.
21 Java and all Java-based marks are trademarks or registered trademarks
22 of Sun Microsystems, Inc. in the United States and other countries.
23 The Free Software Foundation is independent of Sun Microsystems, Inc. */
27 #include "coretypes.h"
32 #include "java-tree.h"
36 #include "java-opcodes.h"
37 #include "parse.h" /* for BLOCK_EXPR_BODY */
43 extern struct obstack temporary_obstack
;
45 /* Base directory in which `.class' files should be written.
46 NULL means to put the file into the same directory as the
47 corresponding .java file. */
48 const char *jcf_write_base_directory
= NULL
;
50 /* Make sure bytecode.data is big enough for at least N more bytes. */
53 do { CHECK_OP(state); \
54 if (state->bytecode.ptr + (N) > state->bytecode.limit) \
55 buffer_grow (&state->bytecode, N); } while (0)
57 /* Add a 1-byte instruction/operand I to bytecode.data,
58 assuming space has already been RESERVE'd. */
60 #define OP1(I) (*state->bytecode.ptr++ = (I), CHECK_OP(state))
62 /* Like OP1, but I is a 2-byte big endian integer. */
65 do { int _i = (I); OP1 (_i >> 8); OP1 (_i); CHECK_OP(state); } while (0)
67 /* Like OP1, but I is a 4-byte big endian integer. */
70 do { int _i = (I); OP1 (_i >> 24); OP1 (_i >> 16); \
71 OP1 (_i >> 8); OP1 (_i); CHECK_OP(state); } while (0)
73 /* Macro to call each time we push I words on the JVM stack. */
75 #define NOTE_PUSH(I) \
76 do { state->code_SP += (I); \
77 if (state->code_SP > state->code_SP_max) \
78 state->code_SP_max = state->code_SP; } while (0)
80 /* Macro to call each time we pop I words from the JVM stack. */
83 do { state->code_SP -= (I); if (state->code_SP < 0) abort(); } while (0)
85 /* A chunk or segment of a .class file. */
89 /* The next segment of this .class file. */
92 /* The actual data in this segment to be written to the .class file. */
95 /* The size of the segment to be written to the .class file. */
99 #define PENDING_CLEANUP_PC (-3)
100 #define PENDING_EXIT_PC (-2)
101 #define UNDEFINED_PC (-1)
103 /* Each "block" represents a label plus the bytecode instructions following.
104 There may be branches out of the block, but no incoming jumps, except
105 to the beginning of the block.
107 If (pc < 0), the jcf_block is not an actual block (i.e. it has no
108 associated code yet), but it is an undefined label.
113 /* For blocks that are defined, the next block (in pc order).
114 For blocks that are not-yet-defined the end label of a LABELED_BLOCK_EXPR
115 or a cleanup expression (from a TRY_FINALLY_EXPR),
116 this is the next (outer) such end label, in a stack headed by
117 labeled_blocks in jcf_partial. */
118 struct jcf_block
*next
;
120 /* In the not-yet-defined end label for an unfinished EXIT_BLOCK_EXPR.
121 pc is PENDING_EXIT_PC.
122 In the not-yet-defined end label for pending cleanup subroutine,
123 pc is PENDING_CLEANUP_PC.
124 For other not-yet-defined labels, pc is UNDEFINED_PC.
126 If the label has been defined:
127 Until perform_relocations is finished, this is the maximum possible
128 value of the bytecode offset at the beginning of this block.
129 After perform_relocations, it is the actual offset (pc). */
134 /* After finish_jcf_block is called, the actual instructions
135 contained in this block. Before that NULL, and the instructions
136 are in state->bytecode. */
140 /* If pc==PENDING_CLEANUP_PC, start_label is the start of the region
141 covered by the cleanup. */
142 struct jcf_block
*start_label
;
146 /* Set of relocations (in reverse offset order) for this block. */
147 struct jcf_relocation
*relocations
;
149 /* If this block is that of the not-yet-defined end label of
150 a LABELED_BLOCK_EXPR, where LABELED_BLOCK is that LABELED_BLOCK_EXPR.
151 If pc==PENDING_CLEANUP_PC, the cleanup that needs to be run. */
156 /* A "relocation" type for the 0-3 bytes of padding at the start
157 of a tableswitch or a lookupswitch. */
158 #define SWITCH_ALIGN_RELOC 4
160 /* A relocation type for the labels in a tableswitch or a lookupswitch;
161 these are relative to the start of the instruction, but (due to
162 the 0-3 bytes of padding), we don't know the offset before relocation. */
163 #define BLOCK_START_RELOC 1
165 struct jcf_relocation
167 /* Next relocation for the current jcf_block. */
168 struct jcf_relocation
*next
;
170 /* The (byte) offset within the current block that needs to be relocated. */
171 HOST_WIDE_INT offset
;
173 /* 0 if offset is a 4-byte relative offset.
174 4 (SWITCH_ALIGN_RELOC) if offset points to 0-3 padding bytes inserted
175 for proper alignment in tableswitch/lookupswitch instructions.
176 1 (BLOCK_START_RELOC) if offset points to a 4-byte offset relative
177 to the start of the containing block.
178 -1 if offset is a 2-byte relative offset.
179 < -1 if offset is the address of an instruction with a 2-byte offset
180 that does not have a corresponding 4-byte offset version, in which
181 case the absolute value of kind is the inverted opcode.
182 > 4 if offset is the address of an instruction (such as jsr) with a
183 2-byte offset that does have a corresponding 4-byte offset version,
184 in which case kind is the opcode of the 4-byte version (such as jsr_w). */
187 /* The label the relocation wants to actually transfer to. */
188 struct jcf_block
*label
;
191 #define RELOCATION_VALUE_0 ((HOST_WIDE_INT)0)
192 #define RELOCATION_VALUE_1 ((HOST_WIDE_INT)1)
194 /* State for single catch clause. */
198 struct jcf_handler
*next
;
200 struct jcf_block
*start_label
;
201 struct jcf_block
*end_label
;
202 struct jcf_block
*handler_label
;
204 /* The sub-class of Throwable handled, or NULL_TREE (for finally). */
208 /* State for the current switch statement. */
210 struct jcf_switch_state
212 struct jcf_switch_state
*prev
;
213 struct jcf_block
*default_label
;
215 struct jcf_relocation
*cases
;
217 HOST_WIDE_INT min_case
, max_case
;
220 /* This structure is used to contain the various pieces that will
221 become a .class file. */
227 struct obstack
*chunk_obstack
;
230 /* List of basic blocks for the current method. */
231 struct jcf_block
*blocks
;
232 struct jcf_block
*last_block
;
234 struct localvar_info
*first_lvar
;
235 struct localvar_info
*last_lvar
;
240 int linenumber_count
;
242 /* Until perform_relocations, this is a upper bound on the number
243 of bytes (so far) in the instructions for the current method. */
246 /* Stack of undefined ending labels for LABELED_BLOCK_EXPR. */
247 struct jcf_block
*labeled_blocks
;
249 /* The current stack size (stack pointer) in the current method. */
252 /* The largest extent of stack size (stack pointer) in the current method. */
255 /* Contains a mapping from local var slot number to localvar_info. */
256 struct buffer localvars
;
258 /* The buffer allocated for bytecode for the current jcf_block. */
259 struct buffer bytecode
;
261 /* Chain of exception handlers for the current method. */
262 struct jcf_handler
*handlers
;
264 /* Last element in handlers chain. */
265 struct jcf_handler
*last_handler
;
267 /* Number of exception handlers for the current method. */
270 /* Number of finalizers we are currently nested within. */
273 /* If non-NULL, use this for the return value. */
274 tree return_value_decl
;
276 /* Information about the current switch statement. */
277 struct jcf_switch_state
*sw_state
;
279 /* The count of jsr instructions that have been emitted. */
283 static void generate_bytecode_insns (tree
, int, struct jcf_partial
*);
284 static struct chunk
* alloc_chunk (struct chunk
*, unsigned char *,
285 int, struct obstack
*);
286 static unsigned char * append_chunk (unsigned char *, int,
287 struct jcf_partial
*);
288 static void append_chunk_copy (unsigned char *, int, struct jcf_partial
*);
289 static struct jcf_block
* gen_jcf_label (struct jcf_partial
*);
290 static void finish_jcf_block (struct jcf_partial
*);
291 static void define_jcf_label (struct jcf_block
*, struct jcf_partial
*);
292 static struct jcf_block
* get_jcf_label_here (struct jcf_partial
*);
293 static void put_linenumber (int, struct jcf_partial
*);
294 static void localvar_alloc (tree
, struct jcf_partial
*);
295 static void maybe_free_localvar (tree
, struct jcf_partial
*, int);
296 static int get_access_flags (tree
);
297 static void write_chunks (FILE *, struct chunk
*);
298 static int adjust_typed_op (tree
, int);
299 static void generate_bytecode_conditional (tree
, struct jcf_block
*,
300 struct jcf_block
*, int,
301 struct jcf_partial
*);
302 static void generate_bytecode_return (tree
, struct jcf_partial
*);
303 static void perform_relocations (struct jcf_partial
*);
304 static void init_jcf_state (struct jcf_partial
*, struct obstack
*);
305 static void init_jcf_method (struct jcf_partial
*, tree
);
306 static void release_jcf_state (struct jcf_partial
*);
307 static int get_classfile_modifiers (tree
class);
308 static struct chunk
* generate_classfile (tree
, struct jcf_partial
*);
309 static struct jcf_handler
*alloc_handler (struct jcf_block
*,
311 struct jcf_partial
*);
312 static void emit_iinc (tree
, HOST_WIDE_INT
, struct jcf_partial
*);
313 static void emit_reloc (HOST_WIDE_INT
, int, struct jcf_block
*,
314 struct jcf_partial
*);
315 static void push_constant1 (HOST_WIDE_INT
, struct jcf_partial
*);
316 static void push_constant2 (HOST_WIDE_INT
, struct jcf_partial
*);
317 static void push_int_const (HOST_WIDE_INT
, struct jcf_partial
*);
318 static int find_constant_wide (HOST_WIDE_INT
, HOST_WIDE_INT
,
319 struct jcf_partial
*);
320 static void push_long_const (HOST_WIDE_INT
, HOST_WIDE_INT
,
321 struct jcf_partial
*);
322 static int find_constant_index (tree
, struct jcf_partial
*);
323 static void push_long_const (HOST_WIDE_INT
, HOST_WIDE_INT
,
324 struct jcf_partial
*);
325 static void field_op (tree
, int, struct jcf_partial
*);
326 static void maybe_wide (int, int, struct jcf_partial
*);
327 static void emit_dup (int, int, struct jcf_partial
*);
328 static void emit_pop (int, struct jcf_partial
*);
329 static void emit_load_or_store (tree
, int, struct jcf_partial
*);
330 static void emit_load (tree
, struct jcf_partial
*);
331 static void emit_store (tree
, struct jcf_partial
*);
332 static void emit_unop (enum java_opcode
, tree
, struct jcf_partial
*);
333 static void emit_binop (enum java_opcode
, tree
, struct jcf_partial
*);
334 static void emit_reloc (HOST_WIDE_INT
, int, struct jcf_block
*,
335 struct jcf_partial
*);
336 static void emit_switch_reloc (struct jcf_block
*, struct jcf_partial
*);
337 static void emit_case_reloc (struct jcf_relocation
*, struct jcf_partial
*);
338 static void emit_if (struct jcf_block
*, int, int, struct jcf_partial
*);
339 static void emit_goto (struct jcf_block
*, struct jcf_partial
*);
340 static void emit_jsr (struct jcf_block
*, struct jcf_partial
*);
341 static void call_cleanups (struct jcf_block
*, struct jcf_partial
*);
342 static char *make_class_file_name (tree
);
343 static unsigned char *append_synthetic_attribute (struct jcf_partial
*);
344 static void append_deprecated_attribute (struct jcf_partial
*);
345 static void append_innerclasses_attribute (struct jcf_partial
*, tree
);
346 static void append_innerclasses_attribute_entry (struct jcf_partial
*, tree
, tree
);
347 static void append_gcj_attribute (struct jcf_partial
*, tree
);
349 /* Utility macros for appending (big-endian) data to a buffer.
350 We assume a local variable 'ptr' points into where we want to
351 write next, and we assume enough space has been allocated. */
353 #ifdef ENABLE_JC1_CHECKING
354 static int CHECK_PUT (void *, struct jcf_partial
*, int);
357 CHECK_PUT (void *ptr
, struct jcf_partial
*state
, int i
)
359 if ((unsigned char *) ptr
< state
->chunk
->data
360 || (unsigned char *) ptr
+ i
> state
->chunk
->data
+ state
->chunk
->size
)
366 #define CHECK_PUT(PTR, STATE, I) ((void)0)
369 #define PUT1(X) (CHECK_PUT(ptr, state, 1), *ptr++ = (X))
370 #define PUT2(X) (PUT1((X) >> 8), PUT1((X) & 0xFF))
371 #define PUT4(X) (PUT2((X) >> 16), PUT2((X) & 0xFFFF))
372 #define PUTN(P, N) (CHECK_PUT(ptr, state, N), memcpy(ptr, P, N), ptr += (N))
374 /* There are some cases below where CHECK_PUT is guaranteed to fail.
375 Use the following macros in those specific cases. */
376 #define UNSAFE_PUT1(X) (*ptr++ = (X))
377 #define UNSAFE_PUT2(X) (UNSAFE_PUT1((X) >> 8), UNSAFE_PUT1((X) & 0xFF))
378 #define UNSAFE_PUT4(X) (UNSAFE_PUT2((X) >> 16), UNSAFE_PUT2((X) & 0xFFFF))
379 #define UNSAFE_PUTN(P, N) (memcpy(ptr, P, N), ptr += (N))
382 /* Allocate a new chunk on obstack WORK, and link it in after LAST.
383 Set the data and size fields to DATA and SIZE, respectively.
384 However, if DATA is NULL and SIZE>0, allocate a buffer as well. */
386 static struct chunk
*
387 alloc_chunk (struct chunk
*last
, unsigned char *data
,
388 int size
, struct obstack
*work
)
390 struct chunk
*chunk
= obstack_alloc (work
, sizeof(struct chunk
));
392 if (data
== NULL
&& size
> 0)
393 data
= obstack_alloc (work
, size
);
403 #ifdef ENABLE_JC1_CHECKING
404 static int CHECK_OP (struct jcf_partial
*);
407 CHECK_OP (struct jcf_partial
*state
)
409 if (state
->bytecode
.ptr
> state
->bytecode
.limit
)
415 #define CHECK_OP(STATE) ((void) 0)
418 static unsigned char *
419 append_chunk (unsigned char *data
, int size
, struct jcf_partial
*state
)
421 state
->chunk
= alloc_chunk (state
->chunk
, data
, size
, state
->chunk_obstack
);
422 if (state
->first
== NULL
)
423 state
->first
= state
->chunk
;
424 return state
->chunk
->data
;
428 append_chunk_copy (unsigned char *data
, int size
, struct jcf_partial
*state
)
430 unsigned char *ptr
= append_chunk (NULL
, size
, state
);
431 memcpy (ptr
, data
, size
);
434 static struct jcf_block
*
435 gen_jcf_label (struct jcf_partial
*state
)
437 struct jcf_block
*block
438 = obstack_alloc (state
->chunk_obstack
, sizeof (struct jcf_block
));
440 block
->linenumber
= -1;
441 block
->pc
= UNDEFINED_PC
;
446 finish_jcf_block (struct jcf_partial
*state
)
448 struct jcf_block
*block
= state
->last_block
;
449 struct jcf_relocation
*reloc
;
450 int code_length
= BUFFER_LENGTH (&state
->bytecode
);
451 int pc
= state
->code_length
;
452 append_chunk_copy (state
->bytecode
.data
, code_length
, state
);
453 BUFFER_RESET (&state
->bytecode
);
454 block
->v
.chunk
= state
->chunk
;
456 /* Calculate code_length to the maximum value it can have. */
457 pc
+= block
->v
.chunk
->size
;
458 for (reloc
= block
->u
.relocations
; reloc
!= NULL
; reloc
= reloc
->next
)
460 int kind
= reloc
->kind
;
461 if (kind
== SWITCH_ALIGN_RELOC
)
463 else if (kind
> BLOCK_START_RELOC
)
464 pc
+= 2; /* 2-byte offset may grow to 4-byte offset */
466 pc
+= 5; /* May need to add a goto_w. */
468 state
->code_length
= pc
;
472 define_jcf_label (struct jcf_block
*label
, struct jcf_partial
*state
)
474 if (state
->last_block
!= NULL
)
475 finish_jcf_block (state
);
476 label
->pc
= state
->code_length
;
477 if (state
->blocks
== NULL
)
478 state
->blocks
= label
;
480 state
->last_block
->next
= label
;
481 state
->last_block
= label
;
483 label
->u
.relocations
= NULL
;
486 static struct jcf_block
*
487 get_jcf_label_here (struct jcf_partial
*state
)
489 if (state
->last_block
!= NULL
&& BUFFER_LENGTH (&state
->bytecode
) == 0)
490 return state
->last_block
;
493 struct jcf_block
*label
= gen_jcf_label (state
);
494 define_jcf_label (label
, state
);
499 /* Note a line number entry for the current PC and given LINE. */
502 put_linenumber (int line
, struct jcf_partial
*state
)
504 struct jcf_block
*label
= get_jcf_label_here (state
);
505 if (label
->linenumber
> 0)
507 label
= gen_jcf_label (state
);
508 define_jcf_label (label
, state
);
510 label
->linenumber
= line
;
511 state
->linenumber_count
++;
514 /* Allocate a new jcf_handler, for a catch clause that catches exceptions
515 in the range (START_LABEL, END_LABEL). */
517 static struct jcf_handler
*
518 alloc_handler (struct jcf_block
*start_label
, struct jcf_block
*end_label
,
519 struct jcf_partial
*state
)
521 struct jcf_handler
*handler
522 = obstack_alloc (state
->chunk_obstack
, sizeof (struct jcf_handler
));
523 handler
->start_label
= start_label
;
524 handler
->end_label
= end_label
;
525 handler
->handler_label
= get_jcf_label_here (state
);
526 if (state
->handlers
== NULL
)
527 state
->handlers
= handler
;
529 state
->last_handler
->next
= handler
;
530 state
->last_handler
= handler
;
531 handler
->next
= NULL
;
532 state
->num_handlers
++;
537 /* The index of jvm local variable allocated for this DECL.
538 This is assigned when generating .class files;
539 contrast DECL_LOCAL_SLOT_NUMBER which is set when *reading* a .class file.
540 (We don't allocate DECL_LANG_SPECIFIC for locals from Java source code.) */
542 #define DECL_LOCAL_INDEX(DECL) DECL_ALIGN(DECL)
546 struct localvar_info
*next
;
549 struct jcf_block
*start_label
;
550 struct jcf_block
*end_label
;
553 #define localvar_buffer ((struct localvar_info**) state->localvars.data)
554 #define localvar_max \
555 ((struct localvar_info**) state->localvars.ptr - localvar_buffer)
558 localvar_alloc (tree decl
, struct jcf_partial
*state
)
560 struct jcf_block
*start_label
= get_jcf_label_here (state
);
561 int wide
= TYPE_IS_WIDE (TREE_TYPE (decl
));
563 struct localvar_info
*info
;
564 struct localvar_info
**ptr
= localvar_buffer
;
565 struct localvar_info
**limit
566 = (struct localvar_info
**) state
->localvars
.ptr
;
567 for (index
= 0; ptr
< limit
; index
++, ptr
++)
570 && (! wide
|| ((ptr
+1) < limit
&& ptr
[1] == NULL
)))
575 buffer_grow (&state
->localvars
, 2 * sizeof (struct localvar_info
*));
576 ptr
= (struct localvar_info
**) state
->localvars
.data
+ index
;
577 state
->localvars
.ptr
= (unsigned char *) (ptr
+ 1 + wide
);
579 info
= obstack_alloc (state
->chunk_obstack
, sizeof (struct localvar_info
));
582 ptr
[1] = (struct localvar_info
*)(~0);
583 DECL_LOCAL_INDEX (decl
) = index
;
585 info
->start_label
= start_label
;
587 if (debug_info_level
> DINFO_LEVEL_TERSE
588 && DECL_NAME (decl
) != NULL_TREE
)
590 /* Generate debugging info. */
592 if (state
->last_lvar
!= NULL
)
593 state
->last_lvar
->next
= info
;
595 state
->first_lvar
= info
;
596 state
->last_lvar
= info
;
602 maybe_free_localvar (tree decl
, struct jcf_partial
*state
, int really
)
604 struct jcf_block
*end_label
= get_jcf_label_here (state
);
605 int index
= DECL_LOCAL_INDEX (decl
);
606 struct localvar_info
**ptr
= &localvar_buffer
[index
];
607 struct localvar_info
*info
= *ptr
;
608 int wide
= TYPE_IS_WIDE (TREE_TYPE (decl
));
610 info
->end_label
= end_label
;
612 if (info
->decl
!= decl
)
619 if (ptr
[1] != (struct localvar_info
*)(~0))
626 #define STACK_TARGET 1
627 #define IGNORE_TARGET 2
629 /* Get the access flags of a class (TYPE_DECL), a method (FUNCTION_DECL), or
630 a field (FIELD_DECL or VAR_DECL, if static), as encoded in a .class file. */
633 get_access_flags (tree decl
)
636 int isfield
= TREE_CODE (decl
) == FIELD_DECL
|| TREE_CODE (decl
) == VAR_DECL
;
638 if (isfield
|| TREE_CODE (decl
) == FUNCTION_DECL
)
640 if (TREE_PROTECTED (decl
))
641 flags
|= ACC_PROTECTED
;
642 if (TREE_PRIVATE (decl
))
643 flags
|= ACC_PRIVATE
;
645 else if (TREE_CODE (decl
) == TYPE_DECL
)
647 if (CLASS_PUBLIC (decl
))
649 if (CLASS_FINAL (decl
))
651 if (CLASS_SUPER (decl
))
653 if (CLASS_ABSTRACT (decl
))
654 flags
|= ACC_ABSTRACT
;
655 if (CLASS_INTERFACE (decl
))
656 flags
|= ACC_INTERFACE
;
657 if (CLASS_STATIC (decl
))
659 if (CLASS_PRIVATE (decl
))
660 flags
|= ACC_PRIVATE
;
661 if (CLASS_PROTECTED (decl
))
662 flags
|= ACC_PROTECTED
;
663 if (ANONYMOUS_CLASS_P (TREE_TYPE (decl
))
664 || LOCAL_CLASS_P (TREE_TYPE (decl
)))
665 flags
|= ACC_PRIVATE
;
666 if (CLASS_STRICTFP (decl
))
672 if (TREE_CODE (decl
) == FUNCTION_DECL
)
674 if (METHOD_PUBLIC (decl
))
676 if (METHOD_FINAL (decl
))
678 if (METHOD_NATIVE (decl
))
680 if (METHOD_STATIC (decl
))
682 if (METHOD_SYNCHRONIZED (decl
))
683 flags
|= ACC_SYNCHRONIZED
;
684 if (METHOD_ABSTRACT (decl
))
685 flags
|= ACC_ABSTRACT
;
686 if (METHOD_STRICTFP (decl
))
691 if (FIELD_PUBLIC (decl
))
693 if (FIELD_FINAL (decl
))
695 if (FIELD_STATIC (decl
))
697 if (FIELD_VOLATILE (decl
))
698 flags
|= ACC_VOLATILE
;
699 if (FIELD_TRANSIENT (decl
))
700 flags
|= ACC_TRANSIENT
;
705 /* Write the list of segments starting at CHUNKS to STREAM. */
708 write_chunks (FILE* stream
, struct chunk
*chunks
)
710 for (; chunks
!= NULL
; chunks
= chunks
->next
)
711 fwrite (chunks
->data
, chunks
->size
, 1, stream
);
714 /* Push a 1-word constant in the constant pool at the given INDEX.
715 (Caller is responsible for doing NOTE_PUSH.) */
718 push_constant1 (HOST_WIDE_INT index
, struct jcf_partial
*state
)
733 /* Push a 2-word constant in the constant pool at the given INDEX.
734 (Caller is responsible for doing NOTE_PUSH.) */
737 push_constant2 (HOST_WIDE_INT index
, struct jcf_partial
*state
)
744 /* Push 32-bit integer constant on VM stack.
745 Caller is responsible for doing NOTE_PUSH. */
748 push_int_const (HOST_WIDE_INT i
, struct jcf_partial
*state
)
751 if (i
>= -1 && i
<= 5)
752 OP1(OPCODE_iconst_0
+ i
);
753 else if (i
>= -128 && i
< 128)
758 else if (i
>= -32768 && i
< 32768)
765 i
= find_constant1 (&state
->cpool
, CONSTANT_Integer
,
766 (jword
)(i
& 0xFFFFFFFF));
767 push_constant1 (i
, state
);
772 find_constant_wide (HOST_WIDE_INT lo
, HOST_WIDE_INT hi
,
773 struct jcf_partial
*state
)
775 unsigned HOST_WIDE_INT w1
;
777 lshift_double (lo
, hi
, -32, 64, &w1
, &w2
, 1);
778 return find_constant2 (&state
->cpool
, CONSTANT_Long
,
779 (jword
)(w1
& 0xFFFFFFFF), (jword
)(lo
& 0xFFFFFFFF));
782 /* Find or allocate a constant pool entry for the given VALUE.
783 Return the index in the constant pool. */
786 find_constant_index (tree value
, struct jcf_partial
*state
)
788 if (TREE_CODE (value
) == INTEGER_CST
)
790 if (TYPE_PRECISION (TREE_TYPE (value
)) <= 32)
791 return find_constant1 (&state
->cpool
, CONSTANT_Integer
,
792 (jword
)(TREE_INT_CST_LOW (value
) & 0xFFFFFFFF));
794 return find_constant_wide (TREE_INT_CST_LOW (value
),
795 TREE_INT_CST_HIGH (value
), state
);
797 else if (TREE_CODE (value
) == REAL_CST
)
801 /* IEEE NaN can have many values, but the Java VM spec defines a
803 if (flag_emit_class_files
804 && REAL_VALUE_ISNAN (TREE_REAL_CST (value
)))
806 if (TYPE_PRECISION (TREE_TYPE (value
)) == 32)
807 return find_constant1 (&state
->cpool
, CONSTANT_Float
,
810 return find_constant2 (&state
->cpool
, CONSTANT_Double
,
811 0x7ff80000, 0x00000000);
814 real_to_target (words
, &TREE_REAL_CST (value
),
815 TYPE_MODE (TREE_TYPE (value
)));
816 words
[0] &= 0xffffffff;
817 words
[1] &= 0xffffffff;
819 if (TYPE_PRECISION (TREE_TYPE (value
)) == 32)
820 return find_constant1 (&state
->cpool
, CONSTANT_Float
, (jword
)words
[0]);
822 return find_constant2 (&state
->cpool
, CONSTANT_Double
,
823 (jword
)words
[1-FLOAT_WORDS_BIG_ENDIAN
],
824 (jword
)words
[FLOAT_WORDS_BIG_ENDIAN
]);
826 else if (TREE_CODE (value
) == STRING_CST
)
827 return find_string_constant (&state
->cpool
, value
);
833 /* Push 64-bit long constant on VM stack.
834 Caller is responsible for doing NOTE_PUSH. */
837 push_long_const (HOST_WIDE_INT lo
, HOST_WIDE_INT hi
, struct jcf_partial
*state
)
839 unsigned HOST_WIDE_INT highpart
;
841 jint lowpart
= WORD_TO_INT (lo
);
843 rshift_double (lo
, hi
, 32, 64, &highpart
, &dummy
, 1);
845 if (highpart
== 0 && (lowpart
== 0 || lowpart
== 1))
848 OP1(OPCODE_lconst_0
+ lowpart
);
850 else if ((highpart
== 0 && lowpart
> 0 && lowpart
< 32768)
851 || (highpart
== (unsigned HOST_WIDE_INT
)-1
852 && lowpart
< 0 && lowpart
>= -32768))
854 push_int_const (lowpart
, state
);
859 push_constant2 (find_constant_wide (lo
, hi
, state
), state
);
863 field_op (tree field
, int opcode
, struct jcf_partial
*state
)
865 int index
= find_fieldref_index (&state
->cpool
, field
);
871 /* Returns an integer in the range 0 (for 'int') through 4 (for object
872 reference) to 7 (for 'short') which matches the pattern of how JVM
873 opcodes typically depend on the operand type. */
876 adjust_typed_op (tree type
, int max
)
878 switch (TREE_CODE (type
))
881 case RECORD_TYPE
: return 4;
883 return TYPE_PRECISION (type
) == 32 || max
< 5 ? 0 : 5;
885 return TYPE_PRECISION (type
) == 32 || max
< 6 ? 0 : 6;
887 switch (TYPE_PRECISION (type
))
889 case 8: return max
< 5 ? 0 : 5;
890 case 16: return max
< 7 ? 0 : 7;
896 switch (TYPE_PRECISION (type
))
909 maybe_wide (int opcode
, int index
, struct jcf_partial
*state
)
926 /* Compile code to duplicate with offset, where
927 SIZE is the size of the stack item to duplicate (1 or 2), abd
928 OFFSET is where to insert the result (must be 0, 1, or 2).
929 (The new words get inserted at stack[SP-size-offset].) */
932 emit_dup (int size
, int offset
, struct jcf_partial
*state
)
939 kind
= size
== 1 ? OPCODE_dup
: OPCODE_dup2
;
940 else if (offset
== 1)
941 kind
= size
== 1 ? OPCODE_dup_x1
: OPCODE_dup2_x1
;
942 else if (offset
== 2)
943 kind
= size
== 1 ? OPCODE_dup_x2
: OPCODE_dup2_x2
;
951 emit_pop (int size
, struct jcf_partial
*state
)
954 OP1 (OPCODE_pop
- 1 + size
);
958 emit_iinc (tree var
, HOST_WIDE_INT value
, struct jcf_partial
*state
)
960 int slot
= DECL_LOCAL_INDEX (var
);
962 if (value
< -128 || value
> 127 || slot
>= 256)
980 emit_load_or_store (tree var
, /* Variable to load from or store into. */
981 int opcode
, /* Either OPCODE_iload or OPCODE_istore. */
982 struct jcf_partial
*state
)
984 tree type
= TREE_TYPE (var
);
985 int kind
= adjust_typed_op (type
, 4);
986 int index
= DECL_LOCAL_INDEX (var
);
990 OP1 (opcode
+ 5 + 4 * kind
+ index
); /* [ilfda]{load,store}_[0123] */
993 maybe_wide (opcode
+ kind
, index
, state
); /* [ilfda]{load,store} */
997 emit_load (tree var
, struct jcf_partial
*state
)
999 emit_load_or_store (var
, OPCODE_iload
, state
);
1000 NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (var
)) ? 2 : 1);
1004 emit_store (tree var
, struct jcf_partial
*state
)
1006 emit_load_or_store (var
, OPCODE_istore
, state
);
1007 NOTE_POP (TYPE_IS_WIDE (TREE_TYPE (var
)) ? 2 : 1);
1011 emit_unop (enum java_opcode opcode
, tree type ATTRIBUTE_UNUSED
,
1012 struct jcf_partial
*state
)
1019 emit_binop (enum java_opcode opcode
, tree type
, struct jcf_partial
*state
)
1021 int size
= TYPE_IS_WIDE (type
) ? 2 : 1;
1028 emit_reloc (HOST_WIDE_INT value
, int kind
,
1029 struct jcf_block
*target
, struct jcf_partial
*state
)
1031 struct jcf_relocation
*reloc
1032 = obstack_alloc (state
->chunk_obstack
, sizeof (struct jcf_relocation
));
1033 struct jcf_block
*block
= state
->last_block
;
1034 reloc
->next
= block
->u
.relocations
;
1035 block
->u
.relocations
= reloc
;
1036 reloc
->offset
= BUFFER_LENGTH (&state
->bytecode
);
1037 reloc
->label
= target
;
1039 if (kind
== 0 || kind
== BLOCK_START_RELOC
)
1041 else if (kind
!= SWITCH_ALIGN_RELOC
)
1046 emit_switch_reloc (struct jcf_block
*label
, struct jcf_partial
*state
)
1048 emit_reloc (RELOCATION_VALUE_0
, BLOCK_START_RELOC
, label
, state
);
1051 /* Similar to emit_switch_reloc,
1052 but re-uses an existing case reloc. */
1055 emit_case_reloc (struct jcf_relocation
*reloc
, struct jcf_partial
*state
)
1057 struct jcf_block
*block
= state
->last_block
;
1058 reloc
->next
= block
->u
.relocations
;
1059 block
->u
.relocations
= reloc
;
1060 reloc
->offset
= BUFFER_LENGTH (&state
->bytecode
);
1061 reloc
->kind
= BLOCK_START_RELOC
;
1065 /* Emit a conditional jump to TARGET with a 2-byte relative jump offset
1066 The opcode is OPCODE, the inverted opcode is INV_OPCODE. */
1069 emit_if (struct jcf_block
*target
, int opcode
, int inv_opcode
,
1070 struct jcf_partial
*state
)
1074 /* value is 1 byte from reloc back to start of instruction. */
1075 emit_reloc (RELOCATION_VALUE_1
, - inv_opcode
, target
, state
);
1079 emit_goto (struct jcf_block
*target
, struct jcf_partial
*state
)
1083 /* Value is 1 byte from reloc back to start of instruction. */
1084 emit_reloc (RELOCATION_VALUE_1
, OPCODE_goto_w
, target
, state
);
1088 emit_jsr (struct jcf_block
*target
, struct jcf_partial
*state
)
1092 /* Value is 1 byte from reloc back to start of instruction. */
1093 emit_reloc (RELOCATION_VALUE_1
, OPCODE_jsr_w
, target
, state
);
1097 /* Generate code to evaluate EXP. If the result is true,
1098 branch to TRUE_LABEL; otherwise, branch to FALSE_LABEL.
1099 TRUE_BRANCH_FIRST is a code generation hint that the
1100 TRUE_LABEL may follow right after this. (The idea is that we
1101 may be able to optimize away GOTO TRUE_LABEL; TRUE_LABEL:) */
1104 generate_bytecode_conditional (tree exp
,
1105 struct jcf_block
*true_label
,
1106 struct jcf_block
*false_label
,
1107 int true_branch_first
,
1108 struct jcf_partial
*state
)
1110 tree exp0
, exp1
, type
;
1111 int save_SP
= state
->code_SP
;
1112 enum java_opcode op
, negop
;
1115 switch (TREE_CODE (exp
))
1118 emit_goto (integer_zerop (exp
) ? false_label
: true_label
, state
);
1122 struct jcf_block
*then_label
= gen_jcf_label (state
);
1123 struct jcf_block
*else_label
= gen_jcf_label (state
);
1124 int save_SP_before
, save_SP_after
;
1125 generate_bytecode_conditional (TREE_OPERAND (exp
, 0),
1126 then_label
, else_label
, 1, state
);
1127 define_jcf_label (then_label
, state
);
1128 save_SP_before
= state
->code_SP
;
1129 generate_bytecode_conditional (TREE_OPERAND (exp
, 1),
1130 true_label
, false_label
, 1, state
);
1131 save_SP_after
= state
->code_SP
;
1132 state
->code_SP
= save_SP_before
;
1133 define_jcf_label (else_label
, state
);
1134 generate_bytecode_conditional (TREE_OPERAND (exp
, 2),
1135 true_label
, false_label
,
1136 true_branch_first
, state
);
1137 if (state
->code_SP
!= save_SP_after
)
1141 case TRUTH_NOT_EXPR
:
1142 generate_bytecode_conditional (TREE_OPERAND (exp
, 0), false_label
,
1143 true_label
, ! true_branch_first
, state
);
1145 case TRUTH_ANDIF_EXPR
:
1147 struct jcf_block
*next_label
= gen_jcf_label (state
);
1148 generate_bytecode_conditional (TREE_OPERAND (exp
, 0),
1149 next_label
, false_label
, 1, state
);
1150 define_jcf_label (next_label
, state
);
1151 generate_bytecode_conditional (TREE_OPERAND (exp
, 1),
1152 true_label
, false_label
, 1, state
);
1155 case TRUTH_ORIF_EXPR
:
1157 struct jcf_block
*next_label
= gen_jcf_label (state
);
1158 generate_bytecode_conditional (TREE_OPERAND (exp
, 0),
1159 true_label
, next_label
, 1, state
);
1160 define_jcf_label (next_label
, state
);
1161 generate_bytecode_conditional (TREE_OPERAND (exp
, 1),
1162 true_label
, false_label
, 1, state
);
1166 /* Assuming op is one of the 2-operand if_icmp<COND> instructions,
1167 set it to the corresponding 1-operand if<COND> instructions. */
1171 /* The opcodes with their inverses are allocated in pairs.
1172 E.g. The inverse of if_icmplt (161) is if_icmpge (162). */
1173 negop
= (op
& 1) ? op
+ 1 : op
- 1;
1175 if (true_branch_first
)
1177 emit_if (false_label
, negop
, op
, state
);
1178 emit_goto (true_label
, state
);
1182 emit_if (true_label
, op
, negop
, state
);
1183 emit_goto (false_label
, state
);
1190 op
= OPCODE_if_icmpeq
;
1196 op
= OPCODE_if_icmpne
;
1202 op
= OPCODE_if_icmpgt
;
1208 op
= OPCODE_if_icmplt
;
1214 op
= OPCODE_if_icmpge
;
1220 op
= OPCODE_if_icmple
;
1226 /* UNLT_EXPR(a, b) means 'a < b || unordered(a, b)'. This is
1227 the same as the Java source expression '!(a >= b)', so handle
1229 struct jcf_block
*tmp
= true_label
;
1230 true_label
= false_label
;
1232 true_branch_first
= !true_branch_first
;
1235 exp0
= TREE_OPERAND (exp
, 0);
1236 exp1
= TREE_OPERAND (exp
, 1);
1237 type
= TREE_TYPE (exp0
);
1238 switch (TREE_CODE (type
))
1241 case POINTER_TYPE
: case RECORD_TYPE
:
1242 switch (TREE_CODE (exp
))
1244 case EQ_EXPR
: op
= OPCODE_if_acmpeq
; break;
1245 case NE_EXPR
: op
= OPCODE_if_acmpne
; break;
1248 if (integer_zerop (exp1
) || integer_zerop (exp0
))
1250 generate_bytecode_insns (integer_zerop (exp0
) ? exp1
: exp0
,
1251 STACK_TARGET
, state
);
1252 op
= op
+ (OPCODE_ifnull
- OPCODE_if_acmpeq
);
1253 negop
= (op
& 1) ? op
- 1 : op
+ 1;
1257 generate_bytecode_insns (exp0
, STACK_TARGET
, state
);
1258 generate_bytecode_insns (exp1
, STACK_TARGET
, state
);
1262 generate_bytecode_insns (exp0
, STACK_TARGET
, state
);
1263 generate_bytecode_insns (exp1
, STACK_TARGET
, state
);
1264 if (op
== OPCODE_if_icmplt
|| op
== OPCODE_if_icmple
)
1268 if (TYPE_PRECISION (type
) > 32)
1279 if (TYPE_PRECISION (type
) > 32)
1281 generate_bytecode_insns (exp0
, STACK_TARGET
, state
);
1282 generate_bytecode_insns (exp1
, STACK_TARGET
, state
);
1290 if (integer_zerop (exp1
))
1292 generate_bytecode_insns (exp0
, STACK_TARGET
, state
);
1296 if (integer_zerop (exp0
))
1300 case OPCODE_if_icmplt
:
1301 case OPCODE_if_icmpge
:
1304 case OPCODE_if_icmpgt
:
1305 case OPCODE_if_icmple
:
1311 generate_bytecode_insns (exp1
, STACK_TARGET
, state
);
1315 generate_bytecode_insns (exp0
, STACK_TARGET
, state
);
1316 generate_bytecode_insns (exp1
, STACK_TARGET
, state
);
1322 generate_bytecode_insns (exp
, STACK_TARGET
, state
);
1324 if (true_branch_first
)
1326 emit_if (false_label
, OPCODE_ifeq
, OPCODE_ifne
, state
);
1327 emit_goto (true_label
, state
);
1331 emit_if (true_label
, OPCODE_ifne
, OPCODE_ifeq
, state
);
1332 emit_goto (false_label
, state
);
1336 if (save_SP
!= state
->code_SP
)
1340 /* Call pending cleanups i.e. those for surrounding TRY_FINALLY_EXPRs.
1341 but only as far out as LIMIT (since we are about to jump to the
1342 emit label that is LIMIT). */
1345 call_cleanups (struct jcf_block
*limit
, struct jcf_partial
*state
)
1347 struct jcf_block
*block
= state
->labeled_blocks
;
1348 for (; block
!= limit
; block
= block
->next
)
1350 if (block
->pc
== PENDING_CLEANUP_PC
)
1351 emit_jsr (block
, state
);
/* Emit bytecode to return the value of EXP from the current method.
   NOTE(review): this excerpt is elided -- braces, case labels and some
   statements of the original are missing; comments below mark what the
   visible code establishes and hedge where structure is hidden.  */
1356 generate_bytecode_return (tree exp
, struct jcf_partial
*state
)
/* The method's declared return type, and whether it is `void'.  */
1358 tree return_type
= TREE_TYPE (TREE_TYPE (state
->current_method
));
1359 int returns_void
= TREE_CODE (return_type
) == VOID_TYPE
;
/* Dispatch on the form of the returned expression.  */
1364 switch (TREE_CODE (exp
))
))
/* Compound expression: emit the first operand for side effects only
   (IGNORE_TARGET), then return the second operand.  */
1367 generate_bytecode_insns (TREE_OPERAND (exp
, 0), IGNORE_TARGET
,
1369 exp
= TREE_OPERAND (exp
, 1);
/* Conditional expression: branch on the condition, then emit a full
   return sequence from each arm (recursive calls below).  */
1373 struct jcf_block
*then_label
= gen_jcf_label (state
);
1374 struct jcf_block
*else_label
= gen_jcf_label (state
);
1375 generate_bytecode_conditional (TREE_OPERAND (exp
, 0),
1376 then_label
, else_label
, 1, state
);
1377 define_jcf_label (then_label
, state
);
1378 generate_bytecode_return (TREE_OPERAND (exp
, 1), state
);
1379 define_jcf_label (else_label
, state
);
1380 generate_bytecode_return (TREE_OPERAND (exp
, 2), state
);
/* Presumably the default case (label elided): evaluate EXP, pushing
   its value on the stack unless the method returns void -- TODO
   confirm against the unelided source.  */
1384 generate_bytecode_insns (exp
,
1385 returns_void
? IGNORE_TARGET
1386 : STACK_TARGET
, state
);
/* Void return path: run all pending finally-cleanups before
   returning (the `return' opcode emission itself is elided here).  */
1392 call_cleanups (NULL
, state
);
/* Non-void: pick the typed return opcode (ireturn/lreturn/freturn/
   dreturn/areturn) from the return type.  */
1396 op
= OPCODE_ireturn
+ adjust_typed_op (return_type
, 4);
/* If any finally-blocks are pending, the return value must survive
   the cleanup subroutines: park it in a dedicated local, run the
   cleanups, then reload it before returning.  */
1397 if (state
->num_finalizers
> 0)
/* Lazily create the temporary that holds the pending return value.  */
1399 if (state
->return_value_decl
== NULL_TREE
)
1401 state
->return_value_decl
1402 = build_decl (VAR_DECL
, NULL_TREE
, TREE_TYPE (exp
));
1403 localvar_alloc (state
->return_value_decl
, state
);
/* Save value, invoke all cleanups, restore value.  */
1405 emit_store (state
->return_value_decl
, state
);
1406 call_cleanups (NULL
, state
);
1407 emit_load (state
->return_value_decl
, state
);
1408 /* If we call maybe_free_localvar (state->return_value_decl, state, 1),
1409 then we risk the save decl erroneously re-used in the
1410 finalizer. Instead, we keep the state->return_value_decl
1411 allocated through the rest of the method. This is not
1412 the greatest solution, but it is at least simple and safe. */
1419 /* Generate bytecode for sub-expression EXP of METHOD.
1420 TARGET is one of STACK_TARGET or IGNORE_TARGET. */
1423 generate_bytecode_insns (tree exp
, int target
, struct jcf_partial
*state
)
1426 enum java_opcode jopcode
;
1428 HOST_WIDE_INT value
;
1433 if (exp
== NULL
&& target
== IGNORE_TARGET
)
1436 type
= TREE_TYPE (exp
);
1438 switch (TREE_CODE (exp
))
1441 if (BLOCK_EXPR_BODY (exp
))
1444 tree body
= BLOCK_EXPR_BODY (exp
);
1445 long jsrs
= state
->num_jsrs
;
1446 for (local
= BLOCK_EXPR_DECLS (exp
); local
; )
1448 tree next
= TREE_CHAIN (local
);
1449 localvar_alloc (local
, state
);
1452 /* Avoid deep recursion for long blocks. */
1453 while (TREE_CODE (body
) == COMPOUND_EXPR
)
1455 generate_bytecode_insns (TREE_OPERAND (body
, 0), target
, state
);
1456 body
= TREE_OPERAND (body
, 1);
1458 generate_bytecode_insns (body
, target
, state
);
1460 for (local
= BLOCK_EXPR_DECLS (exp
); local
; )
1462 tree next
= TREE_CHAIN (local
);
1463 maybe_free_localvar (local
, state
, state
->num_jsrs
<= jsrs
);
1469 generate_bytecode_insns (TREE_OPERAND (exp
, 0), IGNORE_TARGET
, state
);
1470 /* Normally the first operand to a COMPOUND_EXPR must complete
1471 normally. However, in the special case of a do-while
1472 statement this is not necessarily the case. */
1473 if (CAN_COMPLETE_NORMALLY (TREE_OPERAND (exp
, 0)))
1474 generate_bytecode_insns (TREE_OPERAND (exp
, 1), target
, state
);
1476 case EXPR_WITH_FILE_LOCATION
:
1478 location_t saved_location
= input_location
;
1479 tree body
= EXPR_WFL_NODE (exp
);
1480 if (IS_EMPTY_STMT (body
))
1482 input_filename
= EXPR_WFL_FILENAME (exp
);
1483 input_line
= EXPR_WFL_LINENO (exp
);
1484 if (EXPR_WFL_EMIT_LINE_NOTE (exp
) && input_line
> 0
1485 && debug_info_level
> DINFO_LEVEL_NONE
)
1486 put_linenumber (input_line
, state
);
1487 generate_bytecode_insns (body
, target
, state
);
1488 input_location
= saved_location
;
1492 if (target
== IGNORE_TARGET
) ; /* do nothing */
1493 else if (TREE_CODE (type
) == POINTER_TYPE
)
1495 if (! integer_zerop (exp
))
1498 OP1 (OPCODE_aconst_null
);
1501 else if (TYPE_PRECISION (type
) <= 32)
1503 push_int_const (TREE_INT_CST_LOW (exp
), state
);
1508 push_long_const (TREE_INT_CST_LOW (exp
), TREE_INT_CST_HIGH (exp
),
1515 int prec
= TYPE_PRECISION (type
) >> 5;
1517 if (real_zerop (exp
) && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (exp
)))
1518 OP1 (prec
== 1 ? OPCODE_fconst_0
: OPCODE_dconst_0
);
1519 else if (real_onep (exp
))
1520 OP1 (prec
== 1 ? OPCODE_fconst_1
: OPCODE_dconst_1
);
1521 else if (prec
== 1 && real_twop (exp
))
1522 OP1 (OPCODE_fconst_2
);
1523 /* ??? We could also use iconst_3/ldc followed by i2f/i2d
1524 for other float/double when the value is a small integer. */
1527 offset
= find_constant_index (exp
, state
);
1529 push_constant1 (offset
, state
);
1531 push_constant2 (offset
, state
);
1537 push_constant1 (find_string_constant (&state
->cpool
, exp
), state
);
1541 if (TREE_STATIC (exp
))
1543 field_op (exp
, OPCODE_getstatic
, state
);
1544 NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (exp
)) ? 2 : 1);
1547 /* ... fall through ... */
1549 emit_load (exp
, state
);
1551 case NON_LVALUE_EXPR
:
1553 generate_bytecode_insns (TREE_OPERAND (exp
, 0), target
, state
);
1556 generate_bytecode_insns (TREE_OPERAND (exp
, 0), target
, state
);
1557 generate_bytecode_insns (TREE_OPERAND (exp
, 1), target
, state
);
1558 if (target
!= IGNORE_TARGET
)
1560 jopcode
= OPCODE_iaload
+ adjust_typed_op (type
, 7);
1563 if (! TYPE_IS_WIDE (type
))
1569 tree obj
= TREE_OPERAND (exp
, 0);
1570 tree field
= TREE_OPERAND (exp
, 1);
1571 int is_static
= FIELD_STATIC (field
);
1572 generate_bytecode_insns (obj
,
1573 is_static
? IGNORE_TARGET
: target
, state
);
1574 if (target
!= IGNORE_TARGET
)
1576 if (DECL_NAME (field
) == length_identifier_node
&& !is_static
1577 && TYPE_ARRAY_P (TREE_TYPE (obj
)))
1580 OP1 (OPCODE_arraylength
);
1584 field_op (field
, is_static
? OPCODE_getstatic
: OPCODE_getfield
,
1588 NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (field
)) ? 2 : 1);
1593 case TRUTH_ANDIF_EXPR
:
1594 case TRUTH_ORIF_EXPR
:
1608 struct jcf_block
*then_label
= gen_jcf_label (state
);
1609 struct jcf_block
*else_label
= gen_jcf_label (state
);
1610 struct jcf_block
*end_label
= gen_jcf_label (state
);
1611 generate_bytecode_conditional (exp
,
1612 then_label
, else_label
, 1, state
);
1613 define_jcf_label (then_label
, state
);
1614 push_int_const (1, state
);
1615 emit_goto (end_label
, state
);
1616 define_jcf_label (else_label
, state
);
1617 push_int_const (0, state
);
1618 define_jcf_label (end_label
, state
);
1624 struct jcf_block
*then_label
= gen_jcf_label (state
);
1625 struct jcf_block
*else_label
= gen_jcf_label (state
);
1626 struct jcf_block
*end_label
= gen_jcf_label (state
);
1627 generate_bytecode_conditional (TREE_OPERAND (exp
, 0),
1628 then_label
, else_label
, 1, state
);
1629 define_jcf_label (then_label
, state
);
1630 generate_bytecode_insns (TREE_OPERAND (exp
, 1), target
, state
);
1631 if (CAN_COMPLETE_NORMALLY (TREE_OPERAND (exp
, 1))
1632 /* Not all expressions have CAN_COMPLETE_NORMALLY set properly. */
1633 || TREE_CODE (TREE_TYPE (exp
)) != VOID_TYPE
)
1634 emit_goto (end_label
, state
);
1635 define_jcf_label (else_label
, state
);
1636 generate_bytecode_insns (TREE_OPERAND (exp
, 2), target
, state
);
1637 define_jcf_label (end_label
, state
);
1638 /* COND_EXPR can be used in a binop. The stack must be adjusted. */
1639 if (TREE_TYPE (exp
) != void_type_node
)
1640 NOTE_POP (TYPE_IS_WIDE (TREE_TYPE (exp
)) ? 2 : 1);
1645 struct jcf_switch_state
*sw_state
= state
->sw_state
;
1646 struct jcf_relocation
*reloc
1647 = obstack_alloc (state
->chunk_obstack
, sizeof (struct jcf_relocation
));
1648 HOST_WIDE_INT case_value
= TREE_INT_CST_LOW (TREE_OPERAND (exp
, 0));
1650 reloc
->label
= get_jcf_label_here (state
);
1651 reloc
->offset
= case_value
;
1652 reloc
->next
= sw_state
->cases
;
1653 sw_state
->cases
= reloc
;
1654 if (sw_state
->num_cases
== 0)
1656 sw_state
->min_case
= case_value
;
1657 sw_state
->max_case
= case_value
;
1661 if (case_value
< sw_state
->min_case
)
1662 sw_state
->min_case
= case_value
;
1663 if (case_value
> sw_state
->max_case
)
1664 sw_state
->max_case
= case_value
;
1666 sw_state
->num_cases
++;
1670 state
->sw_state
->default_label
= get_jcf_label_here (state
);
1675 /* The SWITCH_EXPR has three parts, generated in the following order:
1676 1. the switch_expression (the value used to select the correct case);
1678 3. the switch_instruction (the tableswitch/loopupswitch instruction.).
1679 After code generation, we will re-order them in the order 1, 3, 2.
1680 This is to avoid any extra GOTOs. */
1681 struct jcf_switch_state sw_state
;
1682 struct jcf_block
*expression_last
; /* Last block of the switch_expression. */
1683 struct jcf_block
*body_last
; /* Last block of the switch_body. */
1684 struct jcf_block
*switch_instruction
; /* First block of switch_instruction. */
1685 struct jcf_block
*instruction_last
; /* Last block of the switch_instruction. */
1686 struct jcf_block
*body_block
;
1688 sw_state
.prev
= state
->sw_state
;
1689 state
->sw_state
= &sw_state
;
1690 sw_state
.cases
= NULL
;
1691 sw_state
.num_cases
= 0;
1692 sw_state
.default_label
= NULL
;
1693 generate_bytecode_insns (TREE_OPERAND (exp
, 0), STACK_TARGET
, state
);
1694 expression_last
= state
->last_block
;
1695 /* Force a new block here. */
1696 body_block
= gen_jcf_label (state
);
1697 define_jcf_label (body_block
, state
);
1698 generate_bytecode_insns (TREE_OPERAND (exp
, 1), IGNORE_TARGET
, state
);
1699 body_last
= state
->last_block
;
1701 switch_instruction
= gen_jcf_label (state
);
1702 define_jcf_label (switch_instruction
, state
);
1703 if (sw_state
.default_label
== NULL
)
1704 sw_state
.default_label
= gen_jcf_label (state
);
1706 if (sw_state
.num_cases
<= 1)
1708 if (sw_state
.num_cases
== 0)
1710 emit_pop (1, state
);
1715 push_int_const (sw_state
.cases
->offset
, state
);
1717 emit_if (sw_state
.cases
->label
,
1718 OPCODE_if_icmpeq
, OPCODE_if_icmpne
, state
);
1720 emit_goto (sw_state
.default_label
, state
);
1725 unsigned HOST_WIDE_INT delta
;
1726 /* Copy the chain of relocs into a sorted array. */
1727 struct jcf_relocation
**relocs
1728 = xmalloc (sw_state
.num_cases
* sizeof (struct jcf_relocation
*));
1729 /* The relocs arrays is a buffer with a gap.
1730 The assumption is that cases will normally come in "runs". */
1732 int gap_end
= sw_state
.num_cases
;
1733 struct jcf_relocation
*reloc
;
1734 for (reloc
= sw_state
.cases
; reloc
!= NULL
; reloc
= reloc
->next
)
1736 HOST_WIDE_INT case_value
= reloc
->offset
;
1737 while (gap_end
< sw_state
.num_cases
)
1739 struct jcf_relocation
*end
= relocs
[gap_end
];
1740 if (case_value
<= end
->offset
)
1742 relocs
[gap_start
++] = end
;
1745 while (gap_start
> 0)
1747 struct jcf_relocation
*before
= relocs
[gap_start
-1];
1748 if (case_value
>= before
->offset
)
1750 relocs
[--gap_end
] = before
;
1753 relocs
[gap_start
++] = reloc
;
1754 /* Note we don't check for duplicates. This is
1755 handled by the parser. */
1758 /* We could have DELTA < 0 if sw_state.min_case is
1759 something like Integer.MIN_VALUE. That is why delta is
1761 delta
= sw_state
.max_case
- sw_state
.min_case
;
1762 if (2 * (unsigned) sw_state
.num_cases
>= delta
)
1763 { /* Use tableswitch. */
1765 RESERVE (13 + 4 * (sw_state
.max_case
- sw_state
.min_case
+ 1));
1766 OP1 (OPCODE_tableswitch
);
1767 emit_reloc (RELOCATION_VALUE_0
,
1768 SWITCH_ALIGN_RELOC
, NULL
, state
);
1769 emit_switch_reloc (sw_state
.default_label
, state
);
1770 OP4 (sw_state
.min_case
);
1771 OP4 (sw_state
.max_case
);
1772 for (i
= sw_state
.min_case
; ; )
1774 reloc
= relocs
[index
];
1775 if (i
== reloc
->offset
)
1777 emit_case_reloc (reloc
, state
);
1778 if (i
== sw_state
.max_case
)
1783 emit_switch_reloc (sw_state
.default_label
, state
);
1788 { /* Use lookupswitch. */
1789 RESERVE(9 + 8 * sw_state
.num_cases
);
1790 OP1 (OPCODE_lookupswitch
);
1791 emit_reloc (RELOCATION_VALUE_0
,
1792 SWITCH_ALIGN_RELOC
, NULL
, state
);
1793 emit_switch_reloc (sw_state
.default_label
, state
);
1794 OP4 (sw_state
.num_cases
);
1795 for (i
= 0; i
< sw_state
.num_cases
; i
++)
1797 struct jcf_relocation
*reloc
= relocs
[i
];
1798 OP4 (reloc
->offset
);
1799 emit_case_reloc (reloc
, state
);
1805 instruction_last
= state
->last_block
;
1806 if (sw_state
.default_label
->pc
< 0)
1807 define_jcf_label (sw_state
.default_label
, state
);
1808 else /* Force a new block. */
1809 sw_state
.default_label
= get_jcf_label_here (state
);
1810 /* Now re-arrange the blocks so the switch_instruction
1811 comes before the switch_body. */
1812 switch_length
= state
->code_length
- switch_instruction
->pc
;
1813 switch_instruction
->pc
= body_block
->pc
;
1814 instruction_last
->next
= body_block
;
1815 instruction_last
->v
.chunk
->next
= body_block
->v
.chunk
;
1816 expression_last
->next
= switch_instruction
;
1817 expression_last
->v
.chunk
->next
= switch_instruction
->v
.chunk
;
1818 body_last
->next
= sw_state
.default_label
;
1819 body_last
->v
.chunk
->next
= NULL
;
1820 state
->chunk
= body_last
->v
.chunk
;
1821 for (; body_block
!= sw_state
.default_label
; body_block
= body_block
->next
)
1822 body_block
->pc
+= switch_length
;
1824 state
->sw_state
= sw_state
.prev
;
1829 exp
= TREE_OPERAND (exp
, 0);
1830 if (exp
== NULL_TREE
)
1831 exp
= build_java_empty_stmt ();
1832 else if (TREE_CODE (exp
) != MODIFY_EXPR
)
1835 exp
= TREE_OPERAND (exp
, 1);
1836 generate_bytecode_return (exp
, state
);
1838 case LABELED_BLOCK_EXPR
:
1840 struct jcf_block
*end_label
= gen_jcf_label (state
);
1841 end_label
->next
= state
->labeled_blocks
;
1842 state
->labeled_blocks
= end_label
;
1843 end_label
->pc
= PENDING_EXIT_PC
;
1844 end_label
->u
.labeled_block
= exp
;
1845 if (LABELED_BLOCK_BODY (exp
))
1846 generate_bytecode_insns (LABELED_BLOCK_BODY (exp
), target
, state
);
1847 if (state
->labeled_blocks
!= end_label
)
1849 state
->labeled_blocks
= end_label
->next
;
1850 define_jcf_label (end_label
, state
);
1855 tree body
= TREE_OPERAND (exp
, 0);
1857 if (TREE_CODE (body
) == COMPOUND_EXPR
1858 && TREE_CODE (TREE_OPERAND (body
, 0)) == EXIT_EXPR
)
1860 /* Optimize: H: if (TEST) GOTO L; BODY; GOTO H; L:
1861 to: GOTO L; BODY; L: if (!TEST) GOTO L; */
1862 struct jcf_block
*head_label
;
1863 struct jcf_block
*body_label
;
1864 struct jcf_block
*end_label
= gen_jcf_label (state
);
1865 struct jcf_block
*exit_label
= state
->labeled_blocks
;
1866 head_label
= gen_jcf_label (state
);
1867 emit_goto (head_label
, state
);
1868 body_label
= get_jcf_label_here (state
);
1869 generate_bytecode_insns (TREE_OPERAND (body
, 1), target
, state
);
1870 define_jcf_label (head_label
, state
);
1871 generate_bytecode_conditional (TREE_OPERAND (body
, 0),
1872 end_label
, body_label
, 1, state
);
1873 define_jcf_label (end_label
, state
);
1878 struct jcf_block
*head_label
= get_jcf_label_here (state
);
1879 generate_bytecode_insns (body
, IGNORE_TARGET
, state
);
1880 if (CAN_COMPLETE_NORMALLY (body
))
1881 emit_goto (head_label
, state
);
1887 struct jcf_block
*label
= state
->labeled_blocks
;
1888 struct jcf_block
*end_label
= gen_jcf_label (state
);
1889 generate_bytecode_conditional (TREE_OPERAND (exp
, 0),
1890 label
, end_label
, 0, state
);
1891 define_jcf_label (end_label
, state
);
1894 case EXIT_BLOCK_EXPR
:
1896 struct jcf_block
*label
= state
->labeled_blocks
;
1897 if (TREE_OPERAND (exp
, 1) != NULL
) goto notimpl
;
1898 while (label
->u
.labeled_block
!= TREE_OPERAND (exp
, 0))
1899 label
= label
->next
;
1900 call_cleanups (label
, state
);
1901 emit_goto (label
, state
);
1905 case PREDECREMENT_EXPR
: value
= -1; post_op
= 0; goto increment
;
1906 case PREINCREMENT_EXPR
: value
= 1; post_op
= 0; goto increment
;
1907 case POSTDECREMENT_EXPR
: value
= -1; post_op
= 1; goto increment
;
1908 case POSTINCREMENT_EXPR
: value
= 1; post_op
= 1; goto increment
;
1911 arg
= TREE_OPERAND (exp
, 1);
1912 exp
= TREE_OPERAND (exp
, 0);
1913 type
= TREE_TYPE (exp
);
1914 size
= TYPE_IS_WIDE (type
) ? 2 : 1;
1915 if ((TREE_CODE (exp
) == VAR_DECL
|| TREE_CODE (exp
) == PARM_DECL
)
1916 && ! TREE_STATIC (exp
)
1917 && TREE_CODE (type
) == INTEGER_TYPE
1918 && TYPE_PRECISION (type
) == 32)
1920 if (target
!= IGNORE_TARGET
&& post_op
)
1921 emit_load (exp
, state
);
1922 emit_iinc (exp
, value
, state
);
1923 if (target
!= IGNORE_TARGET
&& ! post_op
)
1924 emit_load (exp
, state
);
1927 if (TREE_CODE (exp
) == COMPONENT_REF
)
1929 generate_bytecode_insns (TREE_OPERAND (exp
, 0), STACK_TARGET
, state
);
1930 emit_dup (1, 0, state
);
1931 /* Stack: ..., objectref, objectref. */
1932 field_op (TREE_OPERAND (exp
, 1), OPCODE_getfield
, state
);
1934 /* Stack: ..., objectref, oldvalue. */
1937 else if (TREE_CODE (exp
) == ARRAY_REF
)
1939 generate_bytecode_insns (TREE_OPERAND (exp
, 0), STACK_TARGET
, state
);
1940 generate_bytecode_insns (TREE_OPERAND (exp
, 1), STACK_TARGET
, state
);
1941 emit_dup (2, 0, state
);
1942 /* Stack: ..., array, index, array, index. */
1943 jopcode
= OPCODE_iaload
+ adjust_typed_op (TREE_TYPE (exp
), 7);
1947 /* Stack: ..., array, index, oldvalue. */
1950 else if (TREE_CODE (exp
) == VAR_DECL
|| TREE_CODE (exp
) == PARM_DECL
)
1952 generate_bytecode_insns (exp
, STACK_TARGET
, state
);
1953 /* Stack: ..., oldvalue. */
1959 if (target
!= IGNORE_TARGET
&& post_op
)
1960 emit_dup (size
, offset
, state
);
1961 /* Stack, if ARRAY_REF: ..., [result, ] array, index, oldvalue. */
1962 /* Stack, if COMPONENT_REF: ..., [result, ] objectref, oldvalue. */
1963 /* Stack, otherwise: ..., [result, ] oldvalue. */
1964 generate_bytecode_insns (arg
, STACK_TARGET
, state
);
1965 emit_binop ((value
>= 0 ? OPCODE_iadd
: OPCODE_isub
)
1966 + adjust_typed_op (type
, 3),
1968 if (target
!= IGNORE_TARGET
&& ! post_op
)
1969 emit_dup (size
, offset
, state
);
1970 /* Stack, if ARRAY_REF: ..., [result, ] array, index, newvalue. */
1971 /* Stack, if COMPONENT_REF: ..., [result, ] objectref, newvalue. */
1972 /* Stack, otherwise: ..., [result, ] newvalue. */
1973 goto finish_assignment
;
1977 tree lhs
= TREE_OPERAND (exp
, 0);
1978 tree rhs
= TREE_OPERAND (exp
, 1);
1981 /* See if we can use the iinc instruction. */
1982 if ((TREE_CODE (lhs
) == VAR_DECL
|| TREE_CODE (lhs
) == PARM_DECL
)
1983 && ! TREE_STATIC (lhs
)
1984 && TREE_CODE (TREE_TYPE (lhs
)) == INTEGER_TYPE
1985 && TYPE_PRECISION (TREE_TYPE (lhs
)) == 32
1986 && (TREE_CODE (rhs
) == PLUS_EXPR
|| TREE_CODE (rhs
) == MINUS_EXPR
))
1988 tree arg0
= TREE_OPERAND (rhs
, 0);
1989 tree arg1
= TREE_OPERAND (rhs
, 1);
1990 HOST_WIDE_INT min_value
= -32768;
1991 HOST_WIDE_INT max_value
= 32767;
1992 if (TREE_CODE (rhs
) == MINUS_EXPR
)
1997 else if (arg1
== lhs
)
2000 arg1
= TREE_OPERAND (rhs
, 0);
2002 if (lhs
== arg0
&& TREE_CODE (arg1
) == INTEGER_CST
)
2004 HOST_WIDE_INT hi_value
= TREE_INT_CST_HIGH (arg1
);
2005 value
= TREE_INT_CST_LOW (arg1
);
2006 if ((hi_value
== 0 && value
<= max_value
)
2007 || (hi_value
== -1 && value
>= min_value
))
2009 if (TREE_CODE (rhs
) == MINUS_EXPR
)
2011 emit_iinc (lhs
, value
, state
);
2012 if (target
!= IGNORE_TARGET
)
2013 emit_load (lhs
, state
);
2019 if (TREE_CODE (lhs
) == COMPONENT_REF
)
2021 generate_bytecode_insns (TREE_OPERAND (lhs
, 0),
2022 STACK_TARGET
, state
);
2025 else if (TREE_CODE (lhs
) == ARRAY_REF
)
2027 generate_bytecode_insns (TREE_OPERAND(lhs
, 0),
2028 STACK_TARGET
, state
);
2029 generate_bytecode_insns (TREE_OPERAND(lhs
, 1),
2030 STACK_TARGET
, state
);
2036 /* If the rhs is a binary expression and the left operand is
2037 `==' to the lhs then we have an OP= expression. In this
2038 case we must do some special processing. */
2039 if (BINARY_CLASS_P (rhs
) && lhs
== TREE_OPERAND (rhs
, 0))
2041 if (TREE_CODE (lhs
) == COMPONENT_REF
)
2043 tree field
= TREE_OPERAND (lhs
, 1);
2044 if (! FIELD_STATIC (field
))
2046 /* Duplicate the object reference so we can get
2048 emit_dup (TYPE_IS_WIDE (field
) ? 2 : 1, 0, state
);
2051 field_op (field
, (FIELD_STATIC (field
)
2056 NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (field
)) ? 2 : 1);
2058 else if (TREE_CODE (lhs
) == VAR_DECL
2059 || TREE_CODE (lhs
) == PARM_DECL
)
2061 if (FIELD_STATIC (lhs
))
2063 field_op (lhs
, OPCODE_getstatic
, state
);
2064 NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (lhs
)) ? 2 : 1);
2067 emit_load (lhs
, state
);
2069 else if (TREE_CODE (lhs
) == ARRAY_REF
)
2071 /* Duplicate the array and index, which are on the
2072 stack, so that we can load the old value. */
2073 emit_dup (2, 0, state
);
2075 jopcode
= OPCODE_iaload
+ adjust_typed_op (TREE_TYPE (lhs
), 7);
2078 NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (lhs
)) ? 2 : 1);
2083 /* This function correctly handles the case where the LHS
2084 of a binary expression is NULL_TREE. */
2085 rhs
= build2 (TREE_CODE (rhs
), TREE_TYPE (rhs
),
2086 NULL_TREE
, TREE_OPERAND (rhs
, 1));
2089 generate_bytecode_insns (rhs
, STACK_TARGET
, state
);
2090 if (target
!= IGNORE_TARGET
)
2091 emit_dup (TYPE_IS_WIDE (type
) ? 2 : 1 , offset
, state
);
2097 if (TREE_CODE (exp
) == COMPONENT_REF
)
2099 tree field
= TREE_OPERAND (exp
, 1);
2100 if (! FIELD_STATIC (field
))
2103 FIELD_STATIC (field
) ? OPCODE_putstatic
: OPCODE_putfield
,
2106 NOTE_POP (TYPE_IS_WIDE (TREE_TYPE (field
)) ? 2 : 1);
2108 else if (TREE_CODE (exp
) == VAR_DECL
2109 || TREE_CODE (exp
) == PARM_DECL
)
2111 if (FIELD_STATIC (exp
))
2113 field_op (exp
, OPCODE_putstatic
, state
);
2114 NOTE_POP (TYPE_IS_WIDE (TREE_TYPE (exp
)) ? 2 : 1);
2117 emit_store (exp
, state
);
2119 else if (TREE_CODE (exp
) == ARRAY_REF
)
2121 jopcode
= OPCODE_iastore
+ adjust_typed_op (TREE_TYPE (exp
), 7);
2124 NOTE_POP (TYPE_IS_WIDE (TREE_TYPE (exp
)) ? 4 : 3);
2130 jopcode
= OPCODE_iadd
;
2133 jopcode
= OPCODE_isub
;
2136 jopcode
= OPCODE_imul
;
2138 case TRUNC_DIV_EXPR
:
2140 jopcode
= OPCODE_idiv
;
2142 case TRUNC_MOD_EXPR
:
2143 jopcode
= OPCODE_irem
;
2145 case LSHIFT_EXPR
: jopcode
= OPCODE_ishl
; goto binop
;
2146 case RSHIFT_EXPR
: jopcode
= OPCODE_ishr
; goto binop
;
2147 case URSHIFT_EXPR
: jopcode
= OPCODE_iushr
; goto binop
;
2148 case TRUTH_AND_EXPR
:
2149 case BIT_AND_EXPR
: jopcode
= OPCODE_iand
; goto binop
;
2151 case BIT_IOR_EXPR
: jopcode
= OPCODE_ior
; goto binop
;
2152 case TRUTH_XOR_EXPR
:
2153 case BIT_XOR_EXPR
: jopcode
= OPCODE_ixor
; goto binop
;
2156 tree arg0
= TREE_OPERAND (exp
, 0);
2157 tree arg1
= TREE_OPERAND (exp
, 1);
2158 jopcode
+= adjust_typed_op (type
, 3);
2159 if (arg0
!= NULL_TREE
&& operand_equal_p (arg0
, arg1
, 0))
2161 /* fold may (e.g) convert 2*x to x+x. */
2162 generate_bytecode_insns (arg0
, target
, state
);
2163 emit_dup (TYPE_PRECISION (TREE_TYPE (arg0
)) > 32 ? 2 : 1, 0, state
);
2167 /* ARG0 will be NULL_TREE if we're handling an `OP='
2168 expression. In this case the stack already holds the
2169 LHS. See the MODIFY_EXPR case. */
2170 if (arg0
!= NULL_TREE
)
2171 generate_bytecode_insns (arg0
, target
, state
);
2172 if (jopcode
>= OPCODE_lshl
&& jopcode
<= OPCODE_lushr
)
2173 arg1
= convert (int_type_node
, arg1
);
2174 generate_bytecode_insns (arg1
, target
, state
);
2176 /* For most binary operations, both operands and the result have the
2177 same type. Shift operations are different. Using arg1's type
2178 gets us the correct SP adjustment in all cases. */
2179 if (target
== STACK_TARGET
)
2180 emit_binop (jopcode
, TREE_TYPE (arg1
), state
);
2183 case TRUTH_NOT_EXPR
:
2185 generate_bytecode_insns (TREE_OPERAND (exp
, 0), target
, state
);
2186 if (target
== STACK_TARGET
)
2188 int is_long
= TYPE_PRECISION (TREE_TYPE (exp
)) > 32;
2189 push_int_const (TREE_CODE (exp
) == BIT_NOT_EXPR
? -1 : 1, state
);
2193 NOTE_PUSH (1 + is_long
);
2194 OP1 (OPCODE_ixor
+ is_long
);
2195 NOTE_POP (1 + is_long
);
2199 jopcode
= OPCODE_ineg
;
2200 jopcode
+= adjust_typed_op (type
, 3);
2201 generate_bytecode_insns (TREE_OPERAND (exp
, 0), target
, state
);
2202 if (target
== STACK_TARGET
)
2203 emit_unop (jopcode
, type
, state
);
2205 case INSTANCEOF_EXPR
:
2207 int index
= find_class_constant (&state
->cpool
, TREE_OPERAND (exp
, 1));
2208 generate_bytecode_insns (TREE_OPERAND (exp
, 0), target
, state
);
2210 OP1 (OPCODE_instanceof
);
2215 /* The first time through, the argument of the SAVE_EXPR will be
2216 something complex. Evaluate it, and replace the argument with
2217 a VAR_DECL that holds the result. */
2218 arg
= TREE_OPERAND (exp
, 0);
2219 if (TREE_CODE (arg
) != VAR_DECL
|| DECL_NAME (arg
))
2221 tree type
= TREE_TYPE (exp
);
2222 tree decl
= build_decl (VAR_DECL
, NULL_TREE
, type
);
2223 generate_bytecode_insns (arg
, STACK_TARGET
, state
);
2224 localvar_alloc (decl
, state
);
2225 TREE_OPERAND (exp
, 0) = decl
;
2226 emit_dup (TYPE_IS_WIDE (type
) ? 2 : 1, 0, state
);
2227 emit_store (decl
, state
);
2231 tree type
= TREE_TYPE (exp
);
2232 emit_load (arg
, state
);
2233 NOTE_PUSH (TYPE_IS_WIDE (type
) ? 2 : 1);
2239 case FIX_TRUNC_EXPR
:
2241 tree src
= TREE_OPERAND (exp
, 0);
2242 tree src_type
= TREE_TYPE (src
);
2243 tree dst_type
= TREE_TYPE (exp
);
2244 generate_bytecode_insns (TREE_OPERAND (exp
, 0), target
, state
);
2245 if (target
== IGNORE_TARGET
|| src_type
== dst_type
)
2247 if (TREE_CODE (dst_type
) == POINTER_TYPE
)
2249 if (TREE_CODE (exp
) == CONVERT_EXPR
)
2251 int index
= find_class_constant (&state
->cpool
,
2252 TREE_TYPE (dst_type
));
2254 OP1 (OPCODE_checkcast
);
2258 else /* Convert numeric types. */
2260 int wide_src
= TYPE_PRECISION (src_type
) > 32;
2261 int wide_dst
= TYPE_PRECISION (dst_type
) > 32;
2262 NOTE_POP (1 + wide_src
);
2264 if (TREE_CODE (dst_type
) == REAL_TYPE
)
2266 if (TREE_CODE (src_type
) == REAL_TYPE
)
2267 OP1 (wide_dst
? OPCODE_f2d
: OPCODE_d2f
);
2268 else if (TYPE_PRECISION (src_type
) == 64)
2269 OP1 (OPCODE_l2f
+ wide_dst
);
2271 OP1 (OPCODE_i2f
+ wide_dst
);
2273 else /* Convert to integral type. */
2275 if (TREE_CODE (src_type
) == REAL_TYPE
)
2276 OP1 (OPCODE_f2i
+ wide_dst
+ 3 * wide_src
);
2281 if (TYPE_PRECISION (dst_type
) < 32)
2284 /* Already converted to int, if needed. */
2285 if (TYPE_PRECISION (dst_type
) <= 8)
2287 else if (TYPE_UNSIGNED (dst_type
))
2293 NOTE_PUSH (1 + wide_dst
);
2300 tree try_clause
= TREE_OPERAND (exp
, 0);
2301 struct jcf_block
*start_label
= get_jcf_label_here (state
);
2302 struct jcf_block
*end_label
; /* End of try clause. */
2303 struct jcf_block
*finished_label
= gen_jcf_label (state
);
2304 tree clause
= TREE_OPERAND (exp
, 1);
2305 if (target
!= IGNORE_TARGET
)
2307 generate_bytecode_insns (try_clause
, IGNORE_TARGET
, state
);
2308 end_label
= get_jcf_label_here (state
);
2309 if (end_label
== start_label
)
2311 if (CAN_COMPLETE_NORMALLY (try_clause
))
2312 emit_goto (finished_label
, state
);
2313 while (clause
!= NULL_TREE
)
2315 tree catch_clause
= TREE_OPERAND (clause
, 0);
2316 tree exception_decl
= BLOCK_EXPR_DECLS (catch_clause
);
2317 struct jcf_handler
*handler
= alloc_handler (start_label
,
2319 if (exception_decl
== NULL_TREE
)
2320 handler
->type
= NULL_TREE
;
2322 handler
->type
= TREE_TYPE (TREE_TYPE (exception_decl
));
2323 generate_bytecode_insns (catch_clause
, IGNORE_TARGET
, state
);
2324 clause
= TREE_CHAIN (clause
);
2325 if (CAN_COMPLETE_NORMALLY (catch_clause
) && clause
!= NULL_TREE
)
2326 emit_goto (finished_label
, state
);
2328 define_jcf_label (finished_label
, state
);
2332 case TRY_FINALLY_EXPR
:
2334 struct jcf_block
*finished_label
= NULL
;
2335 struct jcf_block
*finally_label
, *start_label
, *end_label
;
2336 struct jcf_handler
*handler
;
2337 tree try_block
= TREE_OPERAND (exp
, 0);
2338 tree finally
= TREE_OPERAND (exp
, 1);
2339 tree return_link
= NULL_TREE
, exception_decl
= NULL_TREE
;
2341 tree exception_type
;
2343 finally_label
= gen_jcf_label (state
);
2344 start_label
= get_jcf_label_here (state
);
2345 /* If the `finally' clause can complete normally, we emit it
2346 as a subroutine and let the other clauses call it via
2347 `jsr'. If it can't complete normally, then we simply emit
2348 `goto's directly to it. */
2349 if (CAN_COMPLETE_NORMALLY (finally
))
2351 finally_label
->pc
= PENDING_CLEANUP_PC
;
2352 finally_label
->next
= state
->labeled_blocks
;
2353 state
->labeled_blocks
= finally_label
;
2354 state
->num_finalizers
++;
2357 generate_bytecode_insns (try_block
, target
, state
);
2359 if (CAN_COMPLETE_NORMALLY (finally
))
2361 if (state
->labeled_blocks
!= finally_label
)
2363 state
->labeled_blocks
= finally_label
->next
;
2365 end_label
= get_jcf_label_here (state
);
2367 if (end_label
== start_label
)
2369 state
->num_finalizers
--;
2370 define_jcf_label (finally_label
, state
);
2371 generate_bytecode_insns (finally
, IGNORE_TARGET
, state
);
2375 if (CAN_COMPLETE_NORMALLY (finally
))
2377 return_link
= build_decl (VAR_DECL
, NULL_TREE
,
2378 return_address_type_node
);
2379 finished_label
= gen_jcf_label (state
);
2382 if (CAN_COMPLETE_NORMALLY (try_block
))
2384 if (CAN_COMPLETE_NORMALLY (finally
))
2386 emit_jsr (finally_label
, state
);
2387 emit_goto (finished_label
, state
);
2390 emit_goto (finally_label
, state
);
2393 /* Handle exceptions. */
2395 exception_type
= build_pointer_type (throwable_type_node
);
2396 if (CAN_COMPLETE_NORMALLY (finally
))
2398 /* We're going to generate a subroutine, so we'll need to
2399 save and restore the exception around the `jsr'. */
2400 exception_decl
= build_decl (VAR_DECL
, NULL_TREE
, exception_type
);
2401 localvar_alloc (return_link
, state
);
2403 handler
= alloc_handler (start_label
, end_label
, state
);
2404 handler
->type
= NULL_TREE
;
2405 if (CAN_COMPLETE_NORMALLY (finally
))
2407 localvar_alloc (exception_decl
, state
);
2409 emit_store (exception_decl
, state
);
2410 emit_jsr (finally_label
, state
);
2411 emit_load (exception_decl
, state
);
2413 OP1 (OPCODE_athrow
);
2418 /* We're not generating a subroutine. In this case we can
2419 simply have the exception handler pop the exception and
2420 then fall through to the `finally' block. */
2422 emit_pop (1, state
);
2426 /* The finally block. If we're generating a subroutine, first
2427 save return PC into return_link. Otherwise, just generate
2428 the code for the `finally' block. */
2429 define_jcf_label (finally_label
, state
);
2430 if (CAN_COMPLETE_NORMALLY (finally
))
2433 emit_store (return_link
, state
);
2436 generate_bytecode_insns (finally
, IGNORE_TARGET
, state
);
2437 if (CAN_COMPLETE_NORMALLY (finally
))
2439 maybe_wide (OPCODE_ret
, DECL_LOCAL_INDEX (return_link
), state
);
2440 maybe_free_localvar (exception_decl
, state
, 1);
2441 maybe_free_localvar (return_link
, state
, 1);
2442 define_jcf_label (finished_label
, state
);
2447 generate_bytecode_insns (TREE_OPERAND (exp
, 0), STACK_TARGET
, state
);
2449 OP1 (OPCODE_athrow
);
2451 case NEW_ARRAY_INIT
:
2453 tree values
= CONSTRUCTOR_ELTS (TREE_OPERAND (exp
, 0));
2454 tree array_type
= TREE_TYPE (TREE_TYPE (exp
));
2455 tree element_type
= TYPE_ARRAY_ELEMENT (array_type
);
2456 HOST_WIDE_INT length
= java_array_type_length (array_type
);
2457 if (target
== IGNORE_TARGET
)
2459 for ( ; values
!= NULL_TREE
; values
= TREE_CHAIN (values
))
2460 generate_bytecode_insns (TREE_VALUE (values
), target
, state
);
2463 push_int_const (length
, state
);
2466 if (JPRIMITIVE_TYPE_P (element_type
))
2468 int atype
= encode_newarray_type (element_type
);
2469 OP1 (OPCODE_newarray
);
2474 int index
= find_class_constant (&state
->cpool
,
2475 TREE_TYPE (element_type
));
2476 OP1 (OPCODE_anewarray
);
2480 jopcode
= OPCODE_iastore
+ adjust_typed_op (element_type
, 7);
2481 for ( ; values
!= NULL_TREE
; values
= TREE_CHAIN (values
), offset
++)
2483 int save_SP
= state
->code_SP
;
2484 emit_dup (1, 0, state
);
2485 push_int_const (offset
, state
);
2487 generate_bytecode_insns (TREE_VALUE (values
), STACK_TARGET
, state
);
2490 state
->code_SP
= save_SP
;
2494 case JAVA_EXC_OBJ_EXPR
:
2495 NOTE_PUSH (1); /* Pushed by exception system. */
2500 /* This copes with cases where fold() has created MIN or MAX
2501 from a conditional expression. */
2502 enum tree_code code
= TREE_CODE (exp
) == MIN_EXPR
? LT_EXPR
: GT_EXPR
;
2503 tree op0
= TREE_OPERAND (exp
, 0);
2504 tree op1
= TREE_OPERAND (exp
, 1);
2506 if (TREE_SIDE_EFFECTS (op0
) || TREE_SIDE_EFFECTS (op1
))
2508 x
= build3 (COND_EXPR
, TREE_TYPE (exp
),
2509 build2 (code
, boolean_type_node
, op0
, op1
),
2511 generate_bytecode_insns (x
, target
, state
);
2514 case NEW_CLASS_EXPR
:
2516 tree
class = TREE_TYPE (TREE_TYPE (exp
));
2517 int need_result
= target
!= IGNORE_TARGET
;
2518 int index
= find_class_constant (&state
->cpool
, class);
2524 NOTE_PUSH (1 + need_result
);
2526 /* ... fall though ... */
2529 tree f
= TREE_OPERAND (exp
, 0);
2530 tree x
= TREE_OPERAND (exp
, 1);
2531 int save_SP
= state
->code_SP
;
2533 if (TREE_CODE (f
) == ADDR_EXPR
)
2534 f
= TREE_OPERAND (f
, 0);
2535 if (f
== soft_newarray_node
)
2537 int type_code
= TREE_INT_CST_LOW (TREE_VALUE (x
));
2538 generate_bytecode_insns (TREE_VALUE (TREE_CHAIN (x
)),
2539 STACK_TARGET
, state
);
2541 OP1 (OPCODE_newarray
);
2545 else if (f
== soft_multianewarray_node
)
2549 int index
= find_class_constant (&state
->cpool
,
2550 TREE_TYPE (TREE_TYPE (exp
)));
2551 x
= TREE_CHAIN (x
); /* Skip class argument. */
2552 ndims
= TREE_INT_CST_LOW (TREE_VALUE (x
));
2553 for (idim
= ndims
; --idim
>= 0; )
2556 generate_bytecode_insns (TREE_VALUE (x
), STACK_TARGET
, state
);
2559 OP1 (OPCODE_multianewarray
);
2564 else if (f
== soft_anewarray_node
)
2566 tree cl
= TYPE_ARRAY_ELEMENT (TREE_TYPE (TREE_TYPE (exp
)));
2567 int index
= find_class_constant (&state
->cpool
, TREE_TYPE (cl
));
2568 generate_bytecode_insns (TREE_VALUE (x
), STACK_TARGET
, state
);
2570 OP1 (OPCODE_anewarray
);
2574 else if (f
== soft_monitorenter_node
2575 || f
== soft_monitorexit_node
2578 if (f
== soft_monitorenter_node
)
2579 op
= OPCODE_monitorenter
;
2580 else if (f
== soft_monitorexit_node
)
2581 op
= OPCODE_monitorexit
;
2584 generate_bytecode_insns (TREE_VALUE (x
), STACK_TARGET
, state
);
2590 for ( ; x
!= NULL_TREE
; x
= TREE_CHAIN (x
))
2592 generate_bytecode_insns (TREE_VALUE (x
), STACK_TARGET
, state
);
2594 nargs
= state
->code_SP
- save_SP
;
2595 state
->code_SP
= save_SP
;
2596 if (f
== soft_fmod_node
)
2603 if (TREE_CODE (exp
) == NEW_CLASS_EXPR
)
2604 NOTE_POP (1); /* Pop implicit this. */
2605 if (TREE_CODE (f
) == FUNCTION_DECL
&& DECL_CONTEXT (f
) != NULL_TREE
)
2607 tree context
= DECL_CONTEXT (f
);
2608 int index
, interface
= 0;
2610 if (METHOD_STATIC (f
))
2611 OP1 (OPCODE_invokestatic
);
2612 else if (DECL_CONSTRUCTOR_P (f
) || CALL_USING_SUPER (exp
)
2613 || METHOD_PRIVATE (f
))
2614 OP1 (OPCODE_invokespecial
);
2617 if (CLASS_INTERFACE (TYPE_NAME (context
)))
2619 tree arg1
= TREE_VALUE (TREE_OPERAND (exp
, 1));
2620 context
= TREE_TYPE (TREE_TYPE (arg1
));
2621 if (CLASS_INTERFACE (TYPE_NAME (context
)))
2625 OP1 (OPCODE_invokeinterface
);
2627 OP1 (OPCODE_invokevirtual
);
2629 index
= find_methodref_with_class_index (&state
->cpool
, f
, context
);
2639 f
= TREE_TYPE (TREE_TYPE (f
));
2640 if (TREE_CODE (f
) != VOID_TYPE
)
2642 int size
= TYPE_IS_WIDE (f
) ? 2 : 1;
2643 if (target
== IGNORE_TARGET
)
2644 emit_pop (size
, state
);
2654 error("internal error in generate_bytecode_insn - tree code not implemented: %s",
2655 tree_code_name
[(int) TREE_CODE (exp
)]);
/* perform_relocations: assign every basic block its final bytecode
   address and apply all pending relocations recorded while the method
   was emitted — redundant-goto removal, goto-to-goto chaining, switch
   alignment padding, and widening of branches whose displacement does
   not fit in 16 bits.  Sets state->code_length at the end.
   NOTE(review): this extraction dropped interior lines (braces, the
   declaration and updates of the running `pc' counter, and several
   statements — visible as gaps in the embedded line numbers), so the
   text below is preserved verbatim and is not compilable as-is.  */
2660 perform_relocations (struct jcf_partial
*state
)
2662 struct jcf_block
*block
;
2663 struct jcf_relocation
*reloc
;
2667 /* Before we start, the pc field of each block is an upper bound on
2668 the block's start pc (it may be less, if previous blocks need less
2669 than their maximum).
2671 The minimum size of each block is in the block's chunk->size. */
2673 /* First, figure out the actual locations of each block. */
2676 for (block
= state
->blocks
; block
!= NULL
; block
= block
->next
)
2678 int block_size
= block
->v
.chunk
->size
;
2682 /* Optimize GOTO L; L: by getting rid of the redundant goto.
2683 Assumes relocations are in reverse order. */
2684 reloc
= block
->u
.relocations
;
2685 while (reloc
!= NULL
2686 && reloc
->kind
== OPCODE_goto_w
2687 && reloc
->label
->pc
== block
->next
->pc
2688 && reloc
->offset
+ 2 == block_size
)
2690 reloc
= reloc
->next
;
2691 block
->u
.relocations
= reloc
;
2692 block
->v
.chunk
->size
-= 3;
2697 /* Optimize GOTO L; ... L: GOTO X by changing the first goto to
2698 jump directly to X. We're careful here to avoid an infinite
2699 loop if the `goto's themselves form one. We do this
2700 optimization because we can generate a goto-to-goto for some
2701 try/finally blocks. */
2702 while (reloc
!= NULL
2703 && reloc
->kind
== OPCODE_goto_w
2704 && reloc
->label
!= block
2705 && reloc
->label
->v
.chunk
->data
!= NULL
2706 && reloc
->label
->v
.chunk
->data
[0] == OPCODE_goto
)
2708 /* Find the reloc for the first instruction of the
2709 destination block. */
2710 struct jcf_relocation
*first_reloc
;
2711 for (first_reloc
= reloc
->label
->u
.relocations
;
2713 first_reloc
= first_reloc
->next
)
2715 if (first_reloc
->offset
== 1
2716 && first_reloc
->kind
== OPCODE_goto_w
)
2718 reloc
->label
= first_reloc
->label
;
2723 /* If we didn't do anything, exit the loop. */
2724 if (first_reloc
== NULL
)
/* Size up the remaining relocations of this block: switch padding
   and branches that may need widening from 16- to 32-bit form.  */
2728 for (reloc
= block
->u
.relocations
; reloc
!= NULL
; reloc
= reloc
->next
)
2730 if (reloc
->kind
== SWITCH_ALIGN_RELOC
)
2732 /* We assume this is the first relocation in this block,
2733 so we know its final pc. */
2734 int where
= pc
+ reloc
->offset
;
2735 int pad
= ((where
+ 3) & ~3) - where
;
2738 else if (reloc
->kind
< -1 || reloc
->kind
> BLOCK_START_RELOC
)
2740 int delta
= reloc
->label
->pc
- (pc
+ reloc
->offset
- 1);
2741 int expand
= reloc
->kind
> 0 ? 2 : 5;
2745 if (delta
>= -32768 && delta
<= 32767)
2751 block_size
+= expand
;
/* Second pass: copy each block's bytes to their final location,
   rewriting branch operands as we go.  */
2757 for (block
= state
->blocks
; block
!= NULL
; block
= block
->next
)
2759 struct chunk
*chunk
= block
->v
.chunk
;
2760 int old_size
= chunk
->size
;
2761 int next_pc
= block
->next
== NULL
? pc
: block
->next
->pc
;
2762 int new_size
= next_pc
- block
->pc
;
2763 unsigned char *new_ptr
;
2764 unsigned char *old_buffer
= chunk
->data
;
2765 unsigned char *old_ptr
= old_buffer
+ old_size
;
2766 if (new_size
!= old_size
)
2768 chunk
->data
= obstack_alloc (state
->chunk_obstack
, new_size
);
2769 chunk
->size
= new_size
;
2771 new_ptr
= chunk
->data
+ new_size
;
2773 /* We do the relocations from back to front, because
2774 the relocations are in reverse order. */
2775 for (reloc
= block
->u
.relocations
; ; reloc
= reloc
->next
)
2777 /* new_ptr and old_ptr point into the old and new buffers,
2778 respectively. (If no relocations cause the buffer to
2779 grow, the buffer will be the same buffer, and new_ptr==old_ptr.)
2780 The bytes at higher address have been copied and relocations
2781 handled; those at lower addresses remain to process. */
2783 /* Lower old index of piece to be copied with no relocation.
2784 I.e. high index of the first piece that does need relocation. */
2785 int start
= reloc
== NULL
? 0
2786 : reloc
->kind
== SWITCH_ALIGN_RELOC
? reloc
->offset
2787 : (reloc
->kind
== 0 || reloc
->kind
== BLOCK_START_RELOC
)
2789 : reloc
->offset
+ 2;
2792 int n
= (old_ptr
- old_buffer
) - start
;
2796 memcpy (new_ptr
, old_ptr
, n
);
2797 if (old_ptr
== old_buffer
)
2800 new_offset
= new_ptr
- chunk
->data
;
2801 new_offset
-= (reloc
->kind
== -1 ? 2 : 4);
2802 if (reloc
->kind
== 0)
2805 value
= GET_u4 (old_ptr
);
2807 else if (reloc
->kind
== BLOCK_START_RELOC
)
2813 else if (reloc
->kind
== SWITCH_ALIGN_RELOC
)
2815 int where
= block
->pc
+ reloc
->offset
;
2816 int pad
= ((where
+ 3) & ~3) - where
;
2824 value
= GET_u2 (old_ptr
);
/* Rewrite the branch operand relative to its new location and store
   it big-endian (low byte first because we write backwards).  */
2826 value
+= reloc
->label
->pc
- (block
->pc
+ new_offset
);
2827 *--new_ptr
= (unsigned char) value
; value
>>= 8;
2828 *--new_ptr
= (unsigned char) value
; value
>>= 8;
2829 if (reloc
->kind
!= -1)
2831 *--new_ptr
= (unsigned char) value
; value
>>= 8;
2832 *--new_ptr
= (unsigned char) value
;
2834 if (reloc
->kind
> BLOCK_START_RELOC
)
2836 /* Convert: OP TARGET to: OP_w TARGET; (OP is goto or jsr). */
2838 *--new_ptr
= reloc
->kind
;
2840 else if (reloc
->kind
< -1)
2842 /* Convert: ifCOND TARGET to: ifNCOND T; goto_w TARGET; T: */
2844 *--new_ptr
= OPCODE_goto_w
;
2847 *--new_ptr
= - reloc
->kind
;
2850 if (new_ptr
!= chunk
->data
)
2853 state
->code_length
= pc
;
2857 init_jcf_state (struct jcf_partial
*state
, struct obstack
*work
)
2859 state
->chunk_obstack
= work
;
2860 state
->first
= state
->chunk
= NULL
;
2861 CPOOL_INIT (&state
->cpool
);
2862 BUFFER_INIT (&state
->localvars
);
2863 BUFFER_INIT (&state
->bytecode
);
2867 init_jcf_method (struct jcf_partial
*state
, tree method
)
2869 state
->current_method
= method
;
2870 state
->blocks
= state
->last_block
= NULL
;
2871 state
->linenumber_count
= 0;
2872 state
->first_lvar
= state
->last_lvar
= NULL
;
2873 state
->lvar_count
= 0;
2874 state
->labeled_blocks
= NULL
;
2875 state
->code_length
= 0;
2876 BUFFER_RESET (&state
->bytecode
);
2877 BUFFER_RESET (&state
->localvars
);
2879 state
->code_SP_max
= 0;
2880 state
->handlers
= NULL
;
2881 state
->last_handler
= NULL
;
2882 state
->num_handlers
= 0;
2883 state
->num_finalizers
= 0;
2884 state
->return_value_decl
= NULL_TREE
;
2888 release_jcf_state (struct jcf_partial
*state
)
2890 CPOOL_FINISH (&state
->cpool
);
2891 obstack_free (state
->chunk_obstack
, state
->first
);
2894 /* Get the access flags (modifiers) of a class (TYPE_DECL) to be used in the
2895 access_flags field of the class file header. */
2898 get_classfile_modifiers (tree
class)
2900 /* These are the flags which are valid class file modifiers.
2902 int valid_toplevel_class_flags
= (ACC_PUBLIC
| ACC_FINAL
| ACC_SUPER
|
2903 ACC_INTERFACE
| ACC_ABSTRACT
);
2904 int flags
= get_access_flags (class);
2906 /* ACC_SUPER should always be set, except for interfaces. */
2907 if (! (flags
& ACC_INTERFACE
))
2910 /* A protected member class becomes public at the top level. */
2911 if (flags
& ACC_PROTECTED
)
2912 flags
|= ACC_PUBLIC
;
2914 /* Filter out flags that are not valid for a class or interface in the
2915 top-level access_flags field. */
2916 flags
&= valid_toplevel_class_flags
;
2921 /* Get the access flags (modifiers) for a method to be used in the class
2925 get_method_access_flags (tree decl
)
2927 int flags
= get_access_flags (decl
);
2929 /* Promote "private" inner-class constructors to package-private. */
2930 if (DECL_CONSTRUCTOR_P (decl
)
2931 && INNER_CLASS_DECL_P (TYPE_NAME (DECL_CONTEXT (decl
))))
2932 flags
&= ~(ACC_PRIVATE
);
2937 /* Generate and return a list of chunks containing the class CLAS
2938 in the .class file representation. The list can be written to a
2939 .class file using write_chunks. Allocate chunks from obstack WORK. */
/* Cached identifier tree for the "SourceFile" attribute name; GTY so
   the garbage collector keeps it alive across passes.  */
2941 static GTY(()) tree SourceFile_node
;
/* generate_classfile: build the complete .class file image for CLAS
   as a list of chunks in STATE and return the head of the list
   (state->first).  Emits, in class-file order: magic/version, the
   (initially empty) constant-pool chunk, the class header, the field
   table, the method table with Code/Exceptions attributes, and the
   class attributes (SourceFile, InnerClasses, gcj marker,
   Deprecated); the constant pool is serialized last into the chunk
   reserved for it up front.
   NOTE(review): many interior lines (braces and declarations such as
   `i', `ptr', `part', `t', `total_supers', `type_index') were
   dropped by the extraction; the code below is preserved verbatim
   and is not compilable as-is.  */
2942 static struct chunk
*
2943 generate_classfile (tree clas
, struct jcf_partial
*state
)
2945 struct chunk
*cpool_chunk
;
2946 const char *source_file
, *s
;
2949 unsigned char *fields_count_ptr
;
2950 int fields_count
= 0;
2951 unsigned char *methods_count_ptr
;
2952 int methods_count
= 0;
2955 = clas
== object_type_node
? 0 : BINFO_N_BASE_BINFOS (TYPE_BINFO (clas
));
2957 ptr
= append_chunk (NULL
, 8, state
);
2958 PUT4 (0xCafeBabe); /* Magic number */
2959 PUT2 (3); /* Minor version */
2960 PUT2 (45); /* Major version */
/* Reserve an empty chunk for the constant pool; it is filled in at
   the very end once all constants have been interned.  */
2962 append_chunk (NULL
, 0, state
);
2963 cpool_chunk
= state
->chunk
;
2965 /* Next allocate the chunk containing access_flags through fields_count. */
2966 if (clas
== object_type_node
)
2969 i
= 8 + 2 * total_supers
;
2970 ptr
= append_chunk (NULL
, i
, state
);
2971 i
= get_classfile_modifiers (TYPE_NAME (clas
));
2972 PUT2 (i
); /* access_flags */
2973 i
= find_class_constant (&state
->cpool
, clas
); PUT2 (i
); /* this_class */
2974 if (clas
== object_type_node
)
2976 PUT2(0); /* super_class */
2977 PUT2(0); /* interfaces_count */
2981 tree binfo
= TYPE_BINFO (clas
);
2982 tree base_binfo
= BINFO_BASE_BINFO (binfo
, 0);
2983 int j
= find_class_constant (&state
->cpool
, BINFO_TYPE (base_binfo
));
2985 PUT2 (j
); /* super_class */
2986 PUT2 (total_supers
- 1); /* interfaces_count */
2987 for (i
= 1; BINFO_BASE_ITERATE (binfo
, i
, base_binfo
); i
++)
2989 j
= find_class_constant (&state
->cpool
, BINFO_TYPE (base_binfo
));
2993 fields_count_ptr
= ptr
;
/* Emit one field_info per declared field, skipping compiler
   artifacts; constant static finals also get a ConstantValue
   attribute below.  */
2995 for (part
= TYPE_FIELDS (clas
); part
; part
= TREE_CHAIN (part
))
2997 int have_value
, attr_count
= 0;
2998 if (DECL_NAME (part
) == NULL_TREE
|| DECL_ARTIFICIAL (part
))
3000 ptr
= append_chunk (NULL
, 8, state
);
3001 i
= get_access_flags (part
); PUT2 (i
);
3002 i
= find_utf8_constant (&state
->cpool
, DECL_NAME (part
)); PUT2 (i
);
3003 i
= find_utf8_constant (&state
->cpool
,
3004 build_java_signature (TREE_TYPE (part
)));
3006 have_value
= DECL_INITIAL (part
) != NULL_TREE
3007 && FIELD_STATIC (part
) && CONSTANT_VALUE_P (DECL_INITIAL (part
))
3008 && FIELD_FINAL (part
)
3009 && (JPRIMITIVE_TYPE_P (TREE_TYPE (part
))
3010 || TREE_TYPE (part
) == string_ptr_type_node
);
3014 if (FIELD_THISN (part
) || FIELD_LOCAL_ALIAS (part
)
3015 || FIELD_SYNTHETIC (part
))
3017 if (FIELD_DEPRECATED (part
))
3020 PUT2 (attr_count
); /* attributes_count */
3023 tree init
= DECL_INITIAL (part
);
3024 static tree ConstantValue_node
= NULL_TREE
;
3025 if (TREE_TYPE (part
) != TREE_TYPE (init
))
3026 fatal_error ("field initializer type mismatch");
3027 ptr
= append_chunk (NULL
, 8, state
);
3028 if (ConstantValue_node
== NULL_TREE
)
3029 ConstantValue_node
= get_identifier ("ConstantValue");
3030 i
= find_utf8_constant (&state
->cpool
, ConstantValue_node
);
3031 PUT2 (i
); /* attribute_name_index */
3032 PUT4 (2); /* attribute_length */
3033 i
= find_constant_index (init
, state
); PUT2 (i
);
3035 /* Emit the "Synthetic" attribute for val$<x> and this$<n>
3036 fields and other fields which need it. */
3037 if (FIELD_THISN (part
) || FIELD_LOCAL_ALIAS (part
)
3038 || FIELD_SYNTHETIC (part
))
3039 ptr
= append_synthetic_attribute (state
);
3040 if (FIELD_DEPRECATED (part
))
3041 append_deprecated_attribute (state
);
3044 ptr
= fields_count_ptr
; UNSAFE_PUT2 (fields_count
);
3046 ptr
= methods_count_ptr
= append_chunk (NULL
, 2, state
);
/* Emit one method_info per method; methods with a body additionally
   get a Code attribute containing the generated bytecode.  */
3049 for (part
= TYPE_METHODS (clas
); part
; part
= TREE_CHAIN (part
))
3051 struct jcf_block
*block
;
3052 tree function_body
= DECL_FUNCTION_BODY (part
);
3053 tree body
= function_body
== NULL_TREE
? NULL_TREE
3054 : BLOCK_EXPR_BODY (function_body
);
3055 tree name
= DECL_CONSTRUCTOR_P (part
) ? init_identifier_node
3057 tree type
= TREE_TYPE (part
);
3058 tree save_function
= current_function_decl
;
3059 int synthetic_p
= 0;
3061 /* Invisible Miranda methods shouldn't end up in the .class
3063 if (METHOD_INVISIBLE (part
))
3066 current_function_decl
= part
;
3067 ptr
= append_chunk (NULL
, 8, state
);
3068 i
= get_method_access_flags (part
); PUT2 (i
);
3069 i
= find_utf8_constant (&state
->cpool
, name
); PUT2 (i
);
3070 i
= find_utf8_constant (&state
->cpool
, build_java_signature (type
));
3072 i
= (body
!= NULL_TREE
) + (DECL_FUNCTION_THROWS (part
) != NULL_TREE
);
3074 /* Make room for the Synthetic attribute (of zero length.) */
3075 if (DECL_FINIT_P (part
)
3076 || DECL_INSTINIT_P (part
)
3077 || OUTER_FIELD_ACCESS_IDENTIFIER_P (DECL_NAME (part
))
3078 || TYPE_DOT_CLASS (clas
) == part
)
3083 /* Make room for Deprecated attribute. */
3084 if (METHOD_DEPRECATED (part
))
3087 PUT2 (i
); /* attributes_count */
3090 ptr
= append_synthetic_attribute (state
);
3092 if (body
!= NULL_TREE
)
3094 int code_attributes_count
= 0;
3095 static tree Code_node
= NULL_TREE
;
3097 unsigned char *attr_len_ptr
;
3098 struct jcf_handler
*handler
;
3099 if (Code_node
== NULL_TREE
)
3100 Code_node
= get_identifier ("Code");
3101 ptr
= append_chunk (NULL
, 14, state
);
3102 i
= find_utf8_constant (&state
->cpool
, Code_node
); PUT2 (i
);
/* Generate bytecode for the body, then resolve branch targets.  */
3104 init_jcf_method (state
, part
);
3105 get_jcf_label_here (state
); /* Force a first block. */
3106 for (t
= DECL_ARGUMENTS (part
); t
!= NULL_TREE
; t
= TREE_CHAIN (t
))
3107 localvar_alloc (t
, state
);
3108 state
->num_jsrs
= 0;
3109 generate_bytecode_insns (body
, IGNORE_TARGET
, state
);
3110 if (CAN_COMPLETE_NORMALLY (body
))
3112 if (TREE_CODE (TREE_TYPE (type
)) != VOID_TYPE
)
3115 OP1 (OPCODE_return
);
3117 for (t
= DECL_ARGUMENTS (part
); t
!= NULL_TREE
; t
= TREE_CHAIN (t
))
3118 maybe_free_localvar (t
, state
, 1);
3119 if (state
->return_value_decl
!= NULL_TREE
)
3120 maybe_free_localvar (state
->return_value_decl
, state
, 1);
3121 finish_jcf_block (state
);
3122 perform_relocations (state
);
3125 i
= 8 + state
->code_length
+ 4 + 8 * state
->num_handlers
;
3126 if (state
->linenumber_count
> 0)
3128 code_attributes_count
++;
3129 i
+= 8 + 4 * state
->linenumber_count
;
3131 if (state
->lvar_count
> 0)
3133 code_attributes_count
++;
3134 i
+= 8 + 10 * state
->lvar_count
;
3136 UNSAFE_PUT4 (i
); /* attribute_length */
3137 UNSAFE_PUT2 (state
->code_SP_max
); /* max_stack */
3138 UNSAFE_PUT2 (localvar_max
); /* max_locals */
3139 UNSAFE_PUT4 (state
->code_length
);
3141 /* Emit the exception table. */
3142 ptr
= append_chunk (NULL
, 2 + 8 * state
->num_handlers
, state
);
3143 PUT2 (state
->num_handlers
); /* exception_table_length */
3144 handler
= state
->handlers
;
3145 for (; handler
!= NULL
; handler
= handler
->next
)
3148 PUT2 (handler
->start_label
->pc
);
3149 PUT2 (handler
->end_label
->pc
);
3150 PUT2 (handler
->handler_label
->pc
);
3151 if (handler
->type
== NULL_TREE
)
3154 type_index
= find_class_constant (&state
->cpool
,
3159 ptr
= append_chunk (NULL
, 2, state
);
3160 PUT2 (code_attributes_count
);
3162 /* Write the LineNumberTable attribute. */
3163 if (state
->linenumber_count
> 0)
3165 static tree LineNumberTable_node
= NULL_TREE
;
3166 ptr
= append_chunk (NULL
,
3167 8 + 4 * state
->linenumber_count
, state
);
3168 if (LineNumberTable_node
== NULL_TREE
)
3169 LineNumberTable_node
= get_identifier ("LineNumberTable");
3170 i
= find_utf8_constant (&state
->cpool
, LineNumberTable_node
);
3171 PUT2 (i
); /* attribute_name_index */
3172 i
= 2+4*state
->linenumber_count
; PUT4(i
); /* attribute_length */
3173 i
= state
->linenumber_count
; PUT2 (i
);
3174 for (block
= state
->blocks
; block
!= NULL
; block
= block
->next
)
3176 int line
= block
->linenumber
;
3185 /* Write the LocalVariableTable attribute. */
3186 if (state
->lvar_count
> 0)
3188 static tree LocalVariableTable_node
= NULL_TREE
;
3189 struct localvar_info
*lvar
= state
->first_lvar
;
3190 ptr
= append_chunk (NULL
, 8 + 10 * state
->lvar_count
, state
);
3191 if (LocalVariableTable_node
== NULL_TREE
)
3192 LocalVariableTable_node
= get_identifier("LocalVariableTable");
3193 i
= find_utf8_constant (&state
->cpool
, LocalVariableTable_node
);
3194 PUT2 (i
); /* attribute_name_index */
3195 i
= 2 + 10 * state
->lvar_count
; PUT4 (i
); /* attribute_length */
3196 i
= state
->lvar_count
; PUT2 (i
);
3197 for ( ; lvar
!= NULL
; lvar
= lvar
->next
)
3199 tree name
= DECL_NAME (lvar
->decl
);
3200 tree sig
= build_java_signature (TREE_TYPE (lvar
->decl
));
3201 i
= lvar
->start_label
->pc
; PUT2 (i
);
3202 i
= lvar
->end_label
->pc
- i
; PUT2 (i
);
3203 i
= find_utf8_constant (&state
->cpool
, name
); PUT2 (i
);
3204 i
= find_utf8_constant (&state
->cpool
, sig
); PUT2 (i
);
3205 i
= DECL_LOCAL_INDEX (lvar
->decl
); PUT2 (i
);
/* Write the Exceptions attribute for a declared throws list.  */
3209 if (DECL_FUNCTION_THROWS (part
) != NULL_TREE
)
3211 tree t
= DECL_FUNCTION_THROWS (part
);
3212 int throws_count
= list_length (t
);
3213 static tree Exceptions_node
= NULL_TREE
;
3214 if (Exceptions_node
== NULL_TREE
)
3215 Exceptions_node
= get_identifier ("Exceptions");
3216 ptr
= append_chunk (NULL
, 8 + 2 * throws_count
, state
);
3217 i
= find_utf8_constant (&state
->cpool
, Exceptions_node
);
3218 PUT2 (i
); /* attribute_name_index */
3219 i
= 2 + 2 * throws_count
; PUT4(i
); /* attribute_length */
3220 i
= throws_count
; PUT2 (i
);
3221 for (; t
!= NULL_TREE
; t
= TREE_CHAIN (t
))
3223 i
= find_class_constant (&state
->cpool
, TREE_VALUE (t
));
3228 if (METHOD_DEPRECATED (part
))
3229 append_deprecated_attribute (state
);
3232 current_function_decl
= save_function
;
3234 ptr
= methods_count_ptr
; UNSAFE_PUT2 (methods_count
);
/* Strip any directory prefix from the source file name.  */
3236 source_file
= DECL_SOURCE_FILE (TYPE_NAME (clas
));
3237 for (s
= source_file
; ; s
++)
3242 if (ch
== '/' || ch
== '\\')
3245 ptr
= append_chunk (NULL
, 10, state
);
3247 i
= 1; /* Source file always exists as an attribute */
3248 if (INNER_CLASS_TYPE_P (clas
) || DECL_INNER_CLASS_LIST (TYPE_NAME (clas
)))
3250 if (clas
== object_type_node
)
3252 if (CLASS_DEPRECATED (TYPE_NAME (clas
)))
3255 PUT2 (i
); /* attributes_count */
3257 /* generate the SourceFile attribute. */
3258 if (SourceFile_node
== NULL_TREE
)
3260 SourceFile_node
= get_identifier ("SourceFile");
3263 i
= find_utf8_constant (&state
->cpool
, SourceFile_node
);
3264 PUT2 (i
); /* attribute_name_index */
3266 i
= find_utf8_constant (&state
->cpool
, get_identifier (source_file
));
3268 append_gcj_attribute (state
, clas
);
3269 append_innerclasses_attribute (state
, clas
);
3270 if (CLASS_DEPRECATED (TYPE_NAME (clas
)))
3271 append_deprecated_attribute (state
);
3273 /* Now finally generate the contents of the constant pool chunk. */
3274 i
= count_constant_pool_bytes (&state
->cpool
);
3275 ptr
= obstack_alloc (state
->chunk_obstack
, i
);
3276 cpool_chunk
->data
= ptr
;
3277 cpool_chunk
->size
= i
;
3278 write_constant_pool (&state
->cpool
, ptr
, i
);
3279 return state
->first
;
3282 static GTY(()) tree Synthetic_node
;
3283 static unsigned char *
3284 append_synthetic_attribute (struct jcf_partial
*state
)
3286 unsigned char *ptr
= append_chunk (NULL
, 6, state
);
3289 if (Synthetic_node
== NULL_TREE
)
3291 Synthetic_node
= get_identifier ("Synthetic");
3293 i
= find_utf8_constant (&state
->cpool
, Synthetic_node
);
3294 PUT2 (i
); /* Attribute string index */
3295 PUT4 (0); /* Attribute length */
3301 append_deprecated_attribute (struct jcf_partial
*state
)
3303 unsigned char *ptr
= append_chunk (NULL
, 6, state
);
3306 i
= find_utf8_constant (&state
->cpool
, get_identifier ("Deprecated"));
3307 PUT2 (i
); /* Attribute string index */
3308 PUT4 (0); /* Attribute length */
3312 append_gcj_attribute (struct jcf_partial
*state
, tree
class)
3317 if (class != object_type_node
)
3320 ptr
= append_chunk (NULL
, 6, state
); /* 2+4 */
3321 i
= find_utf8_constant (&state
->cpool
,
3322 get_identifier ("gnu.gcj.gcj-compiled"));
3323 PUT2 (i
); /* Attribute string index */
3324 PUT4 (0); /* Attribute length */
/* Cached identifier for the "InnerClasses" attribute name.  */
3327 static tree InnerClasses_node
;
/* append_innerclasses_attribute: emit the InnerClasses attribute for
   CLASS — one entry for the class itself and each enclosing inner
   class, plus one for every directly contained inner class.  The
   length and count fields reserved up front are back-patched at the
   end.
   NOTE(review): interior lines (braces, declarations of `i', `decl',
   `current', `n', `length', and the entry-count increments) were
   dropped by the extraction; the text below is preserved verbatim
   and is not compilable as-is.  */
3329 append_innerclasses_attribute (struct jcf_partial
*state
, tree
class)
3331 tree orig_decl
= TYPE_NAME (class);
3334 unsigned char *ptr
, *length_marker
, *number_marker
;
3336 if (!INNER_CLASS_TYPE_P (class) && !DECL_INNER_CLASS_LIST (orig_decl
))
3339 ptr
= append_chunk (NULL
, 8, state
); /* 2+4+2 */
3341 if (InnerClasses_node
== NULL_TREE
)
3343 InnerClasses_node
= get_identifier ("InnerClasses");
3345 i
= find_utf8_constant (&state
->cpool
, InnerClasses_node
);
3347 length_marker
= ptr
; PUT4 (0); /* length, to be later patched */
3348 number_marker
= ptr
; PUT2 (0); /* number of classes, tblp */
3350 /* Generate the entries: all inner classes visible from the one we
3351 process: itself, up and down. */
3352 while (class && INNER_CLASS_TYPE_P (class))
/* The simple name is everything after the last '$' of the mangled
   identifier.  */
3356 decl
= TYPE_NAME (class);
3357 n
= IDENTIFIER_POINTER (DECL_NAME (decl
)) +
3358 IDENTIFIER_LENGTH (DECL_NAME (decl
));
3360 while (n
[-1] != '$')
3362 append_innerclasses_attribute_entry (state
, decl
, get_identifier (n
));
3365 class = TREE_TYPE (DECL_CONTEXT (TYPE_NAME (class)));
/* Then one entry per directly contained inner class.  */
3369 for (current
= DECL_INNER_CLASS_LIST (decl
);
3370 current
; current
= TREE_CHAIN (current
))
3372 append_innerclasses_attribute_entry (state
, TREE_PURPOSE (current
),
3373 TREE_VALUE (current
));
/* Back-patch the attribute length and entry count.  */
3377 ptr
= length_marker
; PUT4 (8*length
+2);
3378 ptr
= number_marker
; PUT2 (length
);
3382 append_innerclasses_attribute_entry (struct jcf_partial
*state
,
3383 tree decl
, tree name
)
3386 int ocii
= 0, ini
= 0;
3387 unsigned char *ptr
= append_chunk (NULL
, 8, state
);
3389 icii
= find_class_constant (&state
->cpool
, TREE_TYPE (decl
));
3391 /* Sun's implementation seems to generate ocii to 0 for inner
3392 classes (which aren't considered members of the class they're
3393 in.) The specs are saying that if the class is anonymous,
3394 inner_name_index must be zero. */
3395 if (!ANONYMOUS_CLASS_P (TREE_TYPE (decl
)))
3397 ocii
= find_class_constant (&state
->cpool
,
3398 TREE_TYPE (DECL_CONTEXT (decl
)));
3399 ini
= find_utf8_constant (&state
->cpool
, name
);
3401 icaf
= get_access_flags (decl
);
3403 PUT2 (icii
); PUT2 (ocii
); PUT2 (ini
); PUT2 (icaf
);
/* make_class_file_name: compute the output path of the .class file
   for CLAS — either next to the .java source file (when
   jcf_write_base_directory is NULL) or under the -d base directory,
   creating any missing subdirectories on the way.  Returns a freshly
   xmalloc'ed string owned by the caller.
   NOTE(review): interior lines (braces, declarations of `r', `s',
   `t', `sep', `sb', several #ifdef branches, and the function's tail
   including its return) were dropped by the extraction; the text
   below is preserved verbatim and is not compilable as-is.  */
3407 make_class_file_name (tree clas
)
3409 const char *dname
, *cname
, *slash
;
3414 cname
= IDENTIFIER_POINTER (identifier_subst (DECL_NAME (TYPE_NAME (clas
)),
3415 "", '.', DIR_SEPARATOR
,
3417 if (jcf_write_base_directory
== NULL
)
3419 /* Make sure we put the class file into the .java file's
3420 directory, and not into some subdirectory thereof. */
3422 dname
= DECL_SOURCE_FILE (TYPE_NAME (clas
));
3423 slash
= strrchr (dname
, DIR_SEPARATOR
);
3424 #ifdef DIR_SEPARATOR_2
3426 slash
= strrchr (dname
, DIR_SEPARATOR_2
);
3432 sep
= DIR_SEPARATOR
;
3437 t
= strrchr (cname
, DIR_SEPARATOR
);
3445 dname
= jcf_write_base_directory
;
3447 s
= strrchr (dname
, DIR_SEPARATOR
);
3448 #ifdef DIR_SEPARATOR_2
3450 s
= strrchr (dname
, DIR_SEPARATOR_2
);
3455 sep
= DIR_SEPARATOR
;
3457 slash
= dname
+ strlen (dname
);
/* Join the directory prefix and the dotted-to-slashed class name.  */
3460 r
= xmalloc (slash
- dname
+ strlen (cname
) + 2);
3461 strncpy (r
, dname
, slash
- dname
);
3462 r
[slash
- dname
] = sep
;
3463 strcpy (&r
[slash
- dname
+ 1], cname
);
3465 /* We try to make new directories when we need them. We only do
3466 this for directories which "might not" exist. For instance, we
3467 assume the `-d' directory exists, but we don't assume that any
3468 subdirectory below it exists. It might be worthwhile to keep
3469 track of which directories we've created to avoid gratuitous
3471 dname
= r
+ (slash
- dname
) + 1;
3474 char *s
= strchr (dname
, sep
);
3478 /* Try to make directory if it doesn't already exist. */
3479 if (stat (r
, &sb
) == -1
3480 && mkdir (r
, 0755) == -1
3481 /* The directory might have been made by another process. */
3483 fatal_error ("can't create directory %s: %m", r
);
3486 /* Skip consecutive separators. */
3487 for (dname
= s
+ 1; *dname
&& *dname
== sep
; ++dname
)
3494 /* Write out the contents of a class (RECORD_TYPE) CLAS, as a .class file.
3495 The output .class file name is make_class_file_name(CLAS). */
3498 write_classfile (tree clas
)
3500 struct obstack
*work
= &temporary_obstack
;
3501 struct jcf_partial state
[1];
3502 char *class_file_name
= make_class_file_name (clas
);
3503 struct chunk
*chunks
;
3505 if (class_file_name
!= NULL
)
3508 char *temporary_file_name
;
3510 /* The .class file is initially written to a ".tmp" file so that
3511 if multiple instances of the compiler are running at once
3512 they do not see partially formed class files. */
3513 temporary_file_name
= concat (class_file_name
, ".tmp", NULL
);
3514 stream
= fopen (temporary_file_name
, "wb");
3516 fatal_error ("can't open %s for writing: %m", temporary_file_name
);
3518 jcf_dependency_add_target (class_file_name
);
3519 init_jcf_state (state
, work
);
3520 chunks
= generate_classfile (clas
, state
);
3521 write_chunks (stream
, chunks
);
3522 if (fclose (stream
))
3523 fatal_error ("error closing %s: %m", temporary_file_name
);
3525 /* If a file named by the string pointed to by `new' exists
3526 prior to the call to the `rename' function, the behavior
3527 is implementation-defined. ISO 9899-1990 7.9.4.2.
3529 For example, on Win32 with MSVCRT, it is an error. */
3531 unlink (class_file_name
);
3533 if (rename (temporary_file_name
, class_file_name
) == -1)
3535 remove (temporary_file_name
);
3536 fatal_error ("can't create %s: %m", class_file_name
);
3538 free (temporary_file_name
);
3539 free (class_file_name
);
3541 release_jcf_state (state
);
/* TODO:
   string concatenation
   synchronized statement
 */
3549 #include "gt-java-jcf-write.h"