1 /* Write out a Java(TM) class file.
2 Copyright (C) 1998, 1999, 2000, 2001, 2002, 2003
3 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA.
21 Java and all Java-based marks are trademarks or registered trademarks
22 of Sun Microsystems, Inc. in the United States and other countries.
23 The Free Software Foundation is independent of Sun Microsystems, Inc. */
27 #include "coretypes.h"
32 #include "java-tree.h"
36 #include "java-opcodes.h"
37 #include "parse.h" /* for BLOCK_EXPR_BODY */
43 #define DIR_SEPARATOR '/'
46 extern struct obstack temporary_obstack
;
48 /* Base directory in which `.class' files should be written.
49 NULL means to put the file into the same directory as the
50 corresponding .java file. */
51 char *jcf_write_base_directory
= NULL
;
53 /* Make sure bytecode.data is big enough for at least N more bytes. */
56 do { CHECK_OP(state); \
57 if (state->bytecode.ptr + (N) > state->bytecode.limit) \
58 buffer_grow (&state->bytecode, N); } while (0)
60 /* Add a 1-byte instruction/operand I to bytecode.data,
61 assuming space has already been RESERVE'd. */
63 #define OP1(I) (*state->bytecode.ptr++ = (I), CHECK_OP(state))
65 /* Like OP1, but I is a 2-byte big endian integer. */
68 do { int _i = (I); OP1 (_i >> 8); OP1 (_i); CHECK_OP(state); } while (0)
70 /* Like OP1, but I is a 4-byte big endian integer. */
73 do { int _i = (I); OP1 (_i >> 24); OP1 (_i >> 16); \
74 OP1 (_i >> 8); OP1 (_i); CHECK_OP(state); } while (0)
76 /* Macro to call each time we push I words on the JVM stack. */
78 #define NOTE_PUSH(I) \
79 do { state->code_SP += (I); \
80 if (state->code_SP > state->code_SP_max) \
81 state->code_SP_max = state->code_SP; } while (0)
83 /* Macro to call each time we pop I words from the JVM stack. */
86 do { state->code_SP -= (I); if (state->code_SP < 0) abort(); } while (0)
88 /* A chunk or segment of a .class file. */
92 /* The next segment of this .class file. */
95 /* The actual data in this segment to be written to the .class file. */
98 /* The size of the segment to be written to the .class file. */
102 #define PENDING_CLEANUP_PC (-3)
103 #define PENDING_EXIT_PC (-2)
104 #define UNDEFINED_PC (-1)
106 /* Each "block" represents a label plus the bytecode instructions following.
107 There may be branches out of the block, but no incoming jumps, except
108 to the beginning of the block.
110 If (pc < 0), the jcf_block is not an actual block (i.e. it has no
111 associated code yet), but it is an undefined label.
116 /* For blocks that are defined, the next block (in pc order).
117 For blocks that are not-yet-defined the end label of a LABELED_BLOCK_EXPR
118 or a cleanup expression (from a TRY_FINALLY_EXPR),
119 this is the next (outer) such end label, in a stack headed by
120 labeled_blocks in jcf_partial. */
121 struct jcf_block
*next
;
123 /* In the not-yet-defined end label for an unfinished EXIT_BLOCK_EXPR.
124 pc is PENDING_EXIT_PC.
125 In the not-yet-defined end label for pending cleanup subroutine,
126 pc is PENDING_CLEANUP_PC.
127 For other not-yet-defined labels, pc is UNDEFINED_PC.
129 If the label has been defined:
130 Until perform_relocations is finished, this is the maximum possible
131 value of the bytecode offset at the beginning of this block.
132 After perform_relocations, it is the actual offset (pc). */
137 /* After finish_jcf_block is called, the actual instructions
138 contained in this block. Before that NULL, and the instructions
139 are in state->bytecode. */
143 /* If pc==PENDING_CLEANUP_PC, start_label is the start of the region
144 covered by the cleanup. */
145 struct jcf_block
*start_label
;
149 /* Set of relocations (in reverse offset order) for this block. */
150 struct jcf_relocation
*relocations
;
152 /* If this block is that of the not-yet-defined end label of
153 a LABELED_BLOCK_EXPR, where LABELED_BLOCK is that LABELED_BLOCK_EXPR.
154 If pc==PENDING_CLEANUP_PC, the cleanup that needs to be run. */
159 /* A "relocation" type for the 0-3 bytes of padding at the start
160 of a tableswitch or a lookupswitch. */
161 #define SWITCH_ALIGN_RELOC 4
163 /* A relocation type for the labels in a tableswitch or a lookupswitch;
164 these are relative to the start of the instruction, but (due to
165 the 0-3 bytes of padding), we don't know the offset before relocation. */
166 #define BLOCK_START_RELOC 1
168 struct jcf_relocation
170 /* Next relocation for the current jcf_block. */
171 struct jcf_relocation
*next
;
173 /* The (byte) offset within the current block that needs to be relocated. */
174 HOST_WIDE_INT offset
;
176 /* 0 if offset is a 4-byte relative offset.
177 4 (SWITCH_ALIGN_RELOC) if offset points to 0-3 padding bytes inserted
178 for proper alignment in tableswitch/lookupswitch instructions.
179 1 (BLOCK_START_RELOC) if offset points to a 4-byte offset relative
180 to the start of the containing block.
181 -1 if offset is a 2-byte relative offset.
182 < -1 if offset is the address of an instruction with a 2-byte offset
183 that does not have a corresponding 4-byte offset version, in which
184 case the absolute value of kind is the inverted opcode.
185 > 4 if offset is the address of an instruction (such as jsr) with a
186 2-byte offset that does have a corresponding 4-byte offset version,
187 in which case kind is the opcode of the 4-byte version (such as jsr_w). */
190 /* The label the relocation wants to actually transfer to. */
191 struct jcf_block
*label
;
194 #define RELOCATION_VALUE_0 ((HOST_WIDE_INT)0)
195 #define RELOCATION_VALUE_1 ((HOST_WIDE_INT)1)
197 /* State for single catch clause. */
201 struct jcf_handler
*next
;
203 struct jcf_block
*start_label
;
204 struct jcf_block
*end_label
;
205 struct jcf_block
*handler_label
;
207 /* The sub-class of Throwable handled, or NULL_TREE (for finally). */
211 /* State for the current switch statement. */
213 struct jcf_switch_state
215 struct jcf_switch_state
*prev
;
216 struct jcf_block
*default_label
;
218 struct jcf_relocation
*cases
;
220 HOST_WIDE_INT min_case
, max_case
;
223 /* This structure is used to contain the various pieces that will
224 become a .class file. */
230 struct obstack
*chunk_obstack
;
233 /* List of basic blocks for the current method. */
234 struct jcf_block
*blocks
;
235 struct jcf_block
*last_block
;
237 struct localvar_info
*first_lvar
;
238 struct localvar_info
*last_lvar
;
243 int linenumber_count
;
245 /* Until perform_relocations, this is a upper bound on the number
246 of bytes (so far) in the instructions for the current method. */
249 /* Stack of undefined ending labels for LABELED_BLOCK_EXPR. */
250 struct jcf_block
*labeled_blocks
;
252 /* The current stack size (stack pointer) in the current method. */
255 /* The largest extent of stack size (stack pointer) in the current method. */
258 /* Contains a mapping from local var slot number to localvar_info. */
259 struct buffer localvars
;
261 /* The buffer allocated for bytecode for the current jcf_block. */
262 struct buffer bytecode
;
264 /* Chain of exception handlers for the current method. */
265 struct jcf_handler
*handlers
;
267 /* Last element in handlers chain. */
268 struct jcf_handler
*last_handler
;
270 /* Number of exception handlers for the current method. */
273 /* Number of finalizers we are currently nested within. */
276 /* If non-NULL, use this for the return value. */
277 tree return_value_decl
;
279 /* Information about the current switch statement. */
280 struct jcf_switch_state
*sw_state
;
283 static void generate_bytecode_insns (tree
, int, struct jcf_partial
*);
284 static struct chunk
* alloc_chunk (struct chunk
*, unsigned char *,
285 int, struct obstack
*);
286 static unsigned char * append_chunk (unsigned char *, int,
287 struct jcf_partial
*);
288 static void append_chunk_copy (unsigned char *, int, struct jcf_partial
*);
289 static struct jcf_block
* gen_jcf_label (struct jcf_partial
*);
290 static void finish_jcf_block (struct jcf_partial
*);
291 static void define_jcf_label (struct jcf_block
*, struct jcf_partial
*);
292 static struct jcf_block
* get_jcf_label_here (struct jcf_partial
*);
293 static void put_linenumber (int, struct jcf_partial
*);
294 static void localvar_alloc (tree
, struct jcf_partial
*);
295 static void localvar_free (tree
, struct jcf_partial
*);
296 static int get_access_flags (tree
);
297 static void write_chunks (FILE *, struct chunk
*);
298 static int adjust_typed_op (tree
, int);
299 static void generate_bytecode_conditional (tree
, struct jcf_block
*,
300 struct jcf_block
*, int,
301 struct jcf_partial
*);
302 static void generate_bytecode_return (tree
, struct jcf_partial
*);
303 static void perform_relocations (struct jcf_partial
*);
304 static void init_jcf_state (struct jcf_partial
*, struct obstack
*);
305 static void init_jcf_method (struct jcf_partial
*, tree
);
306 static void release_jcf_state (struct jcf_partial
*);
307 static struct chunk
* generate_classfile (tree
, struct jcf_partial
*);
308 static struct jcf_handler
*alloc_handler (struct jcf_block
*,
310 struct jcf_partial
*);
311 static void emit_iinc (tree
, HOST_WIDE_INT
, struct jcf_partial
*);
312 static void emit_reloc (HOST_WIDE_INT
, int, struct jcf_block
*,
313 struct jcf_partial
*);
314 static void push_constant1 (HOST_WIDE_INT
, struct jcf_partial
*);
315 static void push_constant2 (HOST_WIDE_INT
, struct jcf_partial
*);
316 static void push_int_const (HOST_WIDE_INT
, struct jcf_partial
*);
317 static int find_constant_wide (HOST_WIDE_INT
, HOST_WIDE_INT
,
318 struct jcf_partial
*);
319 static void push_long_const (HOST_WIDE_INT
, HOST_WIDE_INT
,
320 struct jcf_partial
*);
321 static int find_constant_index (tree
, struct jcf_partial
*);
322 static void push_long_const (HOST_WIDE_INT
, HOST_WIDE_INT
,
323 struct jcf_partial
*);
324 static void field_op (tree
, int, struct jcf_partial
*);
325 static void maybe_wide (int, int, struct jcf_partial
*);
326 static void emit_dup (int, int, struct jcf_partial
*);
327 static void emit_pop (int, struct jcf_partial
*);
328 static void emit_load_or_store (tree
, int, struct jcf_partial
*);
329 static void emit_load (tree
, struct jcf_partial
*);
330 static void emit_store (tree
, struct jcf_partial
*);
331 static void emit_unop (enum java_opcode
, tree
, struct jcf_partial
*);
332 static void emit_binop (enum java_opcode
, tree
, struct jcf_partial
*);
333 static void emit_reloc (HOST_WIDE_INT
, int, struct jcf_block
*,
334 struct jcf_partial
*);
335 static void emit_switch_reloc (struct jcf_block
*, struct jcf_partial
*);
336 static void emit_case_reloc (struct jcf_relocation
*, struct jcf_partial
*);
337 static void emit_if (struct jcf_block
*, int, int, struct jcf_partial
*);
338 static void emit_goto (struct jcf_block
*, struct jcf_partial
*);
339 static void emit_jsr (struct jcf_block
*, struct jcf_partial
*);
340 static void call_cleanups (struct jcf_block
*, struct jcf_partial
*);
341 static char *make_class_file_name (tree
);
342 static unsigned char *append_synthetic_attribute (struct jcf_partial
*);
343 static void append_innerclasses_attribute (struct jcf_partial
*, tree
);
344 static void append_innerclasses_attribute_entry (struct jcf_partial
*, tree
, tree
);
345 static void append_gcj_attribute (struct jcf_partial
*, tree
);
347 /* Utility macros for appending (big-endian) data to a buffer.
348 We assume a local variable 'ptr' points into where we want to
349 write next, and we assume enough space has been allocated. */
351 #ifdef ENABLE_JC1_CHECKING
352 static int CHECK_PUT (void *, struct jcf_partial
*, int);
355 CHECK_PUT (void *ptr
, struct jcf_partial
*state
, int i
)
357 if ((unsigned char *) ptr
< state
->chunk
->data
358 || (unsigned char *) ptr
+ i
> state
->chunk
->data
+ state
->chunk
->size
)
364 #define CHECK_PUT(PTR, STATE, I) ((void)0)
367 #define PUT1(X) (CHECK_PUT(ptr, state, 1), *ptr++ = (X))
368 #define PUT2(X) (PUT1((X) >> 8), PUT1((X) & 0xFF))
369 #define PUT4(X) (PUT2((X) >> 16), PUT2((X) & 0xFFFF))
370 #define PUTN(P, N) (CHECK_PUT(ptr, state, N), memcpy(ptr, P, N), ptr += (N))
372 /* There are some cases below where CHECK_PUT is guaranteed to fail.
373 Use the following macros in those specific cases. */
374 #define UNSAFE_PUT1(X) (*ptr++ = (X))
375 #define UNSAFE_PUT2(X) (UNSAFE_PUT1((X) >> 8), UNSAFE_PUT1((X) & 0xFF))
376 #define UNSAFE_PUT4(X) (UNSAFE_PUT2((X) >> 16), UNSAFE_PUT2((X) & 0xFFFF))
377 #define UNSAFE_PUTN(P, N) (memcpy(ptr, P, N), ptr += (N))
380 /* Allocate a new chunk on obstack WORK, and link it in after LAST.
381 Set the data and size fields to DATA and SIZE, respectively.
382 However, if DATA is NULL and SIZE>0, allocate a buffer as well. */
384 static struct chunk
*
385 alloc_chunk (struct chunk
*last
, unsigned char *data
,
386 int size
, struct obstack
*work
)
388 struct chunk
*chunk
= (struct chunk
*)
389 obstack_alloc (work
, sizeof(struct chunk
));
391 if (data
== NULL
&& size
> 0)
392 data
= obstack_alloc (work
, size
);
402 #ifdef ENABLE_JC1_CHECKING
403 static int CHECK_OP (struct jcf_partial
*);
406 CHECK_OP (struct jcf_partial
*state
)
408 if (state
->bytecode
.ptr
> state
->bytecode
.limit
)
414 #define CHECK_OP(STATE) ((void) 0)
417 static unsigned char *
418 append_chunk (unsigned char *data
, int size
, struct jcf_partial
*state
)
420 state
->chunk
= alloc_chunk (state
->chunk
, data
, size
, state
->chunk_obstack
);
421 if (state
->first
== NULL
)
422 state
->first
= state
->chunk
;
423 return state
->chunk
->data
;
427 append_chunk_copy (unsigned char *data
, int size
, struct jcf_partial
*state
)
429 unsigned char *ptr
= append_chunk (NULL
, size
, state
);
430 memcpy (ptr
, data
, size
);
433 static struct jcf_block
*
434 gen_jcf_label (struct jcf_partial
*state
)
436 struct jcf_block
*block
= (struct jcf_block
*)
437 obstack_alloc (state
->chunk_obstack
, sizeof (struct jcf_block
));
439 block
->linenumber
= -1;
440 block
->pc
= UNDEFINED_PC
;
445 finish_jcf_block (struct jcf_partial
*state
)
447 struct jcf_block
*block
= state
->last_block
;
448 struct jcf_relocation
*reloc
;
449 int code_length
= BUFFER_LENGTH (&state
->bytecode
);
450 int pc
= state
->code_length
;
451 append_chunk_copy (state
->bytecode
.data
, code_length
, state
);
452 BUFFER_RESET (&state
->bytecode
);
453 block
->v
.chunk
= state
->chunk
;
455 /* Calculate code_length to the maximum value it can have. */
456 pc
+= block
->v
.chunk
->size
;
457 for (reloc
= block
->u
.relocations
; reloc
!= NULL
; reloc
= reloc
->next
)
459 int kind
= reloc
->kind
;
460 if (kind
== SWITCH_ALIGN_RELOC
)
462 else if (kind
> BLOCK_START_RELOC
)
463 pc
+= 2; /* 2-byte offset may grow to 4-byte offset */
465 pc
+= 5; /* May need to add a goto_w. */
467 state
->code_length
= pc
;
471 define_jcf_label (struct jcf_block
*label
, struct jcf_partial
*state
)
473 if (state
->last_block
!= NULL
)
474 finish_jcf_block (state
);
475 label
->pc
= state
->code_length
;
476 if (state
->blocks
== NULL
)
477 state
->blocks
= label
;
479 state
->last_block
->next
= label
;
480 state
->last_block
= label
;
482 label
->u
.relocations
= NULL
;
485 static struct jcf_block
*
486 get_jcf_label_here (struct jcf_partial
*state
)
488 if (state
->last_block
!= NULL
&& BUFFER_LENGTH (&state
->bytecode
) == 0)
489 return state
->last_block
;
492 struct jcf_block
*label
= gen_jcf_label (state
);
493 define_jcf_label (label
, state
);
498 /* Note a line number entry for the current PC and given LINE. */
501 put_linenumber (int line
, struct jcf_partial
*state
)
503 struct jcf_block
*label
= get_jcf_label_here (state
);
504 if (label
->linenumber
> 0)
506 label
= gen_jcf_label (state
);
507 define_jcf_label (label
, state
);
509 label
->linenumber
= line
;
510 state
->linenumber_count
++;
513 /* Allocate a new jcf_handler, for a catch clause that catches exceptions
514 in the range (START_LABEL, END_LABEL). */
516 static struct jcf_handler
*
517 alloc_handler (struct jcf_block
*start_label
, struct jcf_block
*end_label
,
518 struct jcf_partial
*state
)
520 struct jcf_handler
*handler
= (struct jcf_handler
*)
521 obstack_alloc (state
->chunk_obstack
, sizeof (struct jcf_handler
));
522 handler
->start_label
= start_label
;
523 handler
->end_label
= end_label
;
524 handler
->handler_label
= get_jcf_label_here (state
);
525 if (state
->handlers
== NULL
)
526 state
->handlers
= handler
;
528 state
->last_handler
->next
= handler
;
529 state
->last_handler
= handler
;
530 handler
->next
= NULL
;
531 state
->num_handlers
++;
536 /* The index of jvm local variable allocated for this DECL.
537 This is assigned when generating .class files;
538 contrast DECL_LOCAL_SLOT_NUMBER which is set when *reading* a .class file.
539 (We don't allocate DECL_LANG_SPECIFIC for locals from Java source code.) */
541 #define DECL_LOCAL_INDEX(DECL) DECL_ALIGN(DECL)
545 struct localvar_info
*next
;
548 struct jcf_block
*start_label
;
549 struct jcf_block
*end_label
;
552 #define localvar_buffer ((struct localvar_info**) state->localvars.data)
553 #define localvar_max \
554 ((struct localvar_info**) state->localvars.ptr - localvar_buffer)
557 localvar_alloc (tree decl
, struct jcf_partial
*state
)
559 struct jcf_block
*start_label
= get_jcf_label_here (state
);
560 int wide
= TYPE_IS_WIDE (TREE_TYPE (decl
));
562 register struct localvar_info
*info
;
563 register struct localvar_info
**ptr
= localvar_buffer
;
564 register struct localvar_info
**limit
565 = (struct localvar_info
**) state
->localvars
.ptr
;
566 for (index
= 0; ptr
< limit
; index
++, ptr
++)
569 && (! wide
|| ((ptr
+1) < limit
&& ptr
[1] == NULL
)))
574 buffer_grow (&state
->localvars
, 2 * sizeof (struct localvar_info
*));
575 ptr
= (struct localvar_info
**) state
->localvars
.data
+ index
;
576 state
->localvars
.ptr
= (unsigned char *) (ptr
+ 1 + wide
);
578 info
= (struct localvar_info
*)
579 obstack_alloc (state
->chunk_obstack
, sizeof (struct localvar_info
));
582 ptr
[1] = (struct localvar_info
*)(~0);
583 DECL_LOCAL_INDEX (decl
) = index
;
585 info
->start_label
= start_label
;
587 if (debug_info_level
> DINFO_LEVEL_TERSE
588 && DECL_NAME (decl
) != NULL_TREE
)
590 /* Generate debugging info. */
592 if (state
->last_lvar
!= NULL
)
593 state
->last_lvar
->next
= info
;
595 state
->first_lvar
= info
;
596 state
->last_lvar
= info
;
602 localvar_free (tree decl
, struct jcf_partial
*state
)
604 struct jcf_block
*end_label
= get_jcf_label_here (state
);
605 int index
= DECL_LOCAL_INDEX (decl
);
606 register struct localvar_info
**ptr
= &localvar_buffer
[index
];
607 register struct localvar_info
*info
= *ptr
;
608 int wide
= TYPE_IS_WIDE (TREE_TYPE (decl
));
610 info
->end_label
= end_label
;
612 if (info
->decl
!= decl
)
617 if (ptr
[1] != (struct localvar_info
*)(~0))
624 #define STACK_TARGET 1
625 #define IGNORE_TARGET 2
627 /* Get the access flags of a class (TYPE_DECL), a method (FUNCTION_DECL), or
628 a field (FIELD_DECL or VAR_DECL, if static), as encoded in a .class file. */
631 get_access_flags (tree decl
)
634 int isfield
= TREE_CODE (decl
) == FIELD_DECL
|| TREE_CODE (decl
) == VAR_DECL
;
635 if (CLASS_PUBLIC (decl
)) /* same as FIELD_PUBLIC and METHOD_PUBLIC */
637 if (CLASS_FINAL (decl
)) /* same as FIELD_FINAL and METHOD_FINAL */
639 if (isfield
|| TREE_CODE (decl
) == FUNCTION_DECL
)
641 if (TREE_PROTECTED (decl
))
642 flags
|= ACC_PROTECTED
;
643 if (TREE_PRIVATE (decl
))
644 flags
|= ACC_PRIVATE
;
646 else if (TREE_CODE (decl
) == TYPE_DECL
)
648 if (CLASS_SUPER (decl
))
650 if (CLASS_ABSTRACT (decl
))
651 flags
|= ACC_ABSTRACT
;
652 if (CLASS_INTERFACE (decl
))
653 flags
|= ACC_INTERFACE
;
654 if (CLASS_STATIC (decl
))
656 if (CLASS_PRIVATE (decl
))
657 flags
|= ACC_PRIVATE
;
658 if (CLASS_PROTECTED (decl
))
659 flags
|= ACC_PROTECTED
;
660 if (ANONYMOUS_CLASS_P (TREE_TYPE (decl
))
661 || LOCAL_CLASS_P (TREE_TYPE (decl
)))
662 flags
|= ACC_PRIVATE
;
663 if (CLASS_STRICTFP (decl
))
669 if (TREE_CODE (decl
) == FUNCTION_DECL
)
671 if (METHOD_NATIVE (decl
))
673 if (METHOD_STATIC (decl
))
675 if (METHOD_SYNCHRONIZED (decl
))
676 flags
|= ACC_SYNCHRONIZED
;
677 if (METHOD_ABSTRACT (decl
))
678 flags
|= ACC_ABSTRACT
;
679 if (METHOD_STRICTFP (decl
))
684 if (FIELD_STATIC (decl
))
686 if (FIELD_VOLATILE (decl
))
687 flags
|= ACC_VOLATILE
;
688 if (FIELD_TRANSIENT (decl
))
689 flags
|= ACC_TRANSIENT
;
694 /* Write the list of segments starting at CHUNKS to STREAM. */
697 write_chunks (FILE* stream
, struct chunk
*chunks
)
699 for (; chunks
!= NULL
; chunks
= chunks
->next
)
700 fwrite (chunks
->data
, chunks
->size
, 1, stream
);
703 /* Push a 1-word constant in the constant pool at the given INDEX.
704 (Caller is responsible for doing NOTE_PUSH.) */
707 push_constant1 (HOST_WIDE_INT index
, struct jcf_partial
*state
)
722 /* Push a 2-word constant in the constant pool at the given INDEX.
723 (Caller is responsible for doing NOTE_PUSH.) */
726 push_constant2 (HOST_WIDE_INT index
, struct jcf_partial
*state
)
733 /* Push 32-bit integer constant on VM stack.
734 Caller is responsible for doing NOTE_PUSH. */
737 push_int_const (HOST_WIDE_INT i
, struct jcf_partial
*state
)
740 if (i
>= -1 && i
<= 5)
741 OP1(OPCODE_iconst_0
+ i
);
742 else if (i
>= -128 && i
< 128)
747 else if (i
>= -32768 && i
< 32768)
754 i
= find_constant1 (&state
->cpool
, CONSTANT_Integer
,
755 (jword
)(i
& 0xFFFFFFFF));
756 push_constant1 (i
, state
);
761 find_constant_wide (HOST_WIDE_INT lo
, HOST_WIDE_INT hi
,
762 struct jcf_partial
*state
)
764 HOST_WIDE_INT w1
, w2
;
765 lshift_double (lo
, hi
, -32, 64, &w1
, &w2
, 1);
766 return find_constant2 (&state
->cpool
, CONSTANT_Long
,
767 (jword
)(w1
& 0xFFFFFFFF), (jword
)(lo
& 0xFFFFFFFF));
770 /* Find or allocate a constant pool entry for the given VALUE.
771 Return the index in the constant pool. */
774 find_constant_index (tree value
, struct jcf_partial
*state
)
776 if (TREE_CODE (value
) == INTEGER_CST
)
778 if (TYPE_PRECISION (TREE_TYPE (value
)) <= 32)
779 return find_constant1 (&state
->cpool
, CONSTANT_Integer
,
780 (jword
)(TREE_INT_CST_LOW (value
) & 0xFFFFFFFF));
782 return find_constant_wide (TREE_INT_CST_LOW (value
),
783 TREE_INT_CST_HIGH (value
), state
);
785 else if (TREE_CODE (value
) == REAL_CST
)
789 real_to_target (words
, &TREE_REAL_CST (value
),
790 TYPE_MODE (TREE_TYPE (value
)));
791 words
[0] &= 0xffffffff;
792 words
[1] &= 0xffffffff;
794 if (TYPE_PRECISION (TREE_TYPE (value
)) == 32)
795 return find_constant1 (&state
->cpool
, CONSTANT_Float
, (jword
)words
[0]);
797 return find_constant2 (&state
->cpool
, CONSTANT_Double
,
798 (jword
)words
[1-FLOAT_WORDS_BIG_ENDIAN
],
799 (jword
)words
[FLOAT_WORDS_BIG_ENDIAN
]);
801 else if (TREE_CODE (value
) == STRING_CST
)
802 return find_string_constant (&state
->cpool
, value
);
808 /* Push 64-bit long constant on VM stack.
809 Caller is responsible for doing NOTE_PUSH. */
812 push_long_const (HOST_WIDE_INT lo
, HOST_WIDE_INT hi
, struct jcf_partial
*state
)
814 HOST_WIDE_INT highpart
, dummy
;
815 jint lowpart
= WORD_TO_INT (lo
);
817 rshift_double (lo
, hi
, 32, 64, &highpart
, &dummy
, 1);
819 if (highpart
== 0 && (lowpart
== 0 || lowpart
== 1))
822 OP1(OPCODE_lconst_0
+ lowpart
);
824 else if ((highpart
== 0 && lowpart
> 0 && lowpart
< 32768)
825 || (highpart
== -1 && lowpart
< 0 && lowpart
>= -32768))
827 push_int_const (lowpart
, state
);
832 push_constant2 (find_constant_wide (lo
, hi
, state
), state
);
836 field_op (tree field
, int opcode
, struct jcf_partial
*state
)
838 int index
= find_fieldref_index (&state
->cpool
, field
);
844 /* Returns an integer in the range 0 (for 'int') through 4 (for object
845 reference) to 7 (for 'short') which matches the pattern of how JVM
846 opcodes typically depend on the operand type. */
849 adjust_typed_op (tree type
, int max
)
851 switch (TREE_CODE (type
))
854 case RECORD_TYPE
: return 4;
856 return TYPE_PRECISION (type
) == 32 || max
< 5 ? 0 : 5;
858 return TYPE_PRECISION (type
) == 32 || max
< 6 ? 0 : 6;
860 switch (TYPE_PRECISION (type
))
862 case 8: return max
< 5 ? 0 : 5;
863 case 16: return max
< 7 ? 0 : 7;
869 switch (TYPE_PRECISION (type
))
882 maybe_wide (int opcode
, int index
, struct jcf_partial
*state
)
899 /* Compile code to duplicate with offset, where
900 SIZE is the size of the stack item to duplicate (1 or 2), and
901 OFFSET is where to insert the result (must be 0, 1, or 2).
902 (The new words get inserted at stack[SP-size-offset].) */
905 emit_dup (int size
, int offset
, struct jcf_partial
*state
)
912 kind
= size
== 1 ? OPCODE_dup
: OPCODE_dup2
;
913 else if (offset
== 1)
914 kind
= size
== 1 ? OPCODE_dup_x1
: OPCODE_dup2_x1
;
915 else if (offset
== 2)
916 kind
= size
== 1 ? OPCODE_dup_x2
: OPCODE_dup2_x2
;
924 emit_pop (int size
, struct jcf_partial
*state
)
927 OP1 (OPCODE_pop
- 1 + size
);
931 emit_iinc (tree var
, HOST_WIDE_INT value
, struct jcf_partial
*state
)
933 int slot
= DECL_LOCAL_INDEX (var
);
935 if (value
< -128 || value
> 127 || slot
>= 256)
953 emit_load_or_store (tree var
, /* Variable to load from or store into. */
954 int opcode
, /* Either OPCODE_iload or OPCODE_istore. */
955 struct jcf_partial
*state
)
957 tree type
= TREE_TYPE (var
);
958 int kind
= adjust_typed_op (type
, 4);
959 int index
= DECL_LOCAL_INDEX (var
);
963 OP1 (opcode
+ 5 + 4 * kind
+ index
); /* [ilfda]{load,store}_[0123] */
966 maybe_wide (opcode
+ kind
, index
, state
); /* [ilfda]{load,store} */
970 emit_load (tree var
, struct jcf_partial
*state
)
972 emit_load_or_store (var
, OPCODE_iload
, state
);
973 NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (var
)) ? 2 : 1);
977 emit_store (tree var
, struct jcf_partial
*state
)
979 emit_load_or_store (var
, OPCODE_istore
, state
);
980 NOTE_POP (TYPE_IS_WIDE (TREE_TYPE (var
)) ? 2 : 1);
984 emit_unop (enum java_opcode opcode
, tree type ATTRIBUTE_UNUSED
,
985 struct jcf_partial
*state
)
992 emit_binop (enum java_opcode opcode
, tree type
, struct jcf_partial
*state
)
994 int size
= TYPE_IS_WIDE (type
) ? 2 : 1;
1001 emit_reloc (HOST_WIDE_INT value
, int kind
,
1002 struct jcf_block
*target
, struct jcf_partial
*state
)
1004 struct jcf_relocation
*reloc
= (struct jcf_relocation
*)
1005 obstack_alloc (state
->chunk_obstack
, sizeof (struct jcf_relocation
));
1006 struct jcf_block
*block
= state
->last_block
;
1007 reloc
->next
= block
->u
.relocations
;
1008 block
->u
.relocations
= reloc
;
1009 reloc
->offset
= BUFFER_LENGTH (&state
->bytecode
);
1010 reloc
->label
= target
;
1012 if (kind
== 0 || kind
== BLOCK_START_RELOC
)
1014 else if (kind
!= SWITCH_ALIGN_RELOC
)
1019 emit_switch_reloc (struct jcf_block
*label
, struct jcf_partial
*state
)
1021 emit_reloc (RELOCATION_VALUE_0
, BLOCK_START_RELOC
, label
, state
);
1024 /* Similar to emit_switch_reloc,
1025 but re-uses an existing case reloc. */
1028 emit_case_reloc (struct jcf_relocation
*reloc
, struct jcf_partial
*state
)
1030 struct jcf_block
*block
= state
->last_block
;
1031 reloc
->next
= block
->u
.relocations
;
1032 block
->u
.relocations
= reloc
;
1033 reloc
->offset
= BUFFER_LENGTH (&state
->bytecode
);
1034 reloc
->kind
= BLOCK_START_RELOC
;
1038 /* Emit a conditional jump to TARGET with a 2-byte relative jump offset
1039 The opcode is OPCODE, the inverted opcode is INV_OPCODE. */
1042 emit_if (struct jcf_block
*target
, int opcode
, int inv_opcode
,
1043 struct jcf_partial
*state
)
1047 /* value is 1 byte from reloc back to start of instruction. */
1048 emit_reloc (RELOCATION_VALUE_1
, - inv_opcode
, target
, state
);
1052 emit_goto (struct jcf_block
*target
, struct jcf_partial
*state
)
1056 /* Value is 1 byte from reloc back to start of instruction. */
1057 emit_reloc (RELOCATION_VALUE_1
, OPCODE_goto_w
, target
, state
);
1061 emit_jsr (struct jcf_block
*target
, struct jcf_partial
*state
)
1065 /* Value is 1 byte from reloc back to start of instruction. */
1066 emit_reloc (RELOCATION_VALUE_1
, OPCODE_jsr_w
, target
, state
);
1069 /* Generate code to evaluate EXP. If the result is true,
1070 branch to TRUE_LABEL; otherwise, branch to FALSE_LABEL.
1071 TRUE_BRANCH_FIRST is a code generation hint that the
1072 TRUE_LABEL may follow right after this. (The idea is that we
1073 may be able to optimize away GOTO TRUE_LABEL; TRUE_LABEL:) */
1076 generate_bytecode_conditional (tree exp
,
1077 struct jcf_block
*true_label
,
1078 struct jcf_block
*false_label
,
1079 int true_branch_first
,
1080 struct jcf_partial
*state
)
1082 tree exp0
, exp1
, type
;
1083 int save_SP
= state
->code_SP
;
1084 enum java_opcode op
, negop
;
1085 switch (TREE_CODE (exp
))
1088 emit_goto (integer_zerop (exp
) ? false_label
: true_label
, state
);
1092 struct jcf_block
*then_label
= gen_jcf_label (state
);
1093 struct jcf_block
*else_label
= gen_jcf_label (state
);
1094 int save_SP_before
, save_SP_after
;
1095 generate_bytecode_conditional (TREE_OPERAND (exp
, 0),
1096 then_label
, else_label
, 1, state
);
1097 define_jcf_label (then_label
, state
);
1098 save_SP_before
= state
->code_SP
;
1099 generate_bytecode_conditional (TREE_OPERAND (exp
, 1),
1100 true_label
, false_label
, 1, state
);
1101 save_SP_after
= state
->code_SP
;
1102 state
->code_SP
= save_SP_before
;
1103 define_jcf_label (else_label
, state
);
1104 generate_bytecode_conditional (TREE_OPERAND (exp
, 2),
1105 true_label
, false_label
,
1106 true_branch_first
, state
);
1107 if (state
->code_SP
!= save_SP_after
)
1111 case TRUTH_NOT_EXPR
:
1112 generate_bytecode_conditional (TREE_OPERAND (exp
, 0), false_label
,
1113 true_label
, ! true_branch_first
, state
);
1115 case TRUTH_ANDIF_EXPR
:
1117 struct jcf_block
*next_label
= gen_jcf_label (state
);
1118 generate_bytecode_conditional (TREE_OPERAND (exp
, 0),
1119 next_label
, false_label
, 1, state
);
1120 define_jcf_label (next_label
, state
);
1121 generate_bytecode_conditional (TREE_OPERAND (exp
, 1),
1122 true_label
, false_label
, 1, state
);
1125 case TRUTH_ORIF_EXPR
:
1127 struct jcf_block
*next_label
= gen_jcf_label (state
);
1128 generate_bytecode_conditional (TREE_OPERAND (exp
, 0),
1129 true_label
, next_label
, 1, state
);
1130 define_jcf_label (next_label
, state
);
1131 generate_bytecode_conditional (TREE_OPERAND (exp
, 1),
1132 true_label
, false_label
, 1, state
);
1136 /* Assuming op is one of the 2-operand if_icmp<COND> instructions,
1137 set it to the corresponding 1-operand if<COND> instructions. */
1141 /* The opcodes with their inverses are allocated in pairs.
1142 E.g. The inverse of if_icmplt (161) is if_icmpge (162). */
1143 negop
= (op
& 1) ? op
+ 1 : op
- 1;
1145 if (true_branch_first
)
1147 emit_if (false_label
, negop
, op
, state
);
1148 emit_goto (true_label
, state
);
1152 emit_if (true_label
, op
, negop
, state
);
1153 emit_goto (false_label
, state
);
1157 op
= OPCODE_if_icmpeq
;
1160 op
= OPCODE_if_icmpne
;
1163 op
= OPCODE_if_icmpgt
;
1166 op
= OPCODE_if_icmplt
;
1169 op
= OPCODE_if_icmpge
;
1172 op
= OPCODE_if_icmple
;
1175 exp0
= TREE_OPERAND (exp
, 0);
1176 exp1
= TREE_OPERAND (exp
, 1);
1177 type
= TREE_TYPE (exp0
);
1178 switch (TREE_CODE (type
))
1181 case POINTER_TYPE
: case RECORD_TYPE
:
1182 switch (TREE_CODE (exp
))
1184 case EQ_EXPR
: op
= OPCODE_if_acmpeq
; break;
1185 case NE_EXPR
: op
= OPCODE_if_acmpne
; break;
1188 if (integer_zerop (exp1
) || integer_zerop (exp0
))
1190 generate_bytecode_insns (integer_zerop (exp0
) ? exp1
: exp0
,
1191 STACK_TARGET
, state
);
1192 op
= op
+ (OPCODE_ifnull
- OPCODE_if_acmpeq
);
1193 negop
= (op
& 1) ? op
- 1 : op
+ 1;
1197 generate_bytecode_insns (exp0
, STACK_TARGET
, state
);
1198 generate_bytecode_insns (exp1
, STACK_TARGET
, state
);
1202 generate_bytecode_insns (exp0
, STACK_TARGET
, state
);
1203 generate_bytecode_insns (exp1
, STACK_TARGET
, state
);
1204 if (op
== OPCODE_if_icmplt
|| op
== OPCODE_if_icmple
)
1208 if (TYPE_PRECISION (type
) > 32)
1219 if (TYPE_PRECISION (type
) > 32)
1221 generate_bytecode_insns (exp0
, STACK_TARGET
, state
);
1222 generate_bytecode_insns (exp1
, STACK_TARGET
, state
);
1230 if (integer_zerop (exp1
))
1232 generate_bytecode_insns (exp0
, STACK_TARGET
, state
);
1236 if (integer_zerop (exp0
))
1240 case OPCODE_if_icmplt
:
1241 case OPCODE_if_icmpge
:
1244 case OPCODE_if_icmpgt
:
1245 case OPCODE_if_icmple
:
1251 generate_bytecode_insns (exp1
, STACK_TARGET
, state
);
1255 generate_bytecode_insns (exp0
, STACK_TARGET
, state
);
1256 generate_bytecode_insns (exp1
, STACK_TARGET
, state
);
1262 generate_bytecode_insns (exp
, STACK_TARGET
, state
);
1264 if (true_branch_first
)
1266 emit_if (false_label
, OPCODE_ifeq
, OPCODE_ifne
, state
);
1267 emit_goto (true_label
, state
);
1271 emit_if (true_label
, OPCODE_ifne
, OPCODE_ifeq
, state
);
1272 emit_goto (false_label
, state
);
1276 if (save_SP
!= state
->code_SP
)
1280 /* Call pending cleanups i.e. those for surrounding TRY_FINALLY_EXPRs.
1281 but only as far out as LIMIT (since we are about to jump to the
1282 emit label that is LIMIT). */
1285 call_cleanups (struct jcf_block
*limit
, struct jcf_partial
*state
)
1287 struct jcf_block
*block
= state
->labeled_blocks
;
1288 for (; block
!= limit
; block
= block
->next
)
1290 if (block
->pc
== PENDING_CLEANUP_PC
)
1291 emit_jsr (block
, state
);
1296 generate_bytecode_return (tree exp
, struct jcf_partial
*state
)
1298 tree return_type
= TREE_TYPE (TREE_TYPE (state
->current_method
));
1299 int returns_void
= TREE_CODE (return_type
) == VOID_TYPE
;
1304 switch (TREE_CODE (exp
))
1307 generate_bytecode_insns (TREE_OPERAND (exp
, 0), IGNORE_TARGET
,
1309 exp
= TREE_OPERAND (exp
, 1);
1313 struct jcf_block
*then_label
= gen_jcf_label (state
);
1314 struct jcf_block
*else_label
= gen_jcf_label (state
);
1315 generate_bytecode_conditional (TREE_OPERAND (exp
, 0),
1316 then_label
, else_label
, 1, state
);
1317 define_jcf_label (then_label
, state
);
1318 generate_bytecode_return (TREE_OPERAND (exp
, 1), state
);
1319 define_jcf_label (else_label
, state
);
1320 generate_bytecode_return (TREE_OPERAND (exp
, 2), state
);
1324 generate_bytecode_insns (exp
,
1325 returns_void
? IGNORE_TARGET
1326 : STACK_TARGET
, state
);
1332 call_cleanups (NULL
, state
);
1336 op
= OPCODE_ireturn
+ adjust_typed_op (return_type
, 4);
1337 if (state
->num_finalizers
> 0)
1339 if (state
->return_value_decl
== NULL_TREE
)
1341 state
->return_value_decl
1342 = build_decl (VAR_DECL
, NULL_TREE
, TREE_TYPE (exp
));
1343 localvar_alloc (state
->return_value_decl
, state
);
1345 emit_store (state
->return_value_decl
, state
);
1346 call_cleanups (NULL
, state
);
1347 emit_load (state
->return_value_decl
, state
);
1348 /* If we call localvar_free (state->return_value_decl, state),
1349 then we risk the save decl erroneously re-used in the
1350 finalizer. Instead, we keep the state->return_value_decl
1351 allocated through the rest of the method. This is not
1352 the greatest solution, but it is at least simple and safe. */
1359 /* Generate bytecode for sub-expression EXP of METHOD.
1360 TARGET is one of STACK_TARGET or IGNORE_TARGET. */
1363 generate_bytecode_insns (tree exp
, int target
, struct jcf_partial
*state
)
1366 enum java_opcode jopcode
;
1368 HOST_WIDE_INT value
;
1373 if (exp
== NULL
&& target
== IGNORE_TARGET
)
1376 type
= TREE_TYPE (exp
);
1378 switch (TREE_CODE (exp
))
1381 if (BLOCK_EXPR_BODY (exp
))
1384 tree body
= BLOCK_EXPR_BODY (exp
);
1385 for (local
= BLOCK_EXPR_DECLS (exp
); local
; )
1387 tree next
= TREE_CHAIN (local
);
1388 localvar_alloc (local
, state
);
1391 /* Avoid deep recursion for long blocks. */
1392 while (TREE_CODE (body
) == COMPOUND_EXPR
)
1394 generate_bytecode_insns (TREE_OPERAND (body
, 0), target
, state
);
1395 body
= TREE_OPERAND (body
, 1);
1397 generate_bytecode_insns (body
, target
, state
);
1398 for (local
= BLOCK_EXPR_DECLS (exp
); local
; )
1400 tree next
= TREE_CHAIN (local
);
1401 localvar_free (local
, state
);
1407 generate_bytecode_insns (TREE_OPERAND (exp
, 0), IGNORE_TARGET
, state
);
1408 /* Normally the first operand to a COMPOUND_EXPR must complete
1409 normally. However, in the special case of a do-while
1410 statement this is not necessarily the case. */
1411 if (CAN_COMPLETE_NORMALLY (TREE_OPERAND (exp
, 0)))
1412 generate_bytecode_insns (TREE_OPERAND (exp
, 1), target
, state
);
1414 case EXPR_WITH_FILE_LOCATION
:
1416 const char *saved_input_filename
= input_filename
;
1417 tree body
= EXPR_WFL_NODE (exp
);
1418 int saved_lineno
= lineno
;
1419 if (body
== empty_stmt_node
)
1421 input_filename
= EXPR_WFL_FILENAME (exp
);
1422 lineno
= EXPR_WFL_LINENO (exp
);
1423 if (EXPR_WFL_EMIT_LINE_NOTE (exp
) && lineno
> 0
1424 && debug_info_level
> DINFO_LEVEL_NONE
)
1425 put_linenumber (lineno
, state
);
1426 generate_bytecode_insns (body
, target
, state
);
1427 input_filename
= saved_input_filename
;
1428 lineno
= saved_lineno
;
1432 if (target
== IGNORE_TARGET
) ; /* do nothing */
1433 else if (TREE_CODE (type
) == POINTER_TYPE
)
1435 if (! integer_zerop (exp
))
1438 OP1 (OPCODE_aconst_null
);
1441 else if (TYPE_PRECISION (type
) <= 32)
1443 push_int_const (TREE_INT_CST_LOW (exp
), state
);
1448 push_long_const (TREE_INT_CST_LOW (exp
), TREE_INT_CST_HIGH (exp
),
1455 int prec
= TYPE_PRECISION (type
) >> 5;
1457 if (real_zerop (exp
) && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (exp
)))
1458 OP1 (prec
== 1 ? OPCODE_fconst_0
: OPCODE_dconst_0
);
1459 else if (real_onep (exp
))
1460 OP1 (prec
== 1 ? OPCODE_fconst_1
: OPCODE_dconst_1
);
1461 /* FIXME Should also use fconst_2 for 2.0f.
1462 Also, should use iconst_2/ldc followed by i2f/i2d
1463 for other float/double when the value is a small integer. */
1466 offset
= find_constant_index (exp
, state
);
1468 push_constant1 (offset
, state
);
1470 push_constant2 (offset
, state
);
1476 push_constant1 (find_string_constant (&state
->cpool
, exp
), state
);
1480 if (TREE_STATIC (exp
))
1482 field_op (exp
, OPCODE_getstatic
, state
);
1483 NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (exp
)) ? 2 : 1);
1486 /* ... fall through ... */
1488 emit_load (exp
, state
);
1490 case NON_LVALUE_EXPR
:
1492 generate_bytecode_insns (TREE_OPERAND (exp
, 0), target
, state
);
1495 generate_bytecode_insns (TREE_OPERAND (exp
, 0), target
, state
);
1496 generate_bytecode_insns (TREE_OPERAND (exp
, 1), target
, state
);
1497 if (target
!= IGNORE_TARGET
)
1499 jopcode
= OPCODE_iaload
+ adjust_typed_op (type
, 7);
1502 if (! TYPE_IS_WIDE (type
))
1508 tree obj
= TREE_OPERAND (exp
, 0);
1509 tree field
= TREE_OPERAND (exp
, 1);
1510 int is_static
= FIELD_STATIC (field
);
1511 generate_bytecode_insns (obj
,
1512 is_static
? IGNORE_TARGET
: target
, state
);
1513 if (target
!= IGNORE_TARGET
)
1515 if (DECL_NAME (field
) == length_identifier_node
&& !is_static
1516 && TYPE_ARRAY_P (TREE_TYPE (obj
)))
1519 OP1 (OPCODE_arraylength
);
1523 field_op (field
, is_static
? OPCODE_getstatic
: OPCODE_getfield
,
1527 NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (field
)) ? 2 : 1);
1532 case TRUTH_ANDIF_EXPR
:
1533 case TRUTH_ORIF_EXPR
:
1541 struct jcf_block
*then_label
= gen_jcf_label (state
);
1542 struct jcf_block
*else_label
= gen_jcf_label (state
);
1543 struct jcf_block
*end_label
= gen_jcf_label (state
);
1544 generate_bytecode_conditional (exp
,
1545 then_label
, else_label
, 1, state
);
1546 define_jcf_label (then_label
, state
);
1547 push_int_const (1, state
);
1548 emit_goto (end_label
, state
);
1549 define_jcf_label (else_label
, state
);
1550 push_int_const (0, state
);
1551 define_jcf_label (end_label
, state
);
1557 struct jcf_block
*then_label
= gen_jcf_label (state
);
1558 struct jcf_block
*else_label
= gen_jcf_label (state
);
1559 struct jcf_block
*end_label
= gen_jcf_label (state
);
1560 generate_bytecode_conditional (TREE_OPERAND (exp
, 0),
1561 then_label
, else_label
, 1, state
);
1562 define_jcf_label (then_label
, state
);
1563 generate_bytecode_insns (TREE_OPERAND (exp
, 1), target
, state
);
1564 if (CAN_COMPLETE_NORMALLY (TREE_OPERAND (exp
, 1))
1565 /* Not all expressions have CAN_COMPLETE_NORMALLY set properly. */
1566 || TREE_CODE (TREE_TYPE (exp
)) != VOID_TYPE
)
1567 emit_goto (end_label
, state
);
1568 define_jcf_label (else_label
, state
);
1569 generate_bytecode_insns (TREE_OPERAND (exp
, 2), target
, state
);
1570 define_jcf_label (end_label
, state
);
1571 /* COND_EXPR can be used in a binop. The stack must be adjusted. */
1572 if (TREE_TYPE (exp
) != void_type_node
)
1573 NOTE_POP (TYPE_IS_WIDE (TREE_TYPE (exp
)) ? 2 : 1);
1578 struct jcf_switch_state
*sw_state
= state
->sw_state
;
1579 struct jcf_relocation
*reloc
= (struct jcf_relocation
*)
1580 obstack_alloc (state
->chunk_obstack
, sizeof (struct jcf_relocation
));
1581 HOST_WIDE_INT case_value
= TREE_INT_CST_LOW (TREE_OPERAND (exp
, 0));
1583 reloc
->label
= get_jcf_label_here (state
);
1584 reloc
->offset
= case_value
;
1585 reloc
->next
= sw_state
->cases
;
1586 sw_state
->cases
= reloc
;
1587 if (sw_state
->num_cases
== 0)
1589 sw_state
->min_case
= case_value
;
1590 sw_state
->max_case
= case_value
;
1594 if (case_value
< sw_state
->min_case
)
1595 sw_state
->min_case
= case_value
;
1596 if (case_value
> sw_state
->max_case
)
1597 sw_state
->max_case
= case_value
;
1599 sw_state
->num_cases
++;
1603 state
->sw_state
->default_label
= get_jcf_label_here (state
);
1608 /* The SWITCH_EXPR has three parts, generated in the following order:
1609 1. the switch_expression (the value used to select the correct case);
1610 2. the switch_body;
1611 3. the switch_instruction (the tableswitch/lookupswitch instruction);
1612 After code generation, we will re-order them in the order 1, 3, 2.
1613 This is to avoid any extra GOTOs. */
1614 struct jcf_switch_state sw_state
;
1615 struct jcf_block
*expression_last
; /* Last block of the switch_expression. */
1616 struct jcf_block
*body_last
; /* Last block of the switch_body. */
1617 struct jcf_block
*switch_instruction
; /* First block of switch_instruction. */
1618 struct jcf_block
*instruction_last
; /* Last block of the switch_instruction. */
1619 struct jcf_block
*body_block
;
1621 sw_state
.prev
= state
->sw_state
;
1622 state
->sw_state
= &sw_state
;
1623 sw_state
.cases
= NULL
;
1624 sw_state
.num_cases
= 0;
1625 sw_state
.default_label
= NULL
;
1626 generate_bytecode_insns (TREE_OPERAND (exp
, 0), STACK_TARGET
, state
);
1627 expression_last
= state
->last_block
;
1628 /* Force a new block here. */
1629 body_block
= gen_jcf_label (state
);
1630 define_jcf_label (body_block
, state
);
1631 generate_bytecode_insns (TREE_OPERAND (exp
, 1), IGNORE_TARGET
, state
);
1632 body_last
= state
->last_block
;
1634 switch_instruction
= gen_jcf_label (state
);
1635 define_jcf_label (switch_instruction
, state
);
1636 if (sw_state
.default_label
== NULL
)
1637 sw_state
.default_label
= gen_jcf_label (state
);
1639 if (sw_state
.num_cases
<= 1)
1641 if (sw_state
.num_cases
== 0)
1643 emit_pop (1, state
);
1648 push_int_const (sw_state
.cases
->offset
, state
);
1650 emit_if (sw_state
.cases
->label
,
1651 OPCODE_if_icmpeq
, OPCODE_if_icmpne
, state
);
1653 emit_goto (sw_state
.default_label
, state
);
1658 unsigned HOST_WIDE_INT delta
;
1659 /* Copy the chain of relocs into a sorted array. */
1660 struct jcf_relocation
**relocs
= (struct jcf_relocation
**)
1661 xmalloc (sw_state
.num_cases
* sizeof (struct jcf_relocation
*));
1662 /* The relocs arrays is a buffer with a gap.
1663 The assumption is that cases will normally come in "runs". */
1665 int gap_end
= sw_state
.num_cases
;
1666 struct jcf_relocation
*reloc
;
1667 for (reloc
= sw_state
.cases
; reloc
!= NULL
; reloc
= reloc
->next
)
1669 HOST_WIDE_INT case_value
= reloc
->offset
;
1670 while (gap_end
< sw_state
.num_cases
)
1672 struct jcf_relocation
*end
= relocs
[gap_end
];
1673 if (case_value
<= end
->offset
)
1675 relocs
[gap_start
++] = end
;
1678 while (gap_start
> 0)
1680 struct jcf_relocation
*before
= relocs
[gap_start
-1];
1681 if (case_value
>= before
->offset
)
1683 relocs
[--gap_end
] = before
;
1686 relocs
[gap_start
++] = reloc
;
1687 /* Note we don't check for duplicates. This is
1688 handled by the parser. */
1691 /* We could have DELTA < 0 if sw_state.min_case is
1692 something like Integer.MIN_VALUE. That is why delta is
1693 cast to unsigned below. */
1694 delta
= sw_state
.max_case
- sw_state
.min_case
;
1695 if (2 * (unsigned) sw_state
.num_cases
>= delta
)
1696 { /* Use tableswitch. */
1698 RESERVE (13 + 4 * (sw_state
.max_case
- sw_state
.min_case
+ 1));
1699 OP1 (OPCODE_tableswitch
);
1700 emit_reloc (RELOCATION_VALUE_0
,
1701 SWITCH_ALIGN_RELOC
, NULL
, state
);
1702 emit_switch_reloc (sw_state
.default_label
, state
);
1703 OP4 (sw_state
.min_case
);
1704 OP4 (sw_state
.max_case
);
1705 for (i
= sw_state
.min_case
; ; )
1707 reloc
= relocs
[index
];
1708 if (i
== reloc
->offset
)
1710 emit_case_reloc (reloc
, state
);
1711 if (i
== sw_state
.max_case
)
1716 emit_switch_reloc (sw_state
.default_label
, state
);
1721 { /* Use lookupswitch. */
1722 RESERVE(9 + 8 * sw_state
.num_cases
);
1723 OP1 (OPCODE_lookupswitch
);
1724 emit_reloc (RELOCATION_VALUE_0
,
1725 SWITCH_ALIGN_RELOC
, NULL
, state
);
1726 emit_switch_reloc (sw_state
.default_label
, state
);
1727 OP4 (sw_state
.num_cases
);
1728 for (i
= 0; i
< sw_state
.num_cases
; i
++)
1730 struct jcf_relocation
*reloc
= relocs
[i
];
1731 OP4 (reloc
->offset
);
1732 emit_case_reloc (reloc
, state
);
1738 instruction_last
= state
->last_block
;
1739 if (sw_state
.default_label
->pc
< 0)
1740 define_jcf_label (sw_state
.default_label
, state
);
1741 else /* Force a new block. */
1742 sw_state
.default_label
= get_jcf_label_here (state
);
1743 /* Now re-arrange the blocks so the switch_instruction
1744 comes before the switch_body. */
1745 switch_length
= state
->code_length
- switch_instruction
->pc
;
1746 switch_instruction
->pc
= body_block
->pc
;
1747 instruction_last
->next
= body_block
;
1748 instruction_last
->v
.chunk
->next
= body_block
->v
.chunk
;
1749 expression_last
->next
= switch_instruction
;
1750 expression_last
->v
.chunk
->next
= switch_instruction
->v
.chunk
;
1751 body_last
->next
= sw_state
.default_label
;
1752 body_last
->v
.chunk
->next
= NULL
;
1753 state
->chunk
= body_last
->v
.chunk
;
1754 for (; body_block
!= sw_state
.default_label
; body_block
= body_block
->next
)
1755 body_block
->pc
+= switch_length
;
1757 state
->sw_state
= sw_state
.prev
;
1762 exp
= TREE_OPERAND (exp
, 0);
1763 if (exp
== NULL_TREE
)
1764 exp
= empty_stmt_node
;
1765 else if (TREE_CODE (exp
) != MODIFY_EXPR
)
1768 exp
= TREE_OPERAND (exp
, 1);
1769 generate_bytecode_return (exp
, state
);
1771 case LABELED_BLOCK_EXPR
:
1773 struct jcf_block
*end_label
= gen_jcf_label (state
);
1774 end_label
->next
= state
->labeled_blocks
;
1775 state
->labeled_blocks
= end_label
;
1776 end_label
->pc
= PENDING_EXIT_PC
;
1777 end_label
->u
.labeled_block
= exp
;
1778 if (LABELED_BLOCK_BODY (exp
))
1779 generate_bytecode_insns (LABELED_BLOCK_BODY (exp
), target
, state
);
1780 if (state
->labeled_blocks
!= end_label
)
1782 state
->labeled_blocks
= end_label
->next
;
1783 define_jcf_label (end_label
, state
);
1788 tree body
= TREE_OPERAND (exp
, 0);
1790 if (TREE_CODE (body
) == COMPOUND_EXPR
1791 && TREE_CODE (TREE_OPERAND (body
, 0)) == EXIT_EXPR
)
1793 /* Optimize: H: if (TEST) GOTO L; BODY; GOTO H; L:
1794 to: GOTO L; BODY; L: if (!TEST) GOTO L; */
1795 struct jcf_block
*head_label
;
1796 struct jcf_block
*body_label
;
1797 struct jcf_block
*end_label
= gen_jcf_label (state
);
1798 struct jcf_block
*exit_label
= state
->labeled_blocks
;
1799 head_label
= gen_jcf_label (state
);
1800 emit_goto (head_label
, state
);
1801 body_label
= get_jcf_label_here (state
);
1802 generate_bytecode_insns (TREE_OPERAND (body
, 1), target
, state
);
1803 define_jcf_label (head_label
, state
);
1804 generate_bytecode_conditional (TREE_OPERAND (body
, 0),
1805 end_label
, body_label
, 1, state
);
1806 define_jcf_label (end_label
, state
);
1811 struct jcf_block
*head_label
= get_jcf_label_here (state
);
1812 generate_bytecode_insns (body
, IGNORE_TARGET
, state
);
1813 if (CAN_COMPLETE_NORMALLY (body
))
1814 emit_goto (head_label
, state
);
1820 struct jcf_block
*label
= state
->labeled_blocks
;
1821 struct jcf_block
*end_label
= gen_jcf_label (state
);
1822 generate_bytecode_conditional (TREE_OPERAND (exp
, 0),
1823 label
, end_label
, 0, state
);
1824 define_jcf_label (end_label
, state
);
1827 case EXIT_BLOCK_EXPR
:
1829 struct jcf_block
*label
= state
->labeled_blocks
;
1830 if (TREE_OPERAND (exp
, 1) != NULL
) goto notimpl
;
1831 while (label
->u
.labeled_block
!= TREE_OPERAND (exp
, 0))
1832 label
= label
->next
;
1833 call_cleanups (label
, state
);
1834 emit_goto (label
, state
);
1838 case PREDECREMENT_EXPR
: value
= -1; post_op
= 0; goto increment
;
1839 case PREINCREMENT_EXPR
: value
= 1; post_op
= 0; goto increment
;
1840 case POSTDECREMENT_EXPR
: value
= -1; post_op
= 1; goto increment
;
1841 case POSTINCREMENT_EXPR
: value
= 1; post_op
= 1; goto increment
;
1844 arg
= TREE_OPERAND (exp
, 1);
1845 exp
= TREE_OPERAND (exp
, 0);
1846 type
= TREE_TYPE (exp
);
1847 size
= TYPE_IS_WIDE (type
) ? 2 : 1;
1848 if ((TREE_CODE (exp
) == VAR_DECL
|| TREE_CODE (exp
) == PARM_DECL
)
1849 && ! TREE_STATIC (exp
)
1850 && TREE_CODE (type
) == INTEGER_TYPE
1851 && TYPE_PRECISION (type
) == 32)
1853 if (target
!= IGNORE_TARGET
&& post_op
)
1854 emit_load (exp
, state
);
1855 emit_iinc (exp
, value
, state
);
1856 if (target
!= IGNORE_TARGET
&& ! post_op
)
1857 emit_load (exp
, state
);
1860 if (TREE_CODE (exp
) == COMPONENT_REF
)
1862 generate_bytecode_insns (TREE_OPERAND (exp
, 0), STACK_TARGET
, state
);
1863 emit_dup (1, 0, state
);
1864 /* Stack: ..., objectref, objectref. */
1865 field_op (TREE_OPERAND (exp
, 1), OPCODE_getfield
, state
);
1867 /* Stack: ..., objectref, oldvalue. */
1870 else if (TREE_CODE (exp
) == ARRAY_REF
)
1872 generate_bytecode_insns (TREE_OPERAND (exp
, 0), STACK_TARGET
, state
);
1873 generate_bytecode_insns (TREE_OPERAND (exp
, 1), STACK_TARGET
, state
);
1874 emit_dup (2, 0, state
);
1875 /* Stack: ..., array, index, array, index. */
1876 jopcode
= OPCODE_iaload
+ adjust_typed_op (TREE_TYPE (exp
), 7);
1880 /* Stack: ..., array, index, oldvalue. */
1883 else if (TREE_CODE (exp
) == VAR_DECL
|| TREE_CODE (exp
) == PARM_DECL
)
1885 generate_bytecode_insns (exp
, STACK_TARGET
, state
);
1886 /* Stack: ..., oldvalue. */
1892 if (target
!= IGNORE_TARGET
&& post_op
)
1893 emit_dup (size
, offset
, state
);
1894 /* Stack, if ARRAY_REF: ..., [result, ] array, index, oldvalue. */
1895 /* Stack, if COMPONENT_REF: ..., [result, ] objectref, oldvalue. */
1896 /* Stack, otherwise: ..., [result, ] oldvalue. */
1897 generate_bytecode_insns (arg
, STACK_TARGET
, state
);
1898 emit_binop ((value
>= 0 ? OPCODE_iadd
: OPCODE_isub
)
1899 + adjust_typed_op (type
, 3),
1901 if (target
!= IGNORE_TARGET
&& ! post_op
)
1902 emit_dup (size
, offset
, state
);
1903 /* Stack, if ARRAY_REF: ..., [result, ] array, index, newvalue. */
1904 /* Stack, if COMPONENT_REF: ..., [result, ] objectref, newvalue. */
1905 /* Stack, otherwise: ..., [result, ] newvalue. */
1906 goto finish_assignment
;
1910 tree lhs
= TREE_OPERAND (exp
, 0);
1911 tree rhs
= TREE_OPERAND (exp
, 1);
1914 /* See if we can use the iinc instruction. */
1915 if ((TREE_CODE (lhs
) == VAR_DECL
|| TREE_CODE (lhs
) == PARM_DECL
)
1916 && ! TREE_STATIC (lhs
)
1917 && TREE_CODE (TREE_TYPE (lhs
)) == INTEGER_TYPE
1918 && TYPE_PRECISION (TREE_TYPE (lhs
)) == 32
1919 && (TREE_CODE (rhs
) == PLUS_EXPR
|| TREE_CODE (rhs
) == MINUS_EXPR
))
1921 tree arg0
= TREE_OPERAND (rhs
, 0);
1922 tree arg1
= TREE_OPERAND (rhs
, 1);
1923 HOST_WIDE_INT min_value
= -32768;
1924 HOST_WIDE_INT max_value
= 32767;
1925 if (TREE_CODE (rhs
) == MINUS_EXPR
)
1930 else if (arg1
== lhs
)
1933 arg1
= TREE_OPERAND (rhs
, 0);
1935 if (lhs
== arg0
&& TREE_CODE (arg1
) == INTEGER_CST
)
1937 HOST_WIDE_INT hi_value
= TREE_INT_CST_HIGH (arg1
);
1938 value
= TREE_INT_CST_LOW (arg1
);
1939 if ((hi_value
== 0 && value
<= max_value
)
1940 || (hi_value
== -1 && value
>= min_value
))
1942 if (TREE_CODE (rhs
) == MINUS_EXPR
)
1944 emit_iinc (lhs
, value
, state
);
1945 if (target
!= IGNORE_TARGET
)
1946 emit_load (lhs
, state
);
1952 if (TREE_CODE (lhs
) == COMPONENT_REF
)
1954 generate_bytecode_insns (TREE_OPERAND (lhs
, 0),
1955 STACK_TARGET
, state
);
1958 else if (TREE_CODE (lhs
) == ARRAY_REF
)
1960 generate_bytecode_insns (TREE_OPERAND(lhs
, 0),
1961 STACK_TARGET
, state
);
1962 generate_bytecode_insns (TREE_OPERAND(lhs
, 1),
1963 STACK_TARGET
, state
);
1969 /* If the rhs is a binary expression and the left operand is
1970 `==' to the lhs then we have an OP= expression. In this
1971 case we must do some special processing. */
1972 if (TREE_CODE_CLASS (TREE_CODE (rhs
)) == '2'
1973 && lhs
== TREE_OPERAND (rhs
, 0))
1975 if (TREE_CODE (lhs
) == COMPONENT_REF
)
1977 tree field
= TREE_OPERAND (lhs
, 1);
1978 if (! FIELD_STATIC (field
))
1980 /* Duplicate the object reference so we can get
1982 emit_dup (TYPE_IS_WIDE (field
) ? 2 : 1, 0, state
);
1985 field_op (field
, (FIELD_STATIC (field
)
1990 NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (field
)) ? 2 : 1);
1992 else if (TREE_CODE (lhs
) == VAR_DECL
1993 || TREE_CODE (lhs
) == PARM_DECL
)
1995 if (FIELD_STATIC (lhs
))
1997 field_op (lhs
, OPCODE_getstatic
, state
);
1998 NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (lhs
)) ? 2 : 1);
2001 emit_load (lhs
, state
);
2003 else if (TREE_CODE (lhs
) == ARRAY_REF
)
2005 /* Duplicate the array and index, which are on the
2006 stack, so that we can load the old value. */
2007 emit_dup (2, 0, state
);
2009 jopcode
= OPCODE_iaload
+ adjust_typed_op (TREE_TYPE (lhs
), 7);
2012 NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (lhs
)) ? 2 : 1);
2017 /* This function correctly handles the case where the LHS
2018 of a binary expression is NULL_TREE. */
2019 rhs
= build (TREE_CODE (rhs
), TREE_TYPE (rhs
),
2020 NULL_TREE
, TREE_OPERAND (rhs
, 1));
2023 generate_bytecode_insns (rhs
, STACK_TARGET
, state
);
2024 if (target
!= IGNORE_TARGET
)
2025 emit_dup (TYPE_IS_WIDE (type
) ? 2 : 1 , offset
, state
);
2031 if (TREE_CODE (exp
) == COMPONENT_REF
)
2033 tree field
= TREE_OPERAND (exp
, 1);
2034 if (! FIELD_STATIC (field
))
2037 FIELD_STATIC (field
) ? OPCODE_putstatic
: OPCODE_putfield
,
2040 NOTE_POP (TYPE_IS_WIDE (TREE_TYPE (field
)) ? 2 : 1);
2042 else if (TREE_CODE (exp
) == VAR_DECL
2043 || TREE_CODE (exp
) == PARM_DECL
)
2045 if (FIELD_STATIC (exp
))
2047 field_op (exp
, OPCODE_putstatic
, state
);
2048 NOTE_POP (TYPE_IS_WIDE (TREE_TYPE (exp
)) ? 2 : 1);
2051 emit_store (exp
, state
);
2053 else if (TREE_CODE (exp
) == ARRAY_REF
)
2055 jopcode
= OPCODE_iastore
+ adjust_typed_op (TREE_TYPE (exp
), 7);
2058 NOTE_POP (TYPE_IS_WIDE (TREE_TYPE (exp
)) ? 4 : 3);
2064 jopcode
= OPCODE_iadd
;
2067 jopcode
= OPCODE_isub
;
2070 jopcode
= OPCODE_imul
;
2072 case TRUNC_DIV_EXPR
:
2074 jopcode
= OPCODE_idiv
;
2076 case TRUNC_MOD_EXPR
:
2077 jopcode
= OPCODE_irem
;
2079 case LSHIFT_EXPR
: jopcode
= OPCODE_ishl
; goto binop
;
2080 case RSHIFT_EXPR
: jopcode
= OPCODE_ishr
; goto binop
;
2081 case URSHIFT_EXPR
: jopcode
= OPCODE_iushr
; goto binop
;
2082 case TRUTH_AND_EXPR
:
2083 case BIT_AND_EXPR
: jopcode
= OPCODE_iand
; goto binop
;
2085 case BIT_IOR_EXPR
: jopcode
= OPCODE_ior
; goto binop
;
2086 case TRUTH_XOR_EXPR
:
2087 case BIT_XOR_EXPR
: jopcode
= OPCODE_ixor
; goto binop
;
2090 tree arg0
= TREE_OPERAND (exp
, 0);
2091 tree arg1
= TREE_OPERAND (exp
, 1);
2092 jopcode
+= adjust_typed_op (type
, 3);
2093 if (arg0
== arg1
&& TREE_CODE (arg0
) == SAVE_EXPR
)
2095 /* fold may (e.g) convert 2*x to x+x. */
2096 generate_bytecode_insns (TREE_OPERAND (arg0
, 0), target
, state
);
2097 emit_dup (TYPE_PRECISION (TREE_TYPE (arg0
)) > 32 ? 2 : 1, 0, state
);
2101 /* ARG0 will be NULL_TREE if we're handling an `OP='
2102 expression. In this case the stack already holds the
2103 LHS. See the MODIFY_EXPR case. */
2104 if (arg0
!= NULL_TREE
)
2105 generate_bytecode_insns (arg0
, target
, state
);
2106 if (jopcode
>= OPCODE_lshl
&& jopcode
<= OPCODE_lushr
)
2107 arg1
= convert (int_type_node
, arg1
);
2108 generate_bytecode_insns (arg1
, target
, state
);
2110 /* For most binary operations, both operands and the result have the
2111 same type. Shift operations are different. Using arg1's type
2112 gets us the correct SP adjustment in all cases. */
2113 if (target
== STACK_TARGET
)
2114 emit_binop (jopcode
, TREE_TYPE (arg1
), state
);
2117 case TRUTH_NOT_EXPR
:
2119 generate_bytecode_insns (TREE_OPERAND (exp
, 0), target
, state
);
2120 if (target
== STACK_TARGET
)
2122 int is_long
= TYPE_PRECISION (TREE_TYPE (exp
)) > 32;
2123 push_int_const (TREE_CODE (exp
) == BIT_NOT_EXPR
? -1 : 1, state
);
2127 NOTE_PUSH (1 + is_long
);
2128 OP1 (OPCODE_ixor
+ is_long
);
2129 NOTE_POP (1 + is_long
);
2133 jopcode
= OPCODE_ineg
;
2134 jopcode
+= adjust_typed_op (type
, 3);
2135 generate_bytecode_insns (TREE_OPERAND (exp
, 0), target
, state
);
2136 if (target
== STACK_TARGET
)
2137 emit_unop (jopcode
, type
, state
);
2139 case INSTANCEOF_EXPR
:
2141 int index
= find_class_constant (&state
->cpool
, TREE_OPERAND (exp
, 1));
2142 generate_bytecode_insns (TREE_OPERAND (exp
, 0), target
, state
);
2144 OP1 (OPCODE_instanceof
);
2149 generate_bytecode_insns (TREE_OPERAND (exp
, 0), STACK_TARGET
, state
);
2154 case FIX_TRUNC_EXPR
:
2156 tree src
= TREE_OPERAND (exp
, 0);
2157 tree src_type
= TREE_TYPE (src
);
2158 tree dst_type
= TREE_TYPE (exp
);
2159 generate_bytecode_insns (TREE_OPERAND (exp
, 0), target
, state
);
2160 if (target
== IGNORE_TARGET
|| src_type
== dst_type
)
2162 if (TREE_CODE (dst_type
) == POINTER_TYPE
)
2164 if (TREE_CODE (exp
) == CONVERT_EXPR
)
2166 int index
= find_class_constant (&state
->cpool
,
2167 TREE_TYPE (dst_type
));
2169 OP1 (OPCODE_checkcast
);
2173 else /* Convert numeric types. */
2175 int wide_src
= TYPE_PRECISION (src_type
) > 32;
2176 int wide_dst
= TYPE_PRECISION (dst_type
) > 32;
2177 NOTE_POP (1 + wide_src
);
2179 if (TREE_CODE (dst_type
) == REAL_TYPE
)
2181 if (TREE_CODE (src_type
) == REAL_TYPE
)
2182 OP1 (wide_dst
? OPCODE_f2d
: OPCODE_d2f
);
2183 else if (TYPE_PRECISION (src_type
) == 64)
2184 OP1 (OPCODE_l2f
+ wide_dst
);
2186 OP1 (OPCODE_i2f
+ wide_dst
);
2188 else /* Convert to integral type. */
2190 if (TREE_CODE (src_type
) == REAL_TYPE
)
2191 OP1 (OPCODE_f2i
+ wide_dst
+ 3 * wide_src
);
2196 if (TYPE_PRECISION (dst_type
) < 32)
2199 /* Already converted to int, if needed. */
2200 if (TYPE_PRECISION (dst_type
) <= 8)
2202 else if (TREE_UNSIGNED (dst_type
))
2208 NOTE_PUSH (1 + wide_dst
);
2215 tree try_clause
= TREE_OPERAND (exp
, 0);
2216 struct jcf_block
*start_label
= get_jcf_label_here (state
);
2217 struct jcf_block
*end_label
; /* End of try clause. */
2218 struct jcf_block
*finished_label
= gen_jcf_label (state
);
2219 tree clause
= TREE_OPERAND (exp
, 1);
2220 if (target
!= IGNORE_TARGET
)
2222 generate_bytecode_insns (try_clause
, IGNORE_TARGET
, state
);
2223 end_label
= get_jcf_label_here (state
);
2224 if (end_label
== start_label
)
2226 if (CAN_COMPLETE_NORMALLY (try_clause
))
2227 emit_goto (finished_label
, state
);
2228 while (clause
!= NULL_TREE
)
2230 tree catch_clause
= TREE_OPERAND (clause
, 0);
2231 tree exception_decl
= BLOCK_EXPR_DECLS (catch_clause
);
2232 struct jcf_handler
*handler
= alloc_handler (start_label
,
2234 if (exception_decl
== NULL_TREE
)
2235 handler
->type
= NULL_TREE
;
2237 handler
->type
= TREE_TYPE (TREE_TYPE (exception_decl
));
2238 generate_bytecode_insns (catch_clause
, IGNORE_TARGET
, state
);
2239 clause
= TREE_CHAIN (clause
);
2240 if (CAN_COMPLETE_NORMALLY (catch_clause
) && clause
!= NULL_TREE
)
2241 emit_goto (finished_label
, state
);
2243 define_jcf_label (finished_label
, state
);
2247 case TRY_FINALLY_EXPR
:
2249 struct jcf_block
*finished_label
= NULL
;
2250 struct jcf_block
*finally_label
, *start_label
, *end_label
;
2251 struct jcf_handler
*handler
;
2252 tree try_block
= TREE_OPERAND (exp
, 0);
2253 tree finally
= TREE_OPERAND (exp
, 1);
2254 tree return_link
= NULL_TREE
, exception_decl
= NULL_TREE
;
2256 tree exception_type
;
2258 finally_label
= gen_jcf_label (state
);
2259 start_label
= get_jcf_label_here (state
);
2260 /* If the `finally' clause can complete normally, we emit it
2261 as a subroutine and let the other clauses call it via
2262 `jsr'. If it can't complete normally, then we simply emit
2263 `goto's directly to it. */
2264 if (CAN_COMPLETE_NORMALLY (finally
))
2266 finally_label
->pc
= PENDING_CLEANUP_PC
;
2267 finally_label
->next
= state
->labeled_blocks
;
2268 state
->labeled_blocks
= finally_label
;
2269 state
->num_finalizers
++;
2272 generate_bytecode_insns (try_block
, target
, state
);
2274 if (CAN_COMPLETE_NORMALLY (finally
))
2276 if (state
->labeled_blocks
!= finally_label
)
2278 state
->labeled_blocks
= finally_label
->next
;
2280 end_label
= get_jcf_label_here (state
);
2282 if (end_label
== start_label
)
2284 state
->num_finalizers
--;
2285 define_jcf_label (finally_label
, state
);
2286 generate_bytecode_insns (finally
, IGNORE_TARGET
, state
);
2290 if (CAN_COMPLETE_NORMALLY (finally
))
2292 return_link
= build_decl (VAR_DECL
, NULL_TREE
,
2293 return_address_type_node
);
2294 finished_label
= gen_jcf_label (state
);
2297 if (CAN_COMPLETE_NORMALLY (try_block
))
2299 if (CAN_COMPLETE_NORMALLY (finally
))
2301 emit_jsr (finally_label
, state
);
2302 emit_goto (finished_label
, state
);
2305 emit_goto (finally_label
, state
);
2308 /* Handle exceptions. */
2310 exception_type
= build_pointer_type (throwable_type_node
);
2311 if (CAN_COMPLETE_NORMALLY (finally
))
2313 /* We're going to generate a subroutine, so we'll need to
2314 save and restore the exception around the `jsr'. */
2315 exception_decl
= build_decl (VAR_DECL
, NULL_TREE
, exception_type
);
2316 localvar_alloc (return_link
, state
);
2318 handler
= alloc_handler (start_label
, end_label
, state
);
2319 handler
->type
= NULL_TREE
;
2320 if (CAN_COMPLETE_NORMALLY (finally
))
2322 localvar_alloc (exception_decl
, state
);
2324 emit_store (exception_decl
, state
);
2325 emit_jsr (finally_label
, state
);
2326 emit_load (exception_decl
, state
);
2328 OP1 (OPCODE_athrow
);
2333 /* We're not generating a subroutine. In this case we can
2334 simply have the exception handler pop the exception and
2335 then fall through to the `finally' block. */
2337 emit_pop (1, state
);
2341 /* The finally block. If we're generating a subroutine, first
2342 save return PC into return_link. Otherwise, just generate
2343 the code for the `finally' block. */
2344 define_jcf_label (finally_label
, state
);
2345 if (CAN_COMPLETE_NORMALLY (finally
))
2348 emit_store (return_link
, state
);
2351 generate_bytecode_insns (finally
, IGNORE_TARGET
, state
);
2352 if (CAN_COMPLETE_NORMALLY (finally
))
2354 maybe_wide (OPCODE_ret
, DECL_LOCAL_INDEX (return_link
), state
);
2355 localvar_free (exception_decl
, state
);
2356 localvar_free (return_link
, state
);
2357 define_jcf_label (finished_label
, state
);
2362 generate_bytecode_insns (TREE_OPERAND (exp
, 0), STACK_TARGET
, state
);
2364 OP1 (OPCODE_athrow
);
2366 case NEW_ARRAY_INIT
:
2368 tree values
= CONSTRUCTOR_ELTS (TREE_OPERAND (exp
, 0));
2369 tree array_type
= TREE_TYPE (TREE_TYPE (exp
));
2370 tree element_type
= TYPE_ARRAY_ELEMENT (array_type
);
2371 HOST_WIDE_INT length
= java_array_type_length (array_type
);
2372 if (target
== IGNORE_TARGET
)
2374 for ( ; values
!= NULL_TREE
; values
= TREE_CHAIN (values
))
2375 generate_bytecode_insns (TREE_VALUE (values
), target
, state
);
2378 push_int_const (length
, state
);
2381 if (JPRIMITIVE_TYPE_P (element_type
))
2383 int atype
= encode_newarray_type (element_type
);
2384 OP1 (OPCODE_newarray
);
2389 int index
= find_class_constant (&state
->cpool
,
2390 TREE_TYPE (element_type
));
2391 OP1 (OPCODE_anewarray
);
2395 jopcode
= OPCODE_iastore
+ adjust_typed_op (element_type
, 7);
2396 for ( ; values
!= NULL_TREE
; values
= TREE_CHAIN (values
), offset
++)
2398 int save_SP
= state
->code_SP
;
2399 emit_dup (1, 0, state
);
2400 push_int_const (offset
, state
);
2402 generate_bytecode_insns (TREE_VALUE (values
), STACK_TARGET
, state
);
2405 state
->code_SP
= save_SP
;
2409 case JAVA_EXC_OBJ_EXPR
:
2410 NOTE_PUSH (1); /* Pushed by exception system. */
2412 case NEW_CLASS_EXPR
:
2414 tree
class = TREE_TYPE (TREE_TYPE (exp
));
2415 int need_result
= target
!= IGNORE_TARGET
;
2416 int index
= find_class_constant (&state
->cpool
, class);
2422 NOTE_PUSH (1 + need_result
);
2424 /* ... fall though ... */
2427 tree f
= TREE_OPERAND (exp
, 0);
2428 tree x
= TREE_OPERAND (exp
, 1);
2429 int save_SP
= state
->code_SP
;
2431 if (TREE_CODE (f
) == ADDR_EXPR
)
2432 f
= TREE_OPERAND (f
, 0);
2433 if (f
== soft_newarray_node
)
2435 int type_code
= TREE_INT_CST_LOW (TREE_VALUE (x
));
2436 generate_bytecode_insns (TREE_VALUE (TREE_CHAIN (x
)),
2437 STACK_TARGET
, state
);
2439 OP1 (OPCODE_newarray
);
2443 else if (f
== soft_multianewarray_node
)
2447 int index
= find_class_constant (&state
->cpool
,
2448 TREE_TYPE (TREE_TYPE (exp
)));
2449 x
= TREE_CHAIN (x
); /* Skip class argument. */
2450 ndims
= TREE_INT_CST_LOW (TREE_VALUE (x
));
2451 for (idim
= ndims
; --idim
>= 0; )
2454 generate_bytecode_insns (TREE_VALUE (x
), STACK_TARGET
, state
);
2457 OP1 (OPCODE_multianewarray
);
2462 else if (f
== soft_anewarray_node
)
2464 tree cl
= TYPE_ARRAY_ELEMENT (TREE_TYPE (TREE_TYPE (exp
)));
2465 int index
= find_class_constant (&state
->cpool
, TREE_TYPE (cl
));
2466 generate_bytecode_insns (TREE_VALUE (x
), STACK_TARGET
, state
);
2468 OP1 (OPCODE_anewarray
);
2472 else if (f
== soft_monitorenter_node
2473 || f
== soft_monitorexit_node
2476 if (f
== soft_monitorenter_node
)
2477 op
= OPCODE_monitorenter
;
2478 else if (f
== soft_monitorexit_node
)
2479 op
= OPCODE_monitorexit
;
2482 generate_bytecode_insns (TREE_VALUE (x
), STACK_TARGET
, state
);
2488 for ( ; x
!= NULL_TREE
; x
= TREE_CHAIN (x
))
2490 generate_bytecode_insns (TREE_VALUE (x
), STACK_TARGET
, state
);
2492 nargs
= state
->code_SP
- save_SP
;
2493 state
->code_SP
= save_SP
;
2494 if (f
== soft_fmod_node
)
2501 if (TREE_CODE (exp
) == NEW_CLASS_EXPR
)
2502 NOTE_POP (1); /* Pop implicit this. */
2503 if (TREE_CODE (f
) == FUNCTION_DECL
&& DECL_CONTEXT (f
) != NULL_TREE
)
2505 tree context
= DECL_CONTEXT (f
);
2506 int index
, interface
= 0;
2508 if (METHOD_STATIC (f
))
2509 OP1 (OPCODE_invokestatic
);
2510 else if (DECL_CONSTRUCTOR_P (f
) || CALL_USING_SUPER (exp
)
2511 || METHOD_PRIVATE (f
))
2512 OP1 (OPCODE_invokespecial
);
2515 if (CLASS_INTERFACE (TYPE_NAME (context
)))
2517 tree arg1
= TREE_VALUE (TREE_OPERAND (exp
, 1));
2518 context
= TREE_TYPE (TREE_TYPE (arg1
));
2519 if (CLASS_INTERFACE (TYPE_NAME (context
)))
2523 OP1 (OPCODE_invokeinterface
);
2525 OP1 (OPCODE_invokevirtual
);
2527 index
= find_methodref_with_class_index (&state
->cpool
, f
, context
);
2537 f
= TREE_TYPE (TREE_TYPE (f
));
2538 if (TREE_CODE (f
) != VOID_TYPE
)
2540 int size
= TYPE_IS_WIDE (f
) ? 2 : 1;
2541 if (target
== IGNORE_TARGET
)
2542 emit_pop (size
, state
);
2552 error("internal error in generate_bytecode_insn - tree code not implemented: %s",
2553 tree_code_name
[(int) TREE_CODE (exp
)]);
2558 perform_relocations (struct jcf_partial
*state
)
2560 struct jcf_block
*block
;
2561 struct jcf_relocation
*reloc
;
2565 /* Before we start, the pc field of each block is an upper bound on
2566 the block's start pc (it may be less, if previous blocks need less
2567 than their maximum).
2569 The minimum size of each block is in the block's chunk->size. */
2571 /* First, figure out the actual locations of each block. */
2574 for (block
= state
->blocks
; block
!= NULL
; block
= block
->next
)
2576 int block_size
= block
->v
.chunk
->size
;
2580 /* Optimize GOTO L; L: by getting rid of the redundant goto.
2581 Assumes relocations are in reverse order. */
2582 reloc
= block
->u
.relocations
;
2583 while (reloc
!= NULL
2584 && reloc
->kind
== OPCODE_goto_w
2585 && reloc
->label
->pc
== block
->next
->pc
2586 && reloc
->offset
+ 2 == block_size
)
2588 reloc
= reloc
->next
;
2589 block
->u
.relocations
= reloc
;
2590 block
->v
.chunk
->size
-= 3;
2595 /* Optimize GOTO L; ... L: GOTO X by changing the first goto to
2596 jump directly to X. We're careful here to avoid an infinite
2597 loop if the `goto's themselves form one. We do this
2598 optimization because we can generate a goto-to-goto for some
2599 try/finally blocks. */
2600 while (reloc
!= NULL
2601 && reloc
->kind
== OPCODE_goto_w
2602 && reloc
->label
!= block
2603 && reloc
->label
->v
.chunk
->data
!= NULL
2604 && reloc
->label
->v
.chunk
->data
[0] == OPCODE_goto
)
2606 /* Find the reloc for the first instruction of the
2607 destination block. */
2608 struct jcf_relocation
*first_reloc
;
2609 for (first_reloc
= reloc
->label
->u
.relocations
;
2611 first_reloc
= first_reloc
->next
)
2613 if (first_reloc
->offset
== 1
2614 && first_reloc
->kind
== OPCODE_goto_w
)
2616 reloc
->label
= first_reloc
->label
;
2621 /* If we didn't do anything, exit the loop. */
2622 if (first_reloc
== NULL
)
2626 for (reloc
= block
->u
.relocations
; reloc
!= NULL
; reloc
= reloc
->next
)
2628 if (reloc
->kind
== SWITCH_ALIGN_RELOC
)
2630 /* We assume this is the first relocation in this block,
2631 so we know its final pc. */
2632 int where
= pc
+ reloc
->offset
;
2633 int pad
= ((where
+ 3) & ~3) - where
;
2636 else if (reloc
->kind
< -1 || reloc
->kind
> BLOCK_START_RELOC
)
2638 int delta
= reloc
->label
->pc
- (pc
+ reloc
->offset
- 1);
2639 int expand
= reloc
->kind
> 0 ? 2 : 5;
2643 if (delta
>= -32768 && delta
<= 32767)
2649 block_size
+= expand
;
2655 for (block
= state
->blocks
; block
!= NULL
; block
= block
->next
)
2657 struct chunk
*chunk
= block
->v
.chunk
;
2658 int old_size
= chunk
->size
;
2659 int next_pc
= block
->next
== NULL
? pc
: block
->next
->pc
;
2660 int new_size
= next_pc
- block
->pc
;
2661 unsigned char *new_ptr
;
2662 unsigned char *old_buffer
= chunk
->data
;
2663 unsigned char *old_ptr
= old_buffer
+ old_size
;
2664 if (new_size
!= old_size
)
2666 chunk
->data
= (unsigned char *)
2667 obstack_alloc (state
->chunk_obstack
, new_size
);
2668 chunk
->size
= new_size
;
2670 new_ptr
= chunk
->data
+ new_size
;
2672 /* We do the relocations from back to front, because
2673 the relocations are in reverse order. */
2674 for (reloc
= block
->u
.relocations
; ; reloc
= reloc
->next
)
2676 /* new_ptr and old_ptr point into the old and new buffers,
2677 respectively. (If no relocations cause the buffer to
2678 grow, the buffer will be the same buffer, and new_ptr==old_ptr.)
2679 The bytes at higher address have been copied and relocations
2680 handled; those at lower addresses remain to process. */
2682 /* Lower old index of piece to be copied with no relocation.
2683 I.e. high index of the first piece that does need relocation. */
2684 int start
= reloc
== NULL
? 0
2685 : reloc
->kind
== SWITCH_ALIGN_RELOC
? reloc
->offset
2686 : (reloc
->kind
== 0 || reloc
->kind
== BLOCK_START_RELOC
)
2688 : reloc
->offset
+ 2;
2691 int n
= (old_ptr
- old_buffer
) - start
;
2695 memcpy (new_ptr
, old_ptr
, n
);
2696 if (old_ptr
== old_buffer
)
2699 new_offset
= new_ptr
- chunk
->data
;
2700 new_offset
-= (reloc
->kind
== -1 ? 2 : 4);
2701 if (reloc
->kind
== 0)
2704 value
= GET_u4 (old_ptr
);
2706 else if (reloc
->kind
== BLOCK_START_RELOC
)
2712 else if (reloc
->kind
== SWITCH_ALIGN_RELOC
)
2714 int where
= block
->pc
+ reloc
->offset
;
2715 int pad
= ((where
+ 3) & ~3) - where
;
2723 value
= GET_u2 (old_ptr
);
2725 value
+= reloc
->label
->pc
- (block
->pc
+ new_offset
);
2726 *--new_ptr
= (unsigned char) value
; value
>>= 8;
2727 *--new_ptr
= (unsigned char) value
; value
>>= 8;
2728 if (reloc
->kind
!= -1)
2730 *--new_ptr
= (unsigned char) value
; value
>>= 8;
2731 *--new_ptr
= (unsigned char) value
;
2733 if (reloc
->kind
> BLOCK_START_RELOC
)
2735 /* Convert: OP TARGET to: OP_w TARGET; (OP is goto or jsr). */
2737 *--new_ptr
= reloc
->kind
;
2739 else if (reloc
->kind
< -1)
2741 /* Convert: ifCOND TARGET to: ifNCOND T; goto_w TARGET; T: */
2743 *--new_ptr
= OPCODE_goto_w
;
2746 *--new_ptr
= - reloc
->kind
;
2749 if (new_ptr
!= chunk
->data
)
2752 state
->code_length
= pc
;
2756 init_jcf_state (struct jcf_partial
*state
, struct obstack
*work
)
2758 state
->chunk_obstack
= work
;
2759 state
->first
= state
->chunk
= NULL
;
2760 CPOOL_INIT (&state
->cpool
);
2761 BUFFER_INIT (&state
->localvars
);
2762 BUFFER_INIT (&state
->bytecode
);
2766 init_jcf_method (struct jcf_partial
*state
, tree method
)
2768 state
->current_method
= method
;
2769 state
->blocks
= state
->last_block
= NULL
;
2770 state
->linenumber_count
= 0;
2771 state
->first_lvar
= state
->last_lvar
= NULL
;
2772 state
->lvar_count
= 0;
2773 state
->labeled_blocks
= NULL
;
2774 state
->code_length
= 0;
2775 BUFFER_RESET (&state
->bytecode
);
2776 BUFFER_RESET (&state
->localvars
);
2778 state
->code_SP_max
= 0;
2779 state
->handlers
= NULL
;
2780 state
->last_handler
= NULL
;
2781 state
->num_handlers
= 0;
2782 state
->num_finalizers
= 0;
2783 state
->return_value_decl
= NULL_TREE
;
2787 release_jcf_state (struct jcf_partial
*state
)
2789 CPOOL_FINISH (&state
->cpool
);
2790 obstack_free (state
->chunk_obstack
, state
->first
);
2793 /* Generate and return a list of chunks containing the class CLAS
2794 in the .class file representation. The list can be written to a
2795 .class file using write_chunks. Allocate chunks from obstack WORK. */
2797 static GTY(()) tree SourceFile_node
;
2798 static struct chunk
*
2799 generate_classfile (tree clas
, struct jcf_partial
*state
)
2801 struct chunk
*cpool_chunk
;
2802 const char *source_file
, *s
;
2805 char *fields_count_ptr
;
2806 int fields_count
= 0;
2807 char *methods_count_ptr
;
2808 int methods_count
= 0;
2811 = clas
== object_type_node
? 0
2812 : TREE_VEC_LENGTH (TYPE_BINFO_BASETYPES (clas
));
2814 ptr
= append_chunk (NULL
, 8, state
);
2815 PUT4 (0xCafeBabe); /* Magic number */
2816 PUT2 (3); /* Minor version */
2817 PUT2 (45); /* Major version */
2819 append_chunk (NULL
, 0, state
);
2820 cpool_chunk
= state
->chunk
;
2822 /* Next allocate the chunk containing acces_flags through fields_counr. */
2823 if (clas
== object_type_node
)
2826 i
= 8 + 2 * total_supers
;
2827 ptr
= append_chunk (NULL
, i
, state
);
2828 i
= get_access_flags (TYPE_NAME (clas
));
2829 if (! (i
& ACC_INTERFACE
))
2831 PUT2 (i
); /* acces_flags */
2832 i
= find_class_constant (&state
->cpool
, clas
); PUT2 (i
); /* this_class */
2833 if (clas
== object_type_node
)
2835 PUT2(0); /* super_class */
2836 PUT2(0); /* interfaces_count */
2840 tree basetypes
= TYPE_BINFO_BASETYPES (clas
);
2841 tree base
= BINFO_TYPE (TREE_VEC_ELT (basetypes
, 0));
2842 int j
= find_class_constant (&state
->cpool
, base
);
2843 PUT2 (j
); /* super_class */
2844 PUT2 (total_supers
- 1); /* interfaces_count */
2845 for (i
= 1; i
< total_supers
; i
++)
2847 base
= BINFO_TYPE (TREE_VEC_ELT (basetypes
, i
));
2848 j
= find_class_constant (&state
->cpool
, base
);
2852 fields_count_ptr
= ptr
;
2854 for (part
= TYPE_FIELDS (clas
); part
; part
= TREE_CHAIN (part
))
2856 int have_value
, attr_count
= 0;
2857 if (DECL_NAME (part
) == NULL_TREE
|| DECL_ARTIFICIAL (part
))
2859 ptr
= append_chunk (NULL
, 8, state
);
2860 i
= get_access_flags (part
); PUT2 (i
);
2861 i
= find_utf8_constant (&state
->cpool
, DECL_NAME (part
)); PUT2 (i
);
2862 i
= find_utf8_constant (&state
->cpool
,
2863 build_java_signature (TREE_TYPE (part
)));
2865 have_value
= DECL_INITIAL (part
) != NULL_TREE
2866 && FIELD_STATIC (part
) && CONSTANT_VALUE_P (DECL_INITIAL (part
))
2867 && FIELD_FINAL (part
)
2868 && (JPRIMITIVE_TYPE_P (TREE_TYPE (part
))
2869 || TREE_TYPE (part
) == string_ptr_type_node
);
2873 if (FIELD_THISN (part
) || FIELD_LOCAL_ALIAS (part
) || FIELD_SYNTHETIC (part
))
2876 PUT2 (attr_count
); /* attributes_count */
2879 tree init
= DECL_INITIAL (part
);
2880 static tree ConstantValue_node
= NULL_TREE
;
2881 if (TREE_TYPE (part
) != TREE_TYPE (init
))
2882 fatal_error ("field initializer type mismatch");
2883 ptr
= append_chunk (NULL
, 8, state
);
2884 if (ConstantValue_node
== NULL_TREE
)
2885 ConstantValue_node
= get_identifier ("ConstantValue");
2886 i
= find_utf8_constant (&state
->cpool
, ConstantValue_node
);
2887 PUT2 (i
); /* attribute_name_index */
2888 PUT4 (2); /* attribute_length */
2889 i
= find_constant_index (init
, state
); PUT2 (i
);
2891 /* Emit the "Synthetic" attribute for val$<x> and this$<n>
2892 fields and other fields which need it. */
2893 if (FIELD_THISN (part
) || FIELD_LOCAL_ALIAS (part
)
2894 || FIELD_SYNTHETIC (part
))
2895 ptr
= append_synthetic_attribute (state
);
2898 ptr
= fields_count_ptr
; UNSAFE_PUT2 (fields_count
);
2900 ptr
= methods_count_ptr
= append_chunk (NULL
, 2, state
);
2903 for (part
= TYPE_METHODS (clas
); part
; part
= TREE_CHAIN (part
))
2905 struct jcf_block
*block
;
2906 tree function_body
= DECL_FUNCTION_BODY (part
);
2907 tree body
= function_body
== NULL_TREE
? NULL_TREE
2908 : BLOCK_EXPR_BODY (function_body
);
2909 tree name
= DECL_CONSTRUCTOR_P (part
) ? init_identifier_node
2911 tree type
= TREE_TYPE (part
);
2912 tree save_function
= current_function_decl
;
2913 int synthetic_p
= 0;
2914 current_function_decl
= part
;
2915 ptr
= append_chunk (NULL
, 8, state
);
2916 i
= get_access_flags (part
); PUT2 (i
);
2917 i
= find_utf8_constant (&state
->cpool
, name
); PUT2 (i
);
2918 i
= find_utf8_constant (&state
->cpool
, build_java_signature (type
));
2920 i
= (body
!= NULL_TREE
) + (DECL_FUNCTION_THROWS (part
) != NULL_TREE
);
2922 /* Make room for the Synthetic attribute (of zero length.) */
2923 if (DECL_FINIT_P (part
)
2924 || DECL_INSTINIT_P (part
)
2925 || OUTER_FIELD_ACCESS_IDENTIFIER_P (DECL_NAME (part
))
2926 || TYPE_DOT_CLASS (clas
) == part
)
2932 PUT2 (i
); /* attributes_count */
2935 ptr
= append_synthetic_attribute (state
);
2937 if (body
!= NULL_TREE
)
2939 int code_attributes_count
= 0;
2940 static tree Code_node
= NULL_TREE
;
2943 struct jcf_handler
*handler
;
2944 if (Code_node
== NULL_TREE
)
2945 Code_node
= get_identifier ("Code");
2946 ptr
= append_chunk (NULL
, 14, state
);
2947 i
= find_utf8_constant (&state
->cpool
, Code_node
); PUT2 (i
);
2949 init_jcf_method (state
, part
);
2950 get_jcf_label_here (state
); /* Force a first block. */
2951 for (t
= DECL_ARGUMENTS (part
); t
!= NULL_TREE
; t
= TREE_CHAIN (t
))
2952 localvar_alloc (t
, state
);
2953 generate_bytecode_insns (body
, IGNORE_TARGET
, state
);
2954 if (CAN_COMPLETE_NORMALLY (body
))
2956 if (TREE_CODE (TREE_TYPE (type
)) != VOID_TYPE
)
2959 OP1 (OPCODE_return
);
2961 for (t
= DECL_ARGUMENTS (part
); t
!= NULL_TREE
; t
= TREE_CHAIN (t
))
2962 localvar_free (t
, state
);
2963 if (state
->return_value_decl
!= NULL_TREE
)
2964 localvar_free (state
->return_value_decl
, state
);
2965 finish_jcf_block (state
);
2966 perform_relocations (state
);
2969 i
= 8 + state
->code_length
+ 4 + 8 * state
->num_handlers
;
2970 if (state
->linenumber_count
> 0)
2972 code_attributes_count
++;
2973 i
+= 8 + 4 * state
->linenumber_count
;
2975 if (state
->lvar_count
> 0)
2977 code_attributes_count
++;
2978 i
+= 8 + 10 * state
->lvar_count
;
2980 UNSAFE_PUT4 (i
); /* attribute_length */
2981 UNSAFE_PUT2 (state
->code_SP_max
); /* max_stack */
2982 UNSAFE_PUT2 (localvar_max
); /* max_locals */
2983 UNSAFE_PUT4 (state
->code_length
);
2985 /* Emit the exception table. */
2986 ptr
= append_chunk (NULL
, 2 + 8 * state
->num_handlers
, state
);
2987 PUT2 (state
->num_handlers
); /* exception_table_length */
2988 handler
= state
->handlers
;
2989 for (; handler
!= NULL
; handler
= handler
->next
)
2992 PUT2 (handler
->start_label
->pc
);
2993 PUT2 (handler
->end_label
->pc
);
2994 PUT2 (handler
->handler_label
->pc
);
2995 if (handler
->type
== NULL_TREE
)
2998 type_index
= find_class_constant (&state
->cpool
,
3003 ptr
= append_chunk (NULL
, 2, state
);
3004 PUT2 (code_attributes_count
);
3006 /* Write the LineNumberTable attribute. */
3007 if (state
->linenumber_count
> 0)
3009 static tree LineNumberTable_node
= NULL_TREE
;
3010 ptr
= append_chunk (NULL
,
3011 8 + 4 * state
->linenumber_count
, state
);
3012 if (LineNumberTable_node
== NULL_TREE
)
3013 LineNumberTable_node
= get_identifier ("LineNumberTable");
3014 i
= find_utf8_constant (&state
->cpool
, LineNumberTable_node
);
3015 PUT2 (i
); /* attribute_name_index */
3016 i
= 2+4*state
->linenumber_count
; PUT4(i
); /* attribute_length */
3017 i
= state
->linenumber_count
; PUT2 (i
);
3018 for (block
= state
->blocks
; block
!= NULL
; block
= block
->next
)
3020 int line
= block
->linenumber
;
3029 /* Write the LocalVariableTable attribute. */
3030 if (state
->lvar_count
> 0)
3032 static tree LocalVariableTable_node
= NULL_TREE
;
3033 struct localvar_info
*lvar
= state
->first_lvar
;
3034 ptr
= append_chunk (NULL
, 8 + 10 * state
->lvar_count
, state
);
3035 if (LocalVariableTable_node
== NULL_TREE
)
3036 LocalVariableTable_node
= get_identifier("LocalVariableTable");
3037 i
= find_utf8_constant (&state
->cpool
, LocalVariableTable_node
);
3038 PUT2 (i
); /* attribute_name_index */
3039 i
= 2 + 10 * state
->lvar_count
; PUT4 (i
); /* attribute_length */
3040 i
= state
->lvar_count
; PUT2 (i
);
3041 for ( ; lvar
!= NULL
; lvar
= lvar
->next
)
3043 tree name
= DECL_NAME (lvar
->decl
);
3044 tree sig
= build_java_signature (TREE_TYPE (lvar
->decl
));
3045 i
= lvar
->start_label
->pc
; PUT2 (i
);
3046 i
= lvar
->end_label
->pc
- i
; PUT2 (i
);
3047 i
= find_utf8_constant (&state
->cpool
, name
); PUT2 (i
);
3048 i
= find_utf8_constant (&state
->cpool
, sig
); PUT2 (i
);
3049 i
= DECL_LOCAL_INDEX (lvar
->decl
); PUT2 (i
);
3053 if (DECL_FUNCTION_THROWS (part
) != NULL_TREE
)
3055 tree t
= DECL_FUNCTION_THROWS (part
);
3056 int throws_count
= list_length (t
);
3057 static tree Exceptions_node
= NULL_TREE
;
3058 if (Exceptions_node
== NULL_TREE
)
3059 Exceptions_node
= get_identifier ("Exceptions");
3060 ptr
= append_chunk (NULL
, 8 + 2 * throws_count
, state
);
3061 i
= find_utf8_constant (&state
->cpool
, Exceptions_node
);
3062 PUT2 (i
); /* attribute_name_index */
3063 i
= 2 + 2 * throws_count
; PUT4(i
); /* attribute_length */
3064 i
= throws_count
; PUT2 (i
);
3065 for (; t
!= NULL_TREE
; t
= TREE_CHAIN (t
))
3067 i
= find_class_constant (&state
->cpool
, TREE_VALUE (t
));
3072 current_function_decl
= save_function
;
3074 ptr
= methods_count_ptr
; UNSAFE_PUT2 (methods_count
);
3076 source_file
= DECL_SOURCE_FILE (TYPE_NAME (clas
));
3077 for (s
= source_file
; ; s
++)
3082 if (ch
== '/' || ch
== '\\')
3085 ptr
= append_chunk (NULL
, 10, state
);
3087 i
= 1; /* Source file always exists as an attribute */
3088 if (INNER_CLASS_TYPE_P (clas
) || DECL_INNER_CLASS_LIST (TYPE_NAME (clas
)))
3090 if (clas
== object_type_node
)
3092 PUT2 (i
); /* attributes_count */
3094 /* generate the SourceFile attribute. */
3095 if (SourceFile_node
== NULL_TREE
)
3097 SourceFile_node
= get_identifier ("SourceFile");
3100 i
= find_utf8_constant (&state
->cpool
, SourceFile_node
);
3101 PUT2 (i
); /* attribute_name_index */
3103 i
= find_utf8_constant (&state
->cpool
, get_identifier (source_file
));
3105 append_gcj_attribute (state
, clas
);
3106 append_innerclasses_attribute (state
, clas
);
3108 /* New finally generate the contents of the constant pool chunk. */
3109 i
= count_constant_pool_bytes (&state
->cpool
);
3110 ptr
= obstack_alloc (state
->chunk_obstack
, i
);
3111 cpool_chunk
->data
= ptr
;
3112 cpool_chunk
->size
= i
;
3113 write_constant_pool (&state
->cpool
, ptr
, i
);
3114 return state
->first
;
3117 static GTY(()) tree Synthetic_node
;
3118 static unsigned char *
3119 append_synthetic_attribute (struct jcf_partial
*state
)
3121 unsigned char *ptr
= append_chunk (NULL
, 6, state
);
3124 if (Synthetic_node
== NULL_TREE
)
3126 Synthetic_node
= get_identifier ("Synthetic");
3128 i
= find_utf8_constant (&state
->cpool
, Synthetic_node
);
3129 PUT2 (i
); /* Attribute string index */
3130 PUT4 (0); /* Attribute length */
3136 append_gcj_attribute (struct jcf_partial
*state
, tree
class)
3141 if (class != object_type_node
)
3144 ptr
= append_chunk (NULL
, 6, state
); /* 2+4 */
3145 i
= find_utf8_constant (&state
->cpool
,
3146 get_identifier ("gnu.gcj.gcj-compiled"));
3147 PUT2 (i
); /* Attribute string index */
3148 PUT4 (0); /* Attribute length */
3151 static tree InnerClasses_node
;
3153 append_innerclasses_attribute (struct jcf_partial
*state
, tree
class)
3155 tree orig_decl
= TYPE_NAME (class);
3158 unsigned char *ptr
, *length_marker
, *number_marker
;
3160 if (!INNER_CLASS_TYPE_P (class) && !DECL_INNER_CLASS_LIST (orig_decl
))
3163 ptr
= append_chunk (NULL
, 8, state
); /* 2+4+2 */
3165 if (InnerClasses_node
== NULL_TREE
)
3167 InnerClasses_node
= get_identifier ("InnerClasses");
3169 i
= find_utf8_constant (&state
->cpool
, InnerClasses_node
);
3171 length_marker
= ptr
; PUT4 (0); /* length, to be later patched */
3172 number_marker
= ptr
; PUT2 (0); /* number of classes, tblp */
3174 /* Generate the entries: all inner classes visible from the one we
3175 process: itself, up and down. */
3176 while (class && INNER_CLASS_TYPE_P (class))
3180 decl
= TYPE_NAME (class);
3181 n
= IDENTIFIER_POINTER (DECL_NAME (decl
)) +
3182 IDENTIFIER_LENGTH (DECL_NAME (decl
));
3184 while (n
[-1] != '$')
3186 append_innerclasses_attribute_entry (state
, decl
, get_identifier (n
));
3189 class = TREE_TYPE (DECL_CONTEXT (TYPE_NAME (class)));
3193 for (current
= DECL_INNER_CLASS_LIST (decl
);
3194 current
; current
= TREE_CHAIN (current
))
3196 append_innerclasses_attribute_entry (state
, TREE_PURPOSE (current
),
3197 TREE_VALUE (current
));
3201 ptr
= length_marker
; PUT4 (8*length
+2);
3202 ptr
= number_marker
; PUT2 (length
);
3206 append_innerclasses_attribute_entry (struct jcf_partial
*state
,
3207 tree decl
, tree name
)
3210 int ocii
= 0, ini
= 0;
3211 unsigned char *ptr
= append_chunk (NULL
, 8, state
);
3213 icii
= find_class_constant (&state
->cpool
, TREE_TYPE (decl
));
3215 /* Sun's implementation seems to generate ocii to 0 for inner
3216 classes (which aren't considered members of the class they're
3217 in.) The specs are saying that if the class is anonymous,
3218 inner_name_index must be zero. */
3219 if (!ANONYMOUS_CLASS_P (TREE_TYPE (decl
)))
3221 ocii
= find_class_constant (&state
->cpool
,
3222 TREE_TYPE (DECL_CONTEXT (decl
)));
3223 ini
= find_utf8_constant (&state
->cpool
, name
);
3225 icaf
= get_access_flags (decl
);
3227 PUT2 (icii
); PUT2 (ocii
); PUT2 (ini
); PUT2 (icaf
);
3231 make_class_file_name (tree clas
)
3233 const char *dname
, *cname
, *slash
;
3237 cname
= IDENTIFIER_POINTER (identifier_subst (DECL_NAME (TYPE_NAME (clas
)),
3238 "", '.', DIR_SEPARATOR
,
3240 if (jcf_write_base_directory
== NULL
)
3242 /* Make sure we put the class file into the .java file's
3243 directory, and not into some subdirectory thereof. */
3245 dname
= DECL_SOURCE_FILE (TYPE_NAME (clas
));
3246 slash
= strrchr (dname
, DIR_SEPARATOR
);
3252 t
= strrchr (cname
, DIR_SEPARATOR
);
3258 dname
= jcf_write_base_directory
;
3259 slash
= dname
+ strlen (dname
);
3262 r
= xmalloc (slash
- dname
+ strlen (cname
) + 2);
3263 strncpy (r
, dname
, slash
- dname
);
3264 r
[slash
- dname
] = DIR_SEPARATOR
;
3265 strcpy (&r
[slash
- dname
+ 1], cname
);
3267 /* We try to make new directories when we need them. We only do
3268 this for directories which "might not" exist. For instance, we
3269 assume the `-d' directory exists, but we don't assume that any
3270 subdirectory below it exists. It might be worthwhile to keep
3271 track of which directories we've created to avoid gratuitous
3273 dname
= r
+ (slash
- dname
) + 1;
3276 char *s
= strchr (dname
, DIR_SEPARATOR
);
3280 if (stat (r
, &sb
) == -1
3281 /* Try to make it. */
3282 && mkdir (r
, 0755) == -1)
3283 fatal_io_error ("can't create directory %s", r
);
3286 /* Skip consecutive separators. */
3287 for (dname
= s
+ 1; *dname
&& *dname
== DIR_SEPARATOR
; ++dname
)
3294 /* Write out the contens of a class (RECORD_TYPE) CLAS, as a .class file.
3295 The output .class file name is make_class_file_name(CLAS). */
3298 write_classfile (tree clas
)
3300 struct obstack
*work
= &temporary_obstack
;
3301 struct jcf_partial state
[1];
3302 char *class_file_name
= make_class_file_name (clas
);
3303 struct chunk
*chunks
;
3305 if (class_file_name
!= NULL
)
3308 char *temporary_file_name
;
3310 /* The .class file is initially written to a ".tmp" file so that
3311 if multiple instances of the compiler are running at once
3312 they do not see partially formed class files. */
3313 temporary_file_name
= concat (class_file_name
, ".tmp", NULL
);
3314 stream
= fopen (temporary_file_name
, "wb");
3316 fatal_io_error ("can't open %s for writing", temporary_file_name
);
3318 jcf_dependency_add_target (class_file_name
);
3319 init_jcf_state (state
, work
);
3320 chunks
= generate_classfile (clas
, state
);
3321 write_chunks (stream
, chunks
);
3322 if (fclose (stream
))
3323 fatal_io_error ("error closing %s", temporary_file_name
);
3325 /* If a file named by the string pointed to by `new' exists
3326 prior to the call to the `rename' function, the bahaviour
3327 is implementation-defined. ISO 9899-1990 7.9.4.2.
3329 For example, on Win32 with MSVCRT, it is an error. */
3331 unlink (class_file_name
);
3333 if (rename (temporary_file_name
, class_file_name
) == -1)
3335 remove (temporary_file_name
);
3336 fatal_io_error ("can't create %s", class_file_name
);
3338 free (temporary_file_name
);
3339 free (class_file_name
);
3341 release_jcf_state (state
);
3345 string concatenation
3346 synchronized statement
3349 #include "gt-java-jcf-write.h"