/* Write out a Java(TM) class file.
   Copyright (C) 1998, 1999 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.

Java and all Java-based marks are trademarks or registered trademarks
of Sun Microsystems, Inc. in the United States and other countries.
The Free Software Foundation is independent of Sun Microsystems, Inc.  */
#include "java-tree.h"
#include "java-opcodes.h"
#include "parse.h" /* for BLOCK_EXPR_BODY */

#define DIR_SEPARATOR '/'
extern struct obstack temporary_obstack;
/* Base directory in which `.class' files should be written.
   NULL means to put the file into the same directory as the
   corresponding .java file.  */
char *jcf_write_base_directory = NULL;
/* Make sure bytecode.data is big enough for at least N more bytes.  */

#define RESERVE(N) \
  do { CHECK_OP(state); \
    if (state->bytecode.ptr + (N) > state->bytecode.limit) \
      buffer_grow (&state->bytecode, N); } while (0)
/* Add a 1-byte instruction/operand I to bytecode.data,
   assuming space has already been RESERVE'd.  */

#define OP1(I) (*state->bytecode.ptr++ = (I), CHECK_OP(state))

/* Like OP1, but I is a 2-byte big endian integer.  */

#define OP2(I) \
  do { int _i = (I); OP1 (_i >> 8); OP1 (_i); CHECK_OP(state); } while (0)
/* Like OP1, but I is a 4-byte big endian integer.  */

#define OP4(I) \
  do { int _i = (I); OP1 (_i >> 24); OP1 (_i >> 16); \
       OP1 (_i >> 8); OP1 (_i); CHECK_OP(state); } while (0)
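/* For example, OP2 (0x1234) emits the byte 0x12 followed by 0x34, and
   OP4 (0x12345678) emits 0x12 0x34 0x56 0x78, matching the big-endian
   operand encoding used throughout JVM bytecode.  */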
/* Macro to call each time we push I words on the JVM stack.  */

#define NOTE_PUSH(I) \
  do { state->code_SP += (I); \
    if (state->code_SP > state->code_SP_max) \
      state->code_SP_max = state->code_SP; } while (0)

/* Macro to call each time we pop I words from the JVM stack.  */

#define NOTE_POP(I) \
  do { state->code_SP -= (I); if (state->code_SP < 0) abort(); } while (0)
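/* For illustration: state->code_SP tracks the current operand-stack depth
   in words, and state->code_SP_max records the high-water mark that later
   becomes the method's max_stack.  E.g. NOTE_PUSH (1) for an int followed
   by NOTE_PUSH (2) for a long brings code_SP_max to at least 3.  */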
/* A chunk or segment of a .class file.  */

struct chunk
{
  /* The next segment of this .class file.  */
  struct chunk *next;

  /* The actual data in this segment to be written to the .class file.  */
  unsigned char *data;

  /* The size of the segment to be written to the .class file.  */
  int size;
};

#define PENDING_CLEANUP_PC (-3)
#define PENDING_EXIT_PC (-2)
#define UNDEFINED_PC (-1)
/* Each "block" represents a label plus the bytecode instructions following.
   There may be branches out of the block, but no incoming jumps, except
   to the beginning of the block.

   If (pc < 0), the jcf_block is not an actual block (i.e. it has no
   associated code yet), but it is an undefined label.  */

struct jcf_block
{
  /* For blocks that are defined, the next block (in pc order).
     For blocks that are the not-yet-defined end label of a LABELED_BLOCK_EXPR
     or a cleanup expression (from a WITH_CLEANUP_EXPR),
     this is the next (outer) such end label, in a stack headed by
     labeled_blocks in jcf_partial.  */
  struct jcf_block *next;
  /* In the not-yet-defined end label for an unfinished EXIT_BLOCK_EXPR,
     pc is PENDING_EXIT_PC.
     In the not-yet-defined end label for a pending cleanup subroutine,
     pc is PENDING_CLEANUP_PC.
     For other not-yet-defined labels, pc is UNDEFINED_PC.

     If the label has been defined:
     Until perform_relocations is finished, this is the maximum possible
     value of the bytecode offset at the beginning of this block.
     After perform_relocations, it is the actual offset (pc).  */
  int pc;
  /* After finish_jcf_block is called, the actual instructions
     contained in this block.  Before then it is NULL, and the
     instructions are in state->bytecode.  */
  /* If pc==PENDING_CLEANUP_PC, start_label is the start of the region
     covered by the cleanup.  */
  struct jcf_block *start_label;
  /* Set of relocations (in reverse offset order) for this block.  */
  struct jcf_relocation *relocations;
  /* If this block is that of the not-yet-defined end label of
     a LABELED_BLOCK_EXPR, where LABELED_BLOCK is that LABELED_BLOCK_EXPR.
     If pc==PENDING_CLEANUP_PC, the cleanup that needs to be run.  */
  tree labeled_block;
/* A "relocation" type for the 0-3 bytes of padding at the start
   of a tableswitch or a lookupswitch.  */
#define SWITCH_ALIGN_RELOC 4

/* A relocation type for the labels in a tableswitch or a lookupswitch;
   these are relative to the start of the instruction, but (due to
   the 0-3 bytes of padding), we don't know the offset before relocation.  */
#define BLOCK_START_RELOC 1
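/* For illustration: the JVM requires the 4-byte operands of tableswitch
   and lookupswitch to begin at an offset that is a multiple of 4 from the
   start of the method's code.  E.g. if the switch opcode itself lands at
   bytecode offset 5, two padding bytes are inserted so its operands start
   at offset 8.  Since a block's final pc is only known after
   perform_relocations, the padding is recorded as a SWITCH_ALIGN_RELOC
   and filled in then.  */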
struct jcf_relocation
{
  /* Next relocation for the current jcf_block.  */
  struct jcf_relocation *next;

  /* The (byte) offset within the current block that needs to be relocated.  */
  HOST_WIDE_INT offset;
  /* 0 if offset is a 4-byte relative offset.
     4 (SWITCH_ALIGN_RELOC) if offset points to 0-3 padding bytes inserted
     for proper alignment in tableswitch/lookupswitch instructions.
     1 (BLOCK_START_RELOC) if offset points to a 4-byte offset relative
     to the start of the containing block.
     -1 if offset is a 2-byte relative offset.
     < -1 if offset is the address of an instruction with a 2-byte offset
     that does not have a corresponding 4-byte offset version, in which
     case the absolute value of kind is the inverted opcode.
     > 4 if offset is the address of an instruction (such as jsr) with a
     2-byte offset that does have a corresponding 4-byte offset version,
     in which case kind is the opcode of the 4-byte version (such as jsr_w).  */
  int kind;
  /* The label the relocation wants to actually transfer to.  */
  struct jcf_block *label;
};
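/* For illustration of the kinds above: emit_goto and emit_jsr record
   relocations with kind OPCODE_goto_w and OPCODE_jsr_w (> 4), since those
   instructions have 4-byte forms; emit_if records kind = -inv_opcode
   (< -1), so that if a 2-byte branch offset turns out not to fit,
   perform_relocations can instead emit the inverted conditional jumping
   around a goto_w.  */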
/* State for a single catch clause.  */

struct jcf_handler
{
  struct jcf_handler *next;

  struct jcf_block *start_label;
  struct jcf_block *end_label;
  struct jcf_block *handler_label;

  /* The sub-class of Throwable handled, or NULL_TREE (for finally).  */
  tree type;
};
/* State for the current switch statement.  */

struct jcf_switch_state
{
  struct jcf_switch_state *prev;
  struct jcf_block *default_label;

  struct jcf_relocation *cases;
  int num_cases;
  HOST_WIDE_INT min_case, max_case;
};
/* This structure is used to contain the various pieces that will
   become a .class file.  */

struct jcf_partial
{
  struct obstack *chunk_obstack;

  /* List of basic blocks for the current method.  */
  struct jcf_block *blocks;
  struct jcf_block *last_block;

  struct localvar_info *first_lvar;
  struct localvar_info *last_lvar;

  int linenumber_count;

  /* Until perform_relocations, this is an upper bound on the number
     of bytes (so far) in the instructions for the current method.  */
  int code_length;

  /* Stack of undefined ending labels for LABELED_BLOCK_EXPR.  */
  struct jcf_block *labeled_blocks;

  /* The current stack size (stack pointer) in the current method.  */
  int code_SP;

  /* The largest extent of stack size (stack pointer) in the current method.  */
  int code_SP_max;

  /* Contains a mapping from local var slot number to localvar_info.  */
  struct buffer localvars;

  /* The buffer allocated for bytecode for the current jcf_block.  */
  struct buffer bytecode;

  /* Chain of exception handlers for the current method.  */
  struct jcf_handler *handlers;

  /* Last element in handlers chain.  */
  struct jcf_handler *last_handler;

  /* Number of exception handlers for the current method.  */
  int num_handlers;

  /* Number of finalizers we are currently nested within.  */
  int num_finalizers;

  /* If non-NULL, use this for the return value.  */
  tree return_value_decl;

  /* Information about the current switch statement.  */
  struct jcf_switch_state *sw_state;
};
static void generate_bytecode_insns PROTO ((tree, int, struct jcf_partial *));
static struct chunk * alloc_chunk PROTO ((struct chunk *, unsigned char *,
					  int, struct obstack *));
static unsigned char * append_chunk PROTO ((unsigned char *, int,
					    struct jcf_partial *));
static void append_chunk_copy PROTO ((unsigned char *, int,
				      struct jcf_partial *));
static struct jcf_block * gen_jcf_label PROTO ((struct jcf_partial *));
static void finish_jcf_block PROTO ((struct jcf_partial *));
static void define_jcf_label PROTO ((struct jcf_block *,
				     struct jcf_partial *));
static struct jcf_block * get_jcf_label_here PROTO ((struct jcf_partial *));
static void put_linenumber PROTO ((int, struct jcf_partial *));
static void localvar_alloc PROTO ((tree, struct jcf_partial *));
static int localvar_free PROTO ((tree, struct jcf_partial *));
static int get_access_flags PROTO ((tree));
static void write_chunks PROTO ((FILE *, struct chunk *));
static int adjust_typed_op PROTO ((tree, int));
static void generate_bytecode_conditional PROTO ((tree, struct jcf_block *,
						  struct jcf_block *, int,
						  struct jcf_partial *));
static void generate_bytecode_return PROTO ((tree, struct jcf_partial *));
static void perform_relocations PROTO ((struct jcf_partial *));
static void init_jcf_state PROTO ((struct jcf_partial *, struct obstack *));
static void init_jcf_method PROTO ((struct jcf_partial *, tree));
static void release_jcf_state PROTO ((struct jcf_partial *));
static struct chunk * generate_classfile PROTO ((tree, struct jcf_partial *));
/* Utility macros for appending (big-endian) data to a buffer.
   We assume a local variable 'ptr' points into where we want to
   write next, and we assume enough space has been allocated.  */
#ifdef ENABLE_CHECKING
CHECK_PUT(ptr, state, i)
     struct jcf_partial *state;
{
  if (ptr < state->chunk->data
      || (char*)ptr + i > state->chunk->data + state->chunk->size)
    fatal ("internal error - CHECK_PUT failed");
}
#else
#define CHECK_PUT(PTR, STATE, I) ((void)0)
#endif
#define PUT1(X)  (CHECK_PUT(ptr, state, 1), *ptr++ = (X))
#define PUT2(X)  (PUT1((X) >> 8), PUT1((X) & 0xFF))
#define PUT4(X)  (PUT2((X) >> 16), PUT2((X) & 0xFFFF))
#define PUTN(P, N)  (CHECK_PUT(ptr, state, N), memcpy(ptr, P, N), ptr += (N))
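/* For illustration: these are used when laying out fixed-format parts of
   the .class file.  E.g. PUT4 (0xCAFEBABE) would emit the class-file magic
   as the bytes CA FE BA BE, and PUT2 (index) writes a big-endian u2 value
   such as a constant-pool index.  */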
/* Allocate a new chunk on obstack WORK, and link it in after LAST.
   Set the data and size fields to DATA and SIZE, respectively.
   However, if DATA is NULL and SIZE > 0, allocate a buffer as well.  */

static struct chunk *
alloc_chunk (last, data, size, work)
     struct chunk *last;
     unsigned char *data;
     int size;
     struct obstack *work;
{
  struct chunk *chunk = (struct chunk *)
    obstack_alloc (work, sizeof(struct chunk));

  if (data == NULL && size > 0)
    data = obstack_alloc (work, size);
#ifdef ENABLE_CHECKING
CHECK_OP(struct jcf_partial *state)
{
  if (state->bytecode.ptr > state->bytecode.limit)
    fatal("internal error - CHECK_OP failed");
}
#else
#define CHECK_OP(STATE) ((void)0)
#endif
static unsigned char *
append_chunk (data, size, state)
     unsigned char *data;
     int size;
     struct jcf_partial *state;
{
  state->chunk = alloc_chunk (state->chunk, data, size, state->chunk_obstack);
  if (state->first == NULL)
    state->first = state->chunk;
  return state->chunk->data;
}
static void
append_chunk_copy (data, size, state)
     unsigned char *data;
     int size;
     struct jcf_partial *state;
{
  unsigned char *ptr = append_chunk (NULL, size, state);
  memcpy (ptr, data, size);
}
static struct jcf_block *
gen_jcf_label (state)
     struct jcf_partial *state;
{
  struct jcf_block *block = (struct jcf_block *)
    obstack_alloc (state->chunk_obstack, sizeof (struct jcf_block));
  block->linenumber = -1;
  block->pc = UNDEFINED_PC;
  return block;
}
static void
finish_jcf_block (state)
     struct jcf_partial *state;
{
  struct jcf_block *block = state->last_block;
  struct jcf_relocation *reloc;
  int code_length = BUFFER_LENGTH (&state->bytecode);
  int pc = state->code_length;
  append_chunk_copy (state->bytecode.data, code_length, state);
  BUFFER_RESET (&state->bytecode);
  block->v.chunk = state->chunk;

  /* Calculate code_length to the maximum value it can have.  */
  pc += block->v.chunk->size;
  for (reloc = block->u.relocations;  reloc != NULL;  reloc = reloc->next)
    {
      int kind = reloc->kind;
      if (kind == SWITCH_ALIGN_RELOC)
	pc += 3;
      else if (kind > BLOCK_START_RELOC)
	pc += 2;  /* 2-byte offset may grow to 4-byte offset.  */
      else if (kind < -1)
	pc += 5;  /* May need to add a goto_w.  */
    }
  state->code_length = pc;
}
static void
define_jcf_label (label, state)
     struct jcf_block *label;
     struct jcf_partial *state;
{
  if (state->last_block != NULL)
    finish_jcf_block (state);
  label->pc = state->code_length;
  if (state->blocks == NULL)
    state->blocks = label;
  else
    state->last_block->next = label;
  state->last_block = label;
  label->u.relocations = NULL;
}
static struct jcf_block *
get_jcf_label_here (state)
     struct jcf_partial *state;
{
  if (state->last_block != NULL && BUFFER_LENGTH (&state->bytecode) == 0)
    return state->last_block;
  else
    {
      struct jcf_block *label = gen_jcf_label (state);
      define_jcf_label (label, state);
      return label;
    }
}
/* Note a line number entry for the current PC and given LINE.  */

static void
put_linenumber (line, state)
     int line;
     struct jcf_partial *state;
{
  struct jcf_block *label = get_jcf_label_here (state);
  if (label->linenumber > 0)
    {
      label = gen_jcf_label (state);
      define_jcf_label (label, state);
    }
  label->linenumber = line;
  state->linenumber_count++;
}
/* Allocate a new jcf_handler, for a catch clause that catches exceptions
   in the range (START_LABEL, END_LABEL).  */

static struct jcf_handler *
alloc_handler (start_label, end_label, state)
     struct jcf_block *start_label;
     struct jcf_block *end_label;
     struct jcf_partial *state;
{
  struct jcf_handler *handler = (struct jcf_handler *)
    obstack_alloc (state->chunk_obstack, sizeof (struct jcf_handler));
  handler->start_label = start_label;
  handler->end_label = end_label;
  handler->handler_label = get_jcf_label_here (state);
  if (state->handlers == NULL)
    state->handlers = handler;
  else
    state->last_handler->next = handler;
  state->last_handler = handler;
  handler->next = NULL;
  state->num_handlers++;
  return handler;
}
/* The index of the jvm local variable allocated for this DECL.
   This is assigned when generating .class files;
   contrast DECL_LOCAL_SLOT_NUMBER, which is set when *reading* a .class file.
   (We don't allocate DECL_LANG_SPECIFIC for locals from Java source code.)  */

#define DECL_LOCAL_INDEX(DECL) DECL_ALIGN(DECL)

struct localvar_info
{
  struct localvar_info *next;

  tree decl;
  struct jcf_block *start_label;
  struct jcf_block *end_label;
};

#define localvar_buffer ((struct localvar_info**) state->localvars.data)
#define localvar_max \
  ((struct localvar_info**) state->localvars.ptr - localvar_buffer)
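/* For illustration: JVM locals are numbered slots, and long/double values
   occupy two consecutive slots.  localvar_alloc therefore scans
   localvar_buffer for a free slot (two adjacent free slots for a wide
   type), records the chosen slot in DECL_LOCAL_INDEX (decl), and marks the
   second slot of a wide value with ~0 so it is never handed out
   separately.  */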
524 localvar_alloc (decl
, state
)
526 struct jcf_partial
*state
;
528 struct jcf_block
*start_label
= get_jcf_label_here (state
);
529 int wide
= TYPE_IS_WIDE (TREE_TYPE (decl
));
531 register struct localvar_info
*info
;
532 register struct localvar_info
**ptr
= localvar_buffer
;
533 register struct localvar_info
**limit
534 = (struct localvar_info
**) state
->localvars
.ptr
;
535 for (index
= 0; ptr
< limit
; index
++, ptr
++)
538 && (! wide
|| ((ptr
+1) < limit
&& ptr
[1] == NULL
)))
543 buffer_grow (&state
->localvars
, 2 * sizeof (struct localvar_info
*));
544 ptr
= (struct localvar_info
**) state
->localvars
.data
+ index
;
545 state
->localvars
.ptr
= (unsigned char *) (ptr
+ 1 + wide
);
547 info
= (struct localvar_info
*)
548 obstack_alloc (state
->chunk_obstack
, sizeof (struct localvar_info
));
551 ptr
[1] = (struct localvar_info
*)(~0);
552 DECL_LOCAL_INDEX (decl
) = index
;
554 info
->start_label
= start_label
;
556 if (debug_info_level
> DINFO_LEVEL_TERSE
557 && DECL_NAME (decl
) != NULL_TREE
)
559 /* Generate debugging info. */
561 if (state
->last_lvar
!= NULL
)
562 state
->last_lvar
->next
= info
;
564 state
->first_lvar
= info
;
565 state
->last_lvar
= info
;
571 localvar_free (decl
, state
)
573 struct jcf_partial
*state
;
575 struct jcf_block
*end_label
= get_jcf_label_here (state
);
576 int index
= DECL_LOCAL_INDEX (decl
);
577 register struct localvar_info
**ptr
= &localvar_buffer
[index
];
578 register struct localvar_info
*info
= *ptr
;
579 int wide
= TYPE_IS_WIDE (TREE_TYPE (decl
));
581 info
->end_label
= end_label
;
583 if (info
->decl
!= decl
)
588 if (ptr
[1] != (struct localvar_info
*)(~0))
595 #define STACK_TARGET 1
596 #define IGNORE_TARGET 2
598 /* Get the access flags of a class (TYPE_DECL), a method (FUNCTION_DECL), or
599 a field (FIELD_DECL or VAR_DECL, if static), as encoded in a .class file. */
602 get_access_flags (decl
)
606 int isfield
= TREE_CODE (decl
) == FIELD_DECL
|| TREE_CODE (decl
) == VAR_DECL
;
607 if (CLASS_PUBLIC (decl
)) /* same as FIELD_PUBLIC and METHOD_PUBLIC */
609 if (CLASS_FINAL (decl
)) /* same as FIELD_FINAL and METHOD_FINAL */
611 if (isfield
|| TREE_CODE (decl
) == FUNCTION_DECL
)
613 if (TREE_PROTECTED (decl
))
614 flags
|= ACC_PROTECTED
;
615 if (TREE_PRIVATE (decl
))
616 flags
|= ACC_PRIVATE
;
618 else if (TREE_CODE (decl
) == TYPE_DECL
)
620 if (CLASS_SUPER (decl
))
622 if (CLASS_ABSTRACT (decl
))
623 flags
|= ACC_ABSTRACT
;
624 if (CLASS_INTERFACE (decl
))
625 flags
|= ACC_INTERFACE
;
628 fatal ("internal error - bad argument to get_access_flags");
629 if (TREE_CODE (decl
) == FUNCTION_DECL
)
631 if (METHOD_NATIVE (decl
))
633 if (METHOD_STATIC (decl
))
635 if (METHOD_SYNCHRONIZED (decl
))
636 flags
|= ACC_SYNCHRONIZED
;
637 if (METHOD_ABSTRACT (decl
))
638 flags
|= ACC_ABSTRACT
;
642 if (FIELD_STATIC (decl
))
644 if (FIELD_VOLATILE (decl
))
645 flags
|= ACC_VOLATILE
;
646 if (FIELD_TRANSIENT (decl
))
647 flags
|= ACC_TRANSIENT
;
652 /* Write the list of segments starting at CHUNKS to STREAM. */
655 write_chunks (stream
, chunks
)
657 struct chunk
*chunks
;
659 for (; chunks
!= NULL
; chunks
= chunks
->next
)
660 fwrite (chunks
->data
, chunks
->size
, 1, stream
);
663 /* Push a 1-word constant in the constant pool at the given INDEX.
664 (Caller is responsible for doing NOTE_PUSH.) */
667 push_constant1 (index
, state
)
669 struct jcf_partial
*state
;
684 /* Push a 2-word constant in the constant pool at the given INDEX.
685 (Caller is responsible for doing NOTE_PUSH.) */
688 push_constant2 (index
, state
)
690 struct jcf_partial
*state
;
697 /* Push 32-bit integer constant on VM stack.
698 Caller is responsible for doing NOTE_PUSH. */
701 push_int_const (i
, state
)
703 struct jcf_partial
*state
;
706 if (i
>= -1 && i
<= 5)
707 OP1(OPCODE_iconst_0
+ i
);
708 else if (i
>= -128 && i
< 128)
713 else if (i
>= -32768 && i
< 32768)
720 i
= find_constant1 (&state
->cpool
, CONSTANT_Integer
, i
& 0xFFFFFFFF);
721 push_constant1 (i
, state
);
726 find_constant_wide (lo
, hi
, state
)
727 HOST_WIDE_INT lo
, hi
;
728 struct jcf_partial
*state
;
730 HOST_WIDE_INT w1
, w2
;
731 lshift_double (lo
, hi
, -32, 64, &w1
, &w2
, 1);
732 return find_constant2 (&state
->cpool
, CONSTANT_Long
,
733 w1
& 0xFFFFFFFF, lo
& 0xFFFFFFFF);
736 /* Find or allocate a constant pool entry for the given VALUE.
737 Return the index in the constant pool. */
740 find_constant_index (value
, state
)
742 struct jcf_partial
*state
;
744 if (TREE_CODE (value
) == INTEGER_CST
)
746 if (TYPE_PRECISION (TREE_TYPE (value
)) <= 32)
747 return find_constant1 (&state
->cpool
, CONSTANT_Integer
,
748 TREE_INT_CST_LOW (value
) & 0xFFFFFFFF);
750 return find_constant_wide (TREE_INT_CST_LOW (value
),
751 TREE_INT_CST_HIGH (value
), state
);
753 else if (TREE_CODE (value
) == REAL_CST
)
756 if (TYPE_PRECISION (TREE_TYPE (value
)) == 32)
758 words
[0] = etarsingle (TREE_REAL_CST (value
)) & 0xFFFFFFFF;
759 return find_constant1 (&state
->cpool
, CONSTANT_Float
, words
[0]);
763 etardouble (TREE_REAL_CST (value
), words
);
764 return find_constant2 (&state
->cpool
, CONSTANT_Double
,
765 words
[1-FLOAT_WORDS_BIG_ENDIAN
] & 0xFFFFFFFF,
766 words
[FLOAT_WORDS_BIG_ENDIAN
] & 0xFFFFFFFF);
769 else if (TREE_CODE (value
) == STRING_CST
)
771 return find_string_constant (&state
->cpool
, value
);
774 fatal ("find_constant_index - bad type");
777 /* Push 64-bit long constant on VM stack.
778 Caller is responsible for doing NOTE_PUSH. */
781 push_long_const (lo
, hi
, state
)
782 HOST_WIDE_INT lo
, hi
;
783 struct jcf_partial
*state
;
785 if (hi
== 0 && lo
>= 0 && lo
<= 1)
788 OP1(OPCODE_lconst_0
+ lo
);
790 else if ((hi
== 0 && lo
< 32768) || (hi
== -1 && lo
>= -32768))
792 push_int_const (lo
, state
);
797 push_constant2 (find_constant_wide (lo
, hi
, state
), state
);
801 field_op (field
, opcode
, state
)
804 struct jcf_partial
*state
;
806 int index
= find_fieldref_index (&state
->cpool
, field
);
/* Returns an integer in the range 0 (for 'int') through 4 (for object
   reference) to 7 (for 'short') which matches the pattern of how JVM
   opcodes typically depend on the operand type.  */
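/* For illustration: callers add the value returned here to the opcode for
   the 'int' flavor of an operation.  E.g. OPCODE_iload + adjust_typed_op
   (type, 4) selects iload/lload/fload/dload/aload, while OPCODE_iaload +
   adjust_typed_op (type, 7) can also reach the byte, char and short
   variants baload/caload/saload.  */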
817 adjust_typed_op (type
, max
)
821 switch (TREE_CODE (type
))
824 case RECORD_TYPE
: return 4;
826 return TYPE_PRECISION (type
) == 32 || max
< 5 ? 0 : 5;
828 return TYPE_PRECISION (type
) == 32 || max
< 6 ? 0 : 6;
830 switch (TYPE_PRECISION (type
))
832 case 8: return max
< 5 ? 0 : 5;
833 case 16: return max
< 7 ? 0 : 7;
839 switch (TYPE_PRECISION (type
))
852 maybe_wide (opcode
, index
, state
)
854 struct jcf_partial
*state
;
/* Compile code to duplicate with offset, where
   SIZE is the size of the stack item to duplicate (1 or 2), and
   OFFSET is where to insert the result (must be 0, 1, or 2).
   (The new words get inserted at stack[SP-size-offset].)  */
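/* For illustration (top of stack on the right):
     emit_dup (1, 0)  emits dup      ..., v       =>  ..., v, v
     emit_dup (1, 1)  emits dup_x1   ..., w, v    =>  ..., v, w, v
     emit_dup (2, 0)  emits dup2     ..., w1, w2  =>  ..., w1, w2, w1, w2
   SIZE selects the dup vs. dup2 family; OFFSET selects the plain, _x1 or
   _x2 form.  */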
877 emit_dup (size
, offset
, state
)
879 struct jcf_partial
*state
;
886 kind
= size
== 1 ? OPCODE_dup
: OPCODE_dup2
;
887 else if (offset
== 1)
888 kind
= size
== 1 ? OPCODE_dup_x1
: OPCODE_dup2_x1
;
889 else if (offset
== 2)
890 kind
= size
== 1 ? OPCODE_dup_x2
: OPCODE_dup2_x2
;
898 emit_pop (size
, state
)
900 struct jcf_partial
*state
;
903 OP1 (OPCODE_pop
- 1 + size
);
907 emit_iinc (var
, value
, state
)
910 struct jcf_partial
*state
;
912 int slot
= DECL_LOCAL_INDEX (var
);
914 if (value
< -128 || value
> 127 || slot
>= 256)
932 emit_load_or_store (var
, opcode
, state
)
933 tree var
; /* Variable to load from or store into. */
934 int opcode
; /* Either OPCODE_iload or OPCODE_istore. */
935 struct jcf_partial
*state
;
937 tree type
= TREE_TYPE (var
);
938 int kind
= adjust_typed_op (type
, 4);
939 int index
= DECL_LOCAL_INDEX (var
);
943 OP1 (opcode
+ 5 + 4 * kind
+ index
); /* [ilfda]{load,store}_[0123] */
946 maybe_wide (opcode
+ kind
, index
, state
); /* [ilfda]{load,store} */
950 emit_load (var
, state
)
952 struct jcf_partial
*state
;
954 emit_load_or_store (var
, OPCODE_iload
, state
);
955 NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (var
)) ? 2 : 1);
959 emit_store (var
, state
)
961 struct jcf_partial
*state
;
963 emit_load_or_store (var
, OPCODE_istore
, state
);
964 NOTE_POP (TYPE_IS_WIDE (TREE_TYPE (var
)) ? 2 : 1);
968 emit_unop (opcode
, type
, state
)
969 enum java_opcode opcode
;
970 tree type ATTRIBUTE_UNUSED
;
971 struct jcf_partial
*state
;
978 emit_binop (opcode
, type
, state
)
979 enum java_opcode opcode
;
981 struct jcf_partial
*state
;
983 int size
= TYPE_IS_WIDE (type
) ? 2 : 1;
990 emit_reloc (value
, kind
, target
, state
)
993 struct jcf_block
*target
;
994 struct jcf_partial
*state
;
996 struct jcf_relocation
*reloc
= (struct jcf_relocation
*)
997 obstack_alloc (state
->chunk_obstack
, sizeof (struct jcf_relocation
));
998 struct jcf_block
*block
= state
->last_block
;
999 reloc
->next
= block
->u
.relocations
;
1000 block
->u
.relocations
= reloc
;
1001 reloc
->offset
= BUFFER_LENGTH (&state
->bytecode
);
1002 reloc
->label
= target
;
1004 if (kind
== 0 || kind
== BLOCK_START_RELOC
)
1006 else if (kind
!= SWITCH_ALIGN_RELOC
)
1011 emit_switch_reloc (label
, state
)
1012 struct jcf_block
*label
;
1013 struct jcf_partial
*state
;
1015 emit_reloc (0, BLOCK_START_RELOC
, label
, state
);
1018 /* Similar to emit_switch_reloc,
1019 but re-uses an existing case reloc. */
1022 emit_case_reloc (reloc
, state
)
1023 struct jcf_relocation
*reloc
;
1024 struct jcf_partial
*state
;
1026 struct jcf_block
*block
= state
->last_block
;
1027 reloc
->next
= block
->u
.relocations
;
1028 block
->u
.relocations
= reloc
;
1029 reloc
->offset
= BUFFER_LENGTH (&state
->bytecode
);
1030 reloc
->kind
= BLOCK_START_RELOC
;
1034 /* Emit a conditional jump to TARGET with a 2-byte relative jump offset
1035 The opcode is OPCODE, the inverted opcode is INV_OPCODE. */
1038 emit_if (target
, opcode
, inv_opcode
, state
)
1039 struct jcf_block
*target
;
1040 int opcode
, inv_opcode
;
1041 struct jcf_partial
*state
;
  /* Value is 1 byte from reloc back to start of instruction.  */
  emit_reloc (1, - inv_opcode, target, state);
1049 emit_goto (target
, state
)
1050 struct jcf_block
*target
;
1051 struct jcf_partial
*state
;
  /* Value is 1 byte from reloc back to start of instruction.  */
  emit_reloc (1, OPCODE_goto_w, target, state);
1059 emit_jsr (target
, state
)
1060 struct jcf_block
*target
;
1061 struct jcf_partial
*state
;
  /* Value is 1 byte from reloc back to start of instruction.  */
  emit_reloc (1, OPCODE_jsr_w, target, state);
/* Generate code to evaluate EXP.  If the result is true,
   branch to TRUE_LABEL; otherwise, branch to FALSE_LABEL.
   TRUE_BRANCH_FIRST is a code generation hint that the
   TRUE_LABEL may follow right after this.  (The idea is that we
   may be able to optimize away GOTO TRUE_LABEL; TRUE_LABEL:)  */
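/* For illustration: a comparison such as 'a < b' on ints is emitted as a
   branch, not as a value.  With TRUE_BRANCH_FIRST set, the code emitted is
   roughly

       if_icmpge  FALSE_LABEL      ; inverted test
       goto       TRUE_LABEL

   so that when TRUE_LABEL immediately follows, the goto may later be
   optimized away; with TRUE_BRANCH_FIRST clear, the un-inverted test
   branches to TRUE_LABEL and a goto reaches FALSE_LABEL.  */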
1075 generate_bytecode_conditional (exp
, true_label
, false_label
,
1076 true_branch_first
, state
)
1078 struct jcf_block
*true_label
;
1079 struct jcf_block
*false_label
;
1080 int true_branch_first
;
1081 struct jcf_partial
*state
;
1083 tree exp0
, exp1
, type
;
1084 int save_SP
= state
->code_SP
;
1085 enum java_opcode op
, negop
;
1086 switch (TREE_CODE (exp
))
1089 emit_goto (integer_zerop (exp
) ? false_label
: true_label
, state
);
1093 struct jcf_block
*then_label
= gen_jcf_label (state
);
1094 struct jcf_block
*else_label
= gen_jcf_label (state
);
1095 int save_SP_before
, save_SP_after
;
1096 generate_bytecode_conditional (TREE_OPERAND (exp
, 0),
1097 then_label
, else_label
, 1, state
);
1098 define_jcf_label (then_label
, state
);
1099 save_SP_before
= state
->code_SP
;
1100 generate_bytecode_conditional (TREE_OPERAND (exp
, 1),
1101 true_label
, false_label
, 1, state
);
1102 save_SP_after
= state
->code_SP
;
1103 state
->code_SP
= save_SP_before
;
1104 define_jcf_label (else_label
, state
);
1105 generate_bytecode_conditional (TREE_OPERAND (exp
, 2),
1106 true_label
, false_label
,
1107 true_branch_first
, state
);
1108 if (state
->code_SP
!= save_SP_after
)
1109 fatal ("internal error non-matching SP");
1112 case TRUTH_NOT_EXPR
:
1113 generate_bytecode_conditional (TREE_OPERAND (exp
, 0), false_label
, true_label
,
1114 ! true_branch_first
, state
);
1116 case TRUTH_ANDIF_EXPR
:
1118 struct jcf_block
*next_label
= gen_jcf_label (state
);
1119 generate_bytecode_conditional (TREE_OPERAND (exp
, 0),
1120 next_label
, false_label
, 1, state
);
1121 define_jcf_label (next_label
, state
);
1122 generate_bytecode_conditional (TREE_OPERAND (exp
, 1),
1123 true_label
, false_label
, 1, state
);
1126 case TRUTH_ORIF_EXPR
:
1128 struct jcf_block
*next_label
= gen_jcf_label (state
);
1129 generate_bytecode_conditional (TREE_OPERAND (exp
, 0),
1130 true_label
, next_label
, 1, state
);
1131 define_jcf_label (next_label
, state
);
1132 generate_bytecode_conditional (TREE_OPERAND (exp
, 1),
1133 true_label
, false_label
, 1, state
);
1137 /* Assuming op is one of the 2-operand if_icmp<COND> instructions,
1138 set it to the corresponding 1-operand if<COND> instructions. */
1142 /* The opcodes with their inverses are allocated in pairs.
1143 E.g. The inverse of if_icmplt (161) is if_icmpge (162). */
1144 negop
= (op
& 1) ? op
+ 1 : op
- 1;
1146 if (true_branch_first
)
1148 emit_if (false_label
, negop
, op
, state
);
1149 emit_goto (true_label
, state
);
1153 emit_if (true_label
, op
, negop
, state
);
1154 emit_goto (false_label
, state
);
1158 op
= OPCODE_if_icmpeq
;
1161 op
= OPCODE_if_icmpne
;
1164 op
= OPCODE_if_icmpgt
;
1167 op
= OPCODE_if_icmplt
;
1170 op
= OPCODE_if_icmpge
;
1173 op
= OPCODE_if_icmple
;
1176 exp0
= TREE_OPERAND (exp
, 0);
1177 exp1
= TREE_OPERAND (exp
, 1);
1178 type
= TREE_TYPE (exp0
);
1179 switch (TREE_CODE (type
))
1182 case POINTER_TYPE
: case RECORD_TYPE
:
1183 switch (TREE_CODE (exp
))
1185 case EQ_EXPR
: op
= OPCODE_if_acmpeq
; break;
1186 case NE_EXPR
: op
= OPCODE_if_acmpne
; break;
1189 if (integer_zerop (exp1
) || integer_zerop (exp0
))
	    generate_bytecode_insns (integer_zerop (exp1) ? exp0 : exp1,
				     STACK_TARGET, state);
1193 op
= op
+ (OPCODE_ifnull
- OPCODE_if_acmpeq
);
1194 negop
= (op
& 1) ? op
- 1 : op
+ 1;
1198 generate_bytecode_insns (exp0
, STACK_TARGET
, state
);
1199 generate_bytecode_insns (exp1
, STACK_TARGET
, state
);
1203 generate_bytecode_insns (exp0
, STACK_TARGET
, state
);
1204 generate_bytecode_insns (exp1
, STACK_TARGET
, state
);
1205 if (op
== OPCODE_if_icmplt
|| op
== OPCODE_if_icmple
)
1209 if (TYPE_PRECISION (type
) > 32)
1220 if (TYPE_PRECISION (type
) > 32)
1222 generate_bytecode_insns (exp0
, STACK_TARGET
, state
);
1223 generate_bytecode_insns (exp1
, STACK_TARGET
, state
);
1231 if (integer_zerop (exp1
))
1233 generate_bytecode_insns (exp0
, STACK_TARGET
, state
);
1237 if (integer_zerop (exp0
))
1241 case OPCODE_if_icmplt
:
1242 case OPCODE_if_icmpge
:
1245 case OPCODE_if_icmpgt
:
1246 case OPCODE_if_icmple
:
1252 generate_bytecode_insns (exp1
, STACK_TARGET
, state
);
1256 generate_bytecode_insns (exp0
, STACK_TARGET
, state
);
1257 generate_bytecode_insns (exp1
, STACK_TARGET
, state
);
1263 generate_bytecode_insns (exp
, STACK_TARGET
, state
);
1265 if (true_branch_first
)
1267 emit_if (false_label
, OPCODE_ifeq
, OPCODE_ifne
, state
);
1268 emit_goto (true_label
, state
);
1272 emit_if (true_label
, OPCODE_ifne
, OPCODE_ifeq
, state
);
1273 emit_goto (false_label
, state
);
1277 if (save_SP
!= state
->code_SP
)
1278 fatal ("internal error - SP mismatch");
/* Call pending cleanups i.e. those for surrounding CLEANUP_POINT_EXPRs,
   but only as far out as LIMIT (since we are about to jump to the
   exit label that is LIMIT).  */
1286 call_cleanups (limit
, state
)
1287 struct jcf_block
*limit
;
1288 struct jcf_partial
*state
;
1290 struct jcf_block
*block
= state
->labeled_blocks
;
1291 for (; block
!= limit
; block
= block
->next
)
1293 if (block
->pc
== PENDING_CLEANUP_PC
)
1294 emit_jsr (block
, state
);
1299 generate_bytecode_return (exp
, state
)
1301 struct jcf_partial
*state
;
1303 tree return_type
= TREE_TYPE (TREE_TYPE (state
->current_method
));
1304 int returns_void
= TREE_CODE (return_type
) == VOID_TYPE
;
1309 switch (TREE_CODE (exp
))
1312 generate_bytecode_insns (TREE_OPERAND (exp
, 0), IGNORE_TARGET
,
1314 exp
= TREE_OPERAND (exp
, 1);
1318 struct jcf_block
*then_label
= gen_jcf_label (state
);
1319 struct jcf_block
*else_label
= gen_jcf_label (state
);
1320 generate_bytecode_conditional (TREE_OPERAND (exp
, 0),
1321 then_label
, else_label
, 1, state
);
1322 define_jcf_label (then_label
, state
);
1323 generate_bytecode_return (TREE_OPERAND (exp
, 1), state
);
1324 define_jcf_label (else_label
, state
);
1325 generate_bytecode_return (TREE_OPERAND (exp
, 2), state
);
1329 generate_bytecode_insns (exp
,
1330 returns_void
? IGNORE_TARGET
1331 : STACK_TARGET
, state
);
1337 call_cleanups (NULL_TREE
, state
);
1341 op
= OPCODE_ireturn
+ adjust_typed_op (return_type
, 4);
1342 if (state
->num_finalizers
> 0)
1344 if (state
->return_value_decl
== NULL_TREE
)
1346 state
->return_value_decl
1347 = build_decl (VAR_DECL
, NULL_TREE
, TREE_TYPE (exp
));
1348 localvar_alloc (state
->return_value_decl
, state
);
1350 emit_store (state
->return_value_decl
, state
);
1351 call_cleanups (NULL_TREE
, state
);
1352 emit_load (state
->return_value_decl
, state
);
1353 /* If we call localvar_free (state->return_value_decl, state),
1354 then we risk the save decl erroneously re-used in the
1355 finalizer. Instead, we keep the state->return_value_decl
1356 allocated through the rest of the method. This is not
1357 the greatest solution, but it is at least simple and safe. */
1364 /* Generate bytecode for sub-expression EXP of METHOD.
1365 TARGET is one of STACK_TARGET or IGNORE_TARGET. */
1368 generate_bytecode_insns (exp
, target
, state
)
1371 struct jcf_partial
*state
;
1374 enum java_opcode jopcode
;
1376 HOST_WIDE_INT value
;
1381 if (exp
== NULL
&& target
== IGNORE_TARGET
)
1384 type
= TREE_TYPE (exp
);
1386 switch (TREE_CODE (exp
))
1389 if (BLOCK_EXPR_BODY (exp
))
1392 tree body
= BLOCK_EXPR_BODY (exp
);
1393 for (local
= BLOCK_EXPR_DECLS (exp
); local
; )
1395 tree next
= TREE_CHAIN (local
);
1396 localvar_alloc (local
, state
);
1399 /* Avoid deep recursion for long blocks. */
1400 while (TREE_CODE (body
) == COMPOUND_EXPR
)
1402 generate_bytecode_insns (TREE_OPERAND (body
, 0), target
, state
);
1403 body
= TREE_OPERAND (body
, 1);
1405 generate_bytecode_insns (body
, target
, state
);
1406 for (local
= BLOCK_EXPR_DECLS (exp
); local
; )
1408 tree next
= TREE_CHAIN (local
);
1409 localvar_free (local
, state
);
1415 generate_bytecode_insns (TREE_OPERAND (exp
, 0), IGNORE_TARGET
, state
);
1416 generate_bytecode_insns (TREE_OPERAND (exp
, 1), target
, state
);
1418 case EXPR_WITH_FILE_LOCATION
:
1420 char *saved_input_filename
= input_filename
;
1421 tree body
= EXPR_WFL_NODE (exp
);
1422 int saved_lineno
= lineno
;
1423 if (body
== empty_stmt_node
)
1425 input_filename
= EXPR_WFL_FILENAME (exp
);
1426 lineno
= EXPR_WFL_LINENO (exp
);
1427 if (EXPR_WFL_EMIT_LINE_NOTE (exp
) && lineno
> 0
1428 && debug_info_level
> DINFO_LEVEL_NONE
)
1429 put_linenumber (lineno
, state
);
1430 generate_bytecode_insns (body
, target
, state
);
1431 input_filename
= saved_input_filename
;
1432 lineno
= saved_lineno
;
1436 if (target
== IGNORE_TARGET
) ; /* do nothing */
1437 else if (TREE_CODE (type
) == POINTER_TYPE
)
1439 if (! integer_zerop (exp
))
1442 OP1 (OPCODE_aconst_null
);
1445 else if (TYPE_PRECISION (type
) <= 32)
1447 push_int_const (TREE_INT_CST_LOW (exp
), state
);
1452 push_long_const (TREE_INT_CST_LOW (exp
), TREE_INT_CST_HIGH (exp
),
1459 int prec
= TYPE_PRECISION (type
) >> 5;
1461 if (real_zerop (exp
))
1462 OP1 (prec
== 1 ? OPCODE_fconst_0
: OPCODE_dconst_0
);
1463 else if (real_onep (exp
))
1464 OP1 (prec
== 1 ? OPCODE_fconst_1
: OPCODE_dconst_1
);
1465 /* FIXME Should also use fconst_2 for 2.0f.
1466 Also, should use iconst_2/ldc followed by i2f/i2d
1467 for other float/double when the value is a small integer. */
1470 offset
= find_constant_index (exp
, state
);
1472 push_constant1 (offset
, state
);
1474 push_constant2 (offset
, state
);
1480 push_constant1 (find_string_constant (&state
->cpool
, exp
), state
);
1484 if (TREE_STATIC (exp
))
1486 field_op (exp
, OPCODE_getstatic
, state
);
1487 NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (exp
)) ? 2 : 1);
1490 /* ... fall through ... */
1492 emit_load (exp
, state
);
1494 case NON_LVALUE_EXPR
:
1496 generate_bytecode_insns (TREE_OPERAND (exp
, 0), target
, state
);
1499 generate_bytecode_insns (TREE_OPERAND (exp
, 0), target
, state
);
1500 generate_bytecode_insns (TREE_OPERAND (exp
, 1), target
, state
);
1501 if (target
!= IGNORE_TARGET
)
1503 jopcode
= OPCODE_iaload
+ adjust_typed_op (type
, 7);
1506 if (! TYPE_IS_WIDE (type
))
1512 tree obj
= TREE_OPERAND (exp
, 0);
1513 tree field
= TREE_OPERAND (exp
, 1);
1514 int is_static
= FIELD_STATIC (field
);
1515 generate_bytecode_insns (obj
,
1516 is_static
? IGNORE_TARGET
: target
, state
);
1517 if (target
!= IGNORE_TARGET
)
1519 if (DECL_NAME (field
) == length_identifier_node
&& !is_static
1520 && TYPE_ARRAY_P (TREE_TYPE (obj
)))
1523 OP1 (OPCODE_arraylength
);
1527 field_op (field
, is_static
? OPCODE_getstatic
: OPCODE_getfield
,
1531 NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (field
)) ? 2 : 1);
1536 case TRUTH_ANDIF_EXPR
:
1537 case TRUTH_ORIF_EXPR
:
1545 struct jcf_block
*then_label
= gen_jcf_label (state
);
1546 struct jcf_block
*else_label
= gen_jcf_label (state
);
1547 struct jcf_block
*end_label
= gen_jcf_label (state
);
1548 generate_bytecode_conditional (exp
,
1549 then_label
, else_label
, 1, state
);
1550 define_jcf_label (then_label
, state
);
1551 push_int_const (1, state
);
1552 emit_goto (end_label
, state
);
1553 define_jcf_label (else_label
, state
);
1554 push_int_const (0, state
);
1555 define_jcf_label (end_label
, state
);
1561 struct jcf_block
*then_label
= gen_jcf_label (state
);
1562 struct jcf_block
*else_label
= gen_jcf_label (state
);
1563 struct jcf_block
*end_label
= gen_jcf_label (state
);
1564 generate_bytecode_conditional (TREE_OPERAND (exp
, 0),
1565 then_label
, else_label
, 1, state
);
1566 define_jcf_label (then_label
, state
);
1567 generate_bytecode_insns (TREE_OPERAND (exp
, 1), target
, state
);
1568 if (CAN_COMPLETE_NORMALLY (TREE_OPERAND (exp
, 1))
1569 /* Not all expressions have CAN_COMPLETE_NORMALLY set properly. */
1570 || TREE_CODE (TREE_TYPE (exp
)) != VOID_TYPE
)
1571 emit_goto (end_label
, state
);
1572 define_jcf_label (else_label
, state
);
1573 generate_bytecode_insns (TREE_OPERAND (exp
, 2), target
, state
);
1574 define_jcf_label (end_label
, state
);
1579 struct jcf_switch_state
*sw_state
= state
->sw_state
;
1580 struct jcf_relocation
*reloc
= (struct jcf_relocation
*)
1581 obstack_alloc (state
->chunk_obstack
, sizeof (struct jcf_relocation
));
1582 HOST_WIDE_INT case_value
= TREE_INT_CST_LOW (TREE_OPERAND (exp
, 0));
1584 reloc
->label
= get_jcf_label_here (state
);
1585 reloc
->offset
= case_value
;
1586 reloc
->next
= sw_state
->cases
;
1587 sw_state
->cases
= reloc
;
1588 if (sw_state
->num_cases
== 0)
1590 sw_state
->min_case
= case_value
;
1591 sw_state
->max_case
= case_value
;
1595 if (case_value
< sw_state
->min_case
)
1596 sw_state
->min_case
= case_value
;
1597 if (case_value
> sw_state
->max_case
)
1598 sw_state
->max_case
= case_value
;
1600 sw_state
->num_cases
++;
1604 state
->sw_state
->default_label
= get_jcf_label_here (state
);
      /* The SWITCH_EXPR has three parts, generated in the following order:
	 1.  the switch_expression (the value used to select the correct case);
	 2.  the switch_body;
	 3.  the switch_instruction (the tableswitch/lookupswitch instruction).
	 After code generation, we will re-order them in the order 1, 3, 2.
	 This is to avoid any extra GOTOs.  */
1615 struct jcf_switch_state sw_state
;
1616 struct jcf_block
*expression_last
; /* Last block of the switch_expression. */
1617 struct jcf_block
*body_last
; /* Last block of the switch_body. */
1618 struct jcf_block
*switch_instruction
; /* First block of switch_instruction. */
1619 struct jcf_block
*instruction_last
; /* Last block of the switch_instruction. */
1620 struct jcf_block
*body_block
;
1622 sw_state
.prev
= state
->sw_state
;
1623 state
->sw_state
= &sw_state
;
1624 sw_state
.cases
= NULL
;
1625 sw_state
.num_cases
= 0;
1626 sw_state
.default_label
= NULL
;
1627 generate_bytecode_insns (TREE_OPERAND (exp
, 0), STACK_TARGET
, state
);
1628 expression_last
= state
->last_block
;
1629 body_block
= get_jcf_label_here (state
); /* Force a new block here. */
1630 generate_bytecode_insns (TREE_OPERAND (exp
, 1), IGNORE_TARGET
, state
);
1631 body_last
= state
->last_block
;
1633 switch_instruction
= gen_jcf_label (state
);
1634 define_jcf_label (switch_instruction
, state
);
1635 if (sw_state
.default_label
== NULL
)
1636 sw_state
.default_label
= gen_jcf_label (state
);
1638 if (sw_state
.num_cases
<= 1)
1640 if (sw_state
.num_cases
== 0)
1642 emit_pop (1, state
);
1647 push_int_const (sw_state
.cases
->offset
, state
);
1648 emit_if (sw_state
.cases
->label
,
1649 OPCODE_ifeq
, OPCODE_ifne
, state
);
1651 emit_goto (sw_state
.default_label
, state
);
1656 /* Copy the chain of relocs into a sorted array. */
1657 struct jcf_relocation
**relocs
= (struct jcf_relocation
**)
1658 xmalloc (sw_state
.num_cases
* sizeof (struct jcf_relocation
*));
	  /* The relocs array is a buffer with a gap.
	     The assumption is that cases will normally come in "runs".  */
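	  /* For illustration: the array is kept sorted by case value with a
	     gap in the middle.  Entries below the gap (indices < gap_start)
	     are <= the value being inserted, entries at or above gap_end are
	     >= it, so each insertion only shifts the gap, which is cheap when
	     cases arrive roughly in order.  */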
1662 int gap_end
= sw_state
.num_cases
;
1663 struct jcf_relocation
*reloc
;
1664 for (reloc
= sw_state
.cases
; reloc
!= NULL
; reloc
= reloc
->next
)
1666 HOST_WIDE_INT case_value
= reloc
->offset
;
1667 while (gap_end
< sw_state
.num_cases
)
1669 struct jcf_relocation
*end
= relocs
[gap_end
];
1670 if (case_value
<= end
->offset
)
1672 relocs
[gap_start
++] = end
;
1675 while (gap_start
> 0)
1677 struct jcf_relocation
*before
= relocs
[gap_start
-1];
1678 if (case_value
>= before
->offset
)
1680 relocs
[--gap_end
] = before
;
1683 relocs
[gap_start
++] = reloc
;
1684 /* Note we don't check for duplicates. FIXME! */
1687 if (2 * sw_state
.num_cases
1688 >= sw_state
.max_case
- sw_state
.min_case
)
1689 { /* Use tableswitch. */
1691 RESERVE (13 + 4 * (sw_state
.max_case
- sw_state
.min_case
+ 1));
1692 OP1 (OPCODE_tableswitch
);
1693 emit_reloc (0, SWITCH_ALIGN_RELOC
, NULL
, state
);
1694 emit_switch_reloc (sw_state
.default_label
, state
);
1695 OP4 (sw_state
.min_case
);
1696 OP4 (sw_state
.max_case
);
1697 for (i
= sw_state
.min_case
; ; )
1699 reloc
= relocs
[index
];
1700 if (i
== reloc
->offset
)
1702 emit_case_reloc (reloc
, state
);
1703 if (i
== sw_state
.max_case
)
1708 emit_switch_reloc (sw_state
.default_label
, state
);
1713 { /* Use lookupswitch. */
1714 RESERVE(9 + 8 * sw_state
.num_cases
);
1715 OP1 (OPCODE_lookupswitch
);
1716 emit_reloc (0, SWITCH_ALIGN_RELOC
, NULL
, state
);
1717 emit_switch_reloc (sw_state
.default_label
, state
);
1718 OP4 (sw_state
.num_cases
);
1719 for (i
= 0; i
< sw_state
.num_cases
; i
++)
1721 struct jcf_relocation
*reloc
= relocs
[i
];
1722 OP4 (reloc
->offset
);
1723 emit_case_reloc (reloc
, state
);
1729 instruction_last
= state
->last_block
;
1730 if (sw_state
.default_label
->pc
< 0)
1731 define_jcf_label (sw_state
.default_label
, state
);
1732 else /* Force a new block. */
1733 sw_state
.default_label
= get_jcf_label_here (state
);
1734 /* Now re-arrange the blocks so the switch_instruction
1735 comes before the switch_body. */
1736 switch_length
= state
->code_length
- switch_instruction
->pc
;
1737 switch_instruction
->pc
= body_block
->pc
;
1738 instruction_last
->next
= body_block
;
1739 instruction_last
->v
.chunk
->next
= body_block
->v
.chunk
;
1740 expression_last
->next
= switch_instruction
;
1741 expression_last
->v
.chunk
->next
= switch_instruction
->v
.chunk
;
1742 body_last
->next
= sw_state
.default_label
;
1743 body_last
->v
.chunk
->next
= NULL
;
1744 state
->chunk
= body_last
->v
.chunk
;
1745 for (; body_block
!= sw_state
.default_label
; body_block
= body_block
->next
)
1746 body_block
->pc
+= switch_length
;
1748 state
->sw_state
= sw_state
.prev
;
1753 exp
= TREE_OPERAND (exp
, 0);
1754 if (exp
== NULL_TREE
)
1755 exp
= empty_stmt_node
;
1756 else if (TREE_CODE (exp
) != MODIFY_EXPR
)
1759 exp
= TREE_OPERAND (exp
, 1);
1760 generate_bytecode_return (exp
, state
);
1762 case LABELED_BLOCK_EXPR
:
1764 struct jcf_block
*end_label
= gen_jcf_label (state
);
1765 end_label
->next
= state
->labeled_blocks
;
1766 state
->labeled_blocks
= end_label
;
1767 end_label
->pc
= PENDING_EXIT_PC
;
1768 end_label
->u
.labeled_block
= exp
;
1769 if (LABELED_BLOCK_BODY (exp
))
1770 generate_bytecode_insns (LABELED_BLOCK_BODY (exp
), target
, state
);
1771 if (state
->labeled_blocks
!= end_label
)
1773 state
->labeled_blocks
= end_label
->next
;
1774 define_jcf_label (end_label
, state
);
1779 tree body
= TREE_OPERAND (exp
, 0);
1781 if (TREE_CODE (body
) == COMPOUND_EXPR
1782 && TREE_CODE (TREE_OPERAND (body
, 0)) == EXIT_EXPR
)
1784 /* Optimize: H: if (TEST) GOTO L; BODY; GOTO H; L:
1785 to: GOTO L; BODY; L: if (!TEST) GOTO L; */
1786 struct jcf_block
*head_label
;
1787 struct jcf_block
*body_label
;
1788 struct jcf_block
*end_label
= gen_jcf_label (state
);
1789 struct jcf_block
*exit_label
= state
->labeled_blocks
;
1790 head_label
= gen_jcf_label (state
);
1791 emit_goto (head_label
, state
);
1792 body_label
= get_jcf_label_here (state
);
1793 generate_bytecode_insns (TREE_OPERAND (body
, 1), target
, state
);
1794 define_jcf_label (head_label
, state
);
1795 generate_bytecode_conditional (TREE_OPERAND (body
, 0),
1796 end_label
, body_label
, 1, state
);
1797 define_jcf_label (end_label
, state
);
1802 struct jcf_block
*head_label
= get_jcf_label_here (state
);
1803 generate_bytecode_insns (body
, IGNORE_TARGET
, state
);
1804 emit_goto (head_label
, state
);
1810 struct jcf_block
*label
= state
->labeled_blocks
;
1811 struct jcf_block
*end_label
= gen_jcf_label (state
);
1812 generate_bytecode_conditional (TREE_OPERAND (exp
, 0),
1813 label
, end_label
, 0, state
);
1814 define_jcf_label (end_label
, state
);
1817 case EXIT_BLOCK_EXPR
:
1819 struct jcf_block
*label
= state
->labeled_blocks
;
1820 if (TREE_OPERAND (exp
, 1) != NULL
) goto notimpl
;
1821 while (label
->u
.labeled_block
!= TREE_OPERAND (exp
, 0))
1822 label
= label
->next
;
1823 call_cleanups (label
, state
);
1824 emit_goto (label
, state
);
1828 case PREDECREMENT_EXPR
: value
= -1; post_op
= 0; goto increment
;
1829 case PREINCREMENT_EXPR
: value
= 1; post_op
= 0; goto increment
;
1830 case POSTDECREMENT_EXPR
: value
= -1; post_op
= 1; goto increment
;
1831 case POSTINCREMENT_EXPR
: value
= 1; post_op
= 1; goto increment
;
1834 exp
= TREE_OPERAND (exp
, 0);
1835 type
= TREE_TYPE (exp
);
1836 size
= TYPE_IS_WIDE (type
) ? 2 : 1;
1837 if ((TREE_CODE (exp
) == VAR_DECL
|| TREE_CODE (exp
) == PARM_DECL
)
1838 && ! TREE_STATIC (exp
)
1839 && TREE_CODE (type
) == INTEGER_TYPE
1840 && TYPE_PRECISION (type
) == 32)
1842 if (target
!= IGNORE_TARGET
&& post_op
)
1843 emit_load (exp
, state
);
1844 emit_iinc (exp
, value
, state
);
1845 if (target
!= IGNORE_TARGET
&& ! post_op
)
1846 emit_load (exp
, state
);
1849 if (TREE_CODE (exp
) == COMPONENT_REF
)
1851 generate_bytecode_insns (TREE_OPERAND (exp
, 0), STACK_TARGET
, state
);
1852 emit_dup (1, 0, state
);
1853 /* Stack: ..., objectref, objectref. */
1854 field_op (TREE_OPERAND (exp
, 1), OPCODE_getfield
, state
);
1856 /* Stack: ..., objectref, oldvalue. */
1859 else if (TREE_CODE (exp
) == ARRAY_REF
)
1861 generate_bytecode_insns (TREE_OPERAND (exp
, 0), STACK_TARGET
, state
);
1862 generate_bytecode_insns (TREE_OPERAND (exp
, 1), STACK_TARGET
, state
);
1863 emit_dup (2, 0, state
);
1864 /* Stack: ..., array, index, array, index. */
1865 jopcode
= OPCODE_iaload
+ adjust_typed_op (TREE_TYPE (exp
), 7);
1869 /* Stack: ..., array, index, oldvalue. */
1872 else if (TREE_CODE (exp
) == VAR_DECL
|| TREE_CODE (exp
) == PARM_DECL
)
1874 generate_bytecode_insns (exp
, STACK_TARGET
, state
);
1875 /* Stack: ..., oldvalue. */
1881 if (target
!= IGNORE_TARGET
&& post_op
)
1882 emit_dup (size
, offset
, state
);
1883 /* Stack, if ARRAY_REF: ..., [result, ] array, index, oldvalue. */
1884 /* Stack, if COMPONENT_REF: ..., [result, ] objectref, oldvalue. */
1885 /* Stack, otherwise: ..., [result, ] oldvalue. */
1887 push_int_const (value
, state
);
1889 push_long_const (value
, value
>= 0 ? 0 : -1, state
);
1891 emit_binop (OPCODE_iadd
+ adjust_typed_op (type
, 3), type
, state
);
1892 if (target
!= IGNORE_TARGET
&& ! post_op
)
1893 emit_dup (size
, offset
, state
);
1894 /* Stack, if ARRAY_REF: ..., [result, ] array, index, newvalue. */
1895 /* Stack, if COMPONENT_REF: ..., [result, ] objectref, newvalue. */
1896 /* Stack, otherwise: ..., [result, ] newvalue. */
1897 goto finish_assignment
;
1901 tree lhs
= TREE_OPERAND (exp
, 0);
1902 tree rhs
= TREE_OPERAND (exp
, 1);
1905 /* See if we can use the iinc instruction. */
1906 if ((TREE_CODE (lhs
) == VAR_DECL
|| TREE_CODE (lhs
) == PARM_DECL
)
1907 && ! TREE_STATIC (lhs
)
1908 && TREE_CODE (TREE_TYPE (lhs
)) == INTEGER_TYPE
1909 && TYPE_PRECISION (TREE_TYPE (lhs
)) == 32
1910 && (TREE_CODE (rhs
) == PLUS_EXPR
|| TREE_CODE (rhs
) == MINUS_EXPR
))
1912 tree arg0
= TREE_OPERAND (rhs
, 0);
1913 tree arg1
= TREE_OPERAND (rhs
, 1);
1914 HOST_WIDE_INT min_value
= -32768;
1915 HOST_WIDE_INT max_value
= 32767;
1916 if (TREE_CODE (rhs
) == MINUS_EXPR
)
1921 else if (arg1
== lhs
)
1924 arg1
= TREE_OPERAND (rhs
, 0);
1926 if (lhs
== arg0
&& TREE_CODE (arg1
) == INTEGER_CST
)
1928 HOST_WIDE_INT hi_value
= TREE_INT_CST_HIGH (arg1
);
1929 value
= TREE_INT_CST_LOW (arg1
);
1930 if ((hi_value
== 0 && value
<= max_value
)
1931 || (hi_value
== -1 && value
>= min_value
))
1933 if (TREE_CODE (rhs
) == MINUS_EXPR
)
1935 emit_iinc (lhs
, value
, state
);
1941 if (TREE_CODE (lhs
) == COMPONENT_REF
)
1943 generate_bytecode_insns (TREE_OPERAND (lhs
, 0),
1944 STACK_TARGET
, state
);
1947 else if (TREE_CODE (lhs
) == ARRAY_REF
)
1949 generate_bytecode_insns (TREE_OPERAND(lhs
, 0),
1950 STACK_TARGET
, state
);
1951 generate_bytecode_insns (TREE_OPERAND(lhs
, 1),
1952 STACK_TARGET
, state
);
1957 generate_bytecode_insns (rhs
, STACK_TARGET
, state
);
1958 if (target
!= IGNORE_TARGET
)
1959 emit_dup (TYPE_IS_WIDE (type
) ? 2 : 1 , offset
, state
);
1965 if (TREE_CODE (exp
) == COMPONENT_REF
)
1967 tree field
= TREE_OPERAND (exp
, 1);
1968 if (! FIELD_STATIC (field
))
1971 FIELD_STATIC (field
) ? OPCODE_putstatic
: OPCODE_putfield
,
1974 NOTE_POP (TYPE_IS_WIDE (TREE_TYPE (field
)) ? 2 : 1);
1976 else if (TREE_CODE (exp
) == VAR_DECL
1977 || TREE_CODE (exp
) == PARM_DECL
)
1979 if (FIELD_STATIC (exp
))
1981 field_op (exp
, OPCODE_putstatic
, state
);
1982 NOTE_POP (TYPE_IS_WIDE (TREE_TYPE (exp
)) ? 2 : 1);
1985 emit_store (exp
, state
);
1987 else if (TREE_CODE (exp
) == ARRAY_REF
)
1989 jopcode
= OPCODE_iastore
+ adjust_typed_op (TREE_TYPE (exp
), 7);
1992 NOTE_POP (TYPE_IS_WIDE (TREE_TYPE (exp
)) ? 4 : 3);
1995 fatal ("internal error (bad lhs to MODIFY_EXPR)");
1998 jopcode
= OPCODE_iadd
;
2001 jopcode
= OPCODE_isub
;
2004 jopcode
= OPCODE_imul
;
2006 case TRUNC_DIV_EXPR
:
2008 jopcode
= OPCODE_idiv
;
2010 case TRUNC_MOD_EXPR
:
2011 jopcode
= OPCODE_irem
;
2013 case LSHIFT_EXPR
: jopcode
= OPCODE_ishl
; goto binop
;
2014 case RSHIFT_EXPR
: jopcode
= OPCODE_ishr
; goto binop
;
2015 case URSHIFT_EXPR
: jopcode
= OPCODE_iushr
; goto binop
;
2016 case TRUTH_AND_EXPR
:
2017 case BIT_AND_EXPR
: jopcode
= OPCODE_iand
; goto binop
;
2019 case BIT_IOR_EXPR
: jopcode
= OPCODE_ior
; goto binop
;
2020 case TRUTH_XOR_EXPR
:
2021 case BIT_XOR_EXPR
: jopcode
= OPCODE_ixor
; goto binop
;
2024 tree arg0
= TREE_OPERAND (exp
, 0);
2025 tree arg1
= TREE_OPERAND (exp
, 1);
2026 jopcode
+= adjust_typed_op (type
, 3);
2027 if (arg0
== arg1
&& TREE_CODE (arg0
) == SAVE_EXPR
)
2029 /* fold may (e.g) convert 2*x to x+x. */
2030 generate_bytecode_insns (TREE_OPERAND (arg0
, 0), target
, state
);
2031 emit_dup (TYPE_PRECISION (TREE_TYPE (arg0
)) > 32 ? 2 : 1, 0, state
);
2035 generate_bytecode_insns (arg0
, target
, state
);
2036 generate_bytecode_insns (arg1
, target
, state
);
	/* For most binary operations, both operands and the result have the
	   same type.  Shift operations are different.  Using arg1's type
	   gets us the correct SP adjustment in all cases.  */
2041 if (target
== STACK_TARGET
)
2042 emit_binop (jopcode
, TREE_TYPE (arg1
), state
);
2045 case TRUTH_NOT_EXPR
:
2047 generate_bytecode_insns (TREE_OPERAND (exp
, 0), target
, state
);
2048 if (target
== STACK_TARGET
)
2050 int is_long
= TYPE_PRECISION (TREE_TYPE (exp
)) > 32;
2051 push_int_const (TREE_CODE (exp
) == BIT_NOT_EXPR
? -1 : 1, state
);
2055 NOTE_PUSH (1 + is_long
);
2056 OP1 (OPCODE_ixor
+ is_long
);
2057 NOTE_POP (1 + is_long
);
2061 jopcode
= OPCODE_ineg
;
2062 jopcode
+= adjust_typed_op (type
, 3);
2063 generate_bytecode_insns (TREE_OPERAND (exp
, 0), target
, state
);
2064 if (target
== STACK_TARGET
)
2065 emit_unop (jopcode
, type
, state
);
2067 case INSTANCEOF_EXPR
:
2069 int index
= find_class_constant (&state
->cpool
, TREE_OPERAND (exp
, 1));
2070 generate_bytecode_insns (TREE_OPERAND (exp
, 0), target
, state
);
2072 OP1 (OPCODE_instanceof
);
2079 case FIX_TRUNC_EXPR
:
2081 tree src
= TREE_OPERAND (exp
, 0);
2082 tree src_type
= TREE_TYPE (src
);
2083 tree dst_type
= TREE_TYPE (exp
);
2084 generate_bytecode_insns (TREE_OPERAND (exp
, 0), target
, state
);
2085 if (target
== IGNORE_TARGET
|| src_type
== dst_type
)
2087 if (TREE_CODE (dst_type
) == POINTER_TYPE
)
2089 if (TREE_CODE (exp
) == CONVERT_EXPR
)
2091 int index
= find_class_constant (&state
->cpool
, TREE_TYPE (dst_type
));
2093 OP1 (OPCODE_checkcast
);
2097 else /* Convert numeric types. */
2099 int wide_src
= TYPE_PRECISION (src_type
) > 32;
2100 int wide_dst
= TYPE_PRECISION (dst_type
) > 32;
2101 NOTE_POP (1 + wide_src
);
2103 if (TREE_CODE (dst_type
) == REAL_TYPE
)
2105 if (TREE_CODE (src_type
) == REAL_TYPE
)
2106 OP1 (wide_dst
? OPCODE_f2d
: OPCODE_d2f
);
2107 else if (TYPE_PRECISION (src_type
) == 64)
2108 OP1 (OPCODE_l2f
+ wide_dst
);
2110 OP1 (OPCODE_i2f
+ wide_dst
);
2112 else /* Convert to integral type. */
2114 if (TREE_CODE (src_type
) == REAL_TYPE
)
2115 OP1 (OPCODE_f2i
+ wide_dst
+ 3 * wide_src
);
2120 if (TYPE_PRECISION (dst_type
) < 32)
2123 /* Already converted to int, if needed. */
2124 if (TYPE_PRECISION (dst_type
) <= 8)
2126 else if (TREE_UNSIGNED (dst_type
))
2132 NOTE_PUSH (1 + wide_dst
);
2137 case CLEANUP_POINT_EXPR
:
2139 struct jcf_block
*save_labeled_blocks
= state
->labeled_blocks
;
2140 int can_complete
= CAN_COMPLETE_NORMALLY (TREE_OPERAND (exp
, 0));
2141 generate_bytecode_insns (TREE_OPERAND (exp
, 0), IGNORE_TARGET
, state
);
2142 if (target
!= IGNORE_TARGET
)
2144 while (state
->labeled_blocks
!= save_labeled_blocks
)
2146 struct jcf_block
*finished_label
= NULL
;
2148 tree exception_type
= build_pointer_type (throwable_type_node
);
2149 tree exception_decl
= build_decl (VAR_DECL
, NULL_TREE
,
2151 struct jcf_block
*end_label
= get_jcf_label_here (state
);
2152 struct jcf_block
*label
= state
->labeled_blocks
;
2153 struct jcf_handler
*handler
;
2154 tree cleanup
= label
->u
.labeled_block
;
2155 state
->labeled_blocks
= label
->next
;
2156 state
->num_finalizers
--;
2159 finished_label
= gen_jcf_label (state
);
2160 emit_jsr (label
, state
);
2161 emit_goto (finished_label
, state
);
2162 if (! CAN_COMPLETE_NORMALLY (cleanup
))
2165 handler
= alloc_handler (label
->v
.start_label
, end_label
, state
);
2166 handler
->type
= NULL_TREE
;
2167 localvar_alloc (exception_decl
, state
);
2169 emit_store (exception_decl
, state
);
2170 emit_jsr (label
, state
);
2171 emit_load (exception_decl
, state
);
2173 OP1 (OPCODE_athrow
);
2176 /* The finally block. */
2177 return_link = build_decl (VAR_DECL, NULL_TREE,
2178 return_address_type_node);
2179 define_jcf_label (label, state);
2181 localvar_alloc (return_link, state);
2182 emit_store (return_link, state);
2183 generate_bytecode_insns (cleanup, IGNORE_TARGET, state);
2184 maybe_wide (OPCODE_ret, DECL_LOCAL_INDEX (return_link), state);
2185 localvar_free (return_link, state);
2186 localvar_free (exception_decl, state);
2187 if (finished_label != NULL)
2188 define_jcf_label (finished_label, state);
2193 case WITH_CLEANUP_EXPR:
2195 struct jcf_block *label;
2196 generate_bytecode_insns (TREE_OPERAND (exp, 0), IGNORE_TARGET, state);
2197 label = gen_jcf_label (state);
2198 label->pc = PENDING_CLEANUP_PC;
2199 label->next = state->labeled_blocks;
2200 state->labeled_blocks = label;
2201 state->num_finalizers++;
2202 label->u.labeled_block = TREE_OPERAND (exp, 2);
2203 label->v.start_label = get_jcf_label_here (state);
2204 if (target != IGNORE_TARGET)
2211 tree try_clause = TREE_OPERAND (exp, 0);
2212 struct jcf_block *start_label = get_jcf_label_here (state);
2213 struct jcf_block *end_label; /* End of try clause. */
2214 struct jcf_block *finished_label = gen_jcf_label (state);
2215 tree clause = TREE_OPERAND (exp, 1);
2216 if (target != IGNORE_TARGET)
2218 generate_bytecode_insns (try_clause, IGNORE_TARGET, state);
2219 end_label = get_jcf_label_here (state);
2220 if (CAN_COMPLETE_NORMALLY (try_clause))
2221 emit_goto (finished_label, state);
2222 while (clause != NULL_TREE)
2224 tree catch_clause = TREE_OPERAND (clause, 0);
2225 tree exception_decl = BLOCK_EXPR_DECLS (catch_clause);
2226 struct jcf_handler *handler = alloc_handler (start_label, end_label, state);
2227 if (exception_decl == NULL_TREE)
2228 handler->type = NULL_TREE;
2230 handler->type = TREE_TYPE (TREE_TYPE (exception_decl));
2231 generate_bytecode_insns (catch_clause, IGNORE_TARGET, state);
2232 clause = TREE_CHAIN (clause);
2233 if (CAN_COMPLETE_NORMALLY (catch_clause) && clause != NULL_TREE)
2234 emit_goto (finished_label, state);
2236 define_jcf_label (finished_label, state);
2239 case TRY_FINALLY_EXPR:
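/* Rough sketch of the bytecode laid out below for try { ... } finally { ... }
   using a jsr/ret subroutine (labels and locals are the variables used in
   this case; the exact instructions the called emitters produce may differ):

	start_label:
	   <try body>
	   jsr finally_label
	   goto finished_label
	handler:                  ; catch-any entry in the exception table
	   astore <exception_decl>
	   jsr finally_label
	   aload <exception_decl>
	   athrow
	finally_label:
	   astore <return_link>   ; save the returnAddress pushed by jsr
	   <finally body>
	   ret <return_link>
	finished_label: */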
2241 tree try_block = TREE_OPERAND (exp, 0);
2242 tree finally = TREE_OPERAND (exp, 1);
2243 struct jcf_block *finished_label = gen_jcf_label (state);
2244 struct jcf_block *finally_label = gen_jcf_label (state);
2245 struct jcf_block *start_label = get_jcf_label_here (state);
2246 tree return_link = build_decl (VAR_DECL, NULL_TREE,
2247 return_address_type_node);
2248 tree exception_type = build_pointer_type (throwable_type_node);
2249 tree exception_decl = build_decl (VAR_DECL, NULL_TREE, exception_type);
2250 struct jcf_handler *handler;
2252 finally_label->pc = PENDING_CLEANUP_PC;
2253 finally_label->next = state->labeled_blocks;
2254 state->labeled_blocks = finally_label;
2255 state->num_finalizers++;
2257 generate_bytecode_insns (try_block, target, state);
2258 if (state->labeled_blocks != finally_label)
2260 state->labeled_blocks = finally_label->next;
2261 emit_jsr (finally_label, state);
2262 if (CAN_COMPLETE_NORMALLY (try_block))
2263 emit_goto (finished_label, state);
2265 /* Handle exceptions. */
2266 localvar_alloc (return_link, state);
2267 handler = alloc_handler (start_label, NULL_TREE, state);
2268 handler->end_label = handler->handler_label;
2269 handler->type = NULL_TREE;
2270 localvar_alloc (exception_decl, state);
2272 emit_store (exception_decl, state);
2273 emit_jsr (finally_label, state);
2274 emit_load (exception_decl, state);
2276 OP1 (OPCODE_athrow);
2278 localvar_free (exception_decl, state);
2280 /* The finally block. First save return PC into return_link. */
2281 define_jcf_label (finally_label, state);
2283 emit_store (return_link, state);
2285 generate_bytecode_insns (finally, IGNORE_TARGET, state);
2286 maybe_wide (OPCODE_ret, DECL_LOCAL_INDEX (return_link), state);
2287 localvar_free (return_link, state);
2288 define_jcf_label (finished_label, state);
2292 generate_bytecode_insns (TREE_OPERAND (exp, 0), STACK_TARGET, state);
2294 OP1 (OPCODE_athrow);
2296 case NEW_ARRAY_INIT:
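/* Sketch of the code emitted below for an array initializer such as
   new int[] { a, b } (the store opcode varies with the element type, and
   anewarray is used instead of newarray for reference elements):

	push 2 ; newarray int
	dup ; push 0 ; <code for a> ; iastore
	dup ; push 1 ; <code for b> ; iastore */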
2298 tree values = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
2299 tree array_type = TREE_TYPE (TREE_TYPE (exp));
2300 tree element_type = TYPE_ARRAY_ELEMENT (array_type);
2301 HOST_WIDE_INT length = java_array_type_length (array_type);
2302 if (target == IGNORE_TARGET)
2304 for ( ; values != NULL_TREE; values = TREE_CHAIN (values))
2305 generate_bytecode_insns (TREE_VALUE (values), target, state);
2308 push_int_const (length, state);
2311 if (JPRIMITIVE_TYPE_P (element_type))
2313 int atype = encode_newarray_type (element_type);
2314 OP1 (OPCODE_newarray);
2319 int index = find_class_constant (&state->cpool,
2320 TREE_TYPE (element_type));
2321 OP1 (OPCODE_anewarray);
2325 jopcode = OPCODE_iastore + adjust_typed_op (element_type, 7);
2326 for ( ; values != NULL_TREE; values = TREE_CHAIN (values), offset++)
2328 int save_SP = state->code_SP;
2329 emit_dup (1, 0, state);
2330 push_int_const (offset, state);
2332 generate_bytecode_insns (TREE_VALUE (values), STACK_TARGET, state);
2335 state->code_SP = save_SP;
2339 case NEW_CLASS_EXPR:
2341 tree class = TREE_TYPE (TREE_TYPE (exp));
2342 int need_result = target != IGNORE_TARGET;
2343 int index = find_class_constant (&state->cpool, class);
2349 NOTE_PUSH (1 + need_result);
2351 /* ... fall through ... */
2354 tree f = TREE_OPERAND (exp, 0);
2355 tree x = TREE_OPERAND (exp, 1);
2356 int save_SP = state->code_SP;
2358 if (TREE_CODE (f) == ADDR_EXPR)
2359 f = TREE_OPERAND (f, 0);
2360 if (f == soft_newarray_node)
2362 int type_code = TREE_INT_CST_LOW (TREE_VALUE (x));
2363 generate_bytecode_insns (TREE_VALUE (TREE_CHAIN (x)),
2364 STACK_TARGET, state);
2366 OP1 (OPCODE_newarray);
2370 else if (f == soft_multianewarray_node)
2374 int index = find_class_constant (&state->cpool,
2375 TREE_TYPE (TREE_TYPE (exp)));
2376 x = TREE_CHAIN (x); /* Skip class argument. */
2377 ndims = TREE_INT_CST_LOW (TREE_VALUE (x));
2378 for (idim = ndims; --idim >= 0; )
2381 generate_bytecode_insns (TREE_VALUE (x), STACK_TARGET, state);
2384 OP1 (OPCODE_multianewarray);
2389 else if (f == soft_anewarray_node)
2391 tree cl = TYPE_ARRAY_ELEMENT (TREE_TYPE (TREE_TYPE (exp)));
2392 int index = find_class_constant (&state->cpool, TREE_TYPE (cl));
2393 generate_bytecode_insns (TREE_VALUE (x), STACK_TARGET, state);
2395 OP1 (OPCODE_anewarray);
2399 else if (f == soft_monitorenter_node
2400 || f == soft_monitorexit_node
2403 if (f == soft_monitorenter_node)
2404 op = OPCODE_monitorenter;
2405 else if (f == soft_monitorexit_node)
2406 op = OPCODE_monitorexit;
2409 generate_bytecode_insns (TREE_VALUE (x), STACK_TARGET, state);
2415 else if (exp == soft_exceptioninfo_call_node)
2417 NOTE_PUSH (1); /* Pushed by exception system. */
2420 for ( ; x != NULL_TREE; x = TREE_CHAIN (x))
2422 generate_bytecode_insns (TREE_VALUE (x), STACK_TARGET, state);
2424 nargs = state->code_SP - save_SP;
2425 state->code_SP = save_SP;
2426 if (f == soft_fmod_node)
2433 if (TREE_CODE (exp) == NEW_CLASS_EXPR)
2434 NOTE_POP (1); /* Pop implicit this. */
2435 if (TREE_CODE (f) == FUNCTION_DECL && DECL_CONTEXT (f) != NULL_TREE)
2437 int index = find_methodref_index (&state->cpool, f);
2440 if (METHOD_STATIC (f))
2441 OP1 (OPCODE_invokestatic);
2442 else if (DECL_CONSTRUCTOR_P (f) || CALL_USING_SUPER (exp)
2443 || METHOD_PRIVATE (f))
2444 OP1 (OPCODE_invokespecial);
2445 else if (CLASS_INTERFACE (TYPE_NAME (DECL_CONTEXT (f))))
2447 OP1 (OPCODE_invokeinterface);
2451 OP1 (OPCODE_invokevirtual);
2453 f = TREE_TYPE (TREE_TYPE (f));
2454 if (TREE_CODE (f) != VOID_TYPE)
2456 int size = TYPE_IS_WIDE (f) ? 2 : 1;
2457 if (target == IGNORE_TARGET)
2458 emit_pop (size, state);
2473 error("internal error - tree code not implemented: %s",
2474 tree_code_name [(int) TREE_CODE (exp)]);
2479 perform_relocations (state)
2480 struct jcf_partial *state;
2482 struct jcf_block *block;
2483 struct jcf_relocation *reloc;
2487 /* Before we start, the pc field of each block is an upper bound on
2488 the block's start pc (it may be less, if previous blocks need less
2489 than their maximum).
2491 The minimum size of each block is in the block's chunk->size. */
2493 /* First, figure out the actual locations of each block. */
2496 for (block = state->blocks; block != NULL; block = block->next)
2498 int block_size = block->v.chunk->size;
2502 /* Optimize GOTO L; L: by getting rid of the redundant goto.
2503 Assumes relocations are in reverse order. */
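/* Example: if a block ends with "goto L" and label L begins the very next
   block, the branch is a no-op; dropping the relocation and shrinking the
   chunk by 3 bytes (opcode plus 16-bit offset) removes it. */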
2504 reloc = block->u.relocations;
2505 while (reloc != NULL
2506 && reloc->kind == OPCODE_goto_w
2507 && reloc->label->pc == block->next->pc
2508 && reloc->offset + 2 == block_size)
2510 reloc = reloc->next;
2511 block->u.relocations = reloc;
2512 block->v.chunk->size -= 3;
2517 for (reloc = block->u.relocations; reloc != NULL; reloc = reloc->next)
2519 if (reloc->kind == SWITCH_ALIGN_RELOC)
2521 /* We assume this is the first relocation in this block,
2522 so we know its final pc. */
2523 int where = pc + reloc->offset;
2524 int pad = ((where + 3) & ~3) - where;
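/* The operands of tableswitch/lookupswitch must begin at an offset that is
   a multiple of 4 from the start of the method's code, so up to 3 bytes of
   padding may be needed; ((where + 3) & ~3) rounds up to the next multiple
   of 4. */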
2527 else if (reloc->kind < -1 || reloc->kind > BLOCK_START_RELOC)
2529 int delta = reloc->label->pc - (pc + reloc->offset - 1);
2530 int expand = reloc->kind > 0 ? 2 : 5;
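/* If the signed 16-bit offset cannot hold the displacement, the instruction
   has to grow: goto/jsr (3 bytes) become goto_w/jsr_w (5 bytes), hence an
   expansion of 2; a conditional branch is rewritten as the inverted
   condition (3 bytes) followed by a goto_w (5 bytes), hence an expansion
   of 5. */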
2534 if (delta >= -32768 && delta <= 32767)
2540 block_size += expand;
2546 for (block = state->blocks; block != NULL; block = block->next)
2548 struct chunk *chunk = block->v.chunk;
2549 int old_size = chunk->size;
2550 int next_pc = block->next == NULL ? pc : block->next->pc;
2551 int new_size = next_pc - block->pc;
2552 unsigned char *new_ptr;
2553 unsigned char *old_buffer = chunk->data;
2554 unsigned char *old_ptr = old_buffer + old_size;
2555 if (new_size != old_size)
2557 chunk->data = (unsigned char *)
2558 obstack_alloc (state->chunk_obstack, new_size);
2559 chunk->size = new_size;
2561 new_ptr = chunk->data + new_size;
2563 /* We do the relocations from back to front, because
2564 the relocations are in reverse order. */
2565 for (reloc = block->u.relocations; ; reloc = reloc->next)
2567 /* new_ptr and old_ptr point into the new and old buffers,
2568 respectively. (If no relocations cause the buffer to
2569 grow, the buffer will be the same buffer, and new_ptr==old_ptr.)
2570 The bytes at higher addresses have been copied and relocations
2571 handled; those at lower addresses remain to process. */
2573 /* Lower old index of piece to be copied with no relocation.
2574 I.e. high index of the first piece that does need relocation. */
2575 int start = reloc == NULL ? 0
2576 : reloc->kind == SWITCH_ALIGN_RELOC ? reloc->offset
2577 : (reloc->kind == 0 || reloc->kind == BLOCK_START_RELOC)
2579 : reloc->offset + 2;
2582 int n = (old_ptr - old_buffer) - start;
2586 memcpy (new_ptr, old_ptr, n);
2587 if (old_ptr == old_buffer)
2590 new_offset = new_ptr - chunk->data;
2591 new_offset -= (reloc->kind == -1 ? 2 : 4);
2592 if (reloc->kind == 0)
2595 value = GET_u4 (old_ptr);
2597 else if (reloc->kind == BLOCK_START_RELOC)
2603 else if (reloc->kind == SWITCH_ALIGN_RELOC)
2605 int where = block->pc + reloc->offset;
2606 int pad = ((where + 3) & ~3) - where;
2614 value = GET_u2 (old_ptr);
2616 value += reloc->label->pc - (block->pc + new_offset);
2617 *--new_ptr = (unsigned char) value; value >>= 8;
2618 *--new_ptr = (unsigned char) value; value >>= 8;
2619 if (reloc->kind != -1)
2621 *--new_ptr = (unsigned char) value; value >>= 8;
2622 *--new_ptr = (unsigned char) value;
2624 if (reloc->kind > BLOCK_START_RELOC)
2626 /* Convert: OP TARGET to: OP_w TARGET; (OP is goto or jsr). */
2628 *--new_ptr = reloc->kind;
2630 else if (reloc->kind < -1)
2632 /* Convert: ifCOND TARGET to: ifNCOND T; goto_w TARGET; T: */
2634 *--new_ptr = OPCODE_goto_w;
2637 *--new_ptr = - reloc->kind;
2640 if (new_ptr != chunk->data)
2641 fatal ("internal error - perform_relocations");
2643 state->code_length = pc;
2647 init_jcf_state (state, work)
2648 struct jcf_partial *state;
2649 struct obstack *work;
2651 state->chunk_obstack = work;
2652 state->first = state->chunk = NULL;
2653 CPOOL_INIT (&state->cpool);
2654 BUFFER_INIT (&state->localvars);
2655 BUFFER_INIT (&state->bytecode);
2659 init_jcf_method (state, method)
2660 struct jcf_partial *state;
2663 state->current_method = method;
2664 state->blocks = state->last_block = NULL;
2665 state->linenumber_count = 0;
2666 state->first_lvar = state->last_lvar = NULL;
2667 state->lvar_count = 0;
2668 state->labeled_blocks = NULL;
2669 state->code_length = 0;
2670 BUFFER_RESET (&state->bytecode);
2671 BUFFER_RESET (&state->localvars);
2673 state->code_SP_max = 0;
2674 state->handlers = NULL;
2675 state->last_handler = NULL;
2676 state->num_handlers = 0;
2677 state->num_finalizers = 0;
2678 state->return_value_decl = NULL_TREE;
2682 release_jcf_state (state)
2683 struct jcf_partial *state;
2685 CPOOL_FINISH (&state->cpool);
2686 obstack_free (state->chunk_obstack, state->first);
2689 /* Generate and return a list of chunks containing the class CLAS
2690 in the .class file representation. The list can be written to a
2691 .class file using write_chunks. Allocate chunks from obstack WORK. */
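/* The chunks are appended in the order the class file format requires
   (see the JVM class file specification):

	u4 magic;  u2 minor_version;  u2 major_version;
	u2 constant_pool_count;  cp_info constant_pool[];
	u2 access_flags;  u2 this_class;  u2 super_class;
	u2 interfaces_count;  u2 interfaces[];
	u2 fields_count;  field_info fields[];
	u2 methods_count;  method_info methods[];
	u2 attributes_count;  attribute_info attributes[];

   The constant pool chunk is allocated empty up front and filled in at the
   very end, once every constant it must contain is known. */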
2693 static struct chunk *
2694 generate_classfile (clas, state)
2696 struct jcf_partial *state;
2698 struct chunk *cpool_chunk;
2702 char *fields_count_ptr;
2703 int fields_count = 0;
2704 char *methods_count_ptr;
2705 int methods_count = 0;
2706 static tree SourceFile_node = NULL_TREE;
2709 = clas == object_type_node ? 0
2710 : TREE_VEC_LENGTH (TYPE_BINFO_BASETYPES (clas));
2712 ptr = append_chunk (NULL, 8, state);
2713 PUT4 (0xCafeBabe); /* Magic number */
2714 PUT2 (3); /* Minor version */
2715 PUT2 (45); /* Major version */
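/* 0xCAFEBABE is the required class-file magic number; version 45.3 is the
   class file format understood by JDK 1.0/1.1 virtual machines. */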
2717 append_chunk (NULL, 0, state);
2718 cpool_chunk = state->chunk;
2720 /* Next allocate the chunk containing access_flags through fields_count. */
2721 if (clas == object_type_node)
2724 i = 8 + 2 * total_supers;
2725 ptr = append_chunk (NULL, i, state);
2726 i = get_access_flags (TYPE_NAME (clas));
2727 if (! (i & ACC_INTERFACE))
2729 PUT2 (i); /* access_flags */
2730 i = find_class_constant (&state->cpool, clas); PUT2 (i); /* this_class */
2731 if (clas == object_type_node)
2733 PUT2(0); /* super_class */
2734 PUT2(0); /* interfaces_count */
2738 tree basetypes = TYPE_BINFO_BASETYPES (clas);
2739 tree base = BINFO_TYPE (TREE_VEC_ELT (basetypes, 0));
2740 int j = find_class_constant (&state->cpool, base);
2741 PUT2 (j); /* super_class */
2742 PUT2 (total_supers - 1); /* interfaces_count */
2743 for (i = 1; i < total_supers; i++)
2745 base = BINFO_TYPE (TREE_VEC_ELT (basetypes, i));
2746 j = find_class_constant (&state->cpool, base);
2750 fields_count_ptr = ptr;
2752 for (part = TYPE_FIELDS (clas); part; part = TREE_CHAIN (part))
2755 if (DECL_NAME (part) == NULL_TREE || DECL_ARTIFICIAL (part))
2757 ptr = append_chunk (NULL, 8, state);
2758 i = get_access_flags (part); PUT2 (i);
2759 i = find_utf8_constant (&state->cpool, DECL_NAME (part)); PUT2 (i);
2760 i = find_utf8_constant (&state->cpool, build_java_signature (TREE_TYPE (part)));
2762 have_value = DECL_INITIAL (part) != NULL_TREE && FIELD_STATIC (part);
2763 PUT2 (have_value); /* attributes_count */
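/* A static field with a constant initializer gets a ConstantValue attribute:
   attribute_name_index (u2), attribute_length == 2 (u4), and a u2
   constant-pool index for the initial value. */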
2766 tree init = DECL_INITIAL (part);
2767 static tree ConstantValue_node = NULL_TREE;
2768 ptr = append_chunk (NULL, 8, state);
2769 if (ConstantValue_node == NULL_TREE)
2770 ConstantValue_node = get_identifier ("ConstantValue");
2771 i = find_utf8_constant (&state->cpool, ConstantValue_node);
2772 PUT2 (i); /* attribute_name_index */
2773 PUT4 (2); /* attribute_length */
2774 i = find_constant_index (init, state); PUT2 (i);
2778 ptr = fields_count_ptr; PUT2 (fields_count);
2780 ptr = methods_count_ptr = append_chunk (NULL, 2, state);
2783 for (part = TYPE_METHODS (clas); part; part = TREE_CHAIN (part))
2785 struct jcf_block *block;
2786 tree function_body = DECL_FUNCTION_BODY (part);
2787 tree body = function_body == NULL_TREE ? NULL_TREE
2788 : BLOCK_EXPR_BODY (function_body);
2789 tree name = DECL_CONSTRUCTOR_P (part) ? init_identifier_node
2791 tree type = TREE_TYPE (part);
2792 tree save_function = current_function_decl;
2793 current_function_decl = part;
2794 ptr = append_chunk (NULL, 8, state);
2795 i = get_access_flags (part); PUT2 (i);
2796 i = find_utf8_constant (&state->cpool, name); PUT2 (i);
2797 i = find_utf8_constant (&state->cpool, build_java_signature (type));
2799 i = (body != NULL_TREE) + (DECL_FUNCTION_THROWS (part) != NULL_TREE);
2800 PUT2 (i); /* attributes_count */
2801 if (body != NULL_TREE)
2803 int code_attributes_count = 0;
2804 static tree Code_node = NULL_TREE;
2807 struct jcf_handler *handler;
2808 if (Code_node == NULL_TREE)
2809 Code_node = get_identifier ("Code");
2810 ptr = append_chunk (NULL, 14, state);
2811 i = find_utf8_constant (&state->cpool, Code_node); PUT2 (i);
2813 init_jcf_method (state, part);
2814 get_jcf_label_here (state); /* Force a first block. */
2815 for (t = DECL_ARGUMENTS (part); t != NULL_TREE; t = TREE_CHAIN (t))
2816 localvar_alloc (t, state);
2817 generate_bytecode_insns (body, IGNORE_TARGET, state);
2818 if (CAN_COMPLETE_NORMALLY (body))
2820 if (TREE_CODE (TREE_TYPE (type)) != VOID_TYPE)
2823 OP1 (OPCODE_return);
2825 for (t = DECL_ARGUMENTS (part); t != NULL_TREE; t = TREE_CHAIN (t))
2826 localvar_free (t, state);
2827 if (state->return_value_decl != NULL_TREE)
2828 localvar_free (state->return_value_decl, state);
2829 finish_jcf_block (state);
2830 perform_relocations (state);
2833 i = 8 + state->code_length + 4 + 8 * state->num_handlers;
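/* Size of the Code attribute body so far: max_stack (2) + max_locals (2)
   + code_length (4) = 8 bytes, plus the bytecode itself, plus
   exception_table_length (2) + attributes_count (2) = 4 bytes, plus 8 bytes
   per exception handler entry. */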
2834 if (state->linenumber_count > 0)
2836 code_attributes_count++;
2837 i += 8 + 4 * state->linenumber_count;
2839 if (state->lvar_count > 0)
2841 code_attributes_count++;
2842 i += 8 + 10 * state->lvar_count;
2844 PUT4 (i); /* attribute_length */
2845 PUT2 (state->code_SP_max); /* max_stack */
2846 PUT2 (localvar_max); /* max_locals */
2847 PUT4 (state->code_length);
2849 /* Emit the exception table. */
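/* Each exception_table entry is four u2 fields: start_pc, end_pc,
   handler_pc and catch_type; a catch_type of 0 means "catch anything",
   which is what the finally handlers generated above use. */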
2850 ptr = append_chunk (NULL, 2 + 8 * state->num_handlers, state);
2851 PUT2 (state->num_handlers); /* exception_table_length */
2852 handler = state->handlers;
2853 for (; handler != NULL; handler = handler->next)
2856 PUT2 (handler->start_label->pc);
2857 PUT2 (handler->end_label->pc);
2858 PUT2 (handler->handler_label->pc);
2859 if (handler->type == NULL_TREE)
2862 type_index = find_class_constant (&state->cpool,
2867 ptr = append_chunk (NULL, 2, state);
2868 PUT2 (code_attributes_count);
2870 /* Write the LineNumberTable attribute. */
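/* Each LineNumberTable entry is start_pc (u2) followed by line_number (u2),
   4 bytes per entry, so attribute_length is 2 + 4 * linenumber_count. */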
2871 if (state->linenumber_count > 0)
2873 static tree LineNumberTable_node = NULL_TREE;
2874 ptr = append_chunk (NULL, 8 + 4 * state->linenumber_count, state);
2875 if (LineNumberTable_node == NULL_TREE)
2876 LineNumberTable_node = get_identifier ("LineNumberTable");
2877 i = find_utf8_constant (&state->cpool, LineNumberTable_node);
2878 PUT2 (i); /* attribute_name_index */
2879 i = 2+4*state->linenumber_count; PUT4(i); /* attribute_length */
2880 i = state->linenumber_count; PUT2 (i);
2881 for (block = state->blocks; block != NULL; block = block->next)
2883 int line = block->linenumber;
2892 /* Write the LocalVariableTable attribute. */
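/* Each LocalVariableTable entry is five u2 fields: start_pc, length,
   name_index, descriptor_index and index, 10 bytes per entry, so
   attribute_length is 2 + 10 * lvar_count. */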
2893 if (state->lvar_count > 0)
2895 static tree LocalVariableTable_node = NULL_TREE;
2896 struct localvar_info *lvar = state->first_lvar;
2897 ptr = append_chunk (NULL, 8 + 10 * state->lvar_count, state);
2898 if (LocalVariableTable_node == NULL_TREE)
2899 LocalVariableTable_node = get_identifier("LocalVariableTable");
2900 i = find_utf8_constant (&state->cpool, LocalVariableTable_node);
2901 PUT2 (i); /* attribute_name_index */
2902 i = 2 + 10 * state->lvar_count; PUT4 (i); /* attribute_length */
2903 i = state->lvar_count; PUT2 (i);
2904 for ( ; lvar != NULL; lvar = lvar->next)
2906 tree name = DECL_NAME (lvar->decl);
2907 tree sig = build_java_signature (TREE_TYPE (lvar->decl));
2908 i = lvar->start_label->pc; PUT2 (i);
2909 i = lvar->end_label->pc - i; PUT2 (i);
2910 i = find_utf8_constant (&state->cpool, name); PUT2 (i);
2911 i = find_utf8_constant (&state->cpool, sig); PUT2 (i);
2912 i = DECL_LOCAL_INDEX (lvar->decl); PUT2 (i);
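/* Write the Exceptions attribute for a method with a throws clause:
   number_of_exceptions (u2) followed by one u2 constant-pool class index
   per declared exception, so attribute_length is 2 + 2 * throws_count. */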
2916 if (DECL_FUNCTION_THROWS (part) != NULL_TREE)
2918 tree t = DECL_FUNCTION_THROWS (part);
2919 int throws_count = list_length (t);
2920 static tree Exceptions_node = NULL_TREE;
2921 if (Exceptions_node == NULL_TREE)
2922 Exceptions_node = get_identifier ("Exceptions");
2923 ptr = append_chunk (NULL, 8 + 2 * throws_count, state);
2924 i = find_utf8_constant (&state->cpool, Exceptions_node);
2925 PUT2 (i); /* attribute_name_index */
2926 i = 2 + 2 * throws_count; PUT4(i); /* attribute_length */
2927 i = throws_count; PUT2 (i);
2928 for (; t != NULL_TREE; t = TREE_CHAIN (t))
2930 i = find_class_constant (&state->cpool, TREE_VALUE (t));
2935 current_function_decl = save_function;
2937 ptr = methods_count_ptr; PUT2 (methods_count);
2939 source_file = DECL_SOURCE_FILE (TYPE_NAME (clas));
2940 for (ptr = source_file; ; ptr++)
2945 if (ch == '/' || ch == '\\')
2946 source_file = ptr+1;
2948 ptr = append_chunk (NULL, 10, state);
2949 PUT2 (1); /* attributes_count */
2951 /* Generate the SourceFile attribute. */
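/* SourceFile layout: attribute_name_index (u2), attribute_length == 2 (u4),
   and a u2 index of the CONSTANT_Utf8 entry holding the file name. */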
2952 if (SourceFile_node == NULL_TREE)
2953 SourceFile_node = get_identifier ("SourceFile");
2954 i = find_utf8_constant (&state->cpool, SourceFile_node);
2955 PUT2 (i); /* attribute_name_index */
2957 i = find_utf8_constant (&state->cpool, get_identifier (source_file));
2960 /* Now, finally, generate the contents of the constant pool chunk. */
2961 i = count_constant_pool_bytes (&state->cpool);
2962 ptr = obstack_alloc (state->chunk_obstack, i);
2963 cpool_chunk->data = ptr;
2964 cpool_chunk->size = i;
2965 write_constant_pool (&state->cpool, ptr, i);
2966 return state->first;
2970 make_class_file_name (clas)
2973 const char *dname, *slash;
2977 cname = IDENTIFIER_POINTER (identifier_subst (DECL_NAME (TYPE_NAME (clas)),
2978 "", '.', DIR_SEPARATOR,
2980 if (jcf_write_base_directory == NULL)
2982 /* Make sure we put the class file into the .java file's
2983 directory, and not into some subdirectory thereof. */
2985 dname = DECL_SOURCE_FILE (TYPE_NAME (clas));
2986 slash = strrchr (dname, DIR_SEPARATOR);
2992 t = strrchr (cname, DIR_SEPARATOR);
2998 dname = jcf_write_base_directory;
2999 slash = dname + strlen (dname);
3002 r = xmalloc (slash - dname + strlen (cname) + 2);
3003 strncpy (r, dname, slash - dname);
3004 r[slash - dname] = DIR_SEPARATOR;
3005 strcpy (&r[slash - dname + 1], cname);
3007 /* We try to make new directories when we need them. We only do
3008 this for directories which "might not" exist. For instance, we
3009 assume the `-d' directory exists, but we don't assume that any
3010 subdirectory below it exists. It might be worthwhile to keep
3011 track of which directories we've created to avoid gratuitous
3013 dname = r + (slash - dname) + 1;
3016 cname = strchr (dname, DIR_SEPARATOR);
3020 if (stat (r, &sb) == -1)
3022 /* Try to make it. */
3023 if (mkdir (r, 0755) == -1)
3025 fatal ("failed to create directory `%s'", r);
3030 *cname = DIR_SEPARATOR;
3031 /* Skip consecutive separators. */
3032 for (dname = cname + 1; *dname && *dname == DIR_SEPARATOR; ++dname)
3039 /* Write out the contents of a class (RECORD_TYPE) CLAS, as a .class file.
3040 The output .class file name is make_class_file_name(CLAS). */
3043 write_classfile (clas)
3046 struct obstack *work = &temporary_obstack;
3047 struct jcf_partial state[1];
3048 char *class_file_name = make_class_file_name (clas);
3049 struct chunk *chunks;
3051 if (class_file_name != NULL)
3053 FILE* stream = fopen (class_file_name, "wb");
3055 fatal ("failed to open `%s' for writing", class_file_name);
3056 jcf_dependency_add_target (class_file_name);
3057 init_jcf_state (state, work);
3058 chunks = generate_classfile (clas, state);
3059 write_chunks (stream, chunks);
3060 if (fclose (stream))
3061 fatal ("failed to close after writing `%s'", class_file_name);
3062 free (class_file_name);
3064 release_jcf_state (state);
3068 string concatenation
3069 synchronized statement