gcc/java/jcf-write.c
1 /* Write out a Java(TM) class file.
2 Copyright (C) 1998, 1999, 2000, 2001, 2002, 2003
3 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA.
21 Java and all Java-based marks are trademarks or registered trademarks
22 of Sun Microsystems, Inc. in the United States and other countries.
23 The Free Software Foundation is independent of Sun Microsystems, Inc. */
25 #include "config.h"
26 #include "system.h"
27 #include "coretypes.h"
28 #include "tm.h"
29 #include "jcf.h"
30 #include "tree.h"
31 #include "real.h"
32 #include "java-tree.h"
33 #include "obstack.h"
34 #include "rtl.h"
35 #include "flags.h"
36 #include "java-opcodes.h"
37 #include "parse.h" /* for BLOCK_EXPR_BODY */
38 #include "buffer.h"
39 #include "toplev.h"
40 #include "ggc.h"
41 #include "tm_p.h"
43 extern struct obstack temporary_obstack;
45 /* Base directory in which `.class' files should be written.
46 NULL means to put the file into the same directory as the
47 corresponding .java file. */
48 const char *jcf_write_base_directory = NULL;
50 /* Make sure bytecode.data is big enough for at least N more bytes. */
52 #define RESERVE(N) \
53 do { CHECK_OP(state); \
54 if (state->bytecode.ptr + (N) > state->bytecode.limit) \
55 buffer_grow (&state->bytecode, N); } while (0)
57 /* Add a 1-byte instruction/operand I to bytecode.data,
58 assuming space has already been RESERVE'd. */
60 #define OP1(I) (*state->bytecode.ptr++ = (I), CHECK_OP(state))
62 /* Like OP1, but I is a 2-byte big endian integer. */
64 #define OP2(I) \
65 do { int _i = (I); OP1 (_i >> 8); OP1 (_i); CHECK_OP(state); } while (0)
67 /* Like OP1, but I is a 4-byte big endian integer. */
69 #define OP4(I) \
70 do { int _i = (I); OP1 (_i >> 24); OP1 (_i >> 16); \
71 OP1 (_i >> 8); OP1 (_i); CHECK_OP(state); } while (0)
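/* For illustration: the OP macros emit big-endian bytes, as the .class
   format requires.  Assuming 3 bytes have been RESERVE'd,

       OP1 (OPCODE_ldc_w);
       OP2 (0x0134);          /* writes the bytes 0x01, then 0x34 */

   and OP4 (0x12345678) writes 0x12 0x34 0x56 0x78.  Each OP1 stores only
   the low 8 bits of its argument, so the shifts in OP2/OP4 pick out the
   successive bytes from most- to least-significant.  */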
73 /* Macro to call each time we push I words on the JVM stack. */
75 #define NOTE_PUSH(I) \
76 do { state->code_SP += (I); \
77 if (state->code_SP > state->code_SP_max) \
78 state->code_SP_max = state->code_SP; } while (0)
80 /* Macro to call each time we pop I words from the JVM stack. */
82 #define NOTE_POP(I) \
83 do { state->code_SP -= (I); if (state->code_SP < 0) abort(); } while (0)
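/* For illustration: NOTE_PUSH/NOTE_POP track the JVM operand-stack depth so
   that code_SP_max can later be written out as the method's max_stack.
   E.g. emitting iconst_1, iconst_2, iadd corresponds to NOTE_PUSH (1),
   NOTE_PUSH (1), NOTE_POP (1): the depth ends at 1 and code_SP_max is 2.  */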
85 /* A chunk or segment of a .class file. */
87 struct chunk
89 /* The next segment of this .class file. */
90 struct chunk *next;
92 /* The actual data in this segment to be written to the .class file. */
93 unsigned char *data;
95 /* The size of the segment to be written to the .class file. */
96 int size;
99 #define PENDING_CLEANUP_PC (-3)
100 #define PENDING_EXIT_PC (-2)
101 #define UNDEFINED_PC (-1)
103 /* Each "block" represents a label plus the bytecode instructions following.
104 There may be branches out of the block, but no incoming jumps, except
105 to the beginning of the block.
107 If (pc < 0), the jcf_block is not an actual block (i.e. it has no
108 associated code yet), but it is an undefined label.
111 struct jcf_block
113 /* For blocks that are defined, the next block (in pc order).
114 For blocks that are the not-yet-defined end label of a LABELED_BLOCK_EXPR
115 or a cleanup expression (from a TRY_FINALLY_EXPR),
116 this is the next (outer) such end label, in a stack headed by
117 labeled_blocks in jcf_partial. */
118 struct jcf_block *next;
120 /* In the not-yet-defined end label for an unfinished EXIT_BLOCK_EXPR.
121 pc is PENDING_EXIT_PC.
122 In the not-yet-defined end label for pending cleanup subroutine,
123 pc is PENDING_CLEANUP_PC.
124 For other not-yet-defined labels, pc is UNDEFINED_PC.
126 If the label has been defined:
127 Until perform_relocations is finished, this is the maximum possible
128 value of the bytecode offset at the beginning of this block.
129 After perform_relocations, it is the actual offset (pc). */
130 int pc;
132 int linenumber;
134 /* After finish_jcf_block is called, the actual instructions
135 contained in this block. Before that NULL, and the instructions
136 are in state->bytecode. */
137 union {
138 struct chunk *chunk;
140 /* If pc==PENDING_CLEANUP_PC, start_label is the start of the region
141 covered by the cleanup. */
142 struct jcf_block *start_label;
143 } v;
145 union {
146 /* Set of relocations (in reverse offset order) for this block. */
147 struct jcf_relocation *relocations;
149 /* If this block is that of the not-yet-defined end label of
150 a LABELED_BLOCK_EXPR, where LABELED_BLOCK is that LABELED_BLOCK_EXPR.
151 If pc==PENDING_CLEANUP_PC, the cleanup that needs to be run. */
152 tree labeled_block;
153 } u;
156 /* A "relocation" type for the 0-3 bytes of padding at the start
157 of a tableswitch or a lookupswitch. */
158 #define SWITCH_ALIGN_RELOC 4
160 /* A relocation type for the labels in a tableswitch or a lookupswitch;
161 these are relative to the start of the instruction, but (due to
162 the 0-3 bytes of padding), we don't know the offset before relocation. */
163 #define BLOCK_START_RELOC 1
165 struct jcf_relocation
167 /* Next relocation for the current jcf_block. */
168 struct jcf_relocation *next;
170 /* The (byte) offset within the current block that needs to be relocated. */
171 HOST_WIDE_INT offset;
173 /* 0 if offset is a 4-byte relative offset.
174 4 (SWITCH_ALIGN_RELOC) if offset points to 0-3 padding bytes inserted
175 for proper alignment in tableswitch/lookupswitch instructions.
176 1 (BLOCK_START_RELOC) if offset points to a 4-byte offset relative
177 to the start of the containing block.
178 -1 if offset is a 2-byte relative offset.
179 < -1 if offset is the address of an instruction with a 2-byte offset
180 that does not have a corresponding 4-byte offset version, in which
181 case the absolute value of kind is the inverted opcode.
182 > 4 if offset is the address of an instruction (such as jsr) with a
183 2-byte offset that does have a corresponding 4-byte offset version,
184 in which case kind is the opcode of the 4-byte version (such as jsr_w). */
185 int kind;
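/* For illustration, concrete KIND values produced by the emit_* helpers
   later in this file: a goto uses kind == OPCODE_goto_w (it has a 4-byte
   form), a jsr uses kind == OPCODE_jsr_w, a conditional branch such as
   if_icmpeq uses kind == -OPCODE_if_icmpne (no 4-byte form, so relocation
   may have to invert the test and add a goto_w), switch labels use
   BLOCK_START_RELOC, and the switch padding uses SWITCH_ALIGN_RELOC.  */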
187 /* The label the relocation wants to actually transfer to. */
188 struct jcf_block *label;
191 #define RELOCATION_VALUE_0 ((HOST_WIDE_INT)0)
192 #define RELOCATION_VALUE_1 ((HOST_WIDE_INT)1)
194 /* State for single catch clause. */
196 struct jcf_handler
198 struct jcf_handler *next;
200 struct jcf_block *start_label;
201 struct jcf_block *end_label;
202 struct jcf_block *handler_label;
204 /* The sub-class of Throwable handled, or NULL_TREE (for finally). */
205 tree type;
208 /* State for the current switch statement. */
210 struct jcf_switch_state
212 struct jcf_switch_state *prev;
213 struct jcf_block *default_label;
215 struct jcf_relocation *cases;
216 int num_cases;
217 HOST_WIDE_INT min_case, max_case;
220 /* This structure is used to contain the various pieces that will
221 become a .class file. */
223 struct jcf_partial
225 struct chunk *first;
226 struct chunk *chunk;
227 struct obstack *chunk_obstack;
228 tree current_method;
230 /* List of basic blocks for the current method. */
231 struct jcf_block *blocks;
232 struct jcf_block *last_block;
234 struct localvar_info *first_lvar;
235 struct localvar_info *last_lvar;
236 int lvar_count;
238 CPool cpool;
240 int linenumber_count;
242 /* Until perform_relocations, this is an upper bound on the number
243 of bytes (so far) in the instructions for the current method. */
244 int code_length;
246 /* Stack of undefined ending labels for LABELED_BLOCK_EXPR. */
247 struct jcf_block *labeled_blocks;
249 /* The current stack size (stack pointer) in the current method. */
250 int code_SP;
252 /* The largest extent of stack size (stack pointer) in the current method. */
253 int code_SP_max;
255 /* Contains a mapping from local var slot number to localvar_info. */
256 struct buffer localvars;
258 /* The buffer allocated for bytecode for the current jcf_block. */
259 struct buffer bytecode;
261 /* Chain of exception handlers for the current method. */
262 struct jcf_handler *handlers;
264 /* Last element in handlers chain. */
265 struct jcf_handler *last_handler;
267 /* Number of exception handlers for the current method. */
268 int num_handlers;
270 /* Number of finalizers we are currently nested within. */
271 int num_finalizers;
273 /* If non-NULL, use this for the return value. */
274 tree return_value_decl;
276 /* Information about the current switch statement. */
277 struct jcf_switch_state *sw_state;
279 /* The count of jsr instructions that have been emitted. */
280 long num_jsrs;
283 static void generate_bytecode_insns (tree, int, struct jcf_partial *);
284 static struct chunk * alloc_chunk (struct chunk *, unsigned char *,
285 int, struct obstack *);
286 static unsigned char * append_chunk (unsigned char *, int,
287 struct jcf_partial *);
288 static void append_chunk_copy (unsigned char *, int, struct jcf_partial *);
289 static struct jcf_block * gen_jcf_label (struct jcf_partial *);
290 static void finish_jcf_block (struct jcf_partial *);
291 static void define_jcf_label (struct jcf_block *, struct jcf_partial *);
292 static struct jcf_block * get_jcf_label_here (struct jcf_partial *);
293 static void put_linenumber (int, struct jcf_partial *);
294 static void localvar_alloc (tree, struct jcf_partial *);
295 static void maybe_free_localvar (tree, struct jcf_partial *, int);
296 static int get_access_flags (tree);
297 static void write_chunks (FILE *, struct chunk *);
298 static int adjust_typed_op (tree, int);
299 static void generate_bytecode_conditional (tree, struct jcf_block *,
300 struct jcf_block *, int,
301 struct jcf_partial *);
302 static void generate_bytecode_return (tree, struct jcf_partial *);
303 static void perform_relocations (struct jcf_partial *);
304 static void init_jcf_state (struct jcf_partial *, struct obstack *);
305 static void init_jcf_method (struct jcf_partial *, tree);
306 static void release_jcf_state (struct jcf_partial *);
307 static struct chunk * generate_classfile (tree, struct jcf_partial *);
308 static struct jcf_handler *alloc_handler (struct jcf_block *,
309 struct jcf_block *,
310 struct jcf_partial *);
311 static void emit_iinc (tree, HOST_WIDE_INT, struct jcf_partial *);
312 static void emit_reloc (HOST_WIDE_INT, int, struct jcf_block *,
313 struct jcf_partial *);
314 static void push_constant1 (HOST_WIDE_INT, struct jcf_partial *);
315 static void push_constant2 (HOST_WIDE_INT, struct jcf_partial *);
316 static void push_int_const (HOST_WIDE_INT, struct jcf_partial *);
317 static int find_constant_wide (HOST_WIDE_INT, HOST_WIDE_INT,
318 struct jcf_partial *);
319 static void push_long_const (HOST_WIDE_INT, HOST_WIDE_INT,
320 struct jcf_partial *);
321 static int find_constant_index (tree, struct jcf_partial *);
324 static void field_op (tree, int, struct jcf_partial *);
325 static void maybe_wide (int, int, struct jcf_partial *);
326 static void emit_dup (int, int, struct jcf_partial *);
327 static void emit_pop (int, struct jcf_partial *);
328 static void emit_load_or_store (tree, int, struct jcf_partial *);
329 static void emit_load (tree, struct jcf_partial *);
330 static void emit_store (tree, struct jcf_partial *);
331 static void emit_unop (enum java_opcode, tree, struct jcf_partial *);
332 static void emit_binop (enum java_opcode, tree, struct jcf_partial *);
335 static void emit_switch_reloc (struct jcf_block *, struct jcf_partial *);
336 static void emit_case_reloc (struct jcf_relocation *, struct jcf_partial *);
337 static void emit_if (struct jcf_block *, int, int, struct jcf_partial *);
338 static void emit_goto (struct jcf_block *, struct jcf_partial *);
339 static void emit_jsr (struct jcf_block *, struct jcf_partial *);
340 static void call_cleanups (struct jcf_block *, struct jcf_partial *);
341 static char *make_class_file_name (tree);
342 static unsigned char *append_synthetic_attribute (struct jcf_partial *);
343 static void append_deprecated_attribute (struct jcf_partial *);
344 static void append_innerclasses_attribute (struct jcf_partial *, tree);
345 static void append_innerclasses_attribute_entry (struct jcf_partial *, tree, tree);
346 static void append_gcj_attribute (struct jcf_partial *, tree);
348 /* Utility macros for appending (big-endian) data to a buffer.
349 We assume a local variable 'ptr' points into where we want to
350 write next, and we assume enough space has been allocated. */
352 #ifdef ENABLE_JC1_CHECKING
353 static int CHECK_PUT (void *, struct jcf_partial *, int);
355 static int
356 CHECK_PUT (void *ptr, struct jcf_partial *state, int i)
358 if ((unsigned char *) ptr < state->chunk->data
359 || (unsigned char *) ptr + i > state->chunk->data + state->chunk->size)
360 abort ();
362 return 0;
364 #else
365 #define CHECK_PUT(PTR, STATE, I) ((void)0)
366 #endif
368 #define PUT1(X) (CHECK_PUT(ptr, state, 1), *ptr++ = (X))
369 #define PUT2(X) (PUT1((X) >> 8), PUT1((X) & 0xFF))
370 #define PUT4(X) (PUT2((X) >> 16), PUT2((X) & 0xFFFF))
371 #define PUTN(P, N) (CHECK_PUT(ptr, state, N), memcpy(ptr, P, N), ptr += (N))
373 /* There are some cases below where CHECK_PUT is guaranteed to fail.
374 Use the following macros in those specific cases. */
375 #define UNSAFE_PUT1(X) (*ptr++ = (X))
376 #define UNSAFE_PUT2(X) (UNSAFE_PUT1((X) >> 8), UNSAFE_PUT1((X) & 0xFF))
377 #define UNSAFE_PUT4(X) (UNSAFE_PUT2((X) >> 16), UNSAFE_PUT2((X) & 0xFFFF))
378 #define UNSAFE_PUTN(P, N) (memcpy(ptr, P, N), ptr += (N))
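/* A sketch of how the PUT macros are used, assuming `ptr' points into
   state->chunk->data with enough space reserved: writing an attribute
   header (a u2 name index followed by a u4 length) looks like

       PUT2 (name_index);     /* high byte first */
       PUT4 (length);

   with CHECK_PUT verifying the chunk bounds when ENABLE_JC1_CHECKING
   is defined.  */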
381 /* Allocate a new chunk on obstack WORK, and link it in after LAST.
382 Set the data and size fields to DATA and SIZE, respectively.
383 However, if DATA is NULL and SIZE>0, allocate a buffer as well. */
385 static struct chunk *
386 alloc_chunk (struct chunk *last, unsigned char *data,
387 int size, struct obstack *work)
389 struct chunk *chunk = (struct chunk *)
390 obstack_alloc (work, sizeof(struct chunk));
392 if (data == NULL && size > 0)
393 data = obstack_alloc (work, size);
395 chunk->next = NULL;
396 chunk->data = data;
397 chunk->size = size;
398 if (last != NULL)
399 last->next = chunk;
400 return chunk;
403 #ifdef ENABLE_JC1_CHECKING
404 static int CHECK_OP (struct jcf_partial *);
406 static int
407 CHECK_OP (struct jcf_partial *state)
409 if (state->bytecode.ptr > state->bytecode.limit)
410 abort ();
412 return 0;
414 #else
415 #define CHECK_OP(STATE) ((void) 0)
416 #endif
418 static unsigned char *
419 append_chunk (unsigned char *data, int size, struct jcf_partial *state)
421 state->chunk = alloc_chunk (state->chunk, data, size, state->chunk_obstack);
422 if (state->first == NULL)
423 state->first = state->chunk;
424 return state->chunk->data;
427 static void
428 append_chunk_copy (unsigned char *data, int size, struct jcf_partial *state)
430 unsigned char *ptr = append_chunk (NULL, size, state);
431 memcpy (ptr, data, size);
434 static struct jcf_block *
435 gen_jcf_label (struct jcf_partial *state)
437 struct jcf_block *block = (struct jcf_block *)
438 obstack_alloc (state->chunk_obstack, sizeof (struct jcf_block));
439 block->next = NULL;
440 block->linenumber = -1;
441 block->pc = UNDEFINED_PC;
442 return block;
445 static void
446 finish_jcf_block (struct jcf_partial *state)
448 struct jcf_block *block = state->last_block;
449 struct jcf_relocation *reloc;
450 int code_length = BUFFER_LENGTH (&state->bytecode);
451 int pc = state->code_length;
452 append_chunk_copy (state->bytecode.data, code_length, state);
453 BUFFER_RESET (&state->bytecode);
454 block->v.chunk = state->chunk;
456 /* Calculate code_length to the maximum value it can have. */
457 pc += block->v.chunk->size;
458 for (reloc = block->u.relocations; reloc != NULL; reloc = reloc->next)
460 int kind = reloc->kind;
461 if (kind == SWITCH_ALIGN_RELOC)
462 pc += 3;
463 else if (kind > BLOCK_START_RELOC)
464 pc += 2; /* 2-byte offset may grow to 4-byte offset */
465 else if (kind < -1)
466 pc += 5; /* May need to add a goto_w. */
468 state->code_length = pc;
471 static void
472 define_jcf_label (struct jcf_block *label, struct jcf_partial *state)
474 if (state->last_block != NULL)
475 finish_jcf_block (state);
476 label->pc = state->code_length;
477 if (state->blocks == NULL)
478 state->blocks = label;
479 else
480 state->last_block->next = label;
481 state->last_block = label;
482 label->next = NULL;
483 label->u.relocations = NULL;
486 static struct jcf_block *
487 get_jcf_label_here (struct jcf_partial *state)
489 if (state->last_block != NULL && BUFFER_LENGTH (&state->bytecode) == 0)
490 return state->last_block;
491 else
493 struct jcf_block *label = gen_jcf_label (state);
494 define_jcf_label (label, state);
495 return label;
499 /* Note a line number entry for the current PC and given LINE. */
501 static void
502 put_linenumber (int line, struct jcf_partial *state)
504 struct jcf_block *label = get_jcf_label_here (state);
505 if (label->linenumber > 0)
507 label = gen_jcf_label (state);
508 define_jcf_label (label, state);
510 label->linenumber = line;
511 state->linenumber_count++;
514 /* Allocate a new jcf_handler, for a catch clause that catches exceptions
515 in the range (START_LABEL, END_LABEL). */
517 static struct jcf_handler *
518 alloc_handler (struct jcf_block *start_label, struct jcf_block *end_label,
519 struct jcf_partial *state)
521 struct jcf_handler *handler = (struct jcf_handler *)
522 obstack_alloc (state->chunk_obstack, sizeof (struct jcf_handler));
523 handler->start_label = start_label;
524 handler->end_label = end_label;
525 handler->handler_label = get_jcf_label_here (state);
526 if (state->handlers == NULL)
527 state->handlers = handler;
528 else
529 state->last_handler->next = handler;
530 state->last_handler = handler;
531 handler->next = NULL;
532 state->num_handlers++;
533 return handler;
537 /* The index of the JVM local variable allocated for this DECL.
538 This is assigned when generating .class files;
539 contrast DECL_LOCAL_SLOT_NUMBER which is set when *reading* a .class file.
540 (We don't allocate DECL_LANG_SPECIFIC for locals from Java source code.) */
542 #define DECL_LOCAL_INDEX(DECL) DECL_ALIGN(DECL)
544 struct localvar_info
546 struct localvar_info *next;
548 tree decl;
549 struct jcf_block *start_label;
550 struct jcf_block *end_label;
553 #define localvar_buffer ((struct localvar_info**) state->localvars.data)
554 #define localvar_max \
555 ((struct localvar_info**) state->localvars.ptr - localvar_buffer)
557 static void
558 localvar_alloc (tree decl, struct jcf_partial *state)
560 struct jcf_block *start_label = get_jcf_label_here (state);
561 int wide = TYPE_IS_WIDE (TREE_TYPE (decl));
562 int index;
563 register struct localvar_info *info;
564 register struct localvar_info **ptr = localvar_buffer;
565 register struct localvar_info **limit
566 = (struct localvar_info**) state->localvars.ptr;
567 for (index = 0; ptr < limit; index++, ptr++)
569 if (ptr[0] == NULL
570 && (! wide || ((ptr+1) < limit && ptr[1] == NULL)))
571 break;
573 if (ptr == limit)
575 buffer_grow (&state->localvars, 2 * sizeof (struct localvar_info*));
576 ptr = (struct localvar_info**) state->localvars.data + index;
577 state->localvars.ptr = (unsigned char *) (ptr + 1 + wide);
579 info = (struct localvar_info *)
580 obstack_alloc (state->chunk_obstack, sizeof (struct localvar_info));
581 ptr[0] = info;
582 if (wide)
583 ptr[1] = (struct localvar_info *)(~0);
584 DECL_LOCAL_INDEX (decl) = index;
585 info->decl = decl;
586 info->start_label = start_label;
588 if (debug_info_level > DINFO_LEVEL_TERSE
589 && DECL_NAME (decl) != NULL_TREE)
591 /* Generate debugging info. */
592 info->next = NULL;
593 if (state->last_lvar != NULL)
594 state->last_lvar->next = info;
595 else
596 state->first_lvar = info;
597 state->last_lvar = info;
598 state->lvar_count++;
602 static void
603 maybe_free_localvar (tree decl, struct jcf_partial *state, int really)
605 struct jcf_block *end_label = get_jcf_label_here (state);
606 int index = DECL_LOCAL_INDEX (decl);
607 register struct localvar_info **ptr = &localvar_buffer [index];
608 register struct localvar_info *info = *ptr;
609 int wide = TYPE_IS_WIDE (TREE_TYPE (decl));
611 info->end_label = end_label;
613 if (info->decl != decl)
614 abort ();
615 if (! really)
616 return;
617 ptr[0] = NULL;
618 if (wide)
620 if (ptr[1] != (struct localvar_info *)(~0))
621 abort ();
622 ptr[1] = NULL;
627 #define STACK_TARGET 1
628 #define IGNORE_TARGET 2
630 /* Get the access flags of a class (TYPE_DECL), a method (FUNCTION_DECL), or
631 a field (FIELD_DECL or VAR_DECL, if static), as encoded in a .class file. */
633 static int
634 get_access_flags (tree decl)
636 int flags = 0;
637 int isfield = TREE_CODE (decl) == FIELD_DECL || TREE_CODE (decl) == VAR_DECL;
638 if (CLASS_PUBLIC (decl)) /* same as FIELD_PUBLIC and METHOD_PUBLIC */
639 flags |= ACC_PUBLIC;
640 if (CLASS_FINAL (decl)) /* same as FIELD_FINAL and METHOD_FINAL */
641 flags |= ACC_FINAL;
642 if (isfield || TREE_CODE (decl) == FUNCTION_DECL)
644 if (TREE_PROTECTED (decl))
645 flags |= ACC_PROTECTED;
646 if (TREE_PRIVATE (decl))
647 flags |= ACC_PRIVATE;
649 else if (TREE_CODE (decl) == TYPE_DECL)
651 if (CLASS_SUPER (decl))
652 flags |= ACC_SUPER;
653 if (CLASS_ABSTRACT (decl))
654 flags |= ACC_ABSTRACT;
655 if (CLASS_INTERFACE (decl))
656 flags |= ACC_INTERFACE;
657 if (CLASS_STATIC (decl))
658 flags |= ACC_STATIC;
659 if (CLASS_PRIVATE (decl))
660 flags |= ACC_PRIVATE;
661 if (CLASS_PROTECTED (decl))
662 flags |= ACC_PROTECTED;
663 if (ANONYMOUS_CLASS_P (TREE_TYPE (decl))
664 || LOCAL_CLASS_P (TREE_TYPE (decl)))
665 flags |= ACC_PRIVATE;
666 if (CLASS_STRICTFP (decl))
667 flags |= ACC_STRICT;
669 else
670 abort ();
672 if (TREE_CODE (decl) == FUNCTION_DECL)
674 if (METHOD_NATIVE (decl))
675 flags |= ACC_NATIVE;
676 if (METHOD_STATIC (decl))
677 flags |= ACC_STATIC;
678 if (METHOD_SYNCHRONIZED (decl))
679 flags |= ACC_SYNCHRONIZED;
680 if (METHOD_ABSTRACT (decl))
681 flags |= ACC_ABSTRACT;
682 if (METHOD_STRICTFP (decl))
683 flags |= ACC_STRICT;
685 if (isfield)
687 if (FIELD_STATIC (decl))
688 flags |= ACC_STATIC;
689 if (FIELD_VOLATILE (decl))
690 flags |= ACC_VOLATILE;
691 if (FIELD_TRANSIENT (decl))
692 flags |= ACC_TRANSIENT;
694 return flags;
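/* For example, a `public static final' field accumulates
   ACC_PUBLIC | ACC_STATIC | ACC_FINAL above, i.e.
   0x0001 | 0x0008 | 0x0010 == 0x0019, which is the value written into the
   .class file's access_flags slot.  */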
697 /* Write the list of segments starting at CHUNKS to STREAM. */
699 static void
700 write_chunks (FILE* stream, struct chunk *chunks)
702 for (; chunks != NULL; chunks = chunks->next)
703 fwrite (chunks->data, chunks->size, 1, stream);
706 /* Push a 1-word constant in the constant pool at the given INDEX.
707 (Caller is responsible for doing NOTE_PUSH.) */
709 static void
710 push_constant1 (HOST_WIDE_INT index, struct jcf_partial *state)
712 RESERVE (3);
713 if (index < 256)
715 OP1 (OPCODE_ldc);
716 OP1 (index);
718 else
720 OP1 (OPCODE_ldc_w);
721 OP2 (index);
725 /* Push a 2-word constant in the constant pool at the given INDEX.
726 (Caller is responsible for doing NOTE_PUSH.) */
728 static void
729 push_constant2 (HOST_WIDE_INT index, struct jcf_partial *state)
731 RESERVE (3);
732 OP1 (OPCODE_ldc2_w);
733 OP2 (index);
736 /* Push 32-bit integer constant on VM stack.
737 Caller is responsible for doing NOTE_PUSH. */
739 static void
740 push_int_const (HOST_WIDE_INT i, struct jcf_partial *state)
742 RESERVE(3);
743 if (i >= -1 && i <= 5)
744 OP1(OPCODE_iconst_0 + i);
745 else if (i >= -128 && i < 128)
747 OP1(OPCODE_bipush);
748 OP1(i);
750 else if (i >= -32768 && i < 32768)
752 OP1(OPCODE_sipush);
753 OP2(i);
755 else
757 i = find_constant1 (&state->cpool, CONSTANT_Integer,
758 (jword)(i & 0xFFFFFFFF));
759 push_constant1 (i, state);
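/* Examples of the opcode selection above: push_int_const (3, state) emits
   iconst_3; push_int_const (-1, state) emits iconst_m1;
   push_int_const (100, state) emits bipush 100; push_int_const (1000, state)
   emits sipush 1000; and push_int_const (100000, state) falls back to an
   ldc/ldc_w of a CONSTANT_Integer pool entry.  */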
763 static int
764 find_constant_wide (HOST_WIDE_INT lo, HOST_WIDE_INT hi,
765 struct jcf_partial *state)
767 HOST_WIDE_INT w1, w2;
768 lshift_double (lo, hi, -32, 64, &w1, &w2, 1);
769 return find_constant2 (&state->cpool, CONSTANT_Long,
770 (jword)(w1 & 0xFFFFFFFF), (jword)(lo & 0xFFFFFFFF));
773 /* Find or allocate a constant pool entry for the given VALUE.
774 Return the index in the constant pool. */
776 static int
777 find_constant_index (tree value, struct jcf_partial *state)
779 if (TREE_CODE (value) == INTEGER_CST)
781 if (TYPE_PRECISION (TREE_TYPE (value)) <= 32)
782 return find_constant1 (&state->cpool, CONSTANT_Integer,
783 (jword)(TREE_INT_CST_LOW (value) & 0xFFFFFFFF));
784 else
785 return find_constant_wide (TREE_INT_CST_LOW (value),
786 TREE_INT_CST_HIGH (value), state);
788 else if (TREE_CODE (value) == REAL_CST)
790 long words[2];
792 real_to_target (words, &TREE_REAL_CST (value),
793 TYPE_MODE (TREE_TYPE (value)));
794 words[0] &= 0xffffffff;
795 words[1] &= 0xffffffff;
797 if (TYPE_PRECISION (TREE_TYPE (value)) == 32)
798 return find_constant1 (&state->cpool, CONSTANT_Float, (jword)words[0]);
799 else
800 return find_constant2 (&state->cpool, CONSTANT_Double,
801 (jword)words[1-FLOAT_WORDS_BIG_ENDIAN],
802 (jword)words[FLOAT_WORDS_BIG_ENDIAN]);
804 else if (TREE_CODE (value) == STRING_CST)
805 return find_string_constant (&state->cpool, value);
807 else
808 abort ();
811 /* Push 64-bit long constant on VM stack.
812 Caller is responsible for doing NOTE_PUSH. */
814 static void
815 push_long_const (HOST_WIDE_INT lo, HOST_WIDE_INT hi, struct jcf_partial *state)
817 HOST_WIDE_INT highpart, dummy;
818 jint lowpart = WORD_TO_INT (lo);
820 rshift_double (lo, hi, 32, 64, &highpart, &dummy, 1);
822 if (highpart == 0 && (lowpart == 0 || lowpart == 1))
824 RESERVE(1);
825 OP1(OPCODE_lconst_0 + lowpart);
827 else if ((highpart == 0 && lowpart > 0 && lowpart < 32768)
828 || (highpart == -1 && lowpart < 0 && lowpart >= -32768))
830 push_int_const (lowpart, state);
831 RESERVE (1);
832 OP1 (OPCODE_i2l);
834 else
835 push_constant2 (find_constant_wide (lo, hi, state), state);
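/* Examples: push_long_const (0, 0, state) and push_long_const (1, 0, state)
   emit lconst_0 and lconst_1; a small value such as 7L becomes bipush 7
   followed by i2l; anything else (e.g. 10000000000L) is loaded with ldc2_w
   from a CONSTANT_Long pool entry via find_constant_wide.  */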
838 static void
839 field_op (tree field, int opcode, struct jcf_partial *state)
841 int index = find_fieldref_index (&state->cpool, field);
842 RESERVE (3);
843 OP1 (opcode);
844 OP2 (index);
847 /* Returns an integer in the range 0 (for 'int') through 4 (for object
848 reference) to 7 (for 'short') which matches the pattern of how JVM
849 opcodes typically depend on the operand type. */
851 static int
852 adjust_typed_op (tree type, int max)
854 switch (TREE_CODE (type))
856 case POINTER_TYPE:
857 case RECORD_TYPE: return 4;
858 case BOOLEAN_TYPE:
859 return TYPE_PRECISION (type) == 32 || max < 5 ? 0 : 5;
860 case CHAR_TYPE:
861 return TYPE_PRECISION (type) == 32 || max < 6 ? 0 : 6;
862 case INTEGER_TYPE:
863 switch (TYPE_PRECISION (type))
865 case 8: return max < 5 ? 0 : 5;
866 case 16: return max < 7 ? 0 : 7;
867 case 32: return 0;
868 case 64: return 1;
870 break;
871 case REAL_TYPE:
872 switch (TYPE_PRECISION (type))
874 case 32: return 2;
875 case 64: return 3;
877 break;
878 default:
879 break;
881 abort ();
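/* For illustration: callers add this value to a base opcode, relying on the
   JVM's opcode layout.  E.g. OPCODE_ireturn + adjust_typed_op (type, 4)
   yields ireturn/lreturn/freturn/dreturn/areturn for int/long/float/double/
   reference, and OPCODE_iaload + adjust_typed_op (type, 7) additionally maps
   byte, char and short to baload, caload and saload.  */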
884 static void
885 maybe_wide (int opcode, int index, struct jcf_partial *state)
887 if (index >= 256)
889 RESERVE (4);
890 OP1 (OPCODE_wide);
891 OP1 (opcode);
892 OP2 (index);
894 else
896 RESERVE (2);
897 OP1 (opcode);
898 OP1 (index);
902 /* Compile code to duplicate with offset, where
903 SIZE is the size of the stack item to duplicate (1 or 2), and
904 OFFSET is where to insert the result (must be 0, 1, or 2).
905 (The new words get inserted at stack[SP-size-offset].) */
907 static void
908 emit_dup (int size, int offset, struct jcf_partial *state)
910 int kind;
911 if (size == 0)
912 return;
913 RESERVE(1);
914 if (offset == 0)
915 kind = size == 1 ? OPCODE_dup : OPCODE_dup2;
916 else if (offset == 1)
917 kind = size == 1 ? OPCODE_dup_x1 : OPCODE_dup2_x1;
918 else if (offset == 2)
919 kind = size == 1 ? OPCODE_dup_x2 : OPCODE_dup2_x2;
920 else
921 abort();
922 OP1 (kind);
923 NOTE_PUSH (size);
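/* Examples: emit_dup (1, 0, state) emits dup; emit_dup (2, 0, state) emits
   dup2; emit_dup (1, 1, state) emits dup_x1, turning the stack ..., a, b
   into ..., b, a, b -- the kind of shuffle used below when a computed value
   must also be left on the stack as the result of an assignment.  */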
926 static void
927 emit_pop (int size, struct jcf_partial *state)
929 RESERVE (1);
930 OP1 (OPCODE_pop - 1 + size);
933 static void
934 emit_iinc (tree var, HOST_WIDE_INT value, struct jcf_partial *state)
936 int slot = DECL_LOCAL_INDEX (var);
938 if (value < -128 || value > 127 || slot >= 256)
940 RESERVE (6);
941 OP1 (OPCODE_wide);
942 OP1 (OPCODE_iinc);
943 OP2 (slot);
944 OP2 (value);
946 else
948 RESERVE (3);
949 OP1 (OPCODE_iinc);
950 OP1 (slot);
951 OP1 (value);
955 static void
956 emit_load_or_store (tree var, /* Variable to load from or store into. */
957 int opcode, /* Either OPCODE_iload or OPCODE_istore. */
958 struct jcf_partial *state)
960 tree type = TREE_TYPE (var);
961 int kind = adjust_typed_op (type, 4);
962 int index = DECL_LOCAL_INDEX (var);
963 if (index <= 3)
965 RESERVE (1);
966 OP1 (opcode + 5 + 4 * kind + index); /* [ilfda]{load,store}_[0123] */
968 else
969 maybe_wide (opcode + kind, index, state); /* [ilfda]{load,store} */
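/* Examples: an int in local slot 2 loaded with OPCODE_iload uses the 1-byte
   short form iload_2 (21 + 5 + 4*0 + 2 == 28); a reference in slot 1
   becomes aload_1; slot 17 emits iload 17; and slots >= 256 get a wide
   prefix via maybe_wide.  */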
972 static void
973 emit_load (tree var, struct jcf_partial *state)
975 emit_load_or_store (var, OPCODE_iload, state);
976 NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (var)) ? 2 : 1);
979 static void
980 emit_store (tree var, struct jcf_partial *state)
982 emit_load_or_store (var, OPCODE_istore, state);
983 NOTE_POP (TYPE_IS_WIDE (TREE_TYPE (var)) ? 2 : 1);
986 static void
987 emit_unop (enum java_opcode opcode, tree type ATTRIBUTE_UNUSED,
988 struct jcf_partial *state)
990 RESERVE(1);
991 OP1 (opcode);
994 static void
995 emit_binop (enum java_opcode opcode, tree type, struct jcf_partial *state)
997 int size = TYPE_IS_WIDE (type) ? 2 : 1;
998 RESERVE(1);
999 OP1 (opcode);
1000 NOTE_POP (size);
1003 static void
1004 emit_reloc (HOST_WIDE_INT value, int kind,
1005 struct jcf_block *target, struct jcf_partial *state)
1007 struct jcf_relocation *reloc = (struct jcf_relocation *)
1008 obstack_alloc (state->chunk_obstack, sizeof (struct jcf_relocation));
1009 struct jcf_block *block = state->last_block;
1010 reloc->next = block->u.relocations;
1011 block->u.relocations = reloc;
1012 reloc->offset = BUFFER_LENGTH (&state->bytecode);
1013 reloc->label = target;
1014 reloc->kind = kind;
1015 if (kind == 0 || kind == BLOCK_START_RELOC)
1016 OP4 (value);
1017 else if (kind != SWITCH_ALIGN_RELOC)
1018 OP2 (value);
1021 static void
1022 emit_switch_reloc (struct jcf_block *label, struct jcf_partial *state)
1024 emit_reloc (RELOCATION_VALUE_0, BLOCK_START_RELOC, label, state);
1027 /* Similar to emit_switch_reloc,
1028 but re-uses an existing case reloc. */
1030 static void
1031 emit_case_reloc (struct jcf_relocation *reloc, struct jcf_partial *state)
1033 struct jcf_block *block = state->last_block;
1034 reloc->next = block->u.relocations;
1035 block->u.relocations = reloc;
1036 reloc->offset = BUFFER_LENGTH (&state->bytecode);
1037 reloc->kind = BLOCK_START_RELOC;
1038 OP4 (0);
1041 /* Emit a conditional jump to TARGET with a 2-byte relative jump offset.
1042 The opcode is OPCODE, the inverted opcode is INV_OPCODE. */
1044 static void
1045 emit_if (struct jcf_block *target, int opcode, int inv_opcode,
1046 struct jcf_partial *state)
1048 RESERVE(3);
1049 OP1 (opcode);
1050 /* value is 1 byte from reloc back to start of instruction. */
1051 emit_reloc (RELOCATION_VALUE_1, - inv_opcode, target, state);
1054 static void
1055 emit_goto (struct jcf_block *target, struct jcf_partial *state)
1057 RESERVE(3);
1058 OP1 (OPCODE_goto);
1059 /* Value is 1 byte from reloc back to start of instruction. */
1060 emit_reloc (RELOCATION_VALUE_1, OPCODE_goto_w, target, state);
1063 static void
1064 emit_jsr (struct jcf_block *target, struct jcf_partial *state)
1066 RESERVE(3);
1067 OP1 (OPCODE_jsr);
1068 /* Value is 1 byte from reloc back to start of instruction. */
1069 emit_reloc (RELOCATION_VALUE_1, OPCODE_jsr_w, target, state);
1070 state->num_jsrs++;
1073 /* Generate code to evaluate EXP. If the result is true,
1074 branch to TRUE_LABEL; otherwise, branch to FALSE_LABEL.
1075 TRUE_BRANCH_FIRST is a code generation hint that the
1076 TRUE_LABEL may follow right after this. (The idea is that we
1077 may be able to optimize away GOTO TRUE_LABEL; TRUE_LABEL:) */
1079 static void
1080 generate_bytecode_conditional (tree exp,
1081 struct jcf_block *true_label,
1082 struct jcf_block *false_label,
1083 int true_branch_first,
1084 struct jcf_partial *state)
1086 tree exp0, exp1, type;
1087 int save_SP = state->code_SP;
1088 enum java_opcode op, negop;
1089 switch (TREE_CODE (exp))
1091 case INTEGER_CST:
1092 emit_goto (integer_zerop (exp) ? false_label : true_label, state);
1093 break;
1094 case COND_EXPR:
1096 struct jcf_block *then_label = gen_jcf_label (state);
1097 struct jcf_block *else_label = gen_jcf_label (state);
1098 int save_SP_before, save_SP_after;
1099 generate_bytecode_conditional (TREE_OPERAND (exp, 0),
1100 then_label, else_label, 1, state);
1101 define_jcf_label (then_label, state);
1102 save_SP_before = state->code_SP;
1103 generate_bytecode_conditional (TREE_OPERAND (exp, 1),
1104 true_label, false_label, 1, state);
1105 save_SP_after = state->code_SP;
1106 state->code_SP = save_SP_before;
1107 define_jcf_label (else_label, state);
1108 generate_bytecode_conditional (TREE_OPERAND (exp, 2),
1109 true_label, false_label,
1110 true_branch_first, state);
1111 if (state->code_SP != save_SP_after)
1112 abort ();
1114 break;
1115 case TRUTH_NOT_EXPR:
1116 generate_bytecode_conditional (TREE_OPERAND (exp, 0), false_label,
1117 true_label, ! true_branch_first, state);
1118 break;
1119 case TRUTH_ANDIF_EXPR:
1121 struct jcf_block *next_label = gen_jcf_label (state);
1122 generate_bytecode_conditional (TREE_OPERAND (exp, 0),
1123 next_label, false_label, 1, state);
1124 define_jcf_label (next_label, state);
1125 generate_bytecode_conditional (TREE_OPERAND (exp, 1),
1126 true_label, false_label, 1, state);
1128 break;
1129 case TRUTH_ORIF_EXPR:
1131 struct jcf_block *next_label = gen_jcf_label (state);
1132 generate_bytecode_conditional (TREE_OPERAND (exp, 0),
1133 true_label, next_label, 1, state);
1134 define_jcf_label (next_label, state);
1135 generate_bytecode_conditional (TREE_OPERAND (exp, 1),
1136 true_label, false_label, 1, state);
1138 break;
1139 compare_1:
1140 /* Assuming op is one of the 2-operand if_icmp<COND> instructions,
1141 set it to the corresponding 1-operand if<COND> instruction. */
1142 op = op - 6;
1143 /* FALLTHROUGH */
1144 compare_2:
1145 /* The opcodes with their inverses are allocated in pairs.
1146 E.g. The inverse of if_icmplt (161) is if_icmpge (162). */
1147 negop = (op & 1) ? op + 1 : op - 1;
1148 compare_2_ptr:
1149 if (true_branch_first)
1151 emit_if (false_label, negop, op, state);
1152 emit_goto (true_label, state);
1154 else
1156 emit_if (true_label, op, negop, state);
1157 emit_goto (false_label, state);
1159 break;
1160 case EQ_EXPR:
1161 op = OPCODE_if_icmpeq;
1162 goto compare;
1163 case NE_EXPR:
1164 op = OPCODE_if_icmpne;
1165 goto compare;
1166 case GT_EXPR:
1167 op = OPCODE_if_icmpgt;
1168 goto compare;
1169 case LT_EXPR:
1170 op = OPCODE_if_icmplt;
1171 goto compare;
1172 case GE_EXPR:
1173 op = OPCODE_if_icmpge;
1174 goto compare;
1175 case LE_EXPR:
1176 op = OPCODE_if_icmple;
1177 goto compare;
1178 compare:
1179 exp0 = TREE_OPERAND (exp, 0);
1180 exp1 = TREE_OPERAND (exp, 1);
1181 type = TREE_TYPE (exp0);
1182 switch (TREE_CODE (type))
1184 int opf;
1185 case POINTER_TYPE: case RECORD_TYPE:
1186 switch (TREE_CODE (exp))
1188 case EQ_EXPR: op = OPCODE_if_acmpeq; break;
1189 case NE_EXPR: op = OPCODE_if_acmpne; break;
1190 default: abort();
1192 if (integer_zerop (exp1) || integer_zerop (exp0))
1194 generate_bytecode_insns (integer_zerop (exp0) ? exp1 : exp0,
1195 STACK_TARGET, state);
1196 op = op + (OPCODE_ifnull - OPCODE_if_acmpeq);
1197 negop = (op & 1) ? op - 1 : op + 1;
1198 NOTE_POP (1);
1199 goto compare_2_ptr;
1201 generate_bytecode_insns (exp0, STACK_TARGET, state);
1202 generate_bytecode_insns (exp1, STACK_TARGET, state);
1203 NOTE_POP (2);
1204 goto compare_2;
1205 case REAL_TYPE:
1206 generate_bytecode_insns (exp0, STACK_TARGET, state);
1207 generate_bytecode_insns (exp1, STACK_TARGET, state);
1208 if (op == OPCODE_if_icmplt || op == OPCODE_if_icmple)
1209 opf = OPCODE_fcmpg;
1210 else
1211 opf = OPCODE_fcmpl;
1212 if (TYPE_PRECISION (type) > 32)
1214 opf += 2;
1215 NOTE_POP (4);
1217 else
1218 NOTE_POP (2);
1219 RESERVE (1);
1220 OP1 (opf);
1221 goto compare_1;
1222 case INTEGER_TYPE:
1223 if (TYPE_PRECISION (type) > 32)
1225 generate_bytecode_insns (exp0, STACK_TARGET, state);
1226 generate_bytecode_insns (exp1, STACK_TARGET, state);
1227 NOTE_POP (4);
1228 RESERVE (1);
1229 OP1 (OPCODE_lcmp);
1230 goto compare_1;
1232 /* FALLTHROUGH */
1233 default:
1234 if (integer_zerop (exp1))
1236 generate_bytecode_insns (exp0, STACK_TARGET, state);
1237 NOTE_POP (1);
1238 goto compare_1;
1240 if (integer_zerop (exp0))
1242 switch (op)
1244 case OPCODE_if_icmplt:
1245 case OPCODE_if_icmpge:
1246 op += 2;
1247 break;
1248 case OPCODE_if_icmpgt:
1249 case OPCODE_if_icmple:
1250 op -= 2;
1251 break;
1252 default:
1253 break;
1255 generate_bytecode_insns (exp1, STACK_TARGET, state);
1256 NOTE_POP (1);
1257 goto compare_1;
1259 generate_bytecode_insns (exp0, STACK_TARGET, state);
1260 generate_bytecode_insns (exp1, STACK_TARGET, state);
1261 NOTE_POP (2);
1262 goto compare_2;
1265 default:
1266 generate_bytecode_insns (exp, STACK_TARGET, state);
1267 NOTE_POP (1);
1268 if (true_branch_first)
1270 emit_if (false_label, OPCODE_ifeq, OPCODE_ifne, state);
1271 emit_goto (true_label, state);
1273 else
1275 emit_if (true_label, OPCODE_ifne, OPCODE_ifeq, state);
1276 emit_goto (false_label, state);
1278 break;
1280 if (save_SP != state->code_SP)
1281 abort ();
1284 /* Call pending cleanups i.e. those for surrounding TRY_FINALLY_EXPRs,
1285 but only as far out as LIMIT (since we are about to jump to the
1286 exit label that is LIMIT). */
1288 static void
1289 call_cleanups (struct jcf_block *limit, struct jcf_partial *state)
1291 struct jcf_block *block = state->labeled_blocks;
1292 for (; block != limit; block = block->next)
1294 if (block->pc == PENDING_CLEANUP_PC)
1295 emit_jsr (block, state);
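/* For illustration: each block on the labeled_blocks stack whose pc is
   PENDING_CLEANUP_PC stands for the finalizer of an enclosing
   TRY_FINALLY_EXPR, so a return or break that leaves those regions emits a
   jsr to each finalizer subroutine (innermost first) before the actual
   transfer of control.  */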
1299 static void
1300 generate_bytecode_return (tree exp, struct jcf_partial *state)
1302 tree return_type = TREE_TYPE (TREE_TYPE (state->current_method));
1303 int returns_void = TREE_CODE (return_type) == VOID_TYPE;
1304 int op;
1305 again:
1306 if (exp != NULL)
1308 switch (TREE_CODE (exp))
1310 case COMPOUND_EXPR:
1311 generate_bytecode_insns (TREE_OPERAND (exp, 0), IGNORE_TARGET,
1312 state);
1313 exp = TREE_OPERAND (exp, 1);
1314 goto again;
1315 case COND_EXPR:
1317 struct jcf_block *then_label = gen_jcf_label (state);
1318 struct jcf_block *else_label = gen_jcf_label (state);
1319 generate_bytecode_conditional (TREE_OPERAND (exp, 0),
1320 then_label, else_label, 1, state);
1321 define_jcf_label (then_label, state);
1322 generate_bytecode_return (TREE_OPERAND (exp, 1), state);
1323 define_jcf_label (else_label, state);
1324 generate_bytecode_return (TREE_OPERAND (exp, 2), state);
1326 return;
1327 default:
1328 generate_bytecode_insns (exp,
1329 returns_void ? IGNORE_TARGET
1330 : STACK_TARGET, state);
1333 if (returns_void)
1335 op = OPCODE_return;
1336 call_cleanups (NULL, state);
1338 else
1340 op = OPCODE_ireturn + adjust_typed_op (return_type, 4);
1341 if (state->num_finalizers > 0)
1343 if (state->return_value_decl == NULL_TREE)
1345 state->return_value_decl
1346 = build_decl (VAR_DECL, NULL_TREE, TREE_TYPE (exp));
1347 localvar_alloc (state->return_value_decl, state);
1349 emit_store (state->return_value_decl, state);
1350 call_cleanups (NULL, state);
1351 emit_load (state->return_value_decl, state);
1352 /* If we call maybe_free_localvar (state->return_value_decl, state, 1),
1353 then we risk the saved decl being erroneously re-used in the
1354 finalizer. Instead, we keep the state->return_value_decl
1355 allocated through the rest of the method. This is not
1356 the greatest solution, but it is at least simple and safe. */
1359 RESERVE (1);
1360 OP1 (op);
1363 /* Generate bytecode for sub-expression EXP of METHOD.
1364 TARGET is one of STACK_TARGET or IGNORE_TARGET. */
1366 static void
1367 generate_bytecode_insns (tree exp, int target, struct jcf_partial *state)
1369 tree type, arg;
1370 enum java_opcode jopcode;
1371 int op;
1372 HOST_WIDE_INT value;
1373 int post_op;
1374 int size;
1375 int offset;
1377 if (exp == NULL && target == IGNORE_TARGET)
1378 return;
1380 type = TREE_TYPE (exp);
1382 switch (TREE_CODE (exp))
1384 case BLOCK:
1385 if (BLOCK_EXPR_BODY (exp))
1387 tree local;
1388 tree body = BLOCK_EXPR_BODY (exp);
1389 long jsrs = state->num_jsrs;
1390 for (local = BLOCK_EXPR_DECLS (exp); local; )
1392 tree next = TREE_CHAIN (local);
1393 localvar_alloc (local, state);
1394 local = next;
1396 /* Avoid deep recursion for long blocks. */
1397 while (TREE_CODE (body) == COMPOUND_EXPR)
1399 generate_bytecode_insns (TREE_OPERAND (body, 0), target, state);
1400 body = TREE_OPERAND (body, 1);
1402 generate_bytecode_insns (body, target, state);
1404 for (local = BLOCK_EXPR_DECLS (exp); local; )
1406 tree next = TREE_CHAIN (local);
1407 maybe_free_localvar (local, state, state->num_jsrs <= jsrs);
1408 local = next;
1411 break;
1412 case COMPOUND_EXPR:
1413 generate_bytecode_insns (TREE_OPERAND (exp, 0), IGNORE_TARGET, state);
1414 /* Normally the first operand to a COMPOUND_EXPR must complete
1415 normally. However, in the special case of a do-while
1416 statement this is not necessarily the case. */
1417 if (CAN_COMPLETE_NORMALLY (TREE_OPERAND (exp, 0)))
1418 generate_bytecode_insns (TREE_OPERAND (exp, 1), target, state);
1419 break;
1420 case EXPR_WITH_FILE_LOCATION:
1422 const char *saved_input_filename = input_filename;
1423 tree body = EXPR_WFL_NODE (exp);
1424 int saved_lineno = input_line;
1425 if (body == empty_stmt_node)
1426 break;
1427 input_filename = EXPR_WFL_FILENAME (exp);
1428 input_line = EXPR_WFL_LINENO (exp);
1429 if (EXPR_WFL_EMIT_LINE_NOTE (exp) && input_line > 0
1430 && debug_info_level > DINFO_LEVEL_NONE)
1431 put_linenumber (input_line, state);
1432 generate_bytecode_insns (body, target, state);
1433 input_filename = saved_input_filename;
1434 input_line = saved_lineno;
1436 break;
1437 case INTEGER_CST:
1438 if (target == IGNORE_TARGET) ; /* do nothing */
1439 else if (TREE_CODE (type) == POINTER_TYPE)
1441 if (! integer_zerop (exp))
1442 abort();
1443 RESERVE(1);
1444 OP1 (OPCODE_aconst_null);
1445 NOTE_PUSH (1);
1447 else if (TYPE_PRECISION (type) <= 32)
1449 push_int_const (TREE_INT_CST_LOW (exp), state);
1450 NOTE_PUSH (1);
1452 else
1454 push_long_const (TREE_INT_CST_LOW (exp), TREE_INT_CST_HIGH (exp),
1455 state);
1456 NOTE_PUSH (2);
1458 break;
1459 case REAL_CST:
1461 int prec = TYPE_PRECISION (type) >> 5;
1462 RESERVE(1);
1463 if (real_zerop (exp) && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (exp)))
1464 OP1 (prec == 1 ? OPCODE_fconst_0 : OPCODE_dconst_0);
1465 else if (real_onep (exp))
1466 OP1 (prec == 1 ? OPCODE_fconst_1 : OPCODE_dconst_1);
1467 /* FIXME Should also use fconst_2 for 2.0f.
1468 Also, should use iconst_2/ldc followed by i2f/i2d
1469 for other float/double when the value is a small integer. */
1470 else
1472 offset = find_constant_index (exp, state);
1473 if (prec == 1)
1474 push_constant1 (offset, state);
1475 else
1476 push_constant2 (offset, state);
1478 NOTE_PUSH (prec);
1480 break;
1481 case STRING_CST:
1482 push_constant1 (find_string_constant (&state->cpool, exp), state);
1483 NOTE_PUSH (1);
1484 break;
1485 case VAR_DECL:
1486 if (TREE_STATIC (exp))
1488 field_op (exp, OPCODE_getstatic, state);
1489 NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (exp)) ? 2 : 1);
1490 break;
1492 /* ... fall through ... */
1493 case PARM_DECL:
1494 emit_load (exp, state);
1495 break;
1496 case NON_LVALUE_EXPR:
1497 case INDIRECT_REF:
1498 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
1499 break;
1500 case ARRAY_REF:
1501 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
1502 generate_bytecode_insns (TREE_OPERAND (exp, 1), target, state);
1503 if (target != IGNORE_TARGET)
1505 jopcode = OPCODE_iaload + adjust_typed_op (type, 7);
1506 RESERVE(1);
1507 OP1 (jopcode);
1508 if (! TYPE_IS_WIDE (type))
1509 NOTE_POP (1);
1511 break;
1512 case COMPONENT_REF:
1514 tree obj = TREE_OPERAND (exp, 0);
1515 tree field = TREE_OPERAND (exp, 1);
1516 int is_static = FIELD_STATIC (field);
1517 generate_bytecode_insns (obj,
1518 is_static ? IGNORE_TARGET : target, state);
1519 if (target != IGNORE_TARGET)
1521 if (DECL_NAME (field) == length_identifier_node && !is_static
1522 && TYPE_ARRAY_P (TREE_TYPE (obj)))
1524 RESERVE (1);
1525 OP1 (OPCODE_arraylength);
1527 else
1529 field_op (field, is_static ? OPCODE_getstatic : OPCODE_getfield,
1530 state);
1531 if (! is_static)
1532 NOTE_POP (1);
1533 NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (field)) ? 2 : 1);
1537 break;
1538 case TRUTH_ANDIF_EXPR:
1539 case TRUTH_ORIF_EXPR:
1540 case EQ_EXPR:
1541 case NE_EXPR:
1542 case GT_EXPR:
1543 case LT_EXPR:
1544 case GE_EXPR:
1545 case LE_EXPR:
1547 struct jcf_block *then_label = gen_jcf_label (state);
1548 struct jcf_block *else_label = gen_jcf_label (state);
1549 struct jcf_block *end_label = gen_jcf_label (state);
1550 generate_bytecode_conditional (exp,
1551 then_label, else_label, 1, state);
1552 define_jcf_label (then_label, state);
1553 push_int_const (1, state);
1554 emit_goto (end_label, state);
1555 define_jcf_label (else_label, state);
1556 push_int_const (0, state);
1557 define_jcf_label (end_label, state);
1558 NOTE_PUSH (1);
1560 break;
1561 case COND_EXPR:
1563 struct jcf_block *then_label = gen_jcf_label (state);
1564 struct jcf_block *else_label = gen_jcf_label (state);
1565 struct jcf_block *end_label = gen_jcf_label (state);
1566 generate_bytecode_conditional (TREE_OPERAND (exp, 0),
1567 then_label, else_label, 1, state);
1568 define_jcf_label (then_label, state);
1569 generate_bytecode_insns (TREE_OPERAND (exp, 1), target, state);
1570 if (CAN_COMPLETE_NORMALLY (TREE_OPERAND (exp, 1))
1571 /* Not all expressions have CAN_COMPLETE_NORMALLY set properly. */
1572 || TREE_CODE (TREE_TYPE (exp)) != VOID_TYPE)
1573 emit_goto (end_label, state);
1574 define_jcf_label (else_label, state);
1575 generate_bytecode_insns (TREE_OPERAND (exp, 2), target, state);
1576 define_jcf_label (end_label, state);
1577 /* COND_EXPR can be used in a binop. The stack must be adjusted. */
1578 if (TREE_TYPE (exp) != void_type_node)
1579 NOTE_POP (TYPE_IS_WIDE (TREE_TYPE (exp)) ? 2 : 1);
1581 break;
1582 case CASE_EXPR:
1584 struct jcf_switch_state *sw_state = state->sw_state;
1585 struct jcf_relocation *reloc = (struct jcf_relocation *)
1586 obstack_alloc (state->chunk_obstack, sizeof (struct jcf_relocation));
1587 HOST_WIDE_INT case_value = TREE_INT_CST_LOW (TREE_OPERAND (exp, 0));
1588 reloc->kind = 0;
1589 reloc->label = get_jcf_label_here (state);
1590 reloc->offset = case_value;
1591 reloc->next = sw_state->cases;
1592 sw_state->cases = reloc;
1593 if (sw_state->num_cases == 0)
1595 sw_state->min_case = case_value;
1596 sw_state->max_case = case_value;
1598 else
1600 if (case_value < sw_state->min_case)
1601 sw_state->min_case = case_value;
1602 if (case_value > sw_state->max_case)
1603 sw_state->max_case = case_value;
1605 sw_state->num_cases++;
1607 break;
1608 case DEFAULT_EXPR:
1609 state->sw_state->default_label = get_jcf_label_here (state);
1610 break;
1612 case SWITCH_EXPR:
1614 /* The SWITCH_EXPR has three parts, generated in the following order:
1615 1. the switch_expression (the value used to select the correct case);
1616 2. the switch_body;
1617 3. the switch_instruction (the tableswitch/lookupswitch instruction).
1618 After code generation, we will re-order them in the order 1, 3, 2.
1619 This is to avoid any extra GOTOs. */
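/* For illustration of the heuristic used further down: with cases
   {0, 1, 2, 5} the range max_case - min_case is 5 and 2 * num_cases is 8,
   so a tableswitch is emitted with the unused slots pointing at the default
   label; with cases {0, 1, 2, 100} the range is 100, so the sparser
   lookupswitch (one match/offset pair per case) is used instead.  */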
1620 struct jcf_switch_state sw_state;
1621 struct jcf_block *expression_last; /* Last block of the switch_expression. */
1622 struct jcf_block *body_last; /* Last block of the switch_body. */
1623 struct jcf_block *switch_instruction; /* First block of switch_instruction. */
1624 struct jcf_block *instruction_last; /* Last block of the switch_instruction. */
1625 struct jcf_block *body_block;
1626 int switch_length;
1627 sw_state.prev = state->sw_state;
1628 state->sw_state = &sw_state;
1629 sw_state.cases = NULL;
1630 sw_state.num_cases = 0;
1631 sw_state.default_label = NULL;
1632 generate_bytecode_insns (TREE_OPERAND (exp, 0), STACK_TARGET, state);
1633 expression_last = state->last_block;
1634 /* Force a new block here. */
1635 body_block = gen_jcf_label (state);
1636 define_jcf_label (body_block, state);
1637 generate_bytecode_insns (TREE_OPERAND (exp, 1), IGNORE_TARGET, state);
1638 body_last = state->last_block;
1640 switch_instruction = gen_jcf_label (state);
1641 define_jcf_label (switch_instruction, state);
1642 if (sw_state.default_label == NULL)
1643 sw_state.default_label = gen_jcf_label (state);
1645 if (sw_state.num_cases <= 1)
1647 if (sw_state.num_cases == 0)
1649 emit_pop (1, state);
1650 NOTE_POP (1);
1652 else
1654 push_int_const (sw_state.cases->offset, state);
1655 NOTE_PUSH (1);
1656 emit_if (sw_state.cases->label,
1657 OPCODE_if_icmpeq, OPCODE_if_icmpne, state);
1659 emit_goto (sw_state.default_label, state);
1661 else
1663 HOST_WIDE_INT i;
1664 unsigned HOST_WIDE_INT delta;
1665 /* Copy the chain of relocs into a sorted array. */
1666 struct jcf_relocation **relocs = (struct jcf_relocation **)
1667 xmalloc (sw_state.num_cases * sizeof (struct jcf_relocation *));
1668 /* The relocs array is a buffer with a gap.
1669 The assumption is that cases will normally come in "runs". */
1670 int gap_start = 0;
1671 int gap_end = sw_state.num_cases;
1672 struct jcf_relocation *reloc;
1673 for (reloc = sw_state.cases; reloc != NULL; reloc = reloc->next)
1675 HOST_WIDE_INT case_value = reloc->offset;
1676 while (gap_end < sw_state.num_cases)
1678 struct jcf_relocation *end = relocs[gap_end];
1679 if (case_value <= end->offset)
1680 break;
1681 relocs[gap_start++] = end;
1682 gap_end++;
1684 while (gap_start > 0)
1686 struct jcf_relocation *before = relocs[gap_start-1];
1687 if (case_value >= before->offset)
1688 break;
1689 relocs[--gap_end] = before;
1690 gap_start--;
1692 relocs[gap_start++] = reloc;
1693 /* Note we don't check for duplicates. This is
1694 handled by the parser. */
1697 /* We could have DELTA < 0 if sw_state.min_case is
1698 something like Integer.MIN_VALUE. That is why delta is
1699 unsigned. */
1700 delta = sw_state.max_case - sw_state.min_case;
1701 if (2 * (unsigned) sw_state.num_cases >= delta)
1702 { /* Use tableswitch. */
1703 int index = 0;
1704 RESERVE (13 + 4 * (sw_state.max_case - sw_state.min_case + 1));
1705 OP1 (OPCODE_tableswitch);
1706 emit_reloc (RELOCATION_VALUE_0,
1707 SWITCH_ALIGN_RELOC, NULL, state);
1708 emit_switch_reloc (sw_state.default_label, state);
1709 OP4 (sw_state.min_case);
1710 OP4 (sw_state.max_case);
1711 for (i = sw_state.min_case; ; )
1713 reloc = relocs[index];
1714 if (i == reloc->offset)
1716 emit_case_reloc (reloc, state);
1717 if (i == sw_state.max_case)
1718 break;
1719 index++;
1721 else
1722 emit_switch_reloc (sw_state.default_label, state);
1723 i++;
1726 else
1727 { /* Use lookupswitch. */
1728 RESERVE(9 + 8 * sw_state.num_cases);
1729 OP1 (OPCODE_lookupswitch);
1730 emit_reloc (RELOCATION_VALUE_0,
1731 SWITCH_ALIGN_RELOC, NULL, state);
1732 emit_switch_reloc (sw_state.default_label, state);
1733 OP4 (sw_state.num_cases);
1734 for (i = 0; i < sw_state.num_cases; i++)
1736 struct jcf_relocation *reloc = relocs[i];
1737 OP4 (reloc->offset);
1738 emit_case_reloc (reloc, state);
1741 free (relocs);
1744 instruction_last = state->last_block;
1745 if (sw_state.default_label->pc < 0)
1746 define_jcf_label (sw_state.default_label, state);
1747 else /* Force a new block. */
1748 sw_state.default_label = get_jcf_label_here (state);
1749 /* Now re-arrange the blocks so the switch_instruction
1750 comes before the switch_body. */
1751 switch_length = state->code_length - switch_instruction->pc;
1752 switch_instruction->pc = body_block->pc;
1753 instruction_last->next = body_block;
1754 instruction_last->v.chunk->next = body_block->v.chunk;
1755 expression_last->next = switch_instruction;
1756 expression_last->v.chunk->next = switch_instruction->v.chunk;
1757 body_last->next = sw_state.default_label;
1758 body_last->v.chunk->next = NULL;
1759 state->chunk = body_last->v.chunk;
1760 for (; body_block != sw_state.default_label; body_block = body_block->next)
1761 body_block->pc += switch_length;
1763 state->sw_state = sw_state.prev;
1764 break;
1767 case RETURN_EXPR:
1768 exp = TREE_OPERAND (exp, 0);
1769 if (exp == NULL_TREE)
1770 exp = empty_stmt_node;
1771 else if (TREE_CODE (exp) != MODIFY_EXPR)
1772 abort ();
1773 else
1774 exp = TREE_OPERAND (exp, 1);
1775 generate_bytecode_return (exp, state);
1776 break;
1777 case LABELED_BLOCK_EXPR:
1779 struct jcf_block *end_label = gen_jcf_label (state);
1780 end_label->next = state->labeled_blocks;
1781 state->labeled_blocks = end_label;
1782 end_label->pc = PENDING_EXIT_PC;
1783 end_label->u.labeled_block = exp;
1784 if (LABELED_BLOCK_BODY (exp))
1785 generate_bytecode_insns (LABELED_BLOCK_BODY (exp), target, state);
1786 if (state->labeled_blocks != end_label)
1787 abort();
1788 state->labeled_blocks = end_label->next;
1789 define_jcf_label (end_label, state);
1791 break;
1792 case LOOP_EXPR:
1794 tree body = TREE_OPERAND (exp, 0);
1795 #if 0
1796 if (TREE_CODE (body) == COMPOUND_EXPR
1797 && TREE_CODE (TREE_OPERAND (body, 0)) == EXIT_EXPR)
1799 /* Optimize: H: if (TEST) GOTO L; BODY; GOTO H; L:
1800 to: GOTO L; BODY; L: if (!TEST) GOTO L; */
1801 struct jcf_block *head_label;
1802 struct jcf_block *body_label;
1803 struct jcf_block *end_label = gen_jcf_label (state);
1804 struct jcf_block *exit_label = state->labeled_blocks;
1805 head_label = gen_jcf_label (state);
1806 emit_goto (head_label, state);
1807 body_label = get_jcf_label_here (state);
1808 generate_bytecode_insns (TREE_OPERAND (body, 1), target, state);
1809 define_jcf_label (head_label, state);
1810 generate_bytecode_conditional (TREE_OPERAND (body, 0),
1811 end_label, body_label, 1, state);
1812 define_jcf_label (end_label, state);
1814 else
1815 #endif
1817 struct jcf_block *head_label = get_jcf_label_here (state);
1818 generate_bytecode_insns (body, IGNORE_TARGET, state);
1819 if (CAN_COMPLETE_NORMALLY (body))
1820 emit_goto (head_label, state);
1823 break;
1824 case EXIT_EXPR:
1826 struct jcf_block *label = state->labeled_blocks;
1827 struct jcf_block *end_label = gen_jcf_label (state);
1828 generate_bytecode_conditional (TREE_OPERAND (exp, 0),
1829 label, end_label, 0, state);
1830 define_jcf_label (end_label, state);
1832 break;
1833 case EXIT_BLOCK_EXPR:
1835 struct jcf_block *label = state->labeled_blocks;
1836 if (TREE_OPERAND (exp, 1) != NULL) goto notimpl;
1837 while (label->u.labeled_block != TREE_OPERAND (exp, 0))
1838 label = label->next;
1839 call_cleanups (label, state);
1840 emit_goto (label, state);
1842 break;
1844 case PREDECREMENT_EXPR: value = -1; post_op = 0; goto increment;
1845 case PREINCREMENT_EXPR: value = 1; post_op = 0; goto increment;
1846 case POSTDECREMENT_EXPR: value = -1; post_op = 1; goto increment;
1847 case POSTINCREMENT_EXPR: value = 1; post_op = 1; goto increment;
1848 increment:
1850 arg = TREE_OPERAND (exp, 1);
1851 exp = TREE_OPERAND (exp, 0);
1852 type = TREE_TYPE (exp);
1853 size = TYPE_IS_WIDE (type) ? 2 : 1;
1854 if ((TREE_CODE (exp) == VAR_DECL || TREE_CODE (exp) == PARM_DECL)
1855 && ! TREE_STATIC (exp)
1856 && TREE_CODE (type) == INTEGER_TYPE
1857 && TYPE_PRECISION (type) == 32)
1859 if (target != IGNORE_TARGET && post_op)
1860 emit_load (exp, state);
1861 emit_iinc (exp, value, state);
1862 if (target != IGNORE_TARGET && ! post_op)
1863 emit_load (exp, state);
1864 break;
1866 if (TREE_CODE (exp) == COMPONENT_REF)
1868 generate_bytecode_insns (TREE_OPERAND (exp, 0), STACK_TARGET, state);
1869 emit_dup (1, 0, state);
1870 /* Stack: ..., objectref, objectref. */
1871 field_op (TREE_OPERAND (exp, 1), OPCODE_getfield, state);
1872 NOTE_PUSH (size-1);
1873 /* Stack: ..., objectref, oldvalue. */
1874 offset = 1;
1876 else if (TREE_CODE (exp) == ARRAY_REF)
1878 generate_bytecode_insns (TREE_OPERAND (exp, 0), STACK_TARGET, state);
1879 generate_bytecode_insns (TREE_OPERAND (exp, 1), STACK_TARGET, state);
1880 emit_dup (2, 0, state);
1881 /* Stack: ..., array, index, array, index. */
1882 jopcode = OPCODE_iaload + adjust_typed_op (TREE_TYPE (exp), 7);
1883 RESERVE(1);
1884 OP1 (jopcode);
1885 NOTE_POP (2-size);
1886 /* Stack: ..., array, index, oldvalue. */
1887 offset = 2;
1889 else if (TREE_CODE (exp) == VAR_DECL || TREE_CODE (exp) == PARM_DECL)
1891 generate_bytecode_insns (exp, STACK_TARGET, state);
1892 /* Stack: ..., oldvalue. */
1893 offset = 0;
1895 else
1896 abort ();
1898 if (target != IGNORE_TARGET && post_op)
1899 emit_dup (size, offset, state);
1900 /* Stack, if ARRAY_REF: ..., [result, ] array, index, oldvalue. */
1901 /* Stack, if COMPONENT_REF: ..., [result, ] objectref, oldvalue. */
1902 /* Stack, otherwise: ..., [result, ] oldvalue. */
1903 generate_bytecode_insns (arg, STACK_TARGET, state);
1904 emit_binop ((value >= 0 ? OPCODE_iadd : OPCODE_isub)
1905 + adjust_typed_op (type, 3),
1906 type, state);
1907 if (target != IGNORE_TARGET && ! post_op)
1908 emit_dup (size, offset, state);
1909 /* Stack, if ARRAY_REF: ..., [result, ] array, index, newvalue. */
1910 /* Stack, if COMPONENT_REF: ..., [result, ] objectref, newvalue. */
1911 /* Stack, otherwise: ..., [result, ] newvalue. */
1912 goto finish_assignment;
1914 case MODIFY_EXPR:
1916 tree lhs = TREE_OPERAND (exp, 0);
1917 tree rhs = TREE_OPERAND (exp, 1);
1918 int offset = 0;
1920 /* See if we can use the iinc instruction. */
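/* For example, `i = i + 5' or `i -= 3' on a non-static int local can
   become a single `iinc LOCAL, CONST'.  The constant must fit in a
   signed 16-bit immediate (the MINUS case shifts the accepted range by
   one because the constant is negated before emit_iinc is called).  */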
1921 if ((TREE_CODE (lhs) == VAR_DECL || TREE_CODE (lhs) == PARM_DECL)
1922 && ! TREE_STATIC (lhs)
1923 && TREE_CODE (TREE_TYPE (lhs)) == INTEGER_TYPE
1924 && TYPE_PRECISION (TREE_TYPE (lhs)) == 32
1925 && (TREE_CODE (rhs) == PLUS_EXPR || TREE_CODE (rhs) == MINUS_EXPR))
1927 tree arg0 = TREE_OPERAND (rhs, 0);
1928 tree arg1 = TREE_OPERAND (rhs, 1);
1929 HOST_WIDE_INT min_value = -32768;
1930 HOST_WIDE_INT max_value = 32767;
1931 if (TREE_CODE (rhs) == MINUS_EXPR)
1933 min_value++;
1934 max_value++;
1936 else if (arg1 == lhs)
1938 arg0 = arg1;
1939 arg1 = TREE_OPERAND (rhs, 0);
1941 if (lhs == arg0 && TREE_CODE (arg1) == INTEGER_CST)
1943 HOST_WIDE_INT hi_value = TREE_INT_CST_HIGH (arg1);
1944 value = TREE_INT_CST_LOW (arg1);
1945 if ((hi_value == 0 && value <= max_value)
1946 || (hi_value == -1 && value >= min_value))
1948 if (TREE_CODE (rhs) == MINUS_EXPR)
1949 value = -value;
1950 emit_iinc (lhs, value, state);
1951 if (target != IGNORE_TARGET)
1952 emit_load (lhs, state);
1953 break;
1958 if (TREE_CODE (lhs) == COMPONENT_REF)
1960 generate_bytecode_insns (TREE_OPERAND (lhs, 0),
1961 STACK_TARGET, state);
1962 offset = 1;
1964 else if (TREE_CODE (lhs) == ARRAY_REF)
1966 generate_bytecode_insns (TREE_OPERAND(lhs, 0),
1967 STACK_TARGET, state);
1968 generate_bytecode_insns (TREE_OPERAND(lhs, 1),
1969 STACK_TARGET, state);
1970 offset = 2;
1972 else
1973 offset = 0;
1975 /* If the rhs is a binary expression whose left operand is the
1976 same node as the lhs, then this is an OP= expression. In this
1977 case we must do some special processing. */
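/* For example, for `obj.f += n' the objectref pushed above is
   duplicated, getfield fetches the old value, the value of `n' is
   added, and finish_assignment emits the matching putfield.  The rhs
   is rebuilt below with a NULL_TREE left operand so the binop code
   knows the LHS value is already on the stack.  */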
1978 if (TREE_CODE_CLASS (TREE_CODE (rhs)) == '2'
1979 && lhs == TREE_OPERAND (rhs, 0))
1981 if (TREE_CODE (lhs) == COMPONENT_REF)
1983 tree field = TREE_OPERAND (lhs, 1);
1984 if (! FIELD_STATIC (field))
1986 /* Duplicate the object reference so we can get
1987 the field. */
1988 emit_dup (TYPE_IS_WIDE (field) ? 2 : 1, 0, state);
1989 NOTE_POP (1);
1991 field_op (field, (FIELD_STATIC (field)
1992 ? OPCODE_getstatic
1993 : OPCODE_getfield),
1994 state);
1996 NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (field)) ? 2 : 1);
1998 else if (TREE_CODE (lhs) == VAR_DECL
1999 || TREE_CODE (lhs) == PARM_DECL)
2001 if (FIELD_STATIC (lhs))
2003 field_op (lhs, OPCODE_getstatic, state);
2004 NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (lhs)) ? 2 : 1);
2006 else
2007 emit_load (lhs, state);
2009 else if (TREE_CODE (lhs) == ARRAY_REF)
2011 /* Duplicate the array and index, which are on the
2012 stack, so that we can load the old value. */
2013 emit_dup (2, 0, state);
2014 NOTE_POP (2);
2015 jopcode = OPCODE_iaload + adjust_typed_op (TREE_TYPE (lhs), 7);
2016 RESERVE (1);
2017 OP1 (jopcode);
2018 NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (lhs)) ? 2 : 1);
2020 else
2021 abort ();
2023 /* generate_bytecode_insns correctly handles the case where the LHS
2024 of a binary expression is NULL_TREE; see the binop code below. */
2025 rhs = build (TREE_CODE (rhs), TREE_TYPE (rhs),
2026 NULL_TREE, TREE_OPERAND (rhs, 1));
2029 generate_bytecode_insns (rhs, STACK_TARGET, state);
2030 if (target != IGNORE_TARGET)
2031 emit_dup (TYPE_IS_WIDE (type) ? 2 : 1 , offset, state);
2032 exp = lhs;
2034 /* FALLTHROUGH */
2036 finish_assignment:
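/* At this point the stack holds any saved result copy, the address
   words pushed earlier (objectref, or array and index, or nothing for
   a local), and the new value; emit the matching putfield/putstatic,
   local store, or array store to consume them.  */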
2037 if (TREE_CODE (exp) == COMPONENT_REF)
2039 tree field = TREE_OPERAND (exp, 1);
2040 if (! FIELD_STATIC (field))
2041 NOTE_POP (1);
2042 field_op (field,
2043 FIELD_STATIC (field) ? OPCODE_putstatic : OPCODE_putfield,
2044 state);
2046 NOTE_POP (TYPE_IS_WIDE (TREE_TYPE (field)) ? 2 : 1);
2048 else if (TREE_CODE (exp) == VAR_DECL
2049 || TREE_CODE (exp) == PARM_DECL)
2051 if (FIELD_STATIC (exp))
2053 field_op (exp, OPCODE_putstatic, state);
2054 NOTE_POP (TYPE_IS_WIDE (TREE_TYPE (exp)) ? 2 : 1);
2056 else
2057 emit_store (exp, state);
2059 else if (TREE_CODE (exp) == ARRAY_REF)
2061 jopcode = OPCODE_iastore + adjust_typed_op (TREE_TYPE (exp), 7);
2062 RESERVE (1);
2063 OP1 (jopcode);
2064 NOTE_POP (TYPE_IS_WIDE (TREE_TYPE (exp)) ? 4 : 3);
2066 else
2067 abort ();
2068 break;
2069 case PLUS_EXPR:
2070 jopcode = OPCODE_iadd;
2071 goto binop;
2072 case MINUS_EXPR:
2073 jopcode = OPCODE_isub;
2074 goto binop;
2075 case MULT_EXPR:
2076 jopcode = OPCODE_imul;
2077 goto binop;
2078 case TRUNC_DIV_EXPR:
2079 case RDIV_EXPR:
2080 jopcode = OPCODE_idiv;
2081 goto binop;
2082 case TRUNC_MOD_EXPR:
2083 jopcode = OPCODE_irem;
2084 goto binop;
2085 case LSHIFT_EXPR: jopcode = OPCODE_ishl; goto binop;
2086 case RSHIFT_EXPR: jopcode = OPCODE_ishr; goto binop;
2087 case URSHIFT_EXPR: jopcode = OPCODE_iushr; goto binop;
2088 case TRUTH_AND_EXPR:
2089 case BIT_AND_EXPR: jopcode = OPCODE_iand; goto binop;
2090 case TRUTH_OR_EXPR:
2091 case BIT_IOR_EXPR: jopcode = OPCODE_ior; goto binop;
2092 case TRUTH_XOR_EXPR:
2093 case BIT_XOR_EXPR: jopcode = OPCODE_ixor; goto binop;
2094 binop:
2096 tree arg0 = TREE_OPERAND (exp, 0);
2097 tree arg1 = TREE_OPERAND (exp, 1);
2098 jopcode += adjust_typed_op (type, 3);
2099 if (arg0 == arg1 && TREE_CODE (arg0) == SAVE_EXPR)
2101 /* fold may (e.g.) convert 2*x to x+x. */
2102 generate_bytecode_insns (TREE_OPERAND (arg0, 0), target, state);
2103 emit_dup (TYPE_PRECISION (TREE_TYPE (arg0)) > 32 ? 2 : 1, 0, state);
2105 else
2107 /* ARG0 will be NULL_TREE if we're handling an `OP='
2108 expression. In this case the stack already holds the
2109 LHS. See the MODIFY_EXPR case. */
2110 if (arg0 != NULL_TREE)
2111 generate_bytecode_insns (arg0, target, state);
2112 if (jopcode >= OPCODE_lshl && jopcode <= OPCODE_lushr)
2113 arg1 = convert (int_type_node, arg1);
2114 generate_bytecode_insns (arg1, target, state);
2116 /* For most binary operations, both operands and the result have the
2117 same type. Shift operations are different. Using arg1's type
2118 gets us the correct SP adjustment in all cases. */
2119 if (target == STACK_TARGET)
2120 emit_binop (jopcode, TREE_TYPE (arg1), state);
2121 break;
2123 case TRUTH_NOT_EXPR:
2124 case BIT_NOT_EXPR:
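/* The JVM has no bitwise/boolean "not" instruction, so BIT_NOT is
   emitted as x ^ -1 and TRUTH_NOT as x ^ 1, using lxor (and an i2l for
   the constant) when the operand is a long.  */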
2125 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
2126 if (target == STACK_TARGET)
2128 int is_long = TYPE_PRECISION (TREE_TYPE (exp)) > 32;
2129 push_int_const (TREE_CODE (exp) == BIT_NOT_EXPR ? -1 : 1, state);
2130 RESERVE (2);
2131 if (is_long)
2132 OP1 (OPCODE_i2l);
2133 NOTE_PUSH (1 + is_long);
2134 OP1 (OPCODE_ixor + is_long);
2135 NOTE_POP (1 + is_long);
2137 break;
2138 case NEGATE_EXPR:
2139 jopcode = OPCODE_ineg;
2140 jopcode += adjust_typed_op (type, 3);
2141 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
2142 if (target == STACK_TARGET)
2143 emit_unop (jopcode, type, state);
2144 break;
2145 case INSTANCEOF_EXPR:
2147 int index = find_class_constant (&state->cpool, TREE_OPERAND (exp, 1));
2148 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
2149 RESERVE (3);
2150 OP1 (OPCODE_instanceof);
2151 OP2 (index);
2153 break;
2154 case SAVE_EXPR:
2155 generate_bytecode_insns (TREE_OPERAND (exp, 0), STACK_TARGET, state);
2156 break;
2157 case CONVERT_EXPR:
2158 case NOP_EXPR:
2159 case FLOAT_EXPR:
2160 case FIX_TRUNC_EXPR:
2162 tree src = TREE_OPERAND (exp, 0);
2163 tree src_type = TREE_TYPE (src);
2164 tree dst_type = TREE_TYPE (exp);
2165 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
2166 if (target == IGNORE_TARGET || src_type == dst_type)
2167 break;
2168 if (TREE_CODE (dst_type) == POINTER_TYPE)
2170 if (TREE_CODE (exp) == CONVERT_EXPR)
2172 int index = find_class_constant (&state->cpool,
2173 TREE_TYPE (dst_type));
2174 RESERVE (3);
2175 OP1 (OPCODE_checkcast);
2176 OP2 (index);
2179 else /* Convert numeric types. */
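/* The JVM conversion opcodes are laid out in a regular pattern
   (i2l i2f i2d l2i l2f l2d f2i f2l f2d d2i d2l d2f i2b i2c i2s), so
   the right one can be selected by adding small offsets for a wide
   (64-bit) source or destination; a trailing i2b/i2c/i2s narrows
   results below 32 bits.  */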
2181 int wide_src = TYPE_PRECISION (src_type) > 32;
2182 int wide_dst = TYPE_PRECISION (dst_type) > 32;
2183 NOTE_POP (1 + wide_src);
2184 RESERVE (1);
2185 if (TREE_CODE (dst_type) == REAL_TYPE)
2187 if (TREE_CODE (src_type) == REAL_TYPE)
2188 OP1 (wide_dst ? OPCODE_f2d : OPCODE_d2f);
2189 else if (TYPE_PRECISION (src_type) == 64)
2190 OP1 (OPCODE_l2f + wide_dst);
2191 else
2192 OP1 (OPCODE_i2f + wide_dst);
2194 else /* Convert to integral type. */
2196 if (TREE_CODE (src_type) == REAL_TYPE)
2197 OP1 (OPCODE_f2i + wide_dst + 3 * wide_src);
2198 else if (wide_dst)
2199 OP1 (OPCODE_i2l);
2200 else if (wide_src)
2201 OP1 (OPCODE_l2i);
2202 if (TYPE_PRECISION (dst_type) < 32)
2204 RESERVE (1);
2205 /* Already converted to int, if needed. */
2206 if (TYPE_PRECISION (dst_type) <= 8)
2207 OP1 (OPCODE_i2b);
2208 else if (TREE_UNSIGNED (dst_type))
2209 OP1 (OPCODE_i2c);
2210 else
2211 OP1 (OPCODE_i2s);
2214 NOTE_PUSH (1 + wide_dst);
2217 break;
2219 case TRY_EXPR:
2221 tree try_clause = TREE_OPERAND (exp, 0);
2222 struct jcf_block *start_label = get_jcf_label_here (state);
2223 struct jcf_block *end_label; /* End of try clause. */
2224 struct jcf_block *finished_label = gen_jcf_label (state);
2225 tree clause = TREE_OPERAND (exp, 1);
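/* Layout: the try body, an optional goto past the handlers, then each
   catch clause in turn; every clause gets an exception-table entry
   covering the whole try body.  */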
2226 if (target != IGNORE_TARGET)
2227 abort ();
2228 generate_bytecode_insns (try_clause, IGNORE_TARGET, state);
2229 end_label = get_jcf_label_here (state);
2230 if (end_label == start_label)
2231 break;
2232 if (CAN_COMPLETE_NORMALLY (try_clause))
2233 emit_goto (finished_label, state);
2234 while (clause != NULL_TREE)
2236 tree catch_clause = TREE_OPERAND (clause, 0);
2237 tree exception_decl = BLOCK_EXPR_DECLS (catch_clause);
2238 struct jcf_handler *handler = alloc_handler (start_label,
2239 end_label, state);
2240 if (exception_decl == NULL_TREE)
2241 handler->type = NULL_TREE;
2242 else
2243 handler->type = TREE_TYPE (TREE_TYPE (exception_decl));
2244 generate_bytecode_insns (catch_clause, IGNORE_TARGET, state);
2245 clause = TREE_CHAIN (clause);
2246 if (CAN_COMPLETE_NORMALLY (catch_clause) && clause != NULL_TREE)
2247 emit_goto (finished_label, state);
2249 define_jcf_label (finished_label, state);
2251 break;
2253 case TRY_FINALLY_EXPR:
2255 struct jcf_block *finished_label = NULL;
2256 struct jcf_block *finally_label, *start_label, *end_label;
2257 struct jcf_handler *handler;
2258 tree try_block = TREE_OPERAND (exp, 0);
2259 tree finally = TREE_OPERAND (exp, 1);
2260 tree return_link = NULL_TREE, exception_decl = NULL_TREE;
2262 tree exception_type;
2264 finally_label = gen_jcf_label (state);
2265 start_label = get_jcf_label_here (state);
2266 /* If the `finally' clause can complete normally, we emit it
2267 as a subroutine and let the other clauses call it via
2268 `jsr'. If it can't complete normally, then we simply emit
2269 `goto's directly to it. */
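/* When the finally clause is emitted as a subroutine, the generated
   shape is roughly:
         <try body>; jsr FINALLY; goto DONE;
     HANDLER: astore EXC; jsr FINALLY; aload EXC; athrow;
     FINALLY: astore RET; <finally body>; ret RET;
     DONE:
   where HANDLER is a catch-all exception-table entry covering the
   try body.  */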
2270 if (CAN_COMPLETE_NORMALLY (finally))
2272 finally_label->pc = PENDING_CLEANUP_PC;
2273 finally_label->next = state->labeled_blocks;
2274 state->labeled_blocks = finally_label;
2275 state->num_finalizers++;
2278 generate_bytecode_insns (try_block, target, state);
2280 if (CAN_COMPLETE_NORMALLY (finally))
2282 if (state->labeled_blocks != finally_label)
2283 abort();
2284 state->labeled_blocks = finally_label->next;
2286 end_label = get_jcf_label_here (state);
2288 if (end_label == start_label)
2290 state->num_finalizers--;
2291 define_jcf_label (finally_label, state);
2292 generate_bytecode_insns (finally, IGNORE_TARGET, state);
2293 break;
2296 if (CAN_COMPLETE_NORMALLY (finally))
2298 return_link = build_decl (VAR_DECL, NULL_TREE,
2299 return_address_type_node);
2300 finished_label = gen_jcf_label (state);
2303 if (CAN_COMPLETE_NORMALLY (try_block))
2305 if (CAN_COMPLETE_NORMALLY (finally))
2307 emit_jsr (finally_label, state);
2308 emit_goto (finished_label, state);
2310 else
2311 emit_goto (finally_label, state);
2314 /* Handle exceptions. */
2316 exception_type = build_pointer_type (throwable_type_node);
2317 if (CAN_COMPLETE_NORMALLY (finally))
2319 /* We're going to generate a subroutine, so we'll need to
2320 save and restore the exception around the `jsr'. */
2321 exception_decl = build_decl (VAR_DECL, NULL_TREE, exception_type);
2322 localvar_alloc (return_link, state);
2324 handler = alloc_handler (start_label, end_label, state);
2325 handler->type = NULL_TREE;
2326 if (CAN_COMPLETE_NORMALLY (finally))
2328 localvar_alloc (exception_decl, state);
2329 NOTE_PUSH (1);
2330 emit_store (exception_decl, state);
2331 emit_jsr (finally_label, state);
2332 emit_load (exception_decl, state);
2333 RESERVE (1);
2334 OP1 (OPCODE_athrow);
2335 NOTE_POP (1);
2337 else
2339 /* We're not generating a subroutine. In this case we can
2340 simply have the exception handler pop the exception and
2341 then fall through to the `finally' block. */
2342 NOTE_PUSH (1);
2343 emit_pop (1, state);
2344 NOTE_POP (1);
2347 /* The finally block. If we're generating a subroutine, first
2348 save return PC into return_link. Otherwise, just generate
2349 the code for the `finally' block. */
2350 define_jcf_label (finally_label, state);
2351 if (CAN_COMPLETE_NORMALLY (finally))
2353 NOTE_PUSH (1);
2354 emit_store (return_link, state);
2357 generate_bytecode_insns (finally, IGNORE_TARGET, state);
2358 if (CAN_COMPLETE_NORMALLY (finally))
2360 maybe_wide (OPCODE_ret, DECL_LOCAL_INDEX (return_link), state);
2361 maybe_free_localvar (exception_decl, state, 1);
2362 maybe_free_localvar (return_link, state, 1);
2363 define_jcf_label (finished_label, state);
2366 break;
2367 case THROW_EXPR:
2368 generate_bytecode_insns (TREE_OPERAND (exp, 0), STACK_TARGET, state);
2369 RESERVE (1);
2370 OP1 (OPCODE_athrow);
2371 break;
2372 case NEW_ARRAY_INIT:
2374 tree values = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
2375 tree array_type = TREE_TYPE (TREE_TYPE (exp));
2376 tree element_type = TYPE_ARRAY_ELEMENT (array_type);
2377 HOST_WIDE_INT length = java_array_type_length (array_type);
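/* Array initializer: push the length, allocate with newarray (for a
   primitive element type) or anewarray, then for each element dup the
   array reference, push the index and the value, and store it with the
   element type's *astore opcode.  */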
2378 if (target == IGNORE_TARGET)
2380 for ( ; values != NULL_TREE; values = TREE_CHAIN (values))
2381 generate_bytecode_insns (TREE_VALUE (values), target, state);
2382 break;
2384 push_int_const (length, state);
2385 NOTE_PUSH (1);
2386 RESERVE (3);
2387 if (JPRIMITIVE_TYPE_P (element_type))
2389 int atype = encode_newarray_type (element_type);
2390 OP1 (OPCODE_newarray);
2391 OP1 (atype);
2393 else
2395 int index = find_class_constant (&state->cpool,
2396 TREE_TYPE (element_type));
2397 OP1 (OPCODE_anewarray);
2398 OP2 (index);
2400 offset = 0;
2401 jopcode = OPCODE_iastore + adjust_typed_op (element_type, 7);
2402 for ( ; values != NULL_TREE; values = TREE_CHAIN (values), offset++)
2404 int save_SP = state->code_SP;
2405 emit_dup (1, 0, state);
2406 push_int_const (offset, state);
2407 NOTE_PUSH (1);
2408 generate_bytecode_insns (TREE_VALUE (values), STACK_TARGET, state);
2409 RESERVE (1);
2410 OP1 (jopcode);
2411 state->code_SP = save_SP;
2414 break;
2415 case JAVA_EXC_OBJ_EXPR:
2416 NOTE_PUSH (1); /* Pushed by exception system. */
2417 break;
2418 case NEW_CLASS_EXPR:
2420 tree class = TREE_TYPE (TREE_TYPE (exp));
2421 int need_result = target != IGNORE_TARGET;
2422 int index = find_class_constant (&state->cpool, class);
2423 RESERVE (4);
2424 OP1 (OPCODE_new);
2425 OP2 (index);
2426 if (need_result)
2427 OP1 (OPCODE_dup);
2428 NOTE_PUSH (1 + need_result);
2430 /* ... fall through ... */
2431 case CALL_EXPR:
2433 tree f = TREE_OPERAND (exp, 0);
2434 tree x = TREE_OPERAND (exp, 1);
2435 int save_SP = state->code_SP;
2436 int nargs;
2437 if (TREE_CODE (f) == ADDR_EXPR)
2438 f = TREE_OPERAND (f, 0);
2439 if (f == soft_newarray_node)
2441 int type_code = TREE_INT_CST_LOW (TREE_VALUE (x));
2442 generate_bytecode_insns (TREE_VALUE (TREE_CHAIN (x)),
2443 STACK_TARGET, state);
2444 RESERVE (2);
2445 OP1 (OPCODE_newarray);
2446 OP1 (type_code);
2447 break;
2449 else if (f == soft_multianewarray_node)
2451 int ndims;
2452 int idim;
2453 int index = find_class_constant (&state->cpool,
2454 TREE_TYPE (TREE_TYPE (exp)));
2455 x = TREE_CHAIN (x); /* Skip class argument. */
2456 ndims = TREE_INT_CST_LOW (TREE_VALUE (x));
2457 for (idim = ndims; --idim >= 0; )
2459 x = TREE_CHAIN (x);
2460 generate_bytecode_insns (TREE_VALUE (x), STACK_TARGET, state);
2462 RESERVE (4);
2463 OP1 (OPCODE_multianewarray);
2464 OP2 (index);
2465 OP1 (ndims);
2466 break;
2468 else if (f == soft_anewarray_node)
2470 tree cl = TYPE_ARRAY_ELEMENT (TREE_TYPE (TREE_TYPE (exp)));
2471 int index = find_class_constant (&state->cpool, TREE_TYPE (cl));
2472 generate_bytecode_insns (TREE_VALUE (x), STACK_TARGET, state);
2473 RESERVE (3);
2474 OP1 (OPCODE_anewarray);
2475 OP2 (index);
2476 break;
2478 else if (f == soft_monitorenter_node
2479 || f == soft_monitorexit_node
2480 || f == throw_node)
2482 if (f == soft_monitorenter_node)
2483 op = OPCODE_monitorenter;
2484 else if (f == soft_monitorexit_node)
2485 op = OPCODE_monitorexit;
2486 else
2487 op = OPCODE_athrow;
2488 generate_bytecode_insns (TREE_VALUE (x), STACK_TARGET, state);
2489 RESERVE (1);
2490 OP1 (op);
2491 NOTE_POP (1);
2492 break;
2494 for ( ; x != NULL_TREE; x = TREE_CHAIN (x))
2496 generate_bytecode_insns (TREE_VALUE (x), STACK_TARGET, state);
2498 nargs = state->code_SP - save_SP;
2499 state->code_SP = save_SP;
2500 if (f == soft_fmod_node)
2502 RESERVE (1);
2503 OP1 (OPCODE_drem);
2504 NOTE_PUSH (2);
2505 break;
2507 if (TREE_CODE (exp) == NEW_CLASS_EXPR)
2508 NOTE_POP (1); /* Pop implicit this. */
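/* Select the invocation opcode: invokestatic for static methods;
   invokespecial for constructors, super calls and private methods;
   invokeinterface (which also carries the argument-word count,
   including the receiver, plus a zero pad byte) when the call goes
   through an interface type; invokevirtual otherwise.  */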
2509 if (TREE_CODE (f) == FUNCTION_DECL && DECL_CONTEXT (f) != NULL_TREE)
2511 tree context = DECL_CONTEXT (f);
2512 int index, interface = 0;
2513 RESERVE (5);
2514 if (METHOD_STATIC (f))
2515 OP1 (OPCODE_invokestatic);
2516 else if (DECL_CONSTRUCTOR_P (f) || CALL_USING_SUPER (exp)
2517 || METHOD_PRIVATE (f))
2518 OP1 (OPCODE_invokespecial);
2519 else
2521 if (CLASS_INTERFACE (TYPE_NAME (context)))
2523 tree arg1 = TREE_VALUE (TREE_OPERAND (exp, 1));
2524 context = TREE_TYPE (TREE_TYPE (arg1));
2525 if (CLASS_INTERFACE (TYPE_NAME (context)))
2526 interface = 1;
2528 if (interface)
2529 OP1 (OPCODE_invokeinterface);
2530 else
2531 OP1 (OPCODE_invokevirtual);
2533 index = find_methodref_with_class_index (&state->cpool, f, context);
2534 OP2 (index);
2535 if (interface)
2537 if (nargs <= 0)
2538 abort ();
2540 OP1 (nargs);
2541 OP1 (0);
2543 f = TREE_TYPE (TREE_TYPE (f));
2544 if (TREE_CODE (f) != VOID_TYPE)
2546 int size = TYPE_IS_WIDE (f) ? 2 : 1;
2547 if (target == IGNORE_TARGET)
2548 emit_pop (size, state);
2549 else
2550 NOTE_PUSH (size);
2552 break;
2555 /* fall through */
2556 notimpl:
2557 default:
2558 error("internal error in generate_bytecode_insns - tree code not implemented: %s",
2559 tree_code_name [(int) TREE_CODE (exp)]);
2563 static void
2564 perform_relocations (struct jcf_partial *state)
2566 struct jcf_block *block;
2567 struct jcf_relocation *reloc;
2568 int pc;
2569 int shrink;
2571 /* Before we start, the pc field of each block is an upper bound on
2572 the block's start pc (it may be less, if previous blocks need less
2573 than their maximum).
2575 The minimum size of each block is in the block's chunk->size. */
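/* Two passes follow.  The first computes each block's final pc,
   deleting redundant gotos, threading goto-to-goto chains, adding
   switch padding, and deciding which branches fit the short 16-bit
   forms.  The second rewrites each block's bytes, patching branch
   offsets and widening the branches that need 32-bit offsets.  */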
2577 /* First, figure out the actual locations of each block. */
2578 pc = 0;
2579 shrink = 0;
2580 for (block = state->blocks; block != NULL; block = block->next)
2582 int block_size = block->v.chunk->size;
2584 block->pc = pc;
2586 /* Optimize GOTO L; L: by getting rid of the redundant goto.
2587 Assumes relocations are in reverse order. */
2588 reloc = block->u.relocations;
2589 while (reloc != NULL
2590 && reloc->kind == OPCODE_goto_w
2591 && reloc->label->pc == block->next->pc
2592 && reloc->offset + 2 == block_size)
2594 reloc = reloc->next;
2595 block->u.relocations = reloc;
2596 block->v.chunk->size -= 3;
2597 block_size -= 3;
2598 shrink += 3;
2601 /* Optimize GOTO L; ... L: GOTO X by changing the first goto to
2602 jump directly to X. We're careful here to avoid an infinite
2603 loop if the `goto's themselves form one. We do this
2604 optimization because we can generate a goto-to-goto for some
2605 try/finally blocks. */
2606 while (reloc != NULL
2607 && reloc->kind == OPCODE_goto_w
2608 && reloc->label != block
2609 && reloc->label->v.chunk->data != NULL
2610 && reloc->label->v.chunk->data[0] == OPCODE_goto)
2612 /* Find the reloc for the first instruction of the
2613 destination block. */
2614 struct jcf_relocation *first_reloc;
2615 for (first_reloc = reloc->label->u.relocations;
2616 first_reloc;
2617 first_reloc = first_reloc->next)
2619 if (first_reloc->offset == 1
2620 && first_reloc->kind == OPCODE_goto_w)
2622 reloc->label = first_reloc->label;
2623 break;
2627 /* If we didn't do anything, exit the loop. */
2628 if (first_reloc == NULL)
2629 break;
2632 for (reloc = block->u.relocations; reloc != NULL; reloc = reloc->next)
2634 if (reloc->kind == SWITCH_ALIGN_RELOC)
2636 /* We assume this is the first relocation in this block,
2637 so we know its final pc. */
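/* tableswitch and lookupswitch require their 4-byte operands to start
   at an offset that is a multiple of 4 from the start of the code, so
   0-3 pad bytes may be needed here.  */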
2638 int where = pc + reloc->offset;
2639 int pad = ((where + 3) & ~3) - where;
2640 block_size += pad;
2642 else if (reloc->kind < -1 || reloc->kind > BLOCK_START_RELOC)
2644 int delta = reloc->label->pc - (pc + reloc->offset - 1);
2645 int expand = reloc->kind > 0 ? 2 : 5;
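/* If the (estimated) branch distance fits in a signed 16-bit offset,
   the instruction keeps its short form and SHRINK records how much the
   remaining upper-bound pcs overestimate; otherwise the block grows by
   EXPAND bytes to hold the wide form (goto/jsr become goto_w/jsr_w, a
   conditional branch becomes an inverted test around a goto_w).  */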
2647 if (delta > 0)
2648 delta -= shrink;
2649 if (delta >= -32768 && delta <= 32767)
2651 shrink += expand;
2652 reloc->kind = -1;
2654 else
2655 block_size += expand;
2658 pc += block_size;
2661 for (block = state->blocks; block != NULL; block = block->next)
2663 struct chunk *chunk = block->v.chunk;
2664 int old_size = chunk->size;
2665 int next_pc = block->next == NULL ? pc : block->next->pc;
2666 int new_size = next_pc - block->pc;
2667 unsigned char *new_ptr;
2668 unsigned char *old_buffer = chunk->data;
2669 unsigned char *old_ptr = old_buffer + old_size;
2670 if (new_size != old_size)
2672 chunk->data = (unsigned char *)
2673 obstack_alloc (state->chunk_obstack, new_size);
2674 chunk->size = new_size;
2676 new_ptr = chunk->data + new_size;
2678 /* We do the relocations from back to front, because
2679 the relocations are in reverse order. */
2680 for (reloc = block->u.relocations; ; reloc = reloc->next)
2682 /* new_ptr and old_ptr point into the old and new buffers,
2683 respectively. (If no relocations cause the buffer to
2684 grow, the buffer will be the same buffer, and new_ptr==old_ptr.)
2685 The bytes at higher address have been copied and relocations
2686 handled; those at lower addresses remain to process. */
2688 /* START is the lowest old index of the piece to be copied with no
2689 relocation, i.e. the (exclusive) high index of the piece below it that does need relocation. */
2690 int start = reloc == NULL ? 0
2691 : reloc->kind == SWITCH_ALIGN_RELOC ? reloc->offset
2692 : (reloc->kind == 0 || reloc->kind == BLOCK_START_RELOC)
2693 ? reloc->offset + 4
2694 : reloc->offset + 2;
2695 int32 value;
2696 int new_offset;
2697 int n = (old_ptr - old_buffer) - start;
2698 new_ptr -= n;
2699 old_ptr -= n;
2700 if (n > 0)
2701 memcpy (new_ptr, old_ptr, n);
2702 if (old_ptr == old_buffer)
2703 break;
2705 new_offset = new_ptr - chunk->data;
2706 new_offset -= (reloc->kind == -1 ? 2 : 4);
2707 if (reloc->kind == 0)
2709 old_ptr -= 4;
2710 value = GET_u4 (old_ptr);
2712 else if (reloc->kind == BLOCK_START_RELOC)
2714 old_ptr -= 4;
2715 value = 0;
2716 new_offset = 0;
2718 else if (reloc->kind == SWITCH_ALIGN_RELOC)
2720 int where = block->pc + reloc->offset;
2721 int pad = ((where + 3) & ~3) - where;
2722 while (--pad >= 0)
2723 *--new_ptr = 0;
2724 continue;
2726 else
2728 old_ptr -= 2;
2729 value = GET_u2 (old_ptr);
2731 value += reloc->label->pc - (block->pc + new_offset);
2732 *--new_ptr = (unsigned char) value; value >>= 8;
2733 *--new_ptr = (unsigned char) value; value >>= 8;
2734 if (reloc->kind != -1)
2736 *--new_ptr = (unsigned char) value; value >>= 8;
2737 *--new_ptr = (unsigned char) value;
2739 if (reloc->kind > BLOCK_START_RELOC)
2741 /* Convert: OP TARGET to: OP_w TARGET; (OP is goto or jsr). */
2742 --old_ptr;
2743 *--new_ptr = reloc->kind;
2745 else if (reloc->kind < -1)
2747 /* Convert: ifCOND TARGET to: ifNCOND T; goto_w TARGET; T: */
2748 --old_ptr;
2749 *--new_ptr = OPCODE_goto_w;
2750 *--new_ptr = 3;
2751 *--new_ptr = 0;
2752 *--new_ptr = - reloc->kind;
2755 if (new_ptr != chunk->data)
2756 abort ();
2758 state->code_length = pc;
2761 static void
2762 init_jcf_state (struct jcf_partial *state, struct obstack *work)
2764 state->chunk_obstack = work;
2765 state->first = state->chunk = NULL;
2766 CPOOL_INIT (&state->cpool);
2767 BUFFER_INIT (&state->localvars);
2768 BUFFER_INIT (&state->bytecode);
2771 static void
2772 init_jcf_method (struct jcf_partial *state, tree method)
2774 state->current_method = method;
2775 state->blocks = state->last_block = NULL;
2776 state->linenumber_count = 0;
2777 state->first_lvar = state->last_lvar = NULL;
2778 state->lvar_count = 0;
2779 state->labeled_blocks = NULL;
2780 state->code_length = 0;
2781 BUFFER_RESET (&state->bytecode);
2782 BUFFER_RESET (&state->localvars);
2783 state->code_SP = 0;
2784 state->code_SP_max = 0;
2785 state->handlers = NULL;
2786 state->last_handler = NULL;
2787 state->num_handlers = 0;
2788 state->num_finalizers = 0;
2789 state->return_value_decl = NULL_TREE;
2792 static void
2793 release_jcf_state (struct jcf_partial *state)
2795 CPOOL_FINISH (&state->cpool);
2796 obstack_free (state->chunk_obstack, state->first);
2799 /* Generate and return a list of chunks containing the class CLAS
2800 in the .class file representation. The list can be written to a
2801 .class file using write_chunks. Allocate chunks from STATE's chunk_obstack. */
2803 static GTY(()) tree SourceFile_node;
2804 static struct chunk *
2805 generate_classfile (tree clas, struct jcf_partial *state)
2807 struct chunk *cpool_chunk;
2808 const char *source_file, *s;
2809 char *ptr;
2810 int i;
2811 char *fields_count_ptr;
2812 int fields_count = 0;
2813 char *methods_count_ptr;
2814 int methods_count = 0;
2815 tree part;
2816 int total_supers
2817 = clas == object_type_node ? 0
2818 : TREE_VEC_LENGTH (TYPE_BINFO_BASETYPES (clas));
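/* The chunks built below follow the .class file layout: magic and
   version numbers, the constant pool (whose bytes are generated last,
   once its final contents are known), access_flags, this_class,
   super_class, the interface list, then the field, method and class
   attribute tables.  */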
2820 ptr = append_chunk (NULL, 8, state);
2821 PUT4 (0xCafeBabe); /* Magic number */
2822 PUT2 (3); /* Minor version */
2823 PUT2 (45); /* Major version */
2825 append_chunk (NULL, 0, state);
2826 cpool_chunk = state->chunk;
2828 /* Next allocate the chunk containing access_flags through fields_count. */
2829 if (clas == object_type_node)
2830 i = 10;
2831 else
2832 i = 8 + 2 * total_supers;
2833 ptr = append_chunk (NULL, i, state);
2834 i = get_access_flags (TYPE_NAME (clas));
2835 if (! (i & ACC_INTERFACE))
2836 i |= ACC_SUPER;
2837 PUT2 (i); /* access_flags */
2838 i = find_class_constant (&state->cpool, clas); PUT2 (i); /* this_class */
2839 if (clas == object_type_node)
2841 PUT2(0); /* super_class */
2842 PUT2(0); /* interfaces_count */
2844 else
2846 tree basetypes = TYPE_BINFO_BASETYPES (clas);
2847 tree base = BINFO_TYPE (TREE_VEC_ELT (basetypes, 0));
2848 int j = find_class_constant (&state->cpool, base);
2849 PUT2 (j); /* super_class */
2850 PUT2 (total_supers - 1); /* interfaces_count */
2851 for (i = 1; i < total_supers; i++)
2853 base = BINFO_TYPE (TREE_VEC_ELT (basetypes, i));
2854 j = find_class_constant (&state->cpool, base);
2855 PUT2 (j);
2858 fields_count_ptr = ptr;
2860 for (part = TYPE_FIELDS (clas); part; part = TREE_CHAIN (part))
2862 int have_value, attr_count = 0;
2863 if (DECL_NAME (part) == NULL_TREE || DECL_ARTIFICIAL (part))
2864 continue;
2865 ptr = append_chunk (NULL, 8, state);
2866 i = get_access_flags (part); PUT2 (i);
2867 i = find_utf8_constant (&state->cpool, DECL_NAME (part)); PUT2 (i);
2868 i = find_utf8_constant (&state->cpool,
2869 build_java_signature (TREE_TYPE (part)));
2870 PUT2(i);
2871 have_value = DECL_INITIAL (part) != NULL_TREE
2872 && FIELD_STATIC (part) && CONSTANT_VALUE_P (DECL_INITIAL (part))
2873 && FIELD_FINAL (part)
2874 && (JPRIMITIVE_TYPE_P (TREE_TYPE (part))
2875 || TREE_TYPE (part) == string_ptr_type_node);
2876 if (have_value)
2877 attr_count++;
2879 if (FIELD_THISN (part) || FIELD_LOCAL_ALIAS (part)
2880 || FIELD_SYNTHETIC (part))
2881 attr_count++;
2882 if (FIELD_DEPRECATED (part))
2883 attr_count++;
2885 PUT2 (attr_count); /* attributes_count */
2886 if (have_value)
2888 tree init = DECL_INITIAL (part);
2889 static tree ConstantValue_node = NULL_TREE;
2890 if (TREE_TYPE (part) != TREE_TYPE (init))
2891 fatal_error ("field initializer type mismatch");
2892 ptr = append_chunk (NULL, 8, state);
2893 if (ConstantValue_node == NULL_TREE)
2894 ConstantValue_node = get_identifier ("ConstantValue");
2895 i = find_utf8_constant (&state->cpool, ConstantValue_node);
2896 PUT2 (i); /* attribute_name_index */
2897 PUT4 (2); /* attribute_length */
2898 i = find_constant_index (init, state); PUT2 (i);
2900 /* Emit the "Synthetic" attribute for val$<x> and this$<n>
2901 fields and other fields which need it. */
2902 if (FIELD_THISN (part) || FIELD_LOCAL_ALIAS (part)
2903 || FIELD_SYNTHETIC (part))
2904 ptr = append_synthetic_attribute (state);
2905 if (FIELD_DEPRECATED (part))
2906 append_deprecated_attribute (state);
2907 fields_count++;
2909 ptr = fields_count_ptr; UNSAFE_PUT2 (fields_count);
2911 ptr = methods_count_ptr = append_chunk (NULL, 2, state);
2912 PUT2 (0);
2914 for (part = TYPE_METHODS (clas); part; part = TREE_CHAIN (part))
2916 struct jcf_block *block;
2917 tree function_body = DECL_FUNCTION_BODY (part);
2918 tree body = function_body == NULL_TREE ? NULL_TREE
2919 : BLOCK_EXPR_BODY (function_body);
2920 tree name = DECL_CONSTRUCTOR_P (part) ? init_identifier_node
2921 : DECL_NAME (part);
2922 tree type = TREE_TYPE (part);
2923 tree save_function = current_function_decl;
2924 int synthetic_p = 0;
2925 current_function_decl = part;
2926 ptr = append_chunk (NULL, 8, state);
2927 i = get_access_flags (part); PUT2 (i);
2928 i = find_utf8_constant (&state->cpool, name); PUT2 (i);
2929 i = find_utf8_constant (&state->cpool, build_java_signature (type));
2930 PUT2 (i);
2931 i = (body != NULL_TREE) + (DECL_FUNCTION_THROWS (part) != NULL_TREE);
2933 /* Make room for the Synthetic attribute (of zero length). */
2934 if (DECL_FINIT_P (part)
2935 || DECL_INSTINIT_P (part)
2936 || OUTER_FIELD_ACCESS_IDENTIFIER_P (DECL_NAME (part))
2937 || TYPE_DOT_CLASS (clas) == part)
2939 i++;
2940 synthetic_p = 1;
2942 /* Make room for Deprecated attribute. */
2943 if (METHOD_DEPRECATED (part))
2944 i++;
2946 PUT2 (i); /* attributes_count */
2948 if (synthetic_p)
2949 ptr = append_synthetic_attribute (state);
2951 if (body != NULL_TREE)
2953 int code_attributes_count = 0;
2954 static tree Code_node = NULL_TREE;
2955 tree t;
2956 char *attr_len_ptr;
2957 struct jcf_handler *handler;
2958 if (Code_node == NULL_TREE)
2959 Code_node = get_identifier ("Code");
2960 ptr = append_chunk (NULL, 14, state);
2961 i = find_utf8_constant (&state->cpool, Code_node); PUT2 (i);
2962 attr_len_ptr = ptr;
2963 init_jcf_method (state, part);
2964 get_jcf_label_here (state); /* Force a first block. */
2965 for (t = DECL_ARGUMENTS (part); t != NULL_TREE; t = TREE_CHAIN (t))
2966 localvar_alloc (t, state);
2967 state->num_jsrs = 0;
2968 generate_bytecode_insns (body, IGNORE_TARGET, state);
2969 if (CAN_COMPLETE_NORMALLY (body))
2971 if (TREE_CODE (TREE_TYPE (type)) != VOID_TYPE)
2972 abort();
2973 RESERVE (1);
2974 OP1 (OPCODE_return);
2976 for (t = DECL_ARGUMENTS (part); t != NULL_TREE; t = TREE_CHAIN (t))
2977 maybe_free_localvar (t, state, 1);
2978 if (state->return_value_decl != NULL_TREE)
2979 maybe_free_localvar (state->return_value_decl, state, 1);
2980 finish_jcf_block (state);
2981 perform_relocations (state);
2983 ptr = attr_len_ptr;
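/* Code attribute_length = max_stack (2) + max_locals (2)
   + code_length (4) + the code itself + exception_table_length (2)
   + 8 bytes per handler + attributes_count (2), plus the
   LineNumberTable and LocalVariableTable attributes added below.  */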
2984 i = 8 + state->code_length + 4 + 8 * state->num_handlers;
2985 if (state->linenumber_count > 0)
2987 code_attributes_count++;
2988 i += 8 + 4 * state->linenumber_count;
2990 if (state->lvar_count > 0)
2992 code_attributes_count++;
2993 i += 8 + 10 * state->lvar_count;
2995 UNSAFE_PUT4 (i); /* attribute_length */
2996 UNSAFE_PUT2 (state->code_SP_max); /* max_stack */
2997 UNSAFE_PUT2 (localvar_max); /* max_locals */
2998 UNSAFE_PUT4 (state->code_length);
3000 /* Emit the exception table. */
3001 ptr = append_chunk (NULL, 2 + 8 * state->num_handlers, state);
3002 PUT2 (state->num_handlers); /* exception_table_length */
3003 handler = state->handlers;
3004 for (; handler != NULL; handler = handler->next)
3006 int type_index;
3007 PUT2 (handler->start_label->pc);
3008 PUT2 (handler->end_label->pc);
3009 PUT2 (handler->handler_label->pc);
3010 if (handler->type == NULL_TREE)
3011 type_index = 0;
3012 else
3013 type_index = find_class_constant (&state->cpool,
3014 handler->type);
3015 PUT2 (type_index);
3018 ptr = append_chunk (NULL, 2, state);
3019 PUT2 (code_attributes_count);
3021 /* Write the LineNumberTable attribute. */
3022 if (state->linenumber_count > 0)
3024 static tree LineNumberTable_node = NULL_TREE;
3025 ptr = append_chunk (NULL,
3026 8 + 4 * state->linenumber_count, state);
3027 if (LineNumberTable_node == NULL_TREE)
3028 LineNumberTable_node = get_identifier ("LineNumberTable");
3029 i = find_utf8_constant (&state->cpool, LineNumberTable_node);
3030 PUT2 (i); /* attribute_name_index */
3031 i = 2+4*state->linenumber_count; PUT4(i); /* attribute_length */
3032 i = state->linenumber_count; PUT2 (i);
3033 for (block = state->blocks; block != NULL; block = block->next)
3035 int line = block->linenumber;
3036 if (line > 0)
3038 PUT2 (block->pc);
3039 PUT2 (line);
3044 /* Write the LocalVariableTable attribute. */
3045 if (state->lvar_count > 0)
3047 static tree LocalVariableTable_node = NULL_TREE;
3048 struct localvar_info *lvar = state->first_lvar;
3049 ptr = append_chunk (NULL, 8 + 10 * state->lvar_count, state);
3050 if (LocalVariableTable_node == NULL_TREE)
3051 LocalVariableTable_node = get_identifier("LocalVariableTable");
3052 i = find_utf8_constant (&state->cpool, LocalVariableTable_node);
3053 PUT2 (i); /* attribute_name_index */
3054 i = 2 + 10 * state->lvar_count; PUT4 (i); /* attribute_length */
3055 i = state->lvar_count; PUT2 (i);
3056 for ( ; lvar != NULL; lvar = lvar->next)
3058 tree name = DECL_NAME (lvar->decl);
3059 tree sig = build_java_signature (TREE_TYPE (lvar->decl));
3060 i = lvar->start_label->pc; PUT2 (i);
3061 i = lvar->end_label->pc - i; PUT2 (i);
3062 i = find_utf8_constant (&state->cpool, name); PUT2 (i);
3063 i = find_utf8_constant (&state->cpool, sig); PUT2 (i);
3064 i = DECL_LOCAL_INDEX (lvar->decl); PUT2 (i);
3068 if (DECL_FUNCTION_THROWS (part) != NULL_TREE)
3070 tree t = DECL_FUNCTION_THROWS (part);
3071 int throws_count = list_length (t);
3072 static tree Exceptions_node = NULL_TREE;
3073 if (Exceptions_node == NULL_TREE)
3074 Exceptions_node = get_identifier ("Exceptions");
3075 ptr = append_chunk (NULL, 8 + 2 * throws_count, state);
3076 i = find_utf8_constant (&state->cpool, Exceptions_node);
3077 PUT2 (i); /* attribute_name_index */
3078 i = 2 + 2 * throws_count; PUT4(i); /* attribute_length */
3079 i = throws_count; PUT2 (i);
3080 for (; t != NULL_TREE; t = TREE_CHAIN (t))
3082 i = find_class_constant (&state->cpool, TREE_VALUE (t));
3083 PUT2 (i);
3087 if (METHOD_DEPRECATED (part))
3088 append_deprecated_attribute (state);
3090 methods_count++;
3091 current_function_decl = save_function;
3093 ptr = methods_count_ptr; UNSAFE_PUT2 (methods_count);
3095 source_file = DECL_SOURCE_FILE (TYPE_NAME (clas));
3096 for (s = source_file; ; s++)
3098 char ch = *s;
3099 if (ch == '\0')
3100 break;
3101 if (ch == '/' || ch == '\\')
3102 source_file = s+1;
3104 ptr = append_chunk (NULL, 10, state);
3106 i = 1; /* Source file always exists as an attribute */
3107 if (INNER_CLASS_TYPE_P (clas) || DECL_INNER_CLASS_LIST (TYPE_NAME (clas)))
3108 i++;
3109 if (clas == object_type_node)
3110 i++;
3111 if (CLASS_DEPRECATED (TYPE_NAME (clas)))
3112 i++;
3114 PUT2 (i); /* attributes_count */
3116 /* Generate the SourceFile attribute. */
3117 if (SourceFile_node == NULL_TREE)
3119 SourceFile_node = get_identifier ("SourceFile");
3122 i = find_utf8_constant (&state->cpool, SourceFile_node);
3123 PUT2 (i); /* attribute_name_index */
3124 PUT4 (2);
3125 i = find_utf8_constant (&state->cpool, get_identifier (source_file));
3126 PUT2 (i);
3127 append_gcj_attribute (state, clas);
3128 append_innerclasses_attribute (state, clas);
3129 if (CLASS_DEPRECATED (TYPE_NAME (clas)))
3130 append_deprecated_attribute (state);
3132 /* Now finally generate the contents of the constant pool chunk. */
3133 i = count_constant_pool_bytes (&state->cpool);
3134 ptr = obstack_alloc (state->chunk_obstack, i);
3135 cpool_chunk->data = ptr;
3136 cpool_chunk->size = i;
3137 write_constant_pool (&state->cpool, ptr, i);
3138 return state->first;
3141 static GTY(()) tree Synthetic_node;
3142 static unsigned char *
3143 append_synthetic_attribute (struct jcf_partial *state)
3145 unsigned char *ptr = append_chunk (NULL, 6, state);
3146 int i;
3148 if (Synthetic_node == NULL_TREE)
3150 Synthetic_node = get_identifier ("Synthetic");
3152 i = find_utf8_constant (&state->cpool, Synthetic_node);
3153 PUT2 (i); /* Attribute string index */
3154 PUT4 (0); /* Attribute length */
3156 return ptr;
3159 static void
3160 append_deprecated_attribute (struct jcf_partial *state)
3162 unsigned char *ptr = append_chunk (NULL, 6, state);
3163 int i;
3165 i = find_utf8_constant (&state->cpool, get_identifier ("Deprecated"));
3166 PUT2 (i); /* Attribute string index */
3167 PUT4 (0); /* Attribute length */
3170 static void
3171 append_gcj_attribute (struct jcf_partial *state, tree class)
3173 unsigned char *ptr;
3174 int i;
3176 if (class != object_type_node)
3177 return;
3179 ptr = append_chunk (NULL, 6, state); /* 2+4 */
3180 i = find_utf8_constant (&state->cpool,
3181 get_identifier ("gnu.gcj.gcj-compiled"));
3182 PUT2 (i); /* Attribute string index */
3183 PUT4 (0); /* Attribute length */
3186 static tree InnerClasses_node;
3187 static void
3188 append_innerclasses_attribute (struct jcf_partial *state, tree class)
3190 tree orig_decl = TYPE_NAME (class);
3191 tree current, decl;
3192 int length = 0, i;
3193 unsigned char *ptr, *length_marker, *number_marker;
3195 if (!INNER_CLASS_TYPE_P (class) && !DECL_INNER_CLASS_LIST (orig_decl))
3196 return;
3198 ptr = append_chunk (NULL, 8, state); /* 2+4+2 */
3200 if (InnerClasses_node == NULL_TREE)
3202 InnerClasses_node = get_identifier ("InnerClasses");
3204 i = find_utf8_constant (&state->cpool, InnerClasses_node);
3205 PUT2 (i);
3206 length_marker = ptr; PUT4 (0); /* length, to be patched later */
3207 number_marker = ptr; PUT2 (0); /* number of classes, to be patched later */
3209 /* Generate the entries: all inner classes visible from the class
3210 being processed: itself, its enclosing classes, and the inner classes it declares. */
3211 while (class && INNER_CLASS_TYPE_P (class))
3213 const char *n;
3215 decl = TYPE_NAME (class);
3216 n = IDENTIFIER_POINTER (DECL_NAME (decl)) +
3217 IDENTIFIER_LENGTH (DECL_NAME (decl));
3219 while (n[-1] != '$')
3220 n--;
3221 append_innerclasses_attribute_entry (state, decl, get_identifier (n));
3222 length++;
3224 class = TREE_TYPE (DECL_CONTEXT (TYPE_NAME (class)));
3227 decl = orig_decl;
3228 for (current = DECL_INNER_CLASS_LIST (decl);
3229 current; current = TREE_CHAIN (current))
3231 append_innerclasses_attribute_entry (state, TREE_PURPOSE (current),
3232 TREE_VALUE (current));
3233 length++;
3236 ptr = length_marker; PUT4 (8*length+2);
3237 ptr = number_marker; PUT2 (length);
3240 static void
3241 append_innerclasses_attribute_entry (struct jcf_partial *state,
3242 tree decl, tree name)
3244 int icii, icaf;
3245 int ocii = 0, ini = 0;
3246 unsigned char *ptr = append_chunk (NULL, 8, state);
3248 icii = find_class_constant (&state->cpool, TREE_TYPE (decl));
3250 /* Sun's implementation seems to set ocii to 0 for anonymous
3251 classes (which aren't considered members of the class they appear
3252 in). The spec says that if the class is anonymous,
3253 inner_name_index must be zero. */
3254 if (!ANONYMOUS_CLASS_P (TREE_TYPE (decl)))
3256 ocii = find_class_constant (&state->cpool,
3257 TREE_TYPE (DECL_CONTEXT (decl)));
3258 ini = find_utf8_constant (&state->cpool, name);
3260 icaf = get_access_flags (decl);
3262 PUT2 (icii); PUT2 (ocii); PUT2 (ini); PUT2 (icaf);
3265 static char *
3266 make_class_file_name (tree clas)
3268 const char *dname, *cname, *slash;
3269 char *r;
3270 struct stat sb;
3271 char sep;
3273 cname = IDENTIFIER_POINTER (identifier_subst (DECL_NAME (TYPE_NAME (clas)),
3274 "", '.', DIR_SEPARATOR,
3275 ".class"));
3276 if (jcf_write_base_directory == NULL)
3278 /* Make sure we put the class file into the .java file's
3279 directory, and not into some subdirectory thereof. */
3280 char *t;
3281 dname = DECL_SOURCE_FILE (TYPE_NAME (clas));
3282 slash = strrchr (dname, DIR_SEPARATOR);
3283 #ifdef DIR_SEPARATOR_2
3284 if (! slash)
3285 slash = strrchr (dname, DIR_SEPARATOR_2);
3286 #endif
3287 if (! slash)
3289 dname = ".";
3290 slash = dname + 1;
3291 sep = DIR_SEPARATOR;
3293 else
3294 sep = *slash;
3296 t = strrchr (cname, DIR_SEPARATOR);
3297 if (t)
3298 cname = t + 1;
3300 else
3302 char *s;
3304 dname = jcf_write_base_directory;
3306 s = strrchr (dname, DIR_SEPARATOR);
3307 #ifdef DIR_SEPARATOR_2
3308 if (! s)
3309 s = strrchr (dname, DIR_SEPARATOR_2);
3310 #endif
3311 if (s)
3312 sep = *s;
3313 else
3314 sep = DIR_SEPARATOR;
3316 slash = dname + strlen (dname);
3319 r = xmalloc (slash - dname + strlen (cname) + 2);
3320 strncpy (r, dname, slash - dname);
3321 r[slash - dname] = sep;
3322 strcpy (&r[slash - dname + 1], cname);
3324 /* We try to make new directories when we need them. We only do
3325 this for directories which "might not" exist. For instance, we
3326 assume the `-d' directory exists, but we don't assume that any
3327 subdirectory below it exists. It might be worthwhile to keep
3328 track of which directories we've created to avoid gratuitous
3329 stat()s. */
3330 dname = r + (slash - dname) + 1;
3331 while (1)
3333 char *s = strchr (dname, sep);
3334 if (s == NULL)
3335 break;
3336 *s = '\0';
3337 if (stat (r, &sb) == -1
3338 /* Try to make it. */
3339 && mkdir (r, 0755) == -1)
3340 fatal_error ("can't create directory %s: %m", r);
3342 *s = sep;
3343 /* Skip consecutive separators. */
3344 for (dname = s + 1; *dname && *dname == sep; ++dname)
3348 return r;
3351 /* Write out the contents of a class (RECORD_TYPE) CLAS, as a .class file.
3352 The output .class file name is make_class_file_name(CLAS). */
3354 void
3355 write_classfile (tree clas)
3357 struct obstack *work = &temporary_obstack;
3358 struct jcf_partial state[1];
3359 char *class_file_name = make_class_file_name (clas);
3360 struct chunk *chunks;
3362 if (class_file_name != NULL)
3364 FILE *stream;
3365 char *temporary_file_name;
3367 /* The .class file is initially written to a ".tmp" file so that
3368 if multiple instances of the compiler are running at once
3369 they do not see partially formed class files. */
3370 temporary_file_name = concat (class_file_name, ".tmp", NULL);
3371 stream = fopen (temporary_file_name, "wb");
3372 if (stream == NULL)
3373 fatal_error ("can't open %s for writing: %m", temporary_file_name);
3375 jcf_dependency_add_target (class_file_name);
3376 init_jcf_state (state, work);
3377 chunks = generate_classfile (clas, state);
3378 write_chunks (stream, chunks);
3379 if (fclose (stream))
3380 fatal_error ("error closing %s: %m", temporary_file_name);
3382 /* If a file named by the string pointed to by `new' exists
3383 prior to the call to the `rename' function, the behaviour
3384 is implementation-defined. ISO 9899-1990 7.9.4.2.
3386 For example, on Win32 with MSVCRT, it is an error. */
3388 unlink (class_file_name);
3390 if (rename (temporary_file_name, class_file_name) == -1)
3392 remove (temporary_file_name);
3393 fatal_error ("can't create %s: %m", class_file_name);
3395 free (temporary_file_name);
3396 free (class_file_name);
3398 release_jcf_state (state);
3401 /* TODO:
3402 string concatenation
3403 synchronized statement
3406 #include "gt-java-jcf-write.h"