old-autovect-branch/gcc/java/jcf-write.c
1 /* Write out a Java(TM) class file.
2 Copyright (C) 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005
3 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING. If not, write to
18 the Free Software Foundation, 51 Franklin Street, Fifth Floor,
19 Boston, MA 02110-1301, USA.
21 Java and all Java-based marks are trademarks or registered trademarks
22 of Sun Microsystems, Inc. in the United States and other countries.
23 The Free Software Foundation is independent of Sun Microsystems, Inc. */
25 #include "config.h"
26 #include "system.h"
27 #include "coretypes.h"
28 #include "tm.h"
29 #include "jcf.h"
30 #include "tree.h"
31 #include "real.h"
32 #include "java-tree.h"
33 #include "obstack.h"
34 #include "rtl.h"
35 #include "flags.h"
36 #include "java-opcodes.h"
37 #include "parse.h" /* for BLOCK_EXPR_BODY */
38 #include "buffer.h"
39 #include "toplev.h"
40 #include "ggc.h"
41 #include "tm_p.h"
43 extern struct obstack temporary_obstack;
45 /* Base directory in which `.class' files should be written.
46 NULL means to put the file into the same directory as the
47 corresponding .java file. */
48 const char *jcf_write_base_directory = NULL;
50 /* Make sure bytecode.data is big enough for at least N more bytes. */
52 #define RESERVE(N) \
53 do { CHECK_OP(state); \
54 if (state->bytecode.ptr + (N) > state->bytecode.limit) \
55 buffer_grow (&state->bytecode, N); } while (0)
57 /* Add a 1-byte instruction/operand I to bytecode.data,
58 assuming space has already been RESERVE'd. */
60 #define OP1(I) (*state->bytecode.ptr++ = (I), CHECK_OP(state))
62 /* Like OP1, but I is a 2-byte big endian integer. */
64 #define OP2(I) \
65 do { int _i = (I); OP1 (_i >> 8); OP1 (_i); CHECK_OP(state); } while (0)
67 /* Like OP1, but I is a 4-byte big endian integer. */
69 #define OP4(I) \
70 do { int _i = (I); OP1 (_i >> 24); OP1 (_i >> 16); \
71 OP1 (_i >> 8); OP1 (_i); CHECK_OP(state); } while (0)
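/* For example, push_int_const below emits a "sipush 1000" essentially as

     RESERVE (3);
     OP1 (OPCODE_sipush);
     OP2 (1000);

   where OP2 writes the two bytes 0x03 0xE8, most significant byte first.
   RESERVE must be called before the OP* macros so they never write past
   bytecode.limit.  */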
73 /* Macro to call each time we push I words on the JVM stack. */
75 #define NOTE_PUSH(I) \
76 do { state->code_SP += (I); \
77 if (state->code_SP > state->code_SP_max) \
78 state->code_SP_max = state->code_SP; } while (0)
80 /* Macro to call each time we pop I words from the JVM stack. */
82 #define NOTE_POP(I) \
83 do { state->code_SP -= (I); if (state->code_SP < 0) abort(); } while (0)
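/* code_SP tracks the current operand-stack depth in words, and code_SP_max
   records its high-water mark, which becomes the method's max_stack value
   when the Code attribute is written out.  Pushing a long or double counts
   as 2 words, so e.g. emit_load of a long does NOTE_PUSH (2).  */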
85 /* A chunk or segment of a .class file. */
87 struct chunk
89 /* The next segment of this .class file. */
90 struct chunk *next;
92 /* The actual data in this segment to be written to the .class file. */
93 unsigned char *data;
95 /* The size of the segment to be written to the .class file. */
96 int size;
99 #define PENDING_CLEANUP_PC (-3)
100 #define PENDING_EXIT_PC (-2)
101 #define UNDEFINED_PC (-1)
103 /* Each "block" represents a label plus the bytecode instructions following.
104 There may be branches out of the block, but no incoming jumps, except
105 to the beginning of the block.
107 If (pc < 0), the jcf_block is not an actual block (i.e. it has no
108 associated code yet), but it is an undefined label.
111 struct jcf_block
113 /* For blocks that are defined, the next block (in pc order).
114 For blocks that are not-yet-defined the end label of a LABELED_BLOCK_EXPR
115 or a cleanup expression (from a TRY_FINALLY_EXPR),
116 this is the next (outer) such end label, in a stack headed by
117 labeled_blocks in jcf_partial. */
118 struct jcf_block *next;
120 /* In the not-yet-defined end label for an unfinished EXIT_BLOCK_EXPR.
121 pc is PENDING_EXIT_PC.
122 In the not-yet-defined end label for pending cleanup subroutine,
123 pc is PENDING_CLEANUP_PC.
124 For other not-yet-defined labels, pc is UNDEFINED_PC.
126 If the label has been defined:
127 Until perform_relocations is finished, this is the maximum possible
128 value of the bytecode offset at the beginning of this block.
129 After perform_relocations, it is the actual offset (pc). */
130 int pc;
132 int linenumber;
134 /* After finish_jcf_block is called, the actual instructions
135 contained in this block. Before that it is NULL, and the instructions
136 are in state->bytecode. */
137 union {
138 struct chunk *chunk;
140 /* If pc==PENDING_CLEANUP_PC, start_label is the start of the region
141 covered by the cleanup. */
142 struct jcf_block *start_label;
143 } v;
145 union {
146 /* Set of relocations (in reverse offset order) for this block. */
147 struct jcf_relocation *relocations;
149 /* If this block is that of the not-yet-defined end label of
150 a LABELED_BLOCK_EXPR, then labeled_block is that LABELED_BLOCK_EXPR.
151 If pc==PENDING_CLEANUP_PC, the cleanup that needs to be run. */
152 tree labeled_block;
153 } u;
156 /* A "relocation" type for the 0-3 bytes of padding at the start
157 of a tableswitch or a lookupswitch. */
158 #define SWITCH_ALIGN_RELOC 4
160 /* A relocation type for the labels in a tableswitch or a lookupswitch;
161 these are relative to the start of the instruction, but (due to
162 the 0-3 bytes of padding), we don't know the offset before relocation. */
163 #define BLOCK_START_RELOC 1
165 struct jcf_relocation
167 /* Next relocation for the current jcf_block. */
168 struct jcf_relocation *next;
170 /* The (byte) offset within the current block that needs to be relocated. */
171 HOST_WIDE_INT offset;
173 /* 0 if offset is a 4-byte relative offset.
174 4 (SWITCH_ALIGN_RELOC) if offset points to 0-3 padding bytes inserted
175 for proper alignment in tableswitch/lookupswitch instructions.
176 1 (BLOCK_START_RELOC) if offset points to a 4-byte offset relative
177 to the start of the containing block.
178 -1 if offset is a 2-byte relative offset.
179 < -1 if offset is the address of an instruction with a 2-byte offset
180 that does not have a corresponding 4-byte offset version, in which
181 case the absolute value of kind is the inverted opcode.
182 > 4 if offset is the address of an instruction (such as jsr) with a
183 2-byte offset that does have a corresponding 4-byte offset version,
184 in which case kind is the opcode of the 4-byte version (such as jsr_w). */
185 int kind;
187 /* The label the relocation wants to actually transfer to. */
188 struct jcf_block *label;
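/* For example, emit_goto below records kind = OPCODE_goto_w (a "> 4" kind,
   so the short goto can be rewritten as goto_w if its offset overflows),
   while emit_if records kind = -inv_opcode (a "< -1" kind, since the
   if<cond> instructions have no 4-byte form and may instead need an
   inserted goto_w).  */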
191 #define RELOCATION_VALUE_0 ((HOST_WIDE_INT)0)
192 #define RELOCATION_VALUE_1 ((HOST_WIDE_INT)1)
194 /* State for a single catch clause. */
196 struct jcf_handler
198 struct jcf_handler *next;
200 struct jcf_block *start_label;
201 struct jcf_block *end_label;
202 struct jcf_block *handler_label;
204 /* The sub-class of Throwable handled, or NULL_TREE (for finally). */
205 tree type;
208 /* State for the current switch statement. */
210 struct jcf_switch_state
212 struct jcf_switch_state *prev;
213 struct jcf_block *default_label;
215 struct jcf_relocation *cases;
216 int num_cases;
217 HOST_WIDE_INT min_case, max_case;
220 /* This structure is used to contain the various pieces that will
221 become a .class file. */
223 struct jcf_partial
225 struct chunk *first;
226 struct chunk *chunk;
227 struct obstack *chunk_obstack;
228 tree current_method;
230 /* List of basic blocks for the current method. */
231 struct jcf_block *blocks;
232 struct jcf_block *last_block;
234 struct localvar_info *first_lvar;
235 struct localvar_info *last_lvar;
236 int lvar_count;
238 CPool cpool;
240 int linenumber_count;
242 /* Until perform_relocations, this is an upper bound on the number
243 of bytes (so far) in the instructions for the current method. */
244 int code_length;
246 /* Stack of undefined ending labels for LABELED_BLOCK_EXPR. */
247 struct jcf_block *labeled_blocks;
249 /* The current stack size (stack pointer) in the current method. */
250 int code_SP;
252 /* The largest extent of stack size (stack pointer) in the current method. */
253 int code_SP_max;
255 /* Contains a mapping from local var slot number to localvar_info. */
256 struct buffer localvars;
258 /* The buffer allocated for bytecode for the current jcf_block. */
259 struct buffer bytecode;
261 /* Chain of exception handlers for the current method. */
262 struct jcf_handler *handlers;
264 /* Last element in handlers chain. */
265 struct jcf_handler *last_handler;
267 /* Number of exception handlers for the current method. */
268 int num_handlers;
270 /* Number of finalizers we are currently nested within. */
271 int num_finalizers;
273 /* If non-NULL, use this for the return value. */
274 tree return_value_decl;
276 /* Information about the current switch statement. */
277 struct jcf_switch_state *sw_state;
279 /* The count of jsr instructions that have been emitted. */
280 long num_jsrs;
283 static void generate_bytecode_insns (tree, int, struct jcf_partial *);
284 static struct chunk * alloc_chunk (struct chunk *, unsigned char *,
285 int, struct obstack *);
286 static unsigned char * append_chunk (unsigned char *, int,
287 struct jcf_partial *);
288 static void append_chunk_copy (unsigned char *, int, struct jcf_partial *);
289 static struct jcf_block * gen_jcf_label (struct jcf_partial *);
290 static void finish_jcf_block (struct jcf_partial *);
291 static void define_jcf_label (struct jcf_block *, struct jcf_partial *);
292 static struct jcf_block * get_jcf_label_here (struct jcf_partial *);
293 static void put_linenumber (int, struct jcf_partial *);
294 static void localvar_alloc (tree, struct jcf_partial *);
295 static void maybe_free_localvar (tree, struct jcf_partial *, int);
296 static int get_access_flags (tree);
297 static void write_chunks (FILE *, struct chunk *);
298 static int adjust_typed_op (tree, int);
299 static void generate_bytecode_conditional (tree, struct jcf_block *,
300 struct jcf_block *, int,
301 struct jcf_partial *);
302 static void generate_bytecode_return (tree, struct jcf_partial *);
303 static void perform_relocations (struct jcf_partial *);
304 static void init_jcf_state (struct jcf_partial *, struct obstack *);
305 static void init_jcf_method (struct jcf_partial *, tree);
306 static void release_jcf_state (struct jcf_partial *);
307 static int get_classfile_modifiers (tree class);
308 static struct chunk * generate_classfile (tree, struct jcf_partial *);
309 static struct jcf_handler *alloc_handler (struct jcf_block *,
310 struct jcf_block *,
311 struct jcf_partial *);
312 static void emit_iinc (tree, HOST_WIDE_INT, struct jcf_partial *);
313 static void emit_reloc (HOST_WIDE_INT, int, struct jcf_block *,
314 struct jcf_partial *);
315 static void push_constant1 (HOST_WIDE_INT, struct jcf_partial *);
316 static void push_constant2 (HOST_WIDE_INT, struct jcf_partial *);
317 static void push_int_const (HOST_WIDE_INT, struct jcf_partial *);
318 static int find_constant_wide (HOST_WIDE_INT, HOST_WIDE_INT,
319 struct jcf_partial *);
320 static void push_long_const (HOST_WIDE_INT, HOST_WIDE_INT,
321 struct jcf_partial *);
322 static int find_constant_index (tree, struct jcf_partial *);
325 static void field_op (tree, int, struct jcf_partial *);
326 static void maybe_wide (int, int, struct jcf_partial *);
327 static void emit_dup (int, int, struct jcf_partial *);
328 static void emit_pop (int, struct jcf_partial *);
329 static void emit_load_or_store (tree, int, struct jcf_partial *);
330 static void emit_load (tree, struct jcf_partial *);
331 static void emit_store (tree, struct jcf_partial *);
332 static void emit_unop (enum java_opcode, tree, struct jcf_partial *);
333 static void emit_binop (enum java_opcode, tree, struct jcf_partial *);
336 static void emit_switch_reloc (struct jcf_block *, struct jcf_partial *);
337 static void emit_case_reloc (struct jcf_relocation *, struct jcf_partial *);
338 static void emit_if (struct jcf_block *, int, int, struct jcf_partial *);
339 static void emit_goto (struct jcf_block *, struct jcf_partial *);
340 static void emit_jsr (struct jcf_block *, struct jcf_partial *);
341 static void call_cleanups (struct jcf_block *, struct jcf_partial *);
342 static char *make_class_file_name (tree);
343 static unsigned char *append_synthetic_attribute (struct jcf_partial *);
344 static void append_deprecated_attribute (struct jcf_partial *);
345 static void append_innerclasses_attribute (struct jcf_partial *, tree);
346 static void append_innerclasses_attribute_entry (struct jcf_partial *, tree, tree);
347 static void append_gcj_attribute (struct jcf_partial *, tree);
349 /* Utility macros for appending (big-endian) data to a buffer.
350 We assume a local variable 'ptr' points into where we want to
351 write next, and we assume enough space has been allocated. */
353 #ifdef ENABLE_JC1_CHECKING
354 static int CHECK_PUT (void *, struct jcf_partial *, int);
356 static int
357 CHECK_PUT (void *ptr, struct jcf_partial *state, int i)
359 if ((unsigned char *) ptr < state->chunk->data
360 || (unsigned char *) ptr + i > state->chunk->data + state->chunk->size)
361 abort ();
363 return 0;
365 #else
366 #define CHECK_PUT(PTR, STATE, I) ((void)0)
367 #endif
369 #define PUT1(X) (CHECK_PUT(ptr, state, 1), *ptr++ = (X))
370 #define PUT2(X) (PUT1((X) >> 8), PUT1((X) & 0xFF))
371 #define PUT4(X) (PUT2((X) >> 16), PUT2((X) & 0xFFFF))
372 #define PUTN(P, N) (CHECK_PUT(ptr, state, N), memcpy(ptr, P, N), ptr += (N))
374 /* There are some cases below where CHECK_PUT is guaranteed to fail.
375 Use the following macros in those specific cases. */
376 #define UNSAFE_PUT1(X) (*ptr++ = (X))
377 #define UNSAFE_PUT2(X) (UNSAFE_PUT1((X) >> 8), UNSAFE_PUT1((X) & 0xFF))
378 #define UNSAFE_PUT4(X) (UNSAFE_PUT2((X) >> 16), UNSAFE_PUT2((X) & 0xFFFF))
379 #define UNSAFE_PUTN(P, N) (memcpy(ptr, P, N), ptr += (N))
382 /* Allocate a new chunk on obstack WORK, and link it in after LAST.
383 Set the data and size fields to DATA and SIZE, respectively.
384 However, if DATA is NULL and SIZE>0, allocate a buffer as well. */
386 static struct chunk *
387 alloc_chunk (struct chunk *last, unsigned char *data,
388 int size, struct obstack *work)
390 struct chunk *chunk = obstack_alloc (work, sizeof(struct chunk));
392 if (data == NULL && size > 0)
393 data = obstack_alloc (work, size);
395 chunk->next = NULL;
396 chunk->data = data;
397 chunk->size = size;
398 if (last != NULL)
399 last->next = chunk;
400 return chunk;
403 #ifdef ENABLE_JC1_CHECKING
404 static int CHECK_OP (struct jcf_partial *);
406 static int
407 CHECK_OP (struct jcf_partial *state)
409 if (state->bytecode.ptr > state->bytecode.limit)
410 abort ();
412 return 0;
414 #else
415 #define CHECK_OP(STATE) ((void) 0)
416 #endif
418 static unsigned char *
419 append_chunk (unsigned char *data, int size, struct jcf_partial *state)
421 state->chunk = alloc_chunk (state->chunk, data, size, state->chunk_obstack);
422 if (state->first == NULL)
423 state->first = state->chunk;
424 return state->chunk->data;
427 static void
428 append_chunk_copy (unsigned char *data, int size, struct jcf_partial *state)
430 unsigned char *ptr = append_chunk (NULL, size, state);
431 memcpy (ptr, data, size);
434 static struct jcf_block *
435 gen_jcf_label (struct jcf_partial *state)
437 struct jcf_block *block
438 = obstack_alloc (state->chunk_obstack, sizeof (struct jcf_block));
439 block->next = NULL;
440 block->linenumber = -1;
441 block->pc = UNDEFINED_PC;
442 return block;
445 static void
446 finish_jcf_block (struct jcf_partial *state)
448 struct jcf_block *block = state->last_block;
449 struct jcf_relocation *reloc;
450 int code_length = BUFFER_LENGTH (&state->bytecode);
451 int pc = state->code_length;
452 append_chunk_copy (state->bytecode.data, code_length, state);
453 BUFFER_RESET (&state->bytecode);
454 block->v.chunk = state->chunk;
456 /* Calculate code_length to the maximum value it can have. */
457 pc += block->v.chunk->size;
458 for (reloc = block->u.relocations; reloc != NULL; reloc = reloc->next)
460 int kind = reloc->kind;
461 if (kind == SWITCH_ALIGN_RELOC)
462 pc += 3;
463 else if (kind > BLOCK_START_RELOC)
464 pc += 2; /* 2-byte offset may grow to 4-byte offset */
465 else if (kind < -1)
466 pc += 5; /* May need to add a goto_w. */
468 state->code_length = pc;
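/* Note that the pc computed above is deliberately pessimistic: every
   2-byte branch offset is assumed to grow by 2 bytes (or by 5 where a
   goto_w may have to be inserted), and every switch is assumed to need
   the full 3 bytes of alignment padding.  perform_relocations later
   replaces these upper bounds with the actual offsets.  */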
471 static void
472 define_jcf_label (struct jcf_block *label, struct jcf_partial *state)
474 if (state->last_block != NULL)
475 finish_jcf_block (state);
476 label->pc = state->code_length;
477 if (state->blocks == NULL)
478 state->blocks = label;
479 else
480 state->last_block->next = label;
481 state->last_block = label;
482 label->next = NULL;
483 label->u.relocations = NULL;
486 static struct jcf_block *
487 get_jcf_label_here (struct jcf_partial *state)
489 if (state->last_block != NULL && BUFFER_LENGTH (&state->bytecode) == 0)
490 return state->last_block;
491 else
493 struct jcf_block *label = gen_jcf_label (state);
494 define_jcf_label (label, state);
495 return label;
499 /* Note a line number entry for the current PC and given LINE. */
501 static void
502 put_linenumber (int line, struct jcf_partial *state)
504 struct jcf_block *label = get_jcf_label_here (state);
505 if (label->linenumber > 0)
507 label = gen_jcf_label (state);
508 define_jcf_label (label, state);
510 label->linenumber = line;
511 state->linenumber_count++;
514 /* Allocate a new jcf_handler, for a catch clause that catches exceptions
515 in the range (START_LABEL, END_LABEL). */
517 static struct jcf_handler *
518 alloc_handler (struct jcf_block *start_label, struct jcf_block *end_label,
519 struct jcf_partial *state)
521 struct jcf_handler *handler
522 = obstack_alloc (state->chunk_obstack, sizeof (struct jcf_handler));
523 handler->start_label = start_label;
524 handler->end_label = end_label;
525 handler->handler_label = get_jcf_label_here (state);
526 if (state->handlers == NULL)
527 state->handlers = handler;
528 else
529 state->last_handler->next = handler;
530 state->last_handler = handler;
531 handler->next = NULL;
532 state->num_handlers++;
533 return handler;
537 /* The index of the JVM local variable allocated for this DECL.
538 This is assigned when generating .class files;
539 contrast DECL_LOCAL_SLOT_NUMBER which is set when *reading* a .class file.
540 (We don't allocate DECL_LANG_SPECIFIC for locals from Java source code.) */
542 #define DECL_LOCAL_INDEX(DECL) DECL_ALIGN(DECL)
544 struct localvar_info
546 struct localvar_info *next;
548 tree decl;
549 struct jcf_block *start_label;
550 struct jcf_block *end_label;
553 #define localvar_buffer ((struct localvar_info**) state->localvars.data)
554 #define localvar_max \
555 ((struct localvar_info**) state->localvars.ptr - localvar_buffer)
557 static void
558 localvar_alloc (tree decl, struct jcf_partial *state)
560 struct jcf_block *start_label = get_jcf_label_here (state);
561 int wide = TYPE_IS_WIDE (TREE_TYPE (decl));
562 int index;
563 struct localvar_info *info;
564 struct localvar_info **ptr = localvar_buffer;
565 struct localvar_info **limit
566 = (struct localvar_info**) state->localvars.ptr;
567 for (index = 0; ptr < limit; index++, ptr++)
569 if (ptr[0] == NULL
570 && (! wide || ((ptr+1) < limit && ptr[1] == NULL)))
571 break;
573 if (ptr == limit)
575 buffer_grow (&state->localvars, 2 * sizeof (struct localvar_info*));
576 ptr = (struct localvar_info**) state->localvars.data + index;
577 state->localvars.ptr = (unsigned char *) (ptr + 1 + wide);
579 info = obstack_alloc (state->chunk_obstack, sizeof (struct localvar_info));
580 ptr[0] = info;
581 if (wide)
582 ptr[1] = (struct localvar_info *)(~0);
583 DECL_LOCAL_INDEX (decl) = index;
584 info->decl = decl;
585 info->start_label = start_label;
587 if (debug_info_level > DINFO_LEVEL_TERSE
588 && DECL_NAME (decl) != NULL_TREE)
590 /* Generate debugging info. */
591 info->next = NULL;
592 if (state->last_lvar != NULL)
593 state->last_lvar->next = info;
594 else
595 state->first_lvar = info;
596 state->last_lvar = info;
597 state->lvar_count++;
601 static void
602 maybe_free_localvar (tree decl, struct jcf_partial *state, int really)
604 struct jcf_block *end_label = get_jcf_label_here (state);
605 int index = DECL_LOCAL_INDEX (decl);
606 struct localvar_info **ptr = &localvar_buffer [index];
607 struct localvar_info *info = *ptr;
608 int wide = TYPE_IS_WIDE (TREE_TYPE (decl));
610 info->end_label = end_label;
612 if (info->decl != decl)
613 abort ();
614 if (! really)
615 return;
616 ptr[0] = NULL;
617 if (wide)
619 if (ptr[1] != (struct localvar_info *)(~0))
620 abort ();
621 ptr[1] = NULL;
626 #define STACK_TARGET 1
627 #define IGNORE_TARGET 2
629 /* Get the access flags of a class (TYPE_DECL), a method (FUNCTION_DECL), or
630 a field (FIELD_DECL or VAR_DECL, if static), as encoded in a .class file. */
632 static int
633 get_access_flags (tree decl)
635 int flags = 0;
636 int isfield = TREE_CODE (decl) == FIELD_DECL || TREE_CODE (decl) == VAR_DECL;
638 if (isfield || TREE_CODE (decl) == FUNCTION_DECL)
640 if (TREE_PROTECTED (decl))
641 flags |= ACC_PROTECTED;
642 if (TREE_PRIVATE (decl))
643 flags |= ACC_PRIVATE;
645 else if (TREE_CODE (decl) == TYPE_DECL)
647 if (CLASS_PUBLIC (decl))
648 flags |= ACC_PUBLIC;
649 if (CLASS_FINAL (decl))
650 flags |= ACC_FINAL;
651 if (CLASS_SUPER (decl))
652 flags |= ACC_SUPER;
653 if (CLASS_ABSTRACT (decl))
654 flags |= ACC_ABSTRACT;
655 if (CLASS_INTERFACE (decl))
656 flags |= ACC_INTERFACE;
657 if (CLASS_STATIC (decl))
658 flags |= ACC_STATIC;
659 if (CLASS_PRIVATE (decl))
660 flags |= ACC_PRIVATE;
661 if (CLASS_PROTECTED (decl))
662 flags |= ACC_PROTECTED;
663 if (ANONYMOUS_CLASS_P (TREE_TYPE (decl))
664 || LOCAL_CLASS_P (TREE_TYPE (decl)))
665 flags |= ACC_PRIVATE;
666 if (CLASS_STRICTFP (decl))
667 flags |= ACC_STRICT;
669 else
670 abort ();
672 if (TREE_CODE (decl) == FUNCTION_DECL)
674 if (METHOD_PUBLIC (decl))
675 flags |= ACC_PUBLIC;
676 if (METHOD_FINAL (decl))
677 flags |= ACC_FINAL;
678 if (METHOD_NATIVE (decl))
679 flags |= ACC_NATIVE;
680 if (METHOD_STATIC (decl))
681 flags |= ACC_STATIC;
682 if (METHOD_SYNCHRONIZED (decl))
683 flags |= ACC_SYNCHRONIZED;
684 if (METHOD_ABSTRACT (decl))
685 flags |= ACC_ABSTRACT;
686 if (METHOD_STRICTFP (decl))
687 flags |= ACC_STRICT;
689 if (isfield)
691 if (FIELD_PUBLIC (decl))
692 flags |= ACC_PUBLIC;
693 if (FIELD_FINAL (decl))
694 flags |= ACC_FINAL;
695 if (FIELD_STATIC (decl))
696 flags |= ACC_STATIC;
697 if (FIELD_VOLATILE (decl))
698 flags |= ACC_VOLATILE;
699 if (FIELD_TRANSIENT (decl))
700 flags |= ACC_TRANSIENT;
702 return flags;
705 /* Write the list of segments starting at CHUNKS to STREAM. */
707 static void
708 write_chunks (FILE* stream, struct chunk *chunks)
710 for (; chunks != NULL; chunks = chunks->next)
711 fwrite (chunks->data, chunks->size, 1, stream);
714 /* Push a 1-word constant in the constant pool at the given INDEX.
715 (Caller is responsible for doing NOTE_PUSH.) */
717 static void
718 push_constant1 (HOST_WIDE_INT index, struct jcf_partial *state)
720 RESERVE (3);
721 if (index < 256)
723 OP1 (OPCODE_ldc);
724 OP1 (index);
726 else
728 OP1 (OPCODE_ldc_w);
729 OP2 (index);
733 /* Push a 2-word constant in the constant pool at the given INDEX.
734 (Caller is responsible for doing NOTE_PUSH.) */
736 static void
737 push_constant2 (HOST_WIDE_INT index, struct jcf_partial *state)
739 RESERVE (3);
740 OP1 (OPCODE_ldc2_w);
741 OP2 (index);
744 /* Push 32-bit integer constant on VM stack.
745 Caller is responsible for doing NOTE_PUSH. */
747 static void
748 push_int_const (HOST_WIDE_INT i, struct jcf_partial *state)
750 RESERVE(3);
751 if (i >= -1 && i <= 5)
752 OP1(OPCODE_iconst_0 + i);
753 else if (i >= -128 && i < 128)
755 OP1(OPCODE_bipush);
756 OP1(i);
758 else if (i >= -32768 && i < 32768)
760 OP1(OPCODE_sipush);
761 OP2(i);
763 else
765 i = find_constant1 (&state->cpool, CONSTANT_Integer,
766 (jword)(i & 0xFFFFFFFF));
767 push_constant1 (i, state);
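/* push_int_const thus picks the shortest encoding: 5 becomes iconst_5
   (1 byte), 100 becomes bipush 100 (2 bytes), 1000 becomes sipush 1000
   (3 bytes), and 100000 falls back to an ldc of a CONSTANT_Integer
   pool entry.  */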
771 static int
772 find_constant_wide (HOST_WIDE_INT lo, HOST_WIDE_INT hi,
773 struct jcf_partial *state)
775 unsigned HOST_WIDE_INT w1;
776 HOST_WIDE_INT w2;
777 lshift_double (lo, hi, -32, 64, &w1, &w2, 1);
778 return find_constant2 (&state->cpool, CONSTANT_Long,
779 (jword)(w1 & 0xFFFFFFFF), (jword)(lo & 0xFFFFFFFF));
782 /* Find or allocate a constant pool entry for the given VALUE.
783 Return the index in the constant pool. */
785 static int
786 find_constant_index (tree value, struct jcf_partial *state)
788 if (TREE_CODE (value) == INTEGER_CST)
790 if (TYPE_PRECISION (TREE_TYPE (value)) <= 32)
791 return find_constant1 (&state->cpool, CONSTANT_Integer,
792 (jword)(TREE_INT_CST_LOW (value) & 0xFFFFFFFF));
793 else
794 return find_constant_wide (TREE_INT_CST_LOW (value),
795 TREE_INT_CST_HIGH (value), state);
797 else if (TREE_CODE (value) == REAL_CST)
799 long words[2];
801 /* IEEE NaN can have many values, but the Java VM spec defines a
802 canonical NaN. */
803 if (flag_emit_class_files
804 && REAL_VALUE_ISNAN (TREE_REAL_CST (value)))
806 if (TYPE_PRECISION (TREE_TYPE (value)) == 32)
807 return find_constant1 (&state->cpool, CONSTANT_Float,
808 0x7fc00000);
809 else
810 return find_constant2 (&state->cpool, CONSTANT_Double,
811 0x7ff80000, 0x00000000);
814 real_to_target (words, &TREE_REAL_CST (value),
815 TYPE_MODE (TREE_TYPE (value)));
816 words[0] &= 0xffffffff;
817 words[1] &= 0xffffffff;
819 if (TYPE_PRECISION (TREE_TYPE (value)) == 32)
820 return find_constant1 (&state->cpool, CONSTANT_Float, (jword)words[0]);
821 else
822 return find_constant2 (&state->cpool, CONSTANT_Double,
823 (jword)words[1-FLOAT_WORDS_BIG_ENDIAN],
824 (jword)words[FLOAT_WORDS_BIG_ENDIAN]);
826 else if (TREE_CODE (value) == STRING_CST)
827 return find_string_constant (&state->cpool, value);
829 else
830 abort ();
833 /* Push 64-bit long constant on VM stack.
834 Caller is responsible for doing NOTE_PUSH. */
836 static void
837 push_long_const (HOST_WIDE_INT lo, HOST_WIDE_INT hi, struct jcf_partial *state)
839 unsigned HOST_WIDE_INT highpart;
840 HOST_WIDE_INT dummy;
841 jint lowpart = WORD_TO_INT (lo);
843 rshift_double (lo, hi, 32, 64, &highpart, &dummy, 1);
845 if (highpart == 0 && (lowpart == 0 || lowpart == 1))
847 RESERVE(1);
848 OP1(OPCODE_lconst_0 + lowpart);
850 else if ((highpart == 0 && lowpart > 0 && lowpart < 32768)
851 || (highpart == (unsigned HOST_WIDE_INT)-1
852 && lowpart < 0 && lowpart >= -32768))
854 push_int_const (lowpart, state);
855 RESERVE (1);
856 OP1 (OPCODE_i2l);
858 else
859 push_constant2 (find_constant_wide (lo, hi, state), state);
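/* Examples: 0L and 1L become lconst_0/lconst_1; a value such as 1000L,
   which fits in a short, is pushed as sipush 1000 followed by i2l; anything
   else becomes an ldc2_w of a CONSTANT_Long constant pool entry.  */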
862 static void
863 field_op (tree field, int opcode, struct jcf_partial *state)
865 int index = find_fieldref_index (&state->cpool, field);
866 RESERVE (3);
867 OP1 (opcode);
868 OP2 (index);
871 /* Returns an integer in the range 0 (for 'int') through 7 (for 'short'),
872 with 4 denoting an object reference, which matches the pattern of how
873 JVM opcodes typically depend on the operand type. */
875 static int
876 adjust_typed_op (tree type, int max)
878 switch (TREE_CODE (type))
880 case POINTER_TYPE:
881 case RECORD_TYPE: return 4;
882 case BOOLEAN_TYPE:
883 return TYPE_PRECISION (type) == 32 || max < 5 ? 0 : 5;
884 case CHAR_TYPE:
885 return TYPE_PRECISION (type) == 32 || max < 6 ? 0 : 6;
886 case INTEGER_TYPE:
887 switch (TYPE_PRECISION (type))
889 case 8: return max < 5 ? 0 : 5;
890 case 16: return max < 7 ? 0 : 7;
891 case 32: return 0;
892 case 64: return 1;
894 break;
895 case REAL_TYPE:
896 switch (TYPE_PRECISION (type))
898 case 32: return 2;
899 case 64: return 3;
901 break;
902 default:
903 break;
905 abort ();
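/* The returned value is the offset added to a "typed" base opcode:
   0 = int, 1 = long, 2 = float, 3 = double, 4 = reference, 5 = byte/boolean,
   6 = char, 7 = short.  For instance, OPCODE_iaload + adjust_typed_op (type, 7)
   selects one of iaload, laload, faload, daload, aaload, baload, caload or
   saload.  */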
908 static void
909 maybe_wide (int opcode, int index, struct jcf_partial *state)
911 if (index >= 256)
913 RESERVE (4);
914 OP1 (OPCODE_wide);
915 OP1 (opcode);
916 OP2 (index);
918 else
920 RESERVE (2);
921 OP1 (opcode);
922 OP1 (index);
926 /* Compile code to duplicate with offset, where
927 SIZE is the size of the stack item to duplicate (1 or 2), and
928 OFFSET is where to insert the result (must be 0, 1, or 2).
929 (The new words get inserted at stack[SP-size-offset].) */
931 static void
932 emit_dup (int size, int offset, struct jcf_partial *state)
934 int kind;
935 if (size == 0)
936 return;
937 RESERVE(1);
938 if (offset == 0)
939 kind = size == 1 ? OPCODE_dup : OPCODE_dup2;
940 else if (offset == 1)
941 kind = size == 1 ? OPCODE_dup_x1 : OPCODE_dup2_x1;
942 else if (offset == 2)
943 kind = size == 1 ? OPCODE_dup_x2 : OPCODE_dup2_x2;
944 else
945 abort();
946 OP1 (kind);
947 NOTE_PUSH (size);
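/* The (SIZE, OFFSET) pair maps onto the six JVM dup variants:
   (1,0) dup, (1,1) dup_x1, (1,2) dup_x2,
   (2,0) dup2, (2,1) dup2_x1, (2,2) dup2_x2.  */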
950 static void
951 emit_pop (int size, struct jcf_partial *state)
953 RESERVE (1);
954 OP1 (OPCODE_pop - 1 + size);
957 static void
958 emit_iinc (tree var, HOST_WIDE_INT value, struct jcf_partial *state)
960 int slot = DECL_LOCAL_INDEX (var);
962 if (value < -128 || value > 127 || slot >= 256)
964 RESERVE (6);
965 OP1 (OPCODE_wide);
966 OP1 (OPCODE_iinc);
967 OP2 (slot);
968 OP2 (value);
970 else
972 RESERVE (3);
973 OP1 (OPCODE_iinc);
974 OP1 (slot);
975 OP1 (value);
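/* For example, incrementing a local in slot 3 by 1 is the 3-byte sequence
   "iinc 3 1"; a slot number of 256 or more, or an increment outside
   [-128, 127], forces the 6-byte wide form.  */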
979 static void
980 emit_load_or_store (tree var, /* Variable to load from or store into. */
981 int opcode, /* Either OPCODE_iload or OPCODE_istore. */
982 struct jcf_partial *state)
984 tree type = TREE_TYPE (var);
985 int kind = adjust_typed_op (type, 4);
986 int index = DECL_LOCAL_INDEX (var);
987 if (index <= 3)
989 RESERVE (1);
990 OP1 (opcode + 5 + 4 * kind + index); /* [ilfda]{load,store}_[0123] */
992 else
993 maybe_wide (opcode + kind, index, state); /* [ilfda]{load,store} */
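/* For example, an int load from slot 2 emits the 1-byte iload_2, a
   reference load from slot 5 emits "aload 5", and a slot number of 256
   or more goes through the wide prefix via maybe_wide.  */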
996 static void
997 emit_load (tree var, struct jcf_partial *state)
999 emit_load_or_store (var, OPCODE_iload, state);
1000 NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (var)) ? 2 : 1);
1003 static void
1004 emit_store (tree var, struct jcf_partial *state)
1006 emit_load_or_store (var, OPCODE_istore, state);
1007 NOTE_POP (TYPE_IS_WIDE (TREE_TYPE (var)) ? 2 : 1);
1010 static void
1011 emit_unop (enum java_opcode opcode, tree type ATTRIBUTE_UNUSED,
1012 struct jcf_partial *state)
1014 RESERVE(1);
1015 OP1 (opcode);
1018 static void
1019 emit_binop (enum java_opcode opcode, tree type, struct jcf_partial *state)
1021 int size = TYPE_IS_WIDE (type) ? 2 : 1;
1022 RESERVE(1);
1023 OP1 (opcode);
1024 NOTE_POP (size);
1027 static void
1028 emit_reloc (HOST_WIDE_INT value, int kind,
1029 struct jcf_block *target, struct jcf_partial *state)
1031 struct jcf_relocation *reloc
1032 = obstack_alloc (state->chunk_obstack, sizeof (struct jcf_relocation));
1033 struct jcf_block *block = state->last_block;
1034 reloc->next = block->u.relocations;
1035 block->u.relocations = reloc;
1036 reloc->offset = BUFFER_LENGTH (&state->bytecode);
1037 reloc->label = target;
1038 reloc->kind = kind;
1039 if (kind == 0 || kind == BLOCK_START_RELOC)
1040 OP4 (value);
1041 else if (kind != SWITCH_ALIGN_RELOC)
1042 OP2 (value);
1045 static void
1046 emit_switch_reloc (struct jcf_block *label, struct jcf_partial *state)
1048 emit_reloc (RELOCATION_VALUE_0, BLOCK_START_RELOC, label, state);
1051 /* Similar to emit_switch_reloc,
1052 but re-uses an existing case reloc. */
1054 static void
1055 emit_case_reloc (struct jcf_relocation *reloc, struct jcf_partial *state)
1057 struct jcf_block *block = state->last_block;
1058 reloc->next = block->u.relocations;
1059 block->u.relocations = reloc;
1060 reloc->offset = BUFFER_LENGTH (&state->bytecode);
1061 reloc->kind = BLOCK_START_RELOC;
1062 OP4 (0);
1065 /* Emit a conditional jump to TARGET with a 2-byte relative jump offset.
1066 The opcode is OPCODE, the inverted opcode is INV_OPCODE. */
1068 static void
1069 emit_if (struct jcf_block *target, int opcode, int inv_opcode,
1070 struct jcf_partial *state)
1072 RESERVE(3);
1073 OP1 (opcode);
1074 /* value is 1 byte from reloc back to start of instruction. */
1075 emit_reloc (RELOCATION_VALUE_1, - inv_opcode, target, state);
1078 static void
1079 emit_goto (struct jcf_block *target, struct jcf_partial *state)
1081 RESERVE(3);
1082 OP1 (OPCODE_goto);
1083 /* Value is 1 byte from reloc back to start of instruction. */
1084 emit_reloc (RELOCATION_VALUE_1, OPCODE_goto_w, target, state);
1087 static void
1088 emit_jsr (struct jcf_block *target, struct jcf_partial *state)
1090 RESERVE(3);
1091 OP1 (OPCODE_jsr);
1092 /* Value is 1 byte from reloc back to start of instruction. */
1093 emit_reloc (RELOCATION_VALUE_1, OPCODE_jsr_w, target, state);
1094 state->num_jsrs++;
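/* emit_if, emit_goto and emit_jsr all optimistically emit the short
   2-byte-offset form; RELOCATION_VALUE_1 records that the instruction
   starts 1 byte before the relocated offset.  If the final offset does
   not fit in 16 bits, perform_relocations rewrites the branch: goto and
   jsr become goto_w and jsr_w, while the if<cond> forms, which have no
   wide variant, get the recorded inverted opcode plus a goto_w.  */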
1097 /* Generate code to evaluate EXP. If the result is true,
1098 branch to TRUE_LABEL; otherwise, branch to FALSE_LABEL.
1099 TRUE_BRANCH_FIRST is a code generation hint that the
1100 TRUE_LABEL may follow right after this. (The idea is that we
1101 may be able to optimize away GOTO TRUE_LABEL; TRUE_LABEL:) */
1103 static void
1104 generate_bytecode_conditional (tree exp,
1105 struct jcf_block *true_label,
1106 struct jcf_block *false_label,
1107 int true_branch_first,
1108 struct jcf_partial *state)
1110 tree exp0, exp1, type;
1111 int save_SP = state->code_SP;
1112 enum java_opcode op, negop;
1113 bool unordered = 0;
1115 switch (TREE_CODE (exp))
1117 case INTEGER_CST:
1118 emit_goto (integer_zerop (exp) ? false_label : true_label, state);
1119 break;
1120 case COND_EXPR:
1122 struct jcf_block *then_label = gen_jcf_label (state);
1123 struct jcf_block *else_label = gen_jcf_label (state);
1124 int save_SP_before, save_SP_after;
1125 generate_bytecode_conditional (TREE_OPERAND (exp, 0),
1126 then_label, else_label, 1, state);
1127 define_jcf_label (then_label, state);
1128 save_SP_before = state->code_SP;
1129 generate_bytecode_conditional (TREE_OPERAND (exp, 1),
1130 true_label, false_label, 1, state);
1131 save_SP_after = state->code_SP;
1132 state->code_SP = save_SP_before;
1133 define_jcf_label (else_label, state);
1134 generate_bytecode_conditional (TREE_OPERAND (exp, 2),
1135 true_label, false_label,
1136 true_branch_first, state);
1137 if (state->code_SP != save_SP_after)
1138 abort ();
1140 break;
1141 case TRUTH_NOT_EXPR:
1142 generate_bytecode_conditional (TREE_OPERAND (exp, 0), false_label,
1143 true_label, ! true_branch_first, state);
1144 break;
1145 case TRUTH_ANDIF_EXPR:
1147 struct jcf_block *next_label = gen_jcf_label (state);
1148 generate_bytecode_conditional (TREE_OPERAND (exp, 0),
1149 next_label, false_label, 1, state);
1150 define_jcf_label (next_label, state);
1151 generate_bytecode_conditional (TREE_OPERAND (exp, 1),
1152 true_label, false_label, 1, state);
1154 break;
1155 case TRUTH_ORIF_EXPR:
1157 struct jcf_block *next_label = gen_jcf_label (state);
1158 generate_bytecode_conditional (TREE_OPERAND (exp, 0),
1159 true_label, next_label, 1, state);
1160 define_jcf_label (next_label, state);
1161 generate_bytecode_conditional (TREE_OPERAND (exp, 1),
1162 true_label, false_label, 1, state);
1164 break;
1165 compare_1:
1166 /* Assuming op is one of the 2-operand if_icmp<COND> instructions,
1167 set it to the corresponding 1-operand if<COND> instructions. */
1168 op = op - 6;
1169 /* FALLTHROUGH */
1170 compare_2:
1171 /* The opcodes with their inverses are allocated in pairs.
1172 E.g. The inverse of if_icmplt (161) is if_icmpge (162). */
1173 negop = (op & 1) ? op + 1 : op - 1;
1174 compare_2_ptr:
1175 if (true_branch_first)
1177 emit_if (false_label, negop, op, state);
1178 emit_goto (true_label, state);
1180 else
1182 emit_if (true_label, op, negop, state);
1183 emit_goto (false_label, state);
1185 break;
1187 case UNEQ_EXPR:
1188 unordered = 1;
1189 case EQ_EXPR:
1190 op = OPCODE_if_icmpeq;
1191 goto compare;
1193 case LTGT_EXPR:
1194 unordered = 1;
1195 case NE_EXPR:
1196 op = OPCODE_if_icmpne;
1197 goto compare;
1199 case UNLE_EXPR:
1200 unordered = 1;
1201 case GT_EXPR:
1202 op = OPCODE_if_icmpgt;
1203 goto compare;
1205 case UNGE_EXPR:
1206 unordered = 1;
1207 case LT_EXPR:
1208 op = OPCODE_if_icmplt;
1209 goto compare;
1211 case UNLT_EXPR:
1212 unordered = 1;
1213 case GE_EXPR:
1214 op = OPCODE_if_icmpge;
1215 goto compare;
1217 case UNGT_EXPR:
1218 unordered = 1;
1219 case LE_EXPR:
1220 op = OPCODE_if_icmple;
1221 goto compare;
1223 compare:
1224 if (unordered)
1226 /* UNLT_EXPR(a, b) means 'a < b || unordered(a, b)'. This is
1227 the same as the Java source expression '!(a >= b)', so handle
1228 it that way. */
1229 struct jcf_block *tmp = true_label;
1230 true_label = false_label;
1231 false_label = tmp;
1232 true_branch_first = !true_branch_first;
1235 exp0 = TREE_OPERAND (exp, 0);
1236 exp1 = TREE_OPERAND (exp, 1);
1237 type = TREE_TYPE (exp0);
1238 switch (TREE_CODE (type))
1240 int opf;
1241 case POINTER_TYPE: case RECORD_TYPE:
1242 switch (TREE_CODE (exp))
1244 case EQ_EXPR: op = OPCODE_if_acmpeq; break;
1245 case NE_EXPR: op = OPCODE_if_acmpne; break;
1246 default: abort();
1248 if (integer_zerop (exp1) || integer_zerop (exp0))
1250 generate_bytecode_insns (integer_zerop (exp0) ? exp1 : exp0,
1251 STACK_TARGET, state);
1252 op = op + (OPCODE_ifnull - OPCODE_if_acmpeq);
1253 negop = (op & 1) ? op - 1 : op + 1;
1254 NOTE_POP (1);
1255 goto compare_2_ptr;
1257 generate_bytecode_insns (exp0, STACK_TARGET, state);
1258 generate_bytecode_insns (exp1, STACK_TARGET, state);
1259 NOTE_POP (2);
1260 goto compare_2;
1261 case REAL_TYPE:
1262 generate_bytecode_insns (exp0, STACK_TARGET, state);
1263 generate_bytecode_insns (exp1, STACK_TARGET, state);
1264 if (op == OPCODE_if_icmplt || op == OPCODE_if_icmple)
1265 opf = OPCODE_fcmpg;
1266 else
1267 opf = OPCODE_fcmpl;
1268 if (TYPE_PRECISION (type) > 32)
1270 opf += 2;
1271 NOTE_POP (4);
1273 else
1274 NOTE_POP (2);
1275 RESERVE (1);
1276 OP1 (opf);
1277 goto compare_1;
1278 case INTEGER_TYPE:
1279 if (TYPE_PRECISION (type) > 32)
1281 generate_bytecode_insns (exp0, STACK_TARGET, state);
1282 generate_bytecode_insns (exp1, STACK_TARGET, state);
1283 NOTE_POP (4);
1284 RESERVE (1);
1285 OP1 (OPCODE_lcmp);
1286 goto compare_1;
1288 /* FALLTHROUGH */
1289 default:
1290 if (integer_zerop (exp1))
1292 generate_bytecode_insns (exp0, STACK_TARGET, state);
1293 NOTE_POP (1);
1294 goto compare_1;
1296 if (integer_zerop (exp0))
1298 switch (op)
1300 case OPCODE_if_icmplt:
1301 case OPCODE_if_icmpge:
1302 op += 2;
1303 break;
1304 case OPCODE_if_icmpgt:
1305 case OPCODE_if_icmple:
1306 op -= 2;
1307 break;
1308 default:
1309 break;
1311 generate_bytecode_insns (exp1, STACK_TARGET, state);
1312 NOTE_POP (1);
1313 goto compare_1;
1315 generate_bytecode_insns (exp0, STACK_TARGET, state);
1316 generate_bytecode_insns (exp1, STACK_TARGET, state);
1317 NOTE_POP (2);
1318 goto compare_2;
1321 default:
1322 generate_bytecode_insns (exp, STACK_TARGET, state);
1323 NOTE_POP (1);
1324 if (true_branch_first)
1326 emit_if (false_label, OPCODE_ifeq, OPCODE_ifne, state);
1327 emit_goto (true_label, state);
1329 else
1331 emit_if (true_label, OPCODE_ifne, OPCODE_ifeq, state);
1332 emit_goto (false_label, state);
1334 break;
1336 if (save_SP != state->code_SP)
1337 abort ();
1340 /* Call pending cleanups, i.e. those for surrounding TRY_FINALLY_EXPRs,
1341 but only as far out as LIMIT (since we are about to jump to the
1342 label that is LIMIT). */
1344 static void
1345 call_cleanups (struct jcf_block *limit, struct jcf_partial *state)
1347 struct jcf_block *block = state->labeled_blocks;
1348 for (; block != limit; block = block->next)
1350 if (block->pc == PENDING_CLEANUP_PC)
1351 emit_jsr (block, state);
1355 static void
1356 generate_bytecode_return (tree exp, struct jcf_partial *state)
1358 tree return_type = TREE_TYPE (TREE_TYPE (state->current_method));
1359 int returns_void = TREE_CODE (return_type) == VOID_TYPE;
1360 int op;
1361 again:
1362 if (exp != NULL)
1364 switch (TREE_CODE (exp))
1366 case COMPOUND_EXPR:
1367 generate_bytecode_insns (TREE_OPERAND (exp, 0), IGNORE_TARGET,
1368 state);
1369 exp = TREE_OPERAND (exp, 1);
1370 goto again;
1371 case COND_EXPR:
1373 struct jcf_block *then_label = gen_jcf_label (state);
1374 struct jcf_block *else_label = gen_jcf_label (state);
1375 generate_bytecode_conditional (TREE_OPERAND (exp, 0),
1376 then_label, else_label, 1, state);
1377 define_jcf_label (then_label, state);
1378 generate_bytecode_return (TREE_OPERAND (exp, 1), state);
1379 define_jcf_label (else_label, state);
1380 generate_bytecode_return (TREE_OPERAND (exp, 2), state);
1382 return;
1383 default:
1384 generate_bytecode_insns (exp,
1385 returns_void ? IGNORE_TARGET
1386 : STACK_TARGET, state);
1389 if (returns_void)
1391 op = OPCODE_return;
1392 call_cleanups (NULL, state);
1394 else
1396 op = OPCODE_ireturn + adjust_typed_op (return_type, 4);
1397 if (state->num_finalizers > 0)
1399 if (state->return_value_decl == NULL_TREE)
1401 state->return_value_decl
1402 = build_decl (VAR_DECL, NULL_TREE, TREE_TYPE (exp));
1403 localvar_alloc (state->return_value_decl, state);
1405 emit_store (state->return_value_decl, state);
1406 call_cleanups (NULL, state);
1407 emit_load (state->return_value_decl, state);
1408 /* If we call maybe_free_localvar (state->return_value_decl, state, 1),
1409 then we risk the save decl being erroneously re-used in the
1410 finalizer. Instead, we keep the state->return_value_decl
1411 allocated through the rest of the method. This is not
1412 the greatest solution, but it is at least simple and safe. */
1415 RESERVE (1);
1416 OP1 (op);
1419 /* Generate bytecode for sub-expression EXP of METHOD.
1420 TARGET is one of STACK_TARGET or IGNORE_TARGET. */
1422 static void
1423 generate_bytecode_insns (tree exp, int target, struct jcf_partial *state)
1425 tree type, arg;
1426 enum java_opcode jopcode;
1427 int op;
1428 HOST_WIDE_INT value;
1429 int post_op;
1430 int size;
1431 int offset;
1433 if (exp == NULL && target == IGNORE_TARGET)
1434 return;
1436 type = TREE_TYPE (exp);
1438 switch (TREE_CODE (exp))
1440 case BLOCK:
1441 if (BLOCK_EXPR_BODY (exp))
1443 tree local;
1444 tree body = BLOCK_EXPR_BODY (exp);
1445 long jsrs = state->num_jsrs;
1446 for (local = BLOCK_EXPR_DECLS (exp); local; )
1448 tree next = TREE_CHAIN (local);
1449 localvar_alloc (local, state);
1450 local = next;
1452 /* Avoid deep recursion for long blocks. */
1453 while (TREE_CODE (body) == COMPOUND_EXPR)
1455 generate_bytecode_insns (TREE_OPERAND (body, 0), target, state);
1456 body = TREE_OPERAND (body, 1);
1458 generate_bytecode_insns (body, target, state);
1460 for (local = BLOCK_EXPR_DECLS (exp); local; )
1462 tree next = TREE_CHAIN (local);
1463 maybe_free_localvar (local, state, state->num_jsrs <= jsrs);
1464 local = next;
1467 break;
1468 case COMPOUND_EXPR:
1469 generate_bytecode_insns (TREE_OPERAND (exp, 0), IGNORE_TARGET, state);
1470 /* Normally the first operand to a COMPOUND_EXPR must complete
1471 normally. However, in the special case of a do-while
1472 statement this is not necessarily the case. */
1473 if (CAN_COMPLETE_NORMALLY (TREE_OPERAND (exp, 0)))
1474 generate_bytecode_insns (TREE_OPERAND (exp, 1), target, state);
1475 break;
1476 case EXPR_WITH_FILE_LOCATION:
1478 location_t saved_location = input_location;
1479 tree body = EXPR_WFL_NODE (exp);
1480 if (IS_EMPTY_STMT (body))
1481 break;
1482 #ifdef USE_MAPPED_LOCATION
1483 input_location = EXPR_LOCATION (exp);
1484 #else
1485 input_filename = EXPR_WFL_FILENAME (exp);
1486 input_line = EXPR_WFL_LINENO (exp);
1487 #endif
1488 if (EXPR_WFL_EMIT_LINE_NOTE (exp) && input_line > 0
1489 && debug_info_level > DINFO_LEVEL_NONE)
1490 put_linenumber (input_line, state);
1491 generate_bytecode_insns (body, target, state);
1492 input_location = saved_location;
1494 break;
1495 case INTEGER_CST:
1496 if (target == IGNORE_TARGET) ; /* do nothing */
1497 else if (TREE_CODE (type) == POINTER_TYPE)
1499 if (! integer_zerop (exp))
1500 abort();
1501 RESERVE(1);
1502 OP1 (OPCODE_aconst_null);
1503 NOTE_PUSH (1);
1505 else if (TYPE_PRECISION (type) <= 32)
1507 push_int_const (TREE_INT_CST_LOW (exp), state);
1508 NOTE_PUSH (1);
1510 else
1512 push_long_const (TREE_INT_CST_LOW (exp), TREE_INT_CST_HIGH (exp),
1513 state);
1514 NOTE_PUSH (2);
1516 break;
1517 case REAL_CST:
1519 int prec = TYPE_PRECISION (type) >> 5;
1520 RESERVE(1);
1521 if (real_zerop (exp) && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (exp)))
1522 OP1 (prec == 1 ? OPCODE_fconst_0 : OPCODE_dconst_0);
1523 else if (real_onep (exp))
1524 OP1 (prec == 1 ? OPCODE_fconst_1 : OPCODE_dconst_1);
1525 else if (prec == 1 && real_twop (exp))
1526 OP1 (OPCODE_fconst_2);
1527 /* ??? We could also use iconst_3/ldc followed by i2f/i2d
1528 for other float/double when the value is a small integer. */
1529 else
1531 offset = find_constant_index (exp, state);
1532 if (prec == 1)
1533 push_constant1 (offset, state);
1534 else
1535 push_constant2 (offset, state);
1537 NOTE_PUSH (prec);
1539 break;
1540 case STRING_CST:
1541 push_constant1 (find_string_constant (&state->cpool, exp), state);
1542 NOTE_PUSH (1);
1543 break;
1544 case VAR_DECL:
1545 if (TREE_STATIC (exp))
1547 field_op (exp, OPCODE_getstatic, state);
1548 NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (exp)) ? 2 : 1);
1549 break;
1551 /* ... fall through ... */
1552 case PARM_DECL:
1553 emit_load (exp, state);
1554 break;
1555 case NON_LVALUE_EXPR:
1556 case INDIRECT_REF:
1557 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
1558 break;
1559 case ARRAY_REF:
1560 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
1561 generate_bytecode_insns (TREE_OPERAND (exp, 1), target, state);
1562 if (target != IGNORE_TARGET)
1564 jopcode = OPCODE_iaload + adjust_typed_op (type, 7);
1565 RESERVE(1);
1566 OP1 (jopcode);
1567 if (! TYPE_IS_WIDE (type))
1568 NOTE_POP (1);
1570 break;
1571 case COMPONENT_REF:
1573 tree obj = TREE_OPERAND (exp, 0);
1574 tree field = TREE_OPERAND (exp, 1);
1575 int is_static = FIELD_STATIC (field);
1576 generate_bytecode_insns (obj,
1577 is_static ? IGNORE_TARGET : target, state);
1578 if (target != IGNORE_TARGET)
1580 if (DECL_NAME (field) == length_identifier_node && !is_static
1581 && TYPE_ARRAY_P (TREE_TYPE (obj)))
1583 RESERVE (1);
1584 OP1 (OPCODE_arraylength);
1586 else
1588 field_op (field, is_static ? OPCODE_getstatic : OPCODE_getfield,
1589 state);
1590 if (! is_static)
1591 NOTE_POP (1);
1592 NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (field)) ? 2 : 1);
1596 break;
1597 case TRUTH_ANDIF_EXPR:
1598 case TRUTH_ORIF_EXPR:
1599 case EQ_EXPR:
1600 case NE_EXPR:
1601 case GT_EXPR:
1602 case LT_EXPR:
1603 case GE_EXPR:
1604 case LE_EXPR:
1605 case UNLT_EXPR:
1606 case UNLE_EXPR:
1607 case UNGT_EXPR:
1608 case UNGE_EXPR:
1609 case UNEQ_EXPR:
1610 case LTGT_EXPR:
1612 struct jcf_block *then_label = gen_jcf_label (state);
1613 struct jcf_block *else_label = gen_jcf_label (state);
1614 struct jcf_block *end_label = gen_jcf_label (state);
1615 generate_bytecode_conditional (exp,
1616 then_label, else_label, 1, state);
1617 define_jcf_label (then_label, state);
1618 push_int_const (1, state);
1619 emit_goto (end_label, state);
1620 define_jcf_label (else_label, state);
1621 push_int_const (0, state);
1622 define_jcf_label (end_label, state);
1623 NOTE_PUSH (1);
1625 break;
1626 case COND_EXPR:
1628 struct jcf_block *then_label = gen_jcf_label (state);
1629 struct jcf_block *else_label = gen_jcf_label (state);
1630 struct jcf_block *end_label = gen_jcf_label (state);
1631 generate_bytecode_conditional (TREE_OPERAND (exp, 0),
1632 then_label, else_label, 1, state);
1633 define_jcf_label (then_label, state);
1634 generate_bytecode_insns (TREE_OPERAND (exp, 1), target, state);
1635 if (CAN_COMPLETE_NORMALLY (TREE_OPERAND (exp, 1))
1636 /* Not all expressions have CAN_COMPLETE_NORMALLY set properly. */
1637 || TREE_CODE (TREE_TYPE (exp)) != VOID_TYPE)
1638 emit_goto (end_label, state);
1639 define_jcf_label (else_label, state);
1640 generate_bytecode_insns (TREE_OPERAND (exp, 2), target, state);
1641 define_jcf_label (end_label, state);
1642 /* COND_EXPR can be used in a binop. The stack must be adjusted. */
1643 if (TREE_TYPE (exp) != void_type_node)
1644 NOTE_POP (TYPE_IS_WIDE (TREE_TYPE (exp)) ? 2 : 1);
1646 break;
1647 case CASE_EXPR:
1649 struct jcf_switch_state *sw_state = state->sw_state;
1650 struct jcf_relocation *reloc
1651 = obstack_alloc (state->chunk_obstack, sizeof (struct jcf_relocation));
1652 HOST_WIDE_INT case_value = TREE_INT_CST_LOW (TREE_OPERAND (exp, 0));
1653 reloc->kind = 0;
1654 reloc->label = get_jcf_label_here (state);
1655 reloc->offset = case_value;
1656 reloc->next = sw_state->cases;
1657 sw_state->cases = reloc;
1658 if (sw_state->num_cases == 0)
1660 sw_state->min_case = case_value;
1661 sw_state->max_case = case_value;
1663 else
1665 if (case_value < sw_state->min_case)
1666 sw_state->min_case = case_value;
1667 if (case_value > sw_state->max_case)
1668 sw_state->max_case = case_value;
1670 sw_state->num_cases++;
1672 break;
1673 case DEFAULT_EXPR:
1674 state->sw_state->default_label = get_jcf_label_here (state);
1675 break;
1677 case SWITCH_EXPR:
1679 /* The SWITCH_EXPR has three parts, generated in the following order:
1680 1. the switch_expression (the value used to select the correct case);
1681 2. the switch_body;
1682 3. the switch_instruction (the tableswitch/lookupswitch instruction).
1683 After code generation, we will re-order them in the order 1, 3, 2.
1684 This is to avoid any extra GOTOs. */
1685 struct jcf_switch_state sw_state;
1686 struct jcf_block *expression_last; /* Last block of the switch_expression. */
1687 struct jcf_block *body_last; /* Last block of the switch_body. */
1688 struct jcf_block *switch_instruction; /* First block of switch_instruction. */
1689 struct jcf_block *instruction_last; /* Last block of the switch_instruction. */
1690 struct jcf_block *body_block;
1691 int switch_length;
1692 sw_state.prev = state->sw_state;
1693 state->sw_state = &sw_state;
1694 sw_state.cases = NULL;
1695 sw_state.num_cases = 0;
1696 sw_state.default_label = NULL;
1697 generate_bytecode_insns (TREE_OPERAND (exp, 0), STACK_TARGET, state);
1698 expression_last = state->last_block;
1699 /* Force a new block here. */
1700 body_block = gen_jcf_label (state);
1701 define_jcf_label (body_block, state);
1702 generate_bytecode_insns (TREE_OPERAND (exp, 1), IGNORE_TARGET, state);
1703 body_last = state->last_block;
1705 switch_instruction = gen_jcf_label (state);
1706 define_jcf_label (switch_instruction, state);
1707 if (sw_state.default_label == NULL)
1708 sw_state.default_label = gen_jcf_label (state);
1710 if (sw_state.num_cases <= 1)
1712 if (sw_state.num_cases == 0)
1714 emit_pop (1, state);
1715 NOTE_POP (1);
1717 else
1719 push_int_const (sw_state.cases->offset, state);
1720 NOTE_PUSH (1);
1721 emit_if (sw_state.cases->label,
1722 OPCODE_if_icmpeq, OPCODE_if_icmpne, state);
1724 emit_goto (sw_state.default_label, state);
1726 else
1728 HOST_WIDE_INT i;
1729 unsigned HOST_WIDE_INT delta;
1730 /* Copy the chain of relocs into a sorted array. */
1731 struct jcf_relocation **relocs
1732 = xmalloc (sw_state.num_cases * sizeof (struct jcf_relocation *));
1733 /* The relocs array is a buffer with a gap.
1734 The assumption is that cases will normally come in "runs". */
1735 int gap_start = 0;
1736 int gap_end = sw_state.num_cases;
1737 struct jcf_relocation *reloc;
1738 for (reloc = sw_state.cases; reloc != NULL; reloc = reloc->next)
1740 HOST_WIDE_INT case_value = reloc->offset;
1741 while (gap_end < sw_state.num_cases)
1743 struct jcf_relocation *end = relocs[gap_end];
1744 if (case_value <= end->offset)
1745 break;
1746 relocs[gap_start++] = end;
1747 gap_end++;
1749 while (gap_start > 0)
1751 struct jcf_relocation *before = relocs[gap_start-1];
1752 if (case_value >= before->offset)
1753 break;
1754 relocs[--gap_end] = before;
1755 gap_start--;
1757 relocs[gap_start++] = reloc;
1758 /* Note we don't check for duplicates. This is
1759 handled by the parser. */
1762 /* We could have DELTA < 0 if sw_state.min_case is
1763 something like Integer.MIN_VALUE. That is why delta is
1764 unsigned. */
1765 delta = sw_state.max_case - sw_state.min_case;
1766 if (2 * (unsigned) sw_state.num_cases >= delta)
1767 { /* Use tableswitch. */
1768 int index = 0;
1769 RESERVE (13 + 4 * (sw_state.max_case - sw_state.min_case + 1));
1770 OP1 (OPCODE_tableswitch);
1771 emit_reloc (RELOCATION_VALUE_0,
1772 SWITCH_ALIGN_RELOC, NULL, state);
1773 emit_switch_reloc (sw_state.default_label, state);
1774 OP4 (sw_state.min_case);
1775 OP4 (sw_state.max_case);
1776 for (i = sw_state.min_case; ; )
1778 reloc = relocs[index];
1779 if (i == reloc->offset)
1781 emit_case_reloc (reloc, state);
1782 if (i == sw_state.max_case)
1783 break;
1784 index++;
1786 else
1787 emit_switch_reloc (sw_state.default_label, state);
1788 i++;
1791 else
1792 { /* Use lookupswitch. */
1793 RESERVE(9 + 8 * sw_state.num_cases);
1794 OP1 (OPCODE_lookupswitch);
1795 emit_reloc (RELOCATION_VALUE_0,
1796 SWITCH_ALIGN_RELOC, NULL, state);
1797 emit_switch_reloc (sw_state.default_label, state);
1798 OP4 (sw_state.num_cases);
1799 for (i = 0; i < sw_state.num_cases; i++)
1801 struct jcf_relocation *reloc = relocs[i];
1802 OP4 (reloc->offset);
1803 emit_case_reloc (reloc, state);
1806 free (relocs);
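/* The choice above is the usual density test: tableswitch is used when
   the cases fill at least about half of the [min_case, max_case] range
   (2 * num_cases >= delta), with the unused slots pointing at the default
   label; otherwise the sparser lookupswitch, which stores an explicit
   (match, offset) pair per case, is smaller.  */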
1809 instruction_last = state->last_block;
1810 if (sw_state.default_label->pc < 0)
1811 define_jcf_label (sw_state.default_label, state);
1812 else /* Force a new block. */
1813 sw_state.default_label = get_jcf_label_here (state);
1814 /* Now re-arrange the blocks so the switch_instruction
1815 comes before the switch_body. */
1816 switch_length = state->code_length - switch_instruction->pc;
1817 switch_instruction->pc = body_block->pc;
1818 instruction_last->next = body_block;
1819 instruction_last->v.chunk->next = body_block->v.chunk;
1820 expression_last->next = switch_instruction;
1821 expression_last->v.chunk->next = switch_instruction->v.chunk;
1822 body_last->next = sw_state.default_label;
1823 body_last->v.chunk->next = NULL;
1824 state->chunk = body_last->v.chunk;
1825 for (; body_block != sw_state.default_label; body_block = body_block->next)
1826 body_block->pc += switch_length;
1828 state->sw_state = sw_state.prev;
1829 break;
1832 case RETURN_EXPR:
1833 exp = TREE_OPERAND (exp, 0);
1834 if (exp == NULL_TREE)
1835 exp = build_java_empty_stmt ();
1836 else if (TREE_CODE (exp) != MODIFY_EXPR)
1837 abort ();
1838 else
1839 exp = TREE_OPERAND (exp, 1);
1840 generate_bytecode_return (exp, state);
1841 break;
1842 case LABELED_BLOCK_EXPR:
1844 struct jcf_block *end_label = gen_jcf_label (state);
1845 end_label->next = state->labeled_blocks;
1846 state->labeled_blocks = end_label;
1847 end_label->pc = PENDING_EXIT_PC;
1848 end_label->u.labeled_block = exp;
1849 if (LABELED_BLOCK_BODY (exp))
1850 generate_bytecode_insns (LABELED_BLOCK_BODY (exp), target, state);
1851 if (state->labeled_blocks != end_label)
1852 abort();
1853 state->labeled_blocks = end_label->next;
1854 define_jcf_label (end_label, state);
1856 break;
1857 case LOOP_EXPR:
1859 tree body = TREE_OPERAND (exp, 0);
1860 #if 0
1861 if (TREE_CODE (body) == COMPOUND_EXPR
1862 && TREE_CODE (TREE_OPERAND (body, 0)) == EXIT_EXPR)
1864 /* Optimize: H: if (TEST) GOTO L; BODY; GOTO H; L:
1865 to: GOTO L; BODY; L: if (!TEST) GOTO L; */
1866 struct jcf_block *head_label;
1867 struct jcf_block *body_label;
1868 struct jcf_block *end_label = gen_jcf_label (state);
1869 struct jcf_block *exit_label = state->labeled_blocks;
1870 head_label = gen_jcf_label (state);
1871 emit_goto (head_label, state);
1872 body_label = get_jcf_label_here (state);
1873 generate_bytecode_insns (TREE_OPERAND (body, 1), target, state);
1874 define_jcf_label (head_label, state);
1875 generate_bytecode_conditional (TREE_OPERAND (body, 0),
1876 end_label, body_label, 1, state);
1877 define_jcf_label (end_label, state);
1879 else
1880 #endif
1882 struct jcf_block *head_label = get_jcf_label_here (state);
1883 generate_bytecode_insns (body, IGNORE_TARGET, state);
1884 if (CAN_COMPLETE_NORMALLY (body))
1885 emit_goto (head_label, state);
1888 break;
1889 case EXIT_EXPR:
1891 struct jcf_block *label = state->labeled_blocks;
1892 struct jcf_block *end_label = gen_jcf_label (state);
1893 generate_bytecode_conditional (TREE_OPERAND (exp, 0),
1894 label, end_label, 0, state);
1895 define_jcf_label (end_label, state);
1897 break;
1898 case EXIT_BLOCK_EXPR:
1900 struct jcf_block *label = state->labeled_blocks;
1901 while (label->u.labeled_block != EXIT_BLOCK_LABELED_BLOCK (exp))
1902 label = label->next;
1903 call_cleanups (label, state);
1904 emit_goto (label, state);
1906 break;
1908 case PREDECREMENT_EXPR: value = -1; post_op = 0; goto increment;
1909 case PREINCREMENT_EXPR: value = 1; post_op = 0; goto increment;
1910 case POSTDECREMENT_EXPR: value = -1; post_op = 1; goto increment;
1911 case POSTINCREMENT_EXPR: value = 1; post_op = 1; goto increment;
1912 increment:
1914 arg = TREE_OPERAND (exp, 1);
1915 exp = TREE_OPERAND (exp, 0);
1916 type = TREE_TYPE (exp);
1917 size = TYPE_IS_WIDE (type) ? 2 : 1;
1918 if ((TREE_CODE (exp) == VAR_DECL || TREE_CODE (exp) == PARM_DECL)
1919 && ! TREE_STATIC (exp)
1920 && TREE_CODE (type) == INTEGER_TYPE
1921 && TYPE_PRECISION (type) == 32)
1923 if (target != IGNORE_TARGET && post_op)
1924 emit_load (exp, state);
1925 emit_iinc (exp, value, state);
1926 if (target != IGNORE_TARGET && ! post_op)
1927 emit_load (exp, state);
1928 break;
1930 if (TREE_CODE (exp) == COMPONENT_REF)
1932 generate_bytecode_insns (TREE_OPERAND (exp, 0), STACK_TARGET, state);
1933 emit_dup (1, 0, state);
1934 /* Stack: ..., objectref, objectref. */
1935 field_op (TREE_OPERAND (exp, 1), OPCODE_getfield, state);
1936 NOTE_PUSH (size-1);
1937 /* Stack: ..., objectref, oldvalue. */
1938 offset = 1;
1940 else if (TREE_CODE (exp) == ARRAY_REF)
1942 generate_bytecode_insns (TREE_OPERAND (exp, 0), STACK_TARGET, state);
1943 generate_bytecode_insns (TREE_OPERAND (exp, 1), STACK_TARGET, state);
1944 emit_dup (2, 0, state);
1945 /* Stack: ..., array, index, array, index. */
1946 jopcode = OPCODE_iaload + adjust_typed_op (TREE_TYPE (exp), 7);
1947 RESERVE(1);
1948 OP1 (jopcode);
1949 NOTE_POP (2-size);
1950 /* Stack: ..., array, index, oldvalue. */
1951 offset = 2;
1953 else if (TREE_CODE (exp) == VAR_DECL || TREE_CODE (exp) == PARM_DECL)
1955 generate_bytecode_insns (exp, STACK_TARGET, state);
1956 /* Stack: ..., oldvalue. */
1957 offset = 0;
1959 else
1960 abort ();
1962 if (target != IGNORE_TARGET && post_op)
1963 emit_dup (size, offset, state);
1964 /* Stack, if ARRAY_REF: ..., [result, ] array, index, oldvalue. */
1965 /* Stack, if COMPONENT_REF: ..., [result, ] objectref, oldvalue. */
1966 /* Stack, otherwise: ..., [result, ] oldvalue. */
1967 generate_bytecode_insns (arg, STACK_TARGET, state);
1968 emit_binop ((value >= 0 ? OPCODE_iadd : OPCODE_isub)
1969 + adjust_typed_op (type, 3),
1970 type, state);
1971 if (target != IGNORE_TARGET && ! post_op)
1972 emit_dup (size, offset, state);
1973 /* Stack, if ARRAY_REF: ..., [result, ] array, index, newvalue. */
1974 /* Stack, if COMPONENT_REF: ..., [result, ] objectref, newvalue. */
1975 /* Stack, otherwise: ..., [result, ] newvalue. */
1976 goto finish_assignment;
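/* Assignment (added note).  A store of the form `x = x + C' (or
   `x = x - C') to a non-static 32-bit int local, with C a constant in
   the range -32768..32767, is peephole-optimized into a single `iinc'
   instruction; e.g. `i = i + 1' becomes `iinc i, 1'.  All other
   assignments evaluate the lvalue operands, then the rhs, and finish
   in the shared finish_assignment code below.  */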
1978 case MODIFY_EXPR:
1980 tree lhs = TREE_OPERAND (exp, 0);
1981 tree rhs = TREE_OPERAND (exp, 1);
1982 int offset = 0;
1984 /* See if we can use the iinc instruction. */
1985 if ((TREE_CODE (lhs) == VAR_DECL || TREE_CODE (lhs) == PARM_DECL)
1986 && ! TREE_STATIC (lhs)
1987 && TREE_CODE (TREE_TYPE (lhs)) == INTEGER_TYPE
1988 && TYPE_PRECISION (TREE_TYPE (lhs)) == 32
1989 && (TREE_CODE (rhs) == PLUS_EXPR || TREE_CODE (rhs) == MINUS_EXPR))
1991 tree arg0 = TREE_OPERAND (rhs, 0);
1992 tree arg1 = TREE_OPERAND (rhs, 1);
1993 HOST_WIDE_INT min_value = -32768;
1994 HOST_WIDE_INT max_value = 32767;
1995 if (TREE_CODE (rhs) == MINUS_EXPR)
1997 min_value++;
1998 max_value++;
2000 else if (arg1 == lhs)
2002 arg0 = arg1;
2003 arg1 = TREE_OPERAND (rhs, 0);
2005 if (lhs == arg0 && TREE_CODE (arg1) == INTEGER_CST)
2007 HOST_WIDE_INT hi_value = TREE_INT_CST_HIGH (arg1);
2008 value = TREE_INT_CST_LOW (arg1);
2009 if ((hi_value == 0 && value <= max_value)
2010 || (hi_value == -1 && value >= min_value))
2012 if (TREE_CODE (rhs) == MINUS_EXPR)
2013 value = -value;
2014 emit_iinc (lhs, value, state);
2015 if (target != IGNORE_TARGET)
2016 emit_load (lhs, state);
2017 break;
2022 if (TREE_CODE (lhs) == COMPONENT_REF)
2024 generate_bytecode_insns (TREE_OPERAND (lhs, 0),
2025 STACK_TARGET, state);
2026 offset = 1;
2028 else if (TREE_CODE (lhs) == ARRAY_REF)
2030 generate_bytecode_insns (TREE_OPERAND(lhs, 0),
2031 STACK_TARGET, state);
2032 generate_bytecode_insns (TREE_OPERAND(lhs, 1),
2033 STACK_TARGET, state);
2034 offset = 2;
2036 else
2037 offset = 0;
2039 /* If the rhs is a binary expression whose left operand is the
2040 same tree as the lhs, then we have an OP= expression. In this
2041 case we must do some special processing. */
2042 if (BINARY_CLASS_P (rhs) && lhs == TREE_OPERAND (rhs, 0))
2044 if (TREE_CODE (lhs) == COMPONENT_REF)
2046 tree field = TREE_OPERAND (lhs, 1);
2047 if (! FIELD_STATIC (field))
2049 /* Duplicate the object reference so we can get
2050 the field. */
2051 emit_dup (TYPE_IS_WIDE (field) ? 2 : 1, 0, state);
2052 NOTE_POP (1);
2054 field_op (field, (FIELD_STATIC (field)
2055 ? OPCODE_getstatic
2056 : OPCODE_getfield),
2057 state);
2059 NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (field)) ? 2 : 1);
2061 else if (TREE_CODE (lhs) == VAR_DECL
2062 || TREE_CODE (lhs) == PARM_DECL)
2064 if (FIELD_STATIC (lhs))
2066 field_op (lhs, OPCODE_getstatic, state);
2067 NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (lhs)) ? 2 : 1);
2069 else
2070 emit_load (lhs, state);
2072 else if (TREE_CODE (lhs) == ARRAY_REF)
2074 /* Duplicate the array and index, which are on the
2075 stack, so that we can load the old value. */
2076 emit_dup (2, 0, state);
2077 NOTE_POP (2);
2078 jopcode = OPCODE_iaload + adjust_typed_op (TREE_TYPE (lhs), 7);
2079 RESERVE (1);
2080 OP1 (jopcode);
2081 NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (lhs)) ? 2 : 1);
2083 else
2084 abort ();
2086 /* This function correctly handles the case where the LHS
2087 of a binary expression is NULL_TREE. */
2088 rhs = build2 (TREE_CODE (rhs), TREE_TYPE (rhs),
2089 NULL_TREE, TREE_OPERAND (rhs, 1));
2092 generate_bytecode_insns (rhs, STACK_TARGET, state);
2093 if (target != IGNORE_TARGET)
2094 emit_dup (TYPE_IS_WIDE (type) ? 2 : 1 , offset, state);
2095 exp = lhs;
2097 /* FALLTHROUGH */
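/* finish_assignment (added note): store the value now on top of the
   stack back into EXP: putfield/putstatic for fields, a local-variable
   store for VAR_DECL/PARM_DECL, or the matching *astore for array
   elements, popping the objectref or array/index operands pushed
   earlier.  */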
2099 finish_assignment:
2100 if (TREE_CODE (exp) == COMPONENT_REF)
2102 tree field = TREE_OPERAND (exp, 1);
2103 if (! FIELD_STATIC (field))
2104 NOTE_POP (1);
2105 field_op (field,
2106 FIELD_STATIC (field) ? OPCODE_putstatic : OPCODE_putfield,
2107 state);
2109 NOTE_POP (TYPE_IS_WIDE (TREE_TYPE (field)) ? 2 : 1);
2111 else if (TREE_CODE (exp) == VAR_DECL
2112 || TREE_CODE (exp) == PARM_DECL)
2114 if (FIELD_STATIC (exp))
2116 field_op (exp, OPCODE_putstatic, state);
2117 NOTE_POP (TYPE_IS_WIDE (TREE_TYPE (exp)) ? 2 : 1);
2119 else
2120 emit_store (exp, state);
2122 else if (TREE_CODE (exp) == ARRAY_REF)
2124 jopcode = OPCODE_iastore + adjust_typed_op (TREE_TYPE (exp), 7);
2125 RESERVE (1);
2126 OP1 (jopcode);
2127 NOTE_POP (TYPE_IS_WIDE (TREE_TYPE (exp)) ? 4 : 3);
2129 else
2130 abort ();
2131 break;
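/* Binary arithmetic (added note).  The JVM arithmetic opcodes come in
   groups of four with consecutive numbers (int, long, float, double
   variants, e.g. iadd, ladd, fadd, dadd), so each case below selects
   the int variant and adjust_typed_op adds the offset appropriate for
   the operand type.  */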
2132 case PLUS_EXPR:
2133 jopcode = OPCODE_iadd;
2134 goto binop;
2135 case MINUS_EXPR:
2136 jopcode = OPCODE_isub;
2137 goto binop;
2138 case MULT_EXPR:
2139 jopcode = OPCODE_imul;
2140 goto binop;
2141 case TRUNC_DIV_EXPR:
2142 case RDIV_EXPR:
2143 jopcode = OPCODE_idiv;
2144 goto binop;
2145 case TRUNC_MOD_EXPR:
2146 jopcode = OPCODE_irem;
2147 goto binop;
2148 case LSHIFT_EXPR: jopcode = OPCODE_ishl; goto binop;
2149 case RSHIFT_EXPR:
2150 jopcode = TYPE_UNSIGNED (type) ? OPCODE_iushr : OPCODE_ishr;
2151 goto binop;
2152 case URSHIFT_EXPR: jopcode = OPCODE_iushr; goto binop;
2153 case TRUTH_AND_EXPR:
2154 case BIT_AND_EXPR: jopcode = OPCODE_iand; goto binop;
2155 case TRUTH_OR_EXPR:
2156 case BIT_IOR_EXPR: jopcode = OPCODE_ior; goto binop;
2157 case TRUTH_XOR_EXPR:
2158 case BIT_XOR_EXPR: jopcode = OPCODE_ixor; goto binop;
2159 binop:
2161 tree arg0 = TREE_OPERAND (exp, 0);
2162 tree arg1 = TREE_OPERAND (exp, 1);
2163 jopcode += adjust_typed_op (type, 3);
2164 if (arg0 != NULL_TREE && operand_equal_p (arg0, arg1, 0))
2166 /* fold may (e.g.) convert 2*x to x+x. */
2167 generate_bytecode_insns (arg0, target, state);
2168 emit_dup (TYPE_PRECISION (TREE_TYPE (arg0)) > 32 ? 2 : 1, 0, state);
2170 else
2172 /* ARG0 will be NULL_TREE if we're handling an `OP='
2173 expression. In this case the stack already holds the
2174 LHS. See the MODIFY_EXPR case. */
2175 if (arg0 != NULL_TREE)
2176 generate_bytecode_insns (arg0, target, state);
2177 if (jopcode >= OPCODE_lshl && jopcode <= OPCODE_lushr)
2178 arg1 = convert (int_type_node, arg1);
2179 generate_bytecode_insns (arg1, target, state);
2181 /* For most binary operations, both operands and the result have the
2182 same type. Shift operations are different. Using arg1's type
2183 gets us the correct SP adjustment in all cases. */
2184 if (target == STACK_TARGET)
2185 emit_binop (jopcode, TREE_TYPE (arg1), state);
2186 break;
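/* Added note: the JVM has no bitwise or logical `not' instruction, so
   ~x is emitted as x ^ -1 and !x as x ^ 1, using lxor (with the pushed
   constant widened by i2l) when the operand is a 64-bit value.  */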
2188 case TRUTH_NOT_EXPR:
2189 case BIT_NOT_EXPR:
2190 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
2191 if (target == STACK_TARGET)
2193 int is_long = TYPE_PRECISION (TREE_TYPE (exp)) > 32;
2194 push_int_const (TREE_CODE (exp) == BIT_NOT_EXPR ? -1 : 1, state);
2195 RESERVE (2);
2196 if (is_long)
2197 OP1 (OPCODE_i2l);
2198 NOTE_PUSH (1 + is_long);
2199 OP1 (OPCODE_ixor + is_long);
2200 NOTE_POP (1 + is_long);
2202 break;
2203 case NEGATE_EXPR:
2204 jopcode = OPCODE_ineg;
2205 jopcode += adjust_typed_op (type, 3);
2206 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
2207 if (target == STACK_TARGET)
2208 emit_unop (jopcode, type, state);
2209 break;
2210 case INSTANCEOF_EXPR:
2212 int index = find_class_constant (&state->cpool, TREE_OPERAND (exp, 1));
2213 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
2214 RESERVE (3);
2215 OP1 (OPCODE_instanceof);
2216 OP2 (index);
2218 break;
2219 case SAVE_EXPR:
2220 /* The first time through, the argument of the SAVE_EXPR will be
2221 something complex. Evaluate it, and replace the argument with
2222 a VAR_DECL that holds the result. */
2223 arg = TREE_OPERAND (exp, 0);
2224 if (TREE_CODE (arg) != VAR_DECL || DECL_NAME (arg))
2226 tree type = TREE_TYPE (exp);
2227 tree decl = build_decl (VAR_DECL, NULL_TREE, type);
2228 generate_bytecode_insns (arg, STACK_TARGET, state);
2229 localvar_alloc (decl, state);
2230 TREE_OPERAND (exp, 0) = decl;
2231 emit_dup (TYPE_IS_WIDE (type) ? 2 : 1, 0, state);
2232 emit_store (decl, state);
2234 else
2236 emit_load (arg, state);
2238 break;
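/* Type conversions (added note).  Reference casts emit `checkcast'.
   Numeric conversions pick among the JVM conversion opcodes: f2d/d2f
   and i2f/i2d/l2f/l2d when converting to a floating type; f2i/f2l/
   d2i/d2l or i2l/l2i when converting to an integral type; followed by
   i2b, i2c, or i2s to narrow the int result to byte, char, or
   short.  */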
2239 case CONVERT_EXPR:
2240 case NOP_EXPR:
2241 case FLOAT_EXPR:
2242 case FIX_TRUNC_EXPR:
2244 tree src = TREE_OPERAND (exp, 0);
2245 tree src_type = TREE_TYPE (src);
2246 tree dst_type = TREE_TYPE (exp);
2247 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
2248 if (target == IGNORE_TARGET || src_type == dst_type)
2249 break;
2250 if (TREE_CODE (dst_type) == POINTER_TYPE)
2252 if (TREE_CODE (exp) == CONVERT_EXPR)
2254 int index = find_class_constant (&state->cpool,
2255 TREE_TYPE (dst_type));
2256 RESERVE (3);
2257 OP1 (OPCODE_checkcast);
2258 OP2 (index);
2261 else /* Convert numeric types. */
2263 int src_prec = TYPE_PRECISION (src_type);
2264 int dst_prec = TYPE_PRECISION (dst_type);
2265 int wide_src = src_prec > 32;
2266 int wide_dst = dst_prec > 32;
2267 if (TREE_CODE (dst_type) == REAL_TYPE)
2269 NOTE_POP (1 + wide_src);
2270 RESERVE (1);
2271 if (TREE_CODE (src_type) == REAL_TYPE)
2272 OP1 (wide_dst ? OPCODE_f2d : OPCODE_d2f);
2273 else if (src_prec == 64)
2274 OP1 (OPCODE_l2f + wide_dst);
2275 else
2276 OP1 (OPCODE_i2f + wide_dst);
2277 NOTE_PUSH (1 + wide_dst);
2279 /* Convert to integral type (but ignore non-widening
2280 and non-narrowing integer type conversions). */
2281 else if (TREE_CODE (src_type) == REAL_TYPE
2282 || src_prec != dst_prec)
2284 NOTE_POP (1 + wide_src);
2285 RESERVE (1);
2286 if (TREE_CODE (src_type) == REAL_TYPE)
2287 OP1 (OPCODE_f2i + wide_dst + 3 * wide_src);
2288 else if (wide_dst)
2289 OP1 (OPCODE_i2l);
2290 else if (wide_src)
2291 OP1 (OPCODE_l2i);
2292 if (dst_prec < 32)
2294 RESERVE (1);
2295 /* Already converted to int, if needed. */
2296 if (dst_prec <= 8)
2297 OP1 (OPCODE_i2b);
2298 else if (TYPE_UNSIGNED (dst_type))
2299 OP1 (OPCODE_i2c);
2300 else
2301 OP1 (OPCODE_i2s);
2303 NOTE_PUSH (1 + wide_dst);
2307 break;
2309 case TRY_EXPR:
2311 tree try_clause = TREE_OPERAND (exp, 0);
2312 struct jcf_block *start_label = get_jcf_label_here (state);
2313 struct jcf_block *end_label; /* End of try clause. */
2314 struct jcf_block *finished_label = gen_jcf_label (state);
2315 tree clause = TREE_OPERAND (exp, 1);
2316 if (target != IGNORE_TARGET)
2317 abort ();
2318 generate_bytecode_insns (try_clause, IGNORE_TARGET, state);
2319 end_label = get_jcf_label_here (state);
2320 if (end_label == start_label)
2321 break;
2322 if (CAN_COMPLETE_NORMALLY (try_clause))
2323 emit_goto (finished_label, state);
2324 while (clause != NULL_TREE)
2326 tree catch_clause = TREE_OPERAND (clause, 0);
2327 tree exception_decl = BLOCK_EXPR_DECLS (catch_clause);
2328 struct jcf_handler *handler = alloc_handler (start_label,
2329 end_label, state);
2330 if (exception_decl == NULL_TREE)
2331 handler->type = NULL_TREE;
2332 else
2333 handler->type = TREE_TYPE (TREE_TYPE (exception_decl));
2334 generate_bytecode_insns (catch_clause, IGNORE_TARGET, state);
2335 clause = TREE_CHAIN (clause);
2336 if (CAN_COMPLETE_NORMALLY (catch_clause) && clause != NULL_TREE)
2337 emit_goto (finished_label, state);
2339 define_jcf_label (finished_label, state);
2341 break;
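/* try/finally (added note).  When the `finally' clause can complete
   normally it is emitted once as a jsr/ret subroutine: the try body
   and every exit path call it with `jsr', and a catch-all handler
   stores the pending exception, calls the subroutine, and rethrows
   with `athrow'.  Roughly:
       start:    <try body>; jsr FIN; goto done
       handler:  astore exc; jsr FIN; aload exc; athrow
       FIN:      astore ret_link; <finally body>; ret ret_link
       done:
   If the finally clause cannot complete normally, plain gotos into it
   are emitted instead.  */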
2343 case TRY_FINALLY_EXPR:
2345 struct jcf_block *finished_label = NULL;
2346 struct jcf_block *finally_label, *start_label, *end_label;
2347 struct jcf_handler *handler;
2348 tree try_block = TREE_OPERAND (exp, 0);
2349 tree finally = TREE_OPERAND (exp, 1);
2350 tree return_link = NULL_TREE, exception_decl = NULL_TREE;
2352 tree exception_type;
2354 finally_label = gen_jcf_label (state);
2355 start_label = get_jcf_label_here (state);
2356 /* If the `finally' clause can complete normally, we emit it
2357 as a subroutine and let the other clauses call it via
2358 `jsr'. If it can't complete normally, then we simply emit
2359 `goto's directly to it. */
2360 if (CAN_COMPLETE_NORMALLY (finally))
2362 finally_label->pc = PENDING_CLEANUP_PC;
2363 finally_label->next = state->labeled_blocks;
2364 state->labeled_blocks = finally_label;
2365 state->num_finalizers++;
2368 generate_bytecode_insns (try_block, target, state);
2370 if (CAN_COMPLETE_NORMALLY (finally))
2372 if (state->labeled_blocks != finally_label)
2373 abort();
2374 state->labeled_blocks = finally_label->next;
2376 end_label = get_jcf_label_here (state);
2378 if (end_label == start_label)
2380 state->num_finalizers--;
2381 define_jcf_label (finally_label, state);
2382 generate_bytecode_insns (finally, IGNORE_TARGET, state);
2383 break;
2386 if (CAN_COMPLETE_NORMALLY (finally))
2388 return_link = build_decl (VAR_DECL, NULL_TREE,
2389 return_address_type_node);
2390 finished_label = gen_jcf_label (state);
2393 if (CAN_COMPLETE_NORMALLY (try_block))
2395 if (CAN_COMPLETE_NORMALLY (finally))
2397 emit_jsr (finally_label, state);
2398 emit_goto (finished_label, state);
2400 else
2401 emit_goto (finally_label, state);
2404 /* Handle exceptions. */
2406 exception_type = build_pointer_type (throwable_type_node);
2407 if (CAN_COMPLETE_NORMALLY (finally))
2409 /* We're going to generate a subroutine, so we'll need to
2410 save and restore the exception around the `jsr'. */
2411 exception_decl = build_decl (VAR_DECL, NULL_TREE, exception_type);
2412 localvar_alloc (return_link, state);
2414 handler = alloc_handler (start_label, end_label, state);
2415 handler->type = NULL_TREE;
2416 if (CAN_COMPLETE_NORMALLY (finally))
2418 localvar_alloc (exception_decl, state);
2419 NOTE_PUSH (1);
2420 emit_store (exception_decl, state);
2421 emit_jsr (finally_label, state);
2422 emit_load (exception_decl, state);
2423 RESERVE (1);
2424 OP1 (OPCODE_athrow);
2425 NOTE_POP (1);
2427 else
2429 /* We're not generating a subroutine. In this case we can
2430 simply have the exception handler pop the exception and
2431 then fall through to the `finally' block. */
2432 NOTE_PUSH (1);
2433 emit_pop (1, state);
2434 NOTE_POP (1);
2437 /* The finally block. If we're generating a subroutine, first
2438 save return PC into return_link. Otherwise, just generate
2439 the code for the `finally' block. */
2440 define_jcf_label (finally_label, state);
2441 if (CAN_COMPLETE_NORMALLY (finally))
2443 NOTE_PUSH (1);
2444 emit_store (return_link, state);
2447 generate_bytecode_insns (finally, IGNORE_TARGET, state);
2448 if (CAN_COMPLETE_NORMALLY (finally))
2450 maybe_wide (OPCODE_ret, DECL_LOCAL_INDEX (return_link), state);
2451 maybe_free_localvar (exception_decl, state, 1);
2452 maybe_free_localvar (return_link, state, 1);
2453 define_jcf_label (finished_label, state);
2456 break;
2457 case THROW_EXPR:
2458 generate_bytecode_insns (TREE_OPERAND (exp, 0), STACK_TARGET, state);
2459 RESERVE (1);
2460 OP1 (OPCODE_athrow);
2461 break;
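/* Array initializers (added note).  Push the length, allocate with
   `newarray' (primitive element type) or `anewarray' (reference
   element type), then for each element duplicate the array reference,
   push the index and the value, and store it.  For example,
   `new int[] { 4, 5 }' becomes roughly:
       iconst_2; newarray int;
       dup; iconst_0; iconst_4; iastore;
       dup; iconst_1; iconst_5; iastore  */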
2462 case NEW_ARRAY_INIT:
2464 VEC(constructor_elt,gc) *v = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
2465 tree array_type = TREE_TYPE (TREE_TYPE (exp));
2466 tree element_type = TYPE_ARRAY_ELEMENT (array_type);
2467 unsigned HOST_WIDE_INT idx;
2468 tree value;
2469 HOST_WIDE_INT length = java_array_type_length (array_type);
2470 if (target == IGNORE_TARGET)
2472 FOR_EACH_CONSTRUCTOR_VALUE (v, idx, value)
2473 generate_bytecode_insns (value, target, state);
2474 break;
2476 push_int_const (length, state);
2477 NOTE_PUSH (1);
2478 RESERVE (3);
2479 if (JPRIMITIVE_TYPE_P (element_type))
2481 int atype = encode_newarray_type (element_type);
2482 OP1 (OPCODE_newarray);
2483 OP1 (atype);
2485 else
2487 int index = find_class_constant (&state->cpool,
2488 TREE_TYPE (element_type));
2489 OP1 (OPCODE_anewarray);
2490 OP2 (index);
2492 offset = 0;
2493 jopcode = OPCODE_iastore + adjust_typed_op (element_type, 7);
2494 FOR_EACH_CONSTRUCTOR_VALUE (v, idx, value)
2496 int save_SP = state->code_SP;
2497 emit_dup (1, 0, state);
2498 push_int_const (offset, state);
2499 NOTE_PUSH (1);
2500 generate_bytecode_insns (value, STACK_TARGET, state);
2501 RESERVE (1);
2502 OP1 (jopcode);
2503 state->code_SP = save_SP;
2504 offset++;
2507 break;
2508 case JAVA_EXC_OBJ_EXPR:
2509 NOTE_PUSH (1); /* Pushed by exception system. */
2510 break;
2511 case MIN_EXPR:
2512 case MAX_EXPR:
2514 /* This copes with cases where fold() has created MIN or MAX
2515 from a conditional expression. */
2516 enum tree_code code = TREE_CODE (exp) == MIN_EXPR ? LT_EXPR : GT_EXPR;
2517 tree op0 = TREE_OPERAND (exp, 0);
2518 tree op1 = TREE_OPERAND (exp, 1);
2519 tree x;
2520 if (TREE_SIDE_EFFECTS (op0) || TREE_SIDE_EFFECTS (op1))
2521 abort ();
2522 x = build3 (COND_EXPR, TREE_TYPE (exp),
2523 build2 (code, boolean_type_node, op0, op1),
2524 op0, op1);
2525 generate_bytecode_insns (x, target, state);
2526 break;
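/* Object creation and calls (added note).  NEW_CLASS_EXPR emits `new'
   (plus `dup' when the result is used) and falls through to CALL_EXPR
   for the constructor invocation.  CALL_EXPR first intercepts the
   compiler's "soft" helper nodes (array allocation, monitorenter/exit,
   throw, fmod), which map onto dedicated opcodes such as newarray,
   multianewarray, monitorenter, athrow, and drem; everything else
   becomes invokestatic, invokespecial (constructors, super calls,
   private methods), invokeinterface, or invokevirtual, with the method
   reference taken from the constant pool.  */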
2528 case NEW_CLASS_EXPR:
2530 tree class = TREE_TYPE (TREE_TYPE (exp));
2531 int need_result = target != IGNORE_TARGET;
2532 int index = find_class_constant (&state->cpool, class);
2533 RESERVE (4);
2534 OP1 (OPCODE_new);
2535 OP2 (index);
2536 if (need_result)
2537 OP1 (OPCODE_dup);
2538 NOTE_PUSH (1 + need_result);
2540 /* ... fall through ... */
2541 case CALL_EXPR:
2543 tree f = TREE_OPERAND (exp, 0);
2544 tree x = TREE_OPERAND (exp, 1);
2545 int save_SP = state->code_SP;
2546 int nargs;
2547 if (TREE_CODE (f) == ADDR_EXPR)
2548 f = TREE_OPERAND (f, 0);
2549 if (f == soft_newarray_node)
2551 int type_code = TREE_INT_CST_LOW (TREE_VALUE (x));
2552 generate_bytecode_insns (TREE_VALUE (TREE_CHAIN (x)),
2553 STACK_TARGET, state);
2554 RESERVE (2);
2555 OP1 (OPCODE_newarray);
2556 OP1 (type_code);
2557 break;
2559 else if (f == soft_multianewarray_node)
2561 int ndims;
2562 int idim;
2563 int index = find_class_constant (&state->cpool,
2564 TREE_TYPE (TREE_TYPE (exp)));
2565 x = TREE_CHAIN (x); /* Skip class argument. */
2566 ndims = TREE_INT_CST_LOW (TREE_VALUE (x));
2567 for (idim = ndims; --idim >= 0; )
2569 x = TREE_CHAIN (x);
2570 generate_bytecode_insns (TREE_VALUE (x), STACK_TARGET, state);
2572 RESERVE (4);
2573 OP1 (OPCODE_multianewarray);
2574 OP2 (index);
2575 OP1 (ndims);
2576 NOTE_POP (ndims - 1);
2577 break;
2579 else if (f == soft_anewarray_node)
2581 tree cl = TYPE_ARRAY_ELEMENT (TREE_TYPE (TREE_TYPE (exp)));
2582 int index = find_class_constant (&state->cpool, TREE_TYPE (cl));
2583 generate_bytecode_insns (TREE_VALUE (x), STACK_TARGET, state);
2584 RESERVE (3);
2585 OP1 (OPCODE_anewarray);
2586 OP2 (index);
2587 break;
2589 else if (f == soft_monitorenter_node
2590 || f == soft_monitorexit_node
2591 || f == throw_node)
2593 if (f == soft_monitorenter_node)
2594 op = OPCODE_monitorenter;
2595 else if (f == soft_monitorexit_node)
2596 op = OPCODE_monitorexit;
2597 else
2598 op = OPCODE_athrow;
2599 generate_bytecode_insns (TREE_VALUE (x), STACK_TARGET, state);
2600 RESERVE (1);
2601 OP1 (op);
2602 NOTE_POP (1);
2603 break;
2605 for ( ; x != NULL_TREE; x = TREE_CHAIN (x))
2607 generate_bytecode_insns (TREE_VALUE (x), STACK_TARGET, state);
2609 nargs = state->code_SP - save_SP;
2610 state->code_SP = save_SP;
2611 if (f == soft_fmod_node)
2613 RESERVE (1);
2614 OP1 (OPCODE_drem);
2615 NOTE_PUSH (2);
2616 break;
2618 if (TREE_CODE (exp) == NEW_CLASS_EXPR)
2619 NOTE_POP (1); /* Pop implicit this. */
2620 if (TREE_CODE (f) == FUNCTION_DECL && DECL_CONTEXT (f) != NULL_TREE)
2622 tree context = DECL_CONTEXT (f);
2623 int index, interface = 0;
2624 RESERVE (5);
2625 if (METHOD_STATIC (f))
2626 OP1 (OPCODE_invokestatic);
2627 else if (DECL_CONSTRUCTOR_P (f) || CALL_USING_SUPER (exp)
2628 || METHOD_PRIVATE (f))
2629 OP1 (OPCODE_invokespecial);
2630 else
2632 if (CLASS_INTERFACE (TYPE_NAME (context)))
2634 tree arg1 = TREE_VALUE (TREE_OPERAND (exp, 1));
2635 context = TREE_TYPE (TREE_TYPE (arg1));
2636 if (CLASS_INTERFACE (TYPE_NAME (context)))
2637 interface = 1;
2639 if (interface)
2640 OP1 (OPCODE_invokeinterface);
2641 else
2642 OP1 (OPCODE_invokevirtual);
2644 index = find_methodref_with_class_index (&state->cpool, f, context);
2645 OP2 (index);
2646 if (interface)
2648 if (nargs <= 0)
2649 abort ();
2651 OP1 (nargs);
2652 OP1 (0);
2654 f = TREE_TYPE (TREE_TYPE (f));
2655 if (TREE_CODE (f) != VOID_TYPE)
2657 int size = TYPE_IS_WIDE (f) ? 2 : 1;
2658 if (target == IGNORE_TARGET)
2659 emit_pop (size, state);
2660 else
2661 NOTE_PUSH (size);
2663 break;
2666 /* fall through */
2667 default:
2668 error ("internal error in generate_bytecode_insns - tree code not implemented: %s",
2669 tree_code_name [(int) TREE_CODE (exp)]);
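/* Added note on the pass below: the first loop assigns each block its
   final pc, deleting redundant gotos to the immediately following
   block, short-circuiting goto-to-goto chains, accounting for
   tableswitch padding, and classifying every branch relocation as
   either a 16-bit branch (kind set to -1) or one that must be expanded
   to goto_w/jsr_w or to an inverted conditional followed by goto_w.
   The second loop then rewrites each chunk from back to front,
   applying those relocations and emitting the final operand bytes.  */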
2673 static void
2674 perform_relocations (struct jcf_partial *state)
2676 struct jcf_block *block;
2677 struct jcf_relocation *reloc;
2678 int pc;
2679 int shrink;
2681 /* Before we start, the pc field of each block is an upper bound on
2682 the block's start pc (it may be less, if previous blocks need less
2683 than their maximum).
2685 The minimum size of each block is in the block's chunk->size. */
2687 /* First, figure out the actual locations of each block. */
2688 pc = 0;
2689 shrink = 0;
2690 for (block = state->blocks; block != NULL; block = block->next)
2692 int block_size = block->v.chunk->size;
2694 block->pc = pc;
2696 /* Optimize GOTO L; L: by getting rid of the redundant goto.
2697 Assumes relocations are in reverse order. */
2698 reloc = block->u.relocations;
2699 while (reloc != NULL
2700 && reloc->kind == OPCODE_goto_w
2701 && reloc->label->pc == block->next->pc
2702 && reloc->offset + 2 == block_size)
2704 reloc = reloc->next;
2705 block->u.relocations = reloc;
2706 block->v.chunk->size -= 3;
2707 block_size -= 3;
2708 shrink += 3;
2711 /* Optimize GOTO L; ... L: GOTO X by changing the first goto to
2712 jump directly to X. We're careful here to avoid an infinite
2713 loop if the `goto's themselves form one. We do this
2714 optimization because we can generate a goto-to-goto for some
2715 try/finally blocks. */
2716 while (reloc != NULL
2717 && reloc->kind == OPCODE_goto_w
2718 && reloc->label != block
2719 && reloc->label->v.chunk->data != NULL
2720 && reloc->label->v.chunk->data[0] == OPCODE_goto)
2722 /* Find the reloc for the first instruction of the
2723 destination block. */
2724 struct jcf_relocation *first_reloc;
2725 for (first_reloc = reloc->label->u.relocations;
2726 first_reloc;
2727 first_reloc = first_reloc->next)
2729 if (first_reloc->offset == 1
2730 && first_reloc->kind == OPCODE_goto_w)
2732 reloc->label = first_reloc->label;
2733 break;
2737 /* If we didn't do anything, exit the loop. */
2738 if (first_reloc == NULL)
2739 break;
2742 for (reloc = block->u.relocations; reloc != NULL; reloc = reloc->next)
2744 if (reloc->kind == SWITCH_ALIGN_RELOC)
2746 /* We assume this is the first relocation in this block,
2747 so we know its final pc. */
2748 int where = pc + reloc->offset;
2749 int pad = ((where + 3) & ~3) - where;
2750 block_size += pad;
2752 else if (reloc->kind < -1 || reloc->kind > BLOCK_START_RELOC)
2754 int delta = reloc->label->pc - (pc + reloc->offset - 1);
2755 int expand = reloc->kind > 0 ? 2 : 5;
2757 if (delta > 0)
2758 delta -= shrink;
2759 if (delta >= -32768 && delta <= 32767)
2761 shrink += expand;
2762 reloc->kind = -1;
2764 else
2765 block_size += expand;
2768 pc += block_size;
2771 for (block = state->blocks; block != NULL; block = block->next)
2773 struct chunk *chunk = block->v.chunk;
2774 int old_size = chunk->size;
2775 int next_pc = block->next == NULL ? pc : block->next->pc;
2776 int new_size = next_pc - block->pc;
2777 unsigned char *new_ptr;
2778 unsigned char *old_buffer = chunk->data;
2779 unsigned char *old_ptr = old_buffer + old_size;
2780 if (new_size != old_size)
2782 chunk->data = obstack_alloc (state->chunk_obstack, new_size);
2783 chunk->size = new_size;
2785 new_ptr = chunk->data + new_size;
2787 /* We do the relocations from back to front, because
2788 the relocations are in reverse order. */
2789 for (reloc = block->u.relocations; ; reloc = reloc->next)
2791 /* new_ptr and old_ptr point into the old and new buffers,
2792 respectively. (If no relocations cause the buffer to
2793 grow, the buffer will be the same buffer, and new_ptr==old_ptr.)
2794 The bytes at higher address have been copied and relocations
2795 handled; those at lower addresses remain to process. */
2797 /* Lower old index of piece to be copied with no relocation.
2798 I.e. high index of the first piece that does need relocation. */
2799 int start = reloc == NULL ? 0
2800 : reloc->kind == SWITCH_ALIGN_RELOC ? reloc->offset
2801 : (reloc->kind == 0 || reloc->kind == BLOCK_START_RELOC)
2802 ? reloc->offset + 4
2803 : reloc->offset + 2;
2804 int32 value;
2805 int new_offset;
2806 int n = (old_ptr - old_buffer) - start;
2807 new_ptr -= n;
2808 old_ptr -= n;
2809 /* Don't "copy" bytes in place; doing so causes valgrind
2810 warnings. */
2811 if (n > 0 && new_ptr != old_ptr)
2812 memcpy (new_ptr, old_ptr, n);
2813 if (old_ptr == old_buffer)
2814 break;
2816 new_offset = new_ptr - chunk->data;
2817 new_offset -= (reloc->kind == -1 ? 2 : 4);
2818 if (reloc->kind == 0)
2820 old_ptr -= 4;
2821 value = GET_u4 (old_ptr);
2823 else if (reloc->kind == BLOCK_START_RELOC)
2825 old_ptr -= 4;
2826 value = 0;
2827 new_offset = 0;
2829 else if (reloc->kind == SWITCH_ALIGN_RELOC)
2831 int where = block->pc + reloc->offset;
2832 int pad = ((where + 3) & ~3) - where;
2833 while (--pad >= 0)
2834 *--new_ptr = 0;
2835 continue;
2837 else
2839 old_ptr -= 2;
2840 value = GET_u2 (old_ptr);
2842 value += reloc->label->pc - (block->pc + new_offset);
2843 *--new_ptr = (unsigned char) value; value >>= 8;
2844 *--new_ptr = (unsigned char) value; value >>= 8;
2845 if (reloc->kind != -1)
2847 *--new_ptr = (unsigned char) value; value >>= 8;
2848 *--new_ptr = (unsigned char) value;
2850 if (reloc->kind > BLOCK_START_RELOC)
2852 /* Convert: OP TARGET to: OP_w TARGET; (OP is goto or jsr). */
2853 --old_ptr;
2854 *--new_ptr = reloc->kind;
2856 else if (reloc->kind < -1)
2858 /* Convert: ifCOND TARGET to: ifNCOND T; goto_w TARGET; T: */
2859 --old_ptr;
2860 *--new_ptr = OPCODE_goto_w;
2861 *--new_ptr = 3;
2862 *--new_ptr = 0;
2863 *--new_ptr = - reloc->kind;
2866 if (new_ptr != chunk->data)
2867 abort ();
2869 state->code_length = pc;
2872 static void
2873 init_jcf_state (struct jcf_partial *state, struct obstack *work)
2875 state->chunk_obstack = work;
2876 state->first = state->chunk = NULL;
2877 CPOOL_INIT (&state->cpool);
2878 BUFFER_INIT (&state->localvars);
2879 BUFFER_INIT (&state->bytecode);
2882 static void
2883 init_jcf_method (struct jcf_partial *state, tree method)
2885 state->current_method = method;
2886 state->blocks = state->last_block = NULL;
2887 state->linenumber_count = 0;
2888 state->first_lvar = state->last_lvar = NULL;
2889 state->lvar_count = 0;
2890 state->labeled_blocks = NULL;
2891 state->code_length = 0;
2892 BUFFER_RESET (&state->bytecode);
2893 BUFFER_RESET (&state->localvars);
2894 state->code_SP = 0;
2895 state->code_SP_max = 0;
2896 state->handlers = NULL;
2897 state->last_handler = NULL;
2898 state->num_handlers = 0;
2899 state->num_finalizers = 0;
2900 state->return_value_decl = NULL_TREE;
2903 static void
2904 release_jcf_state (struct jcf_partial *state)
2906 CPOOL_FINISH (&state->cpool);
2907 obstack_free (state->chunk_obstack, state->first);
2910 /* Get the access flags (modifiers) of a class (TYPE_DECL) to be used in the
2911 access_flags field of the class file header. */
2913 static int
2914 get_classfile_modifiers (tree class)
2916 /* These are the flags which are valid class file modifiers.
2917 See JVMS2 S4.1. */
2918 int valid_toplevel_class_flags = (ACC_PUBLIC | ACC_FINAL | ACC_SUPER |
2919 ACC_INTERFACE | ACC_ABSTRACT);
2920 int flags = get_access_flags (class);
2922 /* ACC_SUPER should always be set, except for interfaces. */
2923 if (! (flags & ACC_INTERFACE))
2924 flags |= ACC_SUPER;
2926 /* A protected member class becomes public at the top level. */
2927 if (flags & ACC_PROTECTED)
2928 flags |= ACC_PUBLIC;
2930 /* Filter out flags that are not valid for a class or interface in the
2931 top-level access_flags field. */
2932 flags &= valid_toplevel_class_flags;
2934 return flags;
2937 /* Get the access flags (modifiers) for a method to be used in the class
2938 file. */
2940 static int
2941 get_method_access_flags (tree decl)
2943 int flags = get_access_flags (decl);
2945 /* Promote "private" inner-class constructors to package-private. */
2946 if (DECL_CONSTRUCTOR_P (decl)
2947 && INNER_CLASS_DECL_P (TYPE_NAME (DECL_CONTEXT (decl))))
2948 flags &= ~(ACC_PRIVATE);
2950 return flags;
2953 /* Generate and return a list of chunks containing the class CLAS
2954 in the .class file representation. The list can be written to a
2955 .class file using write_chunks. Allocate chunks from obstack WORK. */
2957 static GTY(()) tree SourceFile_node;
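/* Added note: the chunks are emitted in the order required by the
   class file format (JVMS chapter 4): magic and version, constant
   pool, access_flags, this_class, super_class, the interface list,
   the fields with their attributes, the methods with their Code and
   Exceptions attributes, and finally the class-level attributes
   (SourceFile, InnerClasses, etc.).  The constant pool chunk is
   reserved early as an empty chunk and filled in last, once all
   constants are known.  */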
2958 static struct chunk *
2959 generate_classfile (tree clas, struct jcf_partial *state)
2961 struct chunk *cpool_chunk;
2962 const char *source_file, *s;
2963 unsigned char *ptr;
2964 int i;
2965 unsigned char *fields_count_ptr;
2966 int fields_count = 0;
2967 unsigned char *methods_count_ptr;
2968 int methods_count = 0;
2969 tree part;
2970 int total_supers
2971 = clas == object_type_node ? 0 : BINFO_N_BASE_BINFOS (TYPE_BINFO (clas));
2973 ptr = append_chunk (NULL, 8, state);
2974 PUT4 (0xCafeBabe); /* Magic number */
2975 PUT2 (3); /* Minor version */
2976 PUT2 (45); /* Major version */
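/* Class file version 45.3 corresponds to the original JDK 1.0/1.1
   format (added note).  */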
2978 append_chunk (NULL, 0, state);
2979 cpool_chunk = state->chunk;
2981 /* Next allocate the chunk containing access_flags through fields_count. */
2982 if (clas == object_type_node)
2983 i = 10;
2984 else
2985 i = 8 + 2 * total_supers;
2986 ptr = append_chunk (NULL, i, state);
2987 i = get_classfile_modifiers (TYPE_NAME (clas));
2988 PUT2 (i); /* access_flags */
2989 i = find_class_constant (&state->cpool, clas); PUT2 (i); /* this_class */
2990 if (clas == object_type_node)
2992 PUT2(0); /* super_class */
2993 PUT2(0); /* interfaces_count */
2995 else
2997 tree binfo = TYPE_BINFO (clas);
2998 tree base_binfo = BINFO_BASE_BINFO (binfo, 0);
2999 int j = find_class_constant (&state->cpool, BINFO_TYPE (base_binfo));
3001 PUT2 (j); /* super_class */
3002 PUT2 (total_supers - 1); /* interfaces_count */
3003 for (i = 1; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
3005 j = find_class_constant (&state->cpool, BINFO_TYPE (base_binfo));
3006 PUT2 (j);
3009 fields_count_ptr = ptr;
3011 for (part = TYPE_FIELDS (clas); part; part = TREE_CHAIN (part))
3013 int have_value, attr_count = 0;
3014 if (DECL_NAME (part) == NULL_TREE || DECL_ARTIFICIAL (part))
3015 continue;
3016 ptr = append_chunk (NULL, 8, state);
3017 i = get_access_flags (part); PUT2 (i);
3018 i = find_utf8_constant (&state->cpool, DECL_NAME (part)); PUT2 (i);
3019 i = find_utf8_constant (&state->cpool,
3020 build_java_signature (TREE_TYPE (part)));
3021 PUT2(i);
3022 have_value = DECL_INITIAL (part) != NULL_TREE
3023 && FIELD_STATIC (part) && CONSTANT_VALUE_P (DECL_INITIAL (part))
3024 && FIELD_FINAL (part)
3025 && (JPRIMITIVE_TYPE_P (TREE_TYPE (part))
3026 || TREE_TYPE (part) == string_ptr_type_node);
3027 if (have_value)
3028 attr_count++;
3030 if (FIELD_THISN (part) || FIELD_LOCAL_ALIAS (part)
3031 || FIELD_SYNTHETIC (part))
3032 attr_count++;
3033 if (FIELD_DEPRECATED (part))
3034 attr_count++;
3036 PUT2 (attr_count); /* attributes_count */
3037 if (have_value)
3039 tree init = DECL_INITIAL (part);
3040 static tree ConstantValue_node = NULL_TREE;
3041 if (TREE_TYPE (part) != TREE_TYPE (init))
3042 fatal_error ("field initializer type mismatch");
3043 ptr = append_chunk (NULL, 8, state);
3044 if (ConstantValue_node == NULL_TREE)
3045 ConstantValue_node = get_identifier ("ConstantValue");
3046 i = find_utf8_constant (&state->cpool, ConstantValue_node);
3047 PUT2 (i); /* attribute_name_index */
3048 PUT4 (2); /* attribute_length */
3049 i = find_constant_index (init, state); PUT2 (i);
3051 /* Emit the "Synthetic" attribute for val$<x> and this$<n>
3052 fields and other fields which need it. */
3053 if (FIELD_THISN (part) || FIELD_LOCAL_ALIAS (part)
3054 || FIELD_SYNTHETIC (part))
3055 ptr = append_synthetic_attribute (state);
3056 if (FIELD_DEPRECATED (part))
3057 append_deprecated_attribute (state);
3058 fields_count++;
3060 ptr = fields_count_ptr; UNSAFE_PUT2 (fields_count);
3062 ptr = methods_count_ptr = append_chunk (NULL, 2, state);
3063 PUT2 (0);
3065 for (part = TYPE_METHODS (clas); part; part = TREE_CHAIN (part))
3067 struct jcf_block *block;
3068 tree function_body = DECL_FUNCTION_BODY (part);
3069 tree body = function_body == NULL_TREE ? NULL_TREE
3070 : BLOCK_EXPR_BODY (function_body);
3071 tree name = DECL_CONSTRUCTOR_P (part) ? init_identifier_node
3072 : DECL_NAME (part);
3073 tree type = TREE_TYPE (part);
3074 tree save_function = current_function_decl;
3075 int synthetic_p = 0;
3077 /* Invisible Miranda methods shouldn't end up in the .class
3078 file. */
3079 if (METHOD_INVISIBLE (part))
3080 continue;
3082 current_function_decl = part;
3083 ptr = append_chunk (NULL, 8, state);
3084 i = get_method_access_flags (part); PUT2 (i);
3085 i = find_utf8_constant (&state->cpool, name); PUT2 (i);
3086 i = find_utf8_constant (&state->cpool, build_java_signature (type));
3087 PUT2 (i);
3088 i = (body != NULL_TREE) + (DECL_FUNCTION_THROWS (part) != NULL_TREE);
3090 /* Make room for the Synthetic attribute (of zero length). */
3091 if (DECL_FINIT_P (part)
3092 || DECL_INSTINIT_P (part)
3093 || NESTED_FIELD_ACCESS_IDENTIFIER_P (DECL_NAME (part))
3094 || TYPE_DOT_CLASS (clas) == part)
3096 i++;
3097 synthetic_p = 1;
3099 /* Make room for Deprecated attribute. */
3100 if (METHOD_DEPRECATED (part))
3101 i++;
3103 PUT2 (i); /* attributes_count */
3105 if (synthetic_p)
3106 ptr = append_synthetic_attribute (state);
3108 if (body != NULL_TREE)
3110 int code_attributes_count = 0;
3111 static tree Code_node = NULL_TREE;
3112 tree t;
3113 unsigned char *attr_len_ptr;
3114 struct jcf_handler *handler;
3115 if (Code_node == NULL_TREE)
3116 Code_node = get_identifier ("Code");
3117 ptr = append_chunk (NULL, 14, state);
3118 i = find_utf8_constant (&state->cpool, Code_node); PUT2 (i);
3119 attr_len_ptr = ptr;
3120 init_jcf_method (state, part);
3121 get_jcf_label_here (state); /* Force a first block. */
3122 for (t = DECL_ARGUMENTS (part); t != NULL_TREE; t = TREE_CHAIN (t))
3123 localvar_alloc (t, state);
3124 state->num_jsrs = 0;
3125 generate_bytecode_insns (body, IGNORE_TARGET, state);
3126 if (CAN_COMPLETE_NORMALLY (body))
3128 if (TREE_CODE (TREE_TYPE (type)) != VOID_TYPE)
3129 abort();
3130 RESERVE (1);
3131 OP1 (OPCODE_return);
3133 for (t = DECL_ARGUMENTS (part); t != NULL_TREE; t = TREE_CHAIN (t))
3134 maybe_free_localvar (t, state, 1);
3135 if (state->return_value_decl != NULL_TREE)
3136 maybe_free_localvar (state->return_value_decl, state, 1);
3137 finish_jcf_block (state);
3138 perform_relocations (state);
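/* Code attribute length (added note): 2 (max_stack) + 2 (max_locals)
   + 4 (code_length) + the bytecode itself + 2 (exception_table_length)
   + 8 bytes per handler + 2 (attributes_count), plus the
   LineNumberTable (8 + 4 bytes per entry) and LocalVariableTable
   (8 + 10 bytes per entry) attributes appended below when present.  */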
3140 ptr = attr_len_ptr;
3141 i = 8 + state->code_length + 4 + 8 * state->num_handlers;
3142 if (state->linenumber_count > 0)
3144 code_attributes_count++;
3145 i += 8 + 4 * state->linenumber_count;
3147 if (state->lvar_count > 0)
3149 code_attributes_count++;
3150 i += 8 + 10 * state->lvar_count;
3152 UNSAFE_PUT4 (i); /* attribute_length */
3153 UNSAFE_PUT2 (state->code_SP_max); /* max_stack */
3154 UNSAFE_PUT2 (localvar_max); /* max_locals */
3155 UNSAFE_PUT4 (state->code_length);
3157 /* Emit the exception table. */
3158 ptr = append_chunk (NULL, 2 + 8 * state->num_handlers, state);
3159 PUT2 (state->num_handlers); /* exception_table_length */
3160 handler = state->handlers;
3161 for (; handler != NULL; handler = handler->next)
3163 int type_index;
3164 PUT2 (handler->start_label->pc);
3165 PUT2 (handler->end_label->pc);
3166 PUT2 (handler->handler_label->pc);
3167 if (handler->type == NULL_TREE)
3168 type_index = 0;
3169 else
3170 type_index = find_class_constant (&state->cpool,
3171 handler->type);
3172 PUT2 (type_index);
3175 ptr = append_chunk (NULL, 2, state);
3176 PUT2 (code_attributes_count);
3178 /* Write the LineNumberTable attribute. */
3179 if (state->linenumber_count > 0)
3181 static tree LineNumberTable_node = NULL_TREE;
3182 ptr = append_chunk (NULL,
3183 8 + 4 * state->linenumber_count, state);
3184 if (LineNumberTable_node == NULL_TREE)
3185 LineNumberTable_node = get_identifier ("LineNumberTable");
3186 i = find_utf8_constant (&state->cpool, LineNumberTable_node);
3187 PUT2 (i); /* attribute_name_index */
3188 i = 2+4*state->linenumber_count; PUT4(i); /* attribute_length */
3189 i = state->linenumber_count; PUT2 (i);
3190 for (block = state->blocks; block != NULL; block = block->next)
3192 int line = block->linenumber;
3193 if (line > 0)
3195 PUT2 (block->pc);
3196 PUT2 (line);
3201 /* Write the LocalVariableTable attribute. */
3202 if (state->lvar_count > 0)
3204 static tree LocalVariableTable_node = NULL_TREE;
3205 struct localvar_info *lvar = state->first_lvar;
3206 ptr = append_chunk (NULL, 8 + 10 * state->lvar_count, state);
3207 if (LocalVariableTable_node == NULL_TREE)
3208 LocalVariableTable_node = get_identifier("LocalVariableTable");
3209 i = find_utf8_constant (&state->cpool, LocalVariableTable_node);
3210 PUT2 (i); /* attribute_name_index */
3211 i = 2 + 10 * state->lvar_count; PUT4 (i); /* attribute_length */
3212 i = state->lvar_count; PUT2 (i);
3213 for ( ; lvar != NULL; lvar = lvar->next)
3215 tree name = DECL_NAME (lvar->decl);
3216 tree sig = build_java_signature (TREE_TYPE (lvar->decl));
3217 i = lvar->start_label->pc; PUT2 (i);
3218 i = lvar->end_label->pc - i; PUT2 (i);
3219 i = find_utf8_constant (&state->cpool, name); PUT2 (i);
3220 i = find_utf8_constant (&state->cpool, sig); PUT2 (i);
3221 i = DECL_LOCAL_INDEX (lvar->decl); PUT2 (i);
3225 if (DECL_FUNCTION_THROWS (part) != NULL_TREE)
3227 tree t = DECL_FUNCTION_THROWS (part);
3228 int throws_count = list_length (t);
3229 static tree Exceptions_node = NULL_TREE;
3230 if (Exceptions_node == NULL_TREE)
3231 Exceptions_node = get_identifier ("Exceptions");
3232 ptr = append_chunk (NULL, 8 + 2 * throws_count, state);
3233 i = find_utf8_constant (&state->cpool, Exceptions_node);
3234 PUT2 (i); /* attribute_name_index */
3235 i = 2 + 2 * throws_count; PUT4(i); /* attribute_length */
3236 i = throws_count; PUT2 (i);
3237 for (; t != NULL_TREE; t = TREE_CHAIN (t))
3239 i = find_class_constant (&state->cpool, TREE_VALUE (t));
3240 PUT2 (i);
3244 if (METHOD_DEPRECATED (part))
3245 append_deprecated_attribute (state);
3247 methods_count++;
3248 current_function_decl = save_function;
3250 ptr = methods_count_ptr; UNSAFE_PUT2 (methods_count);
3252 source_file = DECL_SOURCE_FILE (TYPE_NAME (clas));
3253 for (s = source_file; ; s++)
3255 char ch = *s;
3256 if (ch == '\0')
3257 break;
3258 if (ch == '/' || ch == '\\')
3259 source_file = s+1;
3261 ptr = append_chunk (NULL, 10, state);
3263 i = 1; /* Source file always exists as an attribute */
3264 if (INNER_CLASS_TYPE_P (clas) || DECL_INNER_CLASS_LIST (TYPE_NAME (clas)))
3265 i++;
3266 if (clas == object_type_node)
3267 i++;
3268 if (CLASS_DEPRECATED (TYPE_NAME (clas)))
3269 i++;
3271 PUT2 (i); /* attributes_count */
3273 /* Generate the SourceFile attribute. */
3274 if (SourceFile_node == NULL_TREE)
3276 SourceFile_node = get_identifier ("SourceFile");
3279 i = find_utf8_constant (&state->cpool, SourceFile_node);
3280 PUT2 (i); /* attribute_name_index */
3281 PUT4 (2);
3282 i = find_utf8_constant (&state->cpool, get_identifier (source_file));
3283 PUT2 (i);
3284 append_gcj_attribute (state, clas);
3285 append_innerclasses_attribute (state, clas);
3286 if (CLASS_DEPRECATED (TYPE_NAME (clas)))
3287 append_deprecated_attribute (state);
3289 /* Now finally generate the contents of the constant pool chunk. */
3290 i = count_constant_pool_bytes (&state->cpool);
3291 ptr = obstack_alloc (state->chunk_obstack, i);
3292 cpool_chunk->data = ptr;
3293 cpool_chunk->size = i;
3294 write_constant_pool (&state->cpool, ptr, i);
3295 return state->first;
3298 static GTY(()) tree Synthetic_node;
3299 static unsigned char *
3300 append_synthetic_attribute (struct jcf_partial *state)
3302 unsigned char *ptr = append_chunk (NULL, 6, state);
3303 int i;
3305 if (Synthetic_node == NULL_TREE)
3307 Synthetic_node = get_identifier ("Synthetic");
3309 i = find_utf8_constant (&state->cpool, Synthetic_node);
3310 PUT2 (i); /* Attribute string index */
3311 PUT4 (0); /* Attribute length */
3313 return ptr;
3316 static void
3317 append_deprecated_attribute (struct jcf_partial *state)
3319 unsigned char *ptr = append_chunk (NULL, 6, state);
3320 int i;
3322 i = find_utf8_constant (&state->cpool, get_identifier ("Deprecated"));
3323 PUT2 (i); /* Attribute string index */
3324 PUT4 (0); /* Attribute length */
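/* Added note: append the zero-length "gnu.gcj.gcj-compiled" marker
   attribute; the code below only attaches it to java.lang.Object.  */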
3327 static void
3328 append_gcj_attribute (struct jcf_partial *state, tree class)
3330 unsigned char *ptr;
3331 int i;
3333 if (class != object_type_node)
3334 return;
3336 ptr = append_chunk (NULL, 6, state); /* 2+4 */
3337 i = find_utf8_constant (&state->cpool,
3338 get_identifier ("gnu.gcj.gcj-compiled"));
3339 PUT2 (i); /* Attribute string index */
3340 PUT4 (0); /* Attribute length */
3343 static tree InnerClasses_node;
3344 static void
3345 append_innerclasses_attribute (struct jcf_partial *state, tree class)
3347 tree orig_decl = TYPE_NAME (class);
3348 tree current, decl;
3349 int length = 0, i;
3350 unsigned char *ptr, *length_marker, *number_marker;
3352 if (!INNER_CLASS_TYPE_P (class) && !DECL_INNER_CLASS_LIST (orig_decl))
3353 return;
3355 ptr = append_chunk (NULL, 8, state); /* 2+4+2 */
3357 if (InnerClasses_node == NULL_TREE)
3359 InnerClasses_node = get_identifier ("InnerClasses");
3361 i = find_utf8_constant (&state->cpool, InnerClasses_node);
3362 PUT2 (i);
3363 length_marker = ptr; PUT4 (0); /* length, to be later patched */
3364 number_marker = ptr; PUT2 (0); /* number of classes, to be later patched */
3366 /* Generate the entries: all inner classes visible from the one being
3367 processed: the class itself, its enclosing classes, and its own inner classes. */
3368 while (class && INNER_CLASS_TYPE_P (class))
3370 const char *n;
3372 decl = TYPE_NAME (class);
3373 n = IDENTIFIER_POINTER (DECL_NAME (decl)) +
3374 IDENTIFIER_LENGTH (DECL_NAME (decl));
3376 while (n[-1] != '$')
3377 n--;
3378 append_innerclasses_attribute_entry (state, decl, get_identifier (n));
3379 length++;
3381 class = TREE_TYPE (DECL_CONTEXT (TYPE_NAME (class)));
3384 decl = orig_decl;
3385 for (current = DECL_INNER_CLASS_LIST (decl);
3386 current; current = TREE_CHAIN (current))
3388 append_innerclasses_attribute_entry (state, TREE_PURPOSE (current),
3389 TREE_VALUE (current));
3390 length++;
3393 ptr = length_marker; PUT4 (8*length+2);
3394 ptr = number_marker; PUT2 (length);
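/* Added note: append one 8-byte InnerClasses entry for DECL:
   inner_class_info_index, outer_class_info_index, inner_name_index
   (the simple NAME), and the inner class access flags.  The outer
   class and name indices are left as zero for anonymous classes.  */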
3397 static void
3398 append_innerclasses_attribute_entry (struct jcf_partial *state,
3399 tree decl, tree name)
3401 int icii, icaf;
3402 int ocii = 0, ini = 0;
3403 unsigned char *ptr = append_chunk (NULL, 8, state);
3405 icii = find_class_constant (&state->cpool, TREE_TYPE (decl));
3407 /* Sun's implementation seems to set ocii to 0 for inner
3408 classes (which aren't considered members of the class they're
3409 in). The specification says that if the class is anonymous,
3410 inner_name_index must be zero. */
3411 if (!ANONYMOUS_CLASS_P (TREE_TYPE (decl)))
3413 ocii = find_class_constant (&state->cpool,
3414 TREE_TYPE (DECL_CONTEXT (decl)));
3415 ini = find_utf8_constant (&state->cpool, name);
3417 icaf = get_access_flags (decl);
3419 PUT2 (icii); PUT2 (ocii); PUT2 (ini); PUT2 (icaf);
3422 static char *
3423 make_class_file_name (tree clas)
3425 const char *dname, *cname, *slash;
3426 char *r;
3427 struct stat sb;
3428 char sep;
3430 cname = IDENTIFIER_POINTER (identifier_subst (DECL_NAME (TYPE_NAME (clas)),
3431 "", '.', DIR_SEPARATOR,
3432 ".class"));
3433 if (jcf_write_base_directory == NULL)
3435 /* Make sure we put the class file into the .java file's
3436 directory, and not into some subdirectory thereof. */
3437 char *t;
3438 dname = DECL_SOURCE_FILE (TYPE_NAME (clas));
3439 slash = strrchr (dname, DIR_SEPARATOR);
3440 #ifdef DIR_SEPARATOR_2
3441 if (! slash)
3442 slash = strrchr (dname, DIR_SEPARATOR_2);
3443 #endif
3444 if (! slash)
3446 dname = ".";
3447 slash = dname + 1;
3448 sep = DIR_SEPARATOR;
3450 else
3451 sep = *slash;
3453 t = strrchr (cname, DIR_SEPARATOR);
3454 if (t)
3455 cname = t + 1;
3457 else
3459 char *s;
3461 dname = jcf_write_base_directory;
3463 s = strrchr (dname, DIR_SEPARATOR);
3464 #ifdef DIR_SEPARATOR_2
3465 if (! s)
3466 s = strrchr (dname, DIR_SEPARATOR_2);
3467 #endif
3468 if (s)
3469 sep = *s;
3470 else
3471 sep = DIR_SEPARATOR;
3473 slash = dname + strlen (dname);
3476 r = xmalloc (slash - dname + strlen (cname) + 2);
3477 strncpy (r, dname, slash - dname);
3478 r[slash - dname] = sep;
3479 strcpy (&r[slash - dname + 1], cname);
3481 /* We try to make new directories when we need them. We only do
3482 this for directories which "might not" exist. For instance, we
3483 assume the `-d' directory exists, but we don't assume that any
3484 subdirectory below it exists. It might be worthwhile to keep
3485 track of which directories we've created to avoid gratuitous
3486 stat()s. */
3487 dname = r + (slash - dname) + 1;
3488 while (1)
3490 char *s = strchr (dname, sep);
3491 if (s == NULL)
3492 break;
3493 *s = '\0';
3494 /* Try to make directory if it doesn't already exist. */
3495 if (stat (r, &sb) == -1
3496 && mkdir (r, 0755) == -1
3497 /* The directory might have been made by another process. */
3498 && errno != EEXIST)
3499 fatal_error ("can't create directory %s: %m", r);
3501 *s = sep;
3502 /* Skip consecutive separators. */
3503 for (dname = s + 1; *dname && *dname == sep; ++dname)
3507 return r;
3510 /* Write out the contents of a class (RECORD_TYPE) CLAS, as a .class file.
3511 The output .class file name is make_class_file_name(CLAS). */
3513 void
3514 write_classfile (tree clas)
3516 struct obstack *work = &temporary_obstack;
3517 struct jcf_partial state[1];
3518 char *class_file_name = make_class_file_name (clas);
3519 struct chunk *chunks;
3521 if (class_file_name != NULL)
3523 FILE *stream;
3524 char *temporary_file_name;
3525 char pid [sizeof (long) * 2 + 2];
3527 /* The .class file is initially written to a ".PID" file so that
3528 if multiple instances of the compiler are running at once
3529 they do not see partially formed class files nor override
2530 each other, which may happen in libjava with parallel build. */
3532 sprintf (pid, ".%lx", (unsigned long) getpid ());
3533 temporary_file_name = concat (class_file_name, pid, NULL);
3534 stream = fopen (temporary_file_name, "wb");
3535 if (stream == NULL)
3536 fatal_error ("can't open %s for writing: %m", temporary_file_name);
3538 jcf_dependency_add_target (class_file_name);
3539 init_jcf_state (state, work);
3540 chunks = generate_classfile (clas, state);
3541 write_chunks (stream, chunks);
3542 if (fclose (stream))
3543 fatal_error ("error closing %s: %m", temporary_file_name);
3545 /* If a file named by the string pointed to by `new' exists
3546 prior to the call to the `rename' function, the behavior
3547 is implementation-defined. ISO 9899-1990 7.9.4.2.
3549 For example, on Win32 with MSVCRT, it is an error. */
3551 unlink (class_file_name);
3553 if (rename (temporary_file_name, class_file_name) == -1)
3555 int errno_saved = errno;
3556 remove (temporary_file_name);
3557 errno = errno_saved;
3558 fatal_error ("can't create %s: %m", class_file_name);
3560 free (temporary_file_name);
3561 free (class_file_name);
3563 release_jcf_state (state);
3566 /* TODO:
3567 string concatenation
3568 synchronized statement */
3571 #include "gt-java-jcf-write.h"