1 /* Write out a Java(TM) class file.
2 Copyright (C) 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005
3 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING. If not, write to
18 the Free Software Foundation, 51 Franklin Street, Fifth Floor,
19 Boston, MA 02110-1301, USA.
21 Java and all Java-based marks are trademarks or registered trademarks
22 of Sun Microsystems, Inc. in the United States and other countries.
23 The Free Software Foundation is independent of Sun Microsystems, Inc. */
25 #include "config.h"
26 #include "system.h"
27 #include "coretypes.h"
28 #include "tm.h"
29 #include "jcf.h"
30 #include "tree.h"
31 #include "real.h"
32 #include "java-tree.h"
33 #include "obstack.h"
34 #include "rtl.h"
35 #include "flags.h"
36 #include "java-opcodes.h"
37 #include "parse.h" /* for BLOCK_EXPR_BODY */
38 #include "buffer.h"
39 #include "toplev.h"
40 #include "ggc.h"
41 #include "tm_p.h"
43 extern struct obstack temporary_obstack;
45 /* Base directory in which `.class' files should be written.
46 NULL means to put the file into the same directory as the
47 corresponding .java file. */
48 const char *jcf_write_base_directory = NULL;
50 /* Make sure bytecode.data is big enough for at least N more bytes. */
52 #define RESERVE(N) \
53 do { CHECK_OP(state); \
54 if (state->bytecode.ptr + (N) > state->bytecode.limit) \
55 buffer_grow (&state->bytecode, N); } while (0)
57 /* Add a 1-byte instruction/operand I to bytecode.data,
58 assuming space has already been RESERVE'd. */
60 #define OP1(I) (*state->bytecode.ptr++ = (I), CHECK_OP(state))
62 /* Like OP1, but I is a 2-byte big endian integer. */
64 #define OP2(I) \
65 do { int _i = (I); OP1 (_i >> 8); OP1 (_i); CHECK_OP(state); } while (0)
67 /* Like OP1, but I is a 4-byte big endian integer. */
69 #define OP4(I) \
70 do { int _i = (I); OP1 (_i >> 24); OP1 (_i >> 16); \
71 OP1 (_i >> 8); OP1 (_i); CHECK_OP(state); } while (0)
73 /* Macro to call each time we push I words on the JVM stack. */
75 #define NOTE_PUSH(I) \
76 do { state->code_SP += (I); \
77 if (state->code_SP > state->code_SP_max) \
78 state->code_SP_max = state->code_SP; } while (0)
80 /* Macro to call each time we pop I words from the JVM stack. */
82 #define NOTE_POP(I) \
83 do { state->code_SP -= (I); gcc_assert (state->code_SP >= 0); } while (0)
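/* As an illustrative sketch (not copied from any one emitter below, and
   assuming a `state' variable in scope, as all the emitters have), a
   typical instruction is produced as

       RESERVE (1);
       OP1 (OPCODE_iadd);
       NOTE_POP (1);

   i.e. reserve buffer space first, append the raw (big-endian for OP2/OP4)
   bytes, then record the net JVM stack effect -- here iadd pops two int
   words and pushes one -- so that code_SP_max stays accurate.  */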
85 /* A chunk or segment of a .class file. */
87 struct chunk
89 /* The next segment of this .class file. */
90 struct chunk *next;
92 /* The actual data in this segment to be written to the .class file. */
93 unsigned char *data;
95 /* The size of the segment to be written to the .class file. */
96 int size;
99 #define PENDING_CLEANUP_PC (-3)
100 #define PENDING_EXIT_PC (-2)
101 #define UNDEFINED_PC (-1)
103 /* Each "block" represents a label plus the bytecode instructions following.
104 There may be branches out of the block, but no incoming jumps, except
105 to the beginning of the block.
107 If (pc < 0), the jcf_block is not an actual block (i.e. it has no
108 associated code yet), but it is an undefined label.
111 struct jcf_block
113 /* For blocks that are defined, the next block (in pc order).
114 For blocks that are not-yet-defined the end label of a LABELED_BLOCK_EXPR
115 or a cleanup expression (from a TRY_FINALLY_EXPR),
116 this is the next (outer) such end label, in a stack headed by
117 labeled_blocks in jcf_partial. */
118 struct jcf_block *next;
120 /* In the not-yet-defined end label for an unfinished EXIT_BLOCK_EXPR.
121 pc is PENDING_EXIT_PC.
122 In the not-yet-defined end label for pending cleanup subroutine,
123 pc is PENDING_CLEANUP_PC.
124 For other not-yet-defined labels, pc is UNDEFINED_PC.
126 If the label has been defined:
127 Until perform_relocations is finished, this is the maximum possible
128 value of the bytecode offset at the beginning of this block.
129 After perform_relocations, it is the actual offset (pc). */
130 int pc;
132 int linenumber;
134 /* After finish_jcf_block is called, the actual instructions
135 contained in this block. Before that NULL, and the instructions
136 are in state->bytecode. */
137 union {
138 struct chunk *chunk;
140 /* If pc==PENDING_CLEANUP_PC, start_label is the start of the region
141 covered by the cleanup. */
142 struct jcf_block *start_label;
143 } v;
145 union {
146 /* Set of relocations (in reverse offset order) for this block. */
147 struct jcf_relocation *relocations;
149 /* If this block is that of the not-yet-defined end label of
150 a LABELED_BLOCK_EXPR, where LABELED_BLOCK is that LABELED_BLOCK_EXPR.
151 If pc==PENDING_CLEANUP_PC, the cleanup that needs to be run. */
152 tree labeled_block;
153 } u;
156 /* A "relocation" type for the 0-3 bytes of padding at the start
157 of a tableswitch or a lookupswitch. */
158 #define SWITCH_ALIGN_RELOC 4
160 /* A relocation type for the labels in a tableswitch or a lookupswitch;
161 these are relative to the start of the instruction, but (due to
162 the 0-3 bytes of padding), we don't know the offset before relocation. */
163 #define BLOCK_START_RELOC 1
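/* A rough summary of how these kinds are used: emit_if records a
   relocation whose kind is the negated inverted opcode (kind < -1), and
   emit_goto/emit_jsr record OPCODE_goto_w/OPCODE_jsr_w (kind > 4).  If the
   final 2-byte offset turns out not to reach the target, the former can be
   rewritten as the inverted branch around a goto_w and the latter widened
   to its _w form; kinds 0 and BLOCK_START_RELOC are already 4-byte offsets
   and only need their values patched.  */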
165 struct jcf_relocation
167 /* Next relocation for the current jcf_block. */
168 struct jcf_relocation *next;
170 /* The (byte) offset within the current block that needs to be relocated. */
171 HOST_WIDE_INT offset;
173 /* 0 if offset is a 4-byte relative offset.
174 4 (SWITCH_ALIGN_RELOC) if offset points to 0-3 padding bytes inserted
175 for proper alignment in tableswitch/lookupswitch instructions.
176 1 (BLOCK_START_RELOC) if offset points to a 4-byte offset relative
177 to the start of the containing block.
178 -1 if offset is a 2-byte relative offset.
179 < -1 if offset is the address of an instruction with a 2-byte offset
180 that does not have a corresponding 4-byte offset version, in which
181 case the absolute value of kind is the inverted opcode.
182 > 4 if offset is the address of an instruction (such as jsr) with a
183 2-byte offset that does have a corresponding 4-byte offset version,
184 in which case kind is the opcode of the 4-byte version (such as jsr_w). */
185 int kind;
187 /* The label the relocation wants to actually transfer to. */
188 struct jcf_block *label;
191 #define RELOCATION_VALUE_0 ((HOST_WIDE_INT)0)
192 #define RELOCATION_VALUE_1 ((HOST_WIDE_INT)1)
194 /* State for single catch clause. */
196 struct jcf_handler
198 struct jcf_handler *next;
200 struct jcf_block *start_label;
201 struct jcf_block *end_label;
202 struct jcf_block *handler_label;
204 /* The sub-class of Throwable handled, or NULL_TREE (for finally). */
205 tree type;
208 /* State for the current switch statement. */
210 struct jcf_switch_state
212 struct jcf_switch_state *prev;
213 struct jcf_block *default_label;
215 struct jcf_relocation *cases;
216 int num_cases;
217 HOST_WIDE_INT min_case, max_case;
220 /* This structure is used to contain the various pieces that will
221 become a .class file. */
223 struct jcf_partial
225 struct chunk *first;
226 struct chunk *chunk;
227 struct obstack *chunk_obstack;
228 tree current_method;
230 /* List of basic blocks for the current method. */
231 struct jcf_block *blocks;
232 struct jcf_block *last_block;
234 struct localvar_info *first_lvar;
235 struct localvar_info *last_lvar;
236 int lvar_count;
238 CPool cpool;
240 int linenumber_count;
242 /* Until perform_relocations, this is an upper bound on the number
243 of bytes (so far) in the instructions for the current method. */
244 int code_length;
246 /* Stack of undefined ending labels for LABELED_BLOCK_EXPR. */
247 struct jcf_block *labeled_blocks;
249 /* The current stack size (stack pointer) in the current method. */
250 int code_SP;
252 /* The largest extent of stack size (stack pointer) in the current method. */
253 int code_SP_max;
255 /* Contains a mapping from local var slot number to localvar_info. */
256 struct buffer localvars;
258 /* The buffer allocated for bytecode for the current jcf_block. */
259 struct buffer bytecode;
261 /* Chain of exception handlers for the current method. */
262 struct jcf_handler *handlers;
264 /* Last element in handlers chain. */
265 struct jcf_handler *last_handler;
267 /* Number of exception handlers for the current method. */
268 int num_handlers;
270 /* Number of finalizers we are currently nested within. */
271 int num_finalizers;
273 /* If non-NULL, use this for the return value. */
274 tree return_value_decl;
276 /* Information about the current switch statement. */
277 struct jcf_switch_state *sw_state;
279 /* The count of jsr instructions that have been emitted. */
280 long num_jsrs;
283 static void generate_bytecode_insns (tree, int, struct jcf_partial *);
284 static struct chunk * alloc_chunk (struct chunk *, unsigned char *,
285 int, struct obstack *);
286 static unsigned char * append_chunk (unsigned char *, int,
287 struct jcf_partial *);
288 static void append_chunk_copy (unsigned char *, int, struct jcf_partial *);
289 static struct jcf_block * gen_jcf_label (struct jcf_partial *);
290 static void finish_jcf_block (struct jcf_partial *);
291 static void define_jcf_label (struct jcf_block *, struct jcf_partial *);
292 static struct jcf_block * get_jcf_label_here (struct jcf_partial *);
293 static void put_linenumber (int, struct jcf_partial *);
294 static void localvar_alloc (tree, struct jcf_partial *);
295 static void maybe_free_localvar (tree, struct jcf_partial *, int);
296 static int get_access_flags (tree);
297 static void write_chunks (FILE *, struct chunk *);
298 static int adjust_typed_op (tree, int);
299 static void generate_bytecode_conditional (tree, struct jcf_block *,
300 struct jcf_block *, int,
301 struct jcf_partial *);
302 static void generate_bytecode_return (tree, struct jcf_partial *);
303 static void perform_relocations (struct jcf_partial *);
304 static void init_jcf_state (struct jcf_partial *, struct obstack *);
305 static void init_jcf_method (struct jcf_partial *, tree);
306 static void release_jcf_state (struct jcf_partial *);
307 static int get_classfile_modifiers (tree class);
308 static struct chunk * generate_classfile (tree, struct jcf_partial *);
309 static struct jcf_handler *alloc_handler (struct jcf_block *,
310 struct jcf_block *,
311 struct jcf_partial *);
312 static void emit_iinc (tree, HOST_WIDE_INT, struct jcf_partial *);
313 static void emit_reloc (HOST_WIDE_INT, int, struct jcf_block *,
314 struct jcf_partial *);
315 static void push_constant1 (HOST_WIDE_INT, struct jcf_partial *);
316 static void push_constant2 (HOST_WIDE_INT, struct jcf_partial *);
317 static void push_int_const (HOST_WIDE_INT, struct jcf_partial *);
318 static int find_constant_wide (HOST_WIDE_INT, HOST_WIDE_INT,
319 struct jcf_partial *);
320 static void push_long_const (HOST_WIDE_INT, HOST_WIDE_INT,
321 struct jcf_partial *);
322 static int find_constant_index (tree, struct jcf_partial *);
323 static void push_long_const (HOST_WIDE_INT, HOST_WIDE_INT,
324 struct jcf_partial *);
325 static void field_op (tree, int, struct jcf_partial *);
326 static void maybe_wide (int, int, struct jcf_partial *);
327 static void emit_dup (int, int, struct jcf_partial *);
328 static void emit_pop (int, struct jcf_partial *);
329 static void emit_load_or_store (tree, int, struct jcf_partial *);
330 static void emit_load (tree, struct jcf_partial *);
331 static void emit_store (tree, struct jcf_partial *);
332 static void emit_unop (enum java_opcode, tree, struct jcf_partial *);
333 static void emit_binop (enum java_opcode, tree, struct jcf_partial *);
334 static void emit_reloc (HOST_WIDE_INT, int, struct jcf_block *,
335 struct jcf_partial *);
336 static void emit_switch_reloc (struct jcf_block *, struct jcf_partial *);
337 static void emit_case_reloc (struct jcf_relocation *, struct jcf_partial *);
338 static void emit_if (struct jcf_block *, int, int, struct jcf_partial *);
339 static void emit_goto (struct jcf_block *, struct jcf_partial *);
340 static void emit_jsr (struct jcf_block *, struct jcf_partial *);
341 static void call_cleanups (struct jcf_block *, struct jcf_partial *);
342 static char *make_class_file_name (tree);
343 static unsigned char *append_synthetic_attribute (struct jcf_partial *);
344 static void append_deprecated_attribute (struct jcf_partial *);
345 static void append_innerclasses_attribute (struct jcf_partial *, tree);
346 static void append_innerclasses_attribute_entry (struct jcf_partial *, tree, tree);
347 static void append_gcj_attribute (struct jcf_partial *, tree);
349 /* Utility macros for appending (big-endian) data to a buffer.
350 We assume a local variable 'ptr' points into where we want to
351 write next, and we assume enough space has been allocated. */
353 #ifdef ENABLE_JC1_CHECKING
354 static int CHECK_PUT (void *, struct jcf_partial *, int);
356 static int
357 CHECK_PUT (void *ptr, struct jcf_partial *state, int i)
359 gcc_assert ((unsigned char *) ptr >= state->chunk->data
360 && (unsigned char *) ptr + i <= state->chunk->data + state->chunk->size);
361 return 0;
363 #else
364 #define CHECK_PUT(PTR, STATE, I) ((void)0)
365 #endif
367 #define PUT1(X) (CHECK_PUT(ptr, state, 1), *ptr++ = (X))
368 #define PUT2(X) (PUT1((X) >> 8), PUT1((X) & 0xFF))
369 #define PUT4(X) (PUT2((X) >> 16), PUT2((X) & 0xFFFF))
370 #define PUTN(P, N) (CHECK_PUT(ptr, state, N), memcpy(ptr, P, N), ptr += (N))
372 /* There are some cases below where CHECK_PUT is guaranteed to fail.
373 Use the following macros in those specific cases. */
374 #define UNSAFE_PUT1(X) (*ptr++ = (X))
375 #define UNSAFE_PUT2(X) (UNSAFE_PUT1((X) >> 8), UNSAFE_PUT1((X) & 0xFF))
376 #define UNSAFE_PUT4(X) (UNSAFE_PUT2((X) >> 16), UNSAFE_PUT2((X) & 0xFFFF))
377 #define UNSAFE_PUTN(P, N) (memcpy(ptr, P, N), ptr += (N))
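/* For example, PUT2 (0x1234) appends the byte 0x12 followed by 0x34, and
   PUT4 likewise appends four bytes most-significant first -- the byte
   order the .class file format requires.  */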
380 /* Allocate a new chunk on obstack WORK, and link it in after LAST.
381 Set the data and size fields to DATA and SIZE, respectively.
382 However, if DATA is NULL and SIZE>0, allocate a buffer as well. */
384 static struct chunk *
385 alloc_chunk (struct chunk *last, unsigned char *data,
386 int size, struct obstack *work)
388 struct chunk *chunk = obstack_alloc (work, sizeof(struct chunk));
390 if (data == NULL && size > 0)
391 data = obstack_alloc (work, size);
393 chunk->next = NULL;
394 chunk->data = data;
395 chunk->size = size;
396 if (last != NULL)
397 last->next = chunk;
398 return chunk;
401 #ifdef ENABLE_JC1_CHECKING
402 static int CHECK_OP (struct jcf_partial *);
404 static int
405 CHECK_OP (struct jcf_partial *state)
407 gcc_assert (state->bytecode.ptr <= state->bytecode.limit);
408 return 0;
410 #else
411 #define CHECK_OP(STATE) ((void) 0)
412 #endif
414 static unsigned char *
415 append_chunk (unsigned char *data, int size, struct jcf_partial *state)
417 state->chunk = alloc_chunk (state->chunk, data, size, state->chunk_obstack);
418 if (state->first == NULL)
419 state->first = state->chunk;
420 return state->chunk->data;
423 static void
424 append_chunk_copy (unsigned char *data, int size, struct jcf_partial *state)
426 unsigned char *ptr = append_chunk (NULL, size, state);
427 memcpy (ptr, data, size);
430 static struct jcf_block *
431 gen_jcf_label (struct jcf_partial *state)
433 struct jcf_block *block
434 = obstack_alloc (state->chunk_obstack, sizeof (struct jcf_block));
435 block->next = NULL;
436 block->linenumber = -1;
437 block->pc = UNDEFINED_PC;
438 return block;
441 static void
442 finish_jcf_block (struct jcf_partial *state)
444 struct jcf_block *block = state->last_block;
445 struct jcf_relocation *reloc;
446 int code_length = BUFFER_LENGTH (&state->bytecode);
447 int pc = state->code_length;
448 append_chunk_copy (state->bytecode.data, code_length, state);
449 BUFFER_RESET (&state->bytecode);
450 block->v.chunk = state->chunk;
452 /* Calculate code_length to the maximum value it can have. */
453 pc += block->v.chunk->size;
454 for (reloc = block->u.relocations; reloc != NULL; reloc = reloc->next)
456 int kind = reloc->kind;
457 if (kind == SWITCH_ALIGN_RELOC)
458 pc += 3;
459 else if (kind > BLOCK_START_RELOC)
460 pc += 2; /* 2-byte offset may grow to 4-byte offset */
461 else if (kind < -1)
462 pc += 5; /* May need to add a goto_w. */
464 state->code_length = pc;
467 static void
468 define_jcf_label (struct jcf_block *label, struct jcf_partial *state)
470 if (state->last_block != NULL)
471 finish_jcf_block (state);
472 label->pc = state->code_length;
473 if (state->blocks == NULL)
474 state->blocks = label;
475 else
476 state->last_block->next = label;
477 state->last_block = label;
478 label->next = NULL;
479 label->u.relocations = NULL;
482 static struct jcf_block *
483 get_jcf_label_here (struct jcf_partial *state)
485 if (state->last_block != NULL && BUFFER_LENGTH (&state->bytecode) == 0)
486 return state->last_block;
487 else
489 struct jcf_block *label = gen_jcf_label (state);
490 define_jcf_label (label, state);
491 return label;
495 /* Note a line number entry for the current PC and given LINE. */
497 static void
498 put_linenumber (int line, struct jcf_partial *state)
500 struct jcf_block *label = get_jcf_label_here (state);
501 if (label->linenumber > 0)
503 label = gen_jcf_label (state);
504 define_jcf_label (label, state);
506 label->linenumber = line;
507 state->linenumber_count++;
510 /* Allocate a new jcf_handler, for a catch clause that catches exceptions
511 in the range (START_LABEL, END_LABEL). */
513 static struct jcf_handler *
514 alloc_handler (struct jcf_block *start_label, struct jcf_block *end_label,
515 struct jcf_partial *state)
517 struct jcf_handler *handler
518 = obstack_alloc (state->chunk_obstack, sizeof (struct jcf_handler));
519 handler->start_label = start_label;
520 handler->end_label = end_label;
521 handler->handler_label = get_jcf_label_here (state);
522 if (state->handlers == NULL)
523 state->handlers = handler;
524 else
525 state->last_handler->next = handler;
526 state->last_handler = handler;
527 handler->next = NULL;
528 state->num_handlers++;
529 return handler;
533 /* The index of jvm local variable allocated for this DECL.
534 This is assigned when generating .class files;
535 contrast DECL_LOCAL_SLOT_NUMBER which is set when *reading* a .class file.
536 (We don't allocate DECL_LANG_SPECIFIC for locals from Java source code.) */
538 #define DECL_LOCAL_INDEX(DECL) DECL_ALIGN(DECL)
540 struct localvar_info
542 struct localvar_info *next;
544 tree decl;
545 struct jcf_block *start_label;
546 struct jcf_block *end_label;
549 #define localvar_buffer ((struct localvar_info**) state->localvars.data)
550 #define localvar_max \
551 ((struct localvar_info**) state->localvars.ptr - localvar_buffer)
553 static void
554 localvar_alloc (tree decl, struct jcf_partial *state)
556 struct jcf_block *start_label = get_jcf_label_here (state);
557 int wide = TYPE_IS_WIDE (TREE_TYPE (decl));
558 int index;
559 struct localvar_info *info;
560 struct localvar_info **ptr = localvar_buffer;
561 struct localvar_info **limit
562 = (struct localvar_info**) state->localvars.ptr;
563 for (index = 0; ptr < limit; index++, ptr++)
565 if (ptr[0] == NULL
566 && (! wide || ((ptr+1) < limit && ptr[1] == NULL)))
567 break;
569 if (ptr == limit)
571 buffer_grow (&state->localvars, 2 * sizeof (struct localvar_info*));
572 ptr = (struct localvar_info**) state->localvars.data + index;
573 state->localvars.ptr = (unsigned char *) (ptr + 1 + wide);
575 info = obstack_alloc (state->chunk_obstack, sizeof (struct localvar_info));
576 ptr[0] = info;
577 if (wide)
578 ptr[1] = (struct localvar_info *)(~0);
579 DECL_LOCAL_INDEX (decl) = index;
580 info->decl = decl;
581 info->start_label = start_label;
583 if (debug_info_level > DINFO_LEVEL_TERSE
584 && DECL_NAME (decl) != NULL_TREE)
586 /* Generate debugging info. */
587 info->next = NULL;
588 if (state->last_lvar != NULL)
589 state->last_lvar->next = info;
590 else
591 state->first_lvar = info;
592 state->last_lvar = info;
593 state->lvar_count++;
597 static void
598 maybe_free_localvar (tree decl, struct jcf_partial *state, int really)
600 struct jcf_block *end_label = get_jcf_label_here (state);
601 int index = DECL_LOCAL_INDEX (decl);
602 struct localvar_info **ptr = &localvar_buffer [index];
603 struct localvar_info *info = *ptr;
604 int wide = TYPE_IS_WIDE (TREE_TYPE (decl));
606 info->end_label = end_label;
608 gcc_assert (info->decl == decl);
609 if (! really)
610 return;
611 ptr[0] = NULL;
612 if (wide)
614 gcc_assert (ptr[1] == (struct localvar_info *) (~0));
615 ptr[1] = NULL;
620 #define STACK_TARGET 1
621 #define IGNORE_TARGET 2
623 /* Get the access flags of a class (TYPE_DECL), a method (FUNCTION_DECL), or
624 a field (FIELD_DECL or VAR_DECL, if static), as encoded in a .class file. */
626 static int
627 get_access_flags (tree decl)
629 int flags = 0;
630 int isfield = TREE_CODE (decl) == FIELD_DECL || TREE_CODE (decl) == VAR_DECL;
632 if (isfield || TREE_CODE (decl) == FUNCTION_DECL)
634 if (TREE_PROTECTED (decl))
635 flags |= ACC_PROTECTED;
636 if (TREE_PRIVATE (decl))
637 flags |= ACC_PRIVATE;
639 else if (TREE_CODE (decl) == TYPE_DECL)
641 if (CLASS_PUBLIC (decl))
642 flags |= ACC_PUBLIC;
643 if (CLASS_FINAL (decl))
644 flags |= ACC_FINAL;
645 if (CLASS_SUPER (decl))
646 flags |= ACC_SUPER;
647 if (CLASS_ABSTRACT (decl))
648 flags |= ACC_ABSTRACT;
649 if (CLASS_INTERFACE (decl))
650 flags |= ACC_INTERFACE;
651 if (CLASS_STATIC (decl))
652 flags |= ACC_STATIC;
653 if (CLASS_PRIVATE (decl))
654 flags |= ACC_PRIVATE;
655 if (CLASS_PROTECTED (decl))
656 flags |= ACC_PROTECTED;
657 if (ANONYMOUS_CLASS_P (TREE_TYPE (decl))
658 || LOCAL_CLASS_P (TREE_TYPE (decl)))
659 flags |= ACC_PRIVATE;
660 if (CLASS_STRICTFP (decl))
661 flags |= ACC_STRICT;
663 else
664 gcc_unreachable ();
666 if (TREE_CODE (decl) == FUNCTION_DECL)
668 if (METHOD_PUBLIC (decl))
669 flags |= ACC_PUBLIC;
670 if (METHOD_FINAL (decl))
671 flags |= ACC_FINAL;
672 if (METHOD_NATIVE (decl))
673 flags |= ACC_NATIVE;
674 if (METHOD_STATIC (decl))
675 flags |= ACC_STATIC;
676 if (METHOD_SYNCHRONIZED (decl))
677 flags |= ACC_SYNCHRONIZED;
678 if (METHOD_ABSTRACT (decl))
679 flags |= ACC_ABSTRACT;
680 if (METHOD_STRICTFP (decl))
681 flags |= ACC_STRICT;
683 if (isfield)
685 if (FIELD_PUBLIC (decl))
686 flags |= ACC_PUBLIC;
687 if (FIELD_FINAL (decl))
688 flags |= ACC_FINAL;
689 if (FIELD_STATIC (decl))
690 flags |= ACC_STATIC;
691 if (FIELD_VOLATILE (decl))
692 flags |= ACC_VOLATILE;
693 if (FIELD_TRANSIENT (decl))
694 flags |= ACC_TRANSIENT;
696 return flags;
699 /* Write the list of segments starting at CHUNKS to STREAM. */
701 static void
702 write_chunks (FILE* stream, struct chunk *chunks)
704 for (; chunks != NULL; chunks = chunks->next)
705 fwrite (chunks->data, chunks->size, 1, stream);
708 /* Push a 1-word constant in the constant pool at the given INDEX.
709 (Caller is responsible for doing NOTE_PUSH.) */
711 static void
712 push_constant1 (HOST_WIDE_INT index, struct jcf_partial *state)
714 RESERVE (3);
715 if (index < 256)
717 OP1 (OPCODE_ldc);
718 OP1 (index);
720 else
722 OP1 (OPCODE_ldc_w);
723 OP2 (index);
727 /* Push a 2-word constant in the constant pool at the given INDEX.
728 (Caller is responsible for doing NOTE_PUSH.) */
730 static void
731 push_constant2 (HOST_WIDE_INT index, struct jcf_partial *state)
733 RESERVE (3);
734 OP1 (OPCODE_ldc2_w);
735 OP2 (index);
738 /* Push 32-bit integer constant on VM stack.
739 Caller is responsible for doing NOTE_PUSH. */
741 static void
742 push_int_const (HOST_WIDE_INT i, struct jcf_partial *state)
744 RESERVE(3);
745 if (i >= -1 && i <= 5)
746 OP1(OPCODE_iconst_0 + i);
747 else if (i >= -128 && i < 128)
749 OP1(OPCODE_bipush);
750 OP1(i);
752 else if (i >= -32768 && i < 32768)
754 OP1(OPCODE_sipush);
755 OP2(i);
757 else
759 i = find_constant1 (&state->cpool, CONSTANT_Integer,
760 (jword)(i & 0xFFFFFFFF));
761 push_constant1 (i, state);
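/* A rough picture of the encodings chosen above: -1 through 5 become a
   single iconst_<i> byte, values fitting in a signed byte use bipush,
   values fitting in a signed short use sipush, and anything larger is
   interned as a CONSTANT_Integer and loaded via ldc or ldc_w.  So 100
   costs two bytes (bipush 100), while 70000 costs a constant pool entry
   plus the ldc/ldc_w.  */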
765 static int
766 find_constant_wide (HOST_WIDE_INT lo, HOST_WIDE_INT hi,
767 struct jcf_partial *state)
769 unsigned HOST_WIDE_INT w1;
770 HOST_WIDE_INT w2;
771 lshift_double (lo, hi, -32, 64, &w1, &w2, 1);
772 return find_constant2 (&state->cpool, CONSTANT_Long,
773 (jword)(w1 & 0xFFFFFFFF), (jword)(lo & 0xFFFFFFFF));
776 /* Find or allocate a constant pool entry for the given VALUE.
777 Return the index in the constant pool. */
779 static int
780 find_constant_index (tree value, struct jcf_partial *state)
782 if (TREE_CODE (value) == INTEGER_CST)
784 if (TYPE_PRECISION (TREE_TYPE (value)) <= 32)
785 return find_constant1 (&state->cpool, CONSTANT_Integer,
786 (jword)(TREE_INT_CST_LOW (value) & 0xFFFFFFFF));
787 else
788 return find_constant_wide (TREE_INT_CST_LOW (value),
789 TREE_INT_CST_HIGH (value), state);
791 else if (TREE_CODE (value) == REAL_CST)
793 long words[2];
795 /* IEEE NaN can have many values, but the Java VM spec defines a
796 canonical NaN. */
797 if (flag_emit_class_files
798 && REAL_VALUE_ISNAN (TREE_REAL_CST (value)))
800 if (TYPE_PRECISION (TREE_TYPE (value)) == 32)
801 return find_constant1 (&state->cpool, CONSTANT_Float,
802 0x7fc00000);
803 else
804 return find_constant2 (&state->cpool, CONSTANT_Double,
805 0x7ff80000, 0x00000000);
808 real_to_target (words, &TREE_REAL_CST (value),
809 TYPE_MODE (TREE_TYPE (value)));
810 words[0] &= 0xffffffff;
811 words[1] &= 0xffffffff;
813 if (TYPE_PRECISION (TREE_TYPE (value)) == 32)
814 return find_constant1 (&state->cpool, CONSTANT_Float, (jword)words[0]);
815 else
816 return find_constant2 (&state->cpool, CONSTANT_Double,
817 (jword)words[1-FLOAT_WORDS_BIG_ENDIAN],
818 (jword)words[FLOAT_WORDS_BIG_ENDIAN]);
820 else if (TREE_CODE (value) == STRING_CST)
821 return find_string_constant (&state->cpool, value);
823 else
824 gcc_unreachable ();
827 /* Push 64-bit long constant on VM stack.
828 Caller is responsible for doing NOTE_PUSH. */
830 static void
831 push_long_const (HOST_WIDE_INT lo, HOST_WIDE_INT hi, struct jcf_partial *state)
833 unsigned HOST_WIDE_INT highpart;
834 HOST_WIDE_INT dummy;
835 jint lowpart = WORD_TO_INT (lo);
837 rshift_double (lo, hi, 32, 64, &highpart, &dummy, 1);
839 if (highpart == 0 && (lowpart == 0 || lowpart == 1))
841 RESERVE(1);
842 OP1(OPCODE_lconst_0 + lowpart);
844 else if ((highpart == 0 && lowpart > 0 && lowpart < 32768)
845 || (highpart == (unsigned HOST_WIDE_INT)-1
846 && lowpart < 0 && lowpart >= -32768))
848 push_int_const (lowpart, state);
849 RESERVE (1);
850 OP1 (OPCODE_i2l);
852 else
853 push_constant2 (find_constant_wide (lo, hi, state), state);
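/* For example, the long constant 1 becomes a single lconst_1, 1000L
   becomes sipush 1000 followed by i2l, and anything wider is interned as
   a CONSTANT_Long and loaded with ldc2_w.  */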
856 static void
857 field_op (tree field, int opcode, struct jcf_partial *state)
859 int index = find_fieldref_index (&state->cpool, field);
860 RESERVE (3);
861 OP1 (opcode);
862 OP2 (index);
865 /* Returns an integer in the range 0 (for 'int') through 4 (for object
866 reference) to 7 (for 'short') which matches the pattern of how JVM
867 opcodes typically depend on the operand type. */
869 static int
870 adjust_typed_op (tree type, int max)
872 switch (TREE_CODE (type))
874 case POINTER_TYPE:
875 case RECORD_TYPE: return 4;
876 case BOOLEAN_TYPE:
877 return TYPE_PRECISION (type) == 32 || max < 5 ? 0 : 5;
878 case INTEGER_TYPE:
879 if (type == char_type_node || type == promoted_char_type_node)
880 return TYPE_PRECISION (type) == 32 || max < 6 ? 0 : 6;
881 switch (TYPE_PRECISION (type))
883 case 8: return max < 5 ? 0 : 5;
884 case 16: return max < 7 ? 0 : 7;
885 case 32: return 0;
886 case 64: return 1;
888 break;
889 case REAL_TYPE:
890 switch (TYPE_PRECISION (type))
892 case 32: return 2;
893 case 64: return 3;
895 break;
896 default:
897 break;
899 gcc_unreachable ();
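/* For example, OPCODE_iload + adjust_typed_op (type, 4) selects iload,
   lload, fload, dload or aload depending on TYPE, and
   OPCODE_iaload + adjust_typed_op (type, 7) selects the matching
   array-load opcode, including the byte, char and short variants.  */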
902 static void
903 maybe_wide (int opcode, int index, struct jcf_partial *state)
905 if (index >= 256)
907 RESERVE (4);
908 OP1 (OPCODE_wide);
909 OP1 (opcode);
910 OP2 (index);
912 else
914 RESERVE (2);
915 OP1 (opcode);
916 OP1 (index);
920 /* Compile code to duplicate with offset, where
921 SIZE is the size of the stack item to duplicate (1 or 2), and
922 OFFSET is where to insert the result (must be 0, 1, or 2).
923 (The new words get inserted at stack[SP-size-offset].) */
925 static void
926 emit_dup (int size, int offset, struct jcf_partial *state)
928 int kind;
929 if (size == 0)
930 return;
931 RESERVE(1);
932 if (offset == 0)
933 kind = size == 1 ? OPCODE_dup : OPCODE_dup2;
934 else if (offset == 1)
935 kind = size == 1 ? OPCODE_dup_x1 : OPCODE_dup2_x1;
936 else if (offset == 2)
937 kind = size == 1 ? OPCODE_dup_x2 : OPCODE_dup2_x2;
938 else
939 gcc_unreachable ();
940 OP1 (kind);
941 NOTE_PUSH (size);
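/* For instance, emit_dup (1, 0) emits a plain dup (..., a => ..., a, a),
   while emit_dup (1, 1) emits dup_x1, tucking the copy beneath the next
   word (..., a, b => ..., b, a, b).  The increment and assignment cases
   below rely on this to keep array/object operands around while the new
   value is computed.  */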
944 static void
945 emit_pop (int size, struct jcf_partial *state)
947 RESERVE (1);
948 OP1 (OPCODE_pop - 1 + size);
951 static void
952 emit_iinc (tree var, HOST_WIDE_INT value, struct jcf_partial *state)
954 int slot = DECL_LOCAL_INDEX (var);
956 if (value < -128 || value > 127 || slot >= 256)
958 RESERVE (6);
959 OP1 (OPCODE_wide);
960 OP1 (OPCODE_iinc);
961 OP2 (slot);
962 OP2 (value);
964 else
966 RESERVE (3);
967 OP1 (OPCODE_iinc);
968 OP1 (slot);
969 OP1 (value);
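/* For example, incrementing local slot 2 by 1 is the three-byte sequence
   iinc 2 1, whereas a slot number of 256 or more, or a delta outside
   -128..127, forces the six-byte wide form emitted above.  */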
973 static void
974 emit_load_or_store (tree var, /* Variable to load from or store into. */
975 int opcode, /* Either OPCODE_iload or OPCODE_istore. */
976 struct jcf_partial *state)
978 tree type = TREE_TYPE (var);
979 int kind = adjust_typed_op (type, 4);
980 int index = DECL_LOCAL_INDEX (var);
981 if (index <= 3)
983 RESERVE (1);
984 OP1 (opcode + 5 + 4 * kind + index); /* [ilfda]{load,store}_[0123] */
986 else
987 maybe_wide (opcode + kind, index, state); /* [ilfda]{load,store} */
990 static void
991 emit_load (tree var, struct jcf_partial *state)
993 emit_load_or_store (var, OPCODE_iload, state);
994 NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (var)) ? 2 : 1);
997 static void
998 emit_store (tree var, struct jcf_partial *state)
1000 emit_load_or_store (var, OPCODE_istore, state);
1001 NOTE_POP (TYPE_IS_WIDE (TREE_TYPE (var)) ? 2 : 1);
1004 static void
1005 emit_unop (enum java_opcode opcode, tree type ATTRIBUTE_UNUSED,
1006 struct jcf_partial *state)
1008 RESERVE(1);
1009 OP1 (opcode);
1012 static void
1013 emit_binop (enum java_opcode opcode, tree type, struct jcf_partial *state)
1015 int size = TYPE_IS_WIDE (type) ? 2 : 1;
1016 RESERVE(1);
1017 OP1 (opcode);
1018 NOTE_POP (size);
1021 static void
1022 emit_reloc (HOST_WIDE_INT value, int kind,
1023 struct jcf_block *target, struct jcf_partial *state)
1025 struct jcf_relocation *reloc
1026 = obstack_alloc (state->chunk_obstack, sizeof (struct jcf_relocation));
1027 struct jcf_block *block = state->last_block;
1028 reloc->next = block->u.relocations;
1029 block->u.relocations = reloc;
1030 reloc->offset = BUFFER_LENGTH (&state->bytecode);
1031 reloc->label = target;
1032 reloc->kind = kind;
1033 if (kind == 0 || kind == BLOCK_START_RELOC)
1034 OP4 (value);
1035 else if (kind != SWITCH_ALIGN_RELOC)
1036 OP2 (value);
1039 static void
1040 emit_switch_reloc (struct jcf_block *label, struct jcf_partial *state)
1042 emit_reloc (RELOCATION_VALUE_0, BLOCK_START_RELOC, label, state);
1045 /* Similar to emit_switch_reloc,
1046 but re-uses an existing case reloc. */
1048 static void
1049 emit_case_reloc (struct jcf_relocation *reloc, struct jcf_partial *state)
1051 struct jcf_block *block = state->last_block;
1052 reloc->next = block->u.relocations;
1053 block->u.relocations = reloc;
1054 reloc->offset = BUFFER_LENGTH (&state->bytecode);
1055 reloc->kind = BLOCK_START_RELOC;
1056 OP4 (0);
1059 /* Emit a conditional jump to TARGET with a 2-byte relative jump offset.
1060 The opcode is OPCODE, the inverted opcode is INV_OPCODE. */
1062 static void
1063 emit_if (struct jcf_block *target, int opcode, int inv_opcode,
1064 struct jcf_partial *state)
1066 RESERVE(3);
1067 OP1 (opcode);
1068 /* value is 1 byte from reloc back to start of instruction. */
1069 emit_reloc (RELOCATION_VALUE_1, - inv_opcode, target, state);
1072 static void
1073 emit_goto (struct jcf_block *target, struct jcf_partial *state)
1075 RESERVE(3);
1076 OP1 (OPCODE_goto);
1077 /* Value is 1 byte from reloc back to start of instruction. */
1078 emit_reloc (RELOCATION_VALUE_1, OPCODE_goto_w, target, state);
1081 static void
1082 emit_jsr (struct jcf_block *target, struct jcf_partial *state)
1084 RESERVE(3);
1085 OP1 (OPCODE_jsr);
1086 /* Value is 1 byte from reloc back to start of instruction. */
1087 emit_reloc (RELOCATION_VALUE_1, OPCODE_jsr_w, target, state);
1088 state->num_jsrs++;
1091 /* Generate code to evaluate EXP. If the result is true,
1092 branch to TRUE_LABEL; otherwise, branch to FALSE_LABEL.
1093 TRUE_BRANCH_FIRST is a code generation hint that the
1094 TRUE_LABEL may follow right after this. (The idea is that we
1095 may be able to optimize away GOTO TRUE_LABEL; TRUE_LABEL:) */
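/* A sketch of the typical output for a comparison such as `a < b' when
   TRUE_BRANCH_FIRST is set:

       if_icmpge  FALSE_LABEL
       goto       TRUE_LABEL

   so that when TRUE_LABEL really does follow immediately, the trailing
   goto is the one that may be optimized away as noted above.  */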
1097 static void
1098 generate_bytecode_conditional (tree exp,
1099 struct jcf_block *true_label,
1100 struct jcf_block *false_label,
1101 int true_branch_first,
1102 struct jcf_partial *state)
1104 tree exp0, exp1, type;
1105 int save_SP = state->code_SP;
1106 enum java_opcode op, negop;
1107 bool unordered = 0;
1109 switch (TREE_CODE (exp))
1111 case INTEGER_CST:
1112 emit_goto (integer_zerop (exp) ? false_label : true_label, state);
1113 break;
1114 case COND_EXPR:
1116 struct jcf_block *then_label = gen_jcf_label (state);
1117 struct jcf_block *else_label = gen_jcf_label (state);
1118 int save_SP_before, save_SP_after;
1119 generate_bytecode_conditional (TREE_OPERAND (exp, 0),
1120 then_label, else_label, 1, state);
1121 define_jcf_label (then_label, state);
1122 save_SP_before = state->code_SP;
1123 generate_bytecode_conditional (TREE_OPERAND (exp, 1),
1124 true_label, false_label, 1, state);
1125 save_SP_after = state->code_SP;
1126 state->code_SP = save_SP_before;
1127 define_jcf_label (else_label, state);
1128 generate_bytecode_conditional (TREE_OPERAND (exp, 2),
1129 true_label, false_label,
1130 true_branch_first, state);
1131 gcc_assert (state->code_SP == save_SP_after);
1133 break;
1134 case TRUTH_NOT_EXPR:
1135 generate_bytecode_conditional (TREE_OPERAND (exp, 0), false_label,
1136 true_label, ! true_branch_first, state);
1137 break;
1138 case TRUTH_ANDIF_EXPR:
1140 struct jcf_block *next_label = gen_jcf_label (state);
1141 generate_bytecode_conditional (TREE_OPERAND (exp, 0),
1142 next_label, false_label, 1, state);
1143 define_jcf_label (next_label, state);
1144 generate_bytecode_conditional (TREE_OPERAND (exp, 1),
1145 true_label, false_label, 1, state);
1147 break;
1148 case TRUTH_ORIF_EXPR:
1150 struct jcf_block *next_label = gen_jcf_label (state);
1151 generate_bytecode_conditional (TREE_OPERAND (exp, 0),
1152 true_label, next_label, 1, state);
1153 define_jcf_label (next_label, state);
1154 generate_bytecode_conditional (TREE_OPERAND (exp, 1),
1155 true_label, false_label, 1, state);
1157 break;
1158 compare_1:
1159 /* Assuming op is one of the 2-operand if_icmp<COND> instructions,
1160 set it to the corresponding 1-operand if<COND> instruction. */
1161 op = op - 6;
1162 /* FALLTHROUGH */
1163 compare_2:
1164 /* The opcodes with their inverses are allocated in pairs.
1165 E.g. The inverse of if_icmplt (161) is if_icmpge (162). */
1166 negop = (op & 1) ? op + 1 : op - 1;
1167 compare_2_ptr:
1168 if (true_branch_first)
1170 emit_if (false_label, negop, op, state);
1171 emit_goto (true_label, state);
1173 else
1175 emit_if (true_label, op, negop, state);
1176 emit_goto (false_label, state);
1178 break;
1180 case UNEQ_EXPR:
1181 unordered = 1;
1182 case EQ_EXPR:
1183 op = OPCODE_if_icmpeq;
1184 goto compare;
1186 case LTGT_EXPR:
1187 unordered = 1;
1188 case NE_EXPR:
1189 op = OPCODE_if_icmpne;
1190 goto compare;
1192 case UNLE_EXPR:
1193 unordered = 1;
1194 case GT_EXPR:
1195 op = OPCODE_if_icmpgt;
1196 goto compare;
1198 case UNGE_EXPR:
1199 unordered = 1;
1200 case LT_EXPR:
1201 op = OPCODE_if_icmplt;
1202 goto compare;
1204 case UNLT_EXPR:
1205 unordered = 1;
1206 case GE_EXPR:
1207 op = OPCODE_if_icmpge;
1208 goto compare;
1210 case UNGT_EXPR:
1211 unordered = 1;
1212 case LE_EXPR:
1213 op = OPCODE_if_icmple;
1214 goto compare;
1216 compare:
1217 if (unordered)
1219 /* UNLT_EXPR(a, b) means 'a < b || unordered(a, b)'. This is
1220 the same as the Java source expression '!(a >= b)', so handle
1221 it that way. */
1222 struct jcf_block *tmp = true_label;
1223 true_label = false_label;
1224 false_label = tmp;
1225 true_branch_first = !true_branch_first;
1228 exp0 = TREE_OPERAND (exp, 0);
1229 exp1 = TREE_OPERAND (exp, 1);
1230 type = TREE_TYPE (exp0);
1231 switch (TREE_CODE (type))
1233 int opf;
1234 case POINTER_TYPE: case RECORD_TYPE:
1235 switch (TREE_CODE (exp))
1237 case EQ_EXPR: op = OPCODE_if_acmpeq; break;
1238 case NE_EXPR: op = OPCODE_if_acmpne; break;
1239 default:
1240 gcc_unreachable ();
1242 if (integer_zerop (exp1) || integer_zerop (exp0))
1244 generate_bytecode_insns (integer_zerop (exp0) ? exp1 : exp0,
1245 STACK_TARGET, state);
1246 op = op + (OPCODE_ifnull - OPCODE_if_acmpeq);
1247 negop = (op & 1) ? op - 1 : op + 1;
1248 NOTE_POP (1);
1249 goto compare_2_ptr;
1251 generate_bytecode_insns (exp0, STACK_TARGET, state);
1252 generate_bytecode_insns (exp1, STACK_TARGET, state);
1253 NOTE_POP (2);
1254 goto compare_2;
1255 case REAL_TYPE:
1256 generate_bytecode_insns (exp0, STACK_TARGET, state);
1257 generate_bytecode_insns (exp1, STACK_TARGET, state);
1258 if (op == OPCODE_if_icmplt || op == OPCODE_if_icmple)
1259 opf = OPCODE_fcmpg;
1260 else
1261 opf = OPCODE_fcmpl;
1262 if (TYPE_PRECISION (type) > 32)
1264 opf += 2;
1265 NOTE_POP (4);
1267 else
1268 NOTE_POP (2);
1269 RESERVE (1);
1270 OP1 (opf);
1271 goto compare_1;
1272 case INTEGER_TYPE:
1273 if (TYPE_PRECISION (type) > 32)
1275 generate_bytecode_insns (exp0, STACK_TARGET, state);
1276 generate_bytecode_insns (exp1, STACK_TARGET, state);
1277 NOTE_POP (4);
1278 RESERVE (1);
1279 OP1 (OPCODE_lcmp);
1280 goto compare_1;
1282 /* FALLTHROUGH */
1283 default:
1284 if (integer_zerop (exp1))
1286 generate_bytecode_insns (exp0, STACK_TARGET, state);
1287 NOTE_POP (1);
1288 goto compare_1;
1290 if (integer_zerop (exp0))
1292 switch (op)
1294 case OPCODE_if_icmplt:
1295 case OPCODE_if_icmpge:
1296 op += 2;
1297 break;
1298 case OPCODE_if_icmpgt:
1299 case OPCODE_if_icmple:
1300 op -= 2;
1301 break;
1302 default:
1303 break;
1305 generate_bytecode_insns (exp1, STACK_TARGET, state);
1306 NOTE_POP (1);
1307 goto compare_1;
1309 generate_bytecode_insns (exp0, STACK_TARGET, state);
1310 generate_bytecode_insns (exp1, STACK_TARGET, state);
1311 NOTE_POP (2);
1312 goto compare_2;
1315 default:
1316 generate_bytecode_insns (exp, STACK_TARGET, state);
1317 NOTE_POP (1);
1318 if (true_branch_first)
1320 emit_if (false_label, OPCODE_ifeq, OPCODE_ifne, state);
1321 emit_goto (true_label, state);
1323 else
1325 emit_if (true_label, OPCODE_ifne, OPCODE_ifeq, state);
1326 emit_goto (false_label, state);
1328 break;
1330 gcc_assert (save_SP == state->code_SP);
1333 /* Call pending cleanups, i.e. those for surrounding TRY_FINALLY_EXPRs,
1334 but only as far out as LIMIT (since we are about to jump to the
1335 label that is LIMIT). */
1337 static void
1338 call_cleanups (struct jcf_block *limit, struct jcf_partial *state)
1340 struct jcf_block *block = state->labeled_blocks;
1341 for (; block != limit; block = block->next)
1343 if (block->pc == PENDING_CLEANUP_PC)
1344 emit_jsr (block, state);
1348 static void
1349 generate_bytecode_return (tree exp, struct jcf_partial *state)
1351 tree return_type = TREE_TYPE (TREE_TYPE (state->current_method));
1352 int returns_void = TREE_CODE (return_type) == VOID_TYPE;
1353 int op;
1354 again:
1355 if (exp != NULL)
1357 switch (TREE_CODE (exp))
1359 case COMPOUND_EXPR:
1360 generate_bytecode_insns (TREE_OPERAND (exp, 0), IGNORE_TARGET,
1361 state);
1362 exp = TREE_OPERAND (exp, 1);
1363 goto again;
1364 case COND_EXPR:
1366 struct jcf_block *then_label = gen_jcf_label (state);
1367 struct jcf_block *else_label = gen_jcf_label (state);
1368 generate_bytecode_conditional (TREE_OPERAND (exp, 0),
1369 then_label, else_label, 1, state);
1370 define_jcf_label (then_label, state);
1371 generate_bytecode_return (TREE_OPERAND (exp, 1), state);
1372 define_jcf_label (else_label, state);
1373 generate_bytecode_return (TREE_OPERAND (exp, 2), state);
1375 return;
1376 default:
1377 generate_bytecode_insns (exp,
1378 returns_void ? IGNORE_TARGET
1379 : STACK_TARGET, state);
1382 if (returns_void)
1384 op = OPCODE_return;
1385 call_cleanups (NULL, state);
1387 else
1389 op = OPCODE_ireturn + adjust_typed_op (return_type, 4);
1390 if (state->num_finalizers > 0)
1392 if (state->return_value_decl == NULL_TREE)
1394 state->return_value_decl
1395 = build_decl (VAR_DECL, NULL_TREE, TREE_TYPE (exp));
1396 localvar_alloc (state->return_value_decl, state);
1398 emit_store (state->return_value_decl, state);
1399 call_cleanups (NULL, state);
1400 emit_load (state->return_value_decl, state);
1401 /* If we call maybe_free_localvar (state->return_value_decl, state, 1),
1402 then we risk the saved decl being erroneously re-used in the
1403 finalizer. Instead, we keep the state->return_value_decl
1404 allocated through the rest of the method. This is not
1405 the greatest solution, but it is at least simple and safe. */
1408 RESERVE (1);
1409 OP1 (op);
1412 /* Generate bytecode for sub-expression EXP of METHOD.
1413 TARGET is one of STACK_TARGET or IGNORE_TARGET. */
1415 static void
1416 generate_bytecode_insns (tree exp, int target, struct jcf_partial *state)
1418 tree type, arg;
1419 enum java_opcode jopcode;
1420 int op;
1421 HOST_WIDE_INT value;
1422 int post_op;
1423 int size;
1424 int offset;
1426 if (exp == NULL && target == IGNORE_TARGET)
1427 return;
1429 type = TREE_TYPE (exp);
1431 switch (TREE_CODE (exp))
1433 case BLOCK:
1434 if (BLOCK_EXPR_BODY (exp))
1436 tree local;
1437 tree body = BLOCK_EXPR_BODY (exp);
1438 long jsrs = state->num_jsrs;
1439 for (local = BLOCK_EXPR_DECLS (exp); local; )
1441 tree next = TREE_CHAIN (local);
1442 localvar_alloc (local, state);
1443 local = next;
1445 /* Avoid deep recursion for long blocks. */
1446 while (TREE_CODE (body) == COMPOUND_EXPR)
1448 generate_bytecode_insns (TREE_OPERAND (body, 0), target, state);
1449 body = TREE_OPERAND (body, 1);
1451 generate_bytecode_insns (body, target, state);
1453 for (local = BLOCK_EXPR_DECLS (exp); local; )
1455 tree next = TREE_CHAIN (local);
1456 maybe_free_localvar (local, state, state->num_jsrs <= jsrs);
1457 local = next;
1460 break;
1461 case COMPOUND_EXPR:
1462 generate_bytecode_insns (TREE_OPERAND (exp, 0), IGNORE_TARGET, state);
1463 /* Normally the first operand to a COMPOUND_EXPR must complete
1464 normally. However, in the special case of a do-while
1465 statement this is not necessarily the case. */
1466 if (CAN_COMPLETE_NORMALLY (TREE_OPERAND (exp, 0)))
1467 generate_bytecode_insns (TREE_OPERAND (exp, 1), target, state);
1468 break;
1469 case EXPR_WITH_FILE_LOCATION:
1471 location_t saved_location = input_location;
1472 tree body = EXPR_WFL_NODE (exp);
1473 if (IS_EMPTY_STMT (body))
1474 break;
1475 #ifdef USE_MAPPED_LOCATION
1476 input_location = EXPR_LOCATION (exp);
1477 #else
1478 input_filename = EXPR_WFL_FILENAME (exp);
1479 input_line = EXPR_WFL_LINENO (exp);
1480 #endif
1481 if (EXPR_WFL_EMIT_LINE_NOTE (exp) && input_line > 0
1482 && debug_info_level > DINFO_LEVEL_NONE)
1483 put_linenumber (input_line, state);
1484 generate_bytecode_insns (body, target, state);
1485 input_location = saved_location;
1487 break;
1488 case INTEGER_CST:
1489 if (target == IGNORE_TARGET) ; /* do nothing */
1490 else if (TREE_CODE (type) == POINTER_TYPE)
1492 gcc_assert (integer_zerop (exp));
1493 RESERVE(1);
1494 OP1 (OPCODE_aconst_null);
1495 NOTE_PUSH (1);
1497 else if (TYPE_PRECISION (type) <= 32)
1499 push_int_const (TREE_INT_CST_LOW (exp), state);
1500 NOTE_PUSH (1);
1502 else
1504 push_long_const (TREE_INT_CST_LOW (exp), TREE_INT_CST_HIGH (exp),
1505 state);
1506 NOTE_PUSH (2);
1508 break;
1509 case REAL_CST:
1511 int prec = TYPE_PRECISION (type) >> 5;
1512 RESERVE(1);
1513 if (real_zerop (exp) && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (exp)))
1514 OP1 (prec == 1 ? OPCODE_fconst_0 : OPCODE_dconst_0);
1515 else if (real_onep (exp))
1516 OP1 (prec == 1 ? OPCODE_fconst_1 : OPCODE_dconst_1);
1517 else if (prec == 1 && real_twop (exp))
1518 OP1 (OPCODE_fconst_2);
1519 /* ??? We could also use iconst_3/ldc followed by i2f/i2d
1520 for other float/double when the value is a small integer. */
1521 else
1523 offset = find_constant_index (exp, state);
1524 if (prec == 1)
1525 push_constant1 (offset, state);
1526 else
1527 push_constant2 (offset, state);
1529 NOTE_PUSH (prec);
1531 break;
1532 case STRING_CST:
1533 push_constant1 (find_string_constant (&state->cpool, exp), state);
1534 NOTE_PUSH (1);
1535 break;
1536 case VAR_DECL:
1537 if (TREE_STATIC (exp))
1539 field_op (exp, OPCODE_getstatic, state);
1540 NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (exp)) ? 2 : 1);
1541 break;
1543 /* ... fall through ... */
1544 case PARM_DECL:
1545 emit_load (exp, state);
1546 break;
1547 case NON_LVALUE_EXPR:
1548 case INDIRECT_REF:
1549 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
1550 break;
1551 case ARRAY_REF:
1552 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
1553 generate_bytecode_insns (TREE_OPERAND (exp, 1), target, state);
1554 if (target != IGNORE_TARGET)
1556 jopcode = OPCODE_iaload + adjust_typed_op (type, 7);
1557 RESERVE(1);
1558 OP1 (jopcode);
1559 if (! TYPE_IS_WIDE (type))
1560 NOTE_POP (1);
1562 break;
1563 case COMPONENT_REF:
1565 tree obj = TREE_OPERAND (exp, 0);
1566 tree field = TREE_OPERAND (exp, 1);
1567 int is_static = FIELD_STATIC (field);
1568 generate_bytecode_insns (obj,
1569 is_static ? IGNORE_TARGET : target, state);
1570 if (target != IGNORE_TARGET)
1572 if (DECL_NAME (field) == length_identifier_node && !is_static
1573 && TYPE_ARRAY_P (TREE_TYPE (obj)))
1575 RESERVE (1);
1576 OP1 (OPCODE_arraylength);
1578 else
1580 field_op (field, is_static ? OPCODE_getstatic : OPCODE_getfield,
1581 state);
1582 if (! is_static)
1583 NOTE_POP (1);
1584 NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (field)) ? 2 : 1);
1588 break;
1589 case TRUTH_ANDIF_EXPR:
1590 case TRUTH_ORIF_EXPR:
1591 case EQ_EXPR:
1592 case NE_EXPR:
1593 case GT_EXPR:
1594 case LT_EXPR:
1595 case GE_EXPR:
1596 case LE_EXPR:
1597 case UNLT_EXPR:
1598 case UNLE_EXPR:
1599 case UNGT_EXPR:
1600 case UNGE_EXPR:
1601 case UNEQ_EXPR:
1602 case LTGT_EXPR:
1604 struct jcf_block *then_label = gen_jcf_label (state);
1605 struct jcf_block *else_label = gen_jcf_label (state);
1606 struct jcf_block *end_label = gen_jcf_label (state);
1607 generate_bytecode_conditional (exp,
1608 then_label, else_label, 1, state);
1609 define_jcf_label (then_label, state);
1610 push_int_const (1, state);
1611 emit_goto (end_label, state);
1612 define_jcf_label (else_label, state);
1613 push_int_const (0, state);
1614 define_jcf_label (end_label, state);
1615 NOTE_PUSH (1);
1617 break;
1618 case COND_EXPR:
1620 struct jcf_block *then_label = gen_jcf_label (state);
1621 struct jcf_block *else_label = gen_jcf_label (state);
1622 struct jcf_block *end_label = gen_jcf_label (state);
1623 generate_bytecode_conditional (TREE_OPERAND (exp, 0),
1624 then_label, else_label, 1, state);
1625 define_jcf_label (then_label, state);
1626 generate_bytecode_insns (TREE_OPERAND (exp, 1), target, state);
1627 if (CAN_COMPLETE_NORMALLY (TREE_OPERAND (exp, 1))
1628 /* Not all expressions have CAN_COMPLETE_NORMALLY set properly. */
1629 || TREE_CODE (TREE_TYPE (exp)) != VOID_TYPE)
1630 emit_goto (end_label, state);
1631 define_jcf_label (else_label, state);
1632 generate_bytecode_insns (TREE_OPERAND (exp, 2), target, state);
1633 define_jcf_label (end_label, state);
1634 /* COND_EXPR can be used in a binop. The stack must be adjusted. */
1635 if (TREE_TYPE (exp) != void_type_node)
1636 NOTE_POP (TYPE_IS_WIDE (TREE_TYPE (exp)) ? 2 : 1);
1638 break;
1639 case CASE_EXPR:
1641 struct jcf_switch_state *sw_state = state->sw_state;
1642 struct jcf_relocation *reloc
1643 = obstack_alloc (state->chunk_obstack, sizeof (struct jcf_relocation));
1644 HOST_WIDE_INT case_value = TREE_INT_CST_LOW (TREE_OPERAND (exp, 0));
1645 reloc->kind = 0;
1646 reloc->label = get_jcf_label_here (state);
1647 reloc->offset = case_value;
1648 reloc->next = sw_state->cases;
1649 sw_state->cases = reloc;
1650 if (sw_state->num_cases == 0)
1652 sw_state->min_case = case_value;
1653 sw_state->max_case = case_value;
1655 else
1657 if (case_value < sw_state->min_case)
1658 sw_state->min_case = case_value;
1659 if (case_value > sw_state->max_case)
1660 sw_state->max_case = case_value;
1662 sw_state->num_cases++;
1664 break;
1665 case DEFAULT_EXPR:
1666 state->sw_state->default_label = get_jcf_label_here (state);
1667 break;
1669 case SWITCH_EXPR:
1671 /* The SWITCH_EXPR has three parts, generated in the following order:
1672 1. the switch_expression (the value used to select the correct case);
1673 2. the switch_body;
1674 3. the switch_instruction (the tableswitch/lookupswitch instruction).
1675 After code generation, we will re-order them in the order 1, 3, 2.
1676 This is to avoid any extra GOTOs. */
1677 struct jcf_switch_state sw_state;
1678 struct jcf_block *expression_last; /* Last block of the switch_expression. */
1679 struct jcf_block *body_last; /* Last block of the switch_body. */
1680 struct jcf_block *switch_instruction; /* First block of switch_instruction. */
1681 struct jcf_block *instruction_last; /* Last block of the switch_instruction. */
1682 struct jcf_block *body_block;
1683 int switch_length;
1684 sw_state.prev = state->sw_state;
1685 state->sw_state = &sw_state;
1686 sw_state.cases = NULL;
1687 sw_state.num_cases = 0;
1688 sw_state.default_label = NULL;
1689 generate_bytecode_insns (TREE_OPERAND (exp, 0), STACK_TARGET, state);
1690 expression_last = state->last_block;
1691 /* Force a new block here. */
1692 body_block = gen_jcf_label (state);
1693 define_jcf_label (body_block, state);
1694 generate_bytecode_insns (TREE_OPERAND (exp, 1), IGNORE_TARGET, state);
1695 body_last = state->last_block;
1697 switch_instruction = gen_jcf_label (state);
1698 define_jcf_label (switch_instruction, state);
1699 if (sw_state.default_label == NULL)
1700 sw_state.default_label = gen_jcf_label (state);
1702 if (sw_state.num_cases <= 1)
1704 if (sw_state.num_cases == 0)
1706 emit_pop (1, state);
1707 NOTE_POP (1);
1709 else
1711 push_int_const (sw_state.cases->offset, state);
1712 NOTE_PUSH (1);
1713 emit_if (sw_state.cases->label,
1714 OPCODE_if_icmpeq, OPCODE_if_icmpne, state);
1716 emit_goto (sw_state.default_label, state);
1718 else
1720 HOST_WIDE_INT i;
1721 unsigned HOST_WIDE_INT delta;
1722 /* Copy the chain of relocs into a sorted array. */
1723 struct jcf_relocation **relocs
1724 = XNEWVEC (struct jcf_relocation *, sw_state.num_cases);
1725 /* The relocs array is a buffer with a gap.
1726 The assumption is that cases will normally come in "runs". */
1727 int gap_start = 0;
1728 int gap_end = sw_state.num_cases;
1729 struct jcf_relocation *reloc;
1730 for (reloc = sw_state.cases; reloc != NULL; reloc = reloc->next)
1732 HOST_WIDE_INT case_value = reloc->offset;
1733 while (gap_end < sw_state.num_cases)
1735 struct jcf_relocation *end = relocs[gap_end];
1736 if (case_value <= end->offset)
1737 break;
1738 relocs[gap_start++] = end;
1739 gap_end++;
1741 while (gap_start > 0)
1743 struct jcf_relocation *before = relocs[gap_start-1];
1744 if (case_value >= before->offset)
1745 break;
1746 relocs[--gap_end] = before;
1747 gap_start--;
1749 relocs[gap_start++] = reloc;
1750 /* Note we don't check for duplicates. This is
1751 handled by the parser. */
1754 /* We could have DELTA < 0 if sw_state.min_case is
1755 something like Integer.MIN_VALUE. That is why delta is
1756 unsigned. */
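/* Illustrative numbers for the density test below: cases {3, 4, 5, 9}
   give delta == 6 and 2 * num_cases == 8 >= 6, so a tableswitch with
   max_case - min_case + 1 == 7 entries is used; cases {1, 1000} give
   delta == 999 > 4, so the sparser lookupswitch is chosen.  */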
1757 delta = sw_state.max_case - sw_state.min_case;
1758 if (2 * (unsigned) sw_state.num_cases >= delta)
1759 { /* Use tableswitch. */
1760 int index = 0;
1761 RESERVE (13 + 4 * (sw_state.max_case - sw_state.min_case + 1));
1762 OP1 (OPCODE_tableswitch);
1763 emit_reloc (RELOCATION_VALUE_0,
1764 SWITCH_ALIGN_RELOC, NULL, state);
1765 emit_switch_reloc (sw_state.default_label, state);
1766 OP4 (sw_state.min_case);
1767 OP4 (sw_state.max_case);
1768 for (i = sw_state.min_case; ; )
1770 reloc = relocs[index];
1771 if (i == reloc->offset)
1773 emit_case_reloc (reloc, state);
1774 if (i == sw_state.max_case)
1775 break;
1776 index++;
1778 else
1779 emit_switch_reloc (sw_state.default_label, state);
1780 i++;
1783 else
1784 { /* Use lookupswitch. */
1785 RESERVE(9 + 8 * sw_state.num_cases);
1786 OP1 (OPCODE_lookupswitch);
1787 emit_reloc (RELOCATION_VALUE_0,
1788 SWITCH_ALIGN_RELOC, NULL, state);
1789 emit_switch_reloc (sw_state.default_label, state);
1790 OP4 (sw_state.num_cases);
1791 for (i = 0; i < sw_state.num_cases; i++)
1793 struct jcf_relocation *reloc = relocs[i];
1794 OP4 (reloc->offset);
1795 emit_case_reloc (reloc, state);
1798 free (relocs);
1801 instruction_last = state->last_block;
1802 if (sw_state.default_label->pc < 0)
1803 define_jcf_label (sw_state.default_label, state);
1804 else /* Force a new block. */
1805 sw_state.default_label = get_jcf_label_here (state);
1806 /* Now re-arrange the blocks so the switch_instruction
1807 comes before the switch_body. */
1808 switch_length = state->code_length - switch_instruction->pc;
1809 switch_instruction->pc = body_block->pc;
1810 instruction_last->next = body_block;
1811 instruction_last->v.chunk->next = body_block->v.chunk;
1812 expression_last->next = switch_instruction;
1813 expression_last->v.chunk->next = switch_instruction->v.chunk;
1814 body_last->next = sw_state.default_label;
1815 body_last->v.chunk->next = NULL;
1816 state->chunk = body_last->v.chunk;
1817 for (; body_block != sw_state.default_label; body_block = body_block->next)
1818 body_block->pc += switch_length;
1820 state->sw_state = sw_state.prev;
1821 break;
1824 case RETURN_EXPR:
1825 exp = TREE_OPERAND (exp, 0);
1826 if (exp == NULL_TREE)
1827 exp = build_java_empty_stmt ();
1828 else if (TREE_CODE (exp) != MODIFY_EXPR)
1829 gcc_unreachable ();
1830 else
1831 exp = TREE_OPERAND (exp, 1);
1832 generate_bytecode_return (exp, state);
1833 break;
1834 case LABELED_BLOCK_EXPR:
1836 struct jcf_block *end_label = gen_jcf_label (state);
1837 end_label->next = state->labeled_blocks;
1838 state->labeled_blocks = end_label;
1839 end_label->pc = PENDING_EXIT_PC;
1840 end_label->u.labeled_block = exp;
1841 if (LABELED_BLOCK_BODY (exp))
1842 generate_bytecode_insns (LABELED_BLOCK_BODY (exp), target, state);
1843 gcc_assert (state->labeled_blocks == end_label);
1844 state->labeled_blocks = end_label->next;
1845 define_jcf_label (end_label, state);
1847 break;
1848 case LOOP_EXPR:
1850 tree body = TREE_OPERAND (exp, 0);
1851 #if 0
1852 if (TREE_CODE (body) == COMPOUND_EXPR
1853 && TREE_CODE (TREE_OPERAND (body, 0)) == EXIT_EXPR)
1855 /* Optimize: H: if (TEST) GOTO L; BODY; GOTO H; L:
1856 to: GOTO L; BODY; L: if (!TEST) GOTO L; */
1857 struct jcf_block *head_label;
1858 struct jcf_block *body_label;
1859 struct jcf_block *end_label = gen_jcf_label (state);
1860 struct jcf_block *exit_label = state->labeled_blocks;
1861 head_label = gen_jcf_label (state);
1862 emit_goto (head_label, state);
1863 body_label = get_jcf_label_here (state);
1864 generate_bytecode_insns (TREE_OPERAND (body, 1), target, state);
1865 define_jcf_label (head_label, state);
1866 generate_bytecode_conditional (TREE_OPERAND (body, 0),
1867 end_label, body_label, 1, state);
1868 define_jcf_label (end_label, state);
1870 else
1871 #endif
1873 struct jcf_block *head_label = get_jcf_label_here (state);
1874 generate_bytecode_insns (body, IGNORE_TARGET, state);
1875 if (CAN_COMPLETE_NORMALLY (body))
1876 emit_goto (head_label, state);
1879 break;
1880 case EXIT_EXPR:
1882 struct jcf_block *label = state->labeled_blocks;
1883 struct jcf_block *end_label = gen_jcf_label (state);
1884 generate_bytecode_conditional (TREE_OPERAND (exp, 0),
1885 label, end_label, 0, state);
1886 define_jcf_label (end_label, state);
1888 break;
1889 case EXIT_BLOCK_EXPR:
1891 struct jcf_block *label = state->labeled_blocks;
1892 while (label->u.labeled_block != EXIT_BLOCK_LABELED_BLOCK (exp))
1893 label = label->next;
1894 call_cleanups (label, state);
1895 emit_goto (label, state);
1897 break;
1899 case PREDECREMENT_EXPR: value = -1; post_op = 0; goto increment;
1900 case PREINCREMENT_EXPR: value = 1; post_op = 0; goto increment;
1901 case POSTDECREMENT_EXPR: value = -1; post_op = 1; goto increment;
1902 case POSTINCREMENT_EXPR: value = 1; post_op = 1; goto increment;
1903 increment:
1905 arg = TREE_OPERAND (exp, 1);
1906 exp = TREE_OPERAND (exp, 0);
1907 type = TREE_TYPE (exp);
1908 size = TYPE_IS_WIDE (type) ? 2 : 1;
1909 if ((TREE_CODE (exp) == VAR_DECL || TREE_CODE (exp) == PARM_DECL)
1910 && ! TREE_STATIC (exp)
1911 && TREE_CODE (type) == INTEGER_TYPE
1912 && TYPE_PRECISION (type) == 32)
1914 if (target != IGNORE_TARGET && post_op)
1915 emit_load (exp, state);
1916 emit_iinc (exp, value, state);
1917 if (target != IGNORE_TARGET && ! post_op)
1918 emit_load (exp, state);
1919 break;
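/* General case: load the current value (duplicating any object or
   array references needed for the later store), apply the increment,
   and store the result back.  The dups below keep a copy of the old
   or new value on the stack when a result is wanted.  */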
1921 if (TREE_CODE (exp) == COMPONENT_REF)
1923 generate_bytecode_insns (TREE_OPERAND (exp, 0), STACK_TARGET, state);
1924 emit_dup (1, 0, state);
1925 /* Stack: ..., objectref, objectref. */
1926 field_op (TREE_OPERAND (exp, 1), OPCODE_getfield, state);
1927 NOTE_PUSH (size-1);
1928 /* Stack: ..., objectref, oldvalue. */
1929 offset = 1;
1931 else if (TREE_CODE (exp) == ARRAY_REF)
1933 generate_bytecode_insns (TREE_OPERAND (exp, 0), STACK_TARGET, state);
1934 generate_bytecode_insns (TREE_OPERAND (exp, 1), STACK_TARGET, state);
1935 emit_dup (2, 0, state);
1936 /* Stack: ..., array, index, array, index. */
1937 jopcode = OPCODE_iaload + adjust_typed_op (TREE_TYPE (exp), 7);
1938 RESERVE(1);
1939 OP1 (jopcode);
1940 NOTE_POP (2-size);
1941 /* Stack: ..., array, index, oldvalue. */
1942 offset = 2;
1944 else if (TREE_CODE (exp) == VAR_DECL || TREE_CODE (exp) == PARM_DECL)
1946 generate_bytecode_insns (exp, STACK_TARGET, state);
1947 /* Stack: ..., oldvalue. */
1948 offset = 0;
1950 else
1951 gcc_unreachable ();
1953 if (target != IGNORE_TARGET && post_op)
1954 emit_dup (size, offset, state);
1955 /* Stack, if ARRAY_REF: ..., [result, ] array, index, oldvalue. */
1956 /* Stack, if COMPONENT_REF: ..., [result, ] objectref, oldvalue. */
1957 /* Stack, otherwise: ..., [result, ] oldvalue. */
1958 generate_bytecode_insns (arg, STACK_TARGET, state);
1959 emit_binop ((value >= 0 ? OPCODE_iadd : OPCODE_isub)
1960 + adjust_typed_op (type, 3),
1961 type, state);
1962 if (target != IGNORE_TARGET && ! post_op)
1963 emit_dup (size, offset, state);
1964 /* Stack, if ARRAY_REF: ..., [result, ] array, index, newvalue. */
1965 /* Stack, if COMPONENT_REF: ..., [result, ] objectref, newvalue. */
1966 /* Stack, otherwise: ..., [result, ] newvalue. */
1967 goto finish_assignment;
1969 case MODIFY_EXPR:
1971 tree lhs = TREE_OPERAND (exp, 0);
1972 tree rhs = TREE_OPERAND (exp, 1);
1973 int offset = 0;
1975 /* See if we can use the iinc instruction. */
1976 if ((TREE_CODE (lhs) == VAR_DECL || TREE_CODE (lhs) == PARM_DECL)
1977 && ! TREE_STATIC (lhs)
1978 && TREE_CODE (TREE_TYPE (lhs)) == INTEGER_TYPE
1979 && TYPE_PRECISION (TREE_TYPE (lhs)) == 32
1980 && (TREE_CODE (rhs) == PLUS_EXPR || TREE_CODE (rhs) == MINUS_EXPR))
1982 tree arg0 = TREE_OPERAND (rhs, 0);
1983 tree arg1 = TREE_OPERAND (rhs, 1);
1984 HOST_WIDE_INT min_value = -32768;
1985 HOST_WIDE_INT max_value = 32767;
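/* The iinc immediate is a signed 16-bit constant.  For a subtraction
   the constant is negated before being emitted, so the usable range
   shifts from [-32768, 32767] to [-32767, 32768].  */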
1986 if (TREE_CODE (rhs) == MINUS_EXPR)
1988 min_value++;
1989 max_value++;
1991 else if (arg1 == lhs)
1993 arg0 = arg1;
1994 arg1 = TREE_OPERAND (rhs, 0);
1996 if (lhs == arg0 && TREE_CODE (arg1) == INTEGER_CST)
1998 HOST_WIDE_INT hi_value = TREE_INT_CST_HIGH (arg1);
1999 value = TREE_INT_CST_LOW (arg1);
2000 if ((hi_value == 0 && value <= max_value)
2001 || (hi_value == -1 && value >= min_value))
2003 if (TREE_CODE (rhs) == MINUS_EXPR)
2004 value = -value;
2005 emit_iinc (lhs, value, state);
2006 if (target != IGNORE_TARGET)
2007 emit_load (lhs, state);
2008 break;
2013 if (TREE_CODE (lhs) == COMPONENT_REF)
2015 generate_bytecode_insns (TREE_OPERAND (lhs, 0),
2016 STACK_TARGET, state);
2017 offset = 1;
2019 else if (TREE_CODE (lhs) == ARRAY_REF)
2021 generate_bytecode_insns (TREE_OPERAND (lhs, 0),
2022 STACK_TARGET, state);
2023 generate_bytecode_insns (TREE_OPERAND (lhs, 1),
2024 STACK_TARGET, state);
2025 offset = 2;
2027 else
2028 offset = 0;
2030 /* If the rhs is a binary expression whose left operand is
2031 identical (pointer-equal) to the lhs, then we have an OP= expression.
2032 In this case we must do some special processing. */
2033 if (BINARY_CLASS_P (rhs) && lhs == TREE_OPERAND (rhs, 0))
2035 if (TREE_CODE (lhs) == COMPONENT_REF)
2037 tree field = TREE_OPERAND (lhs, 1);
2038 if (! FIELD_STATIC (field))
2040 /* Duplicate the object reference so we can get
2041 the field. */
2042 emit_dup (TYPE_IS_WIDE (field) ? 2 : 1, 0, state);
2043 NOTE_POP (1);
2045 field_op (field, (FIELD_STATIC (field)
2046 ? OPCODE_getstatic
2047 : OPCODE_getfield),
2048 state);
2050 NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (field)) ? 2 : 1);
2052 else if (TREE_CODE (lhs) == VAR_DECL
2053 || TREE_CODE (lhs) == PARM_DECL)
2055 if (FIELD_STATIC (lhs))
2057 field_op (lhs, OPCODE_getstatic, state);
2058 NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (lhs)) ? 2 : 1);
2060 else
2061 emit_load (lhs, state);
2063 else if (TREE_CODE (lhs) == ARRAY_REF)
2065 /* Duplicate the array and index, which are on the
2066 stack, so that we can load the old value. */
2067 emit_dup (2, 0, state);
2068 NOTE_POP (2);
2069 jopcode = OPCODE_iaload + adjust_typed_op (TREE_TYPE (lhs), 7);
2070 RESERVE (1);
2071 OP1 (jopcode);
2072 NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (lhs)) ? 2 : 1);
2074 else
2075 gcc_unreachable ();
2077 /* This function correctly handles the case where the LHS
2078 of a binary expression is NULL_TREE. */
2079 rhs = build2 (TREE_CODE (rhs), TREE_TYPE (rhs),
2080 NULL_TREE, TREE_OPERAND (rhs, 1));
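/* A NULL_TREE left operand tells the binop case below that the
   left-hand value is already on the stack.  */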
2083 generate_bytecode_insns (rhs, STACK_TARGET, state);
2084 if (target != IGNORE_TARGET)
2085 emit_dup (TYPE_IS_WIDE (type) ? 2 : 1 , offset, state);
2086 exp = lhs;
2088 /* FALLTHROUGH */
2090 finish_assignment:
2091 if (TREE_CODE (exp) == COMPONENT_REF)
2093 tree field = TREE_OPERAND (exp, 1);
2094 if (! FIELD_STATIC (field))
2095 NOTE_POP (1);
2096 field_op (field,
2097 FIELD_STATIC (field) ? OPCODE_putstatic : OPCODE_putfield,
2098 state);
2100 NOTE_POP (TYPE_IS_WIDE (TREE_TYPE (field)) ? 2 : 1);
2102 else if (TREE_CODE (exp) == VAR_DECL
2103 || TREE_CODE (exp) == PARM_DECL)
2105 if (FIELD_STATIC (exp))
2107 field_op (exp, OPCODE_putstatic, state);
2108 NOTE_POP (TYPE_IS_WIDE (TREE_TYPE (exp)) ? 2 : 1);
2110 else
2111 emit_store (exp, state);
2113 else if (TREE_CODE (exp) == ARRAY_REF)
2115 jopcode = OPCODE_iastore + adjust_typed_op (TREE_TYPE (exp), 7);
2116 RESERVE (1);
2117 OP1 (jopcode);
2118 NOTE_POP (TYPE_IS_WIDE (TREE_TYPE (exp)) ? 4 : 3);
2120 else
2121 gcc_unreachable ();
2122 break;
2123 case PLUS_EXPR:
2124 jopcode = OPCODE_iadd;
2125 goto binop;
2126 case MINUS_EXPR:
2127 jopcode = OPCODE_isub;
2128 goto binop;
2129 case MULT_EXPR:
2130 jopcode = OPCODE_imul;
2131 goto binop;
2132 case TRUNC_DIV_EXPR:
2133 case RDIV_EXPR:
2134 jopcode = OPCODE_idiv;
2135 goto binop;
2136 case TRUNC_MOD_EXPR:
2137 jopcode = OPCODE_irem;
2138 goto binop;
2139 case LSHIFT_EXPR: jopcode = OPCODE_ishl; goto binop;
2140 case RSHIFT_EXPR:
2141 jopcode = TYPE_UNSIGNED (type) ? OPCODE_iushr : OPCODE_ishr;
2142 goto binop;
2143 case URSHIFT_EXPR: jopcode = OPCODE_iushr; goto binop;
2144 case TRUTH_AND_EXPR:
2145 case BIT_AND_EXPR: jopcode = OPCODE_iand; goto binop;
2146 case TRUTH_OR_EXPR:
2147 case BIT_IOR_EXPR: jopcode = OPCODE_ior; goto binop;
2148 case TRUTH_XOR_EXPR:
2149 case BIT_XOR_EXPR: jopcode = OPCODE_ixor; goto binop;
2150 binop:
2152 tree arg0 = TREE_OPERAND (exp, 0);
2153 tree arg1 = TREE_OPERAND (exp, 1);
2154 jopcode += adjust_typed_op (type, 3);
2155 if (arg0 != NULL_TREE && operand_equal_p (arg0, arg1, 0))
2157 /* fold may (e.g.) convert 2*x to x+x. */
2158 generate_bytecode_insns (arg0, target, state);
2159 emit_dup (TYPE_PRECISION (TREE_TYPE (arg0)) > 32 ? 2 : 1, 0, state);
2161 else
2163 /* ARG0 will be NULL_TREE if we're handling an `OP='
2164 expression. In this case the stack already holds the
2165 LHS. See the MODIFY_EXPR case. */
2166 if (arg0 != NULL_TREE)
2167 generate_bytecode_insns (arg0, target, state);
2168 if (jopcode >= OPCODE_lshl && jopcode <= OPCODE_lushr)
2169 arg1 = convert (int_type_node, arg1);
2170 generate_bytecode_insns (arg1, target, state);
2172 /* For most binary operations, both operands and the result have the
2173 same type. Shift operations are different. Using arg1's type
2174 gets us the correct SP adjustment in all cases. */
2175 if (target == STACK_TARGET)
2176 emit_binop (jopcode, TREE_TYPE (arg1), state);
2177 break;
2179 case TRUTH_NOT_EXPR:
2180 case BIT_NOT_EXPR:
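/* The JVM has no `not' instruction; emit `x ^ -1' for bitwise not
   and `x ^ 1' for boolean not.  */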
2181 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
2182 if (target == STACK_TARGET)
2184 int is_long = TYPE_PRECISION (TREE_TYPE (exp)) > 32;
2185 push_int_const (TREE_CODE (exp) == BIT_NOT_EXPR ? -1 : 1, state);
2186 RESERVE (2);
2187 if (is_long)
2188 OP1 (OPCODE_i2l);
2189 NOTE_PUSH (1 + is_long);
2190 OP1 (OPCODE_ixor + is_long);
2191 NOTE_POP (1 + is_long);
2193 break;
2194 case NEGATE_EXPR:
2195 jopcode = OPCODE_ineg;
2196 jopcode += adjust_typed_op (type, 3);
2197 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
2198 if (target == STACK_TARGET)
2199 emit_unop (jopcode, type, state);
2200 break;
2201 case INSTANCEOF_EXPR:
2203 int index = find_class_constant (&state->cpool, TREE_OPERAND (exp, 1));
2204 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
2205 RESERVE (3);
2206 OP1 (OPCODE_instanceof);
2207 OP2 (index);
2209 break;
2210 case SAVE_EXPR:
2211 /* The first time through, the argument of the SAVE_EXPR will be
2212 something complex. Evaluate it, and replace the argument with
2213 a VAR_DECL that holds the result. */
2214 arg = TREE_OPERAND (exp, 0);
2215 if (TREE_CODE (arg) != VAR_DECL || DECL_NAME (arg))
2217 tree type = TREE_TYPE (exp);
2218 tree decl = build_decl (VAR_DECL, NULL_TREE, type);
2219 generate_bytecode_insns (arg, STACK_TARGET, state);
2220 localvar_alloc (decl, state);
2221 TREE_OPERAND (exp, 0) = decl;
2222 emit_dup (TYPE_IS_WIDE (type) ? 2 : 1, 0, state);
2223 emit_store (decl, state);
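/* On later evaluations ARG is already the anonymous VAR_DECL created
   above, so we just reload it.  */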
2225 else
2227 emit_load (arg, state);
2229 break;
2230 case CONVERT_EXPR:
2231 case NOP_EXPR:
2232 case FLOAT_EXPR:
2233 case FIX_TRUNC_EXPR:
2235 tree src = TREE_OPERAND (exp, 0);
2236 tree src_type = TREE_TYPE (src);
2237 tree dst_type = TREE_TYPE (exp);
2238 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
2239 if (target == IGNORE_TARGET || src_type == dst_type)
2240 break;
2241 if (TREE_CODE (dst_type) == POINTER_TYPE)
2243 if (TREE_CODE (exp) == CONVERT_EXPR)
2245 int index = find_class_constant (&state->cpool,
2246 TREE_TYPE (dst_type));
2247 RESERVE (3);
2248 OP1 (OPCODE_checkcast);
2249 OP2 (index);
2252 else /* Convert numeric types. */
2254 int src_prec = TYPE_PRECISION (src_type);
2255 int dst_prec = TYPE_PRECISION (dst_type);
2256 int wide_src = src_prec > 32;
2257 int wide_dst = dst_prec > 32;
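/* The JVM numbers its conversion opcodes consecutively (i2l, i2f,
   i2d, l2i, l2f, l2d, f2i, f2l, f2d, d2i, d2l, d2f), which is what
   makes the opcode arithmetic below work.  */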
2258 if (TREE_CODE (dst_type) == REAL_TYPE)
2260 NOTE_POP (1 + wide_src);
2261 RESERVE (1);
2262 if (TREE_CODE (src_type) == REAL_TYPE)
2263 OP1 (wide_dst ? OPCODE_f2d : OPCODE_d2f);
2264 else if (src_prec == 64)
2265 OP1 (OPCODE_l2f + wide_dst);
2266 else
2267 OP1 (OPCODE_i2f + wide_dst);
2268 NOTE_PUSH (1 + wide_dst);
2270 /* Convert to integral type (but ignore non-widening
2271 and non-narrowing integer type conversions). */
2272 else if (TREE_CODE (src_type) == REAL_TYPE
2273 || src_prec != dst_prec)
2275 NOTE_POP (1 + wide_src);
2276 RESERVE (1);
2277 if (TREE_CODE (src_type) == REAL_TYPE)
2278 OP1 (OPCODE_f2i + wide_dst + 3 * wide_src);
2279 else if (wide_dst)
2280 OP1 (OPCODE_i2l);
2281 else if (wide_src)
2282 OP1 (OPCODE_l2i);
2283 if (dst_prec < 32)
2285 RESERVE (1);
2286 /* Already converted to int, if needed. */
2287 if (dst_prec <= 8)
2288 OP1 (OPCODE_i2b);
2289 else if (TYPE_UNSIGNED (dst_type))
2290 OP1 (OPCODE_i2c);
2291 else
2292 OP1 (OPCODE_i2s);
2294 NOTE_PUSH (1 + wide_dst);
2298 break;
2300 case TRY_EXPR:
2302 tree try_clause = TREE_OPERAND (exp, 0);
2303 struct jcf_block *start_label = get_jcf_label_here (state);
2304 struct jcf_block *end_label; /* End of try clause. */
2305 struct jcf_block *finished_label = gen_jcf_label (state);
2306 tree clause = TREE_OPERAND (exp, 1);
2307 gcc_assert (target == IGNORE_TARGET);
2308 generate_bytecode_insns (try_clause, IGNORE_TARGET, state);
2309 end_label = get_jcf_label_here (state);
2310 if (end_label == start_label)
2311 break;
2312 if (CAN_COMPLETE_NORMALLY (try_clause))
2313 emit_goto (finished_label, state);
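/* Each catch clause gets an exception-table entry covering the try
   body; the clause's own bytecode, emitted next, is the handler.  A
   NULL type later becomes a type_index of 0, i.e. a catch-all.  */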
2314 while (clause != NULL_TREE)
2316 tree catch_clause = TREE_OPERAND (clause, 0);
2317 tree exception_decl = BLOCK_EXPR_DECLS (catch_clause);
2318 struct jcf_handler *handler = alloc_handler (start_label,
2319 end_label, state);
2320 if (exception_decl == NULL_TREE)
2321 handler->type = NULL_TREE;
2322 else
2323 handler->type = TREE_TYPE (TREE_TYPE (exception_decl));
2324 generate_bytecode_insns (catch_clause, IGNORE_TARGET, state);
2325 clause = TREE_CHAIN (clause);
2326 if (CAN_COMPLETE_NORMALLY (catch_clause) && clause != NULL_TREE)
2327 emit_goto (finished_label, state);
2329 define_jcf_label (finished_label, state);
2331 break;
2333 case TRY_FINALLY_EXPR:
2335 struct jcf_block *finished_label = NULL;
2336 struct jcf_block *finally_label, *start_label, *end_label;
2337 struct jcf_handler *handler;
2338 tree try_block = TREE_OPERAND (exp, 0);
2339 tree finally = TREE_OPERAND (exp, 1);
2340 tree return_link = NULL_TREE, exception_decl = NULL_TREE;
2342 tree exception_type;
2344 finally_label = gen_jcf_label (state);
2345 start_label = get_jcf_label_here (state);
2346 /* If the `finally' clause can complete normally, we emit it
2347 as a subroutine and let the other clauses call it via
2348 `jsr'. If it can't complete normally, then we simply emit
2349 `goto's directly to it. */
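/* With a subroutine the emitted shape is roughly:
     <try body>; jsr FINALLY; goto DONE
     HANDLER: astore EXC; jsr FINALLY; aload EXC; athrow
     FINALLY: astore RET_LINK; <finally body>; ret RET_LINK
     DONE:
   otherwise the jsr/ret pairs are replaced by plain gotos.  */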
2350 if (CAN_COMPLETE_NORMALLY (finally))
2352 finally_label->pc = PENDING_CLEANUP_PC;
2353 finally_label->next = state->labeled_blocks;
2354 state->labeled_blocks = finally_label;
2355 state->num_finalizers++;
2358 generate_bytecode_insns (try_block, target, state);
2360 if (CAN_COMPLETE_NORMALLY (finally))
2362 gcc_assert (state->labeled_blocks == finally_label);
2363 state->labeled_blocks = finally_label->next;
2365 end_label = get_jcf_label_here (state);
2367 if (end_label == start_label)
2369 state->num_finalizers--;
2370 define_jcf_label (finally_label, state);
2371 generate_bytecode_insns (finally, IGNORE_TARGET, state);
2372 break;
2375 if (CAN_COMPLETE_NORMALLY (finally))
2377 return_link = build_decl (VAR_DECL, NULL_TREE,
2378 return_address_type_node);
2379 finished_label = gen_jcf_label (state);
2382 if (CAN_COMPLETE_NORMALLY (try_block))
2384 if (CAN_COMPLETE_NORMALLY (finally))
2386 emit_jsr (finally_label, state);
2387 emit_goto (finished_label, state);
2389 else
2390 emit_goto (finally_label, state);
2393 /* Handle exceptions. */
2395 exception_type = build_pointer_type (throwable_type_node);
2396 if (CAN_COMPLETE_NORMALLY (finally))
2398 /* We're going to generate a subroutine, so we'll need to
2399 save and restore the exception around the `jsr'. */
2400 exception_decl = build_decl (VAR_DECL, NULL_TREE, exception_type);
2401 localvar_alloc (return_link, state);
2403 handler = alloc_handler (start_label, end_label, state);
2404 handler->type = NULL_TREE;
2405 if (CAN_COMPLETE_NORMALLY (finally))
2407 localvar_alloc (exception_decl, state);
2408 NOTE_PUSH (1);
2409 emit_store (exception_decl, state);
2410 emit_jsr (finally_label, state);
2411 emit_load (exception_decl, state);
2412 RESERVE (1);
2413 OP1 (OPCODE_athrow);
2414 NOTE_POP (1);
2416 else
2418 /* We're not generating a subroutine. In this case we can
2419 simply have the exception handler pop the exception and
2420 then fall through to the `finally' block. */
2421 NOTE_PUSH (1);
2422 emit_pop (1, state);
2423 NOTE_POP (1);
2426 /* The finally block. If we're generating a subroutine, first
2427 save return PC into return_link. Otherwise, just generate
2428 the code for the `finally' block. */
2429 define_jcf_label (finally_label, state);
2430 if (CAN_COMPLETE_NORMALLY (finally))
2432 NOTE_PUSH (1);
2433 emit_store (return_link, state);
2436 generate_bytecode_insns (finally, IGNORE_TARGET, state);
2437 if (CAN_COMPLETE_NORMALLY (finally))
2439 maybe_wide (OPCODE_ret, DECL_LOCAL_INDEX (return_link), state);
2440 maybe_free_localvar (exception_decl, state, 1);
2441 maybe_free_localvar (return_link, state, 1);
2442 define_jcf_label (finished_label, state);
2445 break;
2446 case THROW_EXPR:
2447 generate_bytecode_insns (TREE_OPERAND (exp, 0), STACK_TARGET, state);
2448 RESERVE (1);
2449 OP1 (OPCODE_athrow);
2450 break;
2451 case NEW_ARRAY_INIT:
2453 VEC(constructor_elt,gc) *v = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
2454 tree array_type = TREE_TYPE (TREE_TYPE (exp));
2455 tree element_type = TYPE_ARRAY_ELEMENT (array_type);
2456 unsigned HOST_WIDE_INT idx;
2457 tree value;
2458 HOST_WIDE_INT length = java_array_type_length (array_type);
2459 if (target == IGNORE_TARGET)
2461 FOR_EACH_CONSTRUCTOR_VALUE (v, idx, value)
2462 generate_bytecode_insns (value, target, state);
2463 break;
2465 push_int_const (length, state);
2466 NOTE_PUSH (1);
2467 RESERVE (3);
2468 if (JPRIMITIVE_TYPE_P (element_type))
2470 int atype = encode_newarray_type (element_type);
2471 OP1 (OPCODE_newarray);
2472 OP1 (atype);
2474 else
2476 int index = find_class_constant (&state->cpool,
2477 TREE_TYPE (element_type));
2478 OP1 (OPCODE_anewarray);
2479 OP2 (index);
2481 offset = 0;
2482 jopcode = OPCODE_iastore + adjust_typed_op (element_type, 7);
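/* For each element: dup the array reference, push the index, push
   the value, then emit the matching array-store opcode; code_SP is
   simply restored afterwards since the sequence nets to zero.  */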
2483 FOR_EACH_CONSTRUCTOR_VALUE (v, idx, value)
2485 int save_SP = state->code_SP;
2486 emit_dup (1, 0, state);
2487 push_int_const (offset, state);
2488 NOTE_PUSH (1);
2489 generate_bytecode_insns (value, STACK_TARGET, state);
2490 RESERVE (1);
2491 OP1 (jopcode);
2492 state->code_SP = save_SP;
2493 offset++;
2496 break;
2497 case JAVA_EXC_OBJ_EXPR:
2498 NOTE_PUSH (1); /* Pushed by exception system. */
2499 break;
2500 case MIN_EXPR:
2501 case MAX_EXPR:
2503 /* This copes with cases where fold() has created MIN or MAX
2504 from a conditional expression. */
2505 enum tree_code code = TREE_CODE (exp) == MIN_EXPR ? LT_EXPR : GT_EXPR;
2506 tree op0 = TREE_OPERAND (exp, 0);
2507 tree op1 = TREE_OPERAND (exp, 1);
2508 tree x;
2509 gcc_assert (! TREE_SIDE_EFFECTS (op0) && ! TREE_SIDE_EFFECTS (op1));
2510 x = build3 (COND_EXPR, TREE_TYPE (exp),
2511 build2 (code, boolean_type_node, op0, op1),
2512 op0, op1);
2513 generate_bytecode_insns (x, target, state);
2514 break;
2516 case NEW_CLASS_EXPR:
2518 tree class = TREE_TYPE (TREE_TYPE (exp));
2519 int need_result = target != IGNORE_TARGET;
2520 int index = find_class_constant (&state->cpool, class);
2521 RESERVE (4);
2522 OP1 (OPCODE_new);
2523 OP2 (index);
2524 if (need_result)
2525 OP1 (OPCODE_dup);
2526 NOTE_PUSH (1 + need_result);
2528 /* ... fall through ... */
2529 case CALL_EXPR:
2531 tree f = TREE_OPERAND (exp, 0);
2532 tree x = TREE_OPERAND (exp, 1);
2533 int save_SP = state->code_SP;
2534 int nargs;
2535 if (TREE_CODE (f) == ADDR_EXPR)
2536 f = TREE_OPERAND (f, 0);
2537 if (f == soft_newarray_node)
2539 int type_code = TREE_INT_CST_LOW (TREE_VALUE (x));
2540 generate_bytecode_insns (TREE_VALUE (TREE_CHAIN (x)),
2541 STACK_TARGET, state);
2542 RESERVE (2);
2543 OP1 (OPCODE_newarray);
2544 OP1 (type_code);
2545 break;
2547 else if (f == soft_multianewarray_node)
2549 int ndims;
2550 int idim;
2551 int index = find_class_constant (&state->cpool,
2552 TREE_TYPE (TREE_TYPE (exp)));
2553 x = TREE_CHAIN (x); /* Skip class argument. */
2554 ndims = TREE_INT_CST_LOW (TREE_VALUE (x));
2555 for (idim = ndims; --idim >= 0; )
2557 x = TREE_CHAIN (x);
2558 generate_bytecode_insns (TREE_VALUE (x), STACK_TARGET, state);
2560 RESERVE (4);
2561 OP1 (OPCODE_multianewarray);
2562 OP2 (index);
2563 OP1 (ndims);
2564 NOTE_POP (ndims - 1);
2565 break;
2567 else if (f == soft_anewarray_node)
2569 tree cl = TYPE_ARRAY_ELEMENT (TREE_TYPE (TREE_TYPE (exp)));
2570 int index = find_class_constant (&state->cpool, TREE_TYPE (cl));
2571 generate_bytecode_insns (TREE_VALUE (x), STACK_TARGET, state);
2572 RESERVE (3);
2573 OP1 (OPCODE_anewarray);
2574 OP2 (index);
2575 break;
2577 else if (f == soft_monitorenter_node
2578 || f == soft_monitorexit_node
2579 || f == throw_node)
2581 if (f == soft_monitorenter_node)
2582 op = OPCODE_monitorenter;
2583 else if (f == soft_monitorexit_node)
2584 op = OPCODE_monitorexit;
2585 else
2586 op = OPCODE_athrow;
2587 generate_bytecode_insns (TREE_VALUE (x), STACK_TARGET, state);
2588 RESERVE (1);
2589 OP1 (op);
2590 NOTE_POP (1);
2591 break;
2593 for ( ; x != NULL_TREE; x = TREE_CHAIN (x))
2595 generate_bytecode_insns (TREE_VALUE (x), STACK_TARGET, state);
2597 nargs = state->code_SP - save_SP;
2598 state->code_SP = save_SP;
2599 if (f == soft_fmod_node)
2601 RESERVE (1);
2602 OP1 (OPCODE_drem);
2603 NOTE_PUSH (2);
2604 break;
2606 if (TREE_CODE (exp) == NEW_CLASS_EXPR)
2607 NOTE_POP (1); /* Pop implicit this. */
2608 if (TREE_CODE (f) == FUNCTION_DECL && DECL_CONTEXT (f) != NULL_TREE)
2610 tree context = DECL_CONTEXT (f);
2611 int index, interface = 0;
2612 RESERVE (5);
2613 if (METHOD_STATIC (f))
2614 OP1 (OPCODE_invokestatic);
2615 else if (DECL_CONSTRUCTOR_P (f) || CALL_USING_SUPER (exp)
2616 || METHOD_PRIVATE (f))
2617 OP1 (OPCODE_invokespecial);
2618 else
2620 if (CLASS_INTERFACE (TYPE_NAME (context)))
2622 tree arg1 = TREE_VALUE (TREE_OPERAND (exp, 1));
2623 context = TREE_TYPE (TREE_TYPE (arg1));
2624 if (CLASS_INTERFACE (TYPE_NAME (context)))
2625 interface = 1;
2627 if (interface)
2628 OP1 (OPCODE_invokeinterface);
2629 else
2630 OP1 (OPCODE_invokevirtual);
2632 index = find_methodref_with_class_index (&state->cpool, f, context);
2633 OP2 (index);
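/* invokeinterface additionally carries an explicit count of argument
   words (including the receiver) and a mandatory zero byte.  */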
2634 if (interface)
2636 gcc_assert (nargs > 0);
2637 OP1 (nargs);
2638 OP1 (0);
2640 f = TREE_TYPE (TREE_TYPE (f));
2641 if (TREE_CODE (f) != VOID_TYPE)
2643 int size = TYPE_IS_WIDE (f) ? 2 : 1;
2644 if (target == IGNORE_TARGET)
2645 emit_pop (size, state);
2646 else
2647 NOTE_PUSH (size);
2649 break;
2652 /* fall through */
2653 default:
2654 error ("internal error in generate_bytecode_insns - tree code not implemented: %s",
2655 tree_code_name [(int) TREE_CODE (exp)]);
2659 static void
2660 perform_relocations (struct jcf_partial *state)
2662 struct jcf_block *block;
2663 struct jcf_relocation *reloc;
2664 int pc;
2665 int shrink;
2667 /* Before we start, the pc field of each block is an upper bound on
2668 the block's start pc (it may be less, if previous blocks need less
2669 than their maximum).
2671 The minimum size of each block is in the block's chunk->size. */
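/* This is done in two passes: first compute each block's final pc
   (shrinking redundant gotos, expanding branches that need 32-bit
   offsets), then rewrite each chunk's bytes and patch the branch
   offsets.  */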
2673 /* First, figure out the actual locations of each block. */
2674 pc = 0;
2675 shrink = 0;
2676 for (block = state->blocks; block != NULL; block = block->next)
2678 int block_size = block->v.chunk->size;
2680 block->pc = pc;
2682 /* Optimize GOTO L; L: by getting rid of the redundant goto.
2683 Assumes relocations are in reverse order. */
2684 reloc = block->u.relocations;
2685 while (reloc != NULL
2686 && reloc->kind == OPCODE_goto_w
2687 && reloc->label->pc == block->next->pc
2688 && reloc->offset + 2 == block_size)
2690 reloc = reloc->next;
2691 block->u.relocations = reloc;
2692 block->v.chunk->size -= 3;
2693 block_size -= 3;
2694 shrink += 3;
2697 /* Optimize GOTO L; ... L: GOTO X by changing the first goto to
2698 jump directly to X. We're careful here to avoid an infinite
2699 loop if the `goto's themselves form one. We do this
2700 optimization because we can generate a goto-to-goto for some
2701 try/finally blocks. */
2702 while (reloc != NULL
2703 && reloc->kind == OPCODE_goto_w
2704 && reloc->label != block
2705 && reloc->label->v.chunk->data != NULL
2706 && reloc->label->v.chunk->data[0] == OPCODE_goto)
2708 /* Find the reloc for the first instruction of the
2709 destination block. */
2710 struct jcf_relocation *first_reloc;
2711 for (first_reloc = reloc->label->u.relocations;
2712 first_reloc;
2713 first_reloc = first_reloc->next)
2715 if (first_reloc->offset == 1
2716 && first_reloc->kind == OPCODE_goto_w)
2718 reloc->label = first_reloc->label;
2719 break;
2723 /* If we didn't do anything, exit the loop. */
2724 if (first_reloc == NULL)
2725 break;
2728 for (reloc = block->u.relocations; reloc != NULL; reloc = reloc->next)
2730 if (reloc->kind == SWITCH_ALIGN_RELOC)
2732 /* We assume this is the first relocation in this block,
2733 so we know its final pc. */
2734 int where = pc + reloc->offset;
2735 int pad = ((where + 3) & ~3) - where;
2736 block_size += pad;
2738 else if (reloc->kind < -1 || reloc->kind > BLOCK_START_RELOC)
2740 int delta = reloc->label->pc - (pc + reloc->offset - 1);
2741 int expand = reloc->kind > 0 ? 2 : 5;
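/* A goto or jsr grows by 2 bytes when widened to goto_w/jsr_w; a
   conditional branch grows by 5, since it is rewritten as an
   inverted branch around a goto_w.  */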
2743 if (delta > 0)
2744 delta -= shrink;
2745 if (delta >= -32768 && delta <= 32767)
2747 shrink += expand;
2748 reloc->kind = -1;
2750 else
2751 block_size += expand;
2754 pc += block_size;
2757 for (block = state->blocks; block != NULL; block = block->next)
2759 struct chunk *chunk = block->v.chunk;
2760 int old_size = chunk->size;
2761 int next_pc = block->next == NULL ? pc : block->next->pc;
2762 int new_size = next_pc - block->pc;
2763 unsigned char *new_ptr;
2764 unsigned char *old_buffer = chunk->data;
2765 unsigned char *old_ptr = old_buffer + old_size;
2766 if (new_size != old_size)
2768 chunk->data = obstack_alloc (state->chunk_obstack, new_size);
2769 chunk->size = new_size;
2771 new_ptr = chunk->data + new_size;
2773 /* We do the relocations from back to front, because
2774 the relocations are in reverse order. */
2775 for (reloc = block->u.relocations; ; reloc = reloc->next)
2777 /* new_ptr and old_ptr point into the old and new buffers,
2778 respectively. (If no relocations cause the buffer to
2779 grow, the buffer will be the same buffer, and new_ptr==old_ptr.)
2780 The bytes at higher address have been copied and relocations
2781 handled; those at lower addresses remain to process. */
2783 /* Lowest old index of the piece to be copied with no relocation;
2784 equivalently, the high end of the piece that does need relocation. */
2785 int start = reloc == NULL ? 0
2786 : reloc->kind == SWITCH_ALIGN_RELOC ? reloc->offset
2787 : (reloc->kind == 0 || reloc->kind == BLOCK_START_RELOC)
2788 ? reloc->offset + 4
2789 : reloc->offset + 2;
2790 int32 value;
2791 int new_offset;
2792 int n = (old_ptr - old_buffer) - start;
2793 new_ptr -= n;
2794 old_ptr -= n;
2795 /* Don't "copy" bytes in place; this causes valgrind
2796 warnings. */
2797 if (n > 0 && new_ptr != old_ptr)
2798 memcpy (new_ptr, old_ptr, n);
2799 if (old_ptr == old_buffer)
2800 break;
2802 new_offset = new_ptr - chunk->data;
2803 new_offset -= (reloc->kind == -1 ? 2 : 4);
2804 if (reloc->kind == 0)
2806 old_ptr -= 4;
2807 value = GET_u4 (old_ptr);
2809 else if (reloc->kind == BLOCK_START_RELOC)
2811 old_ptr -= 4;
2812 value = 0;
2813 new_offset = 0;
2815 else if (reloc->kind == SWITCH_ALIGN_RELOC)
2817 int where = block->pc + reloc->offset;
2818 int pad = ((where + 3) & ~3) - where;
2819 while (--pad >= 0)
2820 *--new_ptr = 0;
2821 continue;
2823 else
2825 old_ptr -= 2;
2826 value = GET_u2 (old_ptr);
2828 value += reloc->label->pc - (block->pc + new_offset);
2829 *--new_ptr = (unsigned char) value; value >>= 8;
2830 *--new_ptr = (unsigned char) value; value >>= 8;
2831 if (reloc->kind != -1)
2833 *--new_ptr = (unsigned char) value; value >>= 8;
2834 *--new_ptr = (unsigned char) value;
2836 if (reloc->kind > BLOCK_START_RELOC)
2838 /* Convert: OP TARGET to: OP_w TARGET; (OP is goto or jsr). */
2839 --old_ptr;
2840 *--new_ptr = reloc->kind;
2842 else if (reloc->kind < -1)
2844 /* Convert: ifCOND TARGET to: ifNCOND T; goto_w TARGET; T: */
2845 --old_ptr;
2846 *--new_ptr = OPCODE_goto_w;
2847 *--new_ptr = 3;
2848 *--new_ptr = 0;
2849 *--new_ptr = - reloc->kind;
2852 gcc_assert (new_ptr == chunk->data);
2854 state->code_length = pc;
2857 static void
2858 init_jcf_state (struct jcf_partial *state, struct obstack *work)
2860 state->chunk_obstack = work;
2861 state->first = state->chunk = NULL;
2862 CPOOL_INIT (&state->cpool);
2863 BUFFER_INIT (&state->localvars);
2864 BUFFER_INIT (&state->bytecode);
2867 static void
2868 init_jcf_method (struct jcf_partial *state, tree method)
2870 state->current_method = method;
2871 state->blocks = state->last_block = NULL;
2872 state->linenumber_count = 0;
2873 state->first_lvar = state->last_lvar = NULL;
2874 state->lvar_count = 0;
2875 state->labeled_blocks = NULL;
2876 state->code_length = 0;
2877 BUFFER_RESET (&state->bytecode);
2878 BUFFER_RESET (&state->localvars);
2879 state->code_SP = 0;
2880 state->code_SP_max = 0;
2881 state->handlers = NULL;
2882 state->last_handler = NULL;
2883 state->num_handlers = 0;
2884 state->num_finalizers = 0;
2885 state->return_value_decl = NULL_TREE;
2888 static void
2889 release_jcf_state (struct jcf_partial *state)
2891 CPOOL_FINISH (&state->cpool);
2892 obstack_free (state->chunk_obstack, state->first);
2895 /* Get the access flags (modifiers) of a class (TYPE_DECL) to be used in the
2896 access_flags field of the class file header. */
2898 static int
2899 get_classfile_modifiers (tree class)
2901 /* These are the flags which are valid class file modifiers.
2902 See JVMS2 S4.1. */
2903 int valid_toplevel_class_flags = (ACC_PUBLIC | ACC_FINAL | ACC_SUPER |
2904 ACC_INTERFACE | ACC_ABSTRACT);
2905 int flags = get_access_flags (class);
2907 /* ACC_SUPER should always be set, except for interfaces. */
2908 if (! (flags & ACC_INTERFACE))
2909 flags |= ACC_SUPER;
2911 /* A protected member class becomes public at the top level. */
2912 if (flags & ACC_PROTECTED)
2913 flags |= ACC_PUBLIC;
2915 /* Filter out flags that are not valid for a class or interface in the
2916 top-level access_flags field. */
2917 flags &= valid_toplevel_class_flags;
2919 return flags;
2922 /* Get the access flags (modifiers) for a method to be used in the class
2923 file. */
2925 static int
2926 get_method_access_flags (tree decl)
2928 int flags = get_access_flags (decl);
2930 /* Promote "private" inner-class constructors to package-private. */
2931 if (DECL_CONSTRUCTOR_P (decl)
2932 && INNER_CLASS_DECL_P (TYPE_NAME (DECL_CONTEXT (decl))))
2933 flags &= ~(ACC_PRIVATE);
2935 return flags;
2938 /* Generate and return a list of chunks containing the class CLAS
2939 in the .class file representation. The list can be written to a
2940 .class file using write_chunks. Allocate chunks from obstack WORK. */
2942 static GTY(()) tree SourceFile_node;
2943 static struct chunk *
2944 generate_classfile (tree clas, struct jcf_partial *state)
2946 struct chunk *cpool_chunk;
2947 const char *source_file, *s;
2948 unsigned char *ptr;
2949 int i;
2950 unsigned char *fields_count_ptr;
2951 int fields_count = 0;
2952 unsigned char *methods_count_ptr;
2953 int methods_count = 0;
2954 tree part;
2955 int total_supers
2956 = clas == object_type_node ? 0 : BINFO_N_BASE_BINFOS (TYPE_BINFO (clas));
2958 ptr = append_chunk (NULL, 8, state);
2959 PUT4 (0xCafeBabe); /* Magic number */
2960 PUT2 (3); /* Minor version */
2961 PUT2 (45); /* Major version */
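/* Version 45.3 is the original JDK 1.0/1.1 class file format.  */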
2963 append_chunk (NULL, 0, state);
2964 cpool_chunk = state->chunk;
2966 /* Next allocate the chunk containing access_flags through fields_count. */
2967 if (clas == object_type_node)
2968 i = 10;
2969 else
2970 i = 8 + 2 * total_supers;
2971 ptr = append_chunk (NULL, i, state);
2972 i = get_classfile_modifiers (TYPE_NAME (clas));
2973 PUT2 (i); /* access_flags */
2974 i = find_class_constant (&state->cpool, clas); PUT2 (i); /* this_class */
2975 if (clas == object_type_node)
2977 PUT2(0); /* super_class */
2978 PUT2(0); /* interfaces_count */
2980 else
2982 tree binfo = TYPE_BINFO (clas);
2983 tree base_binfo = BINFO_BASE_BINFO (binfo, 0);
2984 int j = find_class_constant (&state->cpool, BINFO_TYPE (base_binfo));
2986 PUT2 (j); /* super_class */
2987 PUT2 (total_supers - 1); /* interfaces_count */
2988 for (i = 1; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
2990 j = find_class_constant (&state->cpool, BINFO_TYPE (base_binfo));
2991 PUT2 (j);
2994 fields_count_ptr = ptr;
2996 for (part = TYPE_FIELDS (clas); part; part = TREE_CHAIN (part))
2998 int have_value, attr_count = 0;
2999 if (DECL_NAME (part) == NULL_TREE || DECL_ARTIFICIAL (part))
3000 continue;
3001 ptr = append_chunk (NULL, 8, state);
3002 i = get_access_flags (part); PUT2 (i);
3003 i = find_utf8_constant (&state->cpool, DECL_NAME (part)); PUT2 (i);
3004 i = find_utf8_constant (&state->cpool,
3005 build_java_signature (TREE_TYPE (part)));
3006 PUT2(i);
3007 have_value = DECL_INITIAL (part) != NULL_TREE
3008 && FIELD_STATIC (part) && CONSTANT_VALUE_P (DECL_INITIAL (part))
3009 && FIELD_FINAL (part)
3010 && (JPRIMITIVE_TYPE_P (TREE_TYPE (part))
3011 || TREE_TYPE (part) == string_ptr_type_node);
3012 if (have_value)
3013 attr_count++;
3015 if (FIELD_THISN (part) || FIELD_LOCAL_ALIAS (part)
3016 || FIELD_SYNTHETIC (part))
3017 attr_count++;
3018 if (FIELD_DEPRECATED (part))
3019 attr_count++;
3021 PUT2 (attr_count); /* attributes_count */
3022 if (have_value)
3024 tree init = DECL_INITIAL (part);
3025 static tree ConstantValue_node = NULL_TREE;
3026 if (TREE_TYPE (part) != TREE_TYPE (init))
3027 fatal_error ("field initializer type mismatch");
3028 ptr = append_chunk (NULL, 8, state);
3029 if (ConstantValue_node == NULL_TREE)
3030 ConstantValue_node = get_identifier ("ConstantValue");
3031 i = find_utf8_constant (&state->cpool, ConstantValue_node);
3032 PUT2 (i); /* attribute_name_index */
3033 PUT4 (2); /* attribute_length */
3034 i = find_constant_index (init, state); PUT2 (i);
3036 /* Emit the "Synthetic" attribute for val$<x> and this$<n>
3037 fields and other fields which need it. */
3038 if (FIELD_THISN (part) || FIELD_LOCAL_ALIAS (part)
3039 || FIELD_SYNTHETIC (part))
3040 ptr = append_synthetic_attribute (state);
3041 if (FIELD_DEPRECATED (part))
3042 append_deprecated_attribute (state);
3043 fields_count++;
3045 ptr = fields_count_ptr; UNSAFE_PUT2 (fields_count);
3047 ptr = methods_count_ptr = append_chunk (NULL, 2, state);
3048 PUT2 (0);
3050 for (part = TYPE_METHODS (clas); part; part = TREE_CHAIN (part))
3052 struct jcf_block *block;
3053 tree function_body = DECL_FUNCTION_BODY (part);
3054 tree body = function_body == NULL_TREE ? NULL_TREE
3055 : BLOCK_EXPR_BODY (function_body);
3056 tree name = DECL_CONSTRUCTOR_P (part) ? init_identifier_node
3057 : DECL_NAME (part);
3058 tree type = TREE_TYPE (part);
3059 tree save_function = current_function_decl;
3060 int synthetic_p = 0;
3062 /* Invisible Miranda methods shouldn't end up in the .class
3063 file. */
3064 if (METHOD_INVISIBLE (part))
3065 continue;
3067 current_function_decl = part;
3068 ptr = append_chunk (NULL, 8, state);
3069 i = get_method_access_flags (part); PUT2 (i);
3070 i = find_utf8_constant (&state->cpool, name); PUT2 (i);
3071 i = find_utf8_constant (&state->cpool, build_java_signature (type));
3072 PUT2 (i);
3073 i = (body != NULL_TREE) + (DECL_FUNCTION_THROWS (part) != NULL_TREE);
3075 /* Make room for the Synthetic attribute (of zero length.) */
3076 if (DECL_FINIT_P (part)
3077 || DECL_INSTINIT_P (part)
3078 || NESTED_FIELD_ACCESS_IDENTIFIER_P (DECL_NAME (part))
3079 || TYPE_DOT_CLASS (clas) == part)
3081 i++;
3082 synthetic_p = 1;
3084 /* Make room for Deprecated attribute. */
3085 if (METHOD_DEPRECATED (part))
3086 i++;
3088 PUT2 (i); /* attributes_count */
3090 if (synthetic_p)
3091 ptr = append_synthetic_attribute (state);
3093 if (body != NULL_TREE)
3095 int code_attributes_count = 0;
3096 static tree Code_node = NULL_TREE;
3097 tree t;
3098 unsigned char *attr_len_ptr;
3099 struct jcf_handler *handler;
3100 if (Code_node == NULL_TREE)
3101 Code_node = get_identifier ("Code");
3102 ptr = append_chunk (NULL, 14, state);
3103 i = find_utf8_constant (&state->cpool, Code_node); PUT2 (i);
3104 attr_len_ptr = ptr;
3105 init_jcf_method (state, part);
3106 get_jcf_label_here (state); /* Force a first block. */
3107 for (t = DECL_ARGUMENTS (part); t != NULL_TREE; t = TREE_CHAIN (t))
3108 localvar_alloc (t, state);
3109 state->num_jsrs = 0;
3110 generate_bytecode_insns (body, IGNORE_TARGET, state);
3111 if (CAN_COMPLETE_NORMALLY (body))
3113 gcc_assert (TREE_CODE (TREE_TYPE (type)) == VOID_TYPE);
3114 RESERVE (1);
3115 OP1 (OPCODE_return);
3117 for (t = DECL_ARGUMENTS (part); t != NULL_TREE; t = TREE_CHAIN (t))
3118 maybe_free_localvar (t, state, 1);
3119 if (state->return_value_decl != NULL_TREE)
3120 maybe_free_localvar (state->return_value_decl, state, 1);
3121 finish_jcf_block (state);
3122 perform_relocations (state);
3124 ptr = attr_len_ptr;
3125 i = 8 + state->code_length + 4 + 8 * state->num_handlers;
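/* attribute_length = max_stack (2) + max_locals (2) + code_length (4)
   + the code itself + exception_table_length (2) + 8 per handler
   + attributes_count (2), plus the optional LineNumberTable and
   LocalVariableTable attributes added below.  */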
3126 if (state->linenumber_count > 0)
3128 code_attributes_count++;
3129 i += 8 + 4 * state->linenumber_count;
3131 if (state->lvar_count > 0)
3133 code_attributes_count++;
3134 i += 8 + 10 * state->lvar_count;
3136 UNSAFE_PUT4 (i); /* attribute_length */
3137 UNSAFE_PUT2 (state->code_SP_max); /* max_stack */
3138 UNSAFE_PUT2 (localvar_max); /* max_locals */
3139 UNSAFE_PUT4 (state->code_length);
3141 /* Emit the exception table. */
3142 ptr = append_chunk (NULL, 2 + 8 * state->num_handlers, state);
3143 PUT2 (state->num_handlers); /* exception_table_length */
3144 handler = state->handlers;
3145 for (; handler != NULL; handler = handler->next)
3147 int type_index;
3148 PUT2 (handler->start_label->pc);
3149 PUT2 (handler->end_label->pc);
3150 PUT2 (handler->handler_label->pc);
3151 if (handler->type == NULL_TREE)
3152 type_index = 0;
3153 else
3154 type_index = find_class_constant (&state->cpool,
3155 handler->type);
3156 PUT2 (type_index);
3159 ptr = append_chunk (NULL, 2, state);
3160 PUT2 (code_attributes_count);
3162 /* Write the LineNumberTable attribute. */
3163 if (state->linenumber_count > 0)
3165 static tree LineNumberTable_node = NULL_TREE;
3166 ptr = append_chunk (NULL,
3167 8 + 4 * state->linenumber_count, state);
3168 if (LineNumberTable_node == NULL_TREE)
3169 LineNumberTable_node = get_identifier ("LineNumberTable");
3170 i = find_utf8_constant (&state->cpool, LineNumberTable_node);
3171 PUT2 (i); /* attribute_name_index */
3172 i = 2+4*state->linenumber_count; PUT4(i); /* attribute_length */
3173 i = state->linenumber_count; PUT2 (i);
3174 for (block = state->blocks; block != NULL; block = block->next)
3176 int line = block->linenumber;
3177 if (line > 0)
3179 PUT2 (block->pc);
3180 PUT2 (line);
3185 /* Write the LocalVariableTable attribute. */
3186 if (state->lvar_count > 0)
3188 static tree LocalVariableTable_node = NULL_TREE;
3189 struct localvar_info *lvar = state->first_lvar;
3190 ptr = append_chunk (NULL, 8 + 10 * state->lvar_count, state);
3191 if (LocalVariableTable_node == NULL_TREE)
3192 LocalVariableTable_node = get_identifier("LocalVariableTable");
3193 i = find_utf8_constant (&state->cpool, LocalVariableTable_node);
3194 PUT2 (i); /* attribute_name_index */
3195 i = 2 + 10 * state->lvar_count; PUT4 (i); /* attribute_length */
3196 i = state->lvar_count; PUT2 (i);
3197 for ( ; lvar != NULL; lvar = lvar->next)
3199 tree name = DECL_NAME (lvar->decl);
3200 tree sig = build_java_signature (TREE_TYPE (lvar->decl));
3201 i = lvar->start_label->pc; PUT2 (i);
3202 i = lvar->end_label->pc - i; PUT2 (i);
3203 i = find_utf8_constant (&state->cpool, name); PUT2 (i);
3204 i = find_utf8_constant (&state->cpool, sig); PUT2 (i);
3205 i = DECL_LOCAL_INDEX (lvar->decl); PUT2 (i);
3209 if (DECL_FUNCTION_THROWS (part) != NULL_TREE)
3211 tree t = DECL_FUNCTION_THROWS (part);
3212 int throws_count = list_length (t);
3213 static tree Exceptions_node = NULL_TREE;
3214 if (Exceptions_node == NULL_TREE)
3215 Exceptions_node = get_identifier ("Exceptions");
3216 ptr = append_chunk (NULL, 8 + 2 * throws_count, state);
3217 i = find_utf8_constant (&state->cpool, Exceptions_node);
3218 PUT2 (i); /* attribute_name_index */
3219 i = 2 + 2 * throws_count; PUT4(i); /* attribute_length */
3220 i = throws_count; PUT2 (i);
3221 for (; t != NULL_TREE; t = TREE_CHAIN (t))
3223 i = find_class_constant (&state->cpool, TREE_VALUE (t));
3224 PUT2 (i);
3228 if (METHOD_DEPRECATED (part))
3229 append_deprecated_attribute (state);
3231 methods_count++;
3232 current_function_decl = save_function;
3234 ptr = methods_count_ptr; UNSAFE_PUT2 (methods_count);
3236 source_file = DECL_SOURCE_FILE (TYPE_NAME (clas));
3237 for (s = source_file; ; s++)
3239 char ch = *s;
3240 if (ch == '\0')
3241 break;
3242 if (ch == '/' || ch == '\\')
3243 source_file = s+1;
3245 ptr = append_chunk (NULL, 10, state);
3247 i = 1; /* Source file always exists as an attribute */
3248 if (INNER_CLASS_TYPE_P (clas) || DECL_INNER_CLASS_LIST (TYPE_NAME (clas)))
3249 i++;
3250 if (clas == object_type_node)
3251 i++;
3252 if (CLASS_DEPRECATED (TYPE_NAME (clas)))
3253 i++;
3255 PUT2 (i); /* attributes_count */
3257 /* Generate the SourceFile attribute. */
3258 if (SourceFile_node == NULL_TREE)
3260 SourceFile_node = get_identifier ("SourceFile");
3263 i = find_utf8_constant (&state->cpool, SourceFile_node);
3264 PUT2 (i); /* attribute_name_index */
3265 PUT4 (2);
3266 i = find_utf8_constant (&state->cpool, get_identifier (source_file));
3267 PUT2 (i);
3268 append_gcj_attribute (state, clas);
3269 append_innerclasses_attribute (state, clas);
3270 if (CLASS_DEPRECATED (TYPE_NAME (clas)))
3271 append_deprecated_attribute (state);
3273 /* Now, finally, generate the contents of the constant pool chunk. */
3274 i = count_constant_pool_bytes (&state->cpool);
3275 ptr = obstack_alloc (state->chunk_obstack, i);
3276 cpool_chunk->data = ptr;
3277 cpool_chunk->size = i;
3278 write_constant_pool (&state->cpool, ptr, i);
3279 return state->first;
3282 static GTY(()) tree Synthetic_node;
3283 static unsigned char *
3284 append_synthetic_attribute (struct jcf_partial *state)
3286 unsigned char *ptr = append_chunk (NULL, 6, state);
3287 int i;
3289 if (Synthetic_node == NULL_TREE)
3291 Synthetic_node = get_identifier ("Synthetic");
3293 i = find_utf8_constant (&state->cpool, Synthetic_node);
3294 PUT2 (i); /* Attribute string index */
3295 PUT4 (0); /* Attribute length */
3297 return ptr;
3300 static void
3301 append_deprecated_attribute (struct jcf_partial *state)
3303 unsigned char *ptr = append_chunk (NULL, 6, state);
3304 int i;
3306 i = find_utf8_constant (&state->cpool, get_identifier ("Deprecated"));
3307 PUT2 (i); /* Attribute string index */
3308 PUT4 (0); /* Attribute length */
3311 static void
3312 append_gcj_attribute (struct jcf_partial *state, tree class)
3314 unsigned char *ptr;
3315 int i;
3317 if (class != object_type_node)
3318 return;
3320 ptr = append_chunk (NULL, 6, state); /* 2+4 */
3321 i = find_utf8_constant (&state->cpool,
3322 get_identifier ("gnu.gcj.gcj-compiled"));
3323 PUT2 (i); /* Attribute string index */
3324 PUT4 (0); /* Attribute length */
3327 static tree InnerClasses_node;
3328 static void
3329 append_innerclasses_attribute (struct jcf_partial *state, tree class)
3331 tree orig_decl = TYPE_NAME (class);
3332 tree current, decl;
3333 int length = 0, i;
3334 unsigned char *ptr, *length_marker, *number_marker;
3336 if (!INNER_CLASS_TYPE_P (class) && !DECL_INNER_CLASS_LIST (orig_decl))
3337 return;
3339 ptr = append_chunk (NULL, 8, state); /* 2+4+2 */
3341 if (InnerClasses_node == NULL_TREE)
3343 InnerClasses_node = get_identifier ("InnerClasses");
3345 i = find_utf8_constant (&state->cpool, InnerClasses_node);
3346 PUT2 (i);
3347 length_marker = ptr; PUT4 (0); /* length, to be later patched */
3348 number_marker = ptr; PUT2 (0); /* number of classes, to be later patched */
3350 /* Generate the entries: all inner classes visible from the one we
3351 process: itself, up and down. */
3352 while (class && INNER_CLASS_TYPE_P (class))
3354 const char *n;
3356 decl = TYPE_NAME (class);
3357 n = IDENTIFIER_POINTER (DECL_NAME (decl)) +
3358 IDENTIFIER_LENGTH (DECL_NAME (decl));
3360 while (n[-1] != '$')
3361 n--;
3362 append_innerclasses_attribute_entry (state, decl, get_identifier (n));
3363 length++;
3365 class = TREE_TYPE (DECL_CONTEXT (TYPE_NAME (class)));
3368 decl = orig_decl;
3369 for (current = DECL_INNER_CLASS_LIST (decl);
3370 current; current = TREE_CHAIN (current))
3372 append_innerclasses_attribute_entry (state, TREE_PURPOSE (current),
3373 TREE_VALUE (current));
3374 length++;
3377 ptr = length_marker; PUT4 (8*length+2);
3378 ptr = number_marker; PUT2 (length);
3381 static void
3382 append_innerclasses_attribute_entry (struct jcf_partial *state,
3383 tree decl, tree name)
3385 int icii, icaf;
3386 int ocii = 0, ini = 0;
3387 unsigned char *ptr = append_chunk (NULL, 8, state);
3389 icii = find_class_constant (&state->cpool, TREE_TYPE (decl));
3391 /* Sun's implementation seems to set ocii to 0 for inner
3392 classes (which aren't considered members of the class they're
3393 in.) The specification says that if the class is anonymous,
3394 its inner_name_index must be zero. */
3395 if (!ANONYMOUS_CLASS_P (TREE_TYPE (decl)))
3397 ocii = find_class_constant (&state->cpool,
3398 TREE_TYPE (DECL_CONTEXT (decl)));
3399 ini = find_utf8_constant (&state->cpool, name);
3401 icaf = get_access_flags (decl);
3403 PUT2 (icii); PUT2 (ocii); PUT2 (ini); PUT2 (icaf);
3406 static char *
3407 make_class_file_name (tree clas)
3409 const char *dname, *cname, *slash;
3410 char *r;
3411 struct stat sb;
3412 char sep;
3414 cname = IDENTIFIER_POINTER (identifier_subst (DECL_NAME (TYPE_NAME (clas)),
3415 "", '.', DIR_SEPARATOR,
3416 ".class"));
3417 if (jcf_write_base_directory == NULL)
3419 /* Make sure we put the class file into the .java file's
3420 directory, and not into some subdirectory thereof. */
3421 char *t;
3422 dname = DECL_SOURCE_FILE (TYPE_NAME (clas));
3423 slash = strrchr (dname, DIR_SEPARATOR);
3424 #ifdef DIR_SEPARATOR_2
3425 if (! slash)
3426 slash = strrchr (dname, DIR_SEPARATOR_2);
3427 #endif
3428 if (! slash)
3430 dname = ".";
3431 slash = dname + 1;
3432 sep = DIR_SEPARATOR;
3434 else
3435 sep = *slash;
3437 t = strrchr (cname, DIR_SEPARATOR);
3438 if (t)
3439 cname = t + 1;
3441 else
3443 char *s;
3445 dname = jcf_write_base_directory;
3447 s = strrchr (dname, DIR_SEPARATOR);
3448 #ifdef DIR_SEPARATOR_2
3449 if (! s)
3450 s = strrchr (dname, DIR_SEPARATOR_2);
3451 #endif
3452 if (s)
3453 sep = *s;
3454 else
3455 sep = DIR_SEPARATOR;
3457 slash = dname + strlen (dname);
3460 r = XNEWVEC (char, slash - dname + strlen (cname) + 2);
3461 strncpy (r, dname, slash - dname);
3462 r[slash - dname] = sep;
3463 strcpy (&r[slash - dname + 1], cname);
3465 /* We try to make new directories when we need them. We only do
3466 this for directories which "might not" exist. For instance, we
3467 assume the `-d' directory exists, but we don't assume that any
3468 subdirectory below it exists. It might be worthwhile to keep
3469 track of which directories we've created to avoid gratuitous
3470 stat()s. */
3471 dname = r + (slash - dname) + 1;
3472 while (1)
3474 char *s = strchr (dname, sep);
3475 if (s == NULL)
3476 break;
3477 *s = '\0';
3478 /* Try to make directory if it doesn't already exist. */
3479 if (stat (r, &sb) == -1
3480 && mkdir (r, 0755) == -1
3481 /* The directory might have been made by another process. */
3482 && errno != EEXIST)
3483 fatal_error ("can't create directory %s: %m", r);
3485 *s = sep;
3486 /* Skip consecutive separators. */
3487 for (dname = s + 1; *dname && *dname == sep; ++dname)
3491 return r;
3494 /* Write out the contents of a class (RECORD_TYPE) CLAS, as a .class file.
3495 The output .class file name is make_class_file_name(CLAS). */
3497 void
3498 write_classfile (tree clas)
3500 struct obstack *work = &temporary_obstack;
3501 struct jcf_partial state[1];
3502 char *class_file_name = make_class_file_name (clas);
3503 struct chunk *chunks;
3505 if (class_file_name != NULL)
3507 FILE *stream;
3508 char *temporary_file_name;
3509 char pid [sizeof (long) * 2 + 2];
3511 /* The .class file is initially written to a ".PID" file so that
3512 if multiple instances of the compiler are running at once
3513 they do not see partially formed class files nor overwrite
3514 each other, which may happen in libjava with parallel builds. */
3516 sprintf (pid, ".%lx", (unsigned long) getpid ());
3517 temporary_file_name = concat (class_file_name, pid, NULL);
3518 stream = fopen (temporary_file_name, "wb");
3519 if (stream == NULL)
3520 fatal_error ("can't open %s for writing: %m", temporary_file_name);
3522 jcf_dependency_add_target (class_file_name);
3523 init_jcf_state (state, work);
3524 chunks = generate_classfile (clas, state);
3525 write_chunks (stream, chunks);
3526 if (fclose (stream))
3527 fatal_error ("error closing %s: %m", temporary_file_name);
3529 /* If a file named by the string pointed to by `new' exists
3530 prior to the call to the `rename' function, the behavior
3531 is implementation-defined. ISO 9899-1990 7.9.4.2.
3533 For example, on Win32 with MSVCRT, it is an error. */
3535 unlink (class_file_name);
3537 if (rename (temporary_file_name, class_file_name) == -1)
3539 int errno_saved = errno;
3540 remove (temporary_file_name);
3541 errno = errno_saved;
3542 fatal_error ("can't create %s: %m", class_file_name);
3544 free (temporary_file_name);
3545 free (class_file_name);
3547 release_jcf_state (state);
3550 /* TODO:
3551 string concatenation
3552 synchronized statement */
3555 #include "gt-java-jcf-write.h"