/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.  */
#include "config.h"
#include "machmode.h"
#include "rtl.h"
#include "tree.h"
#include "obstack.h"
#include "flags.h"
#include "regs.h"
#include "function.h"
#include "insn-flags.h"
#include "insn-codes.h"
#include "expr.h"
#include "insn-config.h"
#include "recog.h"
#include "output.h"
#include "typeclass.h"

#include "bytecode.h"
#include "bc-opcode.h"
#include "bc-typecd.h"
#include "bc-optab.h"
#include "bc-emit.h"
#define CEIL(x,y) (((x) + (y) - 1) / (y))
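/* For instance (note added for clarity), CEIL (7, 4) evaluates to 2:
   seven bytes need two four-byte units.  */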
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first */
#endif

#endif
#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif
/* Like STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
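/* E.g. (note added), a STACK_BOUNDARY of 32 bits with the usual
   BITS_PER_UNIT of 8 gives a STACK_BYTES of 4.  */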
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;
/* Nonzero to generate code for all the subroutines within an
   expression before generating the upper levels of the expression.
   Nowadays this is never zero.  */
int do_preexpand_calls = 1;

/* Number of units that we should eventually pop off the stack.
   These are the arguments to function calls that have already returned.  */
int pending_stack_adjust;

/* Nonzero means stack pops must not be deferred, and deferred stack
   pops must not be output.  It is nonzero inside a function call,
   inside a conditional expression, inside a statement expression,
   and in other cases as well.  */
int inhibit_defer_pop;

/* A list of all cleanups which belong to the arguments of
   function calls being expanded by expand_call.  */
tree cleanups_this_call;

/* Nonzero means __builtin_saveregs has already been done in this function.
   The value is the pseudoreg containing the value __builtin_saveregs
   returned.  */
static rtx saveregs_value;

/* Similarly for __builtin_apply_args.  */
static rtx apply_args_value;
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */

struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  int len;
  int offset;
  int reverse;
};
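/* Field notes (added; inferred from the uses in move_by_pieces and
   move_by_pieces_1 below): TO_ADDR and FROM_ADDR are the working
   address rtx's; the AUTINC_* flags record that an address is
   auto-incremented or auto-decremented; the EXPLICIT_INC_* fields are
   -1 or 1 when move_by_pieces_1 must itself emit an add before or
   after each move; REVERSE means the copy proceeds from high
   addresses down.  */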
/* Used to generate bytecodes: keep track of size of local variables,
   as well as depth of arithmetic stack.  (Notice that variables are
   stored on the machine's stack, not the arithmetic stack.)  */

extern int local_vars_size;
extern int stack_depth;
extern int max_stack_depth;
extern struct obstack permanent_obstack;
static rtx enqueue_insn PROTO((rtx, rtx));
static int queued_subexp_p PROTO((rtx));
static void init_queue PROTO((void));
static void move_by_pieces PROTO((rtx, rtx, int, int));
static int move_by_pieces_ninsns PROTO((unsigned int, int));
static void move_by_pieces_1 PROTO((rtx (*) (), enum machine_mode,
				    struct move_by_pieces *));
static void store_constructor PROTO((tree, rtx));
static rtx store_field PROTO((rtx, int, int, enum machine_mode, tree,
			      enum machine_mode, int, int, int));
static tree save_noncopied_parts PROTO((tree, tree));
static tree init_noncopied_parts PROTO((tree, tree));
static int safe_from_p PROTO((rtx, tree));
static int fixed_type_p PROTO((tree));
static int get_pointer_alignment PROTO((tree, unsigned));
static tree string_constant PROTO((tree, tree *));
static tree c_strlen PROTO((tree));
static rtx expand_builtin PROTO((tree, rtx, rtx,
				 enum machine_mode, int));
static int apply_args_size PROTO((void));
static int apply_result_size PROTO((void));
static rtx result_vector PROTO((int, rtx));
static rtx expand_builtin_apply_args PROTO((void));
static rtx expand_builtin_apply PROTO((rtx, rtx, rtx));
static void expand_builtin_return PROTO((rtx));
static rtx expand_increment PROTO((tree, int));
rtx bc_expand_increment PROTO((struct increment_operator *, tree));
tree bc_runtime_type_code PROTO((tree));
rtx bc_allocate_local PROTO((int, int));
void bc_store_memory PROTO((tree, tree));
tree bc_expand_component_address PROTO((tree));
tree bc_expand_address PROTO((tree));
void bc_expand_constructor PROTO((tree));
void bc_adjust_stack PROTO((int));
tree bc_canonicalize_array_ref PROTO((tree));
void bc_load_memory PROTO((tree, tree));
void bc_load_externaddr PROTO((rtx));
void bc_load_externaddr_id PROTO((tree, int));
void bc_load_localaddr PROTO((rtx));
void bc_load_parmaddr PROTO((rtx));
static void preexpand_calls PROTO((tree));
static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
static void do_jump_by_parts_greater_rtx PROTO((enum machine_mode, int,
						rtx, rtx, rtx, rtx));
static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
static void do_jump_by_parts_equality_rtx PROTO((rtx, rtx, rtx));
static void do_jump_for_compare PROTO((rtx, rtx, rtx));
static rtx compare PROTO((tree, enum rtx_code, enum rtx_code));
static rtx do_store_flag PROTO((tree, rtx, enum machine_mode, int));
static tree defer_cleanups_to PROTO((tree));
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];
/* MOVE_RATIO is the number of move instructions that is better than
   a block move.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* A value of around 6 would minimize code size; infinity would minimize
   execution time.  */
#define MOVE_RATIO 15
#endif
#endif
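/* Note (added): emit_block_move below uses move_by_pieces only when
   move_by_pieces_ninsns (size, align) < MOVE_RATIO; otherwise it
   tries a movstr pattern and finally falls back to a library call.  */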
/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS 0
#endif

/* Register mappings for target machines without register windows.  */
#ifndef INCOMING_REGNO
#define INCOMING_REGNO(OUT) (OUT)
#endif
#ifndef OUTGOING_REGNO
#define OUTGOING_REGNO(IN) (IN)
#endif

/* Maps used to convert modes to const, load, and store bytecodes.  */
enum bytecode_opcode mode_to_const_map[MAX_MACHINE_MODE];
enum bytecode_opcode mode_to_load_map[MAX_MACHINE_MODE];
enum bytecode_opcode mode_to_store_map[MAX_MACHINE_MODE];
/* Initialize maps used to convert modes to const, load, and store
   bytecodes.  */

void
bc_init_mode_to_opcode_maps ()
{
  int mode;

  for (mode = 0; mode < (int) MAX_MACHINE_MODE; mode++)
    mode_to_const_map[mode] =
      mode_to_load_map[mode] =
	mode_to_store_map[mode] = neverneverland;

#define DEF_MODEMAP(SYM, CODE, UCODE, CONST, LOAD, STORE) \
  mode_to_const_map[(int) SYM] = CONST; \
  mode_to_load_map[(int) SYM] = LOAD; \
  mode_to_store_map[(int) SYM] = STORE;

#include "modemap.def"
#undef DEF_MODEMAP
}
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat;
  enum machine_mode mode;
  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  rtx mem = gen_rtx (MEM, VOIDmode, stack_pointer_rtx);
  rtx mem1 = gen_rtx (MEM, VOIDmode, frame_pointer_rtx);

  start_sequence ();
  insn = emit_insn (gen_rtx (SET, 0, 0));
  pat = PATTERN (insn);

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;
      rtx reg;
      int num_clobbers;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    reg = gen_rtx (REG, mode, regno);

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  end_sequence ();
}
/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  init_queue ();

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  cleanups_this_call = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}
/* Save all variables describing the current status into the structure *P.
   This is used before starting a nested function.  */

void
save_expr_status (p)
     struct function *p;
{
  /* Instead of saving the postincrement queue, empty it.  */
  emit_queue ();

  p->pending_stack_adjust = pending_stack_adjust;
  p->inhibit_defer_pop = inhibit_defer_pop;
  p->cleanups_this_call = cleanups_this_call;
  p->saveregs_value = saveregs_value;
  p->apply_args_value = apply_args_value;
  p->forced_labels = forced_labels;

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  cleanups_this_call = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}
/* Restore all variables describing the current status from the structure *P.
   This is used after a nested function.  */

void
restore_expr_status (p)
     struct function *p;
{
  pending_stack_adjust = p->pending_stack_adjust;
  inhibit_defer_pop = p->inhibit_defer_pop;
  cleanups_this_call = p->cleanups_this_call;
  saveregs_value = p->saveregs_value;
  apply_args_value = p->apply_args_value;
  forced_labels = p->forced_labels;
}
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

static rtx pending_chain;

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx (QUEUED, GET_MODE (var),
			   var, NULL_RTX, NULL_RTX, body, pending_chain);
  return pending_chain;
}
/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */
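/* Illustrative sketch (added; not part of the original comment): a
   caller that reads OP0 and stores into TARGET would write

     op0 = protect_from_queue (op0, 0);        .. read access
     target = protect_from_queue (target, 1);  .. will be modified
     emit_move_insn (target, op0);

   flushing nothing in between, per the warning above.  */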
rtx
protect_from_queue (x, modify)
     register rtx x;
     int modify;
{
  register RTX_CODE code = GET_CODE (x);

#if 0 /* A QUEUED can hang around after the queue is forced out. */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
	 use of autoincrement.  Make a copy of the contents of the memory
	 location rather than a copy of the address, but not if the value is
	 of mode BLKmode.  Don't modify X in place since it might be
	 shared.  */
      if (code == MEM && GET_MODE (x) != BLKmode
	  && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
	{
	  register rtx y = XEXP (x, 0);
	  register rtx new = gen_rtx (MEM, GET_MODE (x), QUEUED_VAR (y));

	  MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
	  RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
	  MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);

	  if (QUEUED_INSN (y))
	    {
	      register rtx temp = gen_reg_rtx (GET_MODE (new));
	      emit_insn_before (gen_move_insn (temp, new),
				QUEUED_INSN (y));
	      return temp;
	    }
	  return new;
	}
      /* Otherwise, recursively protect the subexpressions of all
	 the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
	{
	  rtx tem = protect_from_queue (XEXP (x, 0), 0);
	  if (tem != XEXP (x, 0))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = tem;
	    }
	}
      else if (code == PLUS || code == MULT)
	{
	  rtx new0 = protect_from_queue (XEXP (x, 0), 0);
	  rtx new1 = protect_from_queue (XEXP (x, 1), 0);
	  if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = new0;
	      XEXP (x, 1) = new1;
	    }
	}
      return x;
    }
  /* If the increment has not happened, use the variable itself.  */
  if (QUEUED_INSN (x) == 0)
    return QUEUED_VAR (x);
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
		    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}
/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

static int
queued_subexp_p (x)
     rtx x;
{
  register enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return queued_subexp_p (XEXP (x, 0))
	|| queued_subexp_p (XEXP (x, 1));
    }
  return 0;
}
/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  register rtx p;
  while (p = pending_chain)
    {
      QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
      pending_chain = QUEUED_NEXT (p);
    }
}
static void
init_queue ()
{
  if (pending_chain)
    abort ();
}
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */
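/* Example (added): widening the QImode bit pattern 0x80 to SImode
   yields 0x00000080 when UNSIGNEDP is nonzero and 0xffffff80 when it
   is zero.  */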
void
convert_move (to, from, unsignedp)
     register rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (to_real)
    {
      rtx value;
#ifdef HAVE_extendqfhf2
      if (HAVE_extendqfhf2 && from_mode == QFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_extendqfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendqfsf2
      if (HAVE_extendqfsf2 && from_mode == QFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_extendqfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendqfdf2
      if (HAVE_extendqfdf2 && from_mode == QFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_extendqfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendqfxf2
      if (HAVE_extendqfxf2 && from_mode == QFmode && to_mode == XFmode)
	{
	  emit_unop_insn (CODE_FOR_extendqfxf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendqftf2
      if (HAVE_extendqftf2 && from_mode == QFmode && to_mode == TFmode)
	{
	  emit_unop_insn (CODE_FOR_extendqftf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_extendhfsf2
      if (HAVE_extendhfsf2 && from_mode == HFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_extendhfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendhfdf2
      if (HAVE_extendhfdf2 && from_mode == HFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_extendhfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendhfxf2
      if (HAVE_extendhfxf2 && from_mode == HFmode && to_mode == XFmode)
	{
	  emit_unop_insn (CODE_FOR_extendhfxf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendhftf2
      if (HAVE_extendhftf2 && from_mode == HFmode && to_mode == TFmode)
	{
	  emit_unop_insn (CODE_FOR_extendhftf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_extendsfdf2
      if (HAVE_extendsfdf2 && from_mode == SFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_extendsfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendsfxf2
      if (HAVE_extendsfxf2 && from_mode == SFmode && to_mode == XFmode)
	{
	  emit_unop_insn (CODE_FOR_extendsfxf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendsftf2
      if (HAVE_extendsftf2 && from_mode == SFmode && to_mode == TFmode)
	{
	  emit_unop_insn (CODE_FOR_extendsftf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extenddfxf2
      if (HAVE_extenddfxf2 && from_mode == DFmode && to_mode == XFmode)
	{
	  emit_unop_insn (CODE_FOR_extenddfxf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extenddftf2
      if (HAVE_extenddftf2 && from_mode == DFmode && to_mode == TFmode)
	{
	  emit_unop_insn (CODE_FOR_extenddftf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_trunchfqf2
      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfqf2
      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfqf2
      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfqf2
      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfqf2
      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfhf2
      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfhf2
      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfhf2
      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfhf2
      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
      libcall = (rtx) 0;
      switch (from_mode)
	{
	case SFmode:
	  switch (to_mode)
	    {
	    case DFmode:
	      libcall = extendsfdf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extendsfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extendsftf2_libfunc;
	      break;
	    }
	  break;

	case DFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncdfsf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extenddfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extenddftf2_libfunc;
	      break;
	    }
	  break;

	case XFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncxfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = truncxfdf2_libfunc;
	      break;
	    }
	  break;

	case TFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = trunctfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = trunctfdf2_libfunc;
	      break;
	    }
	  break;
	}

      if (libcall == (rtx) 0)
	/* This conversion is not implemented yet.  */
	abort ();

      value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
				       1, from, from_mode);
      emit_move_insn (to, value);
      return;
    }
  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  if (GET_CODE (to) == REG)
	    emit_insn (gen_rtx (CLOBBER, VOIDmode, to));
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
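      /* For instance (note added): widening a 32-bit signed value to a
	 64-bit mode on a 32-bit-word target copies the low word and
	 fills the high word with copies of the sign bit, produced
	 either by gen_slt or by the arithmetic right shift below.  */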
      if (unsignedp)
	fill_value = const0_rtx;
      else
	{
#ifdef HAVE_slt
	  if (HAVE_slt
	      && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
	      && STORE_FLAG_VALUE == -1)
	    {
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			     lowpart_mode, 0, 0);
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));
	    }
	  else
#endif
	    {
	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				NULL_RTX, 0);
	      fill_value = convert_to_mode (word_mode, fill_value, 1);
	    }
	}

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  if (subword == 0)
	    abort ();

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx (equiv_code, to_mode, copy_rtx (from)));
      return;
    }
  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Handle pointer conversion */			/* SPEE 900220 */
  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
	from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi2
      if (HAVE_truncsipsi2)
	{
	  emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncsipsi2 */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
	{
	  from = convert_to_mode (SImode, from, unsignedp);
	  from_mode = SImode;
	}
      else
	{
#ifdef HAVE_extendpsisi2
	  if (HAVE_extendpsisi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpsisi2 */
	  abort ();
	}
    }
  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
					       GET_MODE_BITSIZE (intermediate))))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.  */
	  abort ();
	}
    }
  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
	{
	  emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
	{
	  emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
	{
	  emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}
/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_modes (mode, oldmode, x, unsignedp)
     enum machine_mode mode, oldmode;
     rtx x;
     int unsignedp;
{
  register rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do
     is make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    return immed_double_const (INTVAL (x), (HOST_WIDE_INT) 0, mode);

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (GET_CODE (x) == REG
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.  This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
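	  /* Worked example (added): with OLDMODE == QImode (WIDTH == 8)
	     and VAL == -56, the mask below leaves VAL == 200 (0xc8);
	     if ! UNSIGNEDP, the 0x80 bit is set, so the following OR
	     restores VAL == -56, now sign-extended to the host word.  */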
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return GEN_INT (val);
	}

      return gen_lowpart (mode, x);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
/* Generate several move instructions to copy LEN bytes
   from block FROM to block TO.  (These are MEM rtx's with BLKmode).
   The caller must pass FROM and TO
   through protect_from_queue before calling.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static void
move_by_pieces (to, from, len, align)
     rtx to, from;
     int len, align;
{
  struct move_by_pieces data;
  rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
  int max_size = MOVE_MAX + 1;

  data.offset = 0;
  data.to_addr = to_addr;
  data.from_addr = from_addr;
  data.to = to;
  data.from = from;
  data.autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  data.reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
#endif
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
#endif
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */
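  /* Worked example (added): with LEN == 7, sufficient alignment, and
     SImode the widest mode with a usable move pattern, the loop below
     emits one SImode move (4 bytes), one HImode move (2), and one
     QImode move (1), leaving data.len == 0.  */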
  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len != 0)
    abort ();
}
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static int
move_by_pieces_ninsns (l, align)
     unsigned int l;
     int align;
{
  register int n_insns = 0;
  int max_size = MOVE_MAX + 1;

  if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  return n_insns;
}
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) ();
     enum machine_mode mode;
     struct move_by_pieces *data;
{
  register int size = GET_MODE_SIZE (mode);
  register rtx to1, from1;

  while (data->len >= size)
    {
      if (data->reverse) data->offset -= size;

      to1 = (data->autinc_to
	     ? gen_rtx (MEM, mode, data->to_addr)
	     : change_address (data->to, mode,
			       plus_constant (data->to_addr, data->offset)));
      from1 =
	(data->autinc_from
	 ? gen_rtx (MEM, mode, data->from_addr)
	 : change_address (data->from, mode,
			   plus_constant (data->from_addr, data->offset)));

#ifdef HAVE_PRE_DECREMENT
      if (data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
      if (data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
#endif

      emit_insn ((*genfun) (to1, from1));
#ifdef HAVE_POST_INCREMENT
      if (data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
#endif

      if (! data->reverse) data->offset += size;

      data->len -= size;
    }
}
/* Emit code to move a block Y to a block X.
   This may be done with string-move instructions,
   with multiple scalar move instructions, or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
   with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have,
   measured in bytes.  */

void
emit_block_move (x, y, size, align)
     rtx x, y;
     rtx size;
     int align;
{
  if (GET_MODE (x) != BLKmode)
    abort ();

  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();
  if (size == 0)
    abort ();

  if (GET_CODE (size) == CONST_INT
      && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
    move_by_pieces (x, y, INTVAL (size), align);
  else
    {
      /* Try the most limited insn first, because there's no point
	 including more than one in the machine description unless
	 the more limited one has some advantage.  */
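      /* Concretely (note added): the loop below walks modes from the
	 narrowest up, so a movstrqi pattern is tried before movstrsi;
	 a pattern is usable only when SIZE fits in that mode's mask
	 and the operand predicates accept X, Y, and the alignment.  */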
      rtx opalign = GEN_INT (align);
      enum machine_mode mode;

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	{
	  enum insn_code code = movstr_optab[(int) mode];

	  if (code != CODE_FOR_nothing
	      /* We don't need MODE to be narrower than HOST_BITS_PER_WIDE_INT
		 here because if SIZE is less than the mode mask, as it is
		 returned by the macro, it will definitely be less than the
		 actual mode mask.  */
	      && (unsigned HOST_WIDE_INT) INTVAL (size) <= GET_MODE_MASK (mode)
	      && (insn_operand_predicate[(int) code][0] == 0
		  || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
	      && (insn_operand_predicate[(int) code][1] == 0
		  || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
	      && (insn_operand_predicate[(int) code][3] == 0
		  || (*insn_operand_predicate[(int) code][3]) (opalign,
							       VOIDmode)))
	    {
	      rtx op2;
	      rtx last = get_last_insn ();
	      rtx pat;

	      op2 = convert_to_mode (mode, size, 1);
	      if (insn_operand_predicate[(int) code][2] != 0
		  && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
		op2 = copy_to_mode_reg (mode, op2);

	      pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	      if (pat)
		{
		  emit_insn (pat);
		  return;
		}
	      else
		delete_insns_since (last);
	    }
	}

#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memcpy_libfunc, 0,
			 VOIDmode, 3, XEXP (x, 0), Pmode,
			 XEXP (y, 0), Pmode,
			 convert_to_mode (TYPE_MODE (sizetype), size,
					  TREE_UNSIGNED (sizetype)),
			 TYPE_MODE (sizetype));
#else
      emit_library_call (bcopy_libfunc, 0,
			 VOIDmode, 3, XEXP (y, 0), Pmode,
			 XEXP (x, 0), Pmode,
			 convert_to_mode (TYPE_MODE (sizetype), size,
					  TREE_UNSIGNED (sizetype)),
			 TYPE_MODE (sizetype));
#endif
    }
}
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (regno, x, nregs, mode)
     int regno;
     rtx x;
     int nregs;
     enum machine_mode mode;
{
  int i;
  rtx pat, last;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx (REG, word_mode, regno), x,
			       GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx (REG, word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  SIZE indicates the number
   of bytes in the object X.  */

void
move_block_from_reg (regno, x, nregs, size)
     int regno;
     rtx x;
     int nregs;
     int size;
{
  int i;
  rtx pat, last;

  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
     to the left before storing to memory.  */
  if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
    {
      rtx tem = operand_subword (x, 0, 1, BLKmode);
      rtx shift;

      if (tem == 0)
	abort ();

      shift = expand_shift (LSHIFT_EXPR, word_mode,
			    gen_rtx (REG, word_mode, regno),
			    build_int_2 ((UNITS_PER_WORD - size)
					 * BITS_PER_UNIT, 0), NULL_RTX, 0);
      emit_move_insn (tem, shift);
      return;
    }

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      last = get_last_insn ();
      pat = gen_store_multiple (x, gen_rtx (REG, word_mode, regno),
				GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      if (tem == 0)
	abort ();

      emit_move_insn (tem, gen_rtx (REG, word_mode, regno + i));
    }
}
/* Add a USE expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
use_reg (call_fusage, reg)
     rtx *call_fusage, reg;
{
  if (GET_CODE (reg) != REG
      || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
    abort ();

  *call_fusage
    = gen_rtx (EXPR_LIST, VOIDmode,
	       gen_rtx (USE, VOIDmode, reg), *call_fusage);
}
/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
   starting at REGNO.  All of these registers must be hard registers.  */

void
use_regs (call_fusage, regno, nregs)
     rtx *call_fusage;
     int regno;
     int nregs;
{
  int i;

  if (regno + nregs > FIRST_PSEUDO_REGISTER)
    abort ();

  for (i = 0; i < nregs; i++)
    use_reg (call_fusage, gen_rtx (REG, reg_raw_mode[regno + i], regno + i));
}
/* Write zeros through the storage of OBJECT.
   If OBJECT has BLKmode, SIZE is its length in bytes.  */

void
clear_storage (object, size)
     rtx object;
     int size;
{
  if (GET_MODE (object) == BLKmode)
    {
#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memset_libfunc, 0,
			 VOIDmode, 3,
			 XEXP (object, 0), Pmode, const0_rtx, Pmode,
			 GEN_INT (size), Pmode);
#else
      emit_library_call (bzero_libfunc, 0,
			 VOIDmode, 2,
			 XEXP (object, 0), Pmode,
			 GEN_INT (size), Pmode);
#endif
    }
  else
    emit_move_insn (object, const0_rtx);
}
/* Generate code to copy Y into X.
   Both Y and X must have the same mode, except that
   Y can be a constant with VOIDmode.
   This mode cannot be BLKmode; use emit_block_move for that.

   Return the last instruction emitted.  */

rtx
emit_move_insn (x, y)
     rtx x, y;
{
  enum machine_mode mode = GET_MODE (x);

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);

  if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
    abort ();

  if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
    y = force_const_mem (mode, y);

  /* If X or Y are memory references, verify that their addresses are valid
     for the machine.  */
  if (GET_CODE (x) == MEM
      && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
	   && ! push_operand (x, GET_MODE (x)))
	  || (flag_force_addr
	      && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
    x = change_address (x, VOIDmode, XEXP (x, 0));

  if (GET_CODE (y) == MEM
      && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
	  || (flag_force_addr
	      && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
    y = change_address (y, VOIDmode, XEXP (y, 0));

  if (mode == BLKmode)
    abort ();

  return emit_move_insn_1 (x, y);
}
/* Low level part of emit_move_insn.
   Called just like emit_move_insn, but assumes X and Y
   are basically valid.  */

rtx
emit_move_insn_1 (x, y)
     rtx x, y;
{
  enum machine_mode mode = GET_MODE (x);
  enum machine_mode submode;
  enum mode_class class = GET_MODE_CLASS (mode);
  int i;

  if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
    return
      emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));

  /* Expand complex moves by moving real part and imag part, if possible.  */
  else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
	   && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
						    * BITS_PER_UNIT),
						   (class == MODE_COMPLEX_INT
						    ? MODE_INT : MODE_FLOAT),
						   0))
	   && (mov_optab->handlers[(int) submode].insn_code
	       != CODE_FOR_nothing))
    {
      /* Don't split destination if it is a stack push.  */
      int stack = push_operand (x, GET_MODE (x));
      rtx insns;

      start_sequence ();

      /* If this is a stack, push the highpart first, so it
	 will be in the argument order.

	 In that case, change_address is used only to convert
	 the mode, not to change the address.  */
      if (stack)
	{
	  /* Note that the real part always precedes the imag part in memory
	     regardless of machine's endianness.  */
#ifdef STACK_GROWS_DOWNWARD
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_rtx (MEM, submode, (XEXP (x, 0))),
		      gen_imagpart (submode, y)));
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_rtx (MEM, submode, (XEXP (x, 0))),
		      gen_realpart (submode, y)));
#else
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_rtx (MEM, submode, (XEXP (x, 0))),
		      gen_realpart (submode, y)));
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_rtx (MEM, submode, (XEXP (x, 0))),
		      gen_imagpart (submode, y)));
#endif
	}
      else
	{
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_realpart (submode, x), gen_realpart (submode, y)));
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_imagpart (submode, x), gen_imagpart (submode, y)));
	}

      insns = get_insns ();
      end_sequence ();

      /* If X is a CONCAT, we got insns like RD = RS, ID = IS,
	 each with a separate pseudo as destination.
	 It's not correct for flow to treat them as a unit.  */
      if (GET_CODE (x) != CONCAT)
	emit_no_conflict_block (insns, x, y, NULL_RTX, NULL_RTX);
      else
	emit_insns (insns);

      return get_last_insn ();
    }

  /* This will handle any multi-word mode that lacks a move_insn pattern.
     However, you will get better code if you define such patterns,
     even if they must turn into multiple assembler instructions.  */
  else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
    {
      rtx last_insn = 0;
      rtx insns;

      start_sequence ();

      for (i = 0;
	   i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
	   i++)
	{
	  rtx xpart = operand_subword (x, i, 1, mode);
	  rtx ypart = operand_subword (y, i, 1, mode);

	  /* If we can't get a part of Y, put Y into memory if it is a
	     constant.  Otherwise, force it into a register.  If we still
	     can't get a part of Y, abort.  */
	  if (ypart == 0 && CONSTANT_P (y))
	    {
	      y = force_const_mem (mode, y);
	      ypart = operand_subword (y, i, 1, mode);
	    }
	  else if (ypart == 0)
	    ypart = operand_subword_force (y, i, mode);

	  if (xpart == 0 || ypart == 0)
	    abort ();

	  last_insn = emit_move_insn (xpart, ypart);
	}

      insns = get_insns ();
      end_sequence ();
      emit_no_conflict_block (insns, x, y, NULL_RTX, NULL_RTX);

      return last_insn;
    }
  else
    abort ();
}
/* Pushing data onto the stack.  */

/* Push a block of length SIZE (perhaps variable)
   and return an rtx to address the beginning of the block.
   Note that it is not possible for the value returned to be a QUEUED.
   The value may be virtual_outgoing_args_rtx.

   EXTRA is the number of bytes of padding to push in addition to SIZE.
   BELOW nonzero means this padding comes at low addresses;
   otherwise, the padding comes at high addresses.  */

rtx
push_block (size, extra, below)
     rtx size;
     int extra, below;
{
  register rtx temp;
  if (CONSTANT_P (size))
    anti_adjust_stack (plus_constant (size, extra));
  else if (GET_CODE (size) == REG && extra == 0)
    anti_adjust_stack (size);
  else
    {
      rtx temp = copy_to_mode_reg (Pmode, size);
      if (extra != 0)
	temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
			     temp, 0, OPTAB_LIB_WIDEN);
      anti_adjust_stack (temp);
    }

#ifdef STACK_GROWS_DOWNWARD
  temp = virtual_outgoing_args_rtx;
  if (extra != 0 && below)
    temp = plus_constant (temp, extra);
#else
  if (GET_CODE (size) == CONST_INT)
    temp = plus_constant (virtual_outgoing_args_rtx,
			  - INTVAL (size) - (below ? 0 : extra));
  else if (extra != 0 && !below)
    temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
		    negate_rtx (Pmode, plus_constant (size, extra)));
  else
    temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
		    negate_rtx (Pmode, size));
#endif

  return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
}

rtx
gen_push_operand ()
{
  return gen_rtx (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
}
/* Generate code to push X onto the stack, assuming it has mode MODE and
   type TYPE.
   MODE is redundant except when X is a CONST_INT (since they don't
   carry mode info).
   SIZE is an rtx for the size of data to be copied (in bytes),
   needed only if X is BLKmode.

   ALIGN (in bytes) is maximum alignment we can assume.

   If PARTIAL and REG are both nonzero, then copy that many of the first
   words of X into registers starting with REG, and push the rest of X.
   The amount of space pushed is decreased by PARTIAL words,
   rounded *down* to a multiple of PARM_BOUNDARY.
   REG must be a hard register in this case.
   If REG is zero but PARTIAL is not, take all other actions for an
   argument partially in registers, but do not actually load any
   registers.

   EXTRA is the amount in bytes of extra space to leave next to this arg.
   This is ignored if an argument block has already been allocated.

   On a machine that lacks real push insns, ARGS_ADDR is the address of
   the bottom of the argument block for this call.  We use indexing off there
   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
   argument block has not been preallocated.

   ARGS_SO_FAR is the size of args previously pushed for this call.  */
1978 void
1979 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
1980 args_addr, args_so_far)
1981 register rtx x;
1982 enum machine_mode mode;
1983 tree type;
1984 rtx size;
1985 int align;
1986 int partial;
1987 rtx reg;
1988 int extra;
1989 rtx args_addr;
1990 rtx args_so_far;
1991 {
1992 rtx xinner;
1993 enum direction stack_direction
1994 #ifdef STACK_GROWS_DOWNWARD
1995 = downward;
1996 #else
1997 = upward;
1998 #endif
2000 /* Decide where to pad the argument: `downward' for below,
2001 `upward' for above, or `none' for don't pad it.
2002 Default is below for small data on big-endian machines; else above. */
2003 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
2005 /* Invert direction if stack is post-update. */
2006 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
2007 if (where_pad != none)
2008 where_pad = (where_pad == downward ? upward : downward);
2010 xinner = x = protect_from_queue (x, 0);
2012 if (mode == BLKmode)
2013 {
2014 /* Copy a block into the stack, entirely or partially. */
2016 register rtx temp;
2017 int used = partial * UNITS_PER_WORD;
2018 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
2019 int skip;
2021 if (size == 0)
2022 abort ();
2024 used -= offset;
2026 /* USED is now the # of bytes we need not copy to the stack
2027 because registers will take care of them. */
2029 if (partial != 0)
2030 xinner = change_address (xinner, BLKmode,
2031 plus_constant (XEXP (xinner, 0), used));
2033 /* If the partial register-part of the arg counts in its stack size,
2034 skip the part of stack space corresponding to the registers.
2035 Otherwise, start copying to the beginning of the stack space,
2036 by setting SKIP to 0. */
2037 #ifndef REG_PARM_STACK_SPACE
2038 skip = 0;
2039 #else
2040 skip = used;
2041 #endif
2043 #ifdef PUSH_ROUNDING
2044 /* Do it with several push insns if that doesn't take lots of insns
2045 and if there is no difficulty with push insns that skip bytes
2046 on the stack for alignment purposes. */
2047 if (args_addr == 0
2048 && GET_CODE (size) == CONST_INT
2049 && skip == 0
2050 && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
2051 < MOVE_RATIO)
2052 /* Here we avoid the case of a structure whose weak alignment
2053 forces many pushes of a small amount of data,
2054 and such small pushes do rounding that causes trouble. */
2055 && ((! STRICT_ALIGNMENT && ! SLOW_UNALIGNED_ACCESS)
2056 || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
2057 || PUSH_ROUNDING (align) == align)
2058 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
2059 {
2060 /* Push padding now if padding above and stack grows down,
2061 or if padding below and stack grows up.
2062 But if space already allocated, this has already been done. */
2063 if (extra && args_addr == 0
2064 && where_pad != none && where_pad != stack_direction)
2065 anti_adjust_stack (GEN_INT (extra));
2067 move_by_pieces (gen_rtx (MEM, BLKmode, gen_push_operand ()), xinner,
2068 INTVAL (size) - used, align);
2069 }
2070 else
2071 #endif /* PUSH_ROUNDING */
2072 {
2073 /* Otherwise make space on the stack and copy the data
2074 to the address of that space. */
2076 /* Deduct words put into registers from the size we must copy. */
2077 if (partial != 0)
2079 if (GET_CODE (size) == CONST_INT)
2080 size = GEN_INT (INTVAL (size) - used);
2081 else
2082 size = expand_binop (GET_MODE (size), sub_optab, size,
2083 GEN_INT (used), NULL_RTX, 0,
2084 OPTAB_LIB_WIDEN);
2087 /* Get the address of the stack space.
2088 In this case, we do not deal with EXTRA separately.
2089 A single stack adjust will do. */
2090 if (! args_addr)
2091 {
2092 temp = push_block (size, extra, where_pad == downward);
2093 extra = 0;
2094 }
2095 else if (GET_CODE (args_so_far) == CONST_INT)
2096 temp = memory_address (BLKmode,
2097 plus_constant (args_addr,
2098 skip + INTVAL (args_so_far)));
2099 else
2100 temp = memory_address (BLKmode,
2101 plus_constant (gen_rtx (PLUS, Pmode,
2102 args_addr, args_so_far),
2103 skip));
2105 /* TEMP is the address of the block. Copy the data there. */
2106 if (GET_CODE (size) == CONST_INT
2107 && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
2108 < MOVE_RATIO))
2109 {
2110 move_by_pieces (gen_rtx (MEM, BLKmode, temp), xinner,
2111 INTVAL (size), align);
2112 goto ret;
2113 }
2114 /* Try the most limited insn first, because there's no point
2115 including more than one in the machine description unless
2116 the more limited one has some advantage. */
2117 #ifdef HAVE_movstrqi
2118 if (HAVE_movstrqi
2119 && GET_CODE (size) == CONST_INT
2120 && ((unsigned) INTVAL (size)
2121 < (1 << (GET_MODE_BITSIZE (QImode) - 1))))
2122 {
2123 rtx pat = gen_movstrqi (gen_rtx (MEM, BLKmode, temp),
2124 xinner, size, GEN_INT (align));
2125 if (pat != 0)
2126 {
2127 emit_insn (pat);
2128 goto ret;
2129 }
2130 }
2131 #endif
2132 #ifdef HAVE_movstrhi
2133 if (HAVE_movstrhi
2134 && GET_CODE (size) == CONST_INT
2135 && ((unsigned) INTVAL (size)
2136 < (1 << (GET_MODE_BITSIZE (HImode) - 1))))
2137 {
2138 rtx pat = gen_movstrhi (gen_rtx (MEM, BLKmode, temp),
2139 xinner, size, GEN_INT (align));
2140 if (pat != 0)
2141 {
2142 emit_insn (pat);
2143 goto ret;
2144 }
2145 }
2146 #endif
2147 #ifdef HAVE_movstrsi
2148 if (HAVE_movstrsi)
2149 {
2150 rtx pat = gen_movstrsi (gen_rtx (MEM, BLKmode, temp),
2151 xinner, size, GEN_INT (align));
2152 if (pat != 0)
2153 {
2154 emit_insn (pat);
2155 goto ret;
2156 }
2157 }
2158 #endif
2159 #ifdef HAVE_movstrdi
2160 if (HAVE_movstrdi)
2161 {
2162 rtx pat = gen_movstrdi (gen_rtx (MEM, BLKmode, temp),
2163 xinner, size, GEN_INT (align));
2164 if (pat != 0)
2165 {
2166 emit_insn (pat);
2167 goto ret;
2168 }
2169 }
2170 #endif
2172 #ifndef ACCUMULATE_OUTGOING_ARGS
2173 /* If the source is referenced relative to the stack pointer,
2174 copy it to another register to stabilize it. We do not need
2175 to do this if we know that we won't be changing sp. */
2177 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
2178 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
2179 temp = copy_to_reg (temp);
2180 #endif
2182 /* Make inhibit_defer_pop nonzero around the library call
2183 to force it to pop the bcopy-arguments right away. */
2184 NO_DEFER_POP;
2185 #ifdef TARGET_MEM_FUNCTIONS
2186 emit_library_call (memcpy_libfunc, 0,
2187 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
2188 convert_to_mode (TYPE_MODE (sizetype),
2189 size, TREE_UNSIGNED (sizetype)),
2190 TYPE_MODE (sizetype));
2191 #else
2192 emit_library_call (bcopy_libfunc, 0,
2193 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
2194 convert_to_mode (TYPE_MODE (sizetype),
2195 size, TREE_UNSIGNED (sizetype)),
2196 TYPE_MODE (sizetype));
2197 #endif
2198 OK_DEFER_POP;
2199 }
2200 }
2201 else if (partial > 0)
2202 {
2203 /* Scalar partly in registers. */
2205 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
2206 int i;
2207 int not_stack;
2208 /* # words of start of argument
2209 that we must make space for but need not store. */
2210 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
2211 int args_offset = INTVAL (args_so_far);
2212 int skip;
2214 /* Push padding now if padding above and stack grows down,
2215 or if padding below and stack grows up.
2216 But if space already allocated, this has already been done. */
2217 if (extra && args_addr == 0
2218 && where_pad != none && where_pad != stack_direction)
2219 anti_adjust_stack (GEN_INT (extra));
2221 /* If we make space by pushing it, we might as well push
2222 the real data. Otherwise, we can leave OFFSET nonzero
2223 and leave the space uninitialized. */
2224 if (args_addr == 0)
2225 offset = 0;
2227 /* Now NOT_STACK gets the number of words that we don't need to
2228 allocate on the stack. */
2229 not_stack = partial - offset;
2231 /* If the partial register-part of the arg counts in its stack size,
2232 skip the part of stack space corresponding to the registers.
2233 Otherwise, start copying to the beginning of the stack space,
2234 by setting SKIP to 0. */
2235 #ifndef REG_PARM_STACK_SPACE
2236 skip = 0;
2237 #else
2238 skip = not_stack;
2239 #endif
2241 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
2242 x = validize_mem (force_const_mem (mode, x));
2244 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
2245 SUBREGs of such registers are not allowed. */
2246 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
2247 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
2248 x = copy_to_reg (x);
2250 /* Loop over all the words allocated on the stack for this arg. */
2251 /* We can do it by words, because any scalar bigger than a word
2252 has a size a multiple of a word. */
2253 #ifndef PUSH_ARGS_REVERSED
2254 for (i = not_stack; i < size; i++)
2255 #else
2256 for (i = size - 1; i >= not_stack; i--)
2257 #endif
2258 if (i >= not_stack + offset)
2259 emit_push_insn (operand_subword_force (x, i, mode),
2260 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
2261 0, args_addr,
2262 GEN_INT (args_offset + ((i - not_stack + skip)
2263 * UNITS_PER_WORD)));
2264 }
2265 else
2266 {
2267 rtx addr;
2269 /* Push padding now if padding above and stack grows down,
2270 or if padding below and stack grows up.
2271 But if space already allocated, this has already been done. */
2272 if (extra && args_addr == 0
2273 && where_pad != none && where_pad != stack_direction)
2274 anti_adjust_stack (GEN_INT (extra));
2276 #ifdef PUSH_ROUNDING
2277 if (args_addr == 0)
2278 addr = gen_push_operand ();
2279 else
2280 #endif
2281 if (GET_CODE (args_so_far) == CONST_INT)
2282 addr
2283 = memory_address (mode,
2284 plus_constant (args_addr, INTVAL (args_so_far)));
2285 else
2286 addr = memory_address (mode, gen_rtx (PLUS, Pmode, args_addr,
2287 args_so_far));
2289 emit_move_insn (gen_rtx (MEM, mode, addr), x);
2290 }
2292 ret:
2293 /* If part should go in registers, copy that part
2294 into the appropriate registers. Do this now, at the end,
2295 since mem-to-mem copies above may do function calls. */
2296 if (partial > 0 && reg != 0)
2297 move_block_to_reg (REGNO (reg), x, partial, mode);
2299 if (extra && args_addr == 0 && where_pad == stack_direction)
2300 anti_adjust_stack (GEN_INT (extra));
2301 }
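/* An illustrative call (the operand values are hypothetical): push one
   word-sized argument with no padding, no partial registers, and no
   preallocated argument block.  */
#if 0
  emit_push_insn (val, SImode, integer_type_node, NULL_RTX,
                  PARM_BOUNDARY / BITS_PER_UNIT, 0, NULL_RTX,
                  0, NULL_RTX, const0_rtx);
#endif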
2303 /* Expand an assignment that stores the value of FROM into TO.
2304 If WANT_VALUE is nonzero, return an rtx for the value of TO.
2305 (This may contain a QUEUED rtx;
2306 if the value is constant, this rtx is a constant.)
2307 Otherwise, the returned value is NULL_RTX.
2309 SUGGEST_REG is no longer actually used.
2310 It used to mean, copy the value through a register
2311 and return that register, if that is possible.
2312 We now use WANT_VALUE to decide whether to do this. */
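/* A minimal usage sketch (X_TREE and Y_TREE are hypothetical trees):
   for a C assignment `x = y' whose value is itself used, a front end
   would do something like the following and get an rtx for the value.  */
#if 0
  rtx val = expand_assignment (x_tree, y_tree, 1, 0);
#endif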
2314 rtx
2315 expand_assignment (to, from, want_value, suggest_reg)
2316 tree to, from;
2317 int want_value;
2318 int suggest_reg;
2319 {
2320 register rtx to_rtx = 0;
2321 rtx result;
2323 /* Don't crash if the lhs of the assignment was erroneous. */
2325 if (TREE_CODE (to) == ERROR_MARK)
2326 {
2327 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
2328 return want_value ? result : NULL_RTX;
2329 }
2331 if (output_bytecode)
2332 {
2333 tree dest_innermost;
2335 bc_expand_expr (from);
2336 bc_emit_instruction (duplicate);
2338 dest_innermost = bc_expand_address (to);
2340 /* Can't deduce from TYPE that we're dealing with a bitfield, so
2341 take care of it here. */
2343 bc_store_memory (TREE_TYPE (to), dest_innermost);
2344 return NULL;
2345 }
2347 /* Assignment of a structure component needs special treatment
2348 if the structure component's rtx is not simply a MEM.
2349 Assignment of an array element at a constant index
2350 has the same problem. */
2352 if (TREE_CODE (to) == COMPONENT_REF
2353 || TREE_CODE (to) == BIT_FIELD_REF
2354 || (TREE_CODE (to) == ARRAY_REF
2355 && TREE_CODE (TREE_OPERAND (to, 1)) == INTEGER_CST
2356 && TREE_CODE (TYPE_SIZE (TREE_TYPE (to))) == INTEGER_CST))
2357 {
2358 enum machine_mode mode1;
2359 int bitsize;
2360 int bitpos;
2361 tree offset;
2362 int unsignedp;
2363 int volatilep = 0;
2364 tree tem;
2365 int alignment;
2367 push_temp_slots ();
2368 tem = get_inner_reference (to, &bitsize, &bitpos, &offset,
2369 &mode1, &unsignedp, &volatilep);
2371 /* If we are going to use store_bit_field and extract_bit_field,
2372 make sure to_rtx will be safe for multiple use. */
2374 if (mode1 == VOIDmode && want_value)
2375 tem = stabilize_reference (tem);
2377 alignment = TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT;
2378 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
2379 if (offset != 0)
2380 {
2381 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
2383 if (GET_CODE (to_rtx) != MEM)
2384 abort ();
2385 to_rtx = change_address (to_rtx, VOIDmode,
2386 gen_rtx (PLUS, Pmode, XEXP (to_rtx, 0),
2387 force_reg (Pmode, offset_rtx)));
2388 /* If we have a variable offset, the known alignment
2389 is only that of the innermost structure containing the field.
2390 (Actually, we could sometimes do better by using the
2391 align of an element of the innermost array, but no need.) */
2392 if (TREE_CODE (to) == COMPONENT_REF
2393 || TREE_CODE (to) == BIT_FIELD_REF)
2394 alignment
2395 = TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (to, 0))) / BITS_PER_UNIT;
2396 }
2397 if (volatilep)
2398 {
2399 if (GET_CODE (to_rtx) == MEM)
2400 MEM_VOLATILE_P (to_rtx) = 1;
2401 #if 0 /* This was turned off because, when a field is volatile
2402 in an object which is not volatile, the object may be in a register,
2403 and then we would abort over here. */
2404 else
2405 abort ();
2406 #endif
2407 }
2409 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
2410 (want_value
2411 /* Spurious cast makes HPUX compiler happy. */
2412 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
2413 : VOIDmode),
2414 unsignedp,
2415 /* Required alignment of containing datum. */
2416 alignment,
2417 int_size_in_bytes (TREE_TYPE (tem)));
2418 preserve_temp_slots (result);
2419 free_temp_slots ();
2420 pop_temp_slots ();
2422 /* If the value is meaningful, convert RESULT to the proper mode.
2423 Otherwise, return nothing. */
2424 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
2425 TYPE_MODE (TREE_TYPE (from)),
2426 result,
2427 TREE_UNSIGNED (TREE_TYPE (to)))
2428 : NULL_RTX);
2429 }
2431 /* If the rhs is a function call and its value is not an aggregate,
2432 call the function before we start to compute the lhs.
2433 This is needed for correct code for cases such as
2434 val = setjmp (buf) on machines where reference to val
2435 requires loading up part of an address in a separate insn.
2437 Don't do this if TO is a VAR_DECL whose DECL_RTL is REG since it might be
2438 a promoted variable where the zero- or sign- extension needs to be done.
2439 Handling this in the normal way is safe because no computation is done
2440 before the call. */
2441 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
2442 && ! (TREE_CODE (to) == VAR_DECL && GET_CODE (DECL_RTL (to)) == REG))
2443 {
2444 rtx value;
2446 push_temp_slots ();
2447 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
2448 if (to_rtx == 0)
2449 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
2450 emit_move_insn (to_rtx, value);
2451 preserve_temp_slots (to_rtx);
2452 free_temp_slots ();
2453 pop_temp_slots ();
2454 return want_value ? to_rtx : NULL_RTX;
2455 }
2457 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
2458 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
2460 if (to_rtx == 0)
2461 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
2463 /* Don't move directly into a return register. */
2464 if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
2465 {
2466 rtx temp;
2468 push_temp_slots ();
2469 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
2470 emit_move_insn (to_rtx, temp);
2471 preserve_temp_slots (to_rtx);
2472 free_temp_slots ();
2473 pop_temp_slots ();
2474 return want_value ? to_rtx : NULL_RTX;
2475 }
2477 /* In case we are returning the contents of an object which overlaps
2478 the place the value is being stored, use a safe function when copying
2479 a value through a pointer into a structure value return block. */
2480 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
2481 && current_function_returns_struct
2482 && !current_function_returns_pcc_struct)
2483 {
2484 rtx from_rtx, size;
2486 push_temp_slots ();
2487 size = expr_size (from);
2488 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
2490 #ifdef TARGET_MEM_FUNCTIONS
2491 emit_library_call (memcpy_libfunc, 0,
2492 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
2493 XEXP (from_rtx, 0), Pmode,
2494 convert_to_mode (TYPE_MODE (sizetype),
2495 size, TREE_UNSIGNED (sizetype)),
2496 TYPE_MODE (sizetype));
2497 #else
2498 emit_library_call (bcopy_libfunc, 0,
2499 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
2500 XEXP (to_rtx, 0), Pmode,
2501 convert_to_mode (TYPE_MODE (sizetype),
2502 size, TREE_UNSIGNED (sizetype)),
2503 TYPE_MODE (sizetype));
2504 #endif
2506 preserve_temp_slots (to_rtx);
2507 free_temp_slots ();
2508 pop_temp_slots ();
2509 return want_value ? to_rtx : NULL_RTX;
2510 }
2512 /* Compute FROM and store the value in the rtx we got. */
2514 push_temp_slots ();
2515 result = store_expr (from, to_rtx, want_value);
2516 preserve_temp_slots (result);
2517 free_temp_slots ();
2518 pop_temp_slots ();
2519 return want_value ? result : NULL_RTX;
2520 }
2522 /* Generate code for computing expression EXP,
2523 and storing the value into TARGET.
2524 TARGET may contain a QUEUED rtx.
2526 If WANT_VALUE is nonzero, return a copy of the value
2527 not in TARGET, so that we can be sure to use the proper
2528 value in a containing expression even if TARGET has something
2529 else stored in it. If possible, we copy the value through a pseudo
2530 and return that pseudo. Or, if the value is constant, we try to
2531 return the constant. In some cases, we return a pseudo
2532 copied *from* TARGET.
2534 If the mode is BLKmode then we may return TARGET itself.
2535 It turns out that in BLKmode it doesn't cause a problem,
2536 because C has no operators that could combine two different
2537 assignments into the same BLKmode object with different values
2538 with no sequence point. Will other languages need this to
2539 be more thorough?
2541 If WANT_VALUE is 0, we return NULL, to make sure
2542 to catch quickly any cases where the caller uses the value
2543 and fails to set WANT_VALUE. */
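/* A sketch of the WANT_VALUE contract (illustrative only; EXP, TARGET
   and TEMP stand for the function's own parameters and result):  */
#if 0
  temp = store_expr (exp, target, 1);
  /* TEMP may be a pseudo or a constant rather than TARGET itself,
     and is safe to use even if TARGET is later overwritten.  */
#endif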
2545 rtx
2546 store_expr (exp, target, want_value)
2547 register tree exp;
2548 register rtx target;
2549 int want_value;
2550 {
2551 register rtx temp;
2552 int dont_return_target = 0;
2554 if (TREE_CODE (exp) == COMPOUND_EXPR)
2555 {
2556 /* Perform first part of compound expression, then assign from second
2557 part. */
2558 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
2559 emit_queue ();
2560 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
2561 }
2562 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
2563 {
2564 /* For conditional expression, get safe form of the target. Then
2565 test the condition, doing the appropriate assignment on either
2566 side. This avoids the creation of unnecessary temporaries.
2567 For non-BLKmode, it is more efficient not to do this. */
2569 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
2571 emit_queue ();
2572 target = protect_from_queue (target, 1);
2574 NO_DEFER_POP;
2575 jumpifnot (TREE_OPERAND (exp, 0), lab1);
2576 store_expr (TREE_OPERAND (exp, 1), target, 0);
2577 emit_queue ();
2578 emit_jump_insn (gen_jump (lab2));
2579 emit_barrier ();
2580 emit_label (lab1);
2581 store_expr (TREE_OPERAND (exp, 2), target, 0);
2582 emit_queue ();
2583 emit_label (lab2);
2584 OK_DEFER_POP;
2585 return want_value ? target : NULL_RTX;
2586 }
2587 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
2588 && GET_MODE (target) != BLKmode)
2589 /* If target is in memory and caller wants value in a register instead,
2590 arrange that. Pass TARGET as target for expand_expr so that,
2591 if EXP is another assignment, WANT_VALUE will be nonzero for it.
2592 We know expand_expr will not use the target in that case.
2593 Don't do this if TARGET is volatile because we are supposed
2594 to write it and then read it. */
2595 {
2596 temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
2597 GET_MODE (target), 0);
2598 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
2599 temp = copy_to_reg (temp);
2600 dont_return_target = 1;
2601 }
2602 else if (queued_subexp_p (target))
2603 /* If target contains a postincrement, let's not risk
2604 using it as the place to generate the rhs. */
2605 {
2606 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
2607 {
2608 /* Expand EXP into a new pseudo. */
2609 temp = gen_reg_rtx (GET_MODE (target));
2610 temp = expand_expr (exp, temp, GET_MODE (target), 0);
2611 }
2612 else
2613 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
2615 /* If target is volatile, ANSI requires accessing the value
2616 *from* the target, if it is accessed. So make that happen.
2617 In no case return the target itself. */
2618 if (! MEM_VOLATILE_P (target) && want_value)
2619 dont_return_target = 1;
2620 }
2621 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
2622 /* If this is a scalar in a register that is stored in a wider mode
2623 than the declared mode, compute the result into its declared mode
2624 and then convert to the wider mode. Our value is the computed
2625 expression. */
2626 {
2627 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
2629 /* If TEMP is a volatile MEM and we want a result value, make
2630 the access now so it gets done only once. */
2631 if (GET_CODE (temp) == MEM && MEM_VOLATILE_P (temp))
2632 temp = copy_to_reg (temp);
2634 /* If TEMP is a VOIDmode constant, use convert_modes to make
2635 sure that we properly convert it. */
2636 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
2637 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
2638 TYPE_MODE (TREE_TYPE (exp)), temp,
2639 SUBREG_PROMOTED_UNSIGNED_P (target));
2641 convert_move (SUBREG_REG (target), temp,
2642 SUBREG_PROMOTED_UNSIGNED_P (target));
2643 return want_value ? temp : NULL_RTX;
2644 }
2645 else
2646 {
2647 temp = expand_expr (exp, target, GET_MODE (target), 0);
2648 /* Return TARGET if it's a specified hardware register.
2649 If TARGET is a volatile mem ref, either return TARGET
2650 or return a reg copied *from* TARGET; ANSI requires this.
2652 Otherwise, if TEMP is not TARGET, return TEMP
2653 if it is constant (for efficiency),
2654 or if we really want the correct value. */
2655 if (!(target && GET_CODE (target) == REG
2656 && REGNO (target) < FIRST_PSEUDO_REGISTER)
2657 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
2658 && temp != target
2659 && (CONSTANT_P (temp) || want_value))
2660 dont_return_target = 1;
2661 }
2663 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
2664 the same as that of TARGET, adjust the constant. This is needed, for
2665 example, in case it is a CONST_DOUBLE and we want only a word-sized
2666 value. */
2667 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
2668 && TREE_CODE (exp) != ERROR_MARK
2669 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
2670 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
2671 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
2673 /* If value was not generated in the target, store it there.
2674 Convert the value to TARGET's type first if necessary. */
2676 if (temp != target && TREE_CODE (exp) != ERROR_MARK)
2677 {
2678 target = protect_from_queue (target, 1);
2679 if (GET_MODE (temp) != GET_MODE (target)
2680 && GET_MODE (temp) != VOIDmode)
2681 {
2682 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
2683 if (dont_return_target)
2684 {
2685 /* In this case, we will return TEMP,
2686 so make sure it has the proper mode.
2687 But don't forget to store the value into TARGET. */
2688 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
2689 emit_move_insn (target, temp);
2690 }
2691 else
2692 convert_move (target, temp, unsignedp);
2693 }
2695 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
2696 {
2697 /* Handle copying a string constant into an array.
2698 The string constant may be shorter than the array.
2699 So copy just the string's actual length, and clear the rest. */
2700 rtx size;
2702 /* Get the size of the data type of the string,
2703 which is actually the size of the target. */
2704 size = expr_size (exp);
2705 if (GET_CODE (size) == CONST_INT
2706 && INTVAL (size) < TREE_STRING_LENGTH (exp))
2707 emit_block_move (target, temp, size,
2708 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2709 else
2710 {
2711 /* Compute the size of the data to copy from the string. */
2712 tree copy_size
2713 = size_binop (MIN_EXPR,
2714 make_tree (sizetype, size),
2715 convert (sizetype,
2716 build_int_2 (TREE_STRING_LENGTH (exp), 0)));
2717 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
2718 VOIDmode, 0);
2719 rtx label = 0;
2721 /* Copy that much. */
2722 emit_block_move (target, temp, copy_size_rtx,
2723 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2725 /* Figure out how much is left in TARGET
2726 that we have to clear. */
2727 if (GET_CODE (copy_size_rtx) == CONST_INT)
2728 {
2729 temp = plus_constant (XEXP (target, 0),
2730 TREE_STRING_LENGTH (exp));
2731 size = plus_constant (size,
2732 - TREE_STRING_LENGTH (exp));
2733 }
2734 else
2735 {
2736 enum machine_mode size_mode = Pmode;
2738 temp = force_reg (Pmode, XEXP (target, 0));
2739 temp = expand_binop (size_mode, add_optab, temp,
2740 copy_size_rtx, NULL_RTX, 0,
2741 OPTAB_LIB_WIDEN);
2743 size = expand_binop (size_mode, sub_optab, size,
2744 copy_size_rtx, NULL_RTX, 0,
2745 OPTAB_LIB_WIDEN);
2747 emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
2748 GET_MODE (size), 0, 0);
2749 label = gen_label_rtx ();
2750 emit_jump_insn (gen_blt (label));
2751 }
2753 if (size != const0_rtx)
2754 {
2755 #ifdef TARGET_MEM_FUNCTIONS
2756 emit_library_call (memset_libfunc, 0, VOIDmode, 3,
2757 temp, Pmode, const0_rtx, Pmode, size, Pmode);
2758 #else
2759 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
2760 temp, Pmode, size, Pmode);
2761 #endif
2762 }
2763 if (label)
2764 emit_label (label);
2765 }
2766 }
2767 else if (GET_MODE (temp) == BLKmode)
2768 emit_block_move (target, temp, expr_size (exp),
2769 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2770 else
2771 emit_move_insn (target, temp);
2772 }
2774 /* If we don't want a value, return NULL_RTX. */
2775 if (! want_value)
2776 return NULL_RTX;
2778 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
2779 ??? The latter test doesn't seem to make sense. */
2780 else if (dont_return_target && GET_CODE (temp) != MEM)
2781 return temp;
2783 /* Otherwise copy TARGET to a pseudo and return that, unless TARGET
is BLKmode or a hard register, in which case return TARGET itself. */
2784 else if (want_value && GET_MODE (target) != BLKmode
2785 && ! (GET_CODE (target) == REG
2786 && REGNO (target) < FIRST_PSEUDO_REGISTER))
2787 return copy_to_reg (target);
2789 else
2790 return target;
2791 }
2793 /* Store the value of constructor EXP into the rtx TARGET.
2794 TARGET is either a REG or a MEM. */
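/* For instance (illustrative, not from the source; a 32-bit layout and
   the names TARGET, TYPE and ELT_VALUE are assumed): initializing
   `struct { int a, b; }' from a CONSTRUCTOR that lists only `a' first
   clears all 8 bytes, then stores the one given field, roughly:  */
#if 0
  clear_storage (target, int_size_in_bytes (type));
  store_field (target, 32, 0, SImode, elt_value, VOIDmode, 0,
               TYPE_ALIGN (type) / BITS_PER_UNIT,
               int_size_in_bytes (type));
#endif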
2796 static void
2797 store_constructor (exp, target)
2798 tree exp;
2799 rtx target;
2800 {
2801 tree type = TREE_TYPE (exp);
2803 /* We know our target cannot conflict, since safe_from_p has been called. */
2804 #if 0
2805 /* Don't try copying piece by piece into a hard register
2806 since that is vulnerable to being clobbered by EXP.
2807 Instead, construct in a pseudo register and then copy it all. */
2808 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
2809 {
2810 rtx temp = gen_reg_rtx (GET_MODE (target));
2811 store_constructor (exp, temp);
2812 emit_move_insn (target, temp);
2813 return;
2814 }
2815 #endif
2817 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
2818 || TREE_CODE (type) == QUAL_UNION_TYPE)
2819 {
2820 register tree elt;
2822 /* Inform later passes that the whole union value is dead. */
2823 if (TREE_CODE (type) == UNION_TYPE
2824 || TREE_CODE (type) == QUAL_UNION_TYPE)
2825 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2827 /* If we are building a static constructor into a register,
2828 set the initial value as zero so we can fold the value into
2829 a constant. */
2830 else if (GET_CODE (target) == REG && TREE_STATIC (exp))
2831 emit_move_insn (target, const0_rtx);
2833 /* If the constructor has fewer fields than the structure,
2834 clear the whole structure first. */
2835 else if (list_length (CONSTRUCTOR_ELTS (exp))
2836 != list_length (TYPE_FIELDS (type)))
2837 clear_storage (target, int_size_in_bytes (type));
2838 else
2839 /* Inform later passes that the old value is dead. */
2840 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2842 /* Store each element of the constructor into
2843 the corresponding field of TARGET. */
2845 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
2846 {
2847 register tree field = TREE_PURPOSE (elt);
2848 register enum machine_mode mode;
2849 int bitsize;
2850 int bitpos = 0;
2851 int unsignedp;
2852 tree pos, constant = 0, offset = 0;
2853 rtx to_rtx = target;
2855 /* Just ignore missing fields.
2856 We cleared the whole structure, above,
2857 if any fields are missing. */
2858 if (field == 0)
2859 continue;
2861 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
2862 unsignedp = TREE_UNSIGNED (field);
2863 mode = DECL_MODE (field);
2864 if (DECL_BIT_FIELD (field))
2865 mode = VOIDmode;
2867 pos = DECL_FIELD_BITPOS (field);
2868 if (TREE_CODE (pos) == INTEGER_CST)
2869 constant = pos;
2870 else if (TREE_CODE (pos) == PLUS_EXPR
2871 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
2872 constant = TREE_OPERAND (pos, 1), offset = TREE_OPERAND (pos, 0);
2873 else
2874 offset = pos;
2876 if (constant)
2877 bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));
2879 if (offset)
2880 {
2881 rtx offset_rtx;
2883 if (contains_placeholder_p (offset))
2884 offset = build (WITH_RECORD_EXPR, sizetype,
2885 offset, exp);
2887 offset = size_binop (FLOOR_DIV_EXPR, offset,
2888 size_int (BITS_PER_UNIT));
2890 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
2891 if (GET_CODE (to_rtx) != MEM)
2892 abort ();
2894 to_rtx
2895 = change_address (to_rtx, VOIDmode,
2896 gen_rtx (PLUS, Pmode, XEXP (to_rtx, 0),
2897 force_reg (Pmode, offset_rtx)));
2898 }
2900 store_field (to_rtx, bitsize, bitpos, mode, TREE_VALUE (elt),
2901 /* The alignment of TARGET is
2902 at least what its type requires. */
2903 VOIDmode, 0,
2904 TYPE_ALIGN (type) / BITS_PER_UNIT,
2905 int_size_in_bytes (type));
2906 }
2907 }
2908 else if (TREE_CODE (type) == ARRAY_TYPE)
2909 {
2910 register tree elt;
2911 register int i;
2912 tree domain = TYPE_DOMAIN (type);
2913 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
2914 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
2915 tree elttype = TREE_TYPE (type);
2917 /* If the constructor has fewer fields than the structure,
2918 clear the whole structure first. Similarly if this is a
2919 static constructor of a non-BLKmode object. */
2921 if (list_length (CONSTRUCTOR_ELTS (exp)) < maxelt - minelt + 1
2922 || (GET_CODE (target) == REG && TREE_STATIC (exp)))
2923 clear_storage (target, int_size_in_bytes (type));
2924 else
2925 /* Inform later passes that the old value is dead. */
2926 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2928 /* Store each element of the constructor into
2929 the corresponding element of TARGET, determined
2930 by counting the elements. */
2931 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
2932 elt;
2933 elt = TREE_CHAIN (elt), i++)
2934 {
2935 register enum machine_mode mode;
2936 int bitsize;
2937 int bitpos;
2938 int unsignedp;
2939 tree index = TREE_PURPOSE (elt);
2940 rtx xtarget = target;
2942 mode = TYPE_MODE (elttype);
2943 bitsize = GET_MODE_BITSIZE (mode);
2944 unsignedp = TREE_UNSIGNED (elttype);
2946 if (index != 0 && TREE_CODE (index) != INTEGER_CST)
2947 {
2948 /* We don't currently allow variable indices in a
2949 C initializer, but let's try here to support them. */
2950 rtx pos_rtx, addr, xtarget;
2951 tree position;
2953 position = size_binop (MULT_EXPR, index, TYPE_SIZE (elttype));
2954 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
2955 addr = gen_rtx (PLUS, Pmode, XEXP (target, 0), pos_rtx);
2956 xtarget = change_address (target, mode, addr);
2957 store_expr (TREE_VALUE (elt), xtarget, 0);
2958 }
2959 else
2960 {
2961 if (index != 0)
2962 bitpos = ((TREE_INT_CST_LOW (index) - minelt)
2963 * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
2964 else
2965 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
2967 store_field (xtarget, bitsize, bitpos, mode, TREE_VALUE (elt),
2968 /* The alignment of TARGET is
2969 at least what its type requires. */
2970 VOIDmode, 0,
2971 TYPE_ALIGN (type) / BITS_PER_UNIT,
2972 int_size_in_bytes (type));
2973 }
2974 }
2975 }
2977 else
2978 abort ();
2979 }
2981 /* Store the value of EXP (an expression tree)
2982 into a subfield of TARGET which has mode MODE and occupies
2983 BITSIZE bits, starting BITPOS bits from the start of TARGET.
2984 If MODE is VOIDmode, it means that we are storing into a bit-field.
2986 If VALUE_MODE is VOIDmode, return nothing in particular.
2987 UNSIGNEDP is not used in this case.
2989 Otherwise, return an rtx for the value stored. This rtx
2990 has mode VALUE_MODE if that is convenient to do.
2991 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
2993 ALIGN is the alignment that TARGET is known to have, measured in bytes.
2994 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying. */
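/* Example (the field layout is hypothetical; TARGET, EXP and TYPE
   stand for the caller's values): storing EXP into a 5-bit bit-field
   that starts 3 bits into TARGET, discarding the stored value:  */
#if 0
  store_field (target, 5, 3, VOIDmode, exp, VOIDmode, 0,
               TYPE_ALIGN (type) / BITS_PER_UNIT,
               int_size_in_bytes (type));
#endif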
2996 static rtx
2997 store_field (target, bitsize, bitpos, mode, exp, value_mode,
2998 unsignedp, align, total_size)
2999 rtx target;
3000 int bitsize, bitpos;
3001 enum machine_mode mode;
3002 tree exp;
3003 enum machine_mode value_mode;
3004 int unsignedp;
3005 int align;
3006 int total_size;
3007 {
3008 HOST_WIDE_INT width_mask = 0;
3010 if (bitsize < HOST_BITS_PER_WIDE_INT)
3011 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
3013 /* If we are storing into an unaligned field of an aligned union that is
3014 in a register, we may have the mode of TARGET being an integer mode but
3015 MODE == BLKmode. In that case, get an aligned object whose size and
3016 alignment are the same as TARGET and store TARGET into it (we can avoid
3017 the store if the field being stored is the entire width of TARGET). Then
3018 call ourselves recursively to store the field into a BLKmode version of
3019 that object. Finally, load from the object into TARGET. This is not
3020 very efficient in general, but should only be slightly more expensive
3021 than the otherwise-required unaligned accesses. Perhaps this can be
3022 cleaned up later. */
3024 if (mode == BLKmode
3025 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
3026 {
3027 rtx object = assign_stack_temp (GET_MODE (target),
3028 GET_MODE_SIZE (GET_MODE (target)), 0);
3029 rtx blk_object = copy_rtx (object);
3031 PUT_MODE (blk_object, BLKmode);
3033 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
3034 emit_move_insn (object, target);
3036 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
3037 align, total_size);
3039 /* Even though we aren't returning target, we need to
3040 give it the updated value. */
3041 emit_move_insn (target, object);
3043 return blk_object;
3044 }
3046 /* If the structure is in a register or if the component
3047 is a bit field, we cannot use addressing to access it.
3048 Use bit-field techniques or SUBREG to store in it. */
3050 if (mode == VOIDmode
3051 || (mode != BLKmode && ! direct_store[(int) mode])
3052 || GET_CODE (target) == REG
3053 || GET_CODE (target) == SUBREG
3054 /* If the field isn't aligned enough to store as an ordinary memref,
3055 store it as a bit field. */
3056 || (STRICT_ALIGNMENT
3057 && align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode))
3058 || (STRICT_ALIGNMENT && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
3059 {
3060 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
3062 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
3063 MODE. */
3064 if (mode != VOIDmode && mode != BLKmode
3065 && mode != TYPE_MODE (TREE_TYPE (exp)))
3066 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
3068 /* Store the value in the bitfield. */
3069 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
3070 if (value_mode != VOIDmode)
3071 {
3072 /* The caller wants an rtx for the value. */
3073 /* If possible, avoid refetching from the bitfield itself. */
3074 if (width_mask != 0
3075 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
3076 {
3077 tree count;
3078 enum machine_mode tmode;
3080 if (unsignedp)
3081 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
3082 tmode = GET_MODE (temp);
3083 if (tmode == VOIDmode)
3084 tmode = value_mode;
3085 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
3086 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
3087 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
3088 }
3089 return extract_bit_field (target, bitsize, bitpos, unsignedp,
3090 NULL_RTX, value_mode, 0, align,
3091 total_size);
3092 }
3093 return const0_rtx;
3094 }
3095 else
3096 {
3097 rtx addr = XEXP (target, 0);
3098 rtx to_rtx;
3100 /* If a value is wanted, it must be the lhs;
3101 so make the address stable for multiple use. */
3103 if (value_mode != VOIDmode && GET_CODE (addr) != REG
3104 && ! CONSTANT_ADDRESS_P (addr)
3105 /* A frame-pointer reference is already stable. */
3106 && ! (GET_CODE (addr) == PLUS
3107 && GET_CODE (XEXP (addr, 1)) == CONST_INT
3108 && (XEXP (addr, 0) == virtual_incoming_args_rtx
3109 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
3110 addr = copy_to_reg (addr);
3112 /* Now build a reference to just the desired component. */
3114 to_rtx = change_address (target, mode,
3115 plus_constant (addr, (bitpos / BITS_PER_UNIT)));
3116 MEM_IN_STRUCT_P (to_rtx) = 1;
3118 return store_expr (exp, to_rtx, value_mode != VOIDmode);
3119 }
3120 }
3122 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
3123 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
3124 ARRAY_REFs and find the ultimate containing object, which we return.
3126 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
3127 bit position, and *PUNSIGNEDP to the signedness of the field.
3128 If the position of the field is variable, we store a tree
3129 giving the variable offset (in units) in *POFFSET.
3130 This offset is in addition to the bit position.
3131 If the position is not variable, we store 0 in *POFFSET.
3133 If any of the extraction expressions is volatile,
3134 we store 1 in *PVOLATILEP. Otherwise we don't change that.
3136 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
3137 is a mode that can be used to access the field. In that case, *PBITSIZE
3138 is redundant.
3140 If the field describes a variable-sized object, *PMODE is set to
3141 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
3142 this case, but the address of the object can be found. */
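/* A typical call, as made from expand_assignment above (a sketch; the
   particular field layout described in the comment is hypothetical):  */
#if 0
  int bitsize, bitpos, unsignedp, volatilep = 0;
  tree offset;
  enum machine_mode mode1;
  tree inner = get_inner_reference (to, &bitsize, &bitpos, &offset,
                                    &mode1, &unsignedp, &volatilep);
  /* For `s.f' where F is a 5-bit bit-field at bit 35 of S, INNER is
     `s', BITSIZE is 5, BITPOS is 35, OFFSET is 0, and MODE1 is
     VOIDmode since F is a bit-field.  */
#endif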
3144 tree
3145 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
3146 punsignedp, pvolatilep)
3147 tree exp;
3148 int *pbitsize;
3149 int *pbitpos;
3150 tree *poffset;
3151 enum machine_mode *pmode;
3152 int *punsignedp;
3153 int *pvolatilep;
3154 {
3155 tree orig_exp = exp;
3156 tree size_tree = 0;
3157 enum machine_mode mode = VOIDmode;
3158 tree offset = integer_zero_node;
3160 if (TREE_CODE (exp) == COMPONENT_REF)
3161 {
3162 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
3163 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
3164 mode = DECL_MODE (TREE_OPERAND (exp, 1));
3165 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
3166 }
3167 else if (TREE_CODE (exp) == BIT_FIELD_REF)
3168 {
3169 size_tree = TREE_OPERAND (exp, 1);
3170 *punsignedp = TREE_UNSIGNED (exp);
3171 }
3172 else
3173 {
3174 mode = TYPE_MODE (TREE_TYPE (exp));
3175 *pbitsize = GET_MODE_BITSIZE (mode);
3176 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3177 }
3179 if (size_tree)
3180 {
3181 if (TREE_CODE (size_tree) != INTEGER_CST)
3182 mode = BLKmode, *pbitsize = -1;
3183 else
3184 *pbitsize = TREE_INT_CST_LOW (size_tree);
3185 }
3187 /* Compute cumulative bit-offset for nested component-refs and array-refs,
3188 and find the ultimate containing object. */
3190 *pbitpos = 0;
3192 while (1)
3193 {
3194 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
3195 {
3196 tree pos = (TREE_CODE (exp) == COMPONENT_REF
3197 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
3198 : TREE_OPERAND (exp, 2));
3200 /* If this field hasn't been filled in yet, don't go
3201 past it. This should only happen when folding expressions
3202 made during type construction. */
3203 if (pos == 0)
3204 break;
3206 if (TREE_CODE (pos) == PLUS_EXPR)
3207 {
3208 tree constant, var;
3209 if (TREE_CODE (TREE_OPERAND (pos, 0)) == INTEGER_CST)
3210 {
3211 constant = TREE_OPERAND (pos, 0);
3212 var = TREE_OPERAND (pos, 1);
3213 }
3214 else if (TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
3215 {
3216 constant = TREE_OPERAND (pos, 1);
3217 var = TREE_OPERAND (pos, 0);
3218 }
3219 else
3220 abort ();
3222 *pbitpos += TREE_INT_CST_LOW (constant);
3223 offset = size_binop (PLUS_EXPR, offset,
3224 size_binop (FLOOR_DIV_EXPR, var,
3225 size_int (BITS_PER_UNIT)));
3226 }
3227 else if (TREE_CODE (pos) == INTEGER_CST)
3228 *pbitpos += TREE_INT_CST_LOW (pos);
3229 else
3230 {
3231 /* Assume here that the offset is a multiple of a unit.
3232 If not, there should be an explicitly added constant. */
3233 offset = size_binop (PLUS_EXPR, offset,
3234 size_binop (FLOOR_DIV_EXPR, pos,
3235 size_int (BITS_PER_UNIT)));
3236 }
3237 }
3239 else if (TREE_CODE (exp) == ARRAY_REF)
3240 {
3241 /* This code is based on the code in case ARRAY_REF in expand_expr
3242 below. We assume here that the size of an array element is
3243 always an integral multiple of BITS_PER_UNIT. */
3245 tree index = TREE_OPERAND (exp, 1);
3246 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
3247 tree low_bound
3248 = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
3249 tree index_type = TREE_TYPE (index);
3251 if (! integer_zerop (low_bound))
3252 index = fold (build (MINUS_EXPR, index_type, index, low_bound));
3254 if (TYPE_PRECISION (index_type) != POINTER_SIZE)
3255 {
3256 index = convert (type_for_size (POINTER_SIZE, 0), index);
3257 index_type = TREE_TYPE (index);
3258 }
3260 index = fold (build (MULT_EXPR, index_type, index,
3261 TYPE_SIZE (TREE_TYPE (exp))));
3263 if (TREE_CODE (index) == INTEGER_CST
3264 && TREE_INT_CST_HIGH (index) == 0)
3265 *pbitpos += TREE_INT_CST_LOW (index);
3266 else
3267 offset = size_binop (PLUS_EXPR, offset,
3268 size_binop (FLOOR_DIV_EXPR, index,
3269 size_int (BITS_PER_UNIT)));
3270 }
3271 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
3272 && ! ((TREE_CODE (exp) == NOP_EXPR
3273 || TREE_CODE (exp) == CONVERT_EXPR)
3274 && (TYPE_MODE (TREE_TYPE (exp))
3275 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
3276 break;
3278 /* If any reference in the chain is volatile, the effect is volatile. */
3279 if (TREE_THIS_VOLATILE (exp))
3280 *pvolatilep = 1;
3281 exp = TREE_OPERAND (exp, 0);
3282 }
3284 /* If this was a bit-field, see if there is a mode that allows direct
3285 access in case EXP is in memory. */
3286 if (mode == VOIDmode && *pbitsize != 0 && *pbitpos % *pbitsize == 0)
3287 {
3288 mode = mode_for_size (*pbitsize, MODE_INT, 0);
3289 if (mode == BLKmode)
3290 mode = VOIDmode;
3291 }
3293 if (integer_zerop (offset))
3294 offset = 0;
3296 if (offset != 0 && contains_placeholder_p (offset))
3297 offset = build (WITH_RECORD_EXPR, sizetype, offset, orig_exp);
3299 *pmode = mode;
3300 *poffset = offset;
3301 return exp;
3302 }
3304 /* Given an rtx VALUE that may contain additions and multiplications,
3305 return an equivalent value that just refers to a register or memory.
3306 This is done by generating instructions to perform the arithmetic
3307 and returning a pseudo-register containing the value.
3309 The returned value may be a REG, SUBREG, MEM or constant. */
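/* Sketch (SOME_REG is hypothetical): forcing a sum emits an add insn
   and hands back a register or equivalent holding the result.  */
#if 0
  rtx val = force_operand (gen_rtx (PLUS, SImode, some_reg,
                                    GEN_INT (4)), NULL_RTX);
#endif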
3311 rtx
3312 force_operand (value, target)
3313 rtx value, target;
3314 {
3315 register optab binoptab = 0;
3316 /* Use a temporary to force order of execution of calls to
3317 `force_operand'. */
3318 rtx tmp;
3319 register rtx op2;
3320 /* Use subtarget as the target for operand 0 of a binary operation. */
3321 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
3323 if (GET_CODE (value) == PLUS)
3324 binoptab = add_optab;
3325 else if (GET_CODE (value) == MINUS)
3326 binoptab = sub_optab;
3327 else if (GET_CODE (value) == MULT)
3328 {
3329 op2 = XEXP (value, 1);
3330 if (!CONSTANT_P (op2)
3331 && !(GET_CODE (op2) == REG && op2 != subtarget))
3332 subtarget = 0;
3333 tmp = force_operand (XEXP (value, 0), subtarget);
3334 return expand_mult (GET_MODE (value), tmp,
3335 force_operand (op2, NULL_RTX),
3336 target, 0);
3337 }
3339 if (binoptab)
3340 {
3341 op2 = XEXP (value, 1);
3342 if (!CONSTANT_P (op2)
3343 && !(GET_CODE (op2) == REG && op2 != subtarget))
3344 subtarget = 0;
3345 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
3346 {
3347 binoptab = add_optab;
3348 op2 = negate_rtx (GET_MODE (value), op2);
3349 }
3351 /* Check for an addition with OP2 a constant integer and our first
3352 operand a PLUS of a virtual register and something else. In that
3353 case, we want to emit the sum of the virtual register and the
3354 constant first and then add the other value. This allows virtual
3355 register instantiation to simply modify the constant rather than
3356 creating another one around this addition. */
3357 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
3358 && GET_CODE (XEXP (value, 0)) == PLUS
3359 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
3360 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
3361 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
3362 {
3363 rtx temp = expand_binop (GET_MODE (value), binoptab,
3364 XEXP (XEXP (value, 0), 0), op2,
3365 subtarget, 0, OPTAB_LIB_WIDEN);
3366 return expand_binop (GET_MODE (value), binoptab, temp,
3367 force_operand (XEXP (XEXP (value, 0), 1), 0),
3368 target, 0, OPTAB_LIB_WIDEN);
3369 }
3371 tmp = force_operand (XEXP (value, 0), subtarget);
3372 return expand_binop (GET_MODE (value), binoptab, tmp,
3373 force_operand (op2, NULL_RTX),
3374 target, 0, OPTAB_LIB_WIDEN);
3375 /* We give UNSIGNEDP = 0 to expand_binop
3376 because the only operations we are expanding here are signed ones. */
3377 }
3378 return value;
3379 }
3381 /* Subroutine of expand_expr:
3382 save the non-copied parts (LIST) of an expr (LHS), and return a list
3383 which can restore these values to their previous values,
3384 should something modify their storage. */
3386 static tree
3387 save_noncopied_parts (lhs, list)
3388 tree lhs;
3389 tree list;
3390 {
3391 tree tail;
3392 tree parts = 0;
3394 for (tail = list; tail; tail = TREE_CHAIN (tail))
3395 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3396 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
3397 else
3398 {
3399 tree part = TREE_VALUE (tail);
3400 tree part_type = TREE_TYPE (part);
3401 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
3402 rtx target = assign_stack_temp (TYPE_MODE (part_type),
3403 int_size_in_bytes (part_type), 0);
3404 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
3405 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
3406 parts = tree_cons (to_be_saved,
3407 build (RTL_EXPR, part_type, NULL_TREE,
3408 (tree) target),
3409 parts);
3410 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
3411 }
3412 return parts;
3413 }
3415 /* Subroutine of expand_expr:
3416 record the non-copied parts (LIST) of an expr (LHS), and return a list
3417 which specifies the initial values of these parts. */
3419 static tree
3420 init_noncopied_parts (lhs, list)
3421 tree lhs;
3422 tree list;
3423 {
3424 tree tail;
3425 tree parts = 0;
3427 for (tail = list; tail; tail = TREE_CHAIN (tail))
3428 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3429 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
3430 else
3431 {
3432 tree part = TREE_VALUE (tail);
3433 tree part_type = TREE_TYPE (part);
3434 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
3435 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
3436 }
3437 return parts;
3438 }
3440 /* Subroutine of expand_expr: return nonzero iff there is no way that
3441 EXP can reference X, which is being modified. */
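/* The typical use (an illustrative sketch, using the caller's own
   names): only compute one subexpression directly into TARGET when
   the other operand cannot refer to TARGET.  */
#if 0
  if (safe_from_p (target, TREE_OPERAND (exp, 1)))
    op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
  else
    op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
#endif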
3443 static int
3444 safe_from_p (x, exp)
3445 rtx x;
3446 tree exp;
3447 {
3448 rtx exp_rtl = 0;
3449 int i, nops;
3451 if (x == 0)
3452 return 1;
3454 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
3455 find the underlying pseudo. */
3456 if (GET_CODE (x) == SUBREG)
3457 {
3458 x = SUBREG_REG (x);
3459 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3460 return 0;
3461 }
3463 /* If X is a location in the outgoing argument area, it is always safe. */
3464 if (GET_CODE (x) == MEM
3465 && (XEXP (x, 0) == virtual_outgoing_args_rtx
3466 || (GET_CODE (XEXP (x, 0)) == PLUS
3467 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
3468 return 1;
3470 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
3471 {
3472 case 'd':
3473 exp_rtl = DECL_RTL (exp);
3474 break;
3476 case 'c':
3477 return 1;
3479 case 'x':
3480 if (TREE_CODE (exp) == TREE_LIST)
3481 return ((TREE_VALUE (exp) == 0
3482 || safe_from_p (x, TREE_VALUE (exp)))
3483 && (TREE_CHAIN (exp) == 0
3484 || safe_from_p (x, TREE_CHAIN (exp))));
3485 else
3486 return 0;
3488 case '1':
3489 return safe_from_p (x, TREE_OPERAND (exp, 0));
3491 case '2':
3492 case '<':
3493 return (safe_from_p (x, TREE_OPERAND (exp, 0))
3494 && safe_from_p (x, TREE_OPERAND (exp, 1)));
3496 case 'e':
3497 case 'r':
3498 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
3499 the expression. If it is set, we conflict iff we are that rtx or
3500 both are in memory. Otherwise, we check all operands of the
3501 expression recursively. */
3503 switch (TREE_CODE (exp))
3504 {
3505 case ADDR_EXPR:
3506 return (staticp (TREE_OPERAND (exp, 0))
3507 || safe_from_p (x, TREE_OPERAND (exp, 0)));
3509 case INDIRECT_REF:
3510 if (GET_CODE (x) == MEM)
3511 return 0;
3512 break;
3514 case CALL_EXPR:
3515 exp_rtl = CALL_EXPR_RTL (exp);
3516 if (exp_rtl == 0)
3517 {
3518 /* Assume that the call will clobber all hard registers and
3519 all of memory. */
3520 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3521 || GET_CODE (x) == MEM)
3522 return 0;
3523 }
3525 break;
3527 case RTL_EXPR:
3528 exp_rtl = RTL_EXPR_RTL (exp);
3529 if (exp_rtl == 0)
3530 /* We don't know what this can modify. */
3531 return 0;
3533 break;
3535 case WITH_CLEANUP_EXPR:
3536 exp_rtl = RTL_EXPR_RTL (exp);
3537 break;
3539 case CLEANUP_POINT_EXPR:
3540 return safe_from_p (x, TREE_OPERAND (exp, 0));
3542 case SAVE_EXPR:
3543 exp_rtl = SAVE_EXPR_RTL (exp);
3544 break;
3546 case BIND_EXPR:
3547 /* The only operand we look at is operand 1. The rest aren't
3548 part of the expression. */
3549 return safe_from_p (x, TREE_OPERAND (exp, 1));
3551 case METHOD_CALL_EXPR:
3552 /* This takes a rtx argument, but shouldn't appear here. */
3553 abort ();
3554 }
3556 /* If we have an rtx, we do not need to scan our operands. */
3557 if (exp_rtl)
3558 break;
3560 nops = tree_code_length[(int) TREE_CODE (exp)];
3561 for (i = 0; i < nops; i++)
3562 if (TREE_OPERAND (exp, i) != 0
3563 && ! safe_from_p (x, TREE_OPERAND (exp, i)))
3564 return 0;
3565 }
3567 /* If we have an rtl, find any enclosed object. Then see if we conflict
3568 with it. */
3569 if (exp_rtl)
3570 {
3571 if (GET_CODE (exp_rtl) == SUBREG)
3572 {
3573 exp_rtl = SUBREG_REG (exp_rtl);
3574 if (GET_CODE (exp_rtl) == REG
3575 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
3576 return 0;
3577 }
3579 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
3580 are memory and EXP is not readonly. */
3581 return ! (rtx_equal_p (x, exp_rtl)
3582 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
3583 && ! TREE_READONLY (exp)));
3584 }
3586 /* If we reach here, it is safe. */
3587 return 1;
3588 }
3590 /* Subroutine of expand_expr: return nonzero iff EXP is an
3591 expression whose type is statically determinable. */
3593 static int
3594 fixed_type_p (exp)
3595 tree exp;
3596 {
3597 if (TREE_CODE (exp) == PARM_DECL
3598 || TREE_CODE (exp) == VAR_DECL
3599 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
3600 || TREE_CODE (exp) == COMPONENT_REF
3601 || TREE_CODE (exp) == ARRAY_REF)
3602 return 1;
3603 return 0;
3604 }
3606 /* expand_expr: generate code for computing expression EXP.
3607 An rtx for the computed value is returned. The value is never null.
3608 In the case of a void EXP, const0_rtx is returned.
3610 The value may be stored in TARGET if TARGET is nonzero.
3611 TARGET is just a suggestion; callers must assume that
3612 the rtx returned may not be the same as TARGET.
3614 If TARGET is CONST0_RTX, it means that the value will be ignored.
3616 If TMODE is not VOIDmode, it suggests generating the
3617 result in mode TMODE. But this is done only when convenient.
3618 Otherwise, TMODE is ignored and the value is generated in its natural mode.
3619 TMODE is just a suggestion; callers must assume that
3620 the rtx returned may not have mode TMODE.
3622 Note that TARGET may have neither TMODE nor MODE. In that case, it
3623 probably will not be used.
3625 If MODIFIER is EXPAND_SUM then when EXP is an addition
3626 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
3627 or a nest of (PLUS ...) and (MINUS ...) where the terms are
3628 products as above, or REG or MEM, or constant.
3629 Ordinarily in such cases we would output mul or add instructions
3630 and then return a pseudo reg containing the sum.
3632 EXPAND_INITIALIZER is much like EXPAND_SUM except that
3633 it also marks a label as absolutely required (it can't be dead).
3634 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
3635 This is used for outputting expressions used in initializers.
3637 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
3638 with a constant address even if that address is not normally legitimate.
3639 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
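/* For example (illustrative; ADDR_TREE is hypothetical): with
   EXPAND_SUM, the address `&a[i]' may come back as a sum such as
   (plus (reg) (mult (reg) (const_int 4)))
   rather than being reduced to a single pseudo register.  */
#if 0
  rtx addr = expand_expr (addr_tree, NULL_RTX, VOIDmode, EXPAND_SUM);
#endif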
3641 rtx
3642 expand_expr (exp, target, tmode, modifier)
3643 register tree exp;
3644 rtx target;
3645 enum machine_mode tmode;
3646 enum expand_modifier modifier;
3647 {
3648 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace.
3649 This is static so it will be accessible to our recursive callees. */
3650 static tree placeholder_list = 0;
3651 register rtx op0, op1, temp;
3652 tree type = TREE_TYPE (exp);
3653 int unsignedp = TREE_UNSIGNED (type);
3654 register enum machine_mode mode = TYPE_MODE (type);
3655 register enum tree_code code = TREE_CODE (exp);
3656 optab this_optab;
3657 /* Use subtarget as the target for operand 0 of a binary operation. */
3658 rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
3659 rtx original_target = target;
3660 /* Maybe defer this until sure not doing bytecode? */
3661 int ignore = (target == const0_rtx
3662 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
3663 || code == CONVERT_EXPR || code == REFERENCE_EXPR
3664 || code == COND_EXPR)
3665 && TREE_CODE (type) == VOID_TYPE));
3666 tree context;
3669 if (output_bytecode && modifier != EXPAND_INITIALIZER)
3670 {
3671 bc_expand_expr (exp);
3672 return NULL;
3673 }
3675 /* Don't use hard regs as subtargets, because the combiner
3676 can only handle pseudo regs. */
3677 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
3678 subtarget = 0;
3679 /* Avoid subtargets inside loops,
3680 since they hide some invariant expressions. */
3681 if (preserve_subexpressions_p ())
3682 subtarget = 0;
3684 /* If we are going to ignore this result, we need only do something
3685 if there is a side-effect somewhere in the expression. If there
3686 is, short-circuit the most common cases here. Note that we must
3687 not call expand_expr with anything but const0_rtx in case this
3688 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
3690 if (ignore)
3691 {
3692 if (! TREE_SIDE_EFFECTS (exp))
3693 return const0_rtx;
3695 /* Ensure we reference a volatile object even if value is ignored. */
3696 if (TREE_THIS_VOLATILE (exp)
3697 && TREE_CODE (exp) != FUNCTION_DECL
3698 && mode != VOIDmode && mode != BLKmode)
3699 {
3700 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
3701 if (GET_CODE (temp) == MEM)
3702 temp = copy_to_reg (temp);
3703 return const0_rtx;
3704 }
3706 if (TREE_CODE_CLASS (code) == '1')
3707 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
3708 VOIDmode, modifier);
3709 else if (TREE_CODE_CLASS (code) == '2'
3710 || TREE_CODE_CLASS (code) == '<')
3711 {
3712 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
3713 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
3714 return const0_rtx;
3715 }
3716 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
3717 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
3718 /* If the second operand has no side effects, just evaluate
3719 the first. */
3720 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
3721 VOIDmode, modifier);
3723 target = 0;
3724 }
3726 /* If will do cse, generate all results into pseudo registers
3727 since 1) that allows cse to find more things
3728 and 2) otherwise cse could produce an insn the machine
3729 cannot support. */
3731 if (! cse_not_expected && mode != BLKmode && target
3732 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
3733 target = subtarget;
3735 switch (code)
3736 {
3737 case LABEL_DECL:
3738 {
3739 tree function = decl_function_context (exp);
3740 /* Handle using a label in a containing function. */
3741 if (function != current_function_decl && function != 0)
3743 struct function *p = find_function_data (function);
3744 /* Allocate in the memory associated with the function
3745 that the label is in. */
3746 push_obstacks (p->function_obstack,
3747 p->function_maybepermanent_obstack);
3749 p->forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
3750 label_rtx (exp), p->forced_labels);
3751 pop_obstacks ();
3753 else if (modifier == EXPAND_INITIALIZER)
3754 forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
3755 label_rtx (exp), forced_labels);
3756 temp = gen_rtx (MEM, FUNCTION_MODE,
3757 gen_rtx (LABEL_REF, Pmode, label_rtx (exp)));
3758 if (function != current_function_decl && function != 0)
3759 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
3760 return temp;
3763 case PARM_DECL:
3764 if (DECL_RTL (exp) == 0)
3766 error_with_decl (exp, "prior parameter's size depends on `%s'");
3767 return CONST0_RTX (mode);
3770 /* ... fall through ... */
3772 case VAR_DECL:
3773 /* If a static var's type was incomplete when the decl was written,
3774 but the type is complete now, lay out the decl now. */
3775 if (DECL_SIZE (exp) == 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
3776 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
3778 push_obstacks_nochange ();
3779 end_temporary_allocation ();
3780 layout_decl (exp, 0);
3781 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
3782 pop_obstacks ();
3785 /* ... fall through ... */
3787 case FUNCTION_DECL:
3788 case RESULT_DECL:
3789 if (DECL_RTL (exp) == 0)
3790 abort ();
3792 /* Ensure variable marked as used even if it doesn't go through
3793 a parser. If it hasn't been used yet, write out an external
3794 definition. */
3795 if (! TREE_USED (exp))
3797 assemble_external (exp);
3798 TREE_USED (exp) = 1;
3801 /* Handle variables inherited from containing functions. */
3802 context = decl_function_context (exp);
3804 /* We treat inline_function_decl as an alias for the current function
3805 because that is the inline function whose vars, types, etc.
3806 are being merged into the current function.
3807 See expand_inline_function. */
3809 if (context != 0 && context != current_function_decl
3810 && context != inline_function_decl
3811 /* If var is static, we don't need a static chain to access it. */
3812 && ! (GET_CODE (DECL_RTL (exp)) == MEM
3813 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
3815 rtx addr;
3817 /* Mark as non-local and addressable. */
3818 DECL_NONLOCAL (exp) = 1;
3819 mark_addressable (exp);
3820 if (GET_CODE (DECL_RTL (exp)) != MEM)
3821 abort ();
3822 addr = XEXP (DECL_RTL (exp), 0);
3823 if (GET_CODE (addr) == MEM)
3824 addr = gen_rtx (MEM, Pmode,
3825 fix_lexical_addr (XEXP (addr, 0), exp));
3826 else
3827 addr = fix_lexical_addr (addr, exp);
3828 return change_address (DECL_RTL (exp), mode, addr);
3831 /* This is the case of an array whose size is to be determined
3832 from its initializer, while the initializer is still being parsed.
3833 See expand_decl. */
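/* In that case the size is not yet known, so DECL_RTL is a MEM whose
   address is held in a register that expand_decl set up; just keep
   referring to that register.  */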
3835 if (GET_CODE (DECL_RTL (exp)) == MEM
3836 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
3837 return change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
3838 XEXP (DECL_RTL (exp), 0));
3840 /* If DECL_RTL is memory, we are in the normal case and either
3841 the address is not valid or it is not a register and -fforce-addr
3842 is specified, get the address into a register. */
3844 if (GET_CODE (DECL_RTL (exp)) == MEM
3845 && modifier != EXPAND_CONST_ADDRESS
3846 && modifier != EXPAND_SUM
3847 && modifier != EXPAND_INITIALIZER
3848 && (! memory_address_p (DECL_MODE (exp), XEXP (DECL_RTL (exp), 0))
3849 || (flag_force_addr
3850 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
3851 return change_address (DECL_RTL (exp), VOIDmode,
3852 copy_rtx (XEXP (DECL_RTL (exp), 0)));
3854 /* If the mode of DECL_RTL does not match that of the decl, it
3855 must be a promoted value. We return a SUBREG of the wanted mode,
3856 but mark it so that we know that it was already extended. */
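/* For example, on a machine whose PROMOTE_MODE widens HImode to
   SImode, a `short' variable may live in an SImode pseudo while MODE
   is HImode; the marked SUBREG tells later code that the value has
   already been extended.  */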
3858 if (GET_CODE (DECL_RTL (exp)) == REG
3859 && GET_MODE (DECL_RTL (exp)) != mode)
3861 /* Get the signedness used for this variable. Ensure we get the
3862 same mode we got when the variable was declared. */
3863 if (GET_MODE (DECL_RTL (exp))
3864 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
3865 abort ();
3867 temp = gen_rtx (SUBREG, mode, DECL_RTL (exp), 0);
3868 SUBREG_PROMOTED_VAR_P (temp) = 1;
3869 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
3870 return temp;
3873 return DECL_RTL (exp);
3875 case INTEGER_CST:
3876 return immed_double_const (TREE_INT_CST_LOW (exp),
3877 TREE_INT_CST_HIGH (exp),
3878 mode);
3880 case CONST_DECL:
3881 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
3883 case REAL_CST:
3884 /* If optimized, generate immediate CONST_DOUBLE
3885 which will be turned into memory by reload if necessary.
3887 We used to force a register so that loop.c could see it. But
3888 this does not allow gen_* patterns to perform optimizations with
3889 the constants. It also produces two insns in cases like "x = 1.0;".
3890 On most machines, floating-point constants are not permitted in
3891 many insns, so we'd end up copying it to a register in any case.
3893 Now, we do the copying in expand_binop, if appropriate. */
3894 return immed_real_const (exp);
3896 case COMPLEX_CST:
3897 case STRING_CST:
3898 if (! TREE_CST_RTL (exp))
3899 output_constant_def (exp);
3901 /* TREE_CST_RTL probably contains a constant address.
3902 On RISC machines where a constant address isn't valid,
3903 make some insns to get that address into a register. */
3904 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
3905 && modifier != EXPAND_CONST_ADDRESS
3906 && modifier != EXPAND_INITIALIZER
3907 && modifier != EXPAND_SUM
3908 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
3909 || (flag_force_addr
3910 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
3911 return change_address (TREE_CST_RTL (exp), VOIDmode,
3912 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
3913 return TREE_CST_RTL (exp);
3915 case SAVE_EXPR:
3916 context = decl_function_context (exp);
3918 /* We treat inline_function_decl as an alias for the current function
3919 because that is the inline function whose vars, types, etc.
3920 are being merged into the current function.
3921 See expand_inline_function. */
3922 if (context == current_function_decl || context == inline_function_decl)
3923 context = 0;
3925 /* If this is non-local, handle it. */
3926 if (context)
3928 temp = SAVE_EXPR_RTL (exp);
3929 if (temp && GET_CODE (temp) == REG)
3931 put_var_into_stack (exp);
3932 temp = SAVE_EXPR_RTL (exp);
3934 if (temp == 0 || GET_CODE (temp) != MEM)
3935 abort ();
3936 return change_address (temp, mode,
3937 fix_lexical_addr (XEXP (temp, 0), exp));
3939 if (SAVE_EXPR_RTL (exp) == 0)
3941 if (mode == BLKmode)
3943 temp
3944 = assign_stack_temp (mode, int_size_in_bytes (type), 0);
3945 MEM_IN_STRUCT_P (temp) = AGGREGATE_TYPE_P (type);
3947 else
3948 temp = gen_reg_rtx (promote_mode (type, mode, &unsignedp, 0));
3950 SAVE_EXPR_RTL (exp) = temp;
3951 if (!optimize && GET_CODE (temp) == REG)
3952 save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, temp,
3953 save_expr_regs);
3955 /* If the mode of TEMP does not match that of the expression, it
3956 must be a promoted value. We pass store_expr a SUBREG of the
3957 wanted mode but mark it so that we know that it was already
3958 extended. Note that `unsignedp' was modified above in
3959 this case. */
3961 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
3963 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
3964 SUBREG_PROMOTED_VAR_P (temp) = 1;
3965 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
3968 store_expr (TREE_OPERAND (exp, 0), temp, 0);
3971 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
3972 must be a promoted value. We return a SUBREG of the wanted mode,
3973 but mark it so that we know that it was already extended. */
3975 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
3976 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
3978 /* Compute the signedness and make the proper SUBREG. */
3979 promote_mode (type, mode, &unsignedp, 0);
3980 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
3981 SUBREG_PROMOTED_VAR_P (temp) = 1;
3982 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
3983 return temp;
3986 return SAVE_EXPR_RTL (exp);
3988 case PLACEHOLDER_EXPR:
3989 /* If there is an object at the head of the placeholder list,
3990 see if some object in its references is of type TYPE. For
3991 further information, see tree.def. */
3992 if (placeholder_list)
3994 tree object;
3995 tree old_list = placeholder_list;
3997 for (object = TREE_PURPOSE (placeholder_list);
3998 TREE_TYPE (object) != type
3999 && (TREE_CODE_CLASS (TREE_CODE (object)) == 'r'
4000 || TREE_CODE_CLASS (TREE_CODE (object)) == '1'
4001 || TREE_CODE_CLASS (TREE_CODE (object)) == '2'
4002 || TREE_CODE_CLASS (TREE_CODE (object)) == 'e');
4003 object = TREE_OPERAND (object, 0))
4006 if (object && TREE_TYPE (object) == type)
4008 /* Expand this object skipping the list entries before
4009 it was found in case it is also a PLACEHOLDER_EXPR.
4010 In that case, we want to translate it using subsequent
4011 entries. */
4012 placeholder_list = TREE_CHAIN (placeholder_list);
4013 temp = expand_expr (object, original_target, tmode, modifier);
4014 placeholder_list = old_list;
4015 return temp;
4019 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
4020 abort ();
4022 case WITH_RECORD_EXPR:
4023 /* Put the object on the placeholder list, expand our first operand,
4024 and pop the list. */
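/* This is the counterpart of PLACEHOLDER_EXPR above: e.g., for a
   record type whose size depends on a field of the record itself,
   the size tree contains a PLACEHOLDER_EXPR, and the WITH_RECORD_EXPR
   wrapped around the reference supplies the record object that the
   placeholder stands for.  */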
4025 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
4026 placeholder_list);
4027 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
4028 tmode, modifier);
4029 placeholder_list = TREE_CHAIN (placeholder_list);
4030 return target;
4032 case EXIT_EXPR:
4033 expand_exit_loop_if_false (NULL_PTR,
4034 invert_truthvalue (TREE_OPERAND (exp, 0)));
4035 return const0_rtx;
4037 case LOOP_EXPR:
4038 push_temp_slots ();
4039 expand_start_loop (1);
4040 expand_expr_stmt (TREE_OPERAND (exp, 0));
4041 expand_end_loop ();
4042 pop_temp_slots ();
4044 return const0_rtx;
4046 case BIND_EXPR:
4048 tree vars = TREE_OPERAND (exp, 0);
4049 int vars_need_expansion = 0;
4051 /* Need to open a binding contour here because
4052 if there are any cleanups they must be contained here. */
4053 expand_start_bindings (0);
4055 /* Mark the corresponding BLOCK for output in its proper place. */
4056 if (TREE_OPERAND (exp, 2) != 0
4057 && ! TREE_USED (TREE_OPERAND (exp, 2)))
4058 insert_block (TREE_OPERAND (exp, 2));
4060 /* If VARS have not yet been expanded, expand them now. */
4061 while (vars)
4063 if (DECL_RTL (vars) == 0)
4065 vars_need_expansion = 1;
4066 expand_decl (vars);
4068 expand_decl_init (vars);
4069 vars = TREE_CHAIN (vars);
4072 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
4074 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
4076 return temp;
4079 case RTL_EXPR:
4080 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
4081 abort ();
4082 emit_insns (RTL_EXPR_SEQUENCE (exp));
4083 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
4084 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
4085 free_temps_for_rtl_expr (exp);
4086 return RTL_EXPR_RTL (exp);
4088 case CONSTRUCTOR:
4089 /* If we don't need the result, just ensure we evaluate any
4090 subexpressions. */
4091 if (ignore)
4093 tree elt;
4094 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4095 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
4096 return const0_rtx;
4099 /* All elts simple constants => refer to a constant in memory. But
4100 if this is a non-BLKmode mode, let it store a field at a time
4101 since that should make a CONST_INT or CONST_DOUBLE when we
4102 fold. Likewise, if we have a target we can use, it is best to
4103 store directly into the target unless the type is large enough
4104 that memcpy will be used. If we are making an initializer and
4105 all operands are constant, put it in memory as well. */
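/* E.g., a static, entirely-constant aggregate initializer is emitted
   once as a constant in memory and referenced from there, while a
   small non-BLKmode constructor is instead built up field by field,
   presumably in a register.  */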
4106 else if ((TREE_STATIC (exp)
4107 && ((mode == BLKmode
4108 && ! (target != 0 && safe_from_p (target, exp)))
4109 || TREE_ADDRESSABLE (exp)
4110 || (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
4111 && (move_by_pieces_ninsns
4112 (TREE_INT_CST_LOW (TYPE_SIZE (type)),
4113 TYPE_ALIGN (type))
4114 > MOVE_RATIO))))
4115 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
4117 rtx constructor = output_constant_def (exp);
4118 if (modifier != EXPAND_CONST_ADDRESS
4119 && modifier != EXPAND_INITIALIZER
4120 && modifier != EXPAND_SUM
4121 && (! memory_address_p (GET_MODE (constructor),
4122 XEXP (constructor, 0))
4123 || (flag_force_addr
4124 && GET_CODE (XEXP (constructor, 0)) != REG)))
4125 constructor = change_address (constructor, VOIDmode,
4126 XEXP (constructor, 0));
4127 return constructor;
4130 else
4132 if (target == 0 || ! safe_from_p (target, exp))
4134 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
4135 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
4136 else
4138 target
4139 = assign_stack_temp (mode, int_size_in_bytes (type), 0);
4140 if (AGGREGATE_TYPE_P (type))
4141 MEM_IN_STRUCT_P (target) = 1;
4144 store_constructor (exp, target);
4145 return target;
4148 case INDIRECT_REF:
4150 tree exp1 = TREE_OPERAND (exp, 0);
4151 tree exp2;
4153 /* A SAVE_EXPR as the address in an INDIRECT_EXPR is generated
4154 for *PTR += ANYTHING where PTR is put inside the SAVE_EXPR.
4155 This code has the same general effect as simply doing
4156 expand_expr on the save expr, except that the expression PTR
4157 is computed for use as a memory address. This means different
4158 code, suitable for indexing, may be generated. */
4159 if (TREE_CODE (exp1) == SAVE_EXPR
4160 && SAVE_EXPR_RTL (exp1) == 0
4161 && TREE_CODE (exp2 = TREE_OPERAND (exp1, 0)) != ERROR_MARK
4162 && TYPE_MODE (TREE_TYPE (exp1)) == Pmode
4163 && TYPE_MODE (TREE_TYPE (exp2)) == Pmode)
4165 temp = expand_expr (TREE_OPERAND (exp1, 0), NULL_RTX,
4166 VOIDmode, EXPAND_SUM);
4167 op0 = memory_address (mode, temp);
4168 op0 = copy_all_regs (op0);
4169 SAVE_EXPR_RTL (exp1) = op0;
4171 else
4173 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
4174 op0 = memory_address (mode, op0);
4177 temp = gen_rtx (MEM, mode, op0);
4178 /* If address was computed by addition,
4179 mark this as an element of an aggregate. */
4180 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
4181 || (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR
4182 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == PLUS_EXPR)
4183 || AGGREGATE_TYPE_P (TREE_TYPE (exp))
4184 || (TREE_CODE (exp1) == ADDR_EXPR
4185 && (exp2 = TREE_OPERAND (exp1, 0))
4186 && AGGREGATE_TYPE_P (TREE_TYPE (exp2))))
4187 MEM_IN_STRUCT_P (temp) = 1;
4188 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
4189 #if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
4190 a location is accessed through a pointer to const does not mean
4191 that the value there can never change. */
4192 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
4193 #endif
4194 return temp;
4197 case ARRAY_REF:
4198 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
4199 abort ();
4202 tree array = TREE_OPERAND (exp, 0);
4203 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
4204 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
4205 tree index = TREE_OPERAND (exp, 1);
4206 tree index_type = TREE_TYPE (index);
4207 int i;
4209 if (TREE_CODE (low_bound) != INTEGER_CST
4210 && contains_placeholder_p (low_bound))
4211 low_bound = build (WITH_RECORD_EXPR, sizetype, low_bound, exp);
4213 /* Optimize the special case of a zero lower bound.
4215 We convert the low_bound to sizetype to avoid some problems
4216 with constant folding. (E.g. suppose the lower bound is 1,
4217 and its mode is QI. Without the conversion, (ARRAY
4218 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
4219 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
4221 But sizetype isn't quite right either (especially if
4222 the lowbound is negative). FIXME */
4224 if (! integer_zerop (low_bound))
4225 index = fold (build (MINUS_EXPR, index_type, index,
4226 convert (sizetype, low_bound)));
4228 if (TREE_CODE (index) != INTEGER_CST
4229 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4231 /* Nonconstant array index or nonconstant element size.
4232 Generate the tree for *(&array+index) and expand that,
4233 except do it in a language-independent way
4234 and don't complain about non-lvalue arrays.
4235 `mark_addressable' should already have been called
4236 for any array for which this case will be reached. */
4238 /* Don't forget the const or volatile flag from the array
4239 element. */
4240 tree variant_type = build_type_variant (type,
4241 TREE_READONLY (exp),
4242 TREE_THIS_VOLATILE (exp));
4243 tree array_adr = build1 (ADDR_EXPR,
4244 build_pointer_type (variant_type), array);
4245 tree elt;
4246 tree size = size_in_bytes (type);
4248 /* Convert the integer argument to a type the same size as a
4249 pointer so the multiply won't overflow spuriously. */
4250 if (TYPE_PRECISION (index_type) != POINTER_SIZE)
4251 index = convert (type_for_size (POINTER_SIZE, 0), index);
4253 if (TREE_CODE (size) != INTEGER_CST
4254 && contains_placeholder_p (size))
4255 size = build (WITH_RECORD_EXPR, sizetype, size, exp);
4257 /* Don't think the address has side effects
4258 just because the array does.
4259 (In some cases the address might have side effects,
4260 and we fail to record that fact here. However, it should not
4261 matter, since expand_expr should not care.) */
4262 TREE_SIDE_EFFECTS (array_adr) = 0;
4264 elt = build1 (INDIRECT_REF, type,
4265 fold (build (PLUS_EXPR,
4266 TYPE_POINTER_TO (variant_type),
4267 array_adr,
4268 fold (build (MULT_EXPR,
4269 TYPE_POINTER_TO (variant_type),
4270 index, size)))));
4272 /* Volatility, etc., of new expression is same as old
4273 expression. */
4274 TREE_SIDE_EFFECTS (elt) = TREE_SIDE_EFFECTS (exp);
4275 TREE_THIS_VOLATILE (elt) = TREE_THIS_VOLATILE (exp);
4276 TREE_READONLY (elt) = TREE_READONLY (exp);
4278 return expand_expr (elt, target, tmode, modifier);
4281 /* Fold an expression like: "foo"[2].
4282 This is not done in fold so it won't happen inside &. */
4284 if (TREE_CODE (array) == STRING_CST
4285 && TREE_CODE (index) == INTEGER_CST
4286 && !TREE_INT_CST_HIGH (index)
4287 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array)
4288 && GET_MODE_CLASS (mode) == MODE_INT)
4289 return GEN_INT (TREE_STRING_POINTER (array)[i]);
4291 /* If this is a constant index into a constant array,
4292 just get the value from the array. Handle both the cases when
4293 we have an explicit constructor and when our operand is a variable
4294 that was declared const. */
4296 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
4298 if (TREE_CODE (index) == INTEGER_CST
4299 && TREE_INT_CST_HIGH (index) == 0)
4301 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
4303 i = TREE_INT_CST_LOW (index);
4304 while (elem && i--)
4305 elem = TREE_CHAIN (elem);
4306 if (elem)
4307 return expand_expr (fold (TREE_VALUE (elem)), target,
4308 tmode, modifier);
4312 else if (optimize >= 1
4313 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
4314 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
4315 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
4317 if (TREE_CODE (index) == INTEGER_CST
4318 && TREE_INT_CST_HIGH (index) == 0)
4320 tree init = DECL_INITIAL (array);
4322 i = TREE_INT_CST_LOW (index);
4323 if (TREE_CODE (init) == CONSTRUCTOR)
4325 tree elem = CONSTRUCTOR_ELTS (init);
4327 while (elem
4328 && !tree_int_cst_equal (TREE_PURPOSE (elem), index))
4329 elem = TREE_CHAIN (elem);
4330 if (elem)
4331 return expand_expr (fold (TREE_VALUE (elem)), target,
4332 tmode, modifier);
4334 else if (TREE_CODE (init) == STRING_CST
4335 && i < TREE_STRING_LENGTH (init))
4336 return GEN_INT (TREE_STRING_POINTER (init)[i]);
4341 /* Treat array-ref with constant index as a component-ref. */
4343 case COMPONENT_REF:
4344 case BIT_FIELD_REF:
4345 /* If the operand is a CONSTRUCTOR, we can just extract the
4346 appropriate field if it is present. */
4347 if (code != ARRAY_REF
4348 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
4350 tree elt;
4352 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
4353 elt = TREE_CHAIN (elt))
4354 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
4355 return expand_expr (TREE_VALUE (elt), target, tmode, modifier);
4359 enum machine_mode mode1;
4360 int bitsize;
4361 int bitpos;
4362 tree offset;
4363 int volatilep = 0;
4364 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
4365 &mode1, &unsignedp, &volatilep);
4366 int alignment;
4368 /* If we got back the original object, something is wrong. Perhaps
4369 we are evaluating an expression too early. In any event, don't
4370 infinitely recurse. */
4371 if (tem == exp)
4372 abort ();
4374 /* In some cases, we will be offsetting OP0's address by a constant.
4375 So get it as a sum, if possible. If we will be using it
4376 directly in an insn, we validate it. */
4377 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_SUM);
4379 /* If this is a constant, put it into a register if it is a
4380 legitimate constant and memory if it isn't. */
4381 if (CONSTANT_P (op0))
4383 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
4384 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0))
4385 op0 = force_reg (mode, op0);
4386 else
4387 op0 = validize_mem (force_const_mem (mode, op0));
4390 alignment = TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT;
4391 if (offset != 0)
4393 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4395 if (GET_CODE (op0) != MEM)
4396 abort ();
4397 op0 = change_address (op0, VOIDmode,
4398 gen_rtx (PLUS, Pmode, XEXP (op0, 0),
4399 force_reg (Pmode, offset_rtx)));
4400 /* If we have a variable offset, the known alignment
4401 is only that of the innermost structure containing the field.
4402 (Actually, we could sometimes do better by using the
4403 size of an element of the innermost array, but no need.) */
4404 if (TREE_CODE (exp) == COMPONENT_REF
4405 || TREE_CODE (exp) == BIT_FIELD_REF)
4406 alignment = (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
4407 / BITS_PER_UNIT);
4410 /* Don't forget about volatility even if this is a bitfield. */
4411 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
4413 op0 = copy_rtx (op0);
4414 MEM_VOLATILE_P (op0) = 1;
4417 /* In cases where an aligned union has an unaligned object
4418 as a field, we might be extracting a BLKmode value from
4419 an integer-mode (e.g., SImode) object. Handle this case
4420 by doing the extract into an object as wide as the field
4421 (which we know to be the width of a basic mode), then
4422 storing into memory, and changing the mode to BLKmode. */
4423 if (mode1 == VOIDmode
4424 || (mode1 != BLKmode && ! direct_load[(int) mode1]
4425 && modifier != EXPAND_CONST_ADDRESS
4426 && modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4427 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
4428 /* If the field isn't aligned enough to fetch as a memref,
4429 fetch it as a bit field. */
4430 || (STRICT_ALIGNMENT
4431 && TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode))
4432 || (STRICT_ALIGNMENT && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
4434 enum machine_mode ext_mode = mode;
4436 if (ext_mode == BLKmode)
4437 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
4439 if (ext_mode == BLKmode)
4440 abort ();
4442 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
4443 unsignedp, target, ext_mode, ext_mode,
4444 alignment,
4445 int_size_in_bytes (TREE_TYPE (tem)));
4446 if (mode == BLKmode)
4448 rtx new = assign_stack_temp (ext_mode,
4449 bitsize / BITS_PER_UNIT, 0);
4451 emit_move_insn (new, op0);
4452 op0 = copy_rtx (new);
4453 PUT_MODE (op0, BLKmode);
4454 MEM_IN_STRUCT_P (op0) = 1;
4457 return op0;
4460 /* Get a reference to just this component. */
4461 if (modifier == EXPAND_CONST_ADDRESS
4462 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
4463 op0 = gen_rtx (MEM, mode1, plus_constant (XEXP (op0, 0),
4464 (bitpos / BITS_PER_UNIT)));
4465 else
4466 op0 = change_address (op0, mode1,
4467 plus_constant (XEXP (op0, 0),
4468 (bitpos / BITS_PER_UNIT)));
4469 MEM_IN_STRUCT_P (op0) = 1;
4470 MEM_VOLATILE_P (op0) |= volatilep;
4471 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode)
4472 return op0;
4473 if (target == 0)
4474 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
4475 convert_move (target, op0, unsignedp);
4476 return target;
4479 case OFFSET_REF:
4481 tree base = build1 (ADDR_EXPR, type, TREE_OPERAND (exp, 0));
4482 tree addr = build (PLUS_EXPR, type, base, TREE_OPERAND (exp, 1));
4483 op0 = expand_expr (addr, NULL_RTX, VOIDmode, EXPAND_SUM);
4484 temp = gen_rtx (MEM, mode, memory_address (mode, op0));
4485 MEM_IN_STRUCT_P (temp) = 1;
4486 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp);
4487 #if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
4488 a location is accessed through a pointer to const does not mean
4489 that the value there can never change. */
4490 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
4491 #endif
4492 return temp;
4495 /* Intended for a reference to a buffer of a file-object in Pascal.
4496 But it's not certain that a special tree code will really be
4497 necessary for these. INDIRECT_REF might work for them. */
4498 case BUFFER_REF:
4499 abort ();
4501 case IN_EXPR:
4503 /* Pascal set IN expression.
4505 Algorithm:
4506 rlo = set_low - (set_low%bits_per_word);
4507 the_word = set [ (index - rlo)/bits_per_word ];
4508 bit_index = index % bits_per_word;
4509 bitmask = 1 << bit_index;
4510 return !!(the_word & bitmask); */
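/* For instance, with bits_per_word == 8, set_low == 0 and
   index == 11: the_word = set[1], bit_index = 3 and
   bitmask = 1 << 3.  */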
4512 tree set = TREE_OPERAND (exp, 0);
4513 tree index = TREE_OPERAND (exp, 1);
4514 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
4515 tree set_type = TREE_TYPE (set);
4516 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
4517 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
4518 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
4519 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
4520 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
4521 rtx setval = expand_expr (set, 0, VOIDmode, 0);
4522 rtx setaddr = XEXP (setval, 0);
4523 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
4524 rtx rlow;
4525 rtx diff, quo, rem, addr, bit, result;
4527 preexpand_calls (exp);
4529 /* If domain is empty, answer is no. Likewise if index is constant
4530 and out of bounds. */
4531 if (((TREE_CODE (set_high_bound) == INTEGER_CST
4532 && TREE_CODE (set_low_bound) == INTEGER_CST
4533 && tree_int_cst_lt (set_high_bound, set_low_bound))
4534 || (TREE_CODE (index) == INTEGER_CST
4535 && TREE_CODE (set_low_bound) == INTEGER_CST
4536 && tree_int_cst_lt (index, set_low_bound))
4537 || (TREE_CODE (set_high_bound) == INTEGER_CST
4538 && TREE_CODE (index) == INTEGER_CST
4539 && tree_int_cst_lt (set_high_bound, index))))
4540 return const0_rtx;
4542 if (target == 0)
4543 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
4545 /* If we get here, we have to generate the code for both cases
4546 (in range and out of range). */
4548 op0 = gen_label_rtx ();
4549 op1 = gen_label_rtx ();
4551 if (! (GET_CODE (index_val) == CONST_INT
4552 && GET_CODE (lo_r) == CONST_INT))
4554 emit_cmp_insn (index_val, lo_r, LT, NULL_RTX,
4555 GET_MODE (index_val), iunsignedp, 0);
4556 emit_jump_insn (gen_blt (op1));
4559 if (! (GET_CODE (index_val) == CONST_INT
4560 && GET_CODE (hi_r) == CONST_INT))
4562 emit_cmp_insn (index_val, hi_r, GT, NULL_RTX,
4563 GET_MODE (index_val), iunsignedp, 0);
4564 emit_jump_insn (gen_bgt (op1));
4567 /* Calculate the element number of bit zero in the first word
4568 of the set. */
4569 if (GET_CODE (lo_r) == CONST_INT)
4570 rlow = GEN_INT (INTVAL (lo_r)
4571 & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
4572 else
4573 rlow = expand_binop (index_mode, and_optab, lo_r,
4574 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
4575 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
4577 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
4578 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
4580 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
4581 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
4582 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
4583 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
4585 addr = memory_address (byte_mode,
4586 expand_binop (index_mode, add_optab, diff,
4587 setaddr, NULL_RTX, iunsignedp,
4588 OPTAB_LIB_WIDEN));
4590 /* Extract the bit we want to examine. */
4591 bit = expand_shift (RSHIFT_EXPR, byte_mode,
4592 gen_rtx (MEM, byte_mode, addr),
4593 make_tree (TREE_TYPE (index), rem),
4594 NULL_RTX, 1);
4595 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
4596 GET_MODE (target) == byte_mode ? target : 0,
4597 1, OPTAB_LIB_WIDEN);
4599 if (result != target)
4600 convert_move (target, result, 1);
4602 /* Output the code to handle the out-of-range case. */
4603 emit_jump (op0);
4604 emit_label (op1);
4605 emit_move_insn (target, const0_rtx);
4606 emit_label (op0);
4607 return target;
4610 case WITH_CLEANUP_EXPR:
4611 if (RTL_EXPR_RTL (exp) == 0)
4613 RTL_EXPR_RTL (exp)
4614 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
4615 cleanups_this_call
4616 = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2), cleanups_this_call);
4617 /* That's it for this cleanup. */
4618 TREE_OPERAND (exp, 2) = 0;
4620 return RTL_EXPR_RTL (exp);
4622 case CLEANUP_POINT_EXPR:
4624 tree old_cleanups = cleanups_this_call;
4625 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, modifier);
4626 expand_cleanups_to (old_cleanups);
4628 return op0;
4630 case CALL_EXPR:
4631 /* Check for a built-in function. */
4632 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
4633 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
4634 == FUNCTION_DECL)
4635 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4636 return expand_builtin (exp, target, subtarget, tmode, ignore);
4638 /* If this call was expanded already by preexpand_calls,
4639 just return the result we got. */
4640 if (CALL_EXPR_RTL (exp) != 0)
4641 return CALL_EXPR_RTL (exp);
4643 return expand_call (exp, target, ignore);
4645 case NON_LVALUE_EXPR:
4646 case NOP_EXPR:
4647 case CONVERT_EXPR:
4648 case REFERENCE_EXPR:
4649 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
4650 return expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, modifier);
4652 if (TREE_CODE (type) == UNION_TYPE)
4654 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
4655 if (target == 0)
4657 if (mode == BLKmode)
4659 if (TYPE_SIZE (type) == 0
4660 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4661 abort ();
4662 target = assign_stack_temp (BLKmode,
4663 (TREE_INT_CST_LOW (TYPE_SIZE (type))
4664 + BITS_PER_UNIT - 1)
4665 / BITS_PER_UNIT, 0);
4667 else
4668 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
4671 if (GET_CODE (target) == MEM)
4672 /* Store data into beginning of memory target. */
4673 store_expr (TREE_OPERAND (exp, 0),
4674 change_address (target, TYPE_MODE (valtype), 0), 0);
4676 else if (GET_CODE (target) == REG)
4677 /* Store this field into a union of the proper type. */
4678 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
4679 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
4680 VOIDmode, 0, 1,
4681 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))));
4682 else
4683 abort ();
4685 /* Return the entire union. */
4686 return target;
4689 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
4690 if (GET_MODE (op0) == mode)
4691 return op0;
4693 /* If OP0 is a constant, just convert it into the proper mode. */
4694 if (CONSTANT_P (op0))
4695 return
4696 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
4697 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4699 if (modifier == EXPAND_INITIALIZER)
4700 return gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
4702 if (flag_force_mem && GET_CODE (op0) == MEM)
4703 op0 = copy_to_reg (op0);
4705 if (target == 0)
4706 return
4707 convert_to_mode (mode, op0,
4708 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4709 else
4710 convert_move (target, op0,
4711 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4712 return target;
4714 case PLUS_EXPR:
4715 /* We come here from MINUS_EXPR when the second operand is a constant. */
4716 plus_expr:
4717 this_optab = add_optab;
4719 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
4720 something else, make sure we add the register to the constant and
4721 then to the other thing. This case can occur during strength
4722 reduction and doing it this way will produce better code if the
4723 frame pointer or argument pointer is eliminated.
4725 fold-const.c will ensure that the constant is always in the inner
4726 PLUS_EXPR, so the only case we need to do anything about is if
4727 sp, ap, or fp is our second argument, in which case we must swap
4728 the innermost first argument and our second argument. */
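/* Thus (X + C) + FP is rearranged into (FP + C) + X, so that FP + C
   can be simplified when the frame pointer is eliminated.  */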
4730 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
4731 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
4732 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
4733 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
4734 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
4735 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
4737 tree t = TREE_OPERAND (exp, 1);
4739 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4740 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
4743 /* If the result is to be Pmode and we are adding an integer to
4744 something, we might be forming a constant. So try to use
4745 plus_constant. If it produces a sum and we can't accept it,
4746 use force_operand. This allows P = &ARR[const] to generate
4747 efficient code on machines where a SYMBOL_REF is not a valid
4748 address.
4750 If this is an EXPAND_SUM call, always return the sum. */
4751 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
4752 || mode == Pmode)
4754 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
4755 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4756 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
4758 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
4759 EXPAND_SUM);
4760 op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
4761 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4762 op1 = force_operand (op1, target);
4763 return op1;
4766 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4767 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4768 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
4770 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
4771 EXPAND_SUM);
4772 if (! CONSTANT_P (op0))
4774 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
4775 VOIDmode, modifier);
4776 /* Don't go to both_summands if modifier
4777 says it's not right to return a PLUS. */
4778 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4779 goto binop2;
4780 goto both_summands;
4782 op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
4783 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4784 op0 = force_operand (op0, target);
4785 return op0;
4789 /* No sense saving up arithmetic to be done
4790 if it's all in the wrong mode to form part of an address.
4791 And force_operand won't know whether to sign-extend or
4792 zero-extend. */
4793 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4794 || mode != Pmode)
4795 goto binop;
4797 preexpand_calls (exp);
4798 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4799 subtarget = 0;
4801 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
4802 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
4804 both_summands:
4805 /* Make sure any term that's a sum with a constant comes last. */
4806 if (GET_CODE (op0) == PLUS
4807 && CONSTANT_P (XEXP (op0, 1)))
4809 temp = op0;
4810 op0 = op1;
4811 op1 = temp;
4813 /* If adding to a sum including a constant,
4814 associate it to put the constant outside. */
4815 if (GET_CODE (op1) == PLUS
4816 && CONSTANT_P (XEXP (op1, 1)))
4818 rtx constant_term = const0_rtx;
4820 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
4821 if (temp != 0)
4822 op0 = temp;
4823 /* Ensure that MULT comes first if there is one. */
4824 else if (GET_CODE (op0) == MULT)
4825 op0 = gen_rtx (PLUS, mode, op0, XEXP (op1, 0));
4826 else
4827 op0 = gen_rtx (PLUS, mode, XEXP (op1, 0), op0);
4829 /* Let's also eliminate constants from op0 if possible. */
4830 op0 = eliminate_constant_term (op0, &constant_term);
4832 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
4833 their sum should be a constant. Form it into OP1, since the
4834 result we want will then be OP0 + OP1. */
4836 temp = simplify_binary_operation (PLUS, mode, constant_term,
4837 XEXP (op1, 1));
4838 if (temp != 0)
4839 op1 = temp;
4840 else
4841 op1 = gen_rtx (PLUS, mode, constant_term, XEXP (op1, 1));
4844 /* Put a constant term last and put a multiplication first. */
4845 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
4846 temp = op1, op1 = op0, op0 = temp;
4848 temp = simplify_binary_operation (PLUS, mode, op0, op1);
4849 return temp ? temp : gen_rtx (PLUS, mode, op0, op1);
4851 case MINUS_EXPR:
4852 /* For initializers, we are allowed to return a MINUS of two
4853 symbolic constants. Here we handle all cases when both operands
4854 are constant. */
4855 /* Handle difference of two symbolic constants,
4856 for the sake of an initializer. */
4857 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
4858 && really_constant_p (TREE_OPERAND (exp, 0))
4859 && really_constant_p (TREE_OPERAND (exp, 1)))
4861 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
4862 VOIDmode, modifier);
4863 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
4864 VOIDmode, modifier);
4866 /* If one operand is a CONST_INT, put it last. */
4867 if (GET_CODE (op0) == CONST_INT)
4868 temp = op0, op0 = op1, op1 = temp;
4870 /* If the last operand is a CONST_INT, use plus_constant of
4871 the negated constant. Else make the MINUS. */
4872 if (GET_CODE (op1) == CONST_INT)
4873 return plus_constant (op0, - INTVAL (op1));
4874 else
4875 return gen_rtx (MINUS, mode, op0, op1);
4877 /* Convert A - const to A + (-const). */
4878 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
4880 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0),
4881 fold (build1 (NEGATE_EXPR, type,
4882 TREE_OPERAND (exp, 1))));
4883 goto plus_expr;
4885 this_optab = sub_optab;
4886 goto binop;
4888 case MULT_EXPR:
4889 preexpand_calls (exp);
4890 /* If first operand is constant, swap them.
4891 Thus the following special case checks need only
4892 check the second operand. */
4893 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
4895 register tree t1 = TREE_OPERAND (exp, 0);
4896 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
4897 TREE_OPERAND (exp, 1) = t1;
4900 /* Attempt to return something suitable for generating an
4901 indexed address, for machines that support that. */
4903 if (modifier == EXPAND_SUM && mode == Pmode
4904 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4905 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
4907 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, EXPAND_SUM);
4909 /* Apply distributive law if OP0 is x+c. */
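/* E.g., (X + 4) * 3 becomes X*3 + 12, keeping the result in a form
   that is still usable as an address.  */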
4910 if (GET_CODE (op0) == PLUS
4911 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
4912 return gen_rtx (PLUS, mode,
4913 gen_rtx (MULT, mode, XEXP (op0, 0),
4914 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
4915 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
4916 * INTVAL (XEXP (op0, 1))));
4918 if (GET_CODE (op0) != REG)
4919 op0 = force_operand (op0, NULL_RTX);
4920 if (GET_CODE (op0) != REG)
4921 op0 = copy_to_mode_reg (mode, op0);
4923 return gen_rtx (MULT, mode, op0,
4924 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
4927 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4928 subtarget = 0;
4930 /* Check for multiplying things that have been extended
4931 from a narrower type. If this machine supports multiplying
4932 in that narrower type with a result in the desired type,
4933 do it that way, and avoid the explicit type-conversion. */
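/* E.g., (int) H1 * (int) H2, with H1 and H2 of type `short', can use
   a widening pattern such as mulhisi3 rather than two extensions
   followed by an SImode multiply, when the target provides one.  */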
4934 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
4935 && TREE_CODE (type) == INTEGER_TYPE
4936 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4937 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
4938 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4939 && int_fits_type_p (TREE_OPERAND (exp, 1),
4940 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4941 /* Don't use a widening multiply if a shift will do. */
4942 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
4943 > HOST_BITS_PER_WIDE_INT)
4944 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
4945 ||
4946 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
4947 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
4948 ==
4949 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
4950 /* If both operands are extended, they must either both
4951 be zero-extended or both be sign-extended. */
4952 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
4953 ==
4954 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))
4956 enum machine_mode innermode
4957 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
4958 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4959 ? umul_widen_optab : smul_widen_optab);
4960 if (mode == GET_MODE_WIDER_MODE (innermode)
4961 && this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
4963 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
4964 NULL_RTX, VOIDmode, 0);
4965 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
4966 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
4967 VOIDmode, 0);
4968 else
4969 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
4970 NULL_RTX, VOIDmode, 0);
4971 goto binop2;
4974 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4975 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4976 return expand_mult (mode, op0, op1, target, unsignedp);
4978 case TRUNC_DIV_EXPR:
4979 case FLOOR_DIV_EXPR:
4980 case CEIL_DIV_EXPR:
4981 case ROUND_DIV_EXPR:
4982 case EXACT_DIV_EXPR:
4983 preexpand_calls (exp);
4984 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4985 subtarget = 0;
4986 /* Possible optimization: compute the dividend with EXPAND_SUM
4987 then if the divisor is constant can optimize the case
4988 where some terms of the dividend have coeffs divisible by it. */
4989 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4990 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4991 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
4993 case RDIV_EXPR:
4994 this_optab = flodiv_optab;
4995 goto binop;
4997 case TRUNC_MOD_EXPR:
4998 case FLOOR_MOD_EXPR:
4999 case CEIL_MOD_EXPR:
5000 case ROUND_MOD_EXPR:
5001 preexpand_calls (exp);
5002 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5003 subtarget = 0;
5004 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5005 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5006 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
5008 case FIX_ROUND_EXPR:
5009 case FIX_FLOOR_EXPR:
5010 case FIX_CEIL_EXPR:
5011 abort (); /* Not used for C. */
5013 case FIX_TRUNC_EXPR:
5014 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
5015 if (target == 0)
5016 target = gen_reg_rtx (mode);
5017 expand_fix (target, op0, unsignedp);
5018 return target;
5020 case FLOAT_EXPR:
5021 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
5022 if (target == 0)
5023 target = gen_reg_rtx (mode);
5024 /* expand_float can't figure out what to do if FROM has VOIDmode.
5025 So give it the correct mode. With -O, cse will optimize this. */
5026 if (GET_MODE (op0) == VOIDmode)
5027 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
5028 op0);
5029 expand_float (target, op0,
5030 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
5031 return target;
5033 case NEGATE_EXPR:
5034 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5035 temp = expand_unop (mode, neg_optab, op0, target, 0);
5036 if (temp == 0)
5037 abort ();
5038 return temp;
5040 case ABS_EXPR:
5041 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5043 /* Handle complex values specially. */
5044 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
5045 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
5046 return expand_complex_abs (mode, op0, target, unsignedp);
5048 /* Unsigned abs is simply the operand. Testing here means we don't
5049 risk generating incorrect code below. */
5050 if (TREE_UNSIGNED (type))
5051 return op0;
5053 /* First try to do it with a special abs instruction. */
5054 temp = expand_unop (mode, abs_optab, op0, target, 0);
5055 if (temp != 0)
5056 return temp;
5058 /* If this machine has expensive jumps, we can do integer absolute
5059 value of X as (((signed) x >> (W-1)) ^ x) - ((signed) x >> (W-1)),
5060 where W is the width of MODE. */
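/* E.g., for W == 32 and X == -5 the shift yields -1, so we compute
   (-5 ^ -1) - (-1) == 4 + 1 == 5; for X == 5 the shift yields 0 and
   the value is unchanged.  */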
5062 if (GET_MODE_CLASS (mode) == MODE_INT && BRANCH_COST >= 2)
5064 rtx extended = expand_shift (RSHIFT_EXPR, mode, op0,
5065 size_int (GET_MODE_BITSIZE (mode) - 1),
5066 NULL_RTX, 0);
5068 temp = expand_binop (mode, xor_optab, extended, op0, target, 0,
5069 OPTAB_LIB_WIDEN);
5070 if (temp != 0)
5071 temp = expand_binop (mode, sub_optab, temp, extended, target, 0,
5072 OPTAB_LIB_WIDEN);
5074 if (temp != 0)
5075 return temp;
5078 /* If that does not win, use conditional jump and negate. */
5079 target = original_target;
5080 op1 = gen_label_rtx ();
5081 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 0))
5082 || GET_MODE (target) != mode
5083 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
5084 || (GET_CODE (target) == REG
5085 && REGNO (target) < FIRST_PSEUDO_REGISTER))
5086 target = gen_reg_rtx (mode);
5088 emit_move_insn (target, op0);
5089 NO_DEFER_POP;
5091 /* If this mode is an integer too wide to compare properly,
5092 compare word by word. Rely on CSE to optimize constant cases. */
5093 if (GET_MODE_CLASS (mode) == MODE_INT && ! can_compare_p (mode))
5094 do_jump_by_parts_greater_rtx (mode, 0, target, const0_rtx,
5095 NULL_RTX, op1);
5096 else
5098 temp = compare_from_rtx (target, CONST0_RTX (mode), GE, 0, mode,
5099 NULL_RTX, 0);
5100 if (temp == const1_rtx)
5101 return target;
5102 else if (temp != const0_rtx)
5104 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
5105 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op1));
5106 else
5107 abort ();
5111 op0 = expand_unop (mode, neg_optab, target, target, 0);
5112 if (op0 != target)
5113 emit_move_insn (target, op0);
5114 emit_label (op1);
5115 OK_DEFER_POP;
5116 return target;
5118 case MAX_EXPR:
5119 case MIN_EXPR:
5120 target = original_target;
5121 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1))
5122 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
5123 || GET_MODE (target) != mode
5124 || (GET_CODE (target) == REG
5125 && REGNO (target) < FIRST_PSEUDO_REGISTER))
5126 target = gen_reg_rtx (mode);
5127 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5128 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
5130 /* First try to do it with a special MIN or MAX instruction.
5131 If that does not win, use a conditional jump to select the proper
5132 value. */
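/* The fallback jump sequence amounts to:
     target = op0;
     if (target >= op1) goto done;   (`<=' for MIN_EXPR)
     target = op1;
   done:  */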
5133 this_optab = (TREE_UNSIGNED (type)
5134 ? (code == MIN_EXPR ? umin_optab : umax_optab)
5135 : (code == MIN_EXPR ? smin_optab : smax_optab));
5137 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
5138 OPTAB_WIDEN);
5139 if (temp != 0)
5140 return temp;
5142 if (target != op0)
5143 emit_move_insn (target, op0);
5145 op0 = gen_label_rtx ();
5147 /* If this mode is an integer too wide to compare properly,
5148 compare word by word. Rely on cse to optimize constant cases. */
5149 if (GET_MODE_CLASS (mode) == MODE_INT && !can_compare_p (mode))
5151 if (code == MAX_EXPR)
5152 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
5153 target, op1, NULL_RTX, op0);
5154 else
5155 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
5156 op1, target, NULL_RTX, op0);
5157 emit_move_insn (target, op1);
5159 else
5161 if (code == MAX_EXPR)
5162 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
5163 ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
5164 : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
5165 else
5166 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
5167 ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
5168 : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
5169 if (temp == const0_rtx)
5170 emit_move_insn (target, op1);
5171 else if (temp != const_true_rtx)
5173 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
5174 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
5175 else
5176 abort ();
5177 emit_move_insn (target, op1);
5180 emit_label (op0);
5181 return target;
5183 case BIT_NOT_EXPR:
5184 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5185 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
5186 if (temp == 0)
5187 abort ();
5188 return temp;
5190 case FFS_EXPR:
5191 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5192 temp = expand_unop (mode, ffs_optab, op0, target, 1);
5193 if (temp == 0)
5194 abort ();
5195 return temp;
5197 /* ??? Can optimize bitwise operations with one arg constant.
5198 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
5199 and (a bitwise1 b) bitwise2 b (etc)
5200 but that is probably not worthwhile. */
5202 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
5203 boolean values when we want in all cases to compute both of them. In
5204 general it is fastest to do TRUTH_AND_EXPR by computing both operands
5205 as actual zero-or-1 values and then bitwise anding. In cases where
5206 there cannot be any side effects, better code would be made by
5207 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
5208 how to recognize those cases. */
5210 case TRUTH_AND_EXPR:
5211 case BIT_AND_EXPR:
5212 this_optab = and_optab;
5213 goto binop;
5215 case TRUTH_OR_EXPR:
5216 case BIT_IOR_EXPR:
5217 this_optab = ior_optab;
5218 goto binop;
5220 case TRUTH_XOR_EXPR:
5221 case BIT_XOR_EXPR:
5222 this_optab = xor_optab;
5223 goto binop;
5225 case LSHIFT_EXPR:
5226 case RSHIFT_EXPR:
5227 case LROTATE_EXPR:
5228 case RROTATE_EXPR:
5229 preexpand_calls (exp);
5230 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5231 subtarget = 0;
5232 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5233 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
5234 unsignedp);
5236 /* Could determine the answer when only additive constants differ. Also,
5237 the addition of one can be handled by changing the condition. */
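/* E.g., X < Y + 1 could be tested as X <= Y for integer operands.  */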
5238 case LT_EXPR:
5239 case LE_EXPR:
5240 case GT_EXPR:
5241 case GE_EXPR:
5242 case EQ_EXPR:
5243 case NE_EXPR:
5244 preexpand_calls (exp);
5245 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
5246 if (temp != 0)
5247 return temp;
5249 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
5250 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
5251 && original_target
5252 && GET_CODE (original_target) == REG
5253 && (GET_MODE (original_target)
5254 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
5256 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
5257 VOIDmode, 0);
5259 if (temp != original_target)
5260 temp = copy_to_reg (temp);
5262 op1 = gen_label_rtx ();
5263 emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
5264 GET_MODE (temp), unsignedp, 0);
5265 emit_jump_insn (gen_beq (op1));
5266 emit_move_insn (temp, const1_rtx);
5267 emit_label (op1);
5268 return temp;
5271 /* If no set-flag instruction, must generate a conditional
5272 store into a temporary variable. Drop through
5273 and handle this like && and ||. */
5275 case TRUTH_ANDIF_EXPR:
5276 case TRUTH_ORIF_EXPR:
5277 if (! ignore
5278 && (target == 0 || ! safe_from_p (target, exp)
5279 /* Make sure we don't have a hard reg (such as function's return
5280 value) live across basic blocks, if not optimizing. */
5281 || (!optimize && GET_CODE (target) == REG
5282 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
5283 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5285 if (target)
5286 emit_clr_insn (target);
5288 op1 = gen_label_rtx ();
5289 jumpifnot (exp, op1);
5291 if (target)
5292 emit_0_to_1_insn (target);
5294 emit_label (op1);
5295 return ignore ? const0_rtx : target;
5297 case TRUTH_NOT_EXPR:
5298 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
5299 /* The parser is careful to generate TRUTH_NOT_EXPR
5300 only with operands that are always zero or one. */
5301 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
5302 target, 1, OPTAB_LIB_WIDEN);
5303 if (temp == 0)
5304 abort ();
5305 return temp;
5307 case COMPOUND_EXPR:
5308 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
5309 emit_queue ();
5310 return expand_expr (TREE_OPERAND (exp, 1),
5311 (ignore ? const0_rtx : target),
5312 VOIDmode, 0);
5314 case COND_EXPR:
5316 rtx flag = NULL_RTX;
5317 tree left_cleanups = NULL_TREE;
5318 tree right_cleanups = NULL_TREE;
5320 /* Used to save a pointer to the place to put the setting of
5321 the flag that indicates if this side of the conditional was
5322 taken. We backpatch the code if we find out later that we
5323 have any conditional cleanups that need to be performed. */
5324 rtx dest_right_flag = NULL_RTX;
5325 rtx dest_left_flag = NULL_RTX;
5327 /* Note that COND_EXPRs whose type is a structure or union
5328 are required to be constructed to contain assignments of
5329 a temporary variable, so that we can evaluate them here
5330 for side effect only. If type is void, we must do likewise. */
5332 /* If an arm of the branch requires a cleanup,
5333 only that cleanup is performed. */
5335 tree singleton = 0;
5336 tree binary_op = 0, unary_op = 0;
5337 tree old_cleanups = cleanups_this_call;
5339 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
5340 convert it to our mode, if necessary. */
5341 if (integer_onep (TREE_OPERAND (exp, 1))
5342 && integer_zerop (TREE_OPERAND (exp, 2))
5343 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
5345 if (ignore)
5347 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
5348 modifier);
5349 return const0_rtx;
5352 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
5353 if (GET_MODE (op0) == mode)
5354 return op0;
5356 if (target == 0)
5357 target = gen_reg_rtx (mode);
5358 convert_move (target, op0, unsignedp);
5359 return target;
5362 /* If we are not to produce a result, we have no target. Otherwise,
5363 if a target was specified use it; it will not be used as an
5364 intermediate target unless it is safe. If no target, use a
5365 temporary. */
5367 if (ignore)
5368 temp = 0;
5369 else if (original_target
5370 && safe_from_p (original_target, TREE_OPERAND (exp, 0))
5371 && GET_MODE (original_target) == mode)
5372 temp = original_target;
5373 else if (mode == BLKmode)
5375 if (TYPE_SIZE (type) == 0
5376 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
5377 abort ();
5379 temp = assign_stack_temp (BLKmode,
5380 (TREE_INT_CST_LOW (TYPE_SIZE (type))
5381 + BITS_PER_UNIT - 1)
5382 / BITS_PER_UNIT, 0);
5383 MEM_IN_STRUCT_P (temp) = AGGREGATE_TYPE_P (type);
5385 else
5386 temp = gen_reg_rtx (mode);
5388 /* Check for X ? A + B : A. If we have this, we can copy
5389 A to the output and conditionally add B. Similarly for unary
5390 operations. Don't do this if X has side-effects because
5391 those side effects might affect A or B and the "?" operation is
5392 a sequence point in ANSI. (We test for side effects later.) */
5394 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
5395 && operand_equal_p (TREE_OPERAND (exp, 2),
5396 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
5397 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
5398 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
5399 && operand_equal_p (TREE_OPERAND (exp, 1),
5400 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
5401 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
5402 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
5403 && operand_equal_p (TREE_OPERAND (exp, 2),
5404 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
5405 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
5406 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
5407 && operand_equal_p (TREE_OPERAND (exp, 1),
5408 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
5409 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
5411 /* If we had X ? A + 1 : A and we can do the test of X as a store-flag
5412 operation, do this as A + (X != 0). Similarly for other simple
5413 binary operators. */
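/* A hypothetical example of the rewrite: with a store-flag insn,

       r = x < 0 ? y + 1 : y;

   can be compiled as if it were

       r = y + (x < 0);

   with no branches at all; the same applies to -, |, ^ and & when
   the constant operand is 1.  */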
5414 if (temp && singleton && binary_op
5415 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
5416 && (TREE_CODE (binary_op) == PLUS_EXPR
5417 || TREE_CODE (binary_op) == MINUS_EXPR
5418 || TREE_CODE (binary_op) == BIT_IOR_EXPR
5419 || TREE_CODE (binary_op) == BIT_XOR_EXPR
5420 || TREE_CODE (binary_op) == BIT_AND_EXPR)
5421 && integer_onep (TREE_OPERAND (binary_op, 1))
5422 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
5424 rtx result;
5425 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
5426 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
5427 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
5428 : TREE_CODE (binary_op) == BIT_XOR_EXPR ? xor_optab
5429 : and_optab);
5431 /* If we had X ? A : A + 1, do this as A + (X == 0).
5433 We have to invert the truth value here and then put it
5434 back later if do_store_flag fails. We cannot simply copy
5435 TREE_OPERAND (exp, 0) to another variable and modify that
5436 because invert_truthvalue can modify the tree pointed to
5437 by its argument. */
5438 if (singleton == TREE_OPERAND (exp, 1))
5439 TREE_OPERAND (exp, 0)
5440 = invert_truthvalue (TREE_OPERAND (exp, 0));
5442 result = do_store_flag (TREE_OPERAND (exp, 0),
5443 (safe_from_p (temp, singleton)
5444 ? temp : NULL_RTX),
5445 mode, BRANCH_COST <= 1);
5447 if (result)
5449 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
5450 return expand_binop (mode, boptab, op1, result, temp,
5451 unsignedp, OPTAB_LIB_WIDEN);
5453 else if (singleton == TREE_OPERAND (exp, 1))
5454 TREE_OPERAND (exp, 0)
5455 = invert_truthvalue (TREE_OPERAND (exp, 0));
5458 NO_DEFER_POP;
5459 op0 = gen_label_rtx ();
5461 flag = gen_reg_rtx (word_mode);
5462 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
5464 if (temp != 0)
5466 /* If the target conflicts with the other operand of the
5467 binary op, we can't use it. Also, we can't use the target
5468 if it is a hard register, because evaluating the condition
5469 might clobber it. */
5470 if ((binary_op
5471 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1)))
5472 || (GET_CODE (temp) == REG
5473 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
5474 temp = gen_reg_rtx (mode);
5475 store_expr (singleton, temp, 0);
5477 else
5478 expand_expr (singleton,
5479 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5480 dest_left_flag = get_last_insn ();
5481 if (singleton == TREE_OPERAND (exp, 1))
5482 jumpif (TREE_OPERAND (exp, 0), op0);
5483 else
5484 jumpifnot (TREE_OPERAND (exp, 0), op0);
5486 /* Allows cleanups up to here. */
5487 old_cleanups = cleanups_this_call;
5488 if (binary_op && temp == 0)
5489 /* Just touch the other operand. */
5490 expand_expr (TREE_OPERAND (binary_op, 1),
5491 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5492 else if (binary_op)
5493 store_expr (build (TREE_CODE (binary_op), type,
5494 make_tree (type, temp),
5495 TREE_OPERAND (binary_op, 1)),
5496 temp, 0);
5497 else
5498 store_expr (build1 (TREE_CODE (unary_op), type,
5499 make_tree (type, temp)),
5500 temp, 0);
5501 op1 = op0;
5502 dest_right_flag = get_last_insn ();
5504 #if 0
5505 /* This is now done in jump.c and is better done there because it
5506 produces shorter register lifetimes. */
5508 /* Check for both possibilities either constants or variables
5509 in registers (but not the same as the target!). If so, can
5510 save branches by assigning one, branching, and assigning the
5511 other. */
5512 else if (temp && GET_MODE (temp) != BLKmode
5513 && (TREE_CONSTANT (TREE_OPERAND (exp, 1))
5514 || ((TREE_CODE (TREE_OPERAND (exp, 1)) == PARM_DECL
5515 || TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL)
5516 && DECL_RTL (TREE_OPERAND (exp, 1))
5517 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 1))) == REG
5518 && DECL_RTL (TREE_OPERAND (exp, 1)) != temp))
5519 && (TREE_CONSTANT (TREE_OPERAND (exp, 2))
5520 || ((TREE_CODE (TREE_OPERAND (exp, 2)) == PARM_DECL
5521 || TREE_CODE (TREE_OPERAND (exp, 2)) == VAR_DECL)
5522 && DECL_RTL (TREE_OPERAND (exp, 2))
5523 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 2))) == REG
5524 && DECL_RTL (TREE_OPERAND (exp, 2)) != temp)))
5526 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
5527 temp = gen_reg_rtx (mode);
5528 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5529 dest_left_flag = get_last_insn ();
5530 jumpifnot (TREE_OPERAND (exp, 0), op0);
5532 /* Allows cleanups up to here. */
5533 old_cleanups = cleanups_this_call;
5534 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5535 op1 = op0;
5536 dest_right_flag = get_last_insn ();
5538 #endif
5539 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
5540 comparison operator. If we have one of these cases, set the
5541 output to A, branch on A (cse will merge these two references),
5542 then set the output to FOO. */
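/* Illustrative sketch (not original source): for

       r = i != 0 ? i : dflt;

   we store `i' into the target, branch on the very same `i' (cse
   merges the two references), and only on the false arm overwrite
   the target with `dflt'.  */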
5543 else if (temp
5544 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
5545 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
5546 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
5547 TREE_OPERAND (exp, 1), 0)
5548 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
5549 && safe_from_p (temp, TREE_OPERAND (exp, 2)))
5551 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
5552 temp = gen_reg_rtx (mode);
5553 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5554 dest_left_flag = get_last_insn ();
5555 jumpif (TREE_OPERAND (exp, 0), op0);
5557 /* Allows cleanups up to here. */
5558 old_cleanups = cleanups_this_call;
5559 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5560 op1 = op0;
5561 dest_right_flag = get_last_insn ();
5563 else if (temp
5564 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
5565 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
5566 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
5567 TREE_OPERAND (exp, 2), 0)
5568 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
5569 && safe_from_p (temp, TREE_OPERAND (exp, 1)))
5571 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
5572 temp = gen_reg_rtx (mode);
5573 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5574 dest_left_flag = get_last_insn ();
5575 jumpifnot (TREE_OPERAND (exp, 0), op0);
5577 /* Allows cleanups up to here. */
5578 old_cleanups = cleanups_this_call;
5579 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5580 op1 = op0;
5581 dest_right_flag = get_last_insn ();
5583 else
5585 op1 = gen_label_rtx ();
5586 jumpifnot (TREE_OPERAND (exp, 0), op0);
5588 /* Allows cleanups up to here. */
5589 old_cleanups = cleanups_this_call;
5590 if (temp != 0)
5591 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5592 else
5593 expand_expr (TREE_OPERAND (exp, 1),
5594 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5595 dest_left_flag = get_last_insn ();
5597 /* Handle conditional cleanups, if any. */
5598 left_cleanups = defer_cleanups_to (old_cleanups);
5600 emit_queue ();
5601 emit_jump_insn (gen_jump (op1));
5602 emit_barrier ();
5603 emit_label (op0);
5604 if (temp != 0)
5605 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5606 else
5607 expand_expr (TREE_OPERAND (exp, 2),
5608 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5609 dest_right_flag = get_last_insn ();
5612 /* Handle conditional cleanups, if any. */
5613 right_cleanups = defer_cleanups_to (old_cleanups);
5615 emit_queue ();
5616 emit_label (op1);
5617 OK_DEFER_POP;
5619 /* Add back in, any conditional cleanups. */
5620 if (left_cleanups || right_cleanups)
5622 tree new_cleanups;
5623 tree cond;
5624 rtx last;
5626 /* Now that we know that a flag is needed, go back and add in the
5627 setting of the flag. */
5629 /* Do the left side flag. */
5630 last = get_last_insn ();
5631 /* Flag left cleanups as needed. */
5632 emit_move_insn (flag, const1_rtx);
5633 /* ??? deprecated, use sequences instead. */
5634 reorder_insns (NEXT_INSN (last), get_last_insn (), dest_left_flag);
5636 /* Do the right side flag. */
5637 last = get_last_insn ();
5638 /* Flag right cleanups as needed. */
5639 emit_move_insn (flag, const0_rtx);
5640 /* ??? deprecated, use sequences instead. */
5641 reorder_insns (NEXT_INSN (last), get_last_insn (), dest_right_flag);
5643 /* Convert FLAG, which is an rtx, into a tree. */
5644 cond = make_node (RTL_EXPR);
5645 TREE_TYPE (cond) = integer_type_node;
5646 RTL_EXPR_RTL (cond) = flag;
5647 RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
5649 if (! left_cleanups)
5650 left_cleanups = integer_zero_node;
5651 if (! right_cleanups)
5652 right_cleanups = integer_zero_node;
5653 new_cleanups = build (COND_EXPR, void_type_node, cond,
5654 left_cleanups, right_cleanups);
5655 new_cleanups = fold (new_cleanups);
5657 /* Now add in the conditionalized cleanups. */
5658 cleanups_this_call
5659 = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
5661 return temp;
5664 case TARGET_EXPR:
5666 /* Something needs to be initialized, but we didn't know
5667 where that thing was when building the tree. For example,
5668 it could be the return value of a function, or a parameter
5669 to a function which is laid down in the stack, or a temporary
5670 variable which must be passed by reference.
5672 We guarantee that the expression will either be constructed
5673 or copied into our original target. */
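/* A hedged illustration: in C++, where TARGET_EXPRs are common,

       struct S s = make_s ();

   can be represented as a TARGET_EXPR whose slot is `s' and whose
   initializer is the call, so the callee constructs its result
   directly in `s' rather than in a temporary that is copied.  */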
5675 tree slot = TREE_OPERAND (exp, 0);
5676 tree exp1;
5678 if (TREE_CODE (slot) != VAR_DECL)
5679 abort ();
5681 if (target == 0)
5683 if (DECL_RTL (slot) != 0)
5685 target = DECL_RTL (slot);
5686 /* If we have already expanded the slot, don't do
5687 it again. (mrs) */
5688 if (TREE_OPERAND (exp, 1) == NULL_TREE)
5689 return target;
5691 else
5693 target = assign_stack_temp (mode, int_size_in_bytes (type), 0);
5694 /* All temp slots at this level must not conflict. */
5695 preserve_temp_slots (target);
5696 DECL_RTL (slot) = target;
5698 /* Since SLOT is not known to the called function
5699 to belong to its stack frame, we must build an explicit
5700 cleanup. This case occurs when we must build up a reference
5701 to pass the reference as an argument. In this case,
5702 it is very likely that such a reference need not be
5703 built here. */
5705 if (TREE_OPERAND (exp, 2) == 0)
5706 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
5707 if (TREE_OPERAND (exp, 2))
5709 cleanups_this_call = tree_cons (NULL_TREE,
5710 TREE_OPERAND (exp, 2),
5711 cleanups_this_call);
5715 else
5717 /* This case does occur when expanding a parameter which
5718 needs to be constructed on the stack. The target
5719 is the actual stack address that we want to initialize.
5720 The function we call will perform the cleanup in this case. */
5722 /* If we have already assigned it space, use that space,
5723 not the target that we were passed in, as our target
5724 parameter is only a hint. */
5725 if (DECL_RTL (slot) != 0)
5727 target = DECL_RTL (slot);
5728 /* If we have already expanded the slot, don't do
5729 it again. (mrs) */
5730 if (TREE_OPERAND (exp, 1) == NULL_TREE)
5731 return target;
5734 DECL_RTL (slot) = target;
5737 exp1 = TREE_OPERAND (exp, 1);
5738 /* Mark it as expanded. */
5739 TREE_OPERAND (exp, 1) = NULL_TREE;
5741 return expand_expr (exp1, target, tmode, modifier);
5744 case INIT_EXPR:
5746 tree lhs = TREE_OPERAND (exp, 0);
5747 tree rhs = TREE_OPERAND (exp, 1);
5748 tree noncopied_parts = 0;
5749 tree lhs_type = TREE_TYPE (lhs);
5751 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
5752 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
5753 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
5754 TYPE_NONCOPIED_PARTS (lhs_type));
5755 while (noncopied_parts != 0)
5757 expand_assignment (TREE_VALUE (noncopied_parts),
5758 TREE_PURPOSE (noncopied_parts), 0, 0);
5759 noncopied_parts = TREE_CHAIN (noncopied_parts);
5761 return temp;
5764 case MODIFY_EXPR:
5766 /* If lhs is complex, expand calls in rhs before computing it.
5767 That's so we don't compute a pointer and save it over a call.
5768 If lhs is simple, compute it first so we can give it as a
5769 target if the rhs is just a call. This avoids an extra temp and copy
5770 and that prevents a partial-subsumption which makes bad code.
5771 Actually we could treat component_ref's of vars like vars. */
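/* For instance (sketch only): in

       x = f ();

   the lhs `x' is simple, so it is computed first and its rtx handed
   to the call expansion as a target, letting `f' store its result
   straight into `x'.  In

       p->field = f ();

   the lhs needs an address computation, so the call in the rhs is
   expanded first to avoid holding the pointer across it.  */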
5773 tree lhs = TREE_OPERAND (exp, 0);
5774 tree rhs = TREE_OPERAND (exp, 1);
5775 tree noncopied_parts = 0;
5776 tree lhs_type = TREE_TYPE (lhs);
5778 temp = 0;
5780 if (TREE_CODE (lhs) != VAR_DECL
5781 && TREE_CODE (lhs) != RESULT_DECL
5782 && TREE_CODE (lhs) != PARM_DECL)
5783 preexpand_calls (exp);
5785 /* Check for |= or &= of a bitfield of size 1 into another bitfield
5786 of size 1. In this case (unless we need the result of the
5787 assignment) we can do this more efficiently with a
5788 test followed by an assignment, if necessary.
5790 ??? At this point, we can't get a BIT_FIELD_REF here. But if
5791 things change so we do, this code should be enhanced to
5792 support it. */
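/* Sketch of the transformation, assuming A and B are 1-bit fields:

       s.a |= s.b;

   becomes, when the assignment's value is not needed,

       if (s.b) s.a = 1;

   and `s.a &= s.b;' becomes `if (! s.b) s.a = 0;', replacing a
   read-modify-write of the field with a test and a plain store.  */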
5793 if (ignore
5794 && TREE_CODE (lhs) == COMPONENT_REF
5795 && (TREE_CODE (rhs) == BIT_IOR_EXPR
5796 || TREE_CODE (rhs) == BIT_AND_EXPR)
5797 && TREE_OPERAND (rhs, 0) == lhs
5798 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
5799 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
5800 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
5802 rtx label = gen_label_rtx ();
5804 do_jump (TREE_OPERAND (rhs, 1),
5805 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
5806 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
5807 expand_assignment (lhs, convert (TREE_TYPE (rhs),
5808 (TREE_CODE (rhs) == BIT_IOR_EXPR
5809 ? integer_one_node
5810 : integer_zero_node)),
5811 0, 0);
5812 do_pending_stack_adjust ();
5813 emit_label (label);
5814 return const0_rtx;
5817 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
5818 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
5819 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
5820 TYPE_NONCOPIED_PARTS (lhs_type));
5822 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
5823 while (noncopied_parts != 0)
5825 expand_assignment (TREE_PURPOSE (noncopied_parts),
5826 TREE_VALUE (noncopied_parts), 0, 0);
5827 noncopied_parts = TREE_CHAIN (noncopied_parts);
5829 return temp;
5832 case PREINCREMENT_EXPR:
5833 case PREDECREMENT_EXPR:
5834 return expand_increment (exp, 0);
5836 case POSTINCREMENT_EXPR:
5837 case POSTDECREMENT_EXPR:
5838 /* Faster to treat as pre-increment if result is not used. */
5839 return expand_increment (exp, ! ignore);
5841 case ADDR_EXPR:
5842 /* If nonzero, TEMP will be set to the address of something that might
5843 be a MEM corresponding to a stack slot. */
5844 temp = 0;
5846 /* Are we taking the address of a nested function? */
5847 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
5848 && decl_function_context (TREE_OPERAND (exp, 0)) != 0)
5850 op0 = trampoline_address (TREE_OPERAND (exp, 0));
5851 op0 = force_operand (op0, target);
5853 /* If we are taking the address of something erroneous, just
5854 return a zero. */
5855 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
5856 return const0_rtx;
5857 else
5859 /* We make sure to pass const0_rtx down if we came in with
5860 ignore set, to avoid doing the cleanups twice. */
5861 op0 = expand_expr (TREE_OPERAND (exp, 0),
5862 ignore ? const0_rtx : NULL_RTX, VOIDmode,
5863 (modifier == EXPAND_INITIALIZER
5864 ? modifier : EXPAND_CONST_ADDRESS));
5866 /* If we are going to ignore the result, OP0 will have been set
5867 to const0_rtx, so just return it. Don't get confused and
5868 think we are taking the address of the constant. */
5869 if (ignore)
5870 return op0;
5872 /* We would like the object in memory. If it is a constant,
5873 we can have it be statically allocated into memory. For
5874 a non-constant (REG, SUBREG or CONCAT), we need to allocate some
5875 memory and store the value into it. */
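/* Hypothetical example: if `c' has been promoted to a pseudo
   register, then in

       char c;
       char *p = &c;

   there is no address to take, so below we spill the value into a
   fresh stack temporary and return that temporary's address;
   constants are instead forced into memory by force_const_mem.  */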
5877 if (CONSTANT_P (op0))
5878 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
5879 op0);
5880 else if (GET_CODE (op0) == MEM)
5881 temp = XEXP (op0, 0);
5883 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
5884 || GET_CODE (op0) == CONCAT)
5886 /* If this object is in a register, it must not
5887 be BLKmode. */
5888 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
5889 enum machine_mode inner_mode = TYPE_MODE (inner_type);
5890 rtx memloc
5891 = assign_stack_temp (inner_mode,
5892 int_size_in_bytes (inner_type), 1);
5894 emit_move_insn (memloc, op0);
5895 op0 = memloc;
5898 if (GET_CODE (op0) != MEM)
5899 abort ();
5901 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
5902 return XEXP (op0, 0);
5904 op0 = force_operand (XEXP (op0, 0), target);
5907 if (flag_force_addr && GET_CODE (op0) != REG)
5908 op0 = force_reg (Pmode, op0);
5910 if (GET_CODE (op0) == REG)
5911 mark_reg_pointer (op0);
5913 /* If we might have had a temp slot, add an equivalent address
5914 for it. */
5915 if (temp != 0)
5916 update_temp_slot_address (temp, op0);
5918 return op0;
5920 case ENTRY_VALUE_EXPR:
5921 abort ();
5923 /* COMPLEX type for Extended Pascal & Fortran */
5924 case COMPLEX_EXPR:
5926 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
5927 rtx insns;
5929 /* Get the rtx code of the operands. */
5930 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5931 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
5933 if (! target)
5934 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5936 start_sequence ();
5938 /* Move the real (op0) and imaginary (op1) parts to their location. */
5939 emit_move_insn (gen_realpart (mode, target), op0);
5940 emit_move_insn (gen_imagpart (mode, target), op1);
5942 insns = get_insns ();
5943 end_sequence ();
5945 /* Complex construction should appear as a single unit. */
5946 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
5947 each with a separate pseudo as destination.
5948 It's not correct for flow to treat them as a unit. */
5949 if (GET_CODE (target) != CONCAT)
5950 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
5951 else
5952 emit_insns (insns);
5954 return target;
5957 case REALPART_EXPR:
5958 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5959 return gen_realpart (mode, op0);
5961 case IMAGPART_EXPR:
5962 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5963 return gen_imagpart (mode, op0);
5965 case CONJ_EXPR:
5967 rtx imag_t;
5968 rtx insns;
5970 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5972 if (! target)
5973 target = gen_reg_rtx (mode);
5975 start_sequence ();
5977 /* Store the realpart and the negated imagpart to target. */
5978 emit_move_insn (gen_realpart (mode, target), gen_realpart (mode, op0));
5980 imag_t = gen_imagpart (mode, target);
5981 temp = expand_unop (mode, neg_optab,
5982 gen_imagpart (mode, op0), imag_t, 0);
5983 if (temp != imag_t)
5984 emit_move_insn (imag_t, temp);
5986 insns = get_insns ();
5987 end_sequence ();
5989 /* Conjugate should appear as a single unit.
5990 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
5991 each with a separate pseudo as destination.
5992 It's not correct for flow to treat them as a unit. */
5993 if (GET_CODE (target) != CONCAT)
5994 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
5995 else
5996 emit_insns (insns);
5998 return target;
6001 case ERROR_MARK:
6002 op0 = CONST0_RTX (tmode);
6003 if (op0 != 0)
6004 return op0;
6005 return const0_rtx;
6007 default:
6008 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
6011 /* Here to do an ordinary binary operator, generating an instruction
6012 from the optab already placed in `this_optab'. */
6013 binop:
6014 preexpand_calls (exp);
6015 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6016 subtarget = 0;
6017 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6018 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6019 binop2:
6020 temp = expand_binop (mode, this_optab, op0, op1, target,
6021 unsignedp, OPTAB_LIB_WIDEN);
6022 if (temp == 0)
6023 abort ();
6024 return temp;
6028 /* Emit bytecode to evaluate the given expression EXP to the stack. */
6029 void
6030 bc_expand_expr (exp)
6031 tree exp;
6033 enum tree_code code;
6034 tree type, arg0;
6035 rtx r;
6036 struct binary_operator *binoptab;
6037 struct unary_operator *unoptab;
6038 struct increment_operator *incroptab;
6039 struct bc_label *lab, *lab1;
6040 enum bytecode_opcode opcode;
6043 code = TREE_CODE (exp);
6045 switch (code)
6047 case PARM_DECL:
6049 if (DECL_RTL (exp) == 0)
6051 error_with_decl (exp, "prior parameter's size depends on `%s'");
6052 return;
6055 bc_load_parmaddr (DECL_RTL (exp));
6056 bc_load_memory (TREE_TYPE (exp), exp);
6058 return;
6060 case VAR_DECL:
6062 if (DECL_RTL (exp) == 0)
6063 abort ();
6065 #if 0
6066 if (BYTECODE_LABEL (DECL_RTL (exp)))
6067 bc_load_externaddr (DECL_RTL (exp));
6068 else
6069 bc_load_localaddr (DECL_RTL (exp));
6070 #endif
6071 if (TREE_PUBLIC (exp))
6072 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
6073 BYTECODE_BC_LABEL (DECL_RTL (exp))->offset);
6074 else
6075 bc_load_localaddr (DECL_RTL (exp));
6077 bc_load_memory (TREE_TYPE (exp), exp);
6078 return;
6080 case INTEGER_CST:
6082 #ifdef DEBUG_PRINT_CODE
6083 fprintf (stderr, " [%x]\n", TREE_INT_CST_LOW (exp));
6084 #endif
6085 bc_emit_instruction (mode_to_const_map[(int) (DECL_BIT_FIELD (exp)
6086 ? SImode
6087 : TYPE_MODE (TREE_TYPE (exp)))],
6088 (HOST_WIDE_INT) TREE_INT_CST_LOW (exp));
6089 return;
6091 case REAL_CST:
6093 #if 0
6094 #ifdef DEBUG_PRINT_CODE
6095 fprintf (stderr, " [%g]\n", (double) TREE_INT_CST_LOW (exp));
6096 #endif
6097 /* FIX THIS: find a better way to pass real_cst's. -bson */
6098 bc_emit_instruction (mode_to_const_map[TYPE_MODE (TREE_TYPE (exp))],
6099 (double) TREE_REAL_CST (exp));
6100 #else
6101 abort ();
6102 #endif
6104 return;
6106 case CALL_EXPR:
6108 /* We build a call description vector describing the type of
6109 the return value and of the arguments; this call vector,
6110 together with a pointer to a location for the return value
6111 and the base of the argument list, is passed to the low
6112 level machine dependent call subroutine, which is responsible
6113 for putting the arguments wherever real functions expect
6114 them, as well as getting the return value back. */
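/* Sketch of the resulting call descriptor, under the layout built
   below (argument count first, then a type-code/size pair for the
   return value and for each argument in source order):

       int f (char, double)  =>  { 2, Tint, 4, Tchar, 1, Tdouble, 8 }

   where the T codes stand for bc_runtime_type_code values and the
   sizes shown are illustrative.  */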
6116 tree calldesc = 0, arg;
6117 int nargs = 0, i;
6118 rtx retval;
6120 /* Push the evaluated args on the evaluation stack in reverse
6121 order. Also make an entry for each arg in the calldesc
6122 vector while we're at it. */
6124 TREE_OPERAND (exp, 1) = nreverse (TREE_OPERAND (exp, 1));
6126 for (arg = TREE_OPERAND (exp, 1); arg; arg = TREE_CHAIN (arg))
6128 ++nargs;
6129 bc_expand_expr (TREE_VALUE (arg));
6131 calldesc = tree_cons ((tree) 0,
6132 size_in_bytes (TREE_TYPE (TREE_VALUE (arg))),
6133 calldesc);
6134 calldesc = tree_cons ((tree) 0,
6135 bc_runtime_type_code (TREE_TYPE (TREE_VALUE (arg))),
6136 calldesc);
6139 TREE_OPERAND (exp, 1) = nreverse (TREE_OPERAND (exp, 1));
6141 /* Allocate a location for the return value and push its
6142 address on the evaluation stack. Also make an entry
6143 at the front of the calldesc for the return value type. */
6145 type = TREE_TYPE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
6146 retval = bc_allocate_local (int_size_in_bytes (type), TYPE_ALIGN (type));
6147 bc_load_localaddr (retval);
6149 calldesc = tree_cons ((tree) 0, size_in_bytes (type), calldesc);
6150 calldesc = tree_cons ((tree) 0, bc_runtime_type_code (type), calldesc);
6152 /* Prepend the argument count. */
6153 calldesc = tree_cons ((tree) 0,
6154 build_int_2 (nargs, 0),
6155 calldesc);
6157 /* Push the address of the call description vector on the stack. */
6158 calldesc = build_nt (CONSTRUCTOR, (tree) 0, calldesc);
6159 TREE_TYPE (calldesc) = build_array_type (integer_type_node,
6160 build_index_type (build_int_2 (nargs * 2, 0)));
6161 r = output_constant_def (calldesc);
6162 bc_load_externaddr (r);
6164 /* Push the address of the function to be called. */
6165 bc_expand_expr (TREE_OPERAND (exp, 0));
6167 /* Call the function, popping its address and the calldesc vector
6168 address off the evaluation stack in the process. */
6169 bc_emit_instruction (call);
6171 /* Pop the arguments off the stack. */
6172 bc_adjust_stack (nargs);
6174 /* Load the return value onto the stack. */
6175 bc_load_localaddr (retval);
6176 bc_load_memory (type, TREE_OPERAND (exp, 0));
6178 return;
6180 case SAVE_EXPR:
6182 if (!SAVE_EXPR_RTL (exp))
6184 /* First time around: copy to a local variable. */
6185 SAVE_EXPR_RTL (exp) = bc_allocate_local (int_size_in_bytes (TREE_TYPE (exp)),
6186 TYPE_ALIGN (TREE_TYPE(exp)));
6187 bc_expand_expr (TREE_OPERAND (exp, 0));
6188 bc_emit_instruction (duplicate);
6190 bc_load_localaddr (SAVE_EXPR_RTL (exp));
6191 bc_store_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
6193 else
6195 /* Consecutive reference: use the saved copy. */
6196 bc_load_localaddr (SAVE_EXPR_RTL (exp));
6197 bc_load_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
6199 return;
6201 #if 0
6202 /* FIXME: the XXXX_STMT codes have been removed in GCC2, but
6203 how are they handled instead? */
6204 case LET_STMT:
6206 TREE_USED (exp) = 1;
6207 bc_expand_expr (STMT_BODY (exp));
6208 return;
6209 #endif
6211 case NOP_EXPR:
6212 case CONVERT_EXPR:
6214 bc_expand_expr (TREE_OPERAND (exp, 0));
6215 bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)), TREE_TYPE (exp));
6216 return;
6218 case MODIFY_EXPR:
6220 expand_assignment (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1), 0, 0);
6221 return;
6223 case ADDR_EXPR:
6225 bc_expand_address (TREE_OPERAND (exp, 0));
6226 return;
6228 case INDIRECT_REF:
6230 bc_expand_expr (TREE_OPERAND (exp, 0));
6231 bc_load_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
6232 return;
6234 case ARRAY_REF:
6236 bc_expand_expr (bc_canonicalize_array_ref (exp));
6237 return;
6239 case COMPONENT_REF:
6241 bc_expand_component_address (exp);
6243 /* If we have a bitfield, generate a proper load. */
6244 bc_load_memory (TREE_TYPE (TREE_OPERAND (exp, 1)), TREE_OPERAND (exp, 1));
6245 return;
6247 case COMPOUND_EXPR:
6249 bc_expand_expr (TREE_OPERAND (exp, 0));
6250 bc_emit_instruction (drop);
6251 bc_expand_expr (TREE_OPERAND (exp, 1));
6252 return;
6254 case COND_EXPR:
6256 bc_expand_expr (TREE_OPERAND (exp, 0));
6257 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)));
6258 lab = bc_get_bytecode_label ();
6259 bc_emit_bytecode (xjumpifnot);
6260 bc_emit_bytecode_labelref (lab);
6262 #ifdef DEBUG_PRINT_CODE
6263 fputc ('\n', stderr);
6264 #endif
6265 bc_expand_expr (TREE_OPERAND (exp, 1));
6266 lab1 = bc_get_bytecode_label ();
6267 bc_emit_bytecode (jump);
6268 bc_emit_bytecode_labelref (lab1);
6270 #ifdef DEBUG_PRINT_CODE
6271 fputc ('\n', stderr);
6272 #endif
6274 bc_emit_bytecode_labeldef (lab);
6275 bc_expand_expr (TREE_OPERAND (exp, 2));
6276 bc_emit_bytecode_labeldef (lab1);
6277 return;
6279 case TRUTH_ANDIF_EXPR:
6281 opcode = xjumpifnot;
6282 goto andorif;
6284 case TRUTH_ORIF_EXPR:
6286 opcode = xjumpif;
6287 goto andorif;
6289 case PLUS_EXPR:
6291 binoptab = optab_plus_expr;
6292 goto binop;
6294 case MINUS_EXPR:
6296 binoptab = optab_minus_expr;
6297 goto binop;
6299 case MULT_EXPR:
6301 binoptab = optab_mult_expr;
6302 goto binop;
6304 case TRUNC_DIV_EXPR:
6305 case FLOOR_DIV_EXPR:
6306 case CEIL_DIV_EXPR:
6307 case ROUND_DIV_EXPR:
6308 case EXACT_DIV_EXPR:
6310 binoptab = optab_trunc_div_expr;
6311 goto binop;
6313 case TRUNC_MOD_EXPR:
6314 case FLOOR_MOD_EXPR:
6315 case CEIL_MOD_EXPR:
6316 case ROUND_MOD_EXPR:
6318 binoptab = optab_trunc_mod_expr;
6319 goto binop;
6321 case FIX_ROUND_EXPR:
6322 case FIX_FLOOR_EXPR:
6323 case FIX_CEIL_EXPR:
6324 abort (); /* Not used for C. */
6326 case FIX_TRUNC_EXPR:
6327 case FLOAT_EXPR:
6328 case MAX_EXPR:
6329 case MIN_EXPR:
6330 case FFS_EXPR:
6331 case LROTATE_EXPR:
6332 case RROTATE_EXPR:
6333 abort (); /* FIXME */
6335 case RDIV_EXPR:
6337 binoptab = optab_rdiv_expr;
6338 goto binop;
6340 case BIT_AND_EXPR:
6342 binoptab = optab_bit_and_expr;
6343 goto binop;
6345 case BIT_IOR_EXPR:
6347 binoptab = optab_bit_ior_expr;
6348 goto binop;
6350 case BIT_XOR_EXPR:
6352 binoptab = optab_bit_xor_expr;
6353 goto binop;
6355 case LSHIFT_EXPR:
6357 binoptab = optab_lshift_expr;
6358 goto binop;
6360 case RSHIFT_EXPR:
6362 binoptab = optab_rshift_expr;
6363 goto binop;
6365 case TRUTH_AND_EXPR:
6367 binoptab = optab_truth_and_expr;
6368 goto binop;
6370 case TRUTH_OR_EXPR:
6372 binoptab = optab_truth_or_expr;
6373 goto binop;
6375 case LT_EXPR:
6377 binoptab = optab_lt_expr;
6378 goto binop;
6380 case LE_EXPR:
6382 binoptab = optab_le_expr;
6383 goto binop;
6385 case GE_EXPR:
6387 binoptab = optab_ge_expr;
6388 goto binop;
6390 case GT_EXPR:
6392 binoptab = optab_gt_expr;
6393 goto binop;
6395 case EQ_EXPR:
6397 binoptab = optab_eq_expr;
6398 goto binop;
6400 case NE_EXPR:
6402 binoptab = optab_ne_expr;
6403 goto binop;
6405 case NEGATE_EXPR:
6407 unoptab = optab_negate_expr;
6408 goto unop;
6410 case BIT_NOT_EXPR:
6412 unoptab = optab_bit_not_expr;
6413 goto unop;
6415 case TRUTH_NOT_EXPR:
6417 unoptab = optab_truth_not_expr;
6418 goto unop;
6420 case PREDECREMENT_EXPR:
6422 incroptab = optab_predecrement_expr;
6423 goto increment;
6425 case PREINCREMENT_EXPR:
6427 incroptab = optab_preincrement_expr;
6428 goto increment;
6430 case POSTDECREMENT_EXPR:
6432 incroptab = optab_postdecrement_expr;
6433 goto increment;
6435 case POSTINCREMENT_EXPR:
6437 incroptab = optab_postincrement_expr;
6438 goto increment;
6440 case CONSTRUCTOR:
6442 bc_expand_constructor (exp);
6443 return;
6445 case ERROR_MARK:
6446 case RTL_EXPR:
6448 return;
6450 case BIND_EXPR:
6452 tree vars = TREE_OPERAND (exp, 0);
6453 int vars_need_expansion = 0;
6455 /* Need to open a binding contour here because
6456 if there are any cleanups they must be contained here. */
6457 expand_start_bindings (0);
6459 /* Mark the corresponding BLOCK for output. */
6460 if (TREE_OPERAND (exp, 2) != 0)
6461 TREE_USED (TREE_OPERAND (exp, 2)) = 1;
6463 /* If VARS have not yet been expanded, expand them now. */
6464 while (vars)
6466 if (DECL_RTL (vars) == 0)
6468 vars_need_expansion = 1;
6469 expand_decl (vars);
6471 expand_decl_init (vars);
6472 vars = TREE_CHAIN (vars);
6475 bc_expand_expr (TREE_OPERAND (exp, 1));
6477 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6479 return;
6483 abort ();
6485 binop:
6487 bc_expand_binary_operation (binoptab, TREE_TYPE (exp),
6488 TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1));
6489 return;
6492 unop:
6494 bc_expand_unary_operation (unoptab, TREE_TYPE (exp), TREE_OPERAND (exp, 0));
6495 return;
6498 andorif:
6500 bc_expand_expr (TREE_OPERAND (exp, 0));
6501 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)));
6502 lab = bc_get_bytecode_label ();
6504 bc_emit_instruction (duplicate);
6505 bc_emit_bytecode (opcode);
6506 bc_emit_bytecode_labelref (lab);
6508 #ifdef DEBUG_PRINT_CODE
6509 fputc ('\n', stderr);
6510 #endif
6512 bc_emit_instruction (drop);
6514 bc_expand_expr (TREE_OPERAND (exp, 1));
6515 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 1)));
6516 bc_emit_bytecode_labeldef (lab);
6517 return;
6520 increment:
6522 type = TREE_TYPE (TREE_OPERAND (exp, 0));
6524 /* Push the quantum. */
6525 bc_expand_expr (TREE_OPERAND (exp, 1));
6527 /* Convert it to the lvalue's type. */
6528 bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp, 1)), type);
6530 /* Push the address of the lvalue. */
6531 bc_expand_expr (build1 (ADDR_EXPR, TYPE_POINTER_TO (type), TREE_OPERAND (exp, 0)));
6533 /* Perform the actual increment. */
6534 bc_expand_increment (incroptab, type);
6535 return;
6538 /* Return the alignment in bits of EXP, a pointer-valued expression.
6539 But don't return more than MAX_ALIGN no matter what.
6540 The alignment returned is, by default, the alignment of the thing that
6541 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
6543 Otherwise, look at the expression to see if we can do better, i.e., if the
6544 expression is actually pointing at an object whose alignment is tighter. */
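/* A worked example (illustrative only): for

       char *p = (char *) &d + 2;

   where `d' is a double, even if `&d' alone is 64-bit aligned the
   PLUS_EXPR case below shrinks MAX_ALIGN until the 2-byte offset is
   a multiple of it, so at most 16-bit alignment is claimed for the
   sum.  */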
6546 static int
6547 get_pointer_alignment (exp, max_align)
6548 tree exp;
6549 unsigned max_align;
6551 unsigned align, inner;
6553 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
6554 return 0;
6556 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
6557 align = MIN (align, max_align);
6559 while (1)
6561 switch (TREE_CODE (exp))
6563 case NOP_EXPR:
6564 case CONVERT_EXPR:
6565 case NON_LVALUE_EXPR:
6566 exp = TREE_OPERAND (exp, 0);
6567 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
6568 return align;
6569 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
6570 align = MIN (inner, max_align);
6571 break;
6573 case PLUS_EXPR:
6574 /* If sum of pointer + int, restrict our maximum alignment to that
6575 imposed by the integer. If not, we can't do any better than
6576 ALIGN. */
6577 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
6578 return align;
6580 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
6581 & (max_align - 1))
6582 != 0)
6583 max_align >>= 1;
6585 exp = TREE_OPERAND (exp, 0);
6586 break;
6588 case ADDR_EXPR:
6589 /* See what we are pointing at and look at its alignment. */
6590 exp = TREE_OPERAND (exp, 0);
6591 if (TREE_CODE (exp) == FUNCTION_DECL)
6592 align = FUNCTION_BOUNDARY;
6593 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
6594 align = DECL_ALIGN (exp);
6595 #ifdef CONSTANT_ALIGNMENT
6596 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
6597 align = CONSTANT_ALIGNMENT (exp, align);
6598 #endif
6599 return MIN (align, max_align);
6601 default:
6602 return align;
6607 /* Return the tree node and offset if a given argument corresponds to
6608 a string constant. */
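/* Illustrative sketch: for an argument tree of the form
   `"hello" + i', i.e. a PLUS_EXPR with an ADDR_EXPR of a STRING_CST
   in either operand, this returns the STRING_CST and stores the
   offset tree `i' in *PTR_OFFSET; a bare `"hello"' yields the
   STRING_CST with a zero offset.  */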
6610 static tree
6611 string_constant (arg, ptr_offset)
6612 tree arg;
6613 tree *ptr_offset;
6615 STRIP_NOPS (arg);
6617 if (TREE_CODE (arg) == ADDR_EXPR
6618 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
6620 *ptr_offset = integer_zero_node;
6621 return TREE_OPERAND (arg, 0);
6623 else if (TREE_CODE (arg) == PLUS_EXPR)
6625 tree arg0 = TREE_OPERAND (arg, 0);
6626 tree arg1 = TREE_OPERAND (arg, 1);
6628 STRIP_NOPS (arg0);
6629 STRIP_NOPS (arg1);
6631 if (TREE_CODE (arg0) == ADDR_EXPR
6632 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
6634 *ptr_offset = arg1;
6635 return TREE_OPERAND (arg0, 0);
6637 else if (TREE_CODE (arg1) == ADDR_EXPR
6638 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
6640 *ptr_offset = arg0;
6641 return TREE_OPERAND (arg1, 0);
6645 return 0;
6648 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
6649 way, because it could contain a zero byte in the middle.
6650 TREE_STRING_LENGTH is the size of the character array, not the string.
6652 Unfortunately, string_constant can't access the values of const char
6653 arrays with initializers, so neither can we here. */
6655 static tree
6656 c_strlen (src)
6657 tree src;
6659 tree offset_node;
6660 int offset, max;
6661 char *ptr;
6663 src = string_constant (src, &offset_node);
6664 if (src == 0)
6665 return 0;
6666 max = TREE_STRING_LENGTH (src);
6667 ptr = TREE_STRING_POINTER (src);
6668 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
6670 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
6671 compute the offset to the following null if we don't know where to
6672 start searching for it. */
6673 int i;
6674 for (i = 0; i < max; i++)
6675 if (ptr[i] == 0)
6676 return 0;
6677 /* We don't know the starting offset, but we do know that the string
6678 has no internal zero bytes. We can assume that the offset falls
6679 within the bounds of the string; otherwise, the programmer deserves
6680 what he gets. Subtract the offset from the length of the string,
6681 and return that. */
6682 /* This would perhaps not be valid if we were dealing with named
6683 arrays in addition to literal string constants. */
6684 return size_binop (MINUS_EXPR, size_int (max), offset_node);
6687 /* We have a known offset into the string. Start searching there for
6688 a null character. */
6689 if (offset_node == 0)
6690 offset = 0;
6691 else
6693 /* Did we get a long long offset? If so, punt. */
6694 if (TREE_INT_CST_HIGH (offset_node) != 0)
6695 return 0;
6696 offset = TREE_INT_CST_LOW (offset_node);
6698 /* If the offset is known to be out of bounds, warn, and call strlen at
6699 runtime. */
6700 if (offset < 0 || offset > max)
6702 warning ("offset outside bounds of constant string");
6703 return 0;
6705 /* Use strlen to search for the first zero byte. Since any strings
6706 constructed with build_string will have nulls appended, we win even
6707 if we get handed something like (char[4])"abcd".
6709 Since OFFSET is our starting index into the string, no further
6710 calculation is needed. */
6711 return size_int (strlen (ptr + offset));
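/* For example (sketch only): for the argument `"ab\0cd" + 1' we
   reach the line above with PTR pointing at "ab\0cd" and OFFSET 1,
   and return size_int (1), the distance to the embedded null, just
   as strlen would compute at run time.  */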
6714 /* Expand an expression EXP that calls a built-in function,
6715 with result going to TARGET if that's convenient
6716 (and in mode MODE if that's convenient).
6717 SUBTARGET may be used as the target for computing one of EXP's operands.
6718 IGNORE is nonzero if the value is to be ignored. */
6720 #define CALLED_AS_BUILT_IN(NODE) \
6721 (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10))
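/* Thus a call spelled `__builtin_strlen (s)' is expanded inline even
   when not optimizing, while a plain `strlen (s)' is only open-coded
   at -O; several cases below test CALLED_AS_BUILT_IN for exactly
   this distinction.  */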
6723 static rtx
6724 expand_builtin (exp, target, subtarget, mode, ignore)
6725 tree exp;
6726 rtx target;
6727 rtx subtarget;
6728 enum machine_mode mode;
6729 int ignore;
6731 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6732 tree arglist = TREE_OPERAND (exp, 1);
6733 rtx op0;
6734 rtx lab1, insns;
6735 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
6736 optab builtin_optab;
6738 switch (DECL_FUNCTION_CODE (fndecl))
6740 case BUILT_IN_ABS:
6741 case BUILT_IN_LABS:
6742 case BUILT_IN_FABS:
6743 /* build_function_call changes these into ABS_EXPR. */
6744 abort ();
6746 case BUILT_IN_SIN:
6747 case BUILT_IN_COS:
6748 case BUILT_IN_FSQRT:
6749 /* If not optimizing, call the library function. */
6750 if (! optimize)
6751 break;
6753 if (arglist == 0
6754 /* Arg could be wrong type if user redeclared this fcn wrong. */
6755 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
6756 break;
6758 /* Stabilize and compute the argument. */
6759 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
6760 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
6762 exp = copy_node (exp);
6763 arglist = copy_node (arglist);
6764 TREE_OPERAND (exp, 1) = arglist;
6765 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
6767 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
6769 /* Make a suitable register to place result in. */
6770 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
6772 emit_queue ();
6773 start_sequence ();
6775 switch (DECL_FUNCTION_CODE (fndecl))
6777 case BUILT_IN_SIN:
6778 builtin_optab = sin_optab; break;
6779 case BUILT_IN_COS:
6780 builtin_optab = cos_optab; break;
6781 case BUILT_IN_FSQRT:
6782 builtin_optab = sqrt_optab; break;
6783 default:
6784 abort ();
6787 /* Compute into TARGET.
6788 Set TARGET to wherever the result comes back. */
6789 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
6790 builtin_optab, op0, target, 0);
6792 /* If we were unable to expand via the builtin, stop the
6793 sequence (without outputting the insns) and break, causing
6794 a call to the library function. */
6795 if (target == 0)
6797 end_sequence ();
6798 break;
6801 /* Check the results by default. But if flag_fast_math is turned on,
6802 then assume sqrt will always be called with valid arguments. */
6804 if (! flag_fast_math)
6806 /* Don't define the builtin FP instructions
6807 if your machine is not IEEE. */
6808 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
6809 abort ();
6811 lab1 = gen_label_rtx ();
6813 /* Test the result; if it is NaN, set errno=EDOM because
6814 the argument was not in the domain. */
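/* Only a NaN compares unequal to itself, so the branch-if-equal
   below skips the errno code for every ordinary result.  */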
6815 emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
6816 emit_jump_insn (gen_beq (lab1));
6818 #ifdef TARGET_EDOM
6820 #ifdef GEN_ERRNO_RTX
6821 rtx errno_rtx = GEN_ERRNO_RTX;
6822 #else
6823 rtx errno_rtx
6824 = gen_rtx (MEM, word_mode, gen_rtx (SYMBOL_REF, Pmode, "*errno"));
6825 #endif
6827 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
6829 #else
6830 /* We can't set errno=EDOM directly; let the library call do it.
6831 Pop the arguments right away in case the call gets deleted. */
6832 NO_DEFER_POP;
6833 expand_call (exp, target, 0);
6834 OK_DEFER_POP;
6835 #endif
6837 emit_label (lab1);
6840 /* Output the entire sequence. */
6841 insns = get_insns ();
6842 end_sequence ();
6843 emit_insns (insns);
6845 return target;
6847 /* __builtin_apply_args returns block of memory allocated on
6848 the stack into which is stored the arg pointer, structure
6849 value address, static chain, and all the registers that might
6850 possibly be used in performing a function call. The code is
6851 moved to the start of the function so the incoming values are
6852 saved. */
6853 case BUILT_IN_APPLY_ARGS:
6854 /* Don't do __builtin_apply_args more than once in a function.
6855 Save the result of the first call and reuse it. */
6856 if (apply_args_value != 0)
6857 return apply_args_value;
6859 /* When this function is called, it means that registers must be
6860 saved on entry to this function. So we migrate the
6861 call to the first insn of this function. */
6862 rtx temp;
6863 rtx seq;
6865 start_sequence ();
6866 temp = expand_builtin_apply_args ();
6867 seq = get_insns ();
6868 end_sequence ();
6870 apply_args_value = temp;
6872 /* Put the sequence after the NOTE that starts the function.
6873 If this is inside a SEQUENCE, make the outer-level insn
6874 chain current, so the code is placed at the start of the
6875 function. */
6876 push_topmost_sequence ();
6877 emit_insns_before (seq, NEXT_INSN (get_insns ()));
6878 pop_topmost_sequence ();
6879 return temp;
6882 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6883 FUNCTION with a copy of the parameters described by
6884 ARGUMENTS, and ARGSIZE. It returns a block of memory
6885 allocated on the stack into which is stored all the registers
6886 that might possibly be used for returning the result of a
6887 function. ARGUMENTS is the value returned by
6888 __builtin_apply_args. ARGSIZE is the number of bytes of
6889 arguments that must be copied. ??? How should this value be
6890 computed? We'll also need a safe worst case value for varargs
6891 functions. */
6892 case BUILT_IN_APPLY:
6893 if (arglist == 0
6894 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6895 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
6896 || TREE_CHAIN (arglist) == 0
6897 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
6898 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
6899 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
6900 return const0_rtx;
6901 else
6903 int i;
6904 tree t;
6905 rtx ops[3];
6907 for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
6908 ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);
6910 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6913 /* __builtin_return (RESULT) causes the function to return the
6914 value described by RESULT. RESULT is address of the block of
6915 memory returned by __builtin_apply. */
6916 case BUILT_IN_RETURN:
6917 if (arglist
6918 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6919 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE)
6920 expand_builtin_return (expand_expr (TREE_VALUE (arglist),
6921 NULL_RTX, VOIDmode, 0));
6922 return const0_rtx;
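/* A classic combined use of these three builtins (user-level
   sketch, not part of this file) is a transparent forwarding
   function:

       void *wrap ()
       {
         void *args = __builtin_apply_args ();
         void *ret
           = __builtin_apply ((void (*) ()) target_fn, args, 64);
         __builtin_return (ret);
       }

   where `target_fn' is hypothetical and 64 is a worst-case guess at
   the argument block size, which the caller must supply as noted
   above.  */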
6924 case BUILT_IN_SAVEREGS:
6925 /* Don't do __builtin_saveregs more than once in a function.
6926 Save the result of the first call and reuse it. */
6927 if (saveregs_value != 0)
6928 return saveregs_value;
6930 /* When this function is called, it means that registers must be
6931 saved on entry to this function. So we migrate the
6932 call to the first insn of this function. */
6933 rtx temp;
6934 rtx seq;
6936 /* Now really call the function. `expand_call' does not call
6937 expand_builtin, so there is no danger of infinite recursion here. */
6938 start_sequence ();
6940 #ifdef EXPAND_BUILTIN_SAVEREGS
6941 /* Do whatever the machine needs done in this case. */
6942 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
6943 #else
6944 /* The register where the function returns its value
6945 is likely to have something else in it, such as an argument.
6946 So preserve that register around the call. */
6948 if (value_mode != VOIDmode)
6950 rtx valreg = hard_libcall_value (value_mode);
6951 rtx saved_valreg = gen_reg_rtx (value_mode);
6953 emit_move_insn (saved_valreg, valreg);
6954 temp = expand_call (exp, target, ignore);
6955 emit_move_insn (valreg, saved_valreg);
6957 else
6958 /* Generate the call, putting the value in a pseudo. */
6959 temp = expand_call (exp, target, ignore);
6960 #endif
6962 seq = get_insns ();
6963 end_sequence ();
6965 saveregs_value = temp;
6967 /* Put the sequence after the NOTE that starts the function.
6968 If this is inside a SEQUENCE, make the outer-level insn
6969 chain current, so the code is placed at the start of the
6970 function. */
6971 push_topmost_sequence ();
6972 emit_insns_before (seq, NEXT_INSN (get_insns ()));
6973 pop_topmost_sequence ();
6974 return temp;
6977 /* __builtin_args_info (N) returns word N of the arg space info
6978 for the current function. The number and meanings of words
6979 is controlled by the definition of CUMULATIVE_ARGS. */
6980 case BUILT_IN_ARGS_INFO:
6982 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
6983 int i;
6984 int *word_ptr = (int *) &current_function_args_info;
6985 tree type, elts, result;
6987 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
6988 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
6989 __FILE__, __LINE__);
6991 if (arglist != 0)
6993 tree arg = TREE_VALUE (arglist);
6994 if (TREE_CODE (arg) != INTEGER_CST)
6995 error ("argument of `__builtin_args_info' must be constant");
6996 else
6998 int wordnum = TREE_INT_CST_LOW (arg);
7000 if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg))
7001 error ("argument of `__builtin_args_info' out of range");
7002 else
7003 return GEN_INT (word_ptr[wordnum]);
7006 else
7007 error ("missing argument in `__builtin_args_info'");
7009 return const0_rtx;
7011 #if 0
7012 for (i = 0; i < nwords; i++)
7013 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0));
7015 type = build_array_type (integer_type_node,
7016 build_index_type (build_int_2 (nwords, 0)));
7017 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
7018 TREE_CONSTANT (result) = 1;
7019 TREE_STATIC (result) = 1;
7020 result = build (INDIRECT_REF, build_pointer_type (type), result);
7021 TREE_CONSTANT (result) = 1;
7022 return expand_expr (result, NULL_RTX, VOIDmode, 0);
7023 #endif
7026 /* Return the address of the first anonymous stack arg. */
7027 case BUILT_IN_NEXT_ARG:
7029 tree fntype = TREE_TYPE (current_function_decl);
7031 if ((TYPE_ARG_TYPES (fntype) == 0
7032 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
7033 == void_type_node))
7034 && ! current_function_varargs)
7036 error ("`va_start' used in function with fixed args");
7037 return const0_rtx;
7040 if (arglist)
7042 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
7043 tree arg = TREE_VALUE (arglist);
7045 /* Strip off all nops for the sake of the comparison. This
7046 is not quite the same as STRIP_NOPS. It does more. */
7047 while (TREE_CODE (arg) == NOP_EXPR
7048 || TREE_CODE (arg) == CONVERT_EXPR
7049 || TREE_CODE (arg) == NON_LVALUE_EXPR)
7050 arg = TREE_OPERAND (arg, 0);
7051 if (arg != last_parm)
7052 warning ("second parameter of `va_start' not last named argument");
7054 else
7055 /* Evidently an out of date version of <stdarg.h>; can't validate
7056 va_start's second argument, but can still work as intended. */
7057 warning ("`__builtin_next_arg' called without an argument");
7060 return expand_binop (Pmode, add_optab,
7061 current_function_internal_arg_pointer,
7062 current_function_arg_offset_rtx,
7063 NULL_RTX, 0, OPTAB_LIB_WIDEN);
7065 case BUILT_IN_CLASSIFY_TYPE:
7066 if (arglist != 0)
7068 tree type = TREE_TYPE (TREE_VALUE (arglist));
7069 enum tree_code code = TREE_CODE (type);
7070 if (code == VOID_TYPE)
7071 return GEN_INT (void_type_class);
7072 if (code == INTEGER_TYPE)
7073 return GEN_INT (integer_type_class);
7074 if (code == CHAR_TYPE)
7075 return GEN_INT (char_type_class);
7076 if (code == ENUMERAL_TYPE)
7077 return GEN_INT (enumeral_type_class);
7078 if (code == BOOLEAN_TYPE)
7079 return GEN_INT (boolean_type_class);
7080 if (code == POINTER_TYPE)
7081 return GEN_INT (pointer_type_class);
7082 if (code == REFERENCE_TYPE)
7083 return GEN_INT (reference_type_class);
7084 if (code == OFFSET_TYPE)
7085 return GEN_INT (offset_type_class);
7086 if (code == REAL_TYPE)
7087 return GEN_INT (real_type_class);
7088 if (code == COMPLEX_TYPE)
7089 return GEN_INT (complex_type_class);
7090 if (code == FUNCTION_TYPE)
7091 return GEN_INT (function_type_class);
7092 if (code == METHOD_TYPE)
7093 return GEN_INT (method_type_class);
7094 if (code == RECORD_TYPE)
7095 return GEN_INT (record_type_class);
7096 if (code == UNION_TYPE || code == QUAL_UNION_TYPE)
7097 return GEN_INT (union_type_class);
7098 if (code == ARRAY_TYPE)
7100 if (TYPE_STRING_FLAG (type))
7101 return GEN_INT (string_type_class);
7102 else
7103 return GEN_INT (array_type_class);
7105 if (code == SET_TYPE)
7106 return GEN_INT (set_type_class);
7107 if (code == FILE_TYPE)
7108 return GEN_INT (file_type_class);
7109 if (code == LANG_TYPE)
7110 return GEN_INT (lang_type_class);
7112 return GEN_INT (no_type_class);
7114 case BUILT_IN_CONSTANT_P:
7115 if (arglist == 0)
7116 return const0_rtx;
7117 else
7118 return (TREE_CODE_CLASS (TREE_CODE (TREE_VALUE (arglist))) == 'c'
7119 ? const1_rtx : const0_rtx);
7121 case BUILT_IN_FRAME_ADDRESS:
7122 /* The argument must be a nonnegative integer constant.
7123 It counts the number of frames to scan up the stack.
7124 The value is the address of that frame. */
7125 case BUILT_IN_RETURN_ADDRESS:
7126 /* The argument must be a nonnegative integer constant.
7127 It counts the number of frames to scan up the stack.
7128 The value is the return address saved in that frame. */
7129 if (arglist == 0)
7130 /* Warning about missing arg was already issued. */
7131 return const0_rtx;
7132 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST)
7134 error ("invalid arg to `__builtin_return_address'");
7135 return const0_rtx;
7137 else if (tree_int_cst_sgn (TREE_VALUE (arglist)) < 0)
7139 error ("invalid arg to `__builtin_return_address'");
7140 return const0_rtx;
7142 else
7144 int count = TREE_INT_CST_LOW (TREE_VALUE (arglist));
7145 rtx tem = frame_pointer_rtx;
7146 int i;
7148 /* Some machines need special handling before we can access arbitrary
7149 frames. For example, on the sparc, we must first flush all
7150 register windows to the stack. */
7151 #ifdef SETUP_FRAME_ADDRESSES
7152 SETUP_FRAME_ADDRESSES ();
7153 #endif
7155 /* On the sparc, the return address is not in the frame, it is
7156 in a register. There is no way to access it off of the current
7157 frame pointer, but it can be accessed off the previous frame
7158 pointer by reading the value from the register window save
7159 area. */
7160 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
7161 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_RETURN_ADDRESS)
7162 count--;
7163 #endif
7165 /* Scan back COUNT frames to the specified frame. */
7166 for (i = 0; i < count; i++)
7168 /* Assume the dynamic chain pointer is in the word that
7169 the frame address points to, unless otherwise specified. */
7170 #ifdef DYNAMIC_CHAIN_ADDRESS
7171 tem = DYNAMIC_CHAIN_ADDRESS (tem);
7172 #endif
7173 tem = memory_address (Pmode, tem);
7174 tem = copy_to_reg (gen_rtx (MEM, Pmode, tem));
7177 /* For __builtin_frame_address, return what we've got. */
7178 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
7179 return tem;
7181 /* For __builtin_return_address,
7182 Get the return address from that frame. */
7183 #ifdef RETURN_ADDR_RTX
7184 return RETURN_ADDR_RTX (count, tem);
7185 #else
7186 tem = memory_address (Pmode,
7187 plus_constant (tem, GET_MODE_SIZE (Pmode)));
7188 return copy_to_reg (gen_rtx (MEM, Pmode, tem));
7189 #endif
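/* Typical uses (illustrative): `__builtin_return_address (0)' yields
   the address at which the caller resumes, and
   `__builtin_frame_address (1)' the caller's frame, each reached by
   following COUNT links of the dynamic chain in the loop above.  */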
7192 case BUILT_IN_ALLOCA:
7193 if (arglist == 0
7194 /* Arg could be non-integer if user redeclared this fcn wrong. */
7195 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
7196 break;
7198 /* Compute the argument. */
7199 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
7201 /* Allocate the desired space. */
7202 return allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
7204 case BUILT_IN_FFS:
7205 /* If not optimizing, call the library function. */
7206 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
7207 break;
7209 if (arglist == 0
7210 /* Arg could be non-integer if user redeclared this fcn wrong. */
7211 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
7212 break;
7214 /* Compute the argument. */
7215 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
7216 /* Compute ffs, into TARGET if possible.
7217 Set TARGET to wherever the result comes back. */
7218 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
7219 ffs_optab, op0, target, 1);
7220 if (target == 0)
7221 abort ();
7222 return target;
7224 case BUILT_IN_STRLEN:
7225 /* If not optimizing, call the library function. */
7226 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
7227 break;
7229 if (arglist == 0
7230 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7231 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
7232 break;
7233 else
7235 tree src = TREE_VALUE (arglist);
7236 tree len = c_strlen (src);
7238 int align
7239 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
7241 rtx result, src_rtx, char_rtx;
7242 enum machine_mode insn_mode = value_mode, char_mode;
7243 enum insn_code icode;
7245 /* If the length is known, just return it. */
7246 if (len != 0)
7247 return expand_expr (len, target, mode, 0);
7249 /* If SRC is not a pointer type, don't do this operation inline. */
7250 if (align == 0)
7251 break;
7253 /* Call a function if we can't compute strlen in the right mode. */
7255 while (insn_mode != VOIDmode)
7257 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
7258 if (icode != CODE_FOR_nothing)
7259 break;
7261 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
7263 if (insn_mode == VOIDmode)
7264 break;
7266 /* Make a place to write the result of the instruction. */
7267 result = target;
7268 if (! (result != 0
7269 && GET_CODE (result) == REG
7270 && GET_MODE (result) == insn_mode
7271 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
7272 result = gen_reg_rtx (insn_mode);
7274 /* Make sure the operands are acceptable to the predicates. */
7276 if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
7277 result = gen_reg_rtx (insn_mode);
7279 src_rtx = memory_address (BLKmode,
7280 expand_expr (src, NULL_RTX, Pmode,
7281 EXPAND_NORMAL));
7282 if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
7283 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
7285 char_rtx = const0_rtx;
7286 char_mode = insn_operand_mode[(int)icode][2];
7287 if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
7288 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
7290 emit_insn (GEN_FCN (icode) (result,
7291 gen_rtx (MEM, BLKmode, src_rtx),
7292 char_rtx, GEN_INT (align)));
7294 /* Return the value in the proper mode for this function. */
7295 if (GET_MODE (result) == value_mode)
7296 return result;
7297 else if (target != 0)
7299 convert_move (target, result, 0);
7300 return target;
7302 else
7303 return convert_to_mode (value_mode, result, 0);
7306 case BUILT_IN_STRCPY:
7307 /* If not optimizing, call the library function. */
7308 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
7309 break;
7311 if (arglist == 0
7312 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7313 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
7314 || TREE_CHAIN (arglist) == 0
7315 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
7316 break;
7317 else
7319 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
7321 if (len == 0)
7322 break;
7324 len = size_binop (PLUS_EXPR, len, integer_one_node);
7326 chainon (arglist, build_tree_list (NULL_TREE, len));
7329 /* Drops in. */
7330 case BUILT_IN_MEMCPY:
7331 /* If not optimizing, call the library function. */
7332 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
7333 break;
7335 if (arglist == 0
7336 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7337 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
7338 || TREE_CHAIN (arglist) == 0
7339 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
7340 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
7341 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
7342 break;
7343 else
7345 tree dest = TREE_VALUE (arglist);
7346 tree src = TREE_VALUE (TREE_CHAIN (arglist));
7347 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
7349 int src_align
7350 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
7351 int dest_align
7352 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
7353 rtx dest_rtx, dest_mem, src_mem;
7355 /* If either SRC or DEST is not a pointer type, don't do
7356 this operation in-line. */
7357 if (src_align == 0 || dest_align == 0)
7359 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
7360 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
7361 break;
7364 dest_rtx = expand_expr (dest, NULL_RTX, Pmode, EXPAND_NORMAL);
7365 dest_mem = gen_rtx (MEM, BLKmode,
7366 memory_address (BLKmode, dest_rtx));
7367 src_mem = gen_rtx (MEM, BLKmode,
7368 memory_address (BLKmode,
7369 expand_expr (src, NULL_RTX,
7370 Pmode,
7371 EXPAND_NORMAL)));
7373 /* Copy word part most expediently. */
7374 emit_block_move (dest_mem, src_mem,
7375 expand_expr (len, NULL_RTX, VOIDmode, 0),
7376 MIN (src_align, dest_align));
7377 return dest_rtx;
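/* Added worked example (values hypothetical, for a 32-bit target):

	int d[4];  char *s;  memcpy (d, s, 16);

   get_pointer_alignment gives dest_align == 4 but, with nothing known
   about S, src_align == 1, so emit_block_move is handed alignment
   MIN (4, 1) == 1 and must fall back to a byte-wise copy strategy.  */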
7380 /* These comparison functions need an instruction that returns an actual
7381 index. An ordinary compare that just sets the condition codes
7382 is not enough. */
7383 #ifdef HAVE_cmpstrsi
7384 case BUILT_IN_STRCMP:
7385 /* If not optimizing, call the library function. */
7386 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
7387 break;
7389 if (arglist == 0
7390 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7391 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
7392 || TREE_CHAIN (arglist) == 0
7393 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
7394 break;
7395 else if (!HAVE_cmpstrsi)
7396 break;
7398 tree arg1 = TREE_VALUE (arglist);
7399 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
7400 tree offset;
7401 tree len, len2;
7403 len = c_strlen (arg1);
7404 if (len)
7405 len = size_binop (PLUS_EXPR, integer_one_node, len);
7406 len2 = c_strlen (arg2);
7407 if (len2)
7408 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
7410 /* If we don't have a constant length for the first, use the length
7411 of the second, if we know it. We don't require a constant for
7412 this case; some cost analysis could be done if both are available
7413 but neither is constant. For now, assume they're equally cheap.
7415 If both strings have constant lengths, use the smaller. This
7416 could arise if optimization results in strcmp being called with
7417 two fixed strings, or if the code was machine-generated. We should
7418 add some code to the `memcmp' handler below to deal with such
7419 situations, someday. */
7420 if (!len || TREE_CODE (len) != INTEGER_CST)
7422 if (len2)
7423 len = len2;
7424 else if (len == 0)
7425 break;
7427 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
7429 if (tree_int_cst_lt (len2, len))
7430 len = len2;
7433 chainon (arglist, build_tree_list (NULL_TREE, len));
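/* Added example of the length selection above: for

	strcmp ("abc", s)

   LEN is 4 (the literal's length plus one for the terminating null)
   and LEN2 is unknown, so LEN stays 4.  Examining at most four bytes
   is safe: within that span the strings either differ or both end.  */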
7436 /* Drops in. */
7437 case BUILT_IN_MEMCMP:
7438 /* If not optimizing, call the library function. */
7439 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
7440 break;
7442 if (arglist == 0
7443 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7444 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
7445 || TREE_CHAIN (arglist) == 0
7446 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
7447 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
7448 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
7449 break;
7450 else if (!HAVE_cmpstrsi)
7451 break;
7453 tree arg1 = TREE_VALUE (arglist);
7454 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
7455 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
7456 rtx result;
7458 int arg1_align
7459 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
7460 int arg2_align
7461 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
7462 enum machine_mode insn_mode
7463 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
7465 /* If we don't have POINTER_TYPE, call the function. */
7466 if (arg1_align == 0 || arg2_align == 0)
7468 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
7469 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
7470 break;
7473 /* Make a place to write the result of the instruction. */
7474 result = target;
7475 if (! (result != 0
7476 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
7477 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
7478 result = gen_reg_rtx (insn_mode);
7480 emit_insn (gen_cmpstrsi (result,
7481 gen_rtx (MEM, BLKmode,
7482 expand_expr (arg1, NULL_RTX, Pmode,
7483 EXPAND_NORMAL)),
7484 gen_rtx (MEM, BLKmode,
7485 expand_expr (arg2, NULL_RTX, Pmode,
7486 EXPAND_NORMAL)),
7487 expand_expr (len, NULL_RTX, VOIDmode, 0),
7488 GEN_INT (MIN (arg1_align, arg2_align))));
7490 /* Return the value in the proper mode for this function. */
7491 mode = TYPE_MODE (TREE_TYPE (exp));
7492 if (GET_MODE (result) == mode)
7493 return result;
7494 else if (target != 0)
7496 convert_move (target, result, 0);
7497 return target;
7499 else
7500 return convert_to_mode (mode, result, 0);
7502 #else
7503 case BUILT_IN_STRCMP:
7504 case BUILT_IN_MEMCMP:
7505 break;
7506 #endif
7508 default: /* just do library call, if unknown builtin */
7509 error ("built-in function `%s' not currently supported",
7510 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
7513 /* The switch statement above can drop through to cause the function
7514 to be called normally. */
7516 return expand_call (exp, target, ignore);
7519 /* Built-in functions to perform an untyped call and return. */
7521 /* For each register that may be used for calling a function, this
7522 gives a mode used to copy the register's value. VOIDmode indicates
7523 the register is not used for calling a function. If the machine
7524 has register windows, this gives only the outbound registers.
7525 INCOMING_REGNO gives the corresponding inbound register. */
7526 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
7528 /* For each register that may be used for returning values, this gives
7529 a mode used to copy the register's value. VOIDmode indicates the
7530 register is not used for returning values. If the machine has
7531 register windows, this gives only the outbound registers.
7532 INCOMING_REGNO gives the corresponding inbound register. */
7533 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
7535 /* For each register that may be used for calling a function, this
7536 gives the offset of that register into the block returned by
7537 __builtin_apply_args. 0 indicates that the register is not
7538 used for calling a function. */
7539 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
7541 /* Return the offset of register REGNO into the block returned by
7542 __builtin_apply_args. This is not declared static, since it is
7543 needed in objc-act.c. */
7545 int
7546 apply_args_register_offset (regno)
7547 int regno;
7549 apply_args_size ();
7551 /* Arguments are always put in outgoing registers (in the argument
7552 block) when the target makes that distinction. */
7553 #ifdef OUTGOING_REGNO
7554 regno = OUTGOING_REGNO(regno);
7555 #endif
7556 return apply_args_reg_offset[regno];
7559 /* Return the size required for the block returned by __builtin_apply_args,
7560 and initialize apply_args_mode. */
7562 static int
7563 apply_args_size ()
7565 static int size = -1;
7566 int align, regno;
7567 enum machine_mode mode;
7569 /* The values computed by this function never change. */
7570 if (size < 0)
7572 /* The first value is the incoming arg-pointer. */
7573 size = GET_MODE_SIZE (Pmode);
7575 /* The second value is the structure value address unless this is
7576 passed as an "invisible" first argument. */
7577 if (struct_value_rtx)
7578 size += GET_MODE_SIZE (Pmode);
7580 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7581 if (FUNCTION_ARG_REGNO_P (regno))
7583 /* Search for the proper mode for copying this register's
7584 value. I'm not sure this is right, but it works so far. */
7585 enum machine_mode best_mode = VOIDmode;
7587 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
7588 mode != VOIDmode;
7589 mode = GET_MODE_WIDER_MODE (mode))
7590 if (HARD_REGNO_MODE_OK (regno, mode)
7591 && HARD_REGNO_NREGS (regno, mode) == 1)
7592 best_mode = mode;
7594 if (best_mode == VOIDmode)
7595 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
7596 mode != VOIDmode;
7597 mode = GET_MODE_WIDER_MODE (mode))
7598 if (HARD_REGNO_MODE_OK (regno, mode)
7599 && (mov_optab->handlers[(int) mode].insn_code
7600 != CODE_FOR_nothing))
7601 best_mode = mode;
7603 mode = best_mode;
7604 if (mode == VOIDmode)
7605 abort ();
7607 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
7608 if (size % align != 0)
7609 size = CEIL (size, align) * align;
7610 apply_args_reg_offset[regno] = size;
7611 size += GET_MODE_SIZE (mode);
7612 apply_args_mode[regno] = mode;
7614 else
7616 apply_args_mode[regno] = VOIDmode;
7617 apply_args_reg_offset[regno] = 0;
7620 return size;
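/* Added layout sketch (register set hypothetical).  On a 32-bit target
   with Pmode == SImode, no struct_value_rtx, and argument registers
   r0-r3 each copied in SImode, the block computed above would be:

	offset 0:  incoming arg pointer
	offset 4:  r0	offset 8:  r1	offset 12: r2	offset 16: r3

   so apply_args_size () == 20, with apply_args_reg_offset[] recording
   each register's offset for use by objc-act.c.  */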
7623 /* Return the size required for the block returned by __builtin_apply,
7624 and initialize apply_result_mode. */
7626 static int
7627 apply_result_size ()
7629 static int size = -1;
7630 int align, regno;
7631 enum machine_mode mode;
7633 /* The values computed by this function never change. */
7634 if (size < 0)
7636 size = 0;
7638 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7639 if (FUNCTION_VALUE_REGNO_P (regno))
7641 /* Search for the proper mode for copying this register's
7642 value. I'm not sure this is right, but it works so far. */
7643 enum machine_mode best_mode = VOIDmode;
7645 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
7646 mode != TImode;
7647 mode = GET_MODE_WIDER_MODE (mode))
7648 if (HARD_REGNO_MODE_OK (regno, mode))
7649 best_mode = mode;
7651 if (best_mode == VOIDmode)
7652 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
7653 mode != VOIDmode;
7654 mode = GET_MODE_WIDER_MODE (mode))
7655 if (HARD_REGNO_MODE_OK (regno, mode)
7656 && (mov_optab->handlers[(int) mode].insn_code
7657 != CODE_FOR_nothing))
7658 best_mode = mode;
7660 mode = best_mode;
7661 if (mode == VOIDmode)
7662 abort ();
7664 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
7665 if (size % align != 0)
7666 size = CEIL (size, align) * align;
7667 size += GET_MODE_SIZE (mode);
7668 apply_result_mode[regno] = mode;
7670 else
7671 apply_result_mode[regno] = VOIDmode;
7673 /* Allow targets that use untyped_call and untyped_return to override
7674 the size so that machine-specific information can be stored here. */
7675 #ifdef APPLY_RESULT_SIZE
7676 size = APPLY_RESULT_SIZE;
7677 #endif
7679 return size;
7682 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
7683 /* Create a vector describing the result block RESULT. If SAVEP is true,
7684 the result block is used to save the values; otherwise it is used to
7685 restore the values. */
7687 static rtx
7688 result_vector (savep, result)
7689 int savep;
7690 rtx result;
7692 int regno, size, align, nelts;
7693 enum machine_mode mode;
7694 rtx reg, mem;
7695 rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
7697 size = nelts = 0;
7698 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7699 if ((mode = apply_result_mode[regno]) != VOIDmode)
7701 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
7702 if (size % align != 0)
7703 size = CEIL (size, align) * align;
7704 reg = gen_rtx (REG, mode, savep ? INCOMING_REGNO (regno) : regno);
7705 mem = change_address (result, mode,
7706 plus_constant (XEXP (result, 0), size));
7707 savevec[nelts++] = (savep
7708 ? gen_rtx (SET, VOIDmode, mem, reg)
7709 : gen_rtx (SET, VOIDmode, reg, mem));
7710 size += GET_MODE_SIZE (mode);
7712 return gen_rtx (PARALLEL, VOIDmode, gen_rtvec_v (nelts, savevec));
7714 #endif /* HAVE_untyped_call or HAVE_untyped_return */
7716 /* Save the state required to perform an untyped call with the same
7717 arguments as were passed to the current function. */
7719 static rtx
7720 expand_builtin_apply_args ()
7722 rtx registers;
7723 int size, align, regno;
7724 enum machine_mode mode;
7726 /* Create a block where the arg-pointer, structure value address,
7727 and argument registers can be saved. */
7728 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
7730 /* Walk past the arg-pointer and structure value address. */
7731 size = GET_MODE_SIZE (Pmode);
7732 if (struct_value_rtx)
7733 size += GET_MODE_SIZE (Pmode);
7735 /* Save each register used in calling a function to the block. */
7736 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7737 if ((mode = apply_args_mode[regno]) != VOIDmode)
7739 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
7740 if (size % align != 0)
7741 size = CEIL (size, align) * align;
7742 emit_move_insn (change_address (registers, mode,
7743 plus_constant (XEXP (registers, 0),
7744 size)),
7745 gen_rtx (REG, mode, INCOMING_REGNO (regno)));
7746 size += GET_MODE_SIZE (mode);
7749 /* Save the arg pointer to the block. */
7750 emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)),
7751 copy_to_reg (virtual_incoming_args_rtx));
7752 size = GET_MODE_SIZE (Pmode);
7754 /* Save the structure value address unless this is passed as an
7755 "invisible" first argument. */
7756 if (struct_value_incoming_rtx)
7758 emit_move_insn (change_address (registers, Pmode,
7759 plus_constant (XEXP (registers, 0),
7760 size)),
7761 copy_to_reg (struct_value_incoming_rtx));
7762 size += GET_MODE_SIZE (Pmode);
7765 /* Return the address of the block. */
7766 return copy_addr_to_reg (XEXP (registers, 0));
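/* Added note: the block is filled in two passes: argument registers
   first, at the offsets apply_args_size computed, then the arg pointer
   at offset 0 and (if distinct) the structure value address after it.
   At source level, all a caller ever sees of this layout is the opaque
   pointer:

	void *args = __builtin_apply_args ();  */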
7769 /* Perform an untyped call and save the state required to perform an
7770 untyped return of whatever value was returned by the given function. */
7772 static rtx
7773 expand_builtin_apply (function, arguments, argsize)
7774 rtx function, arguments, argsize;
7776 int size, align, regno;
7777 enum machine_mode mode;
7778 rtx incoming_args, result, reg, dest, call_insn;
7779 rtx old_stack_level = 0;
7780 rtx call_fusage = 0;
7782 /* Create a block where the return registers can be saved. */
7783 result = assign_stack_local (BLKmode, apply_result_size (), -1);
7785 /* ??? The argsize value should be adjusted here. */
7787 /* Fetch the arg pointer from the ARGUMENTS block. */
7788 incoming_args = gen_reg_rtx (Pmode);
7789 emit_move_insn (incoming_args,
7790 gen_rtx (MEM, Pmode, arguments));
7791 #ifndef STACK_GROWS_DOWNWARD
7792 incoming_args = expand_binop (Pmode, sub_optab, incoming_args, argsize,
7793 incoming_args, 0, OPTAB_LIB_WIDEN);
7794 #endif
7796 /* Perform postincrements before actually calling the function. */
7797 emit_queue ();
7799 /* Push a new argument block and copy the arguments. */
7800 do_pending_stack_adjust ();
7801 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
7803 /* Push a block of memory onto the stack to store the memory arguments.
7804 Save the address in a register, and copy the memory arguments. ??? I
7805 haven't figured out how the calling convention macros affect this,
7806 but it's likely that the source and/or destination addresses in
7807 the block copy will need updating in machine specific ways. */
7808 dest = copy_addr_to_reg (push_block (argsize, 0, 0));
7809 emit_block_move (gen_rtx (MEM, BLKmode, dest),
7810 gen_rtx (MEM, BLKmode, incoming_args),
7811 argsize,
7812 PARM_BOUNDARY / BITS_PER_UNIT);
7814 /* Refer to the argument block. */
7815 apply_args_size ();
7816 arguments = gen_rtx (MEM, BLKmode, arguments);
7818 /* Walk past the arg-pointer and structure value address. */
7819 size = GET_MODE_SIZE (Pmode);
7820 if (struct_value_rtx)
7821 size += GET_MODE_SIZE (Pmode);
7823 /* Restore each of the registers previously saved. Make USE insns
7824 for each of these registers for use in making the call. */
7825 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7826 if ((mode = apply_args_mode[regno]) != VOIDmode)
7828 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
7829 if (size % align != 0)
7830 size = CEIL (size, align) * align;
7831 reg = gen_rtx (REG, mode, regno);
7832 emit_move_insn (reg,
7833 change_address (arguments, mode,
7834 plus_constant (XEXP (arguments, 0),
7835 size)));
7837 use_reg (&call_fusage, reg);
7838 size += GET_MODE_SIZE (mode);
7841 /* Restore the structure value address unless this is passed as an
7842 "invisible" first argument. */
7843 size = GET_MODE_SIZE (Pmode);
7844 if (struct_value_rtx)
7846 rtx value = gen_reg_rtx (Pmode);
7847 emit_move_insn (value,
7848 change_address (arguments, Pmode,
7849 plus_constant (XEXP (arguments, 0),
7850 size)));
7851 emit_move_insn (struct_value_rtx, value);
7852 if (GET_CODE (struct_value_rtx) == REG)
7853 use_reg (&call_fusage, struct_value_rtx);
7854 size += GET_MODE_SIZE (Pmode);
7857 /* All arguments and registers used for the call are set up by now! */
7858 function = prepare_call_address (function, NULL_TREE, &call_fusage, 0);
7860 /* Make sure the address is valid. A SYMBOL_REF is already valid, so
7861 nothing need be done, and we don't want to load it into a register as
7862 an optimization, because prepare_call_address already did so if needed. */
7863 if (GET_CODE (function) != SYMBOL_REF)
7864 function = memory_address (FUNCTION_MODE, function);
7866 /* Generate the actual call instruction and save the return value. */
7867 #ifdef HAVE_untyped_call
7868 if (HAVE_untyped_call)
7869 emit_call_insn (gen_untyped_call (gen_rtx (MEM, FUNCTION_MODE, function),
7870 result, result_vector (1, result)));
7871 else
7872 #endif
7873 #ifdef HAVE_call_value
7874 if (HAVE_call_value)
7876 rtx valreg = 0;
7878 /* Locate the unique return register. It is not possible to
7879 express a call that sets more than one return register using
7880 call_value; use untyped_call for that. In fact, untyped_call
7881 only needs to save the return registers in the given block. */
7882 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7883 if ((mode = apply_result_mode[regno]) != VOIDmode)
7885 if (valreg)
7886 abort (); /* HAVE_untyped_call required. */
7887 valreg = gen_rtx (REG, mode, regno);
7890 emit_call_insn (gen_call_value (valreg,
7891 gen_rtx (MEM, FUNCTION_MODE, function),
7892 const0_rtx, NULL_RTX, const0_rtx));
7894 emit_move_insn (change_address (result, GET_MODE (valreg),
7895 XEXP (result, 0)),
7896 valreg);
7898 else
7899 #endif
7900 abort ();
7902 /* Find the CALL insn we just emitted. */
7903 for (call_insn = get_last_insn ();
7904 call_insn && GET_CODE (call_insn) != CALL_INSN;
7905 call_insn = PREV_INSN (call_insn))
7908 if (! call_insn)
7909 abort ();
7911 /* Put the register usage information on the CALL. If there is already
7912 some usage information, put ours at the end. */
7913 if (CALL_INSN_FUNCTION_USAGE (call_insn))
7915 rtx link;
7917 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
7918 link = XEXP (link, 1))
7921 XEXP (link, 1) = call_fusage;
7923 else
7924 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
7926 /* Restore the stack. */
7927 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
7929 /* Return the address of the result block. */
7930 return copy_addr_to_reg (XEXP (result, 0));
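/* Added usage sketch; the 64 below is an arbitrary stand-in for the
   argument block size, which (per the ??? above) is not yet adjusted
   by this function:

	void *args = __builtin_apply_args ();
	void *res  = __builtin_apply ((void (*)()) fn, args, 64);

   RES addresses the block into which every possible return register
   was saved immediately after the call.  */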
7933 /* Perform an untyped return. */
7935 static void
7936 expand_builtin_return (result)
7937 rtx result;
7939 int size, align, regno;
7940 enum machine_mode mode;
7941 rtx reg;
7942 rtx call_fusage = 0;
7944 apply_result_size ();
7945 result = gen_rtx (MEM, BLKmode, result);
7947 #ifdef HAVE_untyped_return
7948 if (HAVE_untyped_return)
7950 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
7951 emit_barrier ();
7952 return;
7954 #endif
7956 /* Restore the return value and note that each value is used. */
7957 size = 0;
7958 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7959 if ((mode = apply_result_mode[regno]) != VOIDmode)
7961 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
7962 if (size % align != 0)
7963 size = CEIL (size, align) * align;
7964 reg = gen_rtx (REG, mode, INCOMING_REGNO (regno));
7965 emit_move_insn (reg,
7966 change_address (result, mode,
7967 plus_constant (XEXP (result, 0),
7968 size)));
7970 push_to_sequence (call_fusage);
7971 emit_insn (gen_rtx (USE, VOIDmode, reg));
7972 call_fusage = get_insns ();
7973 end_sequence ();
7974 size += GET_MODE_SIZE (mode);
7977 /* Put the USE insns before the return. */
7978 emit_insns (call_fusage);
7980 /* Return whatever values were restored by jumping directly to the end
7981 of the function. */
7982 expand_null_return ();
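/* Added note: together the three builtins implement transparent call
   forwarding, e.g. (block size again hypothetical):

	int forward ()
	{
	  __builtin_return (__builtin_apply ((void (*)()) target,
					     __builtin_apply_args (), 64));
	}

   which passes FORWARD's own arguments to TARGET and returns whatever
   TARGET returned, without knowing either signature.  */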
7985 /* Expand code for a post- or pre- increment or decrement
7986 and return the RTX for the result.
7987 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
7989 static rtx
7990 expand_increment (exp, post)
7991 register tree exp;
7992 int post;
7994 register rtx op0, op1;
7995 register rtx temp, value;
7996 register tree incremented = TREE_OPERAND (exp, 0);
7997 optab this_optab = add_optab;
7998 int icode;
7999 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
8000 int op0_is_copy = 0;
8001 int single_insn = 0;
8002 /* 1 means we can't store into OP0 directly,
8003 because it is a subreg narrower than a word,
8004 and we don't dare clobber the rest of the word. */
8005 int bad_subreg = 0;
8007 if (output_bytecode)
8009 bc_expand_expr (exp);
8010 return NULL_RTX;
8013 /* Stabilize any component ref that might need to be
8014 evaluated more than once below. */
8015 if (!post
8016 || TREE_CODE (incremented) == BIT_FIELD_REF
8017 || (TREE_CODE (incremented) == COMPONENT_REF
8018 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
8019 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
8020 incremented = stabilize_reference (incremented);
8021 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
8022 ones into save exprs so that they don't accidentally get evaluated
8023 more than once by the code below. */
8024 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
8025 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
8026 incremented = save_expr (incremented);
8028 /* Compute the operands as RTX.
8029 Note whether OP0 is the actual lvalue or a copy of it:
8030 I believe it is a copy iff it is a register or subreg
8031 and insns were generated in computing it. */
8033 temp = get_last_insn ();
8034 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
8036 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
8037 in place but instead must do sign- or zero-extension during assignment,
8038 so we copy it into a new register and let the code below use it as
8039 a copy.
8041 Note that we can safely modify this SUBREG since it is known not to be
8042 shared (it was made by the expand_expr call above). */
8044 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
8045 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
8046 else if (GET_CODE (op0) == SUBREG
8047 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
8049 /* We cannot increment this SUBREG in place. If we are
8050 post-incrementing, get a copy of the old value. Otherwise,
8051 just mark that we cannot increment in place. */
8052 if (post)
8053 op0 = copy_to_reg (op0);
8054 else
8055 bad_subreg = 1;
8058 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
8059 && temp != get_last_insn ());
8060 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8062 /* Decide whether incrementing or decrementing. */
8063 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
8064 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8065 this_optab = sub_optab;
8067 /* Convert decrement by a constant into a negative increment. */
8068 if (this_optab == sub_optab
8069 && GET_CODE (op1) == CONST_INT)
8071 op1 = GEN_INT (- INTVAL (op1));
8072 this_optab = add_optab;
8075 /* For a preincrement, see if we can do this with a single instruction. */
8076 if (!post)
8078 icode = (int) this_optab->handlers[(int) mode].insn_code;
8079 if (icode != (int) CODE_FOR_nothing
8080 /* Make sure that OP0 is valid for operands 0 and 1
8081 of the insn we want to queue. */
8082 && (*insn_operand_predicate[icode][0]) (op0, mode)
8083 && (*insn_operand_predicate[icode][1]) (op0, mode)
8084 && (*insn_operand_predicate[icode][2]) (op1, mode))
8085 single_insn = 1;
8088 /* If OP0 is not the actual lvalue, but rather a copy in a register,
8089 then we cannot just increment OP0. We must therefore contrive to
8090 increment the original value. Then, for postincrement, we can return
8091 OP0 since it is a copy of the old value. For preincrement, expand here
8092 unless we can do it with a single insn.
8094 Likewise if storing directly into OP0 would clobber high bits
8095 we need to preserve (bad_subreg). */
8096 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
8098 /* This is the easiest way to increment the value wherever it is.
8099 Problems with multiple evaluation of INCREMENTED are prevented
8100 because either (1) it is a component_ref or preincrement,
8101 in which case it was stabilized above, or (2) it is an array_ref
8102 with constant index in an array in a register, which is
8103 safe to reevaluate. */
8104 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
8105 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8106 ? MINUS_EXPR : PLUS_EXPR),
8107 TREE_TYPE (exp),
8108 incremented,
8109 TREE_OPERAND (exp, 1));
8110 temp = expand_assignment (incremented, newexp, ! post, 0);
8111 return post ? op0 : temp;
8114 if (post)
8116 /* We have a true reference to the value in OP0.
8117 If there is an insn to add or subtract in this mode, queue it.
8118 Queueing the increment insn avoids the register shuffling
8119 that often results if we must increment now and first save
8120 the old value for subsequent use. */
8122 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
8123 op0 = stabilize (op0);
8124 #endif
8126 icode = (int) this_optab->handlers[(int) mode].insn_code;
8127 if (icode != (int) CODE_FOR_nothing
8128 /* Make sure that OP0 is valid for operands 0 and 1
8129 of the insn we want to queue. */
8130 && (*insn_operand_predicate[icode][0]) (op0, mode)
8131 && (*insn_operand_predicate[icode][1]) (op0, mode))
8133 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
8134 op1 = force_reg (mode, op1);
8136 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
8140 /* Preincrement, or we can't increment with one simple insn. */
8141 if (post)
8142 /* Save a copy of the value before inc or dec, to return it later. */
8143 temp = value = copy_to_reg (op0);
8144 else
8145 /* Arrange to return the incremented value. */
8146 /* Copy the rtx because expand_binop will protect from the queue,
8147 and the results of that would be invalid for us to return
8148 if our caller does emit_queue before using our result. */
8149 temp = copy_rtx (value = op0);
8151 /* Increment however we can. */
8152 op1 = expand_binop (mode, this_optab, value, op1, op0,
8153 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
8154 /* Make sure the value is stored into OP0. */
8155 if (op1 != op0)
8156 emit_move_insn (op0, op1);
8158 return temp;
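/* Added example: for `y = x++;' (POST nonzero) the old value is copied
   and returned while X is incremented; for `y = ++x;' (POST zero) the
   incremented value itself is returned:

	x++  =>  temp = x;  x = x + 1;	result is temp
	++x  =>  x = x + 1;		result is x

   with the enqueue_insn path above deferring the addition entirely
   when a single add insn can do the job.  */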
8161 /* Expand all function calls contained within EXP, innermost ones first.
8162 But don't look within expressions that have sequence points.
8163 For each CALL_EXPR, record the rtx for its value
8164 in the CALL_EXPR_RTL field. */
8166 static void
8167 preexpand_calls (exp)
8168 tree exp;
8170 register int nops, i;
8171 int type = TREE_CODE_CLASS (TREE_CODE (exp));
8173 if (! do_preexpand_calls)
8174 return;
8176 /* Only expressions and references can contain calls. */
8178 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
8179 return;
8181 switch (TREE_CODE (exp))
8183 case CALL_EXPR:
8184 /* Do nothing if already expanded. */
8185 if (CALL_EXPR_RTL (exp) != 0)
8186 return;
8188 /* Do nothing to built-in functions. */
8189 if (TREE_CODE (TREE_OPERAND (exp, 0)) != ADDR_EXPR
8190 || TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != FUNCTION_DECL
8191 || ! DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8192 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
8193 return;
8195 case COMPOUND_EXPR:
8196 case COND_EXPR:
8197 case TRUTH_ANDIF_EXPR:
8198 case TRUTH_ORIF_EXPR:
8199 /* If we find one of these, then we can be sure
8200 the adjust will be done for it (since it makes jumps).
8201 Do it now, so that if this is inside an argument
8202 of a function, we don't get the stack adjustment
8203 after some other args have already been pushed. */
8204 do_pending_stack_adjust ();
8205 return;
8207 case BLOCK:
8208 case RTL_EXPR:
8209 case WITH_CLEANUP_EXPR:
8210 return;
8212 case SAVE_EXPR:
8213 if (SAVE_EXPR_RTL (exp) != 0)
8214 return;
8217 nops = tree_code_length[(int) TREE_CODE (exp)];
8218 for (i = 0; i < nops; i++)
8219 if (TREE_OPERAND (exp, i) != 0)
8221 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
8222 if (type == 'e' || type == '<' || type == '1' || type == '2'
8223 || type == 'r')
8224 preexpand_calls (TREE_OPERAND (exp, i));
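/* Added example: given

	z = f (g (x));

   the walk above reaches the CALL_EXPR for f and expands it via
   expand_call (whose argument processing expands g first), recording
   the result in CALL_EXPR_RTL so the call is never expanded twice.
   The walk deliberately stops at COMPOUND_EXPR, COND_EXPR and the
   like, since those introduce sequence points.  */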
8228 /* At the start of a function, record that we have no previously-pushed
8229 arguments waiting to be popped. */
8231 void
8232 init_pending_stack_adjust ()
8234 pending_stack_adjust = 0;
8237 /* When exiting from function, if safe, clear out any pending stack adjust
8238 so the adjustment won't get done. */
8240 void
8241 clear_pending_stack_adjust ()
8243 #ifdef EXIT_IGNORE_STACK
8244 if (! flag_omit_frame_pointer && EXIT_IGNORE_STACK
8245 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
8246 && ! flag_inline_functions)
8247 pending_stack_adjust = 0;
8248 #endif
8251 /* Pop any previously-pushed arguments that have not been popped yet. */
8253 void
8254 do_pending_stack_adjust ()
8256 if (inhibit_defer_pop == 0)
8258 if (pending_stack_adjust != 0)
8259 adjust_stack (GEN_INT (pending_stack_adjust));
8260 pending_stack_adjust = 0;
8264 /* Defer the expansion of all cleanups up to OLD_CLEANUPS.
8265 Returns the cleanups to be performed. */
8267 static tree
8268 defer_cleanups_to (old_cleanups)
8269 tree old_cleanups;
8271 tree new_cleanups = NULL_TREE;
8272 tree cleanups = cleanups_this_call;
8273 tree last = NULL_TREE;
8275 while (cleanups_this_call != old_cleanups)
8276 last = cleanups_this_call;
8277 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
8280 if (last)
8282 /* Remove the list from the chain of cleanups. */
8283 TREE_CHAIN (last) = NULL_TREE;
8285 /* Reverse them so that we can build them in the right order. */
8286 cleanups = nreverse (cleanups);
8288 while (cleanups)
8290 if (new_cleanups)
8291 new_cleanups = build (COMPOUND_EXPR, TREE_TYPE (new_cleanups),
8292 TREE_VALUE (cleanups), new_cleanups);
8293 else
8294 new_cleanups = TREE_VALUE (cleanups);
8296 cleanups = TREE_CHAIN (cleanups);
8300 return new_cleanups;
8303 /* Expand all cleanups up to OLD_CLEANUPS.
8304 Needed here, and also for language-dependent calls. */
8306 void
8307 expand_cleanups_to (old_cleanups)
8308 tree old_cleanups;
8310 while (cleanups_this_call != old_cleanups)
8312 expand_expr (TREE_VALUE (cleanups_this_call), const0_rtx, VOIDmode, 0);
8313 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
8317 /* Expand conditional expressions. */
8319 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
8320 LABEL is an rtx of code CODE_LABEL, in this function and all the
8321 functions here. */
8323 void
8324 jumpifnot (exp, label)
8325 tree exp;
8326 rtx label;
8328 do_jump (exp, label, NULL_RTX);
8331 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
8333 void
8334 jumpif (exp, label)
8335 tree exp;
8336 rtx label;
8338 do_jump (exp, NULL_RTX, label);
8341 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
8342 the result is zero, or IF_TRUE_LABEL if the result is one.
8343 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
8344 meaning fall through in that case.
8346 do_jump always does any pending stack adjust except when it does not
8347 actually perform a jump. An example where there is no jump
8348 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
8350 This function is responsible for optimizing cases such as
8351 &&, || and comparison operators in EXP. */
8353 void
8354 do_jump (exp, if_false_label, if_true_label)
8355 tree exp;
8356 rtx if_false_label, if_true_label;
8358 register enum tree_code code = TREE_CODE (exp);
8359 /* Some cases need to create a label to jump to
8360 in order to properly fall through.
8361 These cases set DROP_THROUGH_LABEL nonzero. */
8362 rtx drop_through_label = 0;
8363 rtx temp;
8364 rtx comparison = 0;
8365 int i;
8366 tree type;
8368 emit_queue ();
8370 switch (code)
8372 case ERROR_MARK:
8373 break;
8375 case INTEGER_CST:
8376 temp = integer_zerop (exp) ? if_false_label : if_true_label;
8377 if (temp)
8378 emit_jump (temp);
8379 break;
8381 #if 0
8382 /* This is not true with #pragma weak */
8383 case ADDR_EXPR:
8384 /* The address of something can never be zero. */
8385 if (if_true_label)
8386 emit_jump (if_true_label);
8387 break;
8388 #endif
8390 case NOP_EXPR:
8391 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
8392 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
8393 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
8394 goto normal;
8395 case CONVERT_EXPR:
8396 /* If we are narrowing the operand, we have to do the compare in the
8397 narrower mode. */
8398 if ((TYPE_PRECISION (TREE_TYPE (exp))
8399 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8400 goto normal;
8401 case NON_LVALUE_EXPR:
8402 case REFERENCE_EXPR:
8403 case ABS_EXPR:
8404 case NEGATE_EXPR:
8405 case LROTATE_EXPR:
8406 case RROTATE_EXPR:
8407 /* These cannot change zero->non-zero or vice versa. */
8408 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
8409 break;
8411 #if 0
8412 /* This is never less insns than evaluating the PLUS_EXPR followed by
8413 a test and can be longer if the test is eliminated. */
8414 case PLUS_EXPR:
8415 /* Reduce to minus. */
8416 exp = build (MINUS_EXPR, TREE_TYPE (exp),
8417 TREE_OPERAND (exp, 0),
8418 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
8419 TREE_OPERAND (exp, 1))));
8420 /* Process as MINUS. */
8421 #endif
8423 case MINUS_EXPR:
8424 /* Non-zero iff operands of minus differ. */
8425 comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
8426 TREE_OPERAND (exp, 0),
8427 TREE_OPERAND (exp, 1)),
8428 NE, NE);
8429 break;
8431 case BIT_AND_EXPR:
8432 /* If we are AND'ing with a small constant, do this comparison in the
8433 smallest type that fits. If the machine doesn't have comparisons
8434 that small, it will be converted back to the wider comparison.
8435 This helps if we are testing the sign bit of a narrower object.
8436 combine can't do this for us because it can't know whether a
8437 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
8439 if (! SLOW_BYTE_ACCESS
8440 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8441 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
8442 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
8443 && (type = type_for_size (i + 1, 1)) != 0
8444 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
8445 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
8446 != CODE_FOR_nothing))
8448 do_jump (convert (type, exp), if_false_label, if_true_label);
8449 break;
8451 goto normal;
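/* Added example of the narrowing above: testing the sign bit of a
   byte that was widened to int,

	if (x & 0x80) ...

   gives floor_log2 (0x80) == 7, type_for_size (8, 1) yields an 8-bit
   unsigned type, and the test is redone as a QImode comparison,
   provided the machine defines a QImode compare pattern.  */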
8453 case TRUTH_NOT_EXPR:
8454 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
8455 break;
8457 case TRUTH_ANDIF_EXPR:
8458 if (if_false_label == 0)
8459 if_false_label = drop_through_label = gen_label_rtx ();
8460 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
8461 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
8462 break;
8464 case TRUTH_ORIF_EXPR:
8465 if (if_true_label == 0)
8466 if_true_label = drop_through_label = gen_label_rtx ();
8467 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
8468 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
8469 break;
8471 case COMPOUND_EXPR:
8472 push_temp_slots ();
8473 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8474 free_temp_slots ();
8475 pop_temp_slots ();
8476 emit_queue ();
8477 do_pending_stack_adjust ();
8478 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
8479 break;
8481 case COMPONENT_REF:
8482 case BIT_FIELD_REF:
8483 case ARRAY_REF:
8485 int bitsize, bitpos, unsignedp;
8486 enum machine_mode mode;
8487 tree type;
8488 tree offset;
8489 int volatilep = 0;
8491 /* Get description of this reference. We don't actually care
8492 about the underlying object here. */
8493 get_inner_reference (exp, &bitsize, &bitpos, &offset,
8494 &mode, &unsignedp, &volatilep);
8496 type = type_for_size (bitsize, unsignedp);
8497 if (! SLOW_BYTE_ACCESS
8498 && type != 0 && bitsize >= 0
8499 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
8500 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
8501 != CODE_FOR_nothing))
8503 do_jump (convert (type, exp), if_false_label, if_true_label);
8504 break;
8506 goto normal;
8509 case COND_EXPR:
8510 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
8511 if (integer_onep (TREE_OPERAND (exp, 1))
8512 && integer_zerop (TREE_OPERAND (exp, 2)))
8513 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
8515 else if (integer_zerop (TREE_OPERAND (exp, 1))
8516 && integer_onep (TREE_OPERAND (exp, 2)))
8517 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
8519 else
8521 register rtx label1 = gen_label_rtx ();
8522 drop_through_label = gen_label_rtx ();
8523 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
8524 /* Now the THEN-expression. */
8525 do_jump (TREE_OPERAND (exp, 1),
8526 if_false_label ? if_false_label : drop_through_label,
8527 if_true_label ? if_true_label : drop_through_label);
8528 /* In case the do_jump just above never jumps. */
8529 do_pending_stack_adjust ();
8530 emit_label (label1);
8531 /* Now the ELSE-expression. */
8532 do_jump (TREE_OPERAND (exp, 2),
8533 if_false_label ? if_false_label : drop_through_label,
8534 if_true_label ? if_true_label : drop_through_label);
8536 break;
8538 case EQ_EXPR:
8539 if (integer_zerop (TREE_OPERAND (exp, 1)))
8540 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
8541 else if (((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8542 == MODE_INT)
8543 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8545 || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))) == MODE_COMPLEX_FLOAT
8546 || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))) == MODE_COMPLEX_INT)
8547 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
8548 else
8549 comparison = compare (exp, EQ, EQ);
8550 break;
8552 case NE_EXPR:
8553 if (integer_zerop (TREE_OPERAND (exp, 1)))
8554 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
8555 else if (((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8556 == MODE_INT)
8557 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8559 || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))) == MODE_COMPLEX_FLOAT
8560 || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))) == MODE_COMPLEX_INT)
8561 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
8562 else
8563 comparison = compare (exp, NE, NE);
8564 break;
8566 case LT_EXPR:
8567 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8568 == MODE_INT)
8569 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8570 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
8571 else
8572 comparison = compare (exp, LT, LTU);
8573 break;
8575 case LE_EXPR:
8576 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8577 == MODE_INT)
8578 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8579 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
8580 else
8581 comparison = compare (exp, LE, LEU);
8582 break;
8584 case GT_EXPR:
8585 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8586 == MODE_INT)
8587 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8588 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
8589 else
8590 comparison = compare (exp, GT, GTU);
8591 break;
8593 case GE_EXPR:
8594 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8595 == MODE_INT)
8596 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8597 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
8598 else
8599 comparison = compare (exp, GE, GEU);
8600 break;
8602 default:
8603 normal:
8604 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
8605 #if 0
8606 /* This is not needed any more and causes poor code since it causes
8607 comparisons and tests from non-SI objects to have different code
8608 sequences. */
8609 /* Copy to register to avoid generating bad insns by cse
8610 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
8611 if (!cse_not_expected && GET_CODE (temp) == MEM)
8612 temp = copy_to_reg (temp);
8613 #endif
8614 do_pending_stack_adjust ();
8615 if (GET_CODE (temp) == CONST_INT)
8616 comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
8617 else if (GET_CODE (temp) == LABEL_REF)
8618 comparison = const_true_rtx;
8619 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
8620 && !can_compare_p (GET_MODE (temp)))
8621 /* Note swapping the labels gives us not-equal. */
8622 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
8623 else if (GET_MODE (temp) != VOIDmode)
8624 comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
8625 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
8626 GET_MODE (temp), NULL_RTX, 0);
8627 else
8628 abort ();
8631 /* Do any postincrements in the expression that was tested. */
8632 emit_queue ();
8634 /* If COMPARISON is nonzero here, it is an rtx that can be substituted
8635 straight into a conditional jump instruction as the jump condition.
8636 Otherwise, all the work has been done already. */
8638 if (comparison == const_true_rtx)
8640 if (if_true_label)
8641 emit_jump (if_true_label);
8643 else if (comparison == const0_rtx)
8645 if (if_false_label)
8646 emit_jump (if_false_label);
8648 else if (comparison)
8649 do_jump_for_compare (comparison, if_false_label, if_true_label);
8651 if (drop_through_label)
8653 /* If do_jump produces code that might be jumped around,
8654 do any stack adjusts from that code, before the place
8655 where control merges in. */
8656 do_pending_stack_adjust ();
8657 emit_label (drop_through_label);
8661 /* Given a comparison expression EXP for values too wide to be compared
8662 with one insn, test the comparison and jump to the appropriate label.
8663 The code of EXP is ignored; we always test GT if SWAP is 0,
8664 and LT if SWAP is 1. */
8666 static void
8667 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
8668 tree exp;
8669 int swap;
8670 rtx if_false_label, if_true_label;
8672 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
8673 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
8674 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
8675 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
8676 rtx drop_through_label = 0;
8677 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
8678 int i;
8680 if (! if_true_label || ! if_false_label)
8681 drop_through_label = gen_label_rtx ();
8682 if (! if_true_label)
8683 if_true_label = drop_through_label;
8684 if (! if_false_label)
8685 if_false_label = drop_through_label;
8687 /* Compare a word at a time, high order first. */
8688 for (i = 0; i < nwords; i++)
8690 rtx comp;
8691 rtx op0_word, op1_word;
8693 if (WORDS_BIG_ENDIAN)
8695 op0_word = operand_subword_force (op0, i, mode);
8696 op1_word = operand_subword_force (op1, i, mode);
8698 else
8700 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
8701 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
8704 /* All but high-order word must be compared as unsigned. */
8705 comp = compare_from_rtx (op0_word, op1_word,
8706 (unsignedp || i > 0) ? GTU : GT,
8707 unsignedp, word_mode, NULL_RTX, 0);
8708 if (comp == const_true_rtx)
8709 emit_jump (if_true_label);
8710 else if (comp != const0_rtx)
8711 do_jump_for_compare (comp, NULL_RTX, if_true_label);
8713 /* Consider lower words only if these are equal. */
8714 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
8715 NULL_RTX, 0);
8716 if (comp == const_true_rtx)
8717 emit_jump (if_false_label);
8718 else if (comp != const0_rtx)
8719 do_jump_for_compare (comp, NULL_RTX, if_false_label);
8722 if (if_false_label)
8723 emit_jump (if_false_label);
8724 if (drop_through_label)
8725 emit_label (drop_through_label);
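/* Added walkthrough (applies equally to the rtx variant below): for a
   DImode comparison on a 32-bit target, NWORDS == 2 and the loop first
   examines the high-order words:

	high0 >  high1	=> jump to if_true_label
	high0 != high1	=> jump to if_false_label (high0 < high1)

   Only when the high words are equal does it go on to the low-order
   words, which are always compared unsigned; full equality finally
   falls out to if_false_label.  */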
8728 /* Compare OP0 with OP1, word at a time, in mode MODE.
8729 UNSIGNEDP says to do unsigned comparison.
8730 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
8732 static void
8733 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
8734 enum machine_mode mode;
8735 int unsignedp;
8736 rtx op0, op1;
8737 rtx if_false_label, if_true_label;
8739 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
8740 rtx drop_through_label = 0;
8741 int i;
8743 if (! if_true_label || ! if_false_label)
8744 drop_through_label = gen_label_rtx ();
8745 if (! if_true_label)
8746 if_true_label = drop_through_label;
8747 if (! if_false_label)
8748 if_false_label = drop_through_label;
8750 /* Compare a word at a time, high order first. */
8751 for (i = 0; i < nwords; i++)
8753 rtx comp;
8754 rtx op0_word, op1_word;
8756 if (WORDS_BIG_ENDIAN)
8758 op0_word = operand_subword_force (op0, i, mode);
8759 op1_word = operand_subword_force (op1, i, mode);
8761 else
8763 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
8764 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
8767 /* All but high-order word must be compared as unsigned. */
8768 comp = compare_from_rtx (op0_word, op1_word,
8769 (unsignedp || i > 0) ? GTU : GT,
8770 unsignedp, word_mode, NULL_RTX, 0);
8771 if (comp == const_true_rtx)
8772 emit_jump (if_true_label);
8773 else if (comp != const0_rtx)
8774 do_jump_for_compare (comp, NULL_RTX, if_true_label);
8776 /* Consider lower words only if these are equal. */
8777 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
8778 NULL_RTX, 0);
8779 if (comp == const_true_rtx)
8780 emit_jump (if_false_label);
8781 else if (comp != const0_rtx)
8782 do_jump_for_compare (comp, NULL_RTX, if_false_label);
8785 if (if_false_label)
8786 emit_jump (if_false_label);
8787 if (drop_through_label)
8788 emit_label (drop_through_label);
8791 /* Given an EQ_EXPR expression EXP for values too wide to be compared
8792 with one insn, test the comparison and jump to the appropriate label. */
8794 static void
8795 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
8796 tree exp;
8797 rtx if_false_label, if_true_label;
8799 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8800 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8801 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
8802 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
8803 int i;
8804 rtx drop_through_label = 0;
8806 if (! if_false_label)
8807 drop_through_label = if_false_label = gen_label_rtx ();
8809 for (i = 0; i < nwords; i++)
8811 rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
8812 operand_subword_force (op1, i, mode),
8813 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
8814 word_mode, NULL_RTX, 0);
8815 if (comp == const_true_rtx)
8816 emit_jump (if_false_label);
8817 else if (comp != const0_rtx)
8818 do_jump_for_compare (comp, if_false_label, NULL_RTX);
8821 if (if_true_label)
8822 emit_jump (if_true_label);
8823 if (drop_through_label)
8824 emit_label (drop_through_label);
8827 /* Jump according to whether OP0 is 0.
8828 We assume that OP0 has an integer mode that is too wide
8829 for the available compare insns. */
8831 static void
8832 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
8833 rtx op0;
8834 rtx if_false_label, if_true_label;
8836 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
8837 int i;
8838 rtx drop_through_label = 0;
8840 if (! if_false_label)
8841 drop_through_label = if_false_label = gen_label_rtx ();
8843 for (i = 0; i < nwords; i++)
8845 rtx comp = compare_from_rtx (operand_subword_force (op0, i,
8846 GET_MODE (op0)),
8847 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
8848 if (comp == const_true_rtx)
8849 emit_jump (if_false_label);
8850 else if (comp != const0_rtx)
8851 do_jump_for_compare (comp, if_false_label, NULL_RTX);
8854 if (if_true_label)
8855 emit_jump (if_true_label);
8856 if (drop_through_label)
8857 emit_label (drop_through_label);
8860 /* Given a comparison expression in rtl form, output conditional branches to
8861 IF_TRUE_LABEL, IF_FALSE_LABEL, or both. */
8863 static void
8864 do_jump_for_compare (comparison, if_false_label, if_true_label)
8865 rtx comparison, if_false_label, if_true_label;
8867 if (if_true_label)
8869 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
8870 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
8871 else
8872 abort ();
8874 if (if_false_label)
8875 emit_jump (if_false_label);
8877 else if (if_false_label)
8879 rtx insn;
8880 rtx prev = get_last_insn ();
8881 rtx branch = 0;
8883 if (prev != 0)
8884 prev = PREV_INSN (prev);
8886 /* Output the branch with the opposite condition. Then try to invert
8887 what is generated. If more than one insn is a branch, or if the
8888 branch is not the last insn written, abort. If we can't invert
8889 the branch, make a true label, redirect this jump to that,
8890 emit a jump to the false label and define the true label. */
8892 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
8893 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_false_label));
8894 else
8895 abort ();
8897 /* Here we get the insn before what was just emitted.
8898 On some machines, emitting the branch can discard
8899 the previous compare insn and emit a replacement. */
8900 if (prev == 0)
8901 /* If there's only one preceding insn... */
8902 insn = get_insns ();
8903 else
8904 insn = NEXT_INSN (prev);
8906 for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
8907 if (GET_CODE (insn) == JUMP_INSN)
8909 if (branch)
8910 abort ();
8911 branch = insn;
8914 if (branch != get_last_insn ())
8915 abort ();
8917 JUMP_LABEL (branch) = if_false_label;
8918 if (! invert_jump (branch, if_false_label))
8920 if_true_label = gen_label_rtx ();
8921 redirect_jump (branch, if_true_label);
8922 emit_jump (if_false_label);
8923 emit_label (if_true_label);
8928 /* Generate code for a comparison expression EXP
8929 (including code to compute the values to be compared)
8930 and set (CC0) according to the result.
8931 SIGNED_CODE should be the rtx operation for this comparison for
8932 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
8934 We force a stack adjustment unless there are currently
8935 things pushed on the stack that aren't yet used. */
8937 static rtx
8938 compare (exp, signed_code, unsigned_code)
8939 register tree exp;
8940 enum rtx_code signed_code, unsigned_code;
8942 register rtx op0
8943 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8944 register rtx op1
8945 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8946 register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
8947 register enum machine_mode mode = TYPE_MODE (type);
8948 int unsignedp = TREE_UNSIGNED (type);
8949 enum rtx_code code = unsignedp ? unsigned_code : signed_code;
8951 return compare_from_rtx (op0, op1, code, unsignedp, mode,
8952 ((mode == BLKmode)
8953 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
8954 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
8957 /* Like compare but expects the values to compare as two rtx's.
8958 The decision as to signed or unsigned comparison must be made by the caller.
8960 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
8961 compared.
8963 If ALIGN is non-zero, it is the alignment of this type; if zero, the
8964 size of MODE should be used. */
8966 rtx
8967 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
8968 register rtx op0, op1;
8969 enum rtx_code code;
8970 int unsignedp;
8971 enum machine_mode mode;
8972 rtx size;
8973 int align;
8975 rtx tem;
8977 /* If one operand is constant, make it the second one. Only do this
8978 if the other operand is not constant as well. */
8980 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
8981 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
8983 tem = op0;
8984 op0 = op1;
8985 op1 = tem;
8986 code = swap_condition (code);
8989 if (flag_force_mem)
8991 op0 = force_not_mem (op0);
8992 op1 = force_not_mem (op1);
8995 do_pending_stack_adjust ();
8997 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
8998 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
8999 return tem;
9001 #if 0
9002 /* There's no need to do this now that combine.c can eliminate lots of
9003 sign extensions. This can be less efficient in certain cases on other
9004 machines. */
9006 /* If this is a signed equality comparison, we can do it as an
9007 unsigned comparison since zero-extension is cheaper than sign
9008 extension and comparisons with zero are done as unsigned. This is
9009 the case even on machines that can do fast sign extension, since
9010 zero-extension is easier to combine with other operations than
9011 sign-extension is. If we are comparing against a constant, we must
9012 convert it to what it would look like unsigned. */
9013 if ((code == EQ || code == NE) && ! unsignedp
9014 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
9016 if (GET_CODE (op1) == CONST_INT
9017 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
9018 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
9019 unsignedp = 1;
9021 #endif
9023 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
9025 return gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
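/* Added example: when both operands are constant the comparison folds
   with no compare insn at all;

	compare_from_rtx (GEN_INT (2), GEN_INT (3), LT, 0, SImode,
			  NULL_RTX, 0)

   returns const_true_rtx via simplify_relational_operation.  A
   non-constant comparison instead emits a cmp insn and hands back
   (lt (cc0) (const_int 0)) for the branch emitter to consume.  */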
9028 /* Generate code to calculate EXP using a store-flag instruction
9029 and return an rtx for the result. EXP is either a comparison
9030 or a TRUTH_NOT_EXPR whose operand is a comparison.
9032 If TARGET is nonzero, store the result there if convenient.
9034 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
9035 cheap.
9037 Return zero if there is no suitable set-flag instruction
9038 available on this machine.
9040 Once expand_expr has been called on the arguments of the comparison,
9041 we are committed to doing the store flag, since it is not safe to
9042 re-evaluate the expression. We emit the store-flag insn by calling
9043 emit_store_flag, but only expand the arguments if we have a reason
9044 to believe that emit_store_flag will be successful. If we think that
9045 it will, but it isn't, we have to simulate the store-flag with a
9046 set/jump/set sequence. */
9048 static rtx
9049 do_store_flag (exp, target, mode, only_cheap)
9050 tree exp;
9051 rtx target;
9052 enum machine_mode mode;
9053 int only_cheap;
9055 enum rtx_code code;
9056 tree arg0, arg1, type;
9057 tree tem;
9058 enum machine_mode operand_mode;
9059 int invert = 0;
9060 int unsignedp;
9061 rtx op0, op1;
9062 enum insn_code icode;
9063 rtx subtarget = target;
9064 rtx result, label, pattern, jump_pat;
9066 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9067 result at the end. We can't simply invert the test since it would
9068 have already been inverted if it were valid. This case occurs for
9069 some floating-point comparisons. */
9071 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
9072 invert = 1, exp = TREE_OPERAND (exp, 0);
9074 arg0 = TREE_OPERAND (exp, 0);
9075 arg1 = TREE_OPERAND (exp, 1);
9076 type = TREE_TYPE (arg0);
9077 operand_mode = TYPE_MODE (type);
9078 unsignedp = TREE_UNSIGNED (type);
9080 /* We won't bother with BLKmode store-flag operations because it would mean
9081 passing a lot of information to emit_store_flag. */
9082 if (operand_mode == BLKmode)
9083 return 0;
9085 STRIP_NOPS (arg0);
9086 STRIP_NOPS (arg1);
9088 /* Get the rtx comparison code to use. We know that EXP is a comparison
9089 operation of some type. Some comparisons against 1 and -1 can be
9090 converted to comparisons with zero. Do so here so that the tests
9091 below will be aware that we have a comparison with zero. These
9092 tests will not catch constants in the first operand, but constants
9093 are rarely passed as the first operand. */
9095 switch (TREE_CODE (exp))
9097 case EQ_EXPR:
9098 code = EQ;
9099 break;
9100 case NE_EXPR:
9101 code = NE;
9102 break;
9103 case LT_EXPR:
9104 if (integer_onep (arg1))
9105 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
9106 else
9107 code = unsignedp ? LTU : LT;
9108 break;
9109 case LE_EXPR:
9110 if (! unsignedp && integer_all_onesp (arg1))
9111 arg1 = integer_zero_node, code = LT;
9112 else
9113 code = unsignedp ? LEU : LE;
9114 break;
9115 case GT_EXPR:
9116 if (! unsignedp && integer_all_onesp (arg1))
9117 arg1 = integer_zero_node, code = GE;
9118 else
9119 code = unsignedp ? GTU : GT;
9120 break;
9121 case GE_EXPR:
9122 if (integer_onep (arg1))
9123 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
9124 else
9125 code = unsignedp ? GEU : GE;
9126 break;
9127 default:
9128 abort ();
9131 /* Put a constant second. */
9132 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
9134 tem = arg0; arg0 = arg1; arg1 = tem;
9135 code = swap_condition (code);
9138 /* If this is an equality or inequality test of a single bit, we can
9139 do this by shifting the bit being tested to the low-order bit and
9140 masking the result with the constant 1. If the condition was EQ,
9141 we xor it with 1. This does not require an scc insn and is faster
9142 than an scc insn even if we have it. */
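/* Illustrative sketch: the test

	(x & 0x20) != 0

   becomes (x >> 5) & 1, and the EQ form (x & 0x20) == 0 becomes
   ((x >> 5) & 1) ^ 1.  When the bit tested is the sign bit
   (bitnum == TYPE_PRECISION (type) - 1), the logical shift already
   isolates the bit and the trailing AND is omitted.  */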
9144 if ((code == NE || code == EQ)
9145 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
9146 && integer_pow2p (TREE_OPERAND (arg0, 1))
9147 && TYPE_PRECISION (type) <= HOST_BITS_PER_WIDE_INT)
9149 tree inner = TREE_OPERAND (arg0, 0);
9150 int bitnum = exact_log2 (INTVAL (expand_expr (TREE_OPERAND (arg0, 1),
9151 NULL_RTX, VOIDmode, 0)));
9152 int ops_unsignedp;
9154 /* If INNER is a right shift of a constant and it plus BITNUM does
9155 not overflow, adjust BITNUM and INNER. */
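/* Illustrative sketch: testing bit 0 of (x >> 3) is the same as testing
   bit 3 of x, so INNER becomes X and BITNUM becomes 3.  */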
9157 if (TREE_CODE (inner) == RSHIFT_EXPR
9158 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
9159 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
9160 && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
9161 < TYPE_PRECISION (type)))
9163 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
9164 inner = TREE_OPERAND (inner, 0);
9167 /* If we are going to be able to omit the AND below, we must do our
9168 operations as unsigned. If we must use the AND, we have a choice.
9169 Normally unsigned is faster, but for some machines signed is. */
9170 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
9171 #ifdef LOAD_EXTEND_OP
9172 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
9173 #else
9174 : 1
9175 #endif
9176 );
9178 if (subtarget == 0 || GET_CODE (subtarget) != REG
9179 || GET_MODE (subtarget) != operand_mode
9180 || ! safe_from_p (subtarget, inner))
9181 subtarget = 0;
9183 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
9185 if (bitnum != 0)
9186 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
9187 size_int (bitnum), subtarget, ops_unsignedp);
9189 if (GET_MODE (op0) != mode)
9190 op0 = convert_to_mode (mode, op0, ops_unsignedp);
9192 if ((code == EQ && ! invert) || (code == NE && invert))
9193 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
9194 ops_unsignedp, OPTAB_LIB_WIDEN);
9196 /* Put the AND last so it can combine with more things. */
9197 if (bitnum != TYPE_PRECISION (type) - 1)
9198 op0 = expand_and (op0, const1_rtx, subtarget);
9200 return op0;
9203 /* Now see if we are likely to be able to do this. Return if not. */
9204 if (! can_compare_p (operand_mode))
9205 return 0;
9206 icode = setcc_gen_code[(int) code];
9207 if (icode == CODE_FOR_nothing
9208 || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
9210 /* We can only do this if it is one of the special cases that
9211 can be handled without an scc insn. */
9212 if ((code == LT && integer_zerop (arg1))
9213 || (! only_cheap && code == GE && integer_zerop (arg1)))
9215 else if (BRANCH_COST >= 0
9216 && ! only_cheap && (code == NE || code == EQ)
9217 && TREE_CODE (type) != REAL_TYPE
9218 && ((abs_optab->handlers[(int) operand_mode].insn_code
9219 != CODE_FOR_nothing)
9220 || (ffs_optab->handlers[(int) operand_mode].insn_code
9221 != CODE_FOR_nothing)))
9223 else
9224 return 0;
9227 preexpand_calls (exp);
9228 if (subtarget == 0 || GET_CODE (subtarget) != REG
9229 || GET_MODE (subtarget) != operand_mode
9230 || ! safe_from_p (subtarget, arg1))
9231 subtarget = 0;
9233 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
9234 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
9236 if (target == 0)
9237 target = gen_reg_rtx (mode);
9239 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
9240 because, if emit_store_flag does anything, it will succeed and
9241 OP0 and OP1 will not be used subsequently. */
9243 result = emit_store_flag (target, code,
9244 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
9245 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
9246 operand_mode, unsignedp, 1);
9248 if (result)
9250 if (invert)
9251 result = expand_binop (mode, xor_optab, result, const1_rtx,
9252 result, 0, OPTAB_LIB_WIDEN);
9253 return result;
9256 /* If this failed, we have to do this with set/compare/jump/set code. */
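/* The emitted sequence is, schematically:

	target = 1;			(0 if INVERT)
	if (OP0 <code> OP1) goto L;
	target = 0;			(1 if INVERT)
     L:
*/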
9257 if (target == 0 || GET_CODE (target) != REG
9258 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
9259 target = gen_reg_rtx (GET_MODE (target));
9261 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
9262 result = compare_from_rtx (op0, op1, code, unsignedp,
9263 operand_mode, NULL_RTX, 0);
9264 if (GET_CODE (result) == CONST_INT)
9265 return (((result == const0_rtx && ! invert)
9266 || (result != const0_rtx && invert))
9267 ? const0_rtx : const1_rtx);
9269 label = gen_label_rtx ();
9270 if (bcc_gen_fctn[(int) code] == 0)
9271 abort ();
9273 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
9274 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
9275 emit_label (label);
9277 return target;
9280 /* Generate a tablejump instruction (used for switch statements). */
9282 #ifdef HAVE_tablejump
9284 /* INDEX is the value being switched on, with the lowest value
9285 in the table already subtracted.
9286 MODE is its expected mode (needed if INDEX is constant).
9287 RANGE is the length of the jump table.
9288 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
9290 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
9291 index value is out of range. */
9293 void
9294 do_tablejump (index, mode, range, table_label, default_label)
9295 rtx index, range, table_label, default_label;
9296 enum machine_mode mode;
9298 register rtx temp, vector;
9300 /* Do an unsigned comparison (in the proper mode) between the index
9301 expression and the value which represents the length of the range.
9302 Since we just finished subtracting the lower bound of the range
9303 from the index expression, this comparison allows us to simultaneously
9304 check that the original index expression value is both greater than
9305 or equal to the minimum value of the range and less than or equal to
9306 the maximum value of the range. */
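/* Illustrative sketch: for a switch whose cases span LOW..HIGH, the
   caller has already computed INDEX = i - LOW and RANGE = HIGH - LOW,
   so the single unsigned test

	if ((unsigned) (i - LOW) > (unsigned) (HIGH - LOW))
	  goto default_label;

   rejects both i < LOW (which wraps around to a huge unsigned value)
   and i > HIGH.  */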
9308 emit_cmp_insn (index, range, GTU, NULL_RTX, mode, 1, 0);
9309 emit_jump_insn (gen_bgtu (default_label));
9311 /* If index is in range, it must fit in Pmode.
9312 Convert to Pmode so we can index with it. */
9313 if (mode != Pmode)
9314 index = convert_to_mode (Pmode, index, 1);
9316 /* Don't let a MEM slip through, because then INDEX that comes
9317 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
9318 and break_out_memory_refs will go to work on it and mess it up. */
9319 #ifdef PIC_CASE_VECTOR_ADDRESS
9320 if (flag_pic && GET_CODE (index) != REG)
9321 index = copy_to_mode_reg (Pmode, index);
9322 #endif
9324 /* If flag_force_addr were to affect this address
9325 it could interfere with the tricky assumptions made
9326 about addresses that contain label-refs,
9327 which may be valid only very near the tablejump itself. */
9328 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
9329 GET_MODE_SIZE, because this indicates how large insns are. The other
9330 uses should all be Pmode, because they are addresses. This code
9331 could fail if addresses and insns are not the same size. */
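/* Schematically (illustrative), the address computed below is

	&table + INDEX * GET_MODE_SIZE (CASE_VECTOR_MODE)

   i.e. a scaled index into the dispatch table, from which the target
   label is loaded and jumped through.  */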
9332 index = gen_rtx (PLUS, Pmode,
9333 gen_rtx (MULT, Pmode, index,
9334 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
9335 gen_rtx (LABEL_REF, Pmode, table_label));
9336 #ifdef PIC_CASE_VECTOR_ADDRESS
9337 if (flag_pic)
9338 index = PIC_CASE_VECTOR_ADDRESS (index);
9339 else
9340 #endif
9341 index = memory_address_noforce (CASE_VECTOR_MODE, index);
9342 temp = gen_reg_rtx (CASE_VECTOR_MODE);
9343 vector = gen_rtx (MEM, CASE_VECTOR_MODE, index);
9344 RTX_UNCHANGING_P (vector) = 1;
9345 convert_move (temp, vector, 0);
9347 emit_jump_insn (gen_tablejump (temp, table_label));
9349 #ifndef CASE_VECTOR_PC_RELATIVE
9350 /* If we are generating PIC code or if the table is PC-relative, the
9351 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
9352 if (! flag_pic)
9353 emit_barrier ();
9354 #endif
9357 #endif /* HAVE_tablejump */
9360 /* Emit a suitable bytecode to load a value from memory, assuming a pointer
9361 to that value is on the top of the stack. The resulting type is TYPE, and
9362 the source declaration is DECL. */
9364 void
9365 bc_load_memory (type, decl)
9366 tree type, decl;
9368 enum bytecode_opcode opcode;
9371 /* Bit fields are special. We only know about signed and
9372 unsigned ints, and enums. The latter are treated as
9373 signed integers. */
9375 if (DECL_BIT_FIELD (decl))
9376 if (TREE_CODE (type) == ENUMERAL_TYPE
9377 || TREE_CODE (type) == INTEGER_TYPE)
9378 opcode = TREE_UNSIGNED (type) ? zxloadBI : sxloadBI;
9379 else
9380 abort ();
9381 else
9382 /* See corresponding comment in bc_store_memory(). */
9383 if (TYPE_MODE (type) == BLKmode
9384 || TYPE_MODE (type) == VOIDmode)
9385 return;
9386 else
9387 opcode = mode_to_load_map [(int) TYPE_MODE (type)];
9389 if (opcode == neverneverland)
9390 abort ();
9392 bc_emit_bytecode (opcode);
9394 #ifdef DEBUG_PRINT_CODE
9395 fputc ('\n', stderr);
9396 #endif
9400 /* Store the contents of the second stack slot to the address in the
9401 top stack slot. DECL is the declaration of the destination and is used
9402 to determine whether we're dealing with a bitfield. */
9404 void
9405 bc_store_memory (type, decl)
9406 tree type, decl;
9408 enum bytecode_opcode opcode;
9411 if (DECL_BIT_FIELD (decl))
9413 if (TREE_CODE (type) == ENUMERAL_TYPE
9414 || TREE_CODE (type) == INTEGER_TYPE)
9415 opcode = sstoreBI;
9416 else
9417 abort ();
9419 else
9420 if (TYPE_MODE (type) == BLKmode)
9422 /* Copy structure. This expands to a block copy instruction, storeBLK.
9423 In addition to the arguments expected by the other store instructions,
9424 it also expects a type size (SImode) on top of the stack, which is the
9425 structure size in size units (usually bytes). The first two arguments
9426 are already on the stack, so we just put the size on level 1. For some
9427 other languages the size may be variable, which is why we don't encode
9428 it as a storeBLK literal, but rather treat it as a full-fledged expression. */
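/* Illustrative sketch: for a structure assignment *p = s where s
   occupies 12 size units (taking the size unit to be bytes), the source
   and the destination address are already on the stack; we push the
   constant 12 (SImode) and emit storeBLK, which copies that many
   units.  */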
9430 bc_expand_expr (TYPE_SIZE (type));
9431 opcode = storeBLK;
9433 else
9434 opcode = mode_to_store_map [(int) TYPE_MODE (type)];
9436 if (opcode == neverneverland)
9437 abort ();
9439 bc_emit_bytecode (opcode);
9441 #ifdef DEBUG_PRINT_CODE
9442 fputc ('\n', stderr);
9443 #endif
9447 /* Allocate local stack space sufficient to hold a value of the given
9448 SIZE at alignment boundary ALIGNMENT bits. ALIGNMENT must be an
9449 integral power of 2. A special case is locals of type VOID, which
9450 have size 0 and alignment 1 - any "voidish" SIZE or ALIGNMENT is
9451 remapped into the corresponding attribute of SI. */
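/* Illustrative sketch: if local_vars_size is 5 and a local needs 32-bit
   (4-byte) alignment, the offset is first rounded up to 8, the local is
   placed there, and local_vars_size then grows by the local's size.  */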
9453 rtx
9454 bc_allocate_local (size, alignment)
9455 int size, alignment;
9457 rtx retval;
9458 int byte_alignment;
9460 if (size < 0)
9461 abort ();
9463 /* Normalize size and alignment */
9464 if (!size)
9465 size = UNITS_PER_WORD;
9467 if (alignment < BITS_PER_UNIT)
9468 byte_alignment = 1 << (INT_ALIGN - 1);
9469 else
9470 /* Align */
9471 byte_alignment = alignment / BITS_PER_UNIT;
9473 if (local_vars_size & (byte_alignment - 1))
9474 local_vars_size += byte_alignment - (local_vars_size & (byte_alignment - 1));
9476 retval = bc_gen_rtx ((char *) 0, local_vars_size, (struct bc_label *) 0);
9477 local_vars_size += size;
9479 return retval;
9483 /* Allocate variable-sized local array. Variable-sized arrays are
9484 actually pointers to the address in memory where they are stored. */
9486 rtx
9487 bc_allocate_variable_array (size)
9488 tree size;
9490 rtx retval;
9491 const int ptralign = (1 << (PTR_ALIGN - 1));
9493 /* Align pointer */
9494 if (local_vars_size & ptralign)
9495 local_vars_size += ptralign - (local_vars_size & ptralign);
9497 /* Note down local space needed: pointer to block; also return
9498 dummy rtx */
9500 retval = bc_gen_rtx ((char *) 0, local_vars_size, (struct bc_label *) 0);
9501 local_vars_size += POINTER_SIZE / BITS_PER_UNIT;
9502 return retval;
9506 /* Push the machine address for the given external variable offset. */
9507 void
9508 bc_load_externaddr (externaddr)
9509 rtx externaddr;
9511 bc_emit_bytecode (constP);
9512 bc_emit_code_labelref (BYTECODE_LABEL (externaddr),
9513 BYTECODE_BC_LABEL (externaddr)->offset);
9515 #ifdef DEBUG_PRINT_CODE
9516 fputc ('\n', stderr);
9517 #endif
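/* Return a newly allocated copy of the string S.  */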
9521 static char *
9522 bc_strdup (s)
9523 char *s;
9525 char *new = (char *) xmalloc ((strlen (s) + 1) * sizeof *s);
9526 strcpy (new, s);
9527 return new;
9531 /* Like above, but expects an IDENTIFIER. */
9532 void
9533 bc_load_externaddr_id (id, offset)
9534 tree id;
9535 int offset;
9537 if (!IDENTIFIER_POINTER (id))
9538 abort ();
9540 bc_emit_bytecode (constP);
9541 bc_emit_code_labelref (bc_strdup (IDENTIFIER_POINTER (id)), offset);
9543 #ifdef DEBUG_PRINT_CODE
9544 fputc ('\n', stderr);
9545 #endif
9549 /* Push the machine address for the given local variable offset. */
9550 void
9551 bc_load_localaddr (localaddr)
9552 rtx localaddr;
9554 bc_emit_instruction (localP, (HOST_WIDE_INT) BYTECODE_BC_LABEL (localaddr)->offset);
9558 /* Push the machine address for the given parameter offset.
9559 NOTE: offset is in bits. */
9560 void
9561 bc_load_parmaddr (parmaddr)
9562 rtx parmaddr;
9564 bc_emit_instruction (argP, ((HOST_WIDE_INT) BYTECODE_BC_LABEL (parmaddr)->offset
9565 / BITS_PER_UNIT));
9569 /* Convert a[i] into *(a + i). */
9570 tree
9571 bc_canonicalize_array_ref (exp)
9572 tree exp;
9574 tree type = TREE_TYPE (exp);
9575 tree array_adr = build1 (ADDR_EXPR, TYPE_POINTER_TO (type),
9576 TREE_OPERAND (exp, 0));
9577 tree index = TREE_OPERAND (exp, 1);
9580 /* Convert the integer argument to a type the same size as a pointer
9581 so the multiply won't overflow spuriously. */
9583 if (TYPE_PRECISION (TREE_TYPE (index)) != POINTER_SIZE)
9584 index = convert (type_for_size (POINTER_SIZE, 0), index);
9586 /* The array address isn't volatile even if the array is.
9587 (Of course this isn't terribly relevant since the bytecode
9588 translator treats nearly everything as volatile anyway.) */
9589 TREE_THIS_VOLATILE (array_adr) = 0;
9591 return build1 (INDIRECT_REF, type,
9592 fold (build (PLUS_EXPR,
9593 TYPE_POINTER_TO (type),
9594 array_adr,
9595 fold (build (MULT_EXPR,
9596 TYPE_POINTER_TO (type),
9597 index,
9598 size_in_bytes (type))))));
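/* Illustrative sketch: with int a[10], the reference a[i] becomes
   *(&a + i * sizeof (int)); the index is widened to pointer precision
   first so the multiplication cannot overflow spuriously.  */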
9602 /* Load the address of the component referenced by the given
9603 COMPONENT_REF expression.
9605 Returns innermost lvalue. */
9607 tree
9608 bc_expand_component_address (exp)
9609 tree exp;
9611 tree tem, chain;
9612 enum machine_mode mode;
9613 int bitpos = 0;
9614 HOST_WIDE_INT SIval;
9617 tem = TREE_OPERAND (exp, 1);
9618 mode = DECL_MODE (tem);
9621 /* Compute cumulative bit offset for nested component refs
9622 and array refs, and find the ultimate containing object. */
9624 for (tem = exp;; tem = TREE_OPERAND (tem, 0))
9626 if (TREE_CODE (tem) == COMPONENT_REF)
9627 bitpos += TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (tem, 1)));
9628 else
9629 if (TREE_CODE (tem) == ARRAY_REF
9630 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
9631 && TREE_CODE (TYPE_SIZE (TREE_TYPE (tem))) == INTEGER_CST)
9633 bitpos += (TREE_INT_CST_LOW (TREE_OPERAND (tem, 1))
9634 * TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (tem)))
9635 /* * TYPE_SIZE_UNIT (TREE_TYPE (tem)) */);
9636 else
9637 break;
9640 bc_expand_expr (tem);
9643 /* For bitfields also push their offset and size */
9644 if (DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
9645 bc_push_offset_and_size (bitpos, TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (exp, 1))));
9646 else
9647 if ((SIval = bitpos / BITS_PER_UNIT) != 0)
9648 bc_emit_instruction (addconstPSI, SIval);
9650 return (TREE_OPERAND (exp, 1));
9654 /* Emit code to push two SI constants */
9655 void
9656 bc_push_offset_and_size (offset, size)
9657 HOST_WIDE_INT offset, size;
9659 bc_emit_instruction (constSI, offset);
9660 bc_emit_instruction (constSI, size);
9664 /* Emit byte code to push the address of the given lvalue expression to
9665 the stack. If it's a bit field, we also push offset and size info.
9667 Returns innermost component, which allows us to determine not only
9668 its type, but also whether it's a bitfield. */
9670 tree
9671 bc_expand_address (exp)
9672 tree exp;
9674 /* Safeguard */
9675 if (!exp || TREE_CODE (exp) == ERROR_MARK)
9676 return (exp);
9679 switch (TREE_CODE (exp))
9681 case ARRAY_REF:
9683 return (bc_expand_address (bc_canonicalize_array_ref (exp)));
9685 case COMPONENT_REF:
9687 return (bc_expand_component_address (exp));
9689 case INDIRECT_REF:
9691 bc_expand_expr (TREE_OPERAND (exp, 0));
9693 /* For variable-sized types: retrieve pointer. Sometimes the
9694 TYPE_SIZE tree is NULL. Is this a bug or a feature? Let's
9695 also make sure we have an operand, just in case... */
9697 if (TREE_OPERAND (exp, 0)
9698 && TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)))
9699 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)))) != INTEGER_CST)
9700 bc_emit_instruction (loadP);
9702 /* If packed, also return offset and size */
9703 if (DECL_BIT_FIELD (TREE_OPERAND (exp, 0)))
9705 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (exp, 0))),
9706 TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (exp, 0))));
9708 return (TREE_OPERAND (exp, 0));
9710 case FUNCTION_DECL:
9712 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
9713 BYTECODE_BC_LABEL (DECL_RTL (exp))->offset);
9714 break;
9716 case PARM_DECL:
9718 bc_load_parmaddr (DECL_RTL (exp));
9720 /* For variable-sized types: retrieve pointer */
9721 if (TYPE_SIZE (TREE_TYPE (exp))
9722 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
9723 bc_emit_instruction (loadP);
9725 /* If packed, also return offset and size */
9726 if (DECL_BIT_FIELD (exp))
9727 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp)),
9728 TREE_INT_CST_LOW (DECL_SIZE (exp)));
9730 break;
9732 case RESULT_DECL:
9734 bc_emit_instruction (returnP);
9735 break;
9737 case VAR_DECL:
9739 #if 0
9740 if (BYTECODE_LABEL (DECL_RTL (exp)))
9741 bc_load_externaddr (DECL_RTL (exp));
9742 #endif
9744 if (DECL_EXTERNAL (exp))
9745 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
9746 (BYTECODE_BC_LABEL (DECL_RTL (exp)))->offset);
9747 else
9748 bc_load_localaddr (DECL_RTL (exp));
9750 /* For variable-sized types: retrieve pointer */
9751 if (TYPE_SIZE (TREE_TYPE (exp))
9752 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
9753 bc_emit_instruction (loadP);
9755 /* If packed, also return offset and size */
9756 if (DECL_BIT_FIELD (exp))
9757 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp)),
9758 TREE_INT_CST_LOW (DECL_SIZE (exp)));
9760 break;
9762 case STRING_CST:
9764 rtx r;
9766 bc_emit_bytecode (constP);
9767 r = output_constant_def (exp);
9768 bc_emit_code_labelref (BYTECODE_LABEL (r), BYTECODE_BC_LABEL (r)->offset);
9770 #ifdef DEBUG_PRINT_CODE
9771 fputc ('\n', stderr);
9772 #endif
9774 break;
9776 default:
9778 abort ();
9779 break;
9782 /* Most lvalues don't have components. */
9783 return (exp);
9787 /* Emit a type code to be used by the runtime support in handling
9788 parameter passing. The type code consists of the machine mode
9789 plus the minimal alignment shifted left 8 bits. */
9791 tree
9792 bc_runtime_type_code (type)
9793 tree type;
9795 int val;
9797 switch (TREE_CODE (type))
9799 case VOID_TYPE:
9800 case INTEGER_TYPE:
9801 case REAL_TYPE:
9802 case COMPLEX_TYPE:
9803 case ENUMERAL_TYPE:
9804 case POINTER_TYPE:
9805 case RECORD_TYPE:
9807 val = (int) TYPE_MODE (type) | TYPE_ALIGN (type) << 8;
9808 break;
9810 case ERROR_MARK:
9812 val = 0;
9813 break;
9815 default:
9817 abort ();
9819 return build_int_2 (val, 0);
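/* Illustrative sketch (assuming the numeric value of the mode fits in
   eight bits): for a type of mode M with TYPE_ALIGN of 32, the code is
   M | (32 << 8), so a consumer can recover the mode as (code & 0xff)
   and the minimal alignment as (code >> 8).  */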
9823 /* Generate constructor label */
9824 char *
9825 bc_gen_constr_label ()
9827 static int label_counter;
9828 static char label[20];
9830 sprintf (label, "*LR%d", label_counter++);
9832 return (obstack_copy0 (&permanent_obstack, label, strlen (label)));
9836 /* Evaluate constructor CONSTR and return pointer to it on level one. We
9837 expand the constructor data as static data, and push a pointer to it.
9838 The pointer is put in the pointer table and is retrieved by a constP
9839 bytecode instruction. We then loop and store each constructor member in
9840 the corresponding component. Finally, we return the original pointer on
9841 the stack. */
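/* Illustrative sketch (hypothetical source fragment): for a
   non-constant constructor such as { f (), g () }, space is laid out in
   the data section under a generated label, a pointer to it is pushed
   via constP, the block is cleared first if some members are missing,
   and each member value is then stored through that pointer with
   bc_store_field.  */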
9843 void
9844 bc_expand_constructor (constr)
9845 tree constr;
9847 char *l;
9848 HOST_WIDE_INT ptroffs;
9849 rtx constr_rtx;
9852 /* Literal constructors are handled as constants, whereas
9853 non-literals are evaluated and stored element by element
9854 into the data segment. */
9856 /* Allocate space in proper segment and push pointer to space on stack. */
9859 l = bc_gen_constr_label ();
9861 if (TREE_CONSTANT (constr))
9863 text_section ();
9865 bc_emit_const_labeldef (l);
9866 bc_output_constructor (constr, int_size_in_bytes (TREE_TYPE (constr)));
9868 else
9870 data_section ();
9872 bc_emit_data_labeldef (l);
9873 bc_output_data_constructor (constr);
9877 /* Add reference to pointer table and recall pointer to stack;
9878 this code is common for both types of constructors: literals
9879 and non-literals. */
9881 ptroffs = bc_define_pointer (l);
9882 bc_emit_instruction (constP, ptroffs);
9884 /* This is all that has to be done if it's a literal. */
9885 if (TREE_CONSTANT (constr))
9886 return;
9889 /* At this point, we have the pointer to the structure on top of the stack.
9890 Generate sequences of store_memory calls for the constructor. */
9892 /* constructor type is structure */
9893 if (TREE_CODE (TREE_TYPE (constr)) == RECORD_TYPE)
9895 register tree elt;
9897 /* If the constructor has fewer fields than the structure,
9898 clear the whole structure first. */
9900 if (list_length (CONSTRUCTOR_ELTS (constr))
9901 != list_length (TYPE_FIELDS (TREE_TYPE (constr))))
9903 bc_emit_instruction (duplicate);
9904 bc_emit_instruction (constSI, (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr)));
9905 bc_emit_instruction (clearBLK);
9908 /* Store each element of the constructor into the corresponding
9909 field of TARGET. */
9911 for (elt = CONSTRUCTOR_ELTS (constr); elt; elt = TREE_CHAIN (elt))
9913 register tree field = TREE_PURPOSE (elt);
9914 register enum machine_mode mode;
9915 int bitsize;
9916 int bitpos;
9917 int unsignedp;
9919 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field)) /* * DECL_SIZE_UNIT (field) */;
9920 mode = DECL_MODE (field);
9921 unsignedp = TREE_UNSIGNED (field);
9923 bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));
9925 bc_store_field (elt, bitsize, bitpos, mode, TREE_VALUE (elt), TREE_TYPE (TREE_VALUE (elt)),
9926 /* The alignment of TARGET is
9927 at least what its type requires. */
9928 VOIDmode, 0,
9929 TYPE_ALIGN (TREE_TYPE (constr)) / BITS_PER_UNIT,
9930 int_size_in_bytes (TREE_TYPE (constr)));
9933 else
9935 /* Constructor type is array */
9936 if (TREE_CODE (TREE_TYPE (constr)) == ARRAY_TYPE)
9938 register tree elt;
9939 register int i;
9940 tree domain = TYPE_DOMAIN (TREE_TYPE (constr));
9941 int minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
9942 int maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
9943 tree elttype = TREE_TYPE (TREE_TYPE (constr));
9945 /* If the constructor has fewer elements than the array,
9946 clear the whole array first. */
9948 if (list_length (CONSTRUCTOR_ELTS (constr)) < maxelt - minelt + 1)
9950 bc_emit_instruction (duplicate);
9951 bc_emit_instruction (constSI, (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr)));
9952 bc_emit_instruction (clearBLK);
9956 /* Store each element of the constructor into the corresponding
9957 element of TARGET, determined by counting the elements. */
9959 for (elt = CONSTRUCTOR_ELTS (constr), i = 0;
9960 elt;
9961 elt = TREE_CHAIN (elt), i++)
9963 register enum machine_mode mode;
9964 int bitsize;
9965 int bitpos;
9966 int unsignedp;
9968 mode = TYPE_MODE (elttype);
9969 bitsize = GET_MODE_BITSIZE (mode);
9970 unsignedp = TREE_UNSIGNED (elttype);
9972 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype))
9973 /* * TYPE_SIZE_UNIT (elttype) */ );
9975 bc_store_field (elt, bitsize, bitpos, mode,
9976 TREE_VALUE (elt), TREE_TYPE (TREE_VALUE (elt)),
9977 /* The alignment of TARGET is
9978 at least what its type requires. */
9979 VOIDmode, 0,
9980 TYPE_ALIGN (TREE_TYPE (constr)) / BITS_PER_UNIT,
9981 int_size_in_bytes (TREE_TYPE (constr)));
9988 /* Store the value of EXP (an expression tree) into member FIELD of
9989 structure at address on stack, which has type TYPE, mode MODE and
9990 occupies BITSIZE bits, starting BITPOS bits from the beginning of the
9991 structure.
9993 ALIGN is the alignment that TARGET is known to have, measured in bytes.
9994 TOTAL_SIZE is its size in bytes, or -1 if variable. */
9996 void
9997 bc_store_field (field, bitsize, bitpos, mode, exp, type,
9998 value_mode, unsignedp, align, total_size)
9999 int bitsize, bitpos;
10000 enum machine_mode mode;
10001 tree field, exp, type;
10002 enum machine_mode value_mode;
10003 int unsignedp;
10004 int align;
10005 int total_size;
10008 /* Expand expression and copy pointer */
10009 bc_expand_expr (exp);
10010 bc_emit_instruction (over);
10013 /* If the component is a bit field, we cannot use addressing to access
10014 it. Use bit-field techniques to store in it. */
10016 if (DECL_BIT_FIELD (field))
10018 bc_store_bit_field (bitpos, bitsize, unsignedp);
10019 return;
10021 else
10022 /* Not bit field */
10024 HOST_WIDE_INT offset = bitpos / BITS_PER_UNIT;
10026 /* Advance pointer to the desired member */
10027 if (offset)
10028 bc_emit_instruction (addconstPSI, offset);
10030 /* Store */
10031 bc_store_memory (type, field);
10036 /* Store SI/SU in bitfield */
10037 void
10038 bc_store_bit_field (offset, size, unsignedp)
10039 int offset, size, unsignedp;
10041 /* Push bitfield offset and size */
10042 bc_push_offset_and_size (offset, size);
10044 /* Store */
10045 bc_emit_instruction (sstoreBI);
10049 /* Load SI/SU from bitfield */
10050 void
10051 bc_load_bit_field (offset, size, unsignedp)
10052 int offset, size, unsignedp;
10054 /* Push bitfield offset and size */
10055 bc_push_offset_and_size (offset, size);
10057 /* Load: sign-extend if signed, else zero-extend */
10058 bc_emit_instruction (unsignedp ? zxloadBI : sxloadBI);
10062 /* Adjust interpreter stack by NLEVELS. Positive means drop NLEVELS
10063 (adjust stack pointer upwards), negative means add that number of
10064 levels (adjust the stack pointer downwards). Only positive values
10065 normally make sense. */
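/* Illustrative sketch: NLEVELS == 1 emits one `drop', NLEVELS == 2
   emits two `drop' instructions via the fall-through below, and larger
   values emit a single adjstackSI.  */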
10067 void
10068 bc_adjust_stack (nlevels)
10069 int nlevels;
10071 switch (nlevels)
10073 case 0:
10074 break;
10076 case 2:
10077 bc_emit_instruction (drop);
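/* Fall through to emit a second drop.  */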
10079 case 1:
10080 bc_emit_instruction (drop);
10081 break;
10083 default:
10085 bc_emit_instruction (adjstackSI, (HOST_WIDE_INT) nlevels);
10086 stack_depth -= nlevels;
10089 #if defined (VALIDATE_STACK_FOR_BC)
10090 VALIDATE_STACK_FOR_BC ();
10091 #endif