/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.  */

#include "config.h"
#include "machmode.h"
#include "rtl.h"
#include "tree.h"
#include "obstack.h"
#include "flags.h"
#include "regs.h"
#include "function.h"
#include "insn-flags.h"
#include "insn-codes.h"
#include "expr.h"
#include "insn-config.h"
#include "recog.h"
#include "output.h"
#include "typeclass.h"

#include "bytecode.h"
#include "bc-opcode.h"
#include "bc-typecd.h"
#include "bc-optab.h"
#include "bc-emit.h"

#define CEIL(x,y) (((x) + (y) - 1) / (y))
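
/* E.g. CEIL (7, 4) is (7 + 3) / 4 == 2: division by Y, rounding up.  */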

/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first */
#endif

#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

/* Like STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)

/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* Nonzero to generate code for all the subroutines within an
   expression before generating the upper levels of the expression.
   Nowadays this is never zero.  */
int do_preexpand_calls = 1;

/* Number of units that we should eventually pop off the stack.
   These are the arguments to function calls that have already returned.  */
int pending_stack_adjust;

/* Nonzero means stack pops must not be deferred, and deferred stack
   pops must not be output.  It is nonzero inside a function call,
   inside a conditional expression, inside a statement expression,
   and in other cases as well.  */
int inhibit_defer_pop;

/* A list of all cleanups which belong to the arguments of
   function calls being expanded by expand_call.  */
tree cleanups_this_call;

/* When temporaries are created by TARGET_EXPRs, they are created at
   this level of temp_slot_level, so that they can remain allocated
   until no longer needed.  CLEANUP_POINT_EXPRs define the lifetime
   of TARGET_EXPRs.  */
int target_temp_slot_level;

/* Nonzero means __builtin_saveregs has already been done in this function.
   The value is the pseudoreg containing the value __builtin_saveregs
   returned.  */
static rtx saveregs_value;

/* Similarly for __builtin_apply_args.  */
static rtx apply_args_value;

/* This structure is used by move_by_pieces to describe the move to
   be performed.  */

struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  int len;
  int offset;
  int reverse;
};

/* Used to generate bytecodes: keep track of size of local variables,
   as well as depth of arithmetic stack.  (Notice that variables are
   stored on the machine's stack, not the arithmetic stack.)  */

extern int local_vars_size;
extern int stack_depth;
extern int max_stack_depth;
extern struct obstack permanent_obstack;

static rtx enqueue_insn PROTO((rtx, rtx));
static int queued_subexp_p PROTO((rtx));
static void init_queue PROTO((void));
static void move_by_pieces PROTO((rtx, rtx, int, int));
static int move_by_pieces_ninsns PROTO((unsigned int, int));
static void move_by_pieces_1 PROTO((rtx (*) (), enum machine_mode,
				    struct move_by_pieces *));
static void store_constructor PROTO((tree, rtx));
static rtx store_field PROTO((rtx, int, int, enum machine_mode, tree,
			      enum machine_mode, int, int, int));
static int get_inner_unaligned_p PROTO((tree));
static tree save_noncopied_parts PROTO((tree, tree));
static tree init_noncopied_parts PROTO((tree, tree));
static int safe_from_p PROTO((rtx, tree));
static int fixed_type_p PROTO((tree));
static int get_pointer_alignment PROTO((tree, unsigned));
static tree string_constant PROTO((tree, tree *));
static tree c_strlen PROTO((tree));
static rtx expand_builtin PROTO((tree, rtx, rtx,
				 enum machine_mode, int));
static int apply_args_size PROTO((void));
static int apply_result_size PROTO((void));
static rtx result_vector PROTO((int, rtx));
static rtx expand_builtin_apply_args PROTO((void));
static rtx expand_builtin_apply PROTO((rtx, rtx, rtx));
static void expand_builtin_return PROTO((rtx));
static rtx expand_increment PROTO((tree, int));
rtx bc_expand_increment PROTO((struct increment_operator *, tree));
tree bc_runtime_type_code PROTO((tree));
rtx bc_allocate_local PROTO((int, int));
void bc_store_memory PROTO((tree, tree));
tree bc_expand_component_address PROTO((tree));
tree bc_expand_address PROTO((tree));
void bc_expand_constructor PROTO((tree));
void bc_adjust_stack PROTO((int));
tree bc_canonicalize_array_ref PROTO((tree));
void bc_load_memory PROTO((tree, tree));
void bc_load_externaddr PROTO((rtx));
void bc_load_externaddr_id PROTO((tree, int));
void bc_load_localaddr PROTO((rtx));
void bc_load_parmaddr PROTO((rtx));
static void preexpand_calls PROTO((tree));
static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
static void do_jump_by_parts_greater_rtx PROTO((enum machine_mode, int, rtx, rtx, rtx, rtx));
static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
static void do_jump_by_parts_equality_rtx PROTO((rtx, rtx, rtx));
static void do_jump_for_compare PROTO((rtx, rtx, rtx));
static rtx compare PROTO((tree, enum rtx_code, enum rtx_code));
static rtx do_store_flag PROTO((tree, rtx, enum machine_mode, int));
static tree defer_cleanups_to PROTO((tree));
extern void (*interim_eh_hook) PROTO((tree));

/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* MOVE_RATIO is the number of move instructions that is better than
   a block move.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* A value of around 6 would minimize code size; infinity would minimize
   execution time.  */
#define MOVE_RATIO 15
#endif
#endif

/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS 0
#endif

/* Register mappings for target machines without register windows.  */
#ifndef INCOMING_REGNO
#define INCOMING_REGNO(OUT) (OUT)
#endif
#ifndef OUTGOING_REGNO
#define OUTGOING_REGNO(IN) (IN)
#endif

/* Maps used to convert modes to const, load, and store bytecodes.  */
enum bytecode_opcode mode_to_const_map[MAX_MACHINE_MODE];
enum bytecode_opcode mode_to_load_map[MAX_MACHINE_MODE];
enum bytecode_opcode mode_to_store_map[MAX_MACHINE_MODE];

/* Initialize maps used to convert modes to const, load, and store
   bytecodes.  */

void
bc_init_mode_to_opcode_maps ()
{
  int mode;

  for (mode = 0; mode < (int) MAX_MACHINE_MODE; mode++)
    mode_to_const_map[mode] =
      mode_to_load_map[mode] =
	mode_to_store_map[mode] = neverneverland;

#define DEF_MODEMAP(SYM, CODE, UCODE, CONST, LOAD, STORE) \
  mode_to_const_map[(int) SYM] = CONST; \
  mode_to_load_map[(int) SYM] = LOAD; \
  mode_to_store_map[(int) SYM] = STORE;

#include "modemap.def"
#undef DEF_MODEMAP
}
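
/* For illustration, a hypothetical modemap.def entry

     DEF_MODEMAP (SImode, ..., ..., constSI, loadSI, storeSI)

   would expand via DEF_MODEMAP above into

     mode_to_const_map[(int) SImode] = constSI;
     mode_to_load_map[(int) SImode] = loadSI;
     mode_to_store_map[(int) SImode] = storeSI;

   (the opcode names here are invented; the real entries live in
   modemap.def).  */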

/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat;
  enum machine_mode mode;
  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  rtx mem = gen_rtx (MEM, VOIDmode, stack_pointer_rtx);
  rtx mem1 = gen_rtx (MEM, VOIDmode, frame_pointer_rtx);

  start_sequence ();
  insn = emit_insn (gen_rtx (SET, 0, 0));
  pat = PATTERN (insn);

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;
      rtx reg;
      int num_clobbers;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    reg = gen_rtx (REG, mode, regno);

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  end_sequence ();
}

/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  init_queue ();

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  cleanups_this_call = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

/* Save all variables describing the current status into the structure *P.
   This is used before starting a nested function.  */

void
save_expr_status (p)
     struct function *p;
{
  /* Instead of saving the postincrement queue, empty it.  */
  emit_queue ();

  p->pending_stack_adjust = pending_stack_adjust;
  p->inhibit_defer_pop = inhibit_defer_pop;
  p->cleanups_this_call = cleanups_this_call;
  p->saveregs_value = saveregs_value;
  p->apply_args_value = apply_args_value;
  p->forced_labels = forced_labels;

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  cleanups_this_call = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

/* Restore all variables describing the current status from the structure *P.
   This is used after a nested function.  */

void
restore_expr_status (p)
     struct function *p;
{
  pending_stack_adjust = p->pending_stack_adjust;
  inhibit_defer_pop = p->inhibit_defer_pop;
  cleanups_this_call = p->cleanups_this_call;
  saveregs_value = p->saveregs_value;
  apply_args_value = p->apply_args_value;
  forced_labels = p->forced_labels;
}

/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

static rtx pending_chain;

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx (QUEUED, GET_MODE (var),
			   var, NULL_RTX, NULL_RTX, body, pending_chain);
  return pending_chain;
}

/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */

rtx
protect_from_queue (x, modify)
     register rtx x;
     int modify;
{
  register RTX_CODE code = GET_CODE (x);

#if 0 /* A QUEUED can hang around after the queue is forced out. */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
	 use of autoincrement.  Make a copy of the contents of the memory
	 location rather than a copy of the address, but not if the value is
	 of mode BLKmode.  Don't modify X in place since it might be
	 shared.  */
      if (code == MEM && GET_MODE (x) != BLKmode
	  && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
	{
	  register rtx y = XEXP (x, 0);
	  register rtx new = gen_rtx (MEM, GET_MODE (x), QUEUED_VAR (y));

	  MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
	  RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
	  MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);

	  if (QUEUED_INSN (y))
	    {
	      register rtx temp = gen_reg_rtx (GET_MODE (new));
	      emit_insn_before (gen_move_insn (temp, new),
				QUEUED_INSN (y));
	      return temp;
	    }
	  return new;
	}
      /* Otherwise, recursively protect the subexpressions of all
	 the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
	{
	  rtx tem = protect_from_queue (XEXP (x, 0), 0);
	  if (tem != XEXP (x, 0))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = tem;
	    }
	}
      else if (code == PLUS || code == MULT)
	{
	  rtx new0 = protect_from_queue (XEXP (x, 0), 0);
	  rtx new1 = protect_from_queue (XEXP (x, 1), 0);
	  if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = new0;
	      XEXP (x, 1) = new1;
	    }
	}
      return x;
    }
  /* If the increment has not happened, use the variable itself.  */
  if (QUEUED_INSN (x) == 0)
    return QUEUED_VAR (x);
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
		    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}

/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

static int
queued_subexp_p (x)
     rtx x;
{
  register enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return queued_subexp_p (XEXP (x, 0))
	|| queued_subexp_p (XEXP (x, 1));
    }
  return 0;
}
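
/* For instance, applied to the address

     (plus (queued ...) (const_int 4))

   queued_subexp_p returns 1, while a plain
   (plus (reg ...) (const_int 4)) yields 0.  (Illustrative rtx
   sketches, not expressions built anywhere in this file.)  */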

/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  register rtx p;
  while (p = pending_chain)
    {
      QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
      pending_chain = QUEUED_NEXT (p);
    }
}

static void
init_queue ()
{
  if (pending_chain)
    abort ();
}
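
/* Illustrative sketch (names invented): to expand V++ where V lives in
   rtx VAR, an expander can queue the increment, use the QUEUED result
   wherever the pre-increment value is needed, and flush at the end:

     rtx q = enqueue_insn (var, gen_move_insn (var, plus_constant (var, 1)));
     rtx old = protect_from_queue (q, 0);
     ...
     emit_queue ();

   As the comments above warn, protect_from_queue must be redone if the
   queue may have been flushed in between.  */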

/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (to, from, unsignedp)
     register rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (to_real)
    {
      rtx value;

#ifdef HAVE_extendqfhf2
      if (HAVE_extendqfhf2 && from_mode == QFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_extendqfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendqfsf2
      if (HAVE_extendqfsf2 && from_mode == QFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_extendqfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendqfdf2
      if (HAVE_extendqfdf2 && from_mode == QFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_extendqfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendqfxf2
      if (HAVE_extendqfxf2 && from_mode == QFmode && to_mode == XFmode)
	{
	  emit_unop_insn (CODE_FOR_extendqfxf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendqftf2
      if (HAVE_extendqftf2 && from_mode == QFmode && to_mode == TFmode)
	{
	  emit_unop_insn (CODE_FOR_extendqftf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_extendhftqf2
      if (HAVE_extendhftqf2 && from_mode == HFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_extendhftqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_extendhfsf2
      if (HAVE_extendhfsf2 && from_mode == HFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_extendhfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendhfdf2
      if (HAVE_extendhfdf2 && from_mode == HFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_extendhfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendhfxf2
      if (HAVE_extendhfxf2 && from_mode == HFmode && to_mode == XFmode)
	{
	  emit_unop_insn (CODE_FOR_extendhfxf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendhftf2
      if (HAVE_extendhftf2 && from_mode == HFmode && to_mode == TFmode)
	{
	  emit_unop_insn (CODE_FOR_extendhftf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_extendsfdf2
      if (HAVE_extendsfdf2 && from_mode == SFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_extendsfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendsfxf2
      if (HAVE_extendsfxf2 && from_mode == SFmode && to_mode == XFmode)
	{
	  emit_unop_insn (CODE_FOR_extendsfxf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendsftf2
      if (HAVE_extendsftf2 && from_mode == SFmode && to_mode == TFmode)
	{
	  emit_unop_insn (CODE_FOR_extendsftf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extenddfxf2
      if (HAVE_extenddfxf2 && from_mode == DFmode && to_mode == XFmode)
	{
	  emit_unop_insn (CODE_FOR_extenddfxf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extenddftf2
      if (HAVE_extenddftf2 && from_mode == DFmode && to_mode == TFmode)
	{
	  emit_unop_insn (CODE_FOR_extenddftf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_trunchfqf2
      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfqf2
      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfqf2
      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfqf2
      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfqf2
      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_trunctqfhf2
      if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfhf2
      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfhf2
      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfhf2
      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfhf2
      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
	  return;
	}
#endif

      libcall = (rtx) 0;
      switch (from_mode)
	{
	case SFmode:
	  switch (to_mode)
	    {
	    case DFmode:
	      libcall = extendsfdf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extendsfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extendsftf2_libfunc;
	      break;
	    }
	  break;

	case DFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncdfsf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extenddfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extenddftf2_libfunc;
	      break;
	    }
	  break;

	case XFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncxfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = truncxfdf2_libfunc;
	      break;
	    }
	  break;

	case TFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = trunctfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = trunctfdf2_libfunc;
	      break;
	    }
	  break;
	}

      if (libcall == (rtx) 0)
	/* This conversion is not implemented yet.  */
	abort ();

      value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
				       1, from, from_mode);
      emit_move_insn (to, value);
      return;
    }

  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  if (GET_CODE (to) == REG)
	    emit_insn (gen_rtx (CLOBBER, VOIDmode, to));
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	{
#ifdef HAVE_slt
	  if (HAVE_slt
	      && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
	      && STORE_FLAG_VALUE == -1)
	    {
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			     lowpart_mode, 0, 0);
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));
	    }
	  else
#endif
	    {
	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				NULL_RTX, 0);
	      fill_value = convert_to_mode (word_mode, fill_value, 1);
	    }
	}

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  if (subword == 0)
	    abort ();

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx (equiv_code, to_mode, copy_rtx (from)));
      return;
    }

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Handle pointer conversion */  /* SPEE 900220 */
  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
	from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi2
      if (HAVE_truncsipsi2)
	{
	  emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncsipsi2 */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
	{
	  from = convert_to_mode (SImode, from, unsignedp);
	  from_mode = SImode;
	}
      else
	{
#ifdef HAVE_extendpsisi2
	  if (HAVE_extendpsisi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpsisi2 */
	  abort ();
	}
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
					       GET_MODE_BITSIZE (intermediate))))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.  */
	  abort ();
	}
    }

  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
	{
	  emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
	{
	  emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
	{
	  emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}

/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}

/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_modes (mode, oldmode, x, unsignedp)
     enum machine_mode mode, oldmode;
     rtx x;
     int unsignedp;
{
  register rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    return immed_double_const (INTVAL (x), (HOST_WIDE_INT) 0, mode);

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (GET_CODE (x) == REG
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.  This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return GEN_INT (val);
	}

      return gen_lowpart (mode, x);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
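
/* Example (illustrative): to widen a QImode value held in rtx REG to
   SImode with zero-extension, a caller writes

     rtx wide = convert_to_mode (SImode, reg, 1);

   which returns REG reinterpreted in place where gen_lowpart permits,
   or a fresh pseudo holding the converted value, as above.  */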

/* Generate several move instructions to copy LEN bytes
   from block FROM to block TO.  (These are MEM rtx's with BLKmode).
   The caller must pass FROM and TO
   through protect_from_queue before calling.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static void
move_by_pieces (to, from, len, align)
     rtx to, from;
     int len, align;
{
  struct move_by_pieces data;
  rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
  int max_size = MOVE_MAX + 1;

  data.offset = 0;
  data.to_addr = to_addr;
  data.from_addr = from_addr;
  data.to = to;
  data.from = from;
  data.autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  data.reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
#endif
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
#endif
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len != 0)
    abort ();
}

/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static int
move_by_pieces_ninsns (l, align)
     unsigned int l;
     int align;
{
  register int n_insns = 0;
  int max_size = MOVE_MAX + 1;

  if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  return n_insns;
}
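
/* Worked example (hypothetical target): with MOVE_MAX == 4, full
   alignment, and SImode/HImode/QImode moves all available, a length of
   l == 7 bytes costs

     7 / 4 == 1 SImode move (3 bytes left),
     3 / 2 == 1 HImode move (1 byte left),
     1 / 1 == 1 QImode move,

   so move_by_pieces_ninsns returns 3.  */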

/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) ();
     enum machine_mode mode;
     struct move_by_pieces *data;
{
  register int size = GET_MODE_SIZE (mode);
  register rtx to1, from1;

  while (data->len >= size)
    {
      if (data->reverse) data->offset -= size;

      to1 = (data->autinc_to
	     ? gen_rtx (MEM, mode, data->to_addr)
	     : change_address (data->to, mode,
			       plus_constant (data->to_addr, data->offset)));
      from1 =
	(data->autinc_from
	 ? gen_rtx (MEM, mode, data->from_addr)
	 : change_address (data->from, mode,
			   plus_constant (data->from_addr, data->offset)));

#ifdef HAVE_PRE_DECREMENT
      if (data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
      if (data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
#endif

      emit_insn ((*genfun) (to1, from1));
#ifdef HAVE_POST_INCREMENT
      if (data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
#endif

      if (! data->reverse) data->offset += size;

      data->len -= size;
    }
}

/* Emit code to move a block Y to a block X.
   This may be done with string-move instructions,
   with multiple scalar move instructions, or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
   with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have,
   measured in bytes.  */

void
emit_block_move (x, y, size, align)
     rtx x, y;
     rtx size;
     int align;
{
  if (GET_MODE (x) != BLKmode)
    abort ();

  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();
  if (size == 0)
    abort ();

  if (GET_CODE (size) == CONST_INT
      && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
    move_by_pieces (x, y, INTVAL (size), align);
  else
    {
      /* Try the most limited insn first, because there's no point
	 including more than one in the machine description unless
	 the more limited one has some advantage.  */

      rtx opalign = GEN_INT (align);
      enum machine_mode mode;

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	{
	  enum insn_code code = movstr_optab[(int) mode];

	  if (code != CODE_FOR_nothing
	      /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
		 here because if SIZE is less than the mode mask, as it is
		 returned by the macro, it will definitely be less than the
		 actual mode mask.  */
	      && (unsigned HOST_WIDE_INT) INTVAL (size) <= GET_MODE_MASK (mode)
	      && (insn_operand_predicate[(int) code][0] == 0
		  || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
	      && (insn_operand_predicate[(int) code][1] == 0
		  || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
	      && (insn_operand_predicate[(int) code][3] == 0
		  || (*insn_operand_predicate[(int) code][3]) (opalign,
							       VOIDmode)))
	    {
	      rtx op2;
	      rtx last = get_last_insn ();
	      rtx pat;

	      op2 = convert_to_mode (mode, size, 1);
	      if (insn_operand_predicate[(int) code][2] != 0
		  && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
		op2 = copy_to_mode_reg (mode, op2);

	      pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	      if (pat)
		{
		  emit_insn (pat);
		  return;
		}
	      else
		delete_insns_since (last);
	    }
	}

#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memcpy_libfunc, 0,
			 VOIDmode, 3, XEXP (x, 0), Pmode,
			 XEXP (y, 0), Pmode,
			 convert_to_mode (TYPE_MODE (sizetype), size,
					  TREE_UNSIGNED (sizetype)),
			 TYPE_MODE (sizetype));
#else
      emit_library_call (bcopy_libfunc, 0,
			 VOIDmode, 3, XEXP (y, 0), Pmode,
			 XEXP (x, 0), Pmode,
			 convert_to_mode (TYPE_MODE (sizetype), size,
					  TREE_UNSIGNED (sizetype)),
			 TYPE_MODE (sizetype));
#endif
    }
}
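
/* Typical call (illustrative): to copy an 8-byte, word-aligned BLKmode
   object SRC into DST,

     emit_block_move (dst, src, GEN_INT (8), UNITS_PER_WORD);

   With a constant size this small, the move_by_pieces path above is
   normally taken, provided it needs fewer than MOVE_RATIO insns.  */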

/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (regno, x, nregs, mode)
     int regno;
     rtx x;
     int nregs;
     enum machine_mode mode;
{
  int i;
  rtx pat, last;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx (REG, word_mode, regno), x,
			       GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx (REG, word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}

/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  SIZE indicates the number
   of bytes in the object X.  */

void
move_block_from_reg (regno, x, nregs, size)
     int regno;
     rtx x;
     int nregs;
     int size;
{
  int i;
  rtx pat, last;

  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
     to the left before storing to memory.  */
  if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
    {
      rtx tem = operand_subword (x, 0, 1, BLKmode);
      rtx shift;

      if (tem == 0)
	abort ();

      shift = expand_shift (LSHIFT_EXPR, word_mode,
			    gen_rtx (REG, word_mode, regno),
			    build_int_2 ((UNITS_PER_WORD - size)
					 * BITS_PER_UNIT, 0), NULL_RTX, 0);
      emit_move_insn (tem, shift);
      return;
    }

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      last = get_last_insn ();
      pat = gen_store_multiple (x, gen_rtx (REG, word_mode, regno),
				GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      if (tem == 0)
	abort ();

      emit_move_insn (tem, gen_rtx (REG, word_mode, regno + i));
    }
}

/* Add a USE expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
use_reg (call_fusage, reg)
     rtx *call_fusage, reg;
{
  if (GET_CODE (reg) != REG
      || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
    abort ();

  *call_fusage
    = gen_rtx (EXPR_LIST, VOIDmode,
	       gen_rtx (USE, VOIDmode, reg), *call_fusage);
}

/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
   starting at REGNO.  All of these registers must be hard registers.  */

void
use_regs (call_fusage, regno, nregs)
     rtx *call_fusage;
     int regno;
     int nregs;
{
  int i;

  if (regno + nregs > FIRST_PSEUDO_REGISTER)
    abort ();

  for (i = 0; i < nregs; i++)
    use_reg (call_fusage, gen_rtx (REG, reg_raw_mode[regno + i], regno + i));
}

/* Write zeros through the storage of OBJECT.
   If OBJECT has BLKmode, SIZE is its length in bytes.  */

void
clear_storage (object, size)
     rtx object;
     int size;
{
  if (GET_MODE (object) == BLKmode)
    {
#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memset_libfunc, 0,
			 VOIDmode, 3,
			 XEXP (object, 0), Pmode, const0_rtx, Pmode,
			 GEN_INT (size), Pmode);
#else
      emit_library_call (bzero_libfunc, 0,
			 VOIDmode, 2,
			 XEXP (object, 0), Pmode,
			 GEN_INT (size), Pmode);
#endif
    }
  else
    emit_move_insn (object, const0_rtx);
}
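
/* Illustrative use (not from this file): to zero a 16-byte BLKmode
   object whose MEM rtx is OBJ, a caller could write

     clear_storage (obj, 16);

   which becomes a memset/bzero library call; for a non-BLK object it
   is a single store of const0_rtx instead.  */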

/* Generate code to copy Y into X.
   Both Y and X must have the same mode, except that
   Y can be a constant with VOIDmode.
   This mode cannot be BLKmode; use emit_block_move for that.

   Return the last instruction emitted.  */

rtx
emit_move_insn (x, y)
     rtx x, y;
{
  enum machine_mode mode = GET_MODE (x);

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);

  if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
    abort ();

  if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
    y = force_const_mem (mode, y);

  /* If X or Y are memory references, verify that their addresses are valid
     for the machine.  */
  if (GET_CODE (x) == MEM
      && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
	   && ! push_operand (x, GET_MODE (x)))
	  || (flag_force_addr
	      && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
    x = change_address (x, VOIDmode, XEXP (x, 0));

  if (GET_CODE (y) == MEM
      && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
	  || (flag_force_addr
	      && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
    y = change_address (y, VOIDmode, XEXP (y, 0));

  if (mode == BLKmode)
    abort ();

  return emit_move_insn_1 (x, y);
}

/* Low level part of emit_move_insn.
   Called just like emit_move_insn, but assumes X and Y
   are basically valid.  */

rtx
emit_move_insn_1 (x, y)
     rtx x, y;
{
  enum machine_mode mode = GET_MODE (x);
  enum machine_mode submode;
  enum mode_class class = GET_MODE_CLASS (mode);
  int i;

  if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
    return
      emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));

  /* Expand complex moves by moving real part and imag part, if possible.  */
  else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
	   && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
						    * BITS_PER_UNIT),
						   (class == MODE_COMPLEX_INT
						    ? MODE_INT : MODE_FLOAT),
						   0))
	   && (mov_optab->handlers[(int) submode].insn_code
	       != CODE_FOR_nothing))
    {
      /* Don't split destination if it is a stack push.  */
      int stack = push_operand (x, GET_MODE (x));
      rtx insns;

      start_sequence ();

      /* If this is a stack, push the highpart first, so it
	 will be in the argument order.

	 In that case, change_address is used only to convert
	 the mode, not to change the address.  */
      if (stack)
	{
	  /* Note that the real part always precedes the imag part in memory
	     regardless of machine's endianness.  */
#ifdef STACK_GROWS_DOWNWARD
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_rtx (MEM, submode, (XEXP (x, 0))),
		      gen_imagpart (submode, y)));
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_rtx (MEM, submode, (XEXP (x, 0))),
		      gen_realpart (submode, y)));
#else
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_rtx (MEM, submode, (XEXP (x, 0))),
		      gen_realpart (submode, y)));
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_rtx (MEM, submode, (XEXP (x, 0))),
		      gen_imagpart (submode, y)));
#endif
	}
      else
	{
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_realpart (submode, x), gen_realpart (submode, y)));
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_imagpart (submode, x), gen_imagpart (submode, y)));
	}

      insns = get_insns ();
      end_sequence ();

      /* If X is a CONCAT, we got insns like RD = RS, ID = IS,
	 each with a separate pseudo as destination.
	 It's not correct for flow to treat them as a unit.  */
      if (GET_CODE (x) != CONCAT)
	emit_no_conflict_block (insns, x, y, NULL_RTX, NULL_RTX);
      else
	emit_insns (insns);

      return get_last_insn ();
    }

  /* This will handle any multi-word mode that lacks a move_insn pattern.
     However, you will get better code if you define such patterns,
     even if they must turn into multiple assembler instructions.  */
  else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
    {
      rtx last_insn = 0;
      rtx insns;

      start_sequence ();

      for (i = 0;
	   i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
	   i++)
	{
	  rtx xpart = operand_subword (x, i, 1, mode);
	  rtx ypart = operand_subword (y, i, 1, mode);

	  /* If we can't get a part of Y, put Y into memory if it is a
	     constant.  Otherwise, force it into a register.  If we still
	     can't get a part of Y, abort.  */
	  if (ypart == 0 && CONSTANT_P (y))
	    {
	      y = force_const_mem (mode, y);
	      ypart = operand_subword (y, i, 1, mode);
	    }
	  else if (ypart == 0)
	    ypart = operand_subword_force (y, i, mode);

	  if (xpart == 0 || ypart == 0)
	    abort ();

	  last_insn = emit_move_insn (xpart, ypart);
	}

      insns = get_insns ();
      end_sequence ();
      emit_no_conflict_block (insns, x, y, NULL_RTX, NULL_RTX);

      return last_insn;
    }
  else
    abort ();
}

/* Pushing data onto the stack.  */

/* Push a block of length SIZE (perhaps variable)
   and return an rtx to address the beginning of the block.
   Note that it is not possible for the value returned to be a QUEUED.
   The value may be virtual_outgoing_args_rtx.

   EXTRA is the number of bytes of padding to push in addition to SIZE.
   BELOW nonzero means this padding comes at low addresses;
   otherwise, the padding comes at high addresses.  */

rtx
push_block (size, extra, below)
     rtx size;
     int extra, below;
{
  register rtx temp;
  if (CONSTANT_P (size))
    anti_adjust_stack (plus_constant (size, extra));
  else if (GET_CODE (size) == REG && extra == 0)
    anti_adjust_stack (size);
  else
    {
      rtx temp = copy_to_mode_reg (Pmode, size);
      if (extra != 0)
	temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
			     temp, 0, OPTAB_LIB_WIDEN);
      anti_adjust_stack (temp);
    }

#ifdef STACK_GROWS_DOWNWARD
  temp = virtual_outgoing_args_rtx;
  if (extra != 0 && below)
    temp = plus_constant (temp, extra);
#else
  if (GET_CODE (size) == CONST_INT)
    temp = plus_constant (virtual_outgoing_args_rtx,
			  - INTVAL (size) - (below ? 0 : extra));
  else if (extra != 0 && !below)
    temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
		    negate_rtx (Pmode, plus_constant (size, extra)));
  else
    temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
		    negate_rtx (Pmode, size));
#endif

  return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
}

rtx
gen_push_operand ()
{
  return gen_rtx (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
}
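
/* Illustration: on a STACK_GROWS_DOWNWARD target, STACK_PUSH_CODE
   defaults to PRE_DEC (see the definition near the top of this file),
   so

     gen_rtx (MEM, mode, gen_push_operand ())

   yields (mem:MODE (pre_dec (reg sp))), the destination used to push
   one MODE-sized datum.  */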
1974 /* Generate code to push X onto the stack, assuming it has mode MODE and
1975 type TYPE.
1976 MODE is redundant except when X is a CONST_INT (since they don't
1977 carry mode info).
1978 SIZE is an rtx for the size of data to be copied (in bytes),
1979 needed only if X is BLKmode.
1981 ALIGN (in bytes) is maximum alignment we can assume.
1983 If PARTIAL and REG are both nonzero, then copy that many of the first
1984 words of X into registers starting with REG, and push the rest of X.
1985 The amount of space pushed is decreased by PARTIAL words,
1986 rounded *down* to a multiple of PARM_BOUNDARY.
1987 REG must be a hard register in this case.
1988 If REG is zero but PARTIAL is not, take all other actions for an
1989 argument partially in registers, but do not actually load any
1990 registers.
1992 EXTRA is the amount in bytes of extra space to leave next to this arg.
1993 This is ignored if an argument block has already been allocated.
1995 On a machine that lacks real push insns, ARGS_ADDR is the address of
1996 the bottom of the argument block for this call. We use indexing off there
1997 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
1998 argument block has not been preallocated.
2000 ARGS_SO_FAR is the size of args previously pushed for this call. */
2002 void
2003 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
2004 args_addr, args_so_far)
2005 register rtx x;
2006 enum machine_mode mode;
2007 tree type;
2008 rtx size;
2009 int align;
2010 int partial;
2011 rtx reg;
2012 int extra;
2013 rtx args_addr;
2014 rtx args_so_far;
2016 rtx xinner;
2017 enum direction stack_direction
2018 #ifdef STACK_GROWS_DOWNWARD
2019 = downward;
2020 #else
2021 = upward;
2022 #endif
2024 /* Decide where to pad the argument: `downward' for below,
2025 `upward' for above, or `none' for don't pad it.
2026 Default is below for small data on big-endian machines; else above. */
2027 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
2029 /* Invert direction if stack is post-update. */
2030 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
2031 if (where_pad != none)
2032 where_pad = (where_pad == downward ? upward : downward);
2034 xinner = x = protect_from_queue (x, 0);
2036 if (mode == BLKmode)
2038 /* Copy a block into the stack, entirely or partially. */
2040 register rtx temp;
2041 int used = partial * UNITS_PER_WORD;
2042 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
2043 int skip;
2045 if (size == 0)
2046 abort ();
2048 used -= offset;
2050 /* USED is now the # of bytes we need not copy to the stack
2051 because registers will take care of them. */
2053 if (partial != 0)
2054 xinner = change_address (xinner, BLKmode,
2055 plus_constant (XEXP (xinner, 0), used));
2057 /* If the partial register-part of the arg counts in its stack size,
2058 skip the part of stack space corresponding to the registers.
2059 Otherwise, start copying to the beginning of the stack space,
2060 by setting SKIP to 0. */
2061 #ifndef REG_PARM_STACK_SPACE
2062 skip = 0;
2063 #else
2064 skip = used;
2065 #endif
2067 #ifdef PUSH_ROUNDING
2068 /* Do it with several push insns if that doesn't take lots of insns
2069 and if there is no difficulty with push insns that skip bytes
2070 on the stack for alignment purposes. */
2071 if (args_addr == 0
2072 && GET_CODE (size) == CONST_INT
2073 && skip == 0
2074 && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
2075 < MOVE_RATIO)
2076 /* Here we avoid the case of a structure whose weak alignment
2077 forces many pushes of a small amount of data,
2078 and such small pushes do rounding that causes trouble. */
2079 && ((! STRICT_ALIGNMENT && ! SLOW_UNALIGNED_ACCESS)
2080 || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
2081 || PUSH_ROUNDING (align) == align)
2082 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
2084 /* Push padding now if padding above and stack grows down,
2085 or if padding below and stack grows up.
2086 But if space already allocated, this has already been done. */
2087 if (extra && args_addr == 0
2088 && where_pad != none && where_pad != stack_direction)
2089 anti_adjust_stack (GEN_INT (extra));
2091 move_by_pieces (gen_rtx (MEM, BLKmode, gen_push_operand ()), xinner,
2092 INTVAL (size) - used, align);
2094 else
2095 #endif /* PUSH_ROUNDING */
2097 /* Otherwise make space on the stack and copy the data
2098 to the address of that space. */
2100 /* Deduct words put into registers from the size we must copy. */
2101 if (partial != 0)
2103 if (GET_CODE (size) == CONST_INT)
2104 size = GEN_INT (INTVAL (size) - used);
2105 else
2106 size = expand_binop (GET_MODE (size), sub_optab, size,
2107 GEN_INT (used), NULL_RTX, 0,
2108 OPTAB_LIB_WIDEN);
2111 /* Get the address of the stack space.
2112 In this case, we do not deal with EXTRA separately.
2113 A single stack adjust will do. */
2114 if (! args_addr)
2116 temp = push_block (size, extra, where_pad == downward);
2117 extra = 0;
2119 else if (GET_CODE (args_so_far) == CONST_INT)
2120 temp = memory_address (BLKmode,
2121 plus_constant (args_addr,
2122 skip + INTVAL (args_so_far)));
2123 else
2124 temp = memory_address (BLKmode,
2125 plus_constant (gen_rtx (PLUS, Pmode,
2126 args_addr, args_so_far),
2127 skip));
2129 /* TEMP is the address of the block. Copy the data there. */
2130 if (GET_CODE (size) == CONST_INT
2131 && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
2132 < MOVE_RATIO))
2134 move_by_pieces (gen_rtx (MEM, BLKmode, temp), xinner,
2135 INTVAL (size), align);
2136 goto ret;
2138 /* Try the most limited insn first, because there's no point
2139 including more than one in the machine description unless
2140 the more limited one has some advantage. */
2141 #ifdef HAVE_movstrqi
2142 if (HAVE_movstrqi
2143 && GET_CODE (size) == CONST_INT
2144 && ((unsigned) INTVAL (size)
2145 < (1 << (GET_MODE_BITSIZE (QImode) - 1))))
2147 rtx pat = gen_movstrqi (gen_rtx (MEM, BLKmode, temp),
2148 xinner, size, GEN_INT (align));
2149 if (pat != 0)
2151 emit_insn (pat);
2152 goto ret;
2155 #endif
2156 #ifdef HAVE_movstrhi
2157 if (HAVE_movstrhi
2158 && GET_CODE (size) == CONST_INT
2159 && ((unsigned) INTVAL (size)
2160 < (1 << (GET_MODE_BITSIZE (HImode) - 1))))
2162 rtx pat = gen_movstrhi (gen_rtx (MEM, BLKmode, temp),
2163 xinner, size, GEN_INT (align));
2164 if (pat != 0)
2166 emit_insn (pat);
2167 goto ret;
2170 #endif
2171 #ifdef HAVE_movstrsi
2172 if (HAVE_movstrsi)
2174 rtx pat = gen_movstrsi (gen_rtx (MEM, BLKmode, temp),
2175 xinner, size, GEN_INT (align));
2176 if (pat != 0)
2178 emit_insn (pat);
2179 goto ret;
2182 #endif
2183 #ifdef HAVE_movstrdi
2184 if (HAVE_movstrdi)
2186 rtx pat = gen_movstrdi (gen_rtx (MEM, BLKmode, temp),
2187 xinner, size, GEN_INT (align));
2188 if (pat != 0)
2190 emit_insn (pat);
2191 goto ret;
2194 #endif
2196 #ifndef ACCUMULATE_OUTGOING_ARGS
2197 /* If the source is referenced relative to the stack pointer,
2198 copy it to another register to stabilize it. We do not need
2199 to do this if we know that we won't be changing sp. */
2201 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
2202 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
2203 temp = copy_to_reg (temp);
2204 #endif
2206 /* Make inhibit_defer_pop nonzero around the library call
2207 to force it to pop the bcopy-arguments right away. */
2208 NO_DEFER_POP;
2209 #ifdef TARGET_MEM_FUNCTIONS
2210 emit_library_call (memcpy_libfunc, 0,
2211 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
2212 convert_to_mode (TYPE_MODE (sizetype),
2213 size, TREE_UNSIGNED (sizetype)),
2214 TYPE_MODE (sizetype));
2215 #else
2216 emit_library_call (bcopy_libfunc, 0,
2217 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
2218 convert_to_mode (TYPE_MODE (sizetype),
2219 size, TREE_UNSIGNED (sizetype)),
2220 TYPE_MODE (sizetype));
2221 #endif
2222 OK_DEFER_POP;
2225 else if (partial > 0)
2227 /* Scalar partly in registers. */
2229 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
2230 int i;
2231 int not_stack;
2232 /* # words of start of argument
2233 that we must make space for but need not store. */
2234 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
2235 int args_offset = INTVAL (args_so_far);
2236 int skip;
2238 /* Push padding now if padding above and stack grows down,
2239 or if padding below and stack grows up.
2240 But if space already allocated, this has already been done. */
2241 if (extra && args_addr == 0
2242 && where_pad != none && where_pad != stack_direction)
2243 anti_adjust_stack (GEN_INT (extra));
2245 /* If we make space by pushing it, we might as well push
2246 the real data. Otherwise, we can leave OFFSET nonzero
2247 and leave the space uninitialized. */
2248 if (args_addr == 0)
2249 offset = 0;
2251 /* Now NOT_STACK gets the number of words that we don't need to
2252 allocate on the stack. */
2253 not_stack = partial - offset;
2255 /* If the partial register-part of the arg counts in its stack size,
2256 skip the part of stack space corresponding to the registers.
2257 Otherwise, start copying to the beginning of the stack space,
2258 by setting SKIP to 0. */
2259 #ifndef REG_PARM_STACK_SPACE
2260 skip = 0;
2261 #else
2262 skip = not_stack;
2263 #endif
2265 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
2266 x = validize_mem (force_const_mem (mode, x));
2268 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
2269 SUBREGs of such registers are not allowed. */
2270 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
2271 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
2272 x = copy_to_reg (x);
2274 /* Loop over all the words allocated on the stack for this arg. */
2275 /* We can do it by words, because any scalar bigger than a word
2276 has a size that is a multiple of a word. */
2277 #ifndef PUSH_ARGS_REVERSED
2278 for (i = not_stack; i < size; i++)
2279 #else
2280 for (i = size - 1; i >= not_stack; i--)
2281 #endif
2282 if (i >= not_stack + offset)
2283 emit_push_insn (operand_subword_force (x, i, mode),
2284 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
2285 0, args_addr,
2286 GEN_INT (args_offset + ((i - not_stack + skip)
2287 * UNITS_PER_WORD)));
2289 else
2291 rtx addr;
2293 /* Push padding now if padding above and stack grows down,
2294 or if padding below and stack grows up.
2295 But if space already allocated, this has already been done. */
2296 if (extra && args_addr == 0
2297 && where_pad != none && where_pad != stack_direction)
2298 anti_adjust_stack (GEN_INT (extra));
2300 #ifdef PUSH_ROUNDING
2301 if (args_addr == 0)
2302 addr = gen_push_operand ();
2303 else
2304 #endif
2305 if (GET_CODE (args_so_far) == CONST_INT)
2306 addr
2307 = memory_address (mode,
2308 plus_constant (args_addr, INTVAL (args_so_far)));
2309 else
2310 addr = memory_address (mode, gen_rtx (PLUS, Pmode, args_addr,
2311 args_so_far));
2313 emit_move_insn (gen_rtx (MEM, mode, addr), x);
2316 ret:
2317 /* If part should go in registers, copy that part
2318 into the appropriate registers. Do this now, at the end,
2319 since mem-to-mem copies above may do function calls. */
2320 if (partial > 0 && reg != 0)
2321 move_block_to_reg (REGNO (reg), x, partial, mode);
2323 if (extra && args_addr == 0 && where_pad == stack_direction)
2324 anti_adjust_stack (GEN_INT (extra));
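/* Editor's note: the register/stack bookkeeping used twice above,
   reduced to plain arithmetic.  PARTIAL words travel in registers, but
   only the portion rounded down to PARM_BOUNDARY shrinks what must be
   copied to the stack; the remainder (the OFFSET in the code above) is
   allocated but left to the register copy.  The two constants are
   assumptions for the sketch, not target values.  */

#define SKETCH_WORD_BYTES 4 /* assumed UNITS_PER_WORD */
#define SKETCH_PARM_BYTES 8 /* assumed PARM_BOUNDARY / BITS_PER_UNIT */

static int
bytes_covered_by_registers (int partial /* words passed in registers */)
{
  int used = partial * SKETCH_WORD_BYTES; /* bytes the registers hold */
  int offset = used % SKETCH_PARM_BYTES;  /* unrounded tail */

  return used - offset; /* bytes we need not copy to the stack */
}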
2327 /* Expand an assignment that stores the value of FROM into TO.
2328 If WANT_VALUE is nonzero, return an rtx for the value of TO.
2329 (This may contain a QUEUED rtx;
2330 if the value is constant, this rtx is a constant.)
2331 Otherwise, the returned value is NULL_RTX.
2333 SUGGEST_REG is no longer actually used.
2334 It used to mean, copy the value through a register
2335 and return that register, if that is possible.
2336 We now use WANT_VALUE to decide whether to do this. */
2339 expand_assignment (to, from, want_value, suggest_reg)
2340 tree to, from;
2341 int want_value;
2342 int suggest_reg;
2344 register rtx to_rtx = 0;
2345 rtx result;
2347 /* Don't crash if the lhs of the assignment was erroneous. */
2349 if (TREE_CODE (to) == ERROR_MARK)
2351 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
2352 return want_value ? result : NULL_RTX;
2355 if (output_bytecode)
2357 tree dest_innermost;
2359 bc_expand_expr (from);
2360 bc_emit_instruction (duplicate);
2362 dest_innermost = bc_expand_address (to);
2364 /* Can't deduce from TYPE that we're dealing with a bitfield, so
2365 take care of it here. */
2367 bc_store_memory (TREE_TYPE (to), dest_innermost);
2368 return NULL;
2371 /* Assignment of a structure component needs special treatment
2372 if the structure component's rtx is not simply a MEM.
2373 Assignment of an array element at a constant index, and assignment of
2374 an array element in an unaligned packed structure field, have the same
2375 problem. */
2377 if (TREE_CODE (to) == COMPONENT_REF
2378 || TREE_CODE (to) == BIT_FIELD_REF
2379 || (TREE_CODE (to) == ARRAY_REF
2380 && ((TREE_CODE (TREE_OPERAND (to, 1)) == INTEGER_CST
2381 && TREE_CODE (TYPE_SIZE (TREE_TYPE (to))) == INTEGER_CST)
2382 || (STRICT_ALIGNMENT && get_inner_unaligned_p (to)))))
2384 enum machine_mode mode1;
2385 int bitsize;
2386 int bitpos;
2387 tree offset;
2388 int unsignedp;
2389 int volatilep = 0;
2390 tree tem;
2391 int alignment;
2393 push_temp_slots ();
2394 tem = get_inner_reference (to, &bitsize, &bitpos, &offset,
2395 &mode1, &unsignedp, &volatilep);
2397 /* If we are going to use store_bit_field and extract_bit_field,
2398 make sure to_rtx will be safe for multiple use. */
2400 if (mode1 == VOIDmode && want_value)
2401 tem = stabilize_reference (tem);
2403 alignment = TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT;
2404 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
2405 if (offset != 0)
2407 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
2409 if (GET_CODE (to_rtx) != MEM)
2410 abort ();
2411 to_rtx = change_address (to_rtx, VOIDmode,
2412 gen_rtx (PLUS, Pmode, XEXP (to_rtx, 0),
2413 force_reg (Pmode, offset_rtx)));
2414 /* If we have a variable offset, the known alignment
2415 is only that of the innermost structure containing the field.
2416 (Actually, we could sometimes do better by using the
2417 align of an element of the innermost array, but no need.) */
2418 if (TREE_CODE (to) == COMPONENT_REF
2419 || TREE_CODE (to) == BIT_FIELD_REF)
2420 alignment
2421 = TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (to, 0))) / BITS_PER_UNIT;
2423 if (volatilep)
2425 if (GET_CODE (to_rtx) == MEM)
2426 MEM_VOLATILE_P (to_rtx) = 1;
2427 #if 0 /* This was turned off because, when a field is volatile
2428 in an object which is not volatile, the object may be in a register,
2429 and then we would abort over here. */
2430 else
2431 abort ();
2432 #endif
2435 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
2436 (want_value
2437 /* Spurious cast makes HPUX compiler happy. */
2438 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
2439 : VOIDmode),
2440 unsignedp,
2441 /* Required alignment of containing datum. */
2442 alignment,
2443 int_size_in_bytes (TREE_TYPE (tem)));
2444 preserve_temp_slots (result);
2445 free_temp_slots ();
2446 pop_temp_slots ();
2448 /* If the value is meaningful, convert RESULT to the proper mode.
2449 Otherwise, return nothing. */
2450 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
2451 TYPE_MODE (TREE_TYPE (from)),
2452 result,
2453 TREE_UNSIGNED (TREE_TYPE (to)))
2454 : NULL_RTX);
2457 /* If the rhs is a function call and its value is not an aggregate,
2458 call the function before we start to compute the lhs.
2459 This is needed for correct code for cases such as
2460 val = setjmp (buf) on machines where reference to val
2461 requires loading up part of an address in a separate insn.
2463 Don't do this if TO is a VAR_DECL whose DECL_RTL is REG since it might be
2464 a promoted variable where the zero- or sign- extension needs to be done.
2465 Handling this in the normal way is safe because no computation is done
2466 before the call. */
2467 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
2468 && ! (TREE_CODE (to) == VAR_DECL && GET_CODE (DECL_RTL (to)) == REG))
2470 rtx value;
2472 push_temp_slots ();
2473 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
2474 if (to_rtx == 0)
2475 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
2476 emit_move_insn (to_rtx, value);
2477 preserve_temp_slots (to_rtx);
2478 free_temp_slots ();
2479 pop_temp_slots ();
2480 return want_value ? to_rtx : NULL_RTX;
2483 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
2484 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
2486 if (to_rtx == 0)
2487 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
2489 /* Don't move directly into a return register. */
2490 if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
2492 rtx temp;
2494 push_temp_slots ();
2495 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
2496 emit_move_insn (to_rtx, temp);
2497 preserve_temp_slots (to_rtx);
2498 free_temp_slots ();
2499 pop_temp_slots ();
2500 return want_value ? to_rtx : NULL_RTX;
2503 /* In case we are returning the contents of an object which overlaps
2504 the place the value is being stored, use a safe function when copying
2505 a value through a pointer into a structure value return block. */
2506 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
2507 && current_function_returns_struct
2508 && !current_function_returns_pcc_struct)
2510 rtx from_rtx, size;
2512 push_temp_slots ();
2513 size = expr_size (from);
2514 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
2516 #ifdef TARGET_MEM_FUNCTIONS
2517 emit_library_call (memcpy_libfunc, 0,
2518 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
2519 XEXP (from_rtx, 0), Pmode,
2520 convert_to_mode (TYPE_MODE (sizetype),
2521 size, TREE_UNSIGNED (sizetype)),
2522 TYPE_MODE (sizetype));
2523 #else
2524 emit_library_call (bcopy_libfunc, 0,
2525 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
2526 XEXP (to_rtx, 0), Pmode,
2527 convert_to_mode (TYPE_MODE (sizetype),
2528 size, TREE_UNSIGNED (sizetype)),
2529 TYPE_MODE (sizetype));
2530 #endif
2532 preserve_temp_slots (to_rtx);
2533 free_temp_slots ();
2534 pop_temp_slots ();
2535 return want_value ? to_rtx : NULL_RTX;
2538 /* Compute FROM and store the value in the rtx we got. */
2540 push_temp_slots ();
2541 result = store_expr (from, to_rtx, want_value);
2542 preserve_temp_slots (result);
2543 free_temp_slots ();
2544 pop_temp_slots ();
2545 return want_value ? result : NULL_RTX;
2548 /* Generate code for computing expression EXP,
2549 and storing the value into TARGET.
2550 TARGET may contain a QUEUED rtx.
2552 If WANT_VALUE is nonzero, return a copy of the value
2553 not in TARGET, so that we can be sure to use the proper
2554 value in a containing expression even if TARGET has something
2555 else stored in it. If possible, we copy the value through a pseudo
2556 and return that pseudo. Or, if the value is constant, we try to
2557 return the constant. In some cases, we return a pseudo
2558 copied *from* TARGET.
2560 If the mode is BLKmode then we may return TARGET itself.
2561 It turns out that in BLKmode it doesn't cause a problem,
2562 because C has no operators that could combine two different
2563 assignments into the same BLKmode object with different values
2564 with no sequence point. Will other languages need this to
2565 be more thorough?
2567 If WANT_VALUE is 0, we return NULL, to make sure
2568 to catch quickly any cases where the caller uses the value
2569 and fails to set WANT_VALUE. */
2572 store_expr (exp, target, want_value)
2573 register tree exp;
2574 register rtx target;
2575 int want_value;
2577 register rtx temp;
2578 int dont_return_target = 0;
2580 if (TREE_CODE (exp) == COMPOUND_EXPR)
2582 /* Perform first part of compound expression, then assign from second
2583 part. */
2584 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
2585 emit_queue ();
2586 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
2588 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
2590 /* For a conditional expression, get a safe form of the target. Then
2591 test the condition, doing the appropriate assignment on either
2592 side. This avoids the creation of unnecessary temporaries.
2593 For non-BLKmode, it is more efficient not to do this. */
2595 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
2597 emit_queue ();
2598 target = protect_from_queue (target, 1);
2600 NO_DEFER_POP;
2601 jumpifnot (TREE_OPERAND (exp, 0), lab1);
2602 store_expr (TREE_OPERAND (exp, 1), target, 0);
2603 emit_queue ();
2604 emit_jump_insn (gen_jump (lab2));
2605 emit_barrier ();
2606 emit_label (lab1);
2607 store_expr (TREE_OPERAND (exp, 2), target, 0);
2608 emit_queue ();
2609 emit_label (lab2);
2610 OK_DEFER_POP;
2611 return want_value ? target : NULL_RTX;
2613 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
2614 && GET_MODE (target) != BLKmode)
2615 /* If target is in memory and caller wants value in a register instead,
2616 arrange that. Pass TARGET as target for expand_expr so that,
2617 if EXP is another assignment, WANT_VALUE will be nonzero for it.
2618 We know expand_expr will not use the target in that case.
2619 Don't do this if TARGET is volatile because we are supposed
2620 to write it and then read it. */
2622 temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
2623 GET_MODE (target), 0);
2624 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
2625 temp = copy_to_reg (temp);
2626 dont_return_target = 1;
2628 else if (queued_subexp_p (target))
2629 /* If target contains a postincrement, let's not risk
2630 using it as the place to generate the rhs. */
2632 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
2634 /* Expand EXP into a new pseudo. */
2635 temp = gen_reg_rtx (GET_MODE (target));
2636 temp = expand_expr (exp, temp, GET_MODE (target), 0);
2638 else
2639 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
2641 /* If target is volatile, ANSI requires accessing the value
2642 *from* the target, if it is accessed. So make that happen.
2643 In no case return the target itself. */
2644 if (! MEM_VOLATILE_P (target) && want_value)
2645 dont_return_target = 1;
2647 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
2648 /* If this is a scalar in a register that is stored in a wider mode
2649 than the declared mode, compute the result into its declared mode
2650 and then convert to the wider mode. Our value is the computed
2651 expression. */
2653 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
2655 /* If TEMP is a volatile MEM and we want a result value, make
2656 the access now so it gets done only once. */
2657 if (GET_CODE (temp) == MEM && MEM_VOLATILE_P (temp))
2658 temp = copy_to_reg (temp);
2660 /* If TEMP is a VOIDmode constant, use convert_modes to make
2661 sure that we properly convert it. */
2662 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
2663 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
2664 TYPE_MODE (TREE_TYPE (exp)), temp,
2665 SUBREG_PROMOTED_UNSIGNED_P (target));
2667 convert_move (SUBREG_REG (target), temp,
2668 SUBREG_PROMOTED_UNSIGNED_P (target));
2669 return want_value ? temp : NULL_RTX;
2671 else
2673 temp = expand_expr (exp, target, GET_MODE (target), 0);
2674 /* Return TARGET if it's a specified hardware register.
2675 If TARGET is a volatile mem ref, either return TARGET
2676 or return a reg copied *from* TARGET; ANSI requires this.
2678 Otherwise, if TEMP is not TARGET, return TEMP
2679 if it is constant (for efficiency),
2680 or if we really want the correct value. */
2681 if (!(target && GET_CODE (target) == REG
2682 && REGNO (target) < FIRST_PSEUDO_REGISTER)
2683 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
2684 && temp != target
2685 && (CONSTANT_P (temp) || want_value))
2686 dont_return_target = 1;
2689 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
2690 the same as that of TARGET, adjust the constant. This is needed, for
2691 example, in case it is a CONST_DOUBLE and we want only a word-sized
2692 value. */
2693 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
2694 && TREE_CODE (exp) != ERROR_MARK
2695 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
2696 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
2697 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
2699 /* If value was not generated in the target, store it there.
2700 Convert the value to TARGET's type first if necessary. */
2702 if (temp != target && TREE_CODE (exp) != ERROR_MARK)
2704 target = protect_from_queue (target, 1);
2705 if (GET_MODE (temp) != GET_MODE (target)
2706 && GET_MODE (temp) != VOIDmode)
2708 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
2709 if (dont_return_target)
2711 /* In this case, we will return TEMP,
2712 so make sure it has the proper mode.
2713 But don't forget to store the value into TARGET. */
2714 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
2715 emit_move_insn (target, temp);
2717 else
2718 convert_move (target, temp, unsignedp);
2721 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
2723 /* Handle copying a string constant into an array.
2724 The string constant may be shorter than the array.
2725 So copy just the string's actual length, and clear the rest. */
2726 rtx size;
2728 /* Get the size of the data type of the string,
2729 which is actually the size of the target. */
2730 size = expr_size (exp);
2731 if (GET_CODE (size) == CONST_INT
2732 && INTVAL (size) < TREE_STRING_LENGTH (exp))
2733 emit_block_move (target, temp, size,
2734 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2735 else
2737 /* Compute the size of the data to copy from the string. */
2738 tree copy_size
2739 = size_binop (MIN_EXPR,
2740 make_tree (sizetype, size),
2741 convert (sizetype,
2742 build_int_2 (TREE_STRING_LENGTH (exp), 0)));
2743 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
2744 VOIDmode, 0);
2745 rtx label = 0;
2747 /* Copy that much. */
2748 emit_block_move (target, temp, copy_size_rtx,
2749 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2751 /* Figure out how much is left in TARGET
2752 that we have to clear. */
2753 if (GET_CODE (copy_size_rtx) == CONST_INT)
2755 temp = plus_constant (XEXP (target, 0),
2756 TREE_STRING_LENGTH (exp));
2757 size = plus_constant (size,
2758 - TREE_STRING_LENGTH (exp));
2760 else
2762 enum machine_mode size_mode = Pmode;
2764 temp = force_reg (Pmode, XEXP (target, 0));
2765 temp = expand_binop (size_mode, add_optab, temp,
2766 copy_size_rtx, NULL_RTX, 0,
2767 OPTAB_LIB_WIDEN);
2769 size = expand_binop (size_mode, sub_optab, size,
2770 copy_size_rtx, NULL_RTX, 0,
2771 OPTAB_LIB_WIDEN);
2773 emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
2774 GET_MODE (size), 0, 0);
2775 label = gen_label_rtx ();
2776 emit_jump_insn (gen_blt (label));
2779 if (size != const0_rtx)
2781 #ifdef TARGET_MEM_FUNCTIONS
2782 emit_library_call (memset_libfunc, 0, VOIDmode, 3,
2783 temp, Pmode, const0_rtx, Pmode, size, Pmode);
2784 #else
2785 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
2786 temp, Pmode, size, Pmode);
2787 #endif
2789 if (label)
2790 emit_label (label);
2793 else if (GET_MODE (temp) == BLKmode)
2794 emit_block_move (target, temp, expr_size (exp),
2795 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2796 else
2797 emit_move_insn (target, temp);
2800 /* If we don't want a value, return NULL_RTX. */
2801 if (! want_value)
2802 return NULL_RTX;
2804 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
2805 ??? The latter test doesn't seem to make sense. */
2806 else if (dont_return_target && GET_CODE (temp) != MEM)
2807 return temp;
2809 /* Return TARGET itself if it is a hard register. */
2810 else if (want_value && GET_MODE (target) != BLKmode
2811 && ! (GET_CODE (target) == REG
2812 && REGNO (target) < FIRST_PSEUDO_REGISTER))
2813 return copy_to_reg (target);
2815 else
2816 return target;
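/* Editor's note: a libc-level sketch of the STRING_CST branch above:
   copy only the string's actual length into the possibly longer array,
   then clear the remainder, mirroring the emit_block_move/memset pair.
   Names and the direct use of memset are illustrative; the compiler
   emits the equivalent RTL or a library call.  */

#include <string.h>

static void
store_string_into_array (char *target, size_t target_size,
                         const char *str, size_t str_len)
{
  size_t copy = str_len < target_size ? str_len : target_size;

  memcpy (target, str, copy); /* copy that much... */
  if (copy < target_size)
    memset (target + copy, 0, target_size - copy); /* ...clear the rest */
}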
2819 /* Store the value of constructor EXP into the rtx TARGET.
2820 TARGET is either a REG or a MEM. */
2822 static void
2823 store_constructor (exp, target)
2824 tree exp;
2825 rtx target;
2827 tree type = TREE_TYPE (exp);
2829 /* We know our target cannot conflict, since safe_from_p has been called. */
2830 #if 0
2831 /* Don't try copying piece by piece into a hard register
2832 since that is vulnerable to being clobbered by EXP.
2833 Instead, construct in a pseudo register and then copy it all. */
2834 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
2836 rtx temp = gen_reg_rtx (GET_MODE (target));
2837 store_constructor (exp, temp);
2838 emit_move_insn (target, temp);
2839 return;
2841 #endif
2843 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
2844 || TREE_CODE (type) == QUAL_UNION_TYPE)
2846 register tree elt;
2848 /* Inform later passes that the whole union value is dead. */
2849 if (TREE_CODE (type) == UNION_TYPE
2850 || TREE_CODE (type) == QUAL_UNION_TYPE)
2851 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2853 /* If we are building a static constructor into a register,
2854 set the initial value as zero so we can fold the value into
2855 a constant. */
2856 else if (GET_CODE (target) == REG && TREE_STATIC (exp))
2857 emit_move_insn (target, const0_rtx);
2859 /* If the constructor has fewer fields than the structure,
2860 clear the whole structure first. */
2861 else if (list_length (CONSTRUCTOR_ELTS (exp))
2862 != list_length (TYPE_FIELDS (type)))
2863 clear_storage (target, int_size_in_bytes (type));
2864 else
2865 /* Inform later passes that the old value is dead. */
2866 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2868 /* Store each element of the constructor into
2869 the corresponding field of TARGET. */
2871 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
2873 register tree field = TREE_PURPOSE (elt);
2874 register enum machine_mode mode;
2875 int bitsize;
2876 int bitpos = 0;
2877 int unsignedp;
2878 tree pos, constant = 0, offset = 0;
2879 rtx to_rtx = target;
2881 /* Just ignore missing fields.
2882 We cleared the whole structure, above,
2883 if any fields are missing. */
2884 if (field == 0)
2885 continue;
2887 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
2888 unsignedp = TREE_UNSIGNED (field);
2889 mode = DECL_MODE (field);
2890 if (DECL_BIT_FIELD (field))
2891 mode = VOIDmode;
2893 pos = DECL_FIELD_BITPOS (field);
2894 if (TREE_CODE (pos) == INTEGER_CST)
2895 constant = pos;
2896 else if (TREE_CODE (pos) == PLUS_EXPR
2897 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
2898 constant = TREE_OPERAND (pos, 1), offset = TREE_OPERAND (pos, 0);
2899 else
2900 offset = pos;
2902 if (constant)
2903 bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));
2905 if (offset)
2907 rtx offset_rtx;
2909 if (contains_placeholder_p (offset))
2910 offset = build (WITH_RECORD_EXPR, sizetype,
2911 offset, exp);
2913 offset = size_binop (FLOOR_DIV_EXPR, offset,
2914 size_int (BITS_PER_UNIT));
2916 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
2917 if (GET_CODE (to_rtx) != MEM)
2918 abort ();
2920 to_rtx
2921 = change_address (to_rtx, VOIDmode,
2922 gen_rtx (PLUS, Pmode, XEXP (to_rtx, 0),
2923 force_reg (Pmode, offset_rtx)));
2926 store_field (to_rtx, bitsize, bitpos, mode, TREE_VALUE (elt),
2927 /* The alignment of TARGET is
2928 at least what its type requires. */
2929 VOIDmode, 0,
2930 TYPE_ALIGN (type) / BITS_PER_UNIT,
2931 int_size_in_bytes (type));
2934 else if (TREE_CODE (type) == ARRAY_TYPE)
2936 register tree elt;
2937 register int i;
2938 tree domain = TYPE_DOMAIN (type);
2939 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
2940 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
2941 tree elttype = TREE_TYPE (type);
2943 /* If the constructor has fewer fields than the structure,
2944 clear the whole structure first. Similarly if this is a
2945 static constructor of a non-BLKmode object. */
2947 if (list_length (CONSTRUCTOR_ELTS (exp)) < maxelt - minelt + 1
2948 || (GET_CODE (target) == REG && TREE_STATIC (exp)))
2949 clear_storage (target, int_size_in_bytes (type));
2950 else
2951 /* Inform later passes that the old value is dead. */
2952 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2954 /* Store each element of the constructor into
2955 the corresponding element of TARGET, determined
2956 by counting the elements. */
2957 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
2958 elt;
2959 elt = TREE_CHAIN (elt), i++)
2961 register enum machine_mode mode;
2962 int bitsize;
2963 int bitpos;
2964 int unsignedp;
2965 tree index = TREE_PURPOSE (elt);
2966 rtx xtarget = target;
2968 mode = TYPE_MODE (elttype);
2969 bitsize = GET_MODE_BITSIZE (mode);
2970 unsignedp = TREE_UNSIGNED (elttype);
2972 if (index != 0 && TREE_CODE (index) != INTEGER_CST)
2974 /* We don't currently allow variable indices in a
2975 C initializer, but let's try here to support them. */
2976 rtx pos_rtx, addr, xtarget;
2977 tree position;
2979 position = size_binop (MULT_EXPR, index, TYPE_SIZE (elttype));
2980 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
2981 addr = gen_rtx (PLUS, Pmode, XEXP (target, 0), pos_rtx);
2982 xtarget = change_address (target, mode, addr);
2983 store_expr (TREE_VALUE (elt), xtarget, 0);
2985 else
2987 if (index != 0)
2988 bitpos = ((TREE_INT_CST_LOW (index) - minelt)
2989 * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
2990 else
2991 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
2993 store_field (xtarget, bitsize, bitpos, mode, TREE_VALUE (elt),
2994 /* The alignment of TARGET is
2995 at least what its type requires. */
2996 VOIDmode, 0,
2997 TYPE_ALIGN (type) / BITS_PER_UNIT,
2998 int_size_in_bytes (type));
3003 else
3004 abort ();
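/* Editor's note: the array-element placement above, reduced to its
   arithmetic.  An element with an explicit constant index IDX lands
   (IDX - MINELT) * element-size bits into the target; an element with
   no index uses its running position POS_I instead, exactly as the two
   bitpos computations above do.  A constant element size is assumed.  */

static long
array_elt_bitpos (int has_index, long idx, long pos_i,
                  long minelt, long elt_bits)
{
  return (has_index ? idx - minelt : pos_i) * elt_bits;
}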
3007 /* Store the value of EXP (an expression tree)
3008 into a subfield of TARGET which has mode MODE and occupies
3009 BITSIZE bits, starting BITPOS bits from the start of TARGET.
3010 If MODE is VOIDmode, it means that we are storing into a bit-field.
3012 If VALUE_MODE is VOIDmode, return nothing in particular.
3013 UNSIGNEDP is not used in this case.
3015 Otherwise, return an rtx for the value stored. This rtx
3016 has mode VALUE_MODE if that is convenient to do.
3017 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
3019 ALIGN is the alignment that TARGET is known to have, measured in bytes.
3020 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying. */
3022 static rtx
3023 store_field (target, bitsize, bitpos, mode, exp, value_mode,
3024 unsignedp, align, total_size)
3025 rtx target;
3026 int bitsize, bitpos;
3027 enum machine_mode mode;
3028 tree exp;
3029 enum machine_mode value_mode;
3030 int unsignedp;
3031 int align;
3032 int total_size;
3034 HOST_WIDE_INT width_mask = 0;
3036 if (bitsize < HOST_BITS_PER_WIDE_INT)
3037 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
3039 /* If we are storing into an unaligned field of an aligned union that is
3040 in a register, we may have the mode of TARGET being an integer mode but
3041 MODE == BLKmode. In that case, get an aligned object whose size and
3042 alignment are the same as TARGET and store TARGET into it (we can avoid
3043 the store if the field being stored is the entire width of TARGET). Then
3044 call ourselves recursively to store the field into a BLKmode version of
3045 that object. Finally, load from the object into TARGET. This is not
3046 very efficient in general, but should only be slightly more expensive
3047 than the otherwise-required unaligned accesses. Perhaps this can be
3048 cleaned up later. */
3050 if (mode == BLKmode
3051 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
3053 rtx object = assign_stack_temp (GET_MODE (target),
3054 GET_MODE_SIZE (GET_MODE (target)), 0);
3055 rtx blk_object = copy_rtx (object);
3057 PUT_MODE (blk_object, BLKmode);
3059 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
3060 emit_move_insn (object, target);
3062 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
3063 align, total_size);
3065 /* Even though we aren't returning target, we need to
3066 give it the updated value. */
3067 emit_move_insn (target, object);
3069 return blk_object;
3072 /* If the structure is in a register or if the component
3073 is a bit field, we cannot use addressing to access it.
3074 Use bit-field techniques or SUBREG to store in it. */
3076 if (mode == VOIDmode
3077 || (mode != BLKmode && ! direct_store[(int) mode])
3078 || GET_CODE (target) == REG
3079 || GET_CODE (target) == SUBREG
3080 /* If the field isn't aligned enough to store as an ordinary memref,
3081 store it as a bit field. */
3082 || (STRICT_ALIGNMENT
3083 && align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode))
3084 || (STRICT_ALIGNMENT && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
3086 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
3088 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
3089 MODE. */
3090 if (mode != VOIDmode && mode != BLKmode
3091 && mode != TYPE_MODE (TREE_TYPE (exp)))
3092 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
3094 /* Store the value in the bitfield. */
3095 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
3096 if (value_mode != VOIDmode)
3098 /* The caller wants an rtx for the value. */
3099 /* If possible, avoid refetching from the bitfield itself. */
3100 if (width_mask != 0
3101 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
3103 tree count;
3104 enum machine_mode tmode;
3106 if (unsignedp)
3107 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
3108 tmode = GET_MODE (temp);
3109 if (tmode == VOIDmode)
3110 tmode = value_mode;
3111 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
3112 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
3113 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
3115 return extract_bit_field (target, bitsize, bitpos, unsignedp,
3116 NULL_RTX, value_mode, 0, align,
3117 total_size);
3119 return const0_rtx;
3121 else
3123 rtx addr = XEXP (target, 0);
3124 rtx to_rtx;
3126 /* If a value is wanted, it must be the lhs;
3127 so make the address stable for multiple use. */
3129 if (value_mode != VOIDmode && GET_CODE (addr) != REG
3130 && ! CONSTANT_ADDRESS_P (addr)
3131 /* A frame-pointer reference is already stable. */
3132 && ! (GET_CODE (addr) == PLUS
3133 && GET_CODE (XEXP (addr, 1)) == CONST_INT
3134 && (XEXP (addr, 0) == virtual_incoming_args_rtx
3135 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
3136 addr = copy_to_reg (addr);
3138 /* Now build a reference to just the desired component. */
3140 to_rtx = change_address (target, mode,
3141 plus_constant (addr, (bitpos / BITS_PER_UNIT)));
3142 MEM_IN_STRUCT_P (to_rtx) = 1;
3144 return store_expr (exp, to_rtx, value_mode != VOIDmode);
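/* Editor's note: a plain-C sketch of how the just-stored bit-field
   value is recovered above without refetching memory: a mask for
   unsigned fields, or a left/right shift pair that sign-extends signed
   ones.  Assumes BITSIZE is smaller than the word and that >> on a
   signed long is arithmetic, as the RSHIFT_EXPR above is.  */

static long
bitfield_value (long temp, int bitsize, int unsignedp)
{
  long width_mask = ((long) 1 << bitsize) - 1;
  int count = (int) (sizeof (long) * 8) - bitsize;

  if (unsignedp)
    return temp & width_mask; /* the expand_and path */

  return (temp << count) >> count; /* LSHIFT then RSHIFT sign-extends */
}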
3148 /* Return true if any object containing the innermost array is an unaligned
3149 packed structure field. */
3151 static int
3152 get_inner_unaligned_p (exp)
3153 tree exp;
3155 int needed_alignment = TYPE_ALIGN (TREE_TYPE (exp));
3157 while (1)
3159 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
3161 if (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
3162 < needed_alignment)
3163 return 1;
3165 else if (TREE_CODE (exp) != ARRAY_REF
3166 && TREE_CODE (exp) != NON_LVALUE_EXPR
3167 && ! ((TREE_CODE (exp) == NOP_EXPR
3168 || TREE_CODE (exp) == CONVERT_EXPR)
3169 && (TYPE_MODE (TREE_TYPE (exp))
3170 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
3171 break;
3173 exp = TREE_OPERAND (exp, 0);
3176 return 0;
3179 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
3180 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
3181 ARRAY_REFs and find the ultimate containing object, which we return.
3183 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
3184 bit position, and *PUNSIGNEDP to the signedness of the field.
3185 If the position of the field is variable, we store a tree
3186 giving the variable offset (in units) in *POFFSET.
3187 This offset is in addition to the bit position.
3188 If the position is not variable, we store 0 in *POFFSET.
3190 If any of the extraction expressions is volatile,
3191 we store 1 in *PVOLATILEP. Otherwise we don't change that.
3193 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
3194 is a mode that can be used to access the field. In that case, *PBITSIZE
3195 is redundant.
3197 If the field describes a variable-sized object, *PMODE is set to
3198 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
3199 this case, but the address of the object can be found. */
3201 tree
3202 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
3203 punsignedp, pvolatilep)
3204 tree exp;
3205 int *pbitsize;
3206 int *pbitpos;
3207 tree *poffset;
3208 enum machine_mode *pmode;
3209 int *punsignedp;
3210 int *pvolatilep;
3212 tree orig_exp = exp;
3213 tree size_tree = 0;
3214 enum machine_mode mode = VOIDmode;
3215 tree offset = integer_zero_node;
3217 if (TREE_CODE (exp) == COMPONENT_REF)
3219 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
3220 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
3221 mode = DECL_MODE (TREE_OPERAND (exp, 1));
3222 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
3224 else if (TREE_CODE (exp) == BIT_FIELD_REF)
3226 size_tree = TREE_OPERAND (exp, 1);
3227 *punsignedp = TREE_UNSIGNED (exp);
3229 else
3231 mode = TYPE_MODE (TREE_TYPE (exp));
3232 *pbitsize = GET_MODE_BITSIZE (mode);
3233 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3236 if (size_tree)
3238 if (TREE_CODE (size_tree) != INTEGER_CST)
3239 mode = BLKmode, *pbitsize = -1;
3240 else
3241 *pbitsize = TREE_INT_CST_LOW (size_tree);
3244 /* Compute cumulative bit-offset for nested component-refs and array-refs,
3245 and find the ultimate containing object. */
3247 *pbitpos = 0;
3249 while (1)
3251 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
3253 tree pos = (TREE_CODE (exp) == COMPONENT_REF
3254 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
3255 : TREE_OPERAND (exp, 2));
3257 /* If this field hasn't been filled in yet, don't go
3258 past it. This should only happen when folding expressions
3259 made during type construction. */
3260 if (pos == 0)
3261 break;
3263 if (TREE_CODE (pos) == PLUS_EXPR)
3265 tree constant, var;
3266 if (TREE_CODE (TREE_OPERAND (pos, 0)) == INTEGER_CST)
3268 constant = TREE_OPERAND (pos, 0);
3269 var = TREE_OPERAND (pos, 1);
3271 else if (TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
3273 constant = TREE_OPERAND (pos, 1);
3274 var = TREE_OPERAND (pos, 0);
3276 else
3277 abort ();
3279 *pbitpos += TREE_INT_CST_LOW (constant);
3280 offset = size_binop (PLUS_EXPR, offset,
3281 size_binop (FLOOR_DIV_EXPR, var,
3282 size_int (BITS_PER_UNIT)));
3284 else if (TREE_CODE (pos) == INTEGER_CST)
3285 *pbitpos += TREE_INT_CST_LOW (pos);
3286 else
3288 /* Assume here that the offset is a multiple of a unit.
3289 If not, there should be an explicitly added constant. */
3290 offset = size_binop (PLUS_EXPR, offset,
3291 size_binop (FLOOR_DIV_EXPR, pos,
3292 size_int (BITS_PER_UNIT)));
3296 else if (TREE_CODE (exp) == ARRAY_REF)
3298 /* This code is based on the code in case ARRAY_REF in expand_expr
3299 below. We assume here that the size of an array element is
3300 always an integral multiple of BITS_PER_UNIT. */
3302 tree index = TREE_OPERAND (exp, 1);
3303 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
3304 tree low_bound
3305 = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
3306 tree index_type = TREE_TYPE (index);
3308 if (! integer_zerop (low_bound))
3309 index = fold (build (MINUS_EXPR, index_type, index, low_bound));
3311 if (TYPE_PRECISION (index_type) != POINTER_SIZE)
3313 index = convert (type_for_size (POINTER_SIZE, 0), index);
3314 index_type = TREE_TYPE (index);
3317 index = fold (build (MULT_EXPR, index_type, index,
3318 TYPE_SIZE (TREE_TYPE (exp))));
3320 if (TREE_CODE (index) == INTEGER_CST
3321 && TREE_INT_CST_HIGH (index) == 0)
3322 *pbitpos += TREE_INT_CST_LOW (index);
3323 else
3324 offset = size_binop (PLUS_EXPR, offset,
3325 size_binop (FLOOR_DIV_EXPR, index,
3326 size_int (BITS_PER_UNIT)));
3328 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
3329 && ! ((TREE_CODE (exp) == NOP_EXPR
3330 || TREE_CODE (exp) == CONVERT_EXPR)
3331 && (TYPE_MODE (TREE_TYPE (exp))
3332 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
3333 break;
3335 /* If any reference in the chain is volatile, the effect is volatile. */
3336 if (TREE_THIS_VOLATILE (exp))
3337 *pvolatilep = 1;
3338 exp = TREE_OPERAND (exp, 0);
3341 /* If this was a bit-field, see if there is a mode that allows direct
3342 access in case EXP is in memory. */
3343 if (mode == VOIDmode && *pbitsize != 0 && *pbitpos % *pbitsize == 0)
3345 mode = mode_for_size (*pbitsize, MODE_INT, 0);
3346 if (mode == BLKmode)
3347 mode = VOIDmode;
3350 if (integer_zerop (offset))
3351 offset = 0;
3353 if (offset != 0 && contains_placeholder_p (offset))
3354 offset = build (WITH_RECORD_EXPR, sizetype, offset, orig_exp);
3356 *pmode = mode;
3357 *poffset = offset;
3358 return exp;
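/* Editor's note: the constant/variable split above in miniature.  A
   constant index (or field position) contributes directly to the bit
   position; a variable one is folded into the byte offset with a floor
   division by BITS_PER_UNIT, 8 in this sketch.  Names are illustrative.  */

static void
add_position (int is_constant, long pos_bits,
              long *pbitpos, long *poffset_bytes)
{
  if (is_constant)
    *pbitpos += pos_bits; /* exact, stays in bits */
  else
    *poffset_bytes += pos_bits / 8; /* the FLOOR_DIV_EXPR by a unit */
}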
3361 /* Given an rtx VALUE that may contain additions and multiplications,
3362 return an equivalent value that just refers to a register or memory.
3363 This is done by generating instructions to perform the arithmetic
3364 and returning a pseudo-register containing the value.
3366 The returned value may be a REG, SUBREG, MEM or constant. */
3369 force_operand (value, target)
3370 rtx value, target;
3372 register optab binoptab = 0;
3373 /* Use a temporary to force order of execution of calls to
3374 `force_operand'. */
3375 rtx tmp;
3376 register rtx op2;
3377 /* Use subtarget as the target for operand 0 of a binary operation. */
3378 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
3380 if (GET_CODE (value) == PLUS)
3381 binoptab = add_optab;
3382 else if (GET_CODE (value) == MINUS)
3383 binoptab = sub_optab;
3384 else if (GET_CODE (value) == MULT)
3386 op2 = XEXP (value, 1);
3387 if (!CONSTANT_P (op2)
3388 && !(GET_CODE (op2) == REG && op2 != subtarget))
3389 subtarget = 0;
3390 tmp = force_operand (XEXP (value, 0), subtarget);
3391 return expand_mult (GET_MODE (value), tmp,
3392 force_operand (op2, NULL_RTX),
3393 target, 0);
3396 if (binoptab)
3398 op2 = XEXP (value, 1);
3399 if (!CONSTANT_P (op2)
3400 && !(GET_CODE (op2) == REG && op2 != subtarget))
3401 subtarget = 0;
3402 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
3404 binoptab = add_optab;
3405 op2 = negate_rtx (GET_MODE (value), op2);
3408 /* Check for an addition with OP2 a constant integer and our first
3409 operand a PLUS of a virtual register and something else. In that
3410 case, we want to emit the sum of the virtual register and the
3411 constant first and then add the other value. This allows virtual
3412 register instantiation to simply modify the constant rather than
3413 creating another one around this addition. */
3414 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
3415 && GET_CODE (XEXP (value, 0)) == PLUS
3416 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
3417 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
3418 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
3420 rtx temp = expand_binop (GET_MODE (value), binoptab,
3421 XEXP (XEXP (value, 0), 0), op2,
3422 subtarget, 0, OPTAB_LIB_WIDEN);
3423 return expand_binop (GET_MODE (value), binoptab, temp,
3424 force_operand (XEXP (XEXP (value, 0), 1), 0),
3425 target, 0, OPTAB_LIB_WIDEN);
3428 tmp = force_operand (XEXP (value, 0), subtarget);
3429 return expand_binop (GET_MODE (value), binoptab, tmp,
3430 force_operand (op2, NULL_RTX),
3431 target, 0, OPTAB_LIB_WIDEN);
3432 /* We give UNSIGNEDP = 0 to expand_binop
3433 because the only operations we are expanding here are signed ones. */
3435 return value;
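/* Editor's note: the two canonicalizations above as integer identities.
   Subtracting a constant becomes adding its negation, and a constant
   added to (virtual-reg + constant) is folded into that inner constant
   first, so virtual register instantiation can just adjust one number.
   The values are hypothetical.  */

static long
canonicalize_sub_const (long x, long c)
{
  return x + (-c); /* MINUS with a CONST_INT turns into PLUS */
}

static long
fold_virtual_plus (long virt_reg, long c1, long c2)
{
  return virt_reg + (c1 + c2); /* constants combined before the rest */
}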
3438 /* Subroutine of expand_expr:
3439 save the non-copied parts (LIST) of an expr (LHS), and return a list
3440 which can restore these values to their previous values,
3441 should something modify their storage. */
3443 static tree
3444 save_noncopied_parts (lhs, list)
3445 tree lhs;
3446 tree list;
3448 tree tail;
3449 tree parts = 0;
3451 for (tail = list; tail; tail = TREE_CHAIN (tail))
3452 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3453 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
3454 else
3456 tree part = TREE_VALUE (tail);
3457 tree part_type = TREE_TYPE (part);
3458 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
3459 rtx target = assign_stack_temp (TYPE_MODE (part_type),
3460 int_size_in_bytes (part_type), 0);
3461 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
3462 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
3463 parts = tree_cons (to_be_saved,
3464 build (RTL_EXPR, part_type, NULL_TREE,
3465 (tree) target),
3466 parts);
3467 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
3469 return parts;
3472 /* Subroutine of expand_expr:
3473 record the non-copied parts (LIST) of an expr (LHS), and return a list
3474 which specifies the initial values of these parts. */
3476 static tree
3477 init_noncopied_parts (lhs, list)
3478 tree lhs;
3479 tree list;
3481 tree tail;
3482 tree parts = 0;
3484 for (tail = list; tail; tail = TREE_CHAIN (tail))
3485 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3486 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
3487 else
3489 tree part = TREE_VALUE (tail);
3490 tree part_type = TREE_TYPE (part);
3491 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
3492 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
3494 return parts;
3497 /* Subroutine of expand_expr: return nonzero iff there is no way that
3498 EXP can reference X, which is being modified. */
3500 static int
3501 safe_from_p (x, exp)
3502 rtx x;
3503 tree exp;
3505 rtx exp_rtl = 0;
3506 int i, nops;
3508 if (x == 0)
3509 return 1;
3511 /* If this is a subreg of a hard register, declare it unsafe; otherwise,
3512 find the underlying pseudo. */
3513 if (GET_CODE (x) == SUBREG)
3515 x = SUBREG_REG (x);
3516 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3517 return 0;
3520 /* If X is a location in the outgoing argument area, it is always safe. */
3521 if (GET_CODE (x) == MEM
3522 && (XEXP (x, 0) == virtual_outgoing_args_rtx
3523 || (GET_CODE (XEXP (x, 0)) == PLUS
3524 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
3525 return 1;
3527 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
3529 case 'd':
3530 exp_rtl = DECL_RTL (exp);
3531 break;
3533 case 'c':
3534 return 1;
3536 case 'x':
3537 if (TREE_CODE (exp) == TREE_LIST)
3538 return ((TREE_VALUE (exp) == 0
3539 || safe_from_p (x, TREE_VALUE (exp)))
3540 && (TREE_CHAIN (exp) == 0
3541 || safe_from_p (x, TREE_CHAIN (exp))));
3542 else
3543 return 0;
3545 case '1':
3546 return safe_from_p (x, TREE_OPERAND (exp, 0));
3548 case '2':
3549 case '<':
3550 return (safe_from_p (x, TREE_OPERAND (exp, 0))
3551 && safe_from_p (x, TREE_OPERAND (exp, 1)));
3553 case 'e':
3554 case 'r':
3555 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
3556 the expression. If it is set, we conflict iff we are that rtx or
3557 both are in memory. Otherwise, we check all operands of the
3558 expression recursively. */
3560 switch (TREE_CODE (exp))
3562 case ADDR_EXPR:
3563 return (staticp (TREE_OPERAND (exp, 0))
3564 || safe_from_p (x, TREE_OPERAND (exp, 0)));
3566 case INDIRECT_REF:
3567 if (GET_CODE (x) == MEM)
3568 return 0;
3569 break;
3571 case CALL_EXPR:
3572 exp_rtl = CALL_EXPR_RTL (exp);
3573 if (exp_rtl == 0)
3575 /* Assume that the call will clobber all hard registers and
3576 all of memory. */
3577 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3578 || GET_CODE (x) == MEM)
3579 return 0;
3582 break;
3584 case RTL_EXPR:
3585 exp_rtl = RTL_EXPR_RTL (exp);
3586 if (exp_rtl == 0)
3587 /* We don't know what this can modify. */
3588 return 0;
3590 break;
3592 case WITH_CLEANUP_EXPR:
3593 exp_rtl = RTL_EXPR_RTL (exp);
3594 break;
3596 case CLEANUP_POINT_EXPR:
3597 return safe_from_p (x, TREE_OPERAND (exp, 0));
3599 case SAVE_EXPR:
3600 exp_rtl = SAVE_EXPR_RTL (exp);
3601 break;
3603 case BIND_EXPR:
3604 /* The only operand we look at is operand 1. The rest aren't
3605 part of the expression. */
3606 return safe_from_p (x, TREE_OPERAND (exp, 1));
3608 case METHOD_CALL_EXPR:
3609 /* This takes an rtx argument, but shouldn't appear here. */
3610 abort ();
3613 /* If we have an rtx, we do not need to scan our operands. */
3614 if (exp_rtl)
3615 break;
3617 nops = tree_code_length[(int) TREE_CODE (exp)];
3618 for (i = 0; i < nops; i++)
3619 if (TREE_OPERAND (exp, i) != 0
3620 && ! safe_from_p (x, TREE_OPERAND (exp, i)))
3621 return 0;
3624 /* If we have an rtl, find any enclosed object. Then see if we conflict
3625 with it. */
3626 if (exp_rtl)
3628 if (GET_CODE (exp_rtl) == SUBREG)
3630 exp_rtl = SUBREG_REG (exp_rtl);
3631 if (GET_CODE (exp_rtl) == REG
3632 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
3633 return 0;
3636 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
3637 are memory and EXP is not readonly. */
3638 return ! (rtx_equal_p (x, exp_rtl)
3639 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
3640 && ! TREE_READONLY (exp)));
3643 /* If we reach here, it is safe. */
3644 return 1;
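/* Editor's note: the final conflict rule above as a boolean sketch.
   EXP's rtx conflicts with X only when it *is* X, or when both live in
   memory and EXP is writable, since two MEMs may alias.  The flag
   parameters stand in for the rtx predicates used above.  */

static int
safe_from_rtx (int same_rtx, int x_is_mem, int exp_is_mem,
               int exp_readonly)
{
  return ! (same_rtx || (x_is_mem && exp_is_mem && ! exp_readonly));
}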
3647 /* Subroutine of expand_expr: return nonzero iff EXP is an
3648 expression whose type is statically determinable. */
3650 static int
3651 fixed_type_p (exp)
3652 tree exp;
3654 if (TREE_CODE (exp) == PARM_DECL
3655 || TREE_CODE (exp) == VAR_DECL
3656 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
3657 || TREE_CODE (exp) == COMPONENT_REF
3658 || TREE_CODE (exp) == ARRAY_REF)
3659 return 1;
3660 return 0;
3663 /* expand_expr: generate code for computing expression EXP.
3664 An rtx for the computed value is returned. The value is never null.
3665 In the case of a void EXP, const0_rtx is returned.
3667 The value may be stored in TARGET if TARGET is nonzero.
3668 TARGET is just a suggestion; callers must assume that
3669 the rtx returned may not be the same as TARGET.
3671 If TARGET is CONST0_RTX, it means that the value will be ignored.
3673 If TMODE is not VOIDmode, it suggests generating the
3674 result in mode TMODE. But this is done only when convenient.
3675 Otherwise, TMODE is ignored and the value is generated in its natural mode.
3676 TMODE is just a suggestion; callers must assume that
3677 the rtx returned may not have mode TMODE.
3679 Note that TARGET may have neither TMODE nor MODE. In that case, it
3680 probably will not be used.
3682 If MODIFIER is EXPAND_SUM then when EXP is an addition
3683 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
3684 or a nest of (PLUS ...) and (MINUS ...) where the terms are
3685 products as above, or REG or MEM, or constant.
3686 Ordinarily in such cases we would output mul or add instructions
3687 and then return a pseudo reg containing the sum.
3689 EXPAND_INITIALIZER is much like EXPAND_SUM except that
3690 it also marks a label as absolutely required (it can't be dead).
3691 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
3692 This is used for outputting expressions used in initializers.
3694 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
3695 with a constant address even if that address is not normally legitimate.
3696 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
3699 expand_expr (exp, target, tmode, modifier)
3700 register tree exp;
3701 rtx target;
3702 enum machine_mode tmode;
3703 enum expand_modifier modifier;
3705 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace.
3706 This is static so it will be accessible to our recursive callees. */
3707 static tree placeholder_list = 0;
3708 register rtx op0, op1, temp;
3709 tree type = TREE_TYPE (exp);
3710 int unsignedp = TREE_UNSIGNED (type);
3711 register enum machine_mode mode = TYPE_MODE (type);
3712 register enum tree_code code = TREE_CODE (exp);
3713 optab this_optab;
3714 /* Use subtarget as the target for operand 0 of a binary operation. */
3715 rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
3716 rtx original_target = target;
3717 /* Maybe defer this until sure not doing bytecode? */
3718 int ignore = (target == const0_rtx
3719 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
3720 || code == CONVERT_EXPR || code == REFERENCE_EXPR
3721 || code == COND_EXPR)
3722 && TREE_CODE (type) == VOID_TYPE));
3723 tree context;
3726 if (output_bytecode && modifier != EXPAND_INITIALIZER)
3728 bc_expand_expr (exp);
3729 return NULL;
3732 /* Don't use hard regs as subtargets, because the combiner
3733 can only handle pseudo regs. */
3734 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
3735 subtarget = 0;
3736 /* Avoid subtargets inside loops,
3737 since they hide some invariant expressions. */
3738 if (preserve_subexpressions_p ())
3739 subtarget = 0;
3741 /* If we are going to ignore this result, we need only do something
3742 if there is a side-effect somewhere in the expression. If there
3743 is, short-circuit the most common cases here. Note that we must
3744 not call expand_expr with anything but const0_rtx in case this
3745 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
3747 if (ignore)
3749 if (! TREE_SIDE_EFFECTS (exp))
3750 return const0_rtx;
3752 /* Ensure we reference a volatile object even if value is ignored. */
3753 if (TREE_THIS_VOLATILE (exp)
3754 && TREE_CODE (exp) != FUNCTION_DECL
3755 && mode != VOIDmode && mode != BLKmode)
3757 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
3758 if (GET_CODE (temp) == MEM)
3759 temp = copy_to_reg (temp);
3760 return const0_rtx;
3763 if (TREE_CODE_CLASS (code) == '1')
3764 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
3765 VOIDmode, modifier);
3766 else if (TREE_CODE_CLASS (code) == '2'
3767 || TREE_CODE_CLASS (code) == '<')
3769 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
3770 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
3771 return const0_rtx;
3773 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
3774 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
3775 /* If the second operand has no side effects, just evaluate
3776 the first. */
3777 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
3778 VOIDmode, modifier);
3780 target = 0;
3783 /* If we will do cse, generate all results into pseudo registers
3784 since 1) that allows cse to find more things
3785 and 2) otherwise cse could produce an insn the machine
3786 cannot support. */
3788 if (! cse_not_expected && mode != BLKmode && target
3789 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
3790 target = subtarget;
3792 switch (code)
3794 case LABEL_DECL:
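/* Reached, e.g., when taking the address of a label with GNU C's
   `&&lab' extension (an illustrative note, not in the original).  */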
3796 tree function = decl_function_context (exp);
3797 /* Handle using a label in a containing function. */
3798 if (function != current_function_decl && function != 0)
3800 struct function *p = find_function_data (function);
3801 /* Allocate in the memory associated with the function
3802 that the label is in. */
3803 push_obstacks (p->function_obstack,
3804 p->function_maybepermanent_obstack);
3806 p->forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
3807 label_rtx (exp), p->forced_labels);
3808 pop_obstacks ();
3810 else if (modifier == EXPAND_INITIALIZER)
3811 forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
3812 label_rtx (exp), forced_labels);
3813 temp = gen_rtx (MEM, FUNCTION_MODE,
3814 gen_rtx (LABEL_REF, Pmode, label_rtx (exp)));
3815 if (function != current_function_decl && function != 0)
3816 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
3817 return temp;
3820 case PARM_DECL:
3821 if (DECL_RTL (exp) == 0)
3823 error_with_decl (exp, "prior parameter's size depends on `%s'");
3824 return CONST0_RTX (mode);
3827 /* ... fall through ... */
3829 case VAR_DECL:
3830 /* If a static var's type was incomplete when the decl was written,
3831 but the type is complete now, lay out the decl now. */
3832 if (DECL_SIZE (exp) == 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
3833 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
3835 push_obstacks_nochange ();
3836 end_temporary_allocation ();
3837 layout_decl (exp, 0);
3838 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
3839 pop_obstacks ();
3842 /* ... fall through ... */
3844 case FUNCTION_DECL:
3845 case RESULT_DECL:
3846 if (DECL_RTL (exp) == 0)
3847 abort ();
3849 /* Ensure the variable is marked as used even if it doesn't go through
3850 a parser. If it hasn't been used yet, write out an external
3851 definition. */
3852 if (! TREE_USED (exp))
3854 assemble_external (exp);
3855 TREE_USED (exp) = 1;
3858 /* Handle variables inherited from containing functions. */
3859 context = decl_function_context (exp);
3861 /* We treat inline_function_decl as an alias for the current function
3862 because that is the inline function whose vars, types, etc.
3863 are being merged into the current function.
3864 See expand_inline_function. */
3866 if (context != 0 && context != current_function_decl
3867 && context != inline_function_decl
3868 /* If var is static, we don't need a static chain to access it. */
3869 && ! (GET_CODE (DECL_RTL (exp)) == MEM
3870 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
3872 rtx addr;
3874 /* Mark as non-local and addressable. */
3875 DECL_NONLOCAL (exp) = 1;
3876 mark_addressable (exp);
3877 if (GET_CODE (DECL_RTL (exp)) != MEM)
3878 abort ();
3879 addr = XEXP (DECL_RTL (exp), 0);
3880 if (GET_CODE (addr) == MEM)
3881 addr = gen_rtx (MEM, Pmode,
3882 fix_lexical_addr (XEXP (addr, 0), exp));
3883 else
3884 addr = fix_lexical_addr (addr, exp);
3885 return change_address (DECL_RTL (exp), mode, addr);
3888 /* This is the case of an array whose size is to be determined
3889 from its initializer, while the initializer is still being parsed.
3890 See expand_decl. */
3892 if (GET_CODE (DECL_RTL (exp)) == MEM
3893 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
3894 return change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
3895 XEXP (DECL_RTL (exp), 0));
3897 /* If DECL_RTL is memory, we are in the normal case. If either the
3898 address is not valid, or it is not a register and -fforce-addr is
3899 specified, get the address into a register. */
3901 if (GET_CODE (DECL_RTL (exp)) == MEM
3902 && modifier != EXPAND_CONST_ADDRESS
3903 && modifier != EXPAND_SUM
3904 && modifier != EXPAND_INITIALIZER
3905 && (! memory_address_p (DECL_MODE (exp), XEXP (DECL_RTL (exp), 0))
3906 || (flag_force_addr
3907 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
3908 return change_address (DECL_RTL (exp), VOIDmode,
3909 copy_rtx (XEXP (DECL_RTL (exp), 0)));
3911 /* If the mode of DECL_RTL does not match that of the decl, it
3912 must be a promoted value. We return a SUBREG of the wanted mode,
3913 but mark it so that we know that it was already extended. */
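/* Sketch (assuming a target whose PROMOTE_MODE widens narrow locals
   to SImode): a `short' variable then lives in an SImode pseudo, and
   we return (subreg:HI (reg:SI n) 0) with SUBREG_PROMOTED_VAR_P set
   so callers know the value has already been extended.  */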
3915 if (GET_CODE (DECL_RTL (exp)) == REG
3916 && GET_MODE (DECL_RTL (exp)) != mode)
3918 /* Get the signedness used for this variable. Ensure we get the
3919 same mode we got when the variable was declared. */
3920 if (GET_MODE (DECL_RTL (exp))
3921 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
3922 abort ();
3924 temp = gen_rtx (SUBREG, mode, DECL_RTL (exp), 0);
3925 SUBREG_PROMOTED_VAR_P (temp) = 1;
3926 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
3927 return temp;
3930 return DECL_RTL (exp);
3932 case INTEGER_CST:
3933 return immed_double_const (TREE_INT_CST_LOW (exp),
3934 TREE_INT_CST_HIGH (exp),
3935 mode);
3937 case CONST_DECL:
3938 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
3940 case REAL_CST:
3941 /* If optimized, generate immediate CONST_DOUBLE
3942 which will be turned into memory by reload if necessary.
3944 We used to force a register so that loop.c could see it. But
3945 this does not allow gen_* patterns to perform optimizations with
3946 the constants. It also produces two insns in cases like "x = 1.0;".
3947 On most machines, floating-point constants are not permitted in
3948 many insns, so we'd end up copying it to a register in any case.
3950 Now, we do the copying in expand_binop, if appropriate. */
3951 return immed_real_const (exp);
3953 case COMPLEX_CST:
3954 case STRING_CST:
3955 if (! TREE_CST_RTL (exp))
3956 output_constant_def (exp);
3958 /* TREE_CST_RTL probably contains a constant address.
3959 On RISC machines where a constant address isn't valid,
3960 make some insns to get that address into a register. */
3961 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
3962 && modifier != EXPAND_CONST_ADDRESS
3963 && modifier != EXPAND_INITIALIZER
3964 && modifier != EXPAND_SUM
3965 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
3966 || (flag_force_addr
3967 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
3968 return change_address (TREE_CST_RTL (exp), VOIDmode,
3969 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
3970 return TREE_CST_RTL (exp);
3972 case SAVE_EXPR:
3973 context = decl_function_context (exp);
3975 /* We treat inline_function_decl as an alias for the current function
3976 because that is the inline function whose vars, types, etc.
3977 are being merged into the current function.
3978 See expand_inline_function. */
3979 if (context == current_function_decl || context == inline_function_decl)
3980 context = 0;
3982 /* If this is non-local, handle it. */
3983 if (context)
3985 temp = SAVE_EXPR_RTL (exp);
3986 if (temp && GET_CODE (temp) == REG)
3988 put_var_into_stack (exp);
3989 temp = SAVE_EXPR_RTL (exp);
3991 if (temp == 0 || GET_CODE (temp) != MEM)
3992 abort ();
3993 return change_address (temp, mode,
3994 fix_lexical_addr (XEXP (temp, 0), exp));
3996 if (SAVE_EXPR_RTL (exp) == 0)
3998 if (mode == BLKmode)
4000 temp
4001 = assign_stack_temp (mode, int_size_in_bytes (type), 0);
4002 MEM_IN_STRUCT_P (temp) = AGGREGATE_TYPE_P (type);
4004 else
4005 temp = gen_reg_rtx (promote_mode (type, mode, &unsignedp, 0));
4007 SAVE_EXPR_RTL (exp) = temp;
4008 if (!optimize && GET_CODE (temp) == REG)
4009 save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, temp,
4010 save_expr_regs);
4012 /* If the mode of TEMP does not match that of the expression, it
4013 must be a promoted value. We pass store_expr a SUBREG of the
4014 wanted mode but mark it so that we know that it was already
4015 extended. Note that `unsignedp' was modified above in
4016 this case. */
4018 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
4020 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
4021 SUBREG_PROMOTED_VAR_P (temp) = 1;
4022 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
4025 store_expr (TREE_OPERAND (exp, 0), temp, 0);
4028 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
4029 must be a promoted value. We return a SUBREG of the wanted mode,
4030 but mark it so that we know that it was already extended. */
4032 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
4033 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
4035 /* Compute the signedness and make the proper SUBREG. */
4036 promote_mode (type, mode, &unsignedp, 0);
4037 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
4038 SUBREG_PROMOTED_VAR_P (temp) = 1;
4039 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
4040 return temp;
4043 return SAVE_EXPR_RTL (exp);
4045 case PLACEHOLDER_EXPR:
4046 /* If there is an object on the head of the placeholder list,
4047 see if some object in its references is of type TYPE. For
4048 further information, see tree.def. */
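/* Illustration (hypothetical): a self-referential size, such as that
   of a variable-sized record field, is built once around a
   PLACEHOLDER_EXPR standing for "the containing object"; each use
   site wraps it in a WITH_RECORD_EXPR naming the actual object, which
   is what placeholder_list supplies here.  */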
4049 if (placeholder_list)
4051 tree object;
4052 tree old_list = placeholder_list;
4054 for (object = TREE_PURPOSE (placeholder_list);
4055 TREE_TYPE (object) != type
4056 && (TREE_CODE_CLASS (TREE_CODE (object)) == 'r'
4057 || TREE_CODE_CLASS (TREE_CODE (object)) == '1'
4058 || TREE_CODE_CLASS (TREE_CODE (object)) == '2'
4059 || TREE_CODE_CLASS (TREE_CODE (object)) == 'e');
4060 object = TREE_OPERAND (object, 0))
4063 if (object && TREE_TYPE (object) == type)
4065 /* Expand this object, skipping the list entries before
4066 the one where it was found, in case it is also a
4067 PLACEHOLDER_EXPR. In that case, we want to translate it
4068 using subsequent entries. */
4069 placeholder_list = TREE_CHAIN (placeholder_list);
4070 temp = expand_expr (object, original_target, tmode, modifier);
4071 placeholder_list = old_list;
4072 return temp;
4076 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
4077 abort ();
4079 case WITH_RECORD_EXPR:
4080 /* Put the object on the placeholder list, expand our first operand,
4081 and pop the list. */
4082 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
4083 placeholder_list);
4084 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
4085 tmode, modifier);
4086 placeholder_list = TREE_CHAIN (placeholder_list);
4087 return target;
4089 case EXIT_EXPR:
4090 expand_exit_loop_if_false (NULL_PTR,
4091 invert_truthvalue (TREE_OPERAND (exp, 0)));
4092 return const0_rtx;
4094 case LOOP_EXPR:
4095 push_temp_slots ();
4096 expand_start_loop (1);
4097 expand_expr_stmt (TREE_OPERAND (exp, 0));
4098 expand_end_loop ();
4099 pop_temp_slots ();
4101 return const0_rtx;
4103 case BIND_EXPR:
4105 tree vars = TREE_OPERAND (exp, 0);
4106 int vars_need_expansion = 0;
4108 /* Need to open a binding contour here because
4109 if there are any cleanups they must be contained here. */
4110 expand_start_bindings (0);
4112 /* Mark the corresponding BLOCK for output in its proper place. */
4113 if (TREE_OPERAND (exp, 2) != 0
4114 && ! TREE_USED (TREE_OPERAND (exp, 2)))
4115 insert_block (TREE_OPERAND (exp, 2));
4117 /* If VARS have not yet been expanded, expand them now. */
4118 while (vars)
4120 if (DECL_RTL (vars) == 0)
4122 vars_need_expansion = 1;
4123 expand_decl (vars);
4125 expand_decl_init (vars);
4126 vars = TREE_CHAIN (vars);
4129 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
4131 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
4133 return temp;
4136 case RTL_EXPR:
4137 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
4138 abort ();
4139 emit_insns (RTL_EXPR_SEQUENCE (exp));
4140 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
4141 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
4142 free_temps_for_rtl_expr (exp);
4143 return RTL_EXPR_RTL (exp);
4145 case CONSTRUCTOR:
4146 /* If we don't need the result, just ensure we evaluate any
4147 subexpressions. */
4148 if (ignore)
4150 tree elt;
4151 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4152 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
4153 return const0_rtx;
4156 /* All elts simple constants => refer to a constant in memory. But
4157 if this is a non-BLKmode mode, let it store a field at a time
4158 since that should make a CONST_INT or CONST_DOUBLE when we
4159 fold. Likewise, if we have a target we can use, it is best to
4160 store directly into the target unless the type is large enough
4161 that memcpy will be used. If we are making an initializer and
4162 all operands are constant, put it in memory as well. */
4163 else if ((TREE_STATIC (exp)
4164 && ((mode == BLKmode
4165 && ! (target != 0 && safe_from_p (target, exp)))
4166 || TREE_ADDRESSABLE (exp)
4167 || (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
4168 && (move_by_pieces_ninsns
4169 (TREE_INT_CST_LOW (TYPE_SIZE (type)),
4170 TYPE_ALIGN (type))
4171 > MOVE_RATIO))))
4172 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
4174 rtx constructor = output_constant_def (exp);
4175 if (modifier != EXPAND_CONST_ADDRESS
4176 && modifier != EXPAND_INITIALIZER
4177 && modifier != EXPAND_SUM
4178 && (! memory_address_p (GET_MODE (constructor),
4179 XEXP (constructor, 0))
4180 || (flag_force_addr
4181 && GET_CODE (XEXP (constructor, 0)) != REG)))
4182 constructor = change_address (constructor, VOIDmode,
4183 XEXP (constructor, 0));
4184 return constructor;
4187 else
4189 if (target == 0 || ! safe_from_p (target, exp))
4191 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
4192 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
4193 else
4195 target
4196 = assign_stack_temp (mode, int_size_in_bytes (type), 0);
4197 if (AGGREGATE_TYPE_P (type))
4198 MEM_IN_STRUCT_P (target) = 1;
4201 store_constructor (exp, target);
4202 return target;
4205 case INDIRECT_REF:
4207 tree exp1 = TREE_OPERAND (exp, 0);
4208 tree exp2;
4210 /* A SAVE_EXPR as the address in an INDIRECT_EXPR is generated
4211 for *PTR += ANYTHING where PTR is put inside the SAVE_EXPR.
4212 This code has the same general effect as simply doing
4213 expand_expr on the save expr, except that the expression PTR
4214 is computed for use as a memory address. This means different
4215 code, suitable for indexing, may be generated. */
4216 if (TREE_CODE (exp1) == SAVE_EXPR
4217 && SAVE_EXPR_RTL (exp1) == 0
4218 && TREE_CODE (exp2 = TREE_OPERAND (exp1, 0)) != ERROR_MARK
4219 && TYPE_MODE (TREE_TYPE (exp1)) == Pmode
4220 && TYPE_MODE (TREE_TYPE (exp2)) == Pmode)
4222 temp = expand_expr (TREE_OPERAND (exp1, 0), NULL_RTX,
4223 VOIDmode, EXPAND_SUM);
4224 op0 = memory_address (mode, temp);
4225 op0 = copy_all_regs (op0);
4226 SAVE_EXPR_RTL (exp1) = op0;
4228 else
4230 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
4231 op0 = memory_address (mode, op0);
4234 temp = gen_rtx (MEM, mode, op0);
4235 /* If address was computed by addition,
4236 mark this as an element of an aggregate. */
4237 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
4238 || (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR
4239 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == PLUS_EXPR)
4240 || AGGREGATE_TYPE_P (TREE_TYPE (exp))
4241 || (TREE_CODE (exp1) == ADDR_EXPR
4242 && (exp2 = TREE_OPERAND (exp1, 0))
4243 && AGGREGATE_TYPE_P (TREE_TYPE (exp2))))
4244 MEM_IN_STRUCT_P (temp) = 1;
4245 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
4246 #if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
4247 a location is accessed through a pointer to const does not mean
4248 that the value there can never change. */
4249 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
4250 #endif
4251 return temp;
4254 case ARRAY_REF:
4255 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
4256 abort ();
4259 tree array = TREE_OPERAND (exp, 0);
4260 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
4261 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
4262 tree index = TREE_OPERAND (exp, 1);
4263 tree index_type = TREE_TYPE (index);
4264 int i;
4266 if (TREE_CODE (low_bound) != INTEGER_CST
4267 && contains_placeholder_p (low_bound))
4268 low_bound = build (WITH_RECORD_EXPR, sizetype, low_bound, exp);
4270 /* Optimize the special-case of a zero lower bound.
4272 We convert the low_bound to sizetype to avoid some problems
4273 with constant folding. (E.g. suppose the lower bound is 1,
4274 and its mode is QI. Without the conversion, (ARRAY
4275 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
4276 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
4278 But sizetype isn't quite right either (especially if
4279 the lowbound is negative). FIXME */
4281 if (! integer_zerop (low_bound))
4282 index = fold (build (MINUS_EXPR, index_type, index,
4283 convert (sizetype, low_bound)));
4285 if ((TREE_CODE (index) != INTEGER_CST
4286 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4287 && (! STRICT_ALIGNMENT || ! get_inner_unaligned_p (exp)))
4289 /* Nonconstant array index or nonconstant element size, and
4290 not an array in an unaligned (packed) structure field.
4291 Generate the tree for *(&array+index) and expand that,
4292 except do it in a language-independent way
4293 and don't complain about non-lvalue arrays.
4294 `mark_addressable' should already have been called
4295 for any array for which this case will be reached. */
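/* A sketch of the rewrite (assuming 4-byte ints): for `int a[n]',
   `a[i]' is expanded here as if written `*(&a + i * 4)'.  */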
4297 /* Don't forget the const or volatile flag from the array
4298 element. */
4299 tree variant_type = build_type_variant (type,
4300 TREE_READONLY (exp),
4301 TREE_THIS_VOLATILE (exp));
4302 tree array_adr = build1 (ADDR_EXPR,
4303 build_pointer_type (variant_type), array);
4304 tree elt;
4305 tree size = size_in_bytes (type);
4307 /* Convert the integer argument to a type the same size as a
4308 pointer so the multiply won't overflow spuriously. */
4309 if (TYPE_PRECISION (index_type) != POINTER_SIZE)
4310 index = convert (type_for_size (POINTER_SIZE, 0), index);
4312 if (TREE_CODE (size) != INTEGER_CST
4313 && contains_placeholder_p (size))
4314 size = build (WITH_RECORD_EXPR, sizetype, size, exp);
4316 /* Don't think the address has side effects
4317 just because the array does.
4318 (In some cases the address might have side effects,
4319 and we fail to record that fact here. However, it should not
4320 matter, since expand_expr should not care.) */
4321 TREE_SIDE_EFFECTS (array_adr) = 0;
4323 elt = build1 (INDIRECT_REF, type,
4324 fold (build (PLUS_EXPR,
4325 TYPE_POINTER_TO (variant_type),
4326 array_adr,
4327 fold (build (MULT_EXPR,
4328 TYPE_POINTER_TO (variant_type),
4329 index, size)))));
4331 /* Volatility, etc., of new expression is same as old
4332 expression. */
4333 TREE_SIDE_EFFECTS (elt) = TREE_SIDE_EFFECTS (exp);
4334 TREE_THIS_VOLATILE (elt) = TREE_THIS_VOLATILE (exp);
4335 TREE_READONLY (elt) = TREE_READONLY (exp);
4337 return expand_expr (elt, target, tmode, modifier);
4340 /* Fold an expression like: "foo"[2].
4341 This is not done in fold so it won't happen inside &. */
4343 if (TREE_CODE (array) == STRING_CST
4344 && TREE_CODE (index) == INTEGER_CST
4345 && !TREE_INT_CST_HIGH (index)
4346 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array)
4347 && GET_MODE_CLASS (mode) == MODE_INT)
4348 return GEN_INT (TREE_STRING_POINTER (array)[i]);
4350 /* If this is a constant index into a constant array,
4351 just get the value from the array. Handle both the cases when
4352 we have an explicit constructor and when our operand is a variable
4353 that was declared const. */
4355 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
4357 if (TREE_CODE (index) == INTEGER_CST
4358 && TREE_INT_CST_HIGH (index) == 0)
4360 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
4362 i = TREE_INT_CST_LOW (index);
4363 while (elem && i--)
4364 elem = TREE_CHAIN (elem);
4365 if (elem)
4366 return expand_expr (fold (TREE_VALUE (elem)), target,
4367 tmode, modifier);
4371 else if (optimize >= 1
4372 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
4373 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
4374 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
4376 if (TREE_CODE (index) == INTEGER_CST
4377 && TREE_INT_CST_HIGH (index) == 0)
4379 tree init = DECL_INITIAL (array);
4381 i = TREE_INT_CST_LOW (index);
4382 if (TREE_CODE (init) == CONSTRUCTOR)
4384 tree elem = CONSTRUCTOR_ELTS (init);
4386 while (elem
4387 && !tree_int_cst_equal (TREE_PURPOSE (elem), index))
4388 elem = TREE_CHAIN (elem);
4389 if (elem)
4390 return expand_expr (fold (TREE_VALUE (elem)), target,
4391 tmode, modifier);
4393 else if (TREE_CODE (init) == STRING_CST
4394 && i < TREE_STRING_LENGTH (init))
4395 return GEN_INT (TREE_STRING_POINTER (init)[i]);
4400 /* Treat array-ref with constant index as a component-ref. */
4402 case COMPONENT_REF:
4403 case BIT_FIELD_REF:
4404 /* If the operand is a CONSTRUCTOR, we can just extract the
4405 appropriate field if it is present. */
4406 if (code != ARRAY_REF
4407 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
4409 tree elt;
4411 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
4412 elt = TREE_CHAIN (elt))
4413 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
4414 return expand_expr (TREE_VALUE (elt), target, tmode, modifier);
4418 enum machine_mode mode1;
4419 int bitsize;
4420 int bitpos;
4421 tree offset;
4422 int volatilep = 0;
4423 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
4424 &mode1, &unsignedp, &volatilep);
4425 int alignment;
4427 /* If we got back the original object, something is wrong. Perhaps
4428 we are evaluating an expression too early. In any event, don't
4429 infinitely recurse. */
4430 if (tem == exp)
4431 abort ();
4433 /* In some cases, we will be offsetting OP0's address by a constant.
4434 So get it as a sum, if possible. If we will be using it
4435 directly in an insn, we validate it. */
4436 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_SUM);
4438 /* If this is a constant, put it into a register if it is a
4439 legitimate constant, and into memory if it isn't. */
4440 if (CONSTANT_P (op0))
4442 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
4443 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0))
4444 op0 = force_reg (mode, op0);
4445 else
4446 op0 = validize_mem (force_const_mem (mode, op0));
4449 alignment = TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT;
4450 if (offset != 0)
4452 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4454 if (GET_CODE (op0) != MEM)
4455 abort ();
4456 op0 = change_address (op0, VOIDmode,
4457 gen_rtx (PLUS, Pmode, XEXP (op0, 0),
4458 force_reg (Pmode, offset_rtx)));
4459 /* If we have a variable offset, the known alignment
4460 is only that of the innermost structure containing the field.
4461 (Actually, we could sometimes do better by using the
4462 size of an element of the innermost array, but no need.) */
4463 if (TREE_CODE (exp) == COMPONENT_REF
4464 || TREE_CODE (exp) == BIT_FIELD_REF)
4465 alignment = (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
4466 / BITS_PER_UNIT);
4469 /* Don't forget about volatility even if this is a bitfield. */
4470 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
4472 op0 = copy_rtx (op0);
4473 MEM_VOLATILE_P (op0) = 1;
4476 /* In cases where an aligned union has an unaligned object
4477 as a field, we might be extracting a BLKmode value from
4478 an integer-mode (e.g., SImode) object. Handle this case
4479 by doing the extract into an object as wide as the field
4480 (which we know to be the width of a basic mode), then
4481 storing into memory, and changing the mode to BLKmode. */
4482 if (mode1 == VOIDmode
4483 || (mode1 != BLKmode && ! direct_load[(int) mode1]
4484 && modifier != EXPAND_CONST_ADDRESS
4485 && modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4486 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
4487 /* If the field isn't aligned enough to fetch as a memref,
4488 fetch it as a bit field. */
4489 || (STRICT_ALIGNMENT
4490 && TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode))
4491 || (STRICT_ALIGNMENT && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
4493 enum machine_mode ext_mode = mode;
4495 if (ext_mode == BLKmode)
4496 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
4498 if (ext_mode == BLKmode)
4499 abort ();
4501 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
4502 unsignedp, target, ext_mode, ext_mode,
4503 alignment,
4504 int_size_in_bytes (TREE_TYPE (tem)));
4505 if (mode == BLKmode)
4507 rtx new = assign_stack_temp (ext_mode,
4508 bitsize / BITS_PER_UNIT, 0);
4510 emit_move_insn (new, op0);
4511 op0 = copy_rtx (new);
4512 PUT_MODE (op0, BLKmode);
4513 MEM_IN_STRUCT_P (op0) = 1;
4516 return op0;
4519 /* Get a reference to just this component. */
4520 if (modifier == EXPAND_CONST_ADDRESS
4521 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
4522 op0 = gen_rtx (MEM, mode1, plus_constant (XEXP (op0, 0),
4523 (bitpos / BITS_PER_UNIT)));
4524 else
4525 op0 = change_address (op0, mode1,
4526 plus_constant (XEXP (op0, 0),
4527 (bitpos / BITS_PER_UNIT)));
4528 MEM_IN_STRUCT_P (op0) = 1;
4529 MEM_VOLATILE_P (op0) |= volatilep;
4530 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode)
4531 return op0;
4532 if (target == 0)
4533 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
4534 convert_move (target, op0, unsignedp);
4535 return target;
4538 case OFFSET_REF:
4540 tree base = build1 (ADDR_EXPR, type, TREE_OPERAND (exp, 0));
4541 tree addr = build (PLUS_EXPR, type, base, TREE_OPERAND (exp, 1));
4542 op0 = expand_expr (addr, NULL_RTX, VOIDmode, EXPAND_SUM);
4543 temp = gen_rtx (MEM, mode, memory_address (mode, op0));
4544 MEM_IN_STRUCT_P (temp) = 1;
4545 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp);
4546 #if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
4547 a location is accessed through a pointer to const does not mean
4548 that the value there can never change. */
4549 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
4550 #endif
4551 return temp;
4554 /* Intended for a reference to a buffer of a file-object in Pascal.
4555 But it's not certain that a special tree code will really be
4556 necessary for these. INDIRECT_REF might work for them. */
4557 case BUFFER_REF:
4558 abort ();
4560 case IN_EXPR:
4562 /* Pascal set IN expression.
4564 Algorithm:
4565 rlo = set_low - (set_low%bits_per_word);
4566 the_word = set [ (index - rlo)/bits_per_word ];
4567 bit_index = index % bits_per_word;
4568 bitmask = 1 << bit_index;
4569 return !!(the_word & bitmask); */
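/* Worked example (illustrative only; 8-bit units, set_low == 0):
   for index 11, the_word is set[1], bit_index is 3, and the result
   tests bit mask 0x08 of that byte.  */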
4571 tree set = TREE_OPERAND (exp, 0);
4572 tree index = TREE_OPERAND (exp, 1);
4573 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
4574 tree set_type = TREE_TYPE (set);
4575 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
4576 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
4577 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
4578 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
4579 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
4580 rtx setval = expand_expr (set, 0, VOIDmode, 0);
4581 rtx setaddr = XEXP (setval, 0);
4582 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
4583 rtx rlow;
4584 rtx diff, quo, rem, addr, bit, result;
4586 preexpand_calls (exp);
4588 /* If domain is empty, answer is no. Likewise if index is constant
4589 and out of bounds. */
4590 if ((TREE_CODE (set_high_bound) == INTEGER_CST
4591 && TREE_CODE (set_low_bound) == INTEGER_CST
4592 && tree_int_cst_lt (set_high_bound, set_low_bound)
4593 || (TREE_CODE (index) == INTEGER_CST
4594 && TREE_CODE (set_low_bound) == INTEGER_CST
4595 && tree_int_cst_lt (index, set_low_bound))
4596 || (TREE_CODE (set_high_bound) == INTEGER_CST
4597 && TREE_CODE (index) == INTEGER_CST
4598 && tree_int_cst_lt (set_high_bound, index))))
4599 return const0_rtx;
4601 if (target == 0)
4602 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
4604 /* If we get here, we have to generate the code for both cases
4605 (in range and out of range). */
4607 op0 = gen_label_rtx ();
4608 op1 = gen_label_rtx ();
4610 if (! (GET_CODE (index_val) == CONST_INT
4611 && GET_CODE (lo_r) == CONST_INT))
4613 emit_cmp_insn (index_val, lo_r, LT, NULL_RTX,
4614 GET_MODE (index_val), iunsignedp, 0);
4615 emit_jump_insn (gen_blt (op1));
4618 if (! (GET_CODE (index_val) == CONST_INT
4619 && GET_CODE (hi_r) == CONST_INT))
4621 emit_cmp_insn (index_val, hi_r, GT, NULL_RTX,
4622 GET_MODE (index_val), iunsignedp, 0);
4623 emit_jump_insn (gen_bgt (op1));
4626 /* Calculate the element number of bit zero in the first word
4627 of the set. */
4628 if (GET_CODE (lo_r) == CONST_INT)
4629 rlow = GEN_INT (INTVAL (lo_r)
4630 & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
4631 else
4632 rlow = expand_binop (index_mode, and_optab, lo_r,
4633 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
4634 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
4636 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
4637 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
4639 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
4640 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
4641 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
4642 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
4644 addr = memory_address (byte_mode,
4645 expand_binop (index_mode, add_optab, diff,
4646 setaddr, NULL_RTX, iunsignedp,
4647 OPTAB_LIB_WIDEN));
4649 /* Extract the bit we want to examine. */
4650 bit = expand_shift (RSHIFT_EXPR, byte_mode,
4651 gen_rtx (MEM, byte_mode, addr),
4652 make_tree (TREE_TYPE (index), rem),
4653 NULL_RTX, 1);
4654 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
4655 GET_MODE (target) == byte_mode ? target : 0,
4656 1, OPTAB_LIB_WIDEN);
4658 if (result != target)
4659 convert_move (target, result, 1);
4661 /* Output the code to handle the out-of-range case. */
4662 emit_jump (op0);
4663 emit_label (op1);
4664 emit_move_insn (target, const0_rtx);
4665 emit_label (op0);
4666 return target;
4669 case WITH_CLEANUP_EXPR:
4670 if (RTL_EXPR_RTL (exp) == 0)
4672 RTL_EXPR_RTL (exp)
4673 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
4674 cleanups_this_call
4675 = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2), cleanups_this_call);
4676 /* That's it for this cleanup. */
4677 TREE_OPERAND (exp, 2) = 0;
4678 (*interim_eh_hook) (NULL_TREE);
4680 return RTL_EXPR_RTL (exp);
4682 case CLEANUP_POINT_EXPR:
4684 extern int temp_slot_level;
4685 tree old_cleanups = cleanups_this_call;
4686 int old_temp_level = target_temp_slot_level;
4687 push_temp_slots ();
4688 target_temp_slot_level = temp_slot_level;
4689 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, modifier);
4690 expand_cleanups_to (old_cleanups);
4691 preserve_temp_slots (op0);
4692 free_temp_slots ();
4693 pop_temp_slots ();
4694 target_temp_slot_level = old_temp_level;
4696 return op0;
4698 case CALL_EXPR:
4699 /* Check for a built-in function. */
4700 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
4701 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
4702 == FUNCTION_DECL)
4703 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4704 return expand_builtin (exp, target, subtarget, tmode, ignore);
4706 /* If this call was expanded already by preexpand_calls,
4707 just return the result we got. */
4708 if (CALL_EXPR_RTL (exp) != 0)
4709 return CALL_EXPR_RTL (exp);
4711 return expand_call (exp, target, ignore);
4713 case NON_LVALUE_EXPR:
4714 case NOP_EXPR:
4715 case CONVERT_EXPR:
4716 case REFERENCE_EXPR:
4717 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
4719 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
4720 modifier);
4722 /* If the signedness of the conversion differs and OP0 is
4723 a promoted SUBREG, clear that indication since we now
4724 have to do the proper extension. */
4725 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
4726 && GET_CODE (op0) == SUBREG)
4727 SUBREG_PROMOTED_VAR_P (op0) = 0;
4729 return op0;
4732 if (TREE_CODE (type) == UNION_TYPE)
4734 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
4735 if (target == 0)
4737 if (mode == BLKmode)
4739 if (TYPE_SIZE (type) == 0
4740 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4741 abort ();
4742 target = assign_stack_temp (BLKmode,
4743 (TREE_INT_CST_LOW (TYPE_SIZE (type))
4744 + BITS_PER_UNIT - 1)
4745 / BITS_PER_UNIT, 0);
4747 else
4748 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
4751 if (GET_CODE (target) == MEM)
4752 /* Store data into beginning of memory target. */
4753 store_expr (TREE_OPERAND (exp, 0),
4754 change_address (target, TYPE_MODE (valtype), 0), 0);
4756 else if (GET_CODE (target) == REG)
4757 /* Store this field into a union of the proper type. */
4758 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
4759 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
4760 VOIDmode, 0, 1,
4761 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))));
4762 else
4763 abort ();
4765 /* Return the entire union. */
4766 return target;
4769 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
4770 if (GET_MODE (op0) == mode)
4771 return op0;
4773 /* If OP0 is a constant, just convert it into the proper mode. */
4774 if (CONSTANT_P (op0))
4775 return
4776 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
4777 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4779 if (modifier == EXPAND_INITIALIZER)
4780 return gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
4782 if (flag_force_mem && GET_CODE (op0) == MEM)
4783 op0 = copy_to_reg (op0);
4785 if (target == 0)
4786 return
4787 convert_to_mode (mode, op0,
4788 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4789 else
4790 convert_move (target, op0,
4791 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4792 return target;
4794 case PLUS_EXPR:
4795 /* We come here from MINUS_EXPR when the second operand is a constant. */
4796 plus_expr:
4797 this_optab = add_optab;
4799 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
4800 something else, make sure we add the register to the constant and
4801 then to the other thing. This case can occur during strength
4802 reduction and doing it this way will produce better code if the
4803 frame pointer or argument pointer is eliminated.
4805 fold-const.c will ensure that the constant is always in the inner
4806 PLUS_EXPR, so the only case we need to do anything about is if
4807 sp, ap, or fp is our second argument, in which case we must swap
4808 the innermost first argument and our second argument. */
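/* Illustrative sketch: ((x + 4) + fp) is rewritten as ((fp + 4) + x),
   keeping the constant attached to the frame pointer so that it folds
   cleanly when fp is eliminated.  */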
4810 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
4811 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
4812 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
4813 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
4814 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
4815 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
4817 tree t = TREE_OPERAND (exp, 1);
4819 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4820 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
4823 /* If the result is to be Pmode and we are adding an integer to
4824 something, we might be forming a constant. So try to use
4825 plus_constant. If it produces a sum and we can't accept it,
4826 use force_operand. This allows P = &ARR[const] to generate
4827 efficient code on machines where a SYMBOL_REF is not a valid
4828 address.
4830 If this is an EXPAND_SUM call, always return the sum. */
4831 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
4832 || mode == Pmode)
4834 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
4835 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4836 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
4838 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
4839 EXPAND_SUM);
4840 op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
4841 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4842 op1 = force_operand (op1, target);
4843 return op1;
4846 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4847 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
4848 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
4850 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
4851 EXPAND_SUM);
4852 if (! CONSTANT_P (op0))
4854 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
4855 VOIDmode, modifier);
4856 /* Don't go to both_summands if modifier
4857 says it's not right to return a PLUS. */
4858 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4859 goto binop2;
4860 goto both_summands;
4862 op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
4863 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4864 op0 = force_operand (op0, target);
4865 return op0;
4869 /* No sense saving up arithmetic to be done
4870 if it's all in the wrong mode to form part of an address.
4871 And force_operand won't know whether to sign-extend or
4872 zero-extend. */
4873 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4874 || mode != Pmode)
4875 goto binop;
4877 preexpand_calls (exp);
4878 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4879 subtarget = 0;
4881 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
4882 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
4884 both_summands:
4885 /* Make sure any term that's a sum with a constant comes last. */
4886 if (GET_CODE (op0) == PLUS
4887 && CONSTANT_P (XEXP (op0, 1)))
4889 temp = op0;
4890 op0 = op1;
4891 op1 = temp;
4893 /* If adding to a sum including a constant,
4894 associate it to put the constant outside. */
4895 if (GET_CODE (op1) == PLUS
4896 && CONSTANT_P (XEXP (op1, 1)))
4898 rtx constant_term = const0_rtx;
4900 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
4901 if (temp != 0)
4902 op0 = temp;
4903 /* Ensure that MULT comes first if there is one. */
4904 else if (GET_CODE (op0) == MULT)
4905 op0 = gen_rtx (PLUS, mode, op0, XEXP (op1, 0));
4906 else
4907 op0 = gen_rtx (PLUS, mode, XEXP (op1, 0), op0);
4909 /* Let's also eliminate constants from op0 if possible. */
4910 op0 = eliminate_constant_term (op0, &constant_term);
4912 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
4913 their sum should be a constant. Form it into OP1, since the
4914 result we want will then be OP0 + OP1. */
4916 temp = simplify_binary_operation (PLUS, mode, constant_term,
4917 XEXP (op1, 1));
4918 if (temp != 0)
4919 op1 = temp;
4920 else
4921 op1 = gen_rtx (PLUS, mode, constant_term, XEXP (op1, 1));
4924 /* Put a constant term last and put a multiplication first. */
4925 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
4926 temp = op1, op1 = op0, op0 = temp;
4928 temp = simplify_binary_operation (PLUS, mode, op0, op1);
4929 return temp ? temp : gen_rtx (PLUS, mode, op0, op1);
4931 case MINUS_EXPR:
4932 /* For initializers, we are allowed to return a MINUS of two
4933 symbolic constants. Here we handle all cases when both operands
4934 are constant. */
4937 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
4938 && really_constant_p (TREE_OPERAND (exp, 0))
4939 && really_constant_p (TREE_OPERAND (exp, 1)))
4941 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
4942 VOIDmode, modifier);
4943 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
4944 VOIDmode, modifier);
4946 /* If one operand is a CONST_INT, put it last. */
4947 if (GET_CODE (op0) == CONST_INT)
4948 temp = op0, op0 = op1, op1 = temp;
4950 /* If the last operand is a CONST_INT, use plus_constant of
4951 the negated constant. Else make the MINUS. */
4952 if (GET_CODE (op1) == CONST_INT)
4953 return plus_constant (op0, - INTVAL (op1));
4954 else
4955 return gen_rtx (MINUS, mode, op0, op1);
4957 /* Convert A - const to A + (-const). */
4958 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
4960 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0),
4961 fold (build1 (NEGATE_EXPR, type,
4962 TREE_OPERAND (exp, 1))));
4963 goto plus_expr;
4965 this_optab = sub_optab;
4966 goto binop;
4968 case MULT_EXPR:
4969 preexpand_calls (exp);
4970 /* If first operand is constant, swap them.
4971 Thus the following special case checks need only
4972 check the second operand. */
4973 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
4975 register tree t1 = TREE_OPERAND (exp, 0);
4976 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
4977 TREE_OPERAND (exp, 1) = t1;
4980 /* Attempt to return something suitable for generating an
4981 indexed address, for machines that support that. */
4983 if (modifier == EXPAND_SUM && mode == Pmode
4984 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4985 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
4987 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, EXPAND_SUM);
4989 /* Apply distributive law if OP0 is x+c. */
4990 if (GET_CODE (op0) == PLUS
4991 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
4992 return gen_rtx (PLUS, mode,
4993 gen_rtx (MULT, mode, XEXP (op0, 0),
4994 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
4995 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
4996 * INTVAL (XEXP (op0, 1))));
4998 if (GET_CODE (op0) != REG)
4999 op0 = force_operand (op0, NULL_RTX);
5000 if (GET_CODE (op0) != REG)
5001 op0 = copy_to_mode_reg (mode, op0);
5003 return gen_rtx (MULT, mode, op0,
5004 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
5007 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5008 subtarget = 0;
5010 /* Check for multiplying things that have been extended
5011 from a narrower type. If this machine supports multiplying
5012 in that narrower type with a result in the desired type,
5013 do it that way, and avoid the explicit type-conversion. */
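/* For instance (a sketch; pattern names vary by target): on a machine
   providing `mulhisi3', (int) ((short) a * (short) b) can use one
   widening multiply insn instead of two extensions plus an SImode
   multiply.  */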
5014 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
5015 && TREE_CODE (type) == INTEGER_TYPE
5016 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
5017 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
5018 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
5019 && int_fits_type_p (TREE_OPERAND (exp, 1),
5020 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
5021 /* Don't use a widening multiply if a shift will do. */
5022 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
5023 > HOST_BITS_PER_WIDE_INT)
5024 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
5026 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
5027 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
5029 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
5030 /* If both operands are extended, they must either both
5031 be zero-extended or both be sign-extended. */
5032 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
5034 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
5036 enum machine_mode innermode
5037 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
5038 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
5039 ? umul_widen_optab : smul_widen_optab);
5040 if (mode == GET_MODE_WIDER_MODE (innermode)
5041 && this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
5043 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
5044 NULL_RTX, VOIDmode, 0);
5045 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
5046 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
5047 VOIDmode, 0);
5048 else
5049 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
5050 NULL_RTX, VOIDmode, 0);
5051 goto binop2;
5054 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5055 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5056 return expand_mult (mode, op0, op1, target, unsignedp);
5058 case TRUNC_DIV_EXPR:
5059 case FLOOR_DIV_EXPR:
5060 case CEIL_DIV_EXPR:
5061 case ROUND_DIV_EXPR:
5062 case EXACT_DIV_EXPR:
5063 preexpand_calls (exp);
5064 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5065 subtarget = 0;
5066 /* Possible optimization: compute the dividend with EXPAND_SUM;
5067 then, if the divisor is constant, we can optimize the case where
5068 some terms of the dividend have coefficients divisible by it. */
5069 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5070 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5071 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
5073 case RDIV_EXPR:
5074 this_optab = flodiv_optab;
5075 goto binop;
5077 case TRUNC_MOD_EXPR:
5078 case FLOOR_MOD_EXPR:
5079 case CEIL_MOD_EXPR:
5080 case ROUND_MOD_EXPR:
5081 preexpand_calls (exp);
5082 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5083 subtarget = 0;
5084 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5085 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5086 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
5088 case FIX_ROUND_EXPR:
5089 case FIX_FLOOR_EXPR:
5090 case FIX_CEIL_EXPR:
5091 abort (); /* Not used for C. */
5093 case FIX_TRUNC_EXPR:
5094 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
5095 if (target == 0)
5096 target = gen_reg_rtx (mode);
5097 expand_fix (target, op0, unsignedp);
5098 return target;
5100 case FLOAT_EXPR:
5101 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
5102 if (target == 0)
5103 target = gen_reg_rtx (mode);
5104 /* expand_float can't figure out what to do if FROM has VOIDmode.
5105 So give it the correct mode. With -O, cse will optimize this. */
5106 if (GET_MODE (op0) == VOIDmode)
5107 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
5108 op0);
5109 expand_float (target, op0,
5110 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
5111 return target;
5113 case NEGATE_EXPR:
5114 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5115 temp = expand_unop (mode, neg_optab, op0, target, 0);
5116 if (temp == 0)
5117 abort ();
5118 return temp;
5120 case ABS_EXPR:
5121 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5123 /* Handle complex values specially. */
5124 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
5125 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
5126 return expand_complex_abs (mode, op0, target, unsignedp);
5128 /* Unsigned abs is simply the operand. Testing here means we don't
5129 risk generating incorrect code below. */
5130 if (TREE_UNSIGNED (type))
5131 return op0;
5133 /* First try to do it with a special abs instruction. */
5134 temp = expand_unop (mode, abs_optab, op0, target, 0);
5135 if (temp != 0)
5136 return temp;
5138 /* If this machine has expensive jumps, we can do integer absolute
5139 value of X as (((signed) x >> (W-1)) ^ x) - ((signed) x >> (W-1)),
5140 where W is the width of MODE. */
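/* Worked example (W = 32, two's complement): for x = -5, x >> 31 is
   -1, so (x ^ -1) - (-1) = ~x + 1 = 5; for x >= 0 the shift yields 0
   and the value is unchanged.  */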
5142 if (GET_MODE_CLASS (mode) == MODE_INT && BRANCH_COST >= 2)
5144 rtx extended = expand_shift (RSHIFT_EXPR, mode, op0,
5145 size_int (GET_MODE_BITSIZE (mode) - 1),
5146 NULL_RTX, 0);
5148 temp = expand_binop (mode, xor_optab, extended, op0, target, 0,
5149 OPTAB_LIB_WIDEN);
5150 if (temp != 0)
5151 temp = expand_binop (mode, sub_optab, temp, extended, target, 0,
5152 OPTAB_LIB_WIDEN);
5154 if (temp != 0)
5155 return temp;
5158 /* If that does not win, use conditional jump and negate. */
5159 target = original_target;
5160 op1 = gen_label_rtx ();
5161 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 0))
5162 || GET_MODE (target) != mode
5163 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
5164 || (GET_CODE (target) == REG
5165 && REGNO (target) < FIRST_PSEUDO_REGISTER))
5166 target = gen_reg_rtx (mode);
5168 emit_move_insn (target, op0);
5169 NO_DEFER_POP;
5171 /* If this mode is an integer too wide to compare properly,
5172 compare word by word. Rely on CSE to optimize constant cases. */
5173 if (GET_MODE_CLASS (mode) == MODE_INT && ! can_compare_p (mode))
5174 do_jump_by_parts_greater_rtx (mode, 0, target, const0_rtx,
5175 NULL_RTX, op1);
5176 else
5178 temp = compare_from_rtx (target, CONST0_RTX (mode), GE, 0, mode,
5179 NULL_RTX, 0);
5180 if (temp == const1_rtx)
5181 return target;
5182 else if (temp != const0_rtx)
5184 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
5185 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op1));
5186 else
5187 abort ();
5191 op0 = expand_unop (mode, neg_optab, target, target, 0);
5192 if (op0 != target)
5193 emit_move_insn (target, op0);
5194 emit_label (op1);
5195 OK_DEFER_POP;
5196 return target;
5198 case MAX_EXPR:
5199 case MIN_EXPR:
5200 target = original_target;
5201 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1))
5202 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
5203 || GET_MODE (target) != mode
5204 || (GET_CODE (target) == REG
5205 && REGNO (target) < FIRST_PSEUDO_REGISTER))
5206 target = gen_reg_rtx (mode);
5207 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5208 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
5210 /* First try to do it with a special MIN or MAX instruction.
5211 If that does not win, use a conditional jump to select the proper
5212 value. */
5213 this_optab = (TREE_UNSIGNED (type)
5214 ? (code == MIN_EXPR ? umin_optab : umax_optab)
5215 : (code == MIN_EXPR ? smin_optab : smax_optab));
5217 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
5218 OPTAB_WIDEN);
5219 if (temp != 0)
5220 return temp;
5222 if (target != op0)
5223 emit_move_insn (target, op0);
5225 op0 = gen_label_rtx ();
5227 /* If this mode is an integer too wide to compare properly,
5228 compare word by word. Rely on cse to optimize constant cases. */
5229 if (GET_MODE_CLASS (mode) == MODE_INT && !can_compare_p (mode))
5231 if (code == MAX_EXPR)
5232 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
5233 target, op1, NULL_RTX, op0);
5234 else
5235 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
5236 op1, target, NULL_RTX, op0);
5237 emit_move_insn (target, op1);
5239 else
5241 if (code == MAX_EXPR)
5242 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
5243 ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
5244 : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
5245 else
5246 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
5247 ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
5248 : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
5249 if (temp == const0_rtx)
5250 emit_move_insn (target, op1);
5251 else if (temp != const_true_rtx)
5253 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
5254 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
5255 else
5256 abort ();
5257 emit_move_insn (target, op1);
5260 emit_label (op0);
5261 return target;
5263 case BIT_NOT_EXPR:
5264 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5265 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
5266 if (temp == 0)
5267 abort ();
5268 return temp;
5270 case FFS_EXPR:
5271 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5272 temp = expand_unop (mode, ffs_optab, op0, target, 1);
5273 if (temp == 0)
5274 abort ();
5275 return temp;
5277 /* ??? Can optimize bitwise operations with one arg constant.
5278 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
5279 and (a bitwise1 b) bitwise2 b (etc)
5280 but that is probably not worthwhile. */
5282 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
5283 boolean values when we want in all cases to compute both of them. In
5284 general it is fastest to do TRUTH_AND_EXPR by computing both operands
5285 as actual zero-or-1 values and then bitwise anding. In cases where
5286 there cannot be any side effects, better code would be made by
5287 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
5288 how to recognize those cases. */
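/* E.g. (a sketch): for TRUTH_AND_EXPR both operands are materialized
   as 0-or-1 values and then anded with no branches, whereas
   TRUTH_ANDIF_EXPR would branch around evaluating the second
   operand.  */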
5290 case TRUTH_AND_EXPR:
5291 case BIT_AND_EXPR:
5292 this_optab = and_optab;
5293 goto binop;
5295 case TRUTH_OR_EXPR:
5296 case BIT_IOR_EXPR:
5297 this_optab = ior_optab;
5298 goto binop;
5300 case TRUTH_XOR_EXPR:
5301 case BIT_XOR_EXPR:
5302 this_optab = xor_optab;
5303 goto binop;
5305 case LSHIFT_EXPR:
5306 case RSHIFT_EXPR:
5307 case LROTATE_EXPR:
5308 case RROTATE_EXPR:
5309 preexpand_calls (exp);
5310 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5311 subtarget = 0;
5312 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5313 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
5314 unsignedp);
5316 /* Could determine the answer when only additive constants differ. Also,
5317 the addition of one can be handled by changing the condition. */
5318 case LT_EXPR:
5319 case LE_EXPR:
5320 case GT_EXPR:
5321 case GE_EXPR:
5322 case EQ_EXPR:
5323 case NE_EXPR:
5324 preexpand_calls (exp);
5325 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
5326 if (temp != 0)
5327 return temp;
5329 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
5330 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
5331 && original_target
5332 && GET_CODE (original_target) == REG
5333 && (GET_MODE (original_target)
5334 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
5336 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
5337 VOIDmode, 0);
5339 if (temp != original_target)
5340 temp = copy_to_reg (temp);
5342 op1 = gen_label_rtx ();
5343 emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
5344 GET_MODE (temp), unsignedp, 0);
5345 emit_jump_insn (gen_beq (op1));
5346 emit_move_insn (temp, const1_rtx);
5347 emit_label (op1);
5348 return temp;
5351 /* If no set-flag instruction, must generate a conditional
5352 store into a temporary variable. Drop through
5353 and handle this like && and ||. */
5355 case TRUTH_ANDIF_EXPR:
5356 case TRUTH_ORIF_EXPR:
5357 if (! ignore
5358 && (target == 0 || ! safe_from_p (target, exp)
5359 /* Make sure we don't have a hard reg (such as function's return
5360 value) live across basic blocks, if not optimizing. */
5361 || (!optimize && GET_CODE (target) == REG
5362 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
5363 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5365 if (target)
5366 emit_clr_insn (target);
5368 op1 = gen_label_rtx ();
5369 jumpifnot (exp, op1);
5371 if (target)
5372 emit_0_to_1_insn (target);
5374 emit_label (op1);
5375 return ignore ? const0_rtx : target;
5377 case TRUTH_NOT_EXPR:
5378 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
5379 /* The parser is careful to generate TRUTH_NOT_EXPR
5380 only with operands that are always zero or one. */
5381 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
5382 target, 1, OPTAB_LIB_WIDEN);
5383 if (temp == 0)
5384 abort ();
5385 return temp;
5387 case COMPOUND_EXPR:
5388 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
5389 emit_queue ();
5390 return expand_expr (TREE_OPERAND (exp, 1),
5391 (ignore ? const0_rtx : target),
5392 VOIDmode, 0);
5394 case COND_EXPR:
5396 rtx flag = NULL_RTX;
5397 tree left_cleanups = NULL_TREE;
5398 tree right_cleanups = NULL_TREE;
5400 /* Used to save a pointer to the place to put the setting of
5401 the flag that indicates if this side of the conditional was
5402 taken. We backpatch the code if we find out later that we
5403 have any conditional cleanups that need to be performed. */
5404 rtx dest_right_flag = NULL_RTX;
5405 rtx dest_left_flag = NULL_RTX;
5407 /* Note that COND_EXPRs whose type is a structure or union
5408 are required to be constructed to contain assignments of
5409 a temporary variable, so that we can evaluate them here
5410 for side effect only. If type is void, we must do likewise. */
5412 /* If an arm of the branch requires a cleanup,
5413 only that cleanup is performed. */
5415 tree singleton = 0;
5416 tree binary_op = 0, unary_op = 0;
5417 tree old_cleanups = cleanups_this_call;
5419 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
5420 convert it to our mode, if necessary. */
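/* Added illustrative example: for
       r = (x < y ? 1 : 0);
   the comparison X < Y is already a truth value, so it is expanded
   directly (and converted to the wanted mode if need be); no branches
   are generated for the ?: itself.  */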
5421 if (integer_onep (TREE_OPERAND (exp, 1))
5422 && integer_zerop (TREE_OPERAND (exp, 2))
5423 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
5425 if (ignore)
5427 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
5428 modifier);
5429 return const0_rtx;
5432 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
5433 if (GET_MODE (op0) == mode)
5434 return op0;
5436 if (target == 0)
5437 target = gen_reg_rtx (mode);
5438 convert_move (target, op0, unsignedp);
5439 return target;
5442 /* If we are not to produce a result, we have no target. Otherwise,
5443 if a target was specified use it; it will not be used as an
5444 intermediate target unless it is safe. If no target, use a
5445 temporary. */
5447 if (ignore)
5448 temp = 0;
5449 else if (original_target
5450 && safe_from_p (original_target, TREE_OPERAND (exp, 0))
5451 && GET_MODE (original_target) == mode)
5452 temp = original_target;
5453 else if (mode == BLKmode)
5455 if (TYPE_SIZE (type) == 0
5456 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
5457 abort ();
5459 temp = assign_stack_temp (BLKmode,
5460 (TREE_INT_CST_LOW (TYPE_SIZE (type))
5461 + BITS_PER_UNIT - 1)
5462 / BITS_PER_UNIT, 0);
5463 MEM_IN_STRUCT_P (temp) = AGGREGATE_TYPE_P (type);
5465 else
5466 temp = gen_reg_rtx (mode);
5468 /* Check for X ? A + B : A. If we have this, we can copy
5469 A to the output and conditionally add B. Similarly for unary
5470 operations. Don't do this if X has side-effects because
5471 those side effects might affect A or B and the "?" operation is
5472 a sequence point in ANSI. (We test for side effects later.) */
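/* Added illustrative example: for
       r = (x ? a + b : a);
   when X has no side effects, A can be stored into R unconditionally
   and B added only when X holds, instead of branching between two
   complete assignments.  */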
5474 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
5475 && operand_equal_p (TREE_OPERAND (exp, 2),
5476 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
5477 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
5478 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
5479 && operand_equal_p (TREE_OPERAND (exp, 1),
5480 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
5481 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
5482 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
5483 && operand_equal_p (TREE_OPERAND (exp, 2),
5484 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
5485 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
5486 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
5487 && operand_equal_p (TREE_OPERAND (exp, 1),
5488 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
5489 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
5491 /* If we had X ? A + 1 : A and we can do the test of X as a store-flag
5492 operation, do this as A + (X != 0). Similarly for other simple
5493 binary operators. */
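/* Added illustrative example: with a store-flag instruction,
       r = (x ? a + 1 : a);
   becomes the branchless  r = a + (x != 0);  the mirrored form
   x ? a : a + 1  is handled by first inverting the condition.  */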
5494 if (temp && singleton && binary_op
5495 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
5496 && (TREE_CODE (binary_op) == PLUS_EXPR
5497 || TREE_CODE (binary_op) == MINUS_EXPR
5498 || TREE_CODE (binary_op) == BIT_IOR_EXPR
5499 || TREE_CODE (binary_op) == BIT_XOR_EXPR
5500 || TREE_CODE (binary_op) == BIT_AND_EXPR)
5501 && integer_onep (TREE_OPERAND (binary_op, 1))
5502 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
5504 rtx result;
5505 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
5506 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
5507 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
5508 : TREE_CODE (binary_op) == BIT_XOR_EXPR ? xor_optab
5509 : and_optab);
5511 /* If we had X ? A : A + 1, do this as A + (X == 0).
5513 We have to invert the truth value here and then put it
5514 back later if do_store_flag fails. We cannot simply copy
5515 TREE_OPERAND (exp, 0) to another variable and modify that
5516 because invert_truthvalue can modify the tree pointed to
5517 by its argument. */
5518 if (singleton == TREE_OPERAND (exp, 1))
5519 TREE_OPERAND (exp, 0)
5520 = invert_truthvalue (TREE_OPERAND (exp, 0));
5522 result = do_store_flag (TREE_OPERAND (exp, 0),
5523 (safe_from_p (temp, singleton)
5524 ? temp : NULL_RTX),
5525 mode, BRANCH_COST <= 1);
5527 if (result)
5529 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
5530 return expand_binop (mode, boptab, op1, result, temp,
5531 unsignedp, OPTAB_LIB_WIDEN);
5533 else if (singleton == TREE_OPERAND (exp, 1))
5534 TREE_OPERAND (exp, 0)
5535 = invert_truthvalue (TREE_OPERAND (exp, 0));
5538 NO_DEFER_POP;
5539 op0 = gen_label_rtx ();
5541 flag = gen_reg_rtx (word_mode);
5542 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
5544 if (temp != 0)
5546 /* If the target conflicts with the other operand of the
5547 binary op, we can't use it. Also, we can't use the target
5548 if it is a hard register, because evaluating the condition
5549 might clobber it. */
5550 if ((binary_op
5551 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1)))
5552 || (GET_CODE (temp) == REG
5553 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
5554 temp = gen_reg_rtx (mode);
5555 store_expr (singleton, temp, 0);
5557 else
5558 expand_expr (singleton,
5559 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5560 dest_left_flag = get_last_insn ();
5561 if (singleton == TREE_OPERAND (exp, 1))
5562 jumpif (TREE_OPERAND (exp, 0), op0);
5563 else
5564 jumpifnot (TREE_OPERAND (exp, 0), op0);
5566 /* Allows cleanups up to here. */
5567 old_cleanups = cleanups_this_call;
5568 if (binary_op && temp == 0)
5569 /* Just touch the other operand. */
5570 expand_expr (TREE_OPERAND (binary_op, 1),
5571 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5572 else if (binary_op)
5573 store_expr (build (TREE_CODE (binary_op), type,
5574 make_tree (type, temp),
5575 TREE_OPERAND (binary_op, 1)),
5576 temp, 0);
5577 else
5578 store_expr (build1 (TREE_CODE (unary_op), type,
5579 make_tree (type, temp)),
5580 temp, 0);
5581 op1 = op0;
5582 dest_right_flag = get_last_insn ();
5584 #if 0
5585 /* This is now done in jump.c and is better done there because it
5586 produces shorter register lifetimes. */
5588 /* Check for both possibilities either constants or variables
5589 in registers (but not the same as the target!). If so, can
5590 save branches by assigning one, branching, and assigning the
5591 other. */
5592 else if (temp && GET_MODE (temp) != BLKmode
5593 && (TREE_CONSTANT (TREE_OPERAND (exp, 1))
5594 || ((TREE_CODE (TREE_OPERAND (exp, 1)) == PARM_DECL
5595 || TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL)
5596 && DECL_RTL (TREE_OPERAND (exp, 1))
5597 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 1))) == REG
5598 && DECL_RTL (TREE_OPERAND (exp, 1)) != temp))
5599 && (TREE_CONSTANT (TREE_OPERAND (exp, 2))
5600 || ((TREE_CODE (TREE_OPERAND (exp, 2)) == PARM_DECL
5601 || TREE_CODE (TREE_OPERAND (exp, 2)) == VAR_DECL)
5602 && DECL_RTL (TREE_OPERAND (exp, 2))
5603 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 2))) == REG
5604 && DECL_RTL (TREE_OPERAND (exp, 2)) != temp)))
5606 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
5607 temp = gen_reg_rtx (mode);
5608 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5609 dest_left_flag = get_last_insn ();
5610 jumpifnot (TREE_OPERAND (exp, 0), op0);
5612 /* Allows cleanups up to here. */
5613 old_cleanups = cleanups_this_call;
5614 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5615 op1 = op0;
5616 dest_right_flag = get_last_insn ();
5618 #endif
5619 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
5620 comparison operator. If we have one of these cases, set the
5621 output to A, branch on A (cse will merge these two references),
5622 then set the output to FOO. */
5623 else if (temp
5624 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
5625 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
5626 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
5627 TREE_OPERAND (exp, 1), 0)
5628 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
5629 && safe_from_p (temp, TREE_OPERAND (exp, 2)))
5631 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
5632 temp = gen_reg_rtx (mode);
5633 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5634 dest_left_flag = get_last_insn ();
5635 jumpif (TREE_OPERAND (exp, 0), op0);
5637 /* Allows cleanups up to here. */
5638 old_cleanups = cleanups_this_call;
5639 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5640 op1 = op0;
5641 dest_right_flag = get_last_insn ();
5643 else if (temp
5644 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
5645 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
5646 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
5647 TREE_OPERAND (exp, 2), 0)
5648 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
5649 && safe_from_p (temp, TREE_OPERAND (exp, 1)))
5651 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
5652 temp = gen_reg_rtx (mode);
5653 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5654 dest_left_flag = get_last_insn ();
5655 jumpifnot (TREE_OPERAND (exp, 0), op0);
5657 /* Allows cleanups up to here. */
5658 old_cleanups = cleanups_this_call;
5659 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5660 op1 = op0;
5661 dest_right_flag = get_last_insn ();
5663 else
5665 op1 = gen_label_rtx ();
5666 jumpifnot (TREE_OPERAND (exp, 0), op0);
5668 /* Allows cleanups up to here. */
5669 old_cleanups = cleanups_this_call;
5670 if (temp != 0)
5671 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5672 else
5673 expand_expr (TREE_OPERAND (exp, 1),
5674 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5675 dest_left_flag = get_last_insn ();
5677 /* Handle conditional cleanups, if any. */
5678 left_cleanups = defer_cleanups_to (old_cleanups);
5680 emit_queue ();
5681 emit_jump_insn (gen_jump (op1));
5682 emit_barrier ();
5683 emit_label (op0);
5684 if (temp != 0)
5685 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5686 else
5687 expand_expr (TREE_OPERAND (exp, 2),
5688 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5689 dest_right_flag = get_last_insn ();
5692 /* Handle conditional cleanups, if any. */
5693 right_cleanups = defer_cleanups_to (old_cleanups);
5695 emit_queue ();
5696 emit_label (op1);
5697 OK_DEFER_POP;
5699 /* Add back in, any conditional cleanups. */
5700 if (left_cleanups || right_cleanups)
5702 tree new_cleanups;
5703 tree cond;
5704 rtx last;
5706 /* Now that we know that a flag is needed, go back and add in the
5707 setting of the flag. */
5709 /* Do the left side flag. */
5710 last = get_last_insn ();
5711 /* Flag left cleanups as needed. */
5712 emit_move_insn (flag, const1_rtx);
5713 /* ??? deprecated, use sequences instead. */
5714 reorder_insns (NEXT_INSN (last), get_last_insn (), dest_left_flag);
5716 /* Do the right side flag. */
5717 last = get_last_insn ();
5718 /* Flag right cleanups as needed. */
5719 emit_move_insn (flag, const0_rtx);
5720 /* ??? deprecated, use sequences instead. */
5721 reorder_insns (NEXT_INSN (last), get_last_insn (), dest_right_flag);
5723 /* Convert FLAG, which is an rtx, into a tree. */
5724 cond = make_node (RTL_EXPR);
5725 TREE_TYPE (cond) = integer_type_node;
5726 RTL_EXPR_RTL (cond) = flag;
5727 RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
5729 if (! left_cleanups)
5730 left_cleanups = integer_zero_node;
5731 if (! right_cleanups)
5732 right_cleanups = integer_zero_node;
5733 new_cleanups = build (COND_EXPR, void_type_node, cond,
5734 left_cleanups, right_cleanups);
5735 new_cleanups = fold (new_cleanups);
5737 /* Now add in the conditionalized cleanups. */
5738 cleanups_this_call
5739 = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
5740 (*interim_eh_hook) (NULL_TREE);
5742 return temp;
5745 case TARGET_EXPR:
5747 int need_exception_region = 0;
5748 /* Something needs to be initialized, but we didn't know
5749 where that thing was when building the tree. For example,
5750 it could be the return value of a function, or a parameter
5751 to a function which is laid down in the stack, or a temporary
5752 variable which must be passed by reference.
5754 We guarantee that the expression will either be constructed
5755 or copied into our original target. */
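/* Added illustrative example: a TARGET_EXPR can arise for code such as
       struct S s = f ();
   where F returns a struct; SLOT then names the variable or temporary
   that the call is required to construct its value into.  */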
5757 tree slot = TREE_OPERAND (exp, 0);
5758 tree exp1;
5759 rtx temp;
5761 if (TREE_CODE (slot) != VAR_DECL)
5762 abort ();
5764 if (target == 0)
5766 if (DECL_RTL (slot) != 0)
5768 target = DECL_RTL (slot);
5769 /* If we have already expanded the slot, don't do
5770 it again.  (mrs) */
5771 if (TREE_OPERAND (exp, 1) == NULL_TREE)
5772 return target;
5774 else
5776 target = assign_stack_temp (mode, int_size_in_bytes (type), 2);
5777 /* All temp slots at this level must not conflict. */
5778 preserve_temp_slots (target);
5779 DECL_RTL (slot) = target;
5781 /* Since SLOT is not known to the called function
5782 to belong to its stack frame, we must build an explicit
5783 cleanup. This case occurs when we must build up a reference
5784 to pass the reference as an argument. In this case,
5785 it is very likely that such a reference need not be
5786 built here. */
5788 if (TREE_OPERAND (exp, 2) == 0)
5789 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
5790 if (TREE_OPERAND (exp, 2))
5792 cleanups_this_call = tree_cons (NULL_TREE,
5793 TREE_OPERAND (exp, 2),
5794 cleanups_this_call);
5795 need_exception_region = 1;
5799 else
5801 /* This case does occur when expanding a parameter which
5802 needs to be constructed on the stack. The target
5803 is the actual stack address that we want to initialize.
5804 The function we call will perform the cleanup in this case. */
5806 /* If we have already assigned it space, use that space,
5807 not target that we were passed in, as our target
5808 parameter is only a hint. */
5809 if (DECL_RTL (slot) != 0)
5811 target = DECL_RTL (slot);
5812 /* If we have already expanded the slot, don't do
5813 it again.  (mrs) */
5814 if (TREE_OPERAND (exp, 1) == NULL_TREE)
5815 return target;
5818 DECL_RTL (slot) = target;
5821 exp1 = TREE_OPERAND (exp, 1);
5822 /* Mark it as expanded. */
5823 TREE_OPERAND (exp, 1) = NULL_TREE;
5825 temp = expand_expr (exp1, target, tmode, modifier);
5827 if (need_exception_region)
5828 (*interim_eh_hook) (NULL_TREE);
5830 return temp;
5833 case INIT_EXPR:
5835 tree lhs = TREE_OPERAND (exp, 0);
5836 tree rhs = TREE_OPERAND (exp, 1);
5837 tree noncopied_parts = 0;
5838 tree lhs_type = TREE_TYPE (lhs);
5840 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
5841 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
5842 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
5843 TYPE_NONCOPIED_PARTS (lhs_type));
5844 while (noncopied_parts != 0)
5846 expand_assignment (TREE_VALUE (noncopied_parts),
5847 TREE_PURPOSE (noncopied_parts), 0, 0);
5848 noncopied_parts = TREE_CHAIN (noncopied_parts);
5850 return temp;
5853 case MODIFY_EXPR:
5855 /* If lhs is complex, expand calls in rhs before computing it.
5856 That's so we don't compute a pointer and save it over a call.
5857 If lhs is simple, compute it first so we can give it as a
5858 target if the rhs is just a call. This avoids an extra temp and copy
5859 and that prevents a partial-subsumption which makes bad code.
5860 Actually we could treat component_ref's of vars like vars. */
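/* Added illustrative example: for the simple lhs in
       v = f (x);
   V is expanded first so its rtx can be passed to the call as the
   target, letting the return value land directly in V without an
   intermediate temporary and copy.  */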
5862 tree lhs = TREE_OPERAND (exp, 0);
5863 tree rhs = TREE_OPERAND (exp, 1);
5864 tree noncopied_parts = 0;
5865 tree lhs_type = TREE_TYPE (lhs);
5867 temp = 0;
5869 if (TREE_CODE (lhs) != VAR_DECL
5870 && TREE_CODE (lhs) != RESULT_DECL
5871 && TREE_CODE (lhs) != PARM_DECL)
5872 preexpand_calls (exp);
5874 /* Check for |= or &= of a bitfield of size one into another bitfield
5875 of size 1. In this case, (unless we need the result of the
5876 assignment) we can do this more efficiently with a
5877 test followed by an assignment, if necessary.
5879 ??? At this point, we can't get a BIT_FIELD_REF here. But if
5880 things change so we do, this code should be enhanced to
5881 support it. */
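/* Added illustrative example: with one-bit fields A and B, the
   statement  s.a |= s.b;  (value unused) is emitted as the cheaper
       if (s.b) s.a = 1;
   and  s.a &= s.b;  correspondingly as  if (! s.b) s.a = 0;  */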
5882 if (ignore
5883 && TREE_CODE (lhs) == COMPONENT_REF
5884 && (TREE_CODE (rhs) == BIT_IOR_EXPR
5885 || TREE_CODE (rhs) == BIT_AND_EXPR)
5886 && TREE_OPERAND (rhs, 0) == lhs
5887 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
5888 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
5889 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
5891 rtx label = gen_label_rtx ();
5893 do_jump (TREE_OPERAND (rhs, 1),
5894 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
5895 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
5896 expand_assignment (lhs, convert (TREE_TYPE (rhs),
5897 (TREE_CODE (rhs) == BIT_IOR_EXPR
5898 ? integer_one_node
5899 : integer_zero_node)),
5900 0, 0);
5901 do_pending_stack_adjust ();
5902 emit_label (label);
5903 return const0_rtx;
5906 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
5907 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
5908 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
5909 TYPE_NONCOPIED_PARTS (lhs_type));
5911 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
5912 while (noncopied_parts != 0)
5914 expand_assignment (TREE_PURPOSE (noncopied_parts),
5915 TREE_VALUE (noncopied_parts), 0, 0);
5916 noncopied_parts = TREE_CHAIN (noncopied_parts);
5918 return temp;
5921 case PREINCREMENT_EXPR:
5922 case PREDECREMENT_EXPR:
5923 return expand_increment (exp, 0);
5925 case POSTINCREMENT_EXPR:
5926 case POSTDECREMENT_EXPR:
5927 /* Faster to treat as pre-increment if result is not used. */
5928 return expand_increment (exp, ! ignore);
5930 case ADDR_EXPR:
5931 /* If nonzero, TEMP will be set to the address of something that might
5932 be a MEM corresponding to a stack slot. */
5933 temp = 0;
5935 /* Are we taking the address of a nested function? */
5936 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
5937 && decl_function_context (TREE_OPERAND (exp, 0)) != 0)
5939 op0 = trampoline_address (TREE_OPERAND (exp, 0));
5940 op0 = force_operand (op0, target);
5942 /* If we are taking the address of something erroneous, just
5943 return a zero. */
5944 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
5945 return const0_rtx;
5946 else
5948 /* We make sure to pass const0_rtx down if we came in with
5949 ignore set, to avoid running the cleanups twice. */
5950 op0 = expand_expr (TREE_OPERAND (exp, 0),
5951 ignore ? const0_rtx : NULL_RTX, VOIDmode,
5952 (modifier == EXPAND_INITIALIZER
5953 ? modifier : EXPAND_CONST_ADDRESS));
5955 /* If we are going to ignore the result, OP0 will have been set
5956 to const0_rtx, so just return it. Don't get confused and
5957 think we are taking the address of the constant. */
5958 if (ignore)
5959 return op0;
5961 /* We would like the object in memory. If it is a constant,
5962 we can have it be statically allocated into memory. For
5963 a non-constant (REG, SUBREG or CONCAT), we need to allocate some
5964 memory and store the value into it. */
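/* Added illustrative note: this path is taken when the operand lives
   only in a register -- e.g. the address of a SAVE_EXPR's saved value
   -- in which case the value is spilled to a fresh stack temporary and
   that temporary's address is returned instead.  */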
5966 if (CONSTANT_P (op0))
5967 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
5968 op0);
5969 else if (GET_CODE (op0) == MEM)
5970 temp = XEXP (op0, 0);
5972 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
5973 || GET_CODE (op0) == CONCAT)
5975 /* If this object is in a register, it must not
5976 be BLKmode. */
5977 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
5978 enum machine_mode inner_mode = TYPE_MODE (inner_type);
5979 rtx memloc
5980 = assign_stack_temp (inner_mode,
5981 int_size_in_bytes (inner_type), 1);
5983 emit_move_insn (memloc, op0);
5984 op0 = memloc;
5987 if (GET_CODE (op0) != MEM)
5988 abort ();
5990 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
5991 return XEXP (op0, 0);
5993 op0 = force_operand (XEXP (op0, 0), target);
5996 if (flag_force_addr && GET_CODE (op0) != REG)
5997 op0 = force_reg (Pmode, op0);
5999 if (GET_CODE (op0) == REG)
6000 mark_reg_pointer (op0);
6002 /* If we might have had a temp slot, add an equivalent address
6003 for it. */
6004 if (temp != 0)
6005 update_temp_slot_address (temp, op0);
6007 return op0;
6009 case ENTRY_VALUE_EXPR:
6010 abort ();
6012 /* COMPLEX type for Extended Pascal & Fortran */
6013 case COMPLEX_EXPR:
6015 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6016 rtx insns;
6018 /* Get the rtx code of the operands. */
6019 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
6020 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
6022 if (! target)
6023 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
6025 start_sequence ();
6027 /* Move the real (op0) and imaginary (op1) parts to their location. */
6028 emit_move_insn (gen_realpart (mode, target), op0);
6029 emit_move_insn (gen_imagpart (mode, target), op1);
6031 insns = get_insns ();
6032 end_sequence ();
6034 /* Complex construction should appear as a single unit. */
6035 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
6036 each with a separate pseudo as destination.
6037 It's not correct for flow to treat them as a unit. */
6038 if (GET_CODE (target) != CONCAT)
6039 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
6040 else
6041 emit_insns (insns);
6043 return target;
6046 case REALPART_EXPR:
6047 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
6048 return gen_realpart (mode, op0);
6050 case IMAGPART_EXPR:
6051 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
6052 return gen_imagpart (mode, op0);
6054 case CONJ_EXPR:
6056 rtx imag_t;
6057 rtx insns;
6059 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
6061 if (! target)
6062 target = gen_reg_rtx (mode);
6064 start_sequence ();
6066 /* Store the realpart and the negated imagpart to target. */
6067 emit_move_insn (gen_realpart (mode, target), gen_realpart (mode, op0));
6069 imag_t = gen_imagpart (mode, target);
6070 temp = expand_unop (mode, neg_optab,
6071 gen_imagpart (mode, op0), imag_t, 0);
6072 if (temp != imag_t)
6073 emit_move_insn (imag_t, temp);
6075 insns = get_insns ();
6076 end_sequence ();
6078 /* Conjugate should appear as a single unit.
6079 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
6080 each with a separate pseudo as destination.
6081 It's not correct for flow to treat them as a unit. */
6082 if (GET_CODE (target) != CONCAT)
6083 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
6084 else
6085 emit_insns (insns);
6087 return target;
6090 case ERROR_MARK:
6091 op0 = CONST0_RTX (tmode);
6092 if (op0 != 0)
6093 return op0;
6094 return const0_rtx;
6096 default:
6097 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
6100 /* Here to do an ordinary binary operator, generating an instruction
6101 from the optab already placed in `this_optab'. */
6102 binop:
6103 preexpand_calls (exp);
6104 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6105 subtarget = 0;
6106 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6107 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6108 binop2:
6109 temp = expand_binop (mode, this_optab, op0, op1, target,
6110 unsignedp, OPTAB_LIB_WIDEN);
6111 if (temp == 0)
6112 abort ();
6113 return temp;
6117 /* Emit bytecode to evaluate the given expression EXP to the stack. */
6118 void
6119 bc_expand_expr (exp)
6120 tree exp;
6122 enum tree_code code;
6123 tree type, arg0;
6124 rtx r;
6125 struct binary_operator *binoptab;
6126 struct unary_operator *unoptab;
6127 struct increment_operator *incroptab;
6128 struct bc_label *lab, *lab1;
6129 enum bytecode_opcode opcode;
6132 code = TREE_CODE (exp);
6134 switch (code)
6136 case PARM_DECL:
6138 if (DECL_RTL (exp) == 0)
6140 error_with_decl (exp, "prior parameter's size depends on `%s'");
6141 return;
6144 bc_load_parmaddr (DECL_RTL (exp));
6145 bc_load_memory (TREE_TYPE (exp), exp);
6147 return;
6149 case VAR_DECL:
6151 if (DECL_RTL (exp) == 0)
6152 abort ();
6154 #if 0
6155 if (BYTECODE_LABEL (DECL_RTL (exp)))
6156 bc_load_externaddr (DECL_RTL (exp));
6157 else
6158 bc_load_localaddr (DECL_RTL (exp));
6159 #endif
6160 if (TREE_PUBLIC (exp))
6161 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
6162 BYTECODE_BC_LABEL (DECL_RTL (exp))->offset);
6163 else
6164 bc_load_localaddr (DECL_RTL (exp));
6166 bc_load_memory (TREE_TYPE (exp), exp);
6167 return;
6169 case INTEGER_CST:
6171 #ifdef DEBUG_PRINT_CODE
6172 fprintf (stderr, " [%x]\n", TREE_INT_CST_LOW (exp));
6173 #endif
6174 bc_emit_instruction (mode_to_const_map[(int) (DECL_BIT_FIELD (exp)
6175 ? SImode
6176 : TYPE_MODE (TREE_TYPE (exp)))],
6177 (HOST_WIDE_INT) TREE_INT_CST_LOW (exp));
6178 return;
6180 case REAL_CST:
6182 #if 0
6183 #ifdef DEBUG_PRINT_CODE
6184 fprintf (stderr, " [%g]\n", (double) TREE_INT_CST_LOW (exp));
6185 #endif
6186 /* FIX THIS: find a better way to pass real_cst's. -bson */
6187 bc_emit_instruction (mode_to_const_map[TYPE_MODE (TREE_TYPE (exp))],
6188 (double) TREE_REAL_CST (exp));
6189 #else
6190 abort ();
6191 #endif
6193 return;
6195 case CALL_EXPR:
6197 /* We build a call description vector describing the type of
6198 the return value and of the arguments; this call vector,
6199 together with a pointer to a location for the return value
6200 and the base of the argument list, is passed to the low
6201 level machine dependent call subroutine, which is responsible
6202 for putting the arguments wherever real functions expect
6203 them, as well as getting the return value back. */
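/* Added illustrative sketch: for a call  f (a, b)  the vector built
   below is laid out approximately as
       { 2, ret_type, ret_size, type_a, size_a, type_b, size_b }
   where the type entries are whatever bc_runtime_type_code produces
   and the sizes come from size_in_bytes.  */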
6205 tree calldesc = 0, arg;
6206 int nargs = 0, i;
6207 rtx retval;
6209 /* Push the evaluated args on the evaluation stack in reverse
6210 order. Also make an entry for each arg in the calldesc
6211 vector while we're at it. */
6213 TREE_OPERAND (exp, 1) = nreverse (TREE_OPERAND (exp, 1));
6215 for (arg = TREE_OPERAND (exp, 1); arg; arg = TREE_CHAIN (arg))
6217 ++nargs;
6218 bc_expand_expr (TREE_VALUE (arg));
6220 calldesc = tree_cons ((tree) 0,
6221 size_in_bytes (TREE_TYPE (TREE_VALUE (arg))),
6222 calldesc);
6223 calldesc = tree_cons ((tree) 0,
6224 bc_runtime_type_code (TREE_TYPE (TREE_VALUE (arg))),
6225 calldesc);
6228 TREE_OPERAND (exp, 1) = nreverse (TREE_OPERAND (exp, 1));
6230 /* Allocate a location for the return value and push its
6231 address on the evaluation stack. Also make an entry
6232 at the front of the calldesc for the return value type. */
6234 type = TREE_TYPE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
6235 retval = bc_allocate_local (int_size_in_bytes (type), TYPE_ALIGN (type));
6236 bc_load_localaddr (retval);
6238 calldesc = tree_cons ((tree) 0, size_in_bytes (type), calldesc);
6239 calldesc = tree_cons ((tree) 0, bc_runtime_type_code (type), calldesc);
6241 /* Prepend the argument count. */
6242 calldesc = tree_cons ((tree) 0,
6243 build_int_2 (nargs, 0),
6244 calldesc);
6246 /* Push the address of the call description vector on the stack. */
6247 calldesc = build_nt (CONSTRUCTOR, (tree) 0, calldesc);
6248 TREE_TYPE (calldesc) = build_array_type (integer_type_node,
6249 build_index_type (build_int_2 (nargs * 2, 0)));
6250 r = output_constant_def (calldesc);
6251 bc_load_externaddr (r);
6253 /* Push the address of the function to be called. */
6254 bc_expand_expr (TREE_OPERAND (exp, 0));
6256 /* Call the function, popping its address and the calldesc vector
6257 address off the evaluation stack in the process. */
6258 bc_emit_instruction (call);
6260 /* Pop the arguments off the stack. */
6261 bc_adjust_stack (nargs);
6263 /* Load the return value onto the stack. */
6264 bc_load_localaddr (retval);
6265 bc_load_memory (type, TREE_OPERAND (exp, 0));
6267 return;
6269 case SAVE_EXPR:
6271 if (!SAVE_EXPR_RTL (exp))
6273 /* First time around: copy to local variable */
6274 SAVE_EXPR_RTL (exp) = bc_allocate_local (int_size_in_bytes (TREE_TYPE (exp)),
6275 TYPE_ALIGN (TREE_TYPE (exp)));
6276 bc_expand_expr (TREE_OPERAND (exp, 0));
6277 bc_emit_instruction (duplicate);
6279 bc_load_localaddr (SAVE_EXPR_RTL (exp));
6280 bc_store_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
6282 else
6284 /* Consecutive reference: use saved copy */
6285 bc_load_localaddr (SAVE_EXPR_RTL (exp));
6286 bc_load_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
6288 return;
6290 #if 0
6291 /* FIXME: the XXXX_STMT codes have been removed in GCC2, but
6292 how are they handled instead? */
6293 case LET_STMT:
6295 TREE_USED (exp) = 1;
6296 bc_expand_expr (STMT_BODY (exp));
6297 return;
6298 #endif
6300 case NOP_EXPR:
6301 case CONVERT_EXPR:
6303 bc_expand_expr (TREE_OPERAND (exp, 0));
6304 bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)), TREE_TYPE (exp));
6305 return;
6307 case MODIFY_EXPR:
6309 expand_assignment (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1), 0, 0);
6310 return;
6312 case ADDR_EXPR:
6314 bc_expand_address (TREE_OPERAND (exp, 0));
6315 return;
6317 case INDIRECT_REF:
6319 bc_expand_expr (TREE_OPERAND (exp, 0));
6320 bc_load_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
6321 return;
6323 case ARRAY_REF:
6325 bc_expand_expr (bc_canonicalize_array_ref (exp));
6326 return;
6328 case COMPONENT_REF:
6330 bc_expand_component_address (exp);
6332 /* If we have a bitfield, generate a proper load */
6333 bc_load_memory (TREE_TYPE (TREE_OPERAND (exp, 1)), TREE_OPERAND (exp, 1));
6334 return;
6336 case COMPOUND_EXPR:
6338 bc_expand_expr (TREE_OPERAND (exp, 0));
6339 bc_emit_instruction (drop);
6340 bc_expand_expr (TREE_OPERAND (exp, 1));
6341 return;
6343 case COND_EXPR:
6345 bc_expand_expr (TREE_OPERAND (exp, 0));
6346 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)));
6347 lab = bc_get_bytecode_label ();
6348 bc_emit_bytecode (xjumpifnot);
6349 bc_emit_bytecode_labelref (lab);
6351 #ifdef DEBUG_PRINT_CODE
6352 fputc ('\n', stderr);
6353 #endif
6354 bc_expand_expr (TREE_OPERAND (exp, 1));
6355 lab1 = bc_get_bytecode_label ();
6356 bc_emit_bytecode (jump);
6357 bc_emit_bytecode_labelref (lab1);
6359 #ifdef DEBUG_PRINT_CODE
6360 fputc ('\n', stderr);
6361 #endif
6363 bc_emit_bytecode_labeldef (lab);
6364 bc_expand_expr (TREE_OPERAND (exp, 2));
6365 bc_emit_bytecode_labeldef (lab1);
6366 return;
6368 case TRUTH_ANDIF_EXPR:
6370 opcode = xjumpifnot;
6371 goto andorif;
6373 case TRUTH_ORIF_EXPR:
6375 opcode = xjumpif;
6376 goto andorif;
6378 case PLUS_EXPR:
6380 binoptab = optab_plus_expr;
6381 goto binop;
6383 case MINUS_EXPR:
6385 binoptab = optab_minus_expr;
6386 goto binop;
6388 case MULT_EXPR:
6390 binoptab = optab_mult_expr;
6391 goto binop;
6393 case TRUNC_DIV_EXPR:
6394 case FLOOR_DIV_EXPR:
6395 case CEIL_DIV_EXPR:
6396 case ROUND_DIV_EXPR:
6397 case EXACT_DIV_EXPR:
6399 binoptab = optab_trunc_div_expr;
6400 goto binop;
6402 case TRUNC_MOD_EXPR:
6403 case FLOOR_MOD_EXPR:
6404 case CEIL_MOD_EXPR:
6405 case ROUND_MOD_EXPR:
6407 binoptab = optab_trunc_mod_expr;
6408 goto binop;
6410 case FIX_ROUND_EXPR:
6411 case FIX_FLOOR_EXPR:
6412 case FIX_CEIL_EXPR:
6413 abort (); /* Not used for C. */
6415 case FIX_TRUNC_EXPR:
6416 case FLOAT_EXPR:
6417 case MAX_EXPR:
6418 case MIN_EXPR:
6419 case FFS_EXPR:
6420 case LROTATE_EXPR:
6421 case RROTATE_EXPR:
6422 abort (); /* FIXME */
6424 case RDIV_EXPR:
6426 binoptab = optab_rdiv_expr;
6427 goto binop;
6429 case BIT_AND_EXPR:
6431 binoptab = optab_bit_and_expr;
6432 goto binop;
6434 case BIT_IOR_EXPR:
6436 binoptab = optab_bit_ior_expr;
6437 goto binop;
6439 case BIT_XOR_EXPR:
6441 binoptab = optab_bit_xor_expr;
6442 goto binop;
6444 case LSHIFT_EXPR:
6446 binoptab = optab_lshift_expr;
6447 goto binop;
6449 case RSHIFT_EXPR:
6451 binoptab = optab_rshift_expr;
6452 goto binop;
6454 case TRUTH_AND_EXPR:
6456 binoptab = optab_truth_and_expr;
6457 goto binop;
6459 case TRUTH_OR_EXPR:
6461 binoptab = optab_truth_or_expr;
6462 goto binop;
6464 case LT_EXPR:
6466 binoptab = optab_lt_expr;
6467 goto binop;
6469 case LE_EXPR:
6471 binoptab = optab_le_expr;
6472 goto binop;
6474 case GE_EXPR:
6476 binoptab = optab_ge_expr;
6477 goto binop;
6479 case GT_EXPR:
6481 binoptab = optab_gt_expr;
6482 goto binop;
6484 case EQ_EXPR:
6486 binoptab = optab_eq_expr;
6487 goto binop;
6489 case NE_EXPR:
6491 binoptab = optab_ne_expr;
6492 goto binop;
6494 case NEGATE_EXPR:
6496 unoptab = optab_negate_expr;
6497 goto unop;
6499 case BIT_NOT_EXPR:
6501 unoptab = optab_bit_not_expr;
6502 goto unop;
6504 case TRUTH_NOT_EXPR:
6506 unoptab = optab_truth_not_expr;
6507 goto unop;
6509 case PREDECREMENT_EXPR:
6511 incroptab = optab_predecrement_expr;
6512 goto increment;
6514 case PREINCREMENT_EXPR:
6516 incroptab = optab_preincrement_expr;
6517 goto increment;
6519 case POSTDECREMENT_EXPR:
6521 incroptab = optab_postdecrement_expr;
6522 goto increment;
6524 case POSTINCREMENT_EXPR:
6526 incroptab = optab_postincrement_expr;
6527 goto increment;
6529 case CONSTRUCTOR:
6531 bc_expand_constructor (exp);
6532 return;
6534 case ERROR_MARK:
6535 case RTL_EXPR:
6537 return;
6539 case BIND_EXPR:
6541 tree vars = TREE_OPERAND (exp, 0);
6542 int vars_need_expansion = 0;
6544 /* Need to open a binding contour here because
6545 if there are any cleanups they must be contained here. */
6546 expand_start_bindings (0);
6548 /* Mark the corresponding BLOCK for output. */
6549 if (TREE_OPERAND (exp, 2) != 0)
6550 TREE_USED (TREE_OPERAND (exp, 2)) = 1;
6552 /* If VARS have not yet been expanded, expand them now. */
6553 while (vars)
6555 if (DECL_RTL (vars) == 0)
6557 vars_need_expansion = 1;
6558 expand_decl (vars);
6560 expand_decl_init (vars);
6561 vars = TREE_CHAIN (vars);
6564 bc_expand_expr (TREE_OPERAND (exp, 1));
6566 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6568 return;
6572 abort ();
6574 binop:
6576 bc_expand_binary_operation (binoptab, TREE_TYPE (exp),
6577 TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1));
6578 return;
6581 unop:
6583 bc_expand_unary_operation (unoptab, TREE_TYPE (exp), TREE_OPERAND (exp, 0));
6584 return;
6587 andorif:
6589 bc_expand_expr (TREE_OPERAND (exp, 0));
6590 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)));
6591 lab = bc_get_bytecode_label ();
6593 bc_emit_instruction (duplicate);
6594 bc_emit_bytecode (opcode);
6595 bc_emit_bytecode_labelref (lab);
6597 #ifdef DEBUG_PRINT_CODE
6598 fputc ('\n', stderr);
6599 #endif
6601 bc_emit_instruction (drop);
6603 bc_expand_expr (TREE_OPERAND (exp, 1));
6604 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 1)));
6605 bc_emit_bytecode_labeldef (lab);
6606 return;
6609 increment:
6611 type = TREE_TYPE (TREE_OPERAND (exp, 0));
6613 /* Push the quantum. */
6614 bc_expand_expr (TREE_OPERAND (exp, 1));
6616 /* Convert it to the lvalue's type. */
6617 bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp, 1)), type);
6619 /* Push the address of the lvalue */
6620 bc_expand_expr (build1 (ADDR_EXPR, TYPE_POINTER_TO (type), TREE_OPERAND (exp, 0)));
6622 /* Perform actual increment */
6623 bc_expand_increment (incroptab, type);
6624 return;
6627 /* Return the alignment in bits of EXP, a pointer valued expression.
6628 But don't return more than MAX_ALIGN no matter what.
6629 The alignment returned is, by default, the alignment of the thing that
6630 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
6632 Otherwise, look at the expression to see if we can do better, i.e., if the
6633 expression is actually pointing at an object whose alignment is tighter. */
6635 static int
6636 get_pointer_alignment (exp, max_align)
6637 tree exp;
6638 unsigned max_align;
6640 unsigned align, inner;
6642 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
6643 return 0;
6645 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
6646 align = MIN (align, max_align);
6648 while (1)
6650 switch (TREE_CODE (exp))
6652 case NOP_EXPR:
6653 case CONVERT_EXPR:
6654 case NON_LVALUE_EXPR:
6655 exp = TREE_OPERAND (exp, 0);
6656 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
6657 return align;
6658 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
6659 align = MIN (inner, max_align);
6660 break;
6662 case PLUS_EXPR:
6663 /* If sum of pointer + int, restrict our maximum alignment to that
6664 imposed by the integer. If not, we can't do any better than
6665 ALIGN. */
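/* Added worked example: for  p + 6  the offset contributes
   6 * 8 == 48 bits; with MAX_ALIGN of 32 bits, 48 is a multiple of 16
   but not of 32, so the loop below halves MAX_ALIGN to 16 before P
   itself is examined.  */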
6666 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
6667 return align;
6669 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
6670 & (max_align - 1))
6671 != 0)
6672 max_align >>= 1;
6674 exp = TREE_OPERAND (exp, 0);
6675 break;
6677 case ADDR_EXPR:
6678 /* See what we are pointing at and look at its alignment. */
6679 exp = TREE_OPERAND (exp, 0);
6680 if (TREE_CODE (exp) == FUNCTION_DECL)
6681 align = FUNCTION_BOUNDARY;
6682 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
6683 align = DECL_ALIGN (exp);
6684 #ifdef CONSTANT_ALIGNMENT
6685 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
6686 align = CONSTANT_ALIGNMENT (exp, align);
6687 #endif
6688 return MIN (align, max_align);
6690 default:
6691 return align;
6696 /* Return the tree node and offset if a given argument corresponds to
6697 a string constant. */
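/* Added illustrative example: the argument tree for  "hello" + 2
   yields the STRING_CST "hello" with *PTR_OFFSET set to 2, while a
   bare  "hello"  yields offset zero.  */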
6699 static tree
6700 string_constant (arg, ptr_offset)
6701 tree arg;
6702 tree *ptr_offset;
6704 STRIP_NOPS (arg);
6706 if (TREE_CODE (arg) == ADDR_EXPR
6707 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
6709 *ptr_offset = integer_zero_node;
6710 return TREE_OPERAND (arg, 0);
6712 else if (TREE_CODE (arg) == PLUS_EXPR)
6714 tree arg0 = TREE_OPERAND (arg, 0);
6715 tree arg1 = TREE_OPERAND (arg, 1);
6717 STRIP_NOPS (arg0);
6718 STRIP_NOPS (arg1);
6720 if (TREE_CODE (arg0) == ADDR_EXPR
6721 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
6723 *ptr_offset = arg1;
6724 return TREE_OPERAND (arg0, 0);
6726 else if (TREE_CODE (arg1) == ADDR_EXPR
6727 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
6729 *ptr_offset = arg0;
6730 return TREE_OPERAND (arg1, 0);
6734 return 0;
6737 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
6738 way, because it could contain a zero byte in the middle.
6739 TREE_STRING_LENGTH is the size of the character array, not the string.
6741 Unfortunately, string_constant can't access the values of const char
6742 arrays with initializers, so neither can we here. */
6744 static tree
6745 c_strlen (src)
6746 tree src;
6748 tree offset_node;
6749 int offset, max;
6750 char *ptr;
6752 src = string_constant (src, &offset_node);
6753 if (src == 0)
6754 return 0;
6755 max = TREE_STRING_LENGTH (src);
6756 ptr = TREE_STRING_POINTER (src);
6757 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
6759 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
6760 compute the offset to the following null if we don't know where to
6761 start searching for it. */
6762 int i;
6763 for (i = 0; i < max; i++)
6764 if (ptr[i] == 0)
6765 return 0;
6766 /* We don't know the starting offset, but we do know that the string
6767 has no internal zero bytes. We can assume that the offset falls
6768 within the bounds of the string; otherwise, the programmer deserves
6769 what he gets. Subtract the offset from the length of the string,
6770 and return that. */
6771 /* This would perhaps not be valid if we were dealing with named
6772 arrays in addition to literal string constants. */
6773 return size_binop (MINUS_EXPR, size_int (max), offset_node);
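/* Added illustrative example: for  strlen (s + n)  with nonconstant N
   and  s == (char[4])"abcd",  no zero byte lies within the 4 counted
   bytes and build_string pads a null right after them, so  4 - N  is
   exact.  */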
6776 /* We have a known offset into the string. Start searching there for
6777 a null character. */
6778 if (offset_node == 0)
6779 offset = 0;
6780 else
6782 /* Did we get a long long offset? If so, punt. */
6783 if (TREE_INT_CST_HIGH (offset_node) != 0)
6784 return 0;
6785 offset = TREE_INT_CST_LOW (offset_node);
6787 /* If the offset is known to be out of bounds, warn, and call strlen at
6788 runtime. */
6789 if (offset < 0 || offset > max)
6791 warning ("offset outside bounds of constant string");
6792 return 0;
6794 /* Use strlen to search for the first zero byte. Since any strings
6795 constructed with build_string will have nulls appended, we win even
6796 if we get handed something like (char[4])"abcd".
6798 Since OFFSET is our starting index into the string, no further
6799 calculation is needed. */
6800 return size_int (strlen (ptr + offset));
6803 /* Expand an expression EXP that calls a built-in function,
6804 with result going to TARGET if that's convenient
6805 (and in mode MODE if that's convenient).
6806 SUBTARGET may be used as the target for computing one of EXP's operands.
6807 IGNORE is nonzero if the value is to be ignored. */
6809 #define CALLED_AS_BUILT_IN(NODE) \
6810 (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10))
6812 static rtx
6813 expand_builtin (exp, target, subtarget, mode, ignore)
6814 tree exp;
6815 rtx target;
6816 rtx subtarget;
6817 enum machine_mode mode;
6818 int ignore;
6820 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6821 tree arglist = TREE_OPERAND (exp, 1);
6822 rtx op0;
6823 rtx lab1, insns;
6824 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
6825 optab builtin_optab;
6827 switch (DECL_FUNCTION_CODE (fndecl))
6829 case BUILT_IN_ABS:
6830 case BUILT_IN_LABS:
6831 case BUILT_IN_FABS:
6832 /* build_function_call changes these into ABS_EXPR. */
6833 abort ();
6835 case BUILT_IN_SIN:
6836 case BUILT_IN_COS:
6837 case BUILT_IN_FSQRT:
6838 /* If not optimizing, call the library function. */
6839 if (! optimize)
6840 break;
6842 if (arglist == 0
6843 /* Arg could be wrong type if user redeclared this fcn wrong. */
6844 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
6845 break;
6847 /* Stabilize and compute the argument. */
6848 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
6849 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
6851 exp = copy_node (exp);
6852 arglist = copy_node (arglist);
6853 TREE_OPERAND (exp, 1) = arglist;
6854 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
6856 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
6858 /* Make a suitable register to place result in. */
6859 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
6861 emit_queue ();
6862 start_sequence ();
6864 switch (DECL_FUNCTION_CODE (fndecl))
6866 case BUILT_IN_SIN:
6867 builtin_optab = sin_optab; break;
6868 case BUILT_IN_COS:
6869 builtin_optab = cos_optab; break;
6870 case BUILT_IN_FSQRT:
6871 builtin_optab = sqrt_optab; break;
6872 default:
6873 abort ();
6876 /* Compute into TARGET.
6877 Set TARGET to wherever the result comes back. */
6878 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
6879 builtin_optab, op0, target, 0);
6881 /* If we were unable to expand via the builtin, stop the
6882 sequence (without outputting the insns) and break, causing
6883 a call to the library function. */
6884 if (target == 0)
6886 end_sequence ();
6887 break;
6890 /* Check the results by default. But if flag_fast_math is turned on,
6891 then assume sqrt will always be called with valid arguments. */
6893 if (! flag_fast_math)
6895 /* Don't define the builtin FP instructions
6896 if your machine is not IEEE. */
6897 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
6898 abort ();
6900 lab1 = gen_label_rtx ();
6902 /* Test the result; if it is NaN, set errno=EDOM because
6903 the argument was not in the domain. */
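/* Added note: a NaN is the only value that compares unequal to itself,
   so the TARGET == TARGET test below fails exactly when the result is
   a NaN, and the branch skips the errno code otherwise.  */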
6904 emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
6905 emit_jump_insn (gen_beq (lab1));
6907 #ifdef TARGET_EDOM
6909 #ifdef GEN_ERRNO_RTX
6910 rtx errno_rtx = GEN_ERRNO_RTX;
6911 #else
6912 rtx errno_rtx
6913 = gen_rtx (MEM, word_mode, gen_rtx (SYMBOL_REF, Pmode, "errno"));
6914 #endif
6916 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
6918 #else
6919 /* We can't set errno=EDOM directly; let the library call do it.
6920 Pop the arguments right away in case the call gets deleted. */
6921 NO_DEFER_POP;
6922 expand_call (exp, target, 0);
6923 OK_DEFER_POP;
6924 #endif
6926 emit_label (lab1);
6929 /* Output the entire sequence. */
6930 insns = get_insns ();
6931 end_sequence ();
6932 emit_insns (insns);
6934 return target;
6936 /* __builtin_apply_args returns block of memory allocated on
6937 the stack into which is stored the arg pointer, structure
6938 value address, static chain, and all the registers that might
6939 possibly be used in performing a function call. The code is
6940 moved to the start of the function so the incoming values are
6941 saved. */
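/* Added illustrative usage:
       void *args = __builtin_apply_args ();
   ARGS then points at the saved arg pointer, structure value address,
   static chain and registers, in the form expected by
   __builtin_apply.  */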
6942 case BUILT_IN_APPLY_ARGS:
6943 /* Don't do __builtin_apply_args more than once in a function.
6944 Save the result of the first call and reuse it. */
6945 if (apply_args_value != 0)
6946 return apply_args_value;
6948 /* When this function is called, it means that registers must be
6949 saved on entry to this function. So we migrate the
6950 call to the first insn of this function. */
6951 rtx temp;
6952 rtx seq;
6954 start_sequence ();
6955 temp = expand_builtin_apply_args ();
6956 seq = get_insns ();
6957 end_sequence ();
6959 apply_args_value = temp;
6961 /* Put the sequence after the NOTE that starts the function.
6962 If this is inside a SEQUENCE, make the outer-level insn
6963 chain current, so the code is placed at the start of the
6964 function. */
6965 push_topmost_sequence ();
6966 emit_insns_before (seq, NEXT_INSN (get_insns ()));
6967 pop_topmost_sequence ();
6968 return temp;
6971 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6972 FUNCTION with a copy of the parameters described by
6973 ARGUMENTS, and ARGSIZE. It returns a block of memory
6974 allocated on the stack into which is stored all the registers
6975 that might possibly be used for returning the result of a
6976 function. ARGUMENTS is the value returned by
6977 __builtin_apply_args. ARGSIZE is the number of bytes of
6978 arguments that must be copied. ??? How should this value be
6979 computed? We'll also need a safe worst case value for varargs
6980 functions. */
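/* Added illustrative usage: a transparent forwarding function might be
   written
       __builtin_return (__builtin_apply ((void (*) ()) f,
                                          __builtin_apply_args (), 64));
   where F and the worst-case argument size 64 are assumptions of this
   example, not anything defined here.  */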
6981 case BUILT_IN_APPLY:
6982 if (arglist == 0
6983 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6984 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
6985 || TREE_CHAIN (arglist) == 0
6986 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
6987 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
6988 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
6989 return const0_rtx;
6990 else
6992 int i;
6993 tree t;
6994 rtx ops[3];
6996 for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
6997 ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);
6999 return expand_builtin_apply (ops[0], ops[1], ops[2]);
7002 /* __builtin_return (RESULT) causes the function to return the
7003 value described by RESULT. RESULT is address of the block of
7004 memory returned by __builtin_apply. */
7005 case BUILT_IN_RETURN:
7006 if (arglist
7007 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7008 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE)
7009 expand_builtin_return (expand_expr (TREE_VALUE (arglist),
7010 NULL_RTX, VOIDmode, 0));
7011 return const0_rtx;
7013 case BUILT_IN_SAVEREGS:
7014 /* Don't do __builtin_saveregs more than once in a function.
7015 Save the result of the first call and reuse it. */
7016 if (saveregs_value != 0)
7017 return saveregs_value;
7019 /* When this function is called, it means that registers must be
7020 saved on entry to this function. So we migrate the
7021 call to the first insn of this function. */
7022 rtx temp;
7023 rtx seq;
7025 /* Now really call the function. `expand_call' does not call
7026 expand_builtin, so there is no danger of infinite recursion here. */
7027 start_sequence ();
7029 #ifdef EXPAND_BUILTIN_SAVEREGS
7030 /* Do whatever the machine needs done in this case. */
7031 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
7032 #else
7033 /* The register where the function returns its value
7034 is likely to have something else in it, such as an argument.
7035 So preserve that register around the call. */
7037 if (value_mode != VOIDmode)
7039 rtx valreg = hard_libcall_value (value_mode);
7040 rtx saved_valreg = gen_reg_rtx (value_mode);
7042 emit_move_insn (saved_valreg, valreg);
7043 temp = expand_call (exp, target, ignore);
7044 emit_move_insn (valreg, saved_valreg);
7046 else
7047 /* Generate the call, putting the value in a pseudo. */
7048 temp = expand_call (exp, target, ignore);
7049 #endif
7051 seq = get_insns ();
7052 end_sequence ();
7054 saveregs_value = temp;
7056 /* Put the sequence after the NOTE that starts the function.
7057 If this is inside a SEQUENCE, make the outer-level insn
7058 chain current, so the code is placed at the start of the
7059 function. */
7060 push_topmost_sequence ();
7061 emit_insns_before (seq, NEXT_INSN (get_insns ()));
7062 pop_topmost_sequence ();
7063 return temp;
7066 /* __builtin_args_info (N) returns word N of the arg space info
7067 for the current function. The number and meanings of words
7068 are controlled by the definition of CUMULATIVE_ARGS. */
7069 case BUILT_IN_ARGS_INFO:
7071 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
7072 int i;
7073 int *word_ptr = (int *) &current_function_args_info;
7074 tree type, elts, result;
7076 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
7077 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
7078 __FILE__, __LINE__);
7080 if (arglist != 0)
7082 tree arg = TREE_VALUE (arglist);
7083 if (TREE_CODE (arg) != INTEGER_CST)
7084 error ("argument of `__builtin_args_info' must be constant");
7085 else
7087 int wordnum = TREE_INT_CST_LOW (arg);
7089 if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg))
7090 error ("argument of `__builtin_args_info' out of range");
7091 else
7092 return GEN_INT (word_ptr[wordnum]);
7095 else
7096 error ("missing argument in `__builtin_args_info'");
7098 return const0_rtx;
7100 #if 0
7101 for (i = 0; i < nwords; i++)
7102 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0));
7104 type = build_array_type (integer_type_node,
7105 build_index_type (build_int_2 (nwords, 0)));
7106 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
7107 TREE_CONSTANT (result) = 1;
7108 TREE_STATIC (result) = 1;
7109 result = build (INDIRECT_REF, build_pointer_type (type), result);
7110 TREE_CONSTANT (result) = 1;
7111 return expand_expr (result, NULL_RTX, VOIDmode, 0);
7112 #endif
7115 /* Return the address of the first anonymous stack arg. */
7116 case BUILT_IN_NEXT_ARG:
7118 tree fntype = TREE_TYPE (current_function_decl);
7120 if ((TYPE_ARG_TYPES (fntype) == 0
7121 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
7122 == void_type_node))
7123 && ! current_function_varargs)
7125 error ("`va_start' used in function with fixed args");
7126 return const0_rtx;
7129 if (arglist)
7131 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
7132 tree arg = TREE_VALUE (arglist);
7134 /* Strip off all nops for the sake of the comparison. This
7135 is not quite the same as STRIP_NOPS. It does more. */
7136 while (TREE_CODE (arg) == NOP_EXPR
7137 || TREE_CODE (arg) == CONVERT_EXPR
7138 || TREE_CODE (arg) == NON_LVALUE_EXPR)
7139 arg = TREE_OPERAND (arg, 0);
7140 if (arg != last_parm)
7141 warning ("second parameter of `va_start' not last named argument");
7143 else
7144 /* Evidently an out of date version of <stdarg.h>; can't validate
7145 va_start's second argument, but can still work as intended. */
7146 warning ("`__builtin_next_arg' called without an argument");
7149 return expand_binop (Pmode, add_optab,
7150 current_function_internal_arg_pointer,
7151 current_function_arg_offset_rtx,
7152 NULL_RTX, 0, OPTAB_LIB_WIDEN);
7154 case BUILT_IN_CLASSIFY_TYPE:
7155 if (arglist != 0)
7157 tree type = TREE_TYPE (TREE_VALUE (arglist));
7158 enum tree_code code = TREE_CODE (type);
7159 if (code == VOID_TYPE)
7160 return GEN_INT (void_type_class);
7161 if (code == INTEGER_TYPE)
7162 return GEN_INT (integer_type_class);
7163 if (code == CHAR_TYPE)
7164 return GEN_INT (char_type_class);
7165 if (code == ENUMERAL_TYPE)
7166 return GEN_INT (enumeral_type_class);
7167 if (code == BOOLEAN_TYPE)
7168 return GEN_INT (boolean_type_class);
7169 if (code == POINTER_TYPE)
7170 return GEN_INT (pointer_type_class);
7171 if (code == REFERENCE_TYPE)
7172 return GEN_INT (reference_type_class);
7173 if (code == OFFSET_TYPE)
7174 return GEN_INT (offset_type_class);
7175 if (code == REAL_TYPE)
7176 return GEN_INT (real_type_class);
7177 if (code == COMPLEX_TYPE)
7178 return GEN_INT (complex_type_class);
7179 if (code == FUNCTION_TYPE)
7180 return GEN_INT (function_type_class);
7181 if (code == METHOD_TYPE)
7182 return GEN_INT (method_type_class);
7183 if (code == RECORD_TYPE)
7184 return GEN_INT (record_type_class);
7185 if (code == UNION_TYPE || code == QUAL_UNION_TYPE)
7186 return GEN_INT (union_type_class);
7187 if (code == ARRAY_TYPE)
7189 if (TYPE_STRING_FLAG (type))
7190 return GEN_INT (string_type_class);
7191 else
7192 return GEN_INT (array_type_class);
7194 if (code == SET_TYPE)
7195 return GEN_INT (set_type_class);
7196 if (code == FILE_TYPE)
7197 return GEN_INT (file_type_class);
7198 if (code == LANG_TYPE)
7199 return GEN_INT (lang_type_class);
7201 return GEN_INT (no_type_class);
7203 case BUILT_IN_CONSTANT_P:
7204 if (arglist == 0)
7205 return const0_rtx;
7206 else
7207 return (TREE_CODE_CLASS (TREE_CODE (TREE_VALUE (arglist))) == 'c'
7208 ? const1_rtx : const0_rtx);
7210 case BUILT_IN_FRAME_ADDRESS:
7211 /* The argument must be a nonnegative integer constant.
7212 It counts the number of frames to scan up the stack.
7213 The value is the address of that frame. */
7214 case BUILT_IN_RETURN_ADDRESS:
7215 /* The argument must be a nonnegative integer constant.
7216 It counts the number of frames to scan up the stack.
7217 The value is the return address saved in that frame. */
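/* Added illustrative usage:
       void *ra = __builtin_return_address (0);
   yields the return address of the current frame; a count of 1 names
   the caller's frame, each increment scanning one frame further up as
   in the loop below.  */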
7218 if (arglist == 0)
7219 /* Warning about missing arg was already issued. */
7220 return const0_rtx;
7221 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST)
7223 error ("invalid arg to `__builtin_return_address'");
7224 return const0_rtx;
7226 else if (tree_int_cst_sgn (TREE_VALUE (arglist)) < 0)
7228 error ("invalid arg to `__builtin_return_address'");
7229 return const0_rtx;
7231 else
7233 int count = TREE_INT_CST_LOW (TREE_VALUE (arglist));
7234 rtx tem = frame_pointer_rtx;
7235 int i;
7237 /* Some machines need special handling before we can access arbitrary
7238 frames. For example, on the sparc, we must first flush all
7239 register windows to the stack. */
7240 #ifdef SETUP_FRAME_ADDRESSES
7241 SETUP_FRAME_ADDRESSES ();
7242 #endif
7244 /* On the sparc, the return address is not in the frame, it is
7245 in a register. There is no way to access it off of the current
7246 frame pointer, but it can be accessed off the previous frame
7247 pointer by reading the value from the register window save
7248 area. */
7249 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
7250 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_RETURN_ADDRESS)
7251 count--;
7252 #endif
7254 /* Scan back COUNT frames to the specified frame. */
7255 for (i = 0; i < count; i++)
7257 /* Assume the dynamic chain pointer is in the word that
7258 the frame address points to, unless otherwise specified. */
7259 #ifdef DYNAMIC_CHAIN_ADDRESS
7260 tem = DYNAMIC_CHAIN_ADDRESS (tem);
7261 #endif
7262 tem = memory_address (Pmode, tem);
7263 tem = copy_to_reg (gen_rtx (MEM, Pmode, tem));
7266 /* For __builtin_frame_address, return what we've got. */
7267 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
7268 return tem;
7270 /* For __builtin_return_address,
7271 Get the return address from that frame. */
7272 #ifdef RETURN_ADDR_RTX
7273 return RETURN_ADDR_RTX (count, tem);
7274 #else
7275 tem = memory_address (Pmode,
7276 plus_constant (tem, GET_MODE_SIZE (Pmode)));
7277 return copy_to_reg (gen_rtx (MEM, Pmode, tem));
7278 #endif
7281 case BUILT_IN_ALLOCA:
7282 if (arglist == 0
7283 /* Arg could be non-integer if user redeclared this fcn wrong. */
7284 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
7285 break;
7287 /* Compute the argument. */
7288 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
7290 /* Allocate the desired space. */
7291 return allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
7293 case BUILT_IN_FFS:
7294 /* If not optimizing, call the library function. */
7295 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
7296 break;
7298 if (arglist == 0
7299 /* Arg could be non-integer if user redeclared this fcn wrong. */
7300 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
7301 break;
7303 /* Compute the argument. */
7304 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
7305 /* Compute ffs, into TARGET if possible.
7306 Set TARGET to wherever the result comes back. */
7307 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
7308 ffs_optab, op0, target, 1);
7309 if (target == 0)
7310 abort ();
7311 return target;
7313 case BUILT_IN_STRLEN:
7314 /* If not optimizing, call the library function. */
7315 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
7316 break;
7318 if (arglist == 0
7319 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7320 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
7321 break;
7322 else
7324 tree src = TREE_VALUE (arglist);
7325 tree len = c_strlen (src);
7327 int align
7328 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
7330 rtx result, src_rtx, char_rtx;
7331 enum machine_mode insn_mode = value_mode, char_mode;
7332 enum insn_code icode;
7334 /* If the length is known, just return it. */
7335 if (len != 0)
7336 return expand_expr (len, target, mode, 0);
7338 /* If SRC is not a pointer type, don't do this operation inline. */
7339 if (align == 0)
7340 break;
7342 /* Call a function if we can't compute strlen in the right mode. */
7344 while (insn_mode != VOIDmode)
7346 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
7347 if (icode != CODE_FOR_nothing)
7348 break;
7350 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
7352 if (insn_mode == VOIDmode)
7353 break;
7355 /* Make a place to write the result of the instruction. */
7356 result = target;
7357 if (! (result != 0
7358 && GET_CODE (result) == REG
7359 && GET_MODE (result) == insn_mode
7360 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
7361 result = gen_reg_rtx (insn_mode);
7363 /* Make sure the operands are acceptable to the predicates. */
7365 if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
7366 result = gen_reg_rtx (insn_mode);
7368 src_rtx = memory_address (BLKmode,
7369 expand_expr (src, NULL_RTX, Pmode,
7370 EXPAND_NORMAL));
7371 if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
7372 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
7374 char_rtx = const0_rtx;
7375 char_mode = insn_operand_mode[(int)icode][2];
7376 if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
7377 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
7379 emit_insn (GEN_FCN (icode) (result,
7380 gen_rtx (MEM, BLKmode, src_rtx),
7381 char_rtx, GEN_INT (align)));
7383 /* Return the value in the proper mode for this function. */
7384 if (GET_MODE (result) == value_mode)
7385 return result;
7386 else if (target != 0)
7388 convert_move (target, result, 0);
7389 return target;
7391 else
7392 return convert_to_mode (value_mode, result, 0);
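/* Illustration (sketch): when c_strlen can see the bytes of the string,
   the whole call folds to a constant; otherwise the strlen insn pattern,
   or failing that the library routine, does the work at run time.  */
#if 0
int
literal_length ()
{
  return __builtin_strlen ("hello");  /* folds to the constant 5 */
}
#endif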
7395 case BUILT_IN_STRCPY:
7396 /* If not optimizing, call the library function. */
7397 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
7398 break;
7400 if (arglist == 0
7401 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7402 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
7403 || TREE_CHAIN (arglist) == 0
7404 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
7405 break;
7406 else
7408 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
7410 if (len == 0)
7411 break;
7413 len = size_binop (PLUS_EXPR, len, integer_one_node);
7415 chainon (arglist, build_tree_list (NULL_TREE, len));
7418 /* Drops in. */
7419 case BUILT_IN_MEMCPY:
7420 /* If not optimizing, call the library function. */
7421 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
7422 break;
7424 if (arglist == 0
7425 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7426 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
7427 || TREE_CHAIN (arglist) == 0
7428 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
7429 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
7430 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
7431 break;
7432 else
7434 tree dest = TREE_VALUE (arglist);
7435 tree src = TREE_VALUE (TREE_CHAIN (arglist));
7436 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
7438 int src_align
7439 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
7440 int dest_align
7441 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
7442 rtx dest_rtx, dest_mem, src_mem;
7444 /* If either SRC or DEST is not a pointer type, don't do
7445 this operation in-line. */
7446 if (src_align == 0 || dest_align == 0)
7448 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
7449 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
7450 break;
7453 dest_rtx = expand_expr (dest, NULL_RTX, Pmode, EXPAND_NORMAL);
7454 dest_mem = gen_rtx (MEM, BLKmode,
7455 memory_address (BLKmode, dest_rtx));
7456 src_mem = gen_rtx (MEM, BLKmode,
7457 memory_address (BLKmode,
7458 expand_expr (src, NULL_RTX,
7459 Pmode,
7460 EXPAND_NORMAL)));
7462 /* Copy word part most expediently. */
7463 emit_block_move (dest_mem, src_mem,
7464 expand_expr (len, NULL_RTX, VOIDmode, 0),
7465 MIN (src_align, dest_align));
7466 return dest_rtx;
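/* Illustration of the strcpy transformation above (sketch): when the
   source length is a compile-time constant, a length operand of
   strlen + 1 is appended so the terminating null is copied, and the
   call drops into the memcpy block-move expansion.  */
#if 0
char dst[6];

void
copy_constant ()
{
  __builtin_strcpy (dst, "abcde");
  /* ...is expanded as if it were...  */
  __builtin_memcpy (dst, "abcde", 6);  /* strlen ("abcde") + 1 */
}
#endif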
7469 /* These comparison functions need an instruction that returns an actual
7470 index. An ordinary compare that just sets the condition codes
7471 is not enough. */
7472 #ifdef HAVE_cmpstrsi
7473 case BUILT_IN_STRCMP:
7474 /* If not optimizing, call the library function. */
7475 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
7476 break;
7478 if (arglist == 0
7479 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7480 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
7481 || TREE_CHAIN (arglist) == 0
7482 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
7483 break;
7484 else if (!HAVE_cmpstrsi)
7485 break;
7487 tree arg1 = TREE_VALUE (arglist);
7488 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
7489 tree offset;
7490 tree len, len2;
7492 len = c_strlen (arg1);
7493 if (len)
7494 len = size_binop (PLUS_EXPR, integer_one_node, len);
7495 len2 = c_strlen (arg2);
7496 if (len2)
7497 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
7499 /* If we don't have a constant length for the first, use the length
7500 of the second, if we know it. We don't require a constant for
7501 this case; some cost analysis could be done if both are available
7502 but neither is constant. For now, assume they're equally cheap.
7504 If both strings have constant lengths, use the smaller. This
7505 could arise if optimization results in strcpy being called with
7506 two fixed strings, or if the code was machine-generated. We should
7507 add some code to the `memcmp' handler below to deal with such
7508 situations, someday. */
7509 if (!len || TREE_CODE (len) != INTEGER_CST)
7511 if (len2)
7512 len = len2;
7513 else if (len == 0)
7514 break;
7516 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
7518 if (tree_int_cst_lt (len2, len))
7519 len = len2;
7522 chainon (arglist, build_tree_list (NULL_TREE, len));
7525 /* Drops in. */
7526 case BUILT_IN_MEMCMP:
7527 /* If not optimizing, call the library function. */
7528 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
7529 break;
7531 if (arglist == 0
7532 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7533 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
7534 || TREE_CHAIN (arglist) == 0
7535 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
7536 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
7537 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
7538 break;
7539 else if (!HAVE_cmpstrsi)
7540 break;
7542 tree arg1 = TREE_VALUE (arglist);
7543 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
7544 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
7545 rtx result;
7547 int arg1_align
7548 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
7549 int arg2_align
7550 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
7551 enum machine_mode insn_mode
7552 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
7554 /* If we don't have POINTER_TYPE, call the function. */
7555 if (arg1_align == 0 || arg2_align == 0)
7557 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
7558 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
7559 break;
7562 /* Make a place to write the result of the instruction. */
7563 result = target;
7564 if (! (result != 0
7565 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
7566 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
7567 result = gen_reg_rtx (insn_mode);
7569 emit_insn (gen_cmpstrsi (result,
7570 gen_rtx (MEM, BLKmode,
7571 expand_expr (arg1, NULL_RTX, Pmode,
7572 EXPAND_NORMAL)),
7573 gen_rtx (MEM, BLKmode,
7574 expand_expr (arg2, NULL_RTX, Pmode,
7575 EXPAND_NORMAL)),
7576 expand_expr (len, NULL_RTX, VOIDmode, 0),
7577 GEN_INT (MIN (arg1_align, arg2_align))));
7579 /* Return the value in the proper mode for this function. */
7580 mode = TYPE_MODE (TREE_TYPE (exp));
7581 if (GET_MODE (result) == mode)
7582 return result;
7583 else if (target != 0)
7585 convert_move (target, result, 0);
7586 return target;
7588 else
7589 return convert_to_mode (mode, result, 0);
7591 #else
7592 case BUILT_IN_STRCMP:
7593 case BUILT_IN_MEMCMP:
7594 break;
7595 #endif
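/* Illustration of the length selection above (sketch): a constant
   operand contributes strlen + 1 bytes, so the terminating null takes
   part in the comparison and a shorter string compares unequal.  */
#if 0
int
against_literal (s)
     char *s;
{
  return __builtin_strcmp (s, "abc");  /* cmpstrsi gets length 4 */
}
#endif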
7597 default: /* just do library call, if unknown builtin */
7598 error ("built-in function `%s' not currently supported",
7599 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
7602 /* The switch statement above can drop through to cause the function
7603 to be called normally. */
7605 return expand_call (exp, target, ignore);
7608 /* Built-in functions to perform an untyped call and return. */
7610 /* For each register that may be used for calling a function, this
7611 gives a mode used to copy the register's value. VOIDmode indicates
7612 the register is not used for calling a function. If the machine
7613 has register windows, this gives only the outbound registers.
7614 INCOMING_REGNO gives the corresponding inbound register. */
7615 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
7617 /* For each register that may be used for returning values, this gives
7618 a mode used to copy the register's value. VOIDmode indicates the
7619 register is not used for returning values. If the machine has
7620 register windows, this gives only the outbound registers.
7621 INCOMING_REGNO gives the corresponding inbound register. */
7622 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
7624 /* For each register that may be used for calling a function, this
7625 gives the offset of that register into the block returned by
7626 __builtin_apply_args. 0 indicates that the register is not
7627 used for calling a function. */
7628 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
7630 /* Return the offset of register REGNO into the block returned by
7631 __builtin_apply_args. This is not declared static, since it is
7632 needed in objc-act.c. */
7634 int
7635 apply_args_register_offset (regno)
7636 int regno;
7638 apply_args_size ();
7640 /* Arguments are always put in outgoing registers (in the argument
7641 block) if such make sense. */
7642 #ifdef OUTGOING_REGNO
7643 regno = OUTGOING_REGNO(regno);
7644 #endif
7645 return apply_args_reg_offset[regno];
7648 /* Return the size required for the block returned by __builtin_apply_args,
7649 and initialize apply_args_mode. */
7651 static int
7652 apply_args_size ()
7654 static int size = -1;
7655 int align, regno;
7656 enum machine_mode mode;
7658 /* The values computed by this function never change. */
7659 if (size < 0)
7661 /* The first value is the incoming arg-pointer. */
7662 size = GET_MODE_SIZE (Pmode);
7664 /* The second value is the structure value address unless this is
7665 passed as an "invisible" first argument. */
7666 if (struct_value_rtx)
7667 size += GET_MODE_SIZE (Pmode);
7669 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7670 if (FUNCTION_ARG_REGNO_P (regno))
7672 /* Search for the proper mode for copying this register's
7673 value. I'm not sure this is right, but it works so far. */
7674 enum machine_mode best_mode = VOIDmode;
7676 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
7677 mode != VOIDmode;
7678 mode = GET_MODE_WIDER_MODE (mode))
7679 if (HARD_REGNO_MODE_OK (regno, mode)
7680 && HARD_REGNO_NREGS (regno, mode) == 1)
7681 best_mode = mode;
7683 if (best_mode == VOIDmode)
7684 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
7685 mode != VOIDmode;
7686 mode = GET_MODE_WIDER_MODE (mode))
7687 if (HARD_REGNO_MODE_OK (regno, mode)
7688 && (mov_optab->handlers[(int) mode].insn_code
7689 != CODE_FOR_nothing))
7690 best_mode = mode;
7692 mode = best_mode;
7693 if (mode == VOIDmode)
7694 abort ();
7696 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
7697 if (size % align != 0)
7698 size = CEIL (size, align) * align;
7699 apply_args_reg_offset[regno] = size;
7700 size += GET_MODE_SIZE (mode);
7701 apply_args_mode[regno] = mode;
7703 else
7705 apply_args_mode[regno] = VOIDmode;
7706 apply_args_reg_offset[regno] = 0;
7709 return size;
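/* The offset arithmetic above in miniature (sketch): the running size is
   rounded up to the mode's alignment with the CEIL macro from the top of
   this file before each register's slot is assigned.  */
#if 0
static int
aligned_offset (size, align)
     int size, align;
{
  if (size % align != 0)
    size = CEIL (size, align) * align;  /* e.g. size 6, align 4 => 8 */
  return size;
}
#endif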
7712 /* Return the size required for the block returned by __builtin_apply,
7713 and initialize apply_result_mode. */
7715 static int
7716 apply_result_size ()
7718 static int size = -1;
7719 int align, regno;
7720 enum machine_mode mode;
7722 /* The values computed by this function never change. */
7723 if (size < 0)
7725 size = 0;
7727 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7728 if (FUNCTION_VALUE_REGNO_P (regno))
7730 /* Search for the proper mode for copying this register's
7731 value. I'm not sure this is right, but it works so far. */
7732 enum machine_mode best_mode = VOIDmode;
7734 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
7735 mode != TImode;
7736 mode = GET_MODE_WIDER_MODE (mode))
7737 if (HARD_REGNO_MODE_OK (regno, mode))
7738 best_mode = mode;
7740 if (best_mode == VOIDmode)
7741 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
7742 mode != VOIDmode;
7743 mode = GET_MODE_WIDER_MODE (mode))
7744 if (HARD_REGNO_MODE_OK (regno, mode)
7745 && (mov_optab->handlers[(int) mode].insn_code
7746 != CODE_FOR_nothing))
7747 best_mode = mode;
7749 mode = best_mode;
7750 if (mode == VOIDmode)
7751 abort ();
7753 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
7754 if (size % align != 0)
7755 size = CEIL (size, align) * align;
7756 size += GET_MODE_SIZE (mode);
7757 apply_result_mode[regno] = mode;
7759 else
7760 apply_result_mode[regno] = VOIDmode;
7762 /* Allow targets that use untyped_call and untyped_return to override
7763 the size so that machine-specific information can be stored here. */
7764 #ifdef APPLY_RESULT_SIZE
7765 size = APPLY_RESULT_SIZE;
7766 #endif
7768 return size;
7771 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
7772 /* Create a vector describing the result block RESULT. If SAVEP is true,
7773 the result block is used to save the values; otherwise it is used to
7774 restore the values. */
7776 static rtx
7777 result_vector (savep, result)
7778 int savep;
7779 rtx result;
7781 int regno, size, align, nelts;
7782 enum machine_mode mode;
7783 rtx reg, mem;
7784 rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
7786 size = nelts = 0;
7787 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7788 if ((mode = apply_result_mode[regno]) != VOIDmode)
7790 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
7791 if (size % align != 0)
7792 size = CEIL (size, align) * align;
7793 reg = gen_rtx (REG, mode, savep ? INCOMING_REGNO (regno) : regno);
7794 mem = change_address (result, mode,
7795 plus_constant (XEXP (result, 0), size));
7796 savevec[nelts++] = (savep
7797 ? gen_rtx (SET, VOIDmode, mem, reg)
7798 : gen_rtx (SET, VOIDmode, reg, mem));
7799 size += GET_MODE_SIZE (mode);
7801 return gen_rtx (PARALLEL, VOIDmode, gen_rtvec_v (nelts, savevec));
7803 #endif /* HAVE_untyped_call or HAVE_untyped_return */
7805 /* Save the state required to perform an untyped call with the same
7806 arguments as were passed to the current function. */
7808 static rtx
7809 expand_builtin_apply_args ()
7811 rtx registers;
7812 int size, align, regno;
7813 enum machine_mode mode;
7815 /* Create a block where the arg-pointer, structure value address,
7816 and argument registers can be saved. */
7817 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
7819 /* Walk past the arg-pointer and structure value address. */
7820 size = GET_MODE_SIZE (Pmode);
7821 if (struct_value_rtx)
7822 size += GET_MODE_SIZE (Pmode);
7824 /* Save each register used in calling a function to the block. */
7825 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7826 if ((mode = apply_args_mode[regno]) != VOIDmode)
7828 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
7829 if (size % align != 0)
7830 size = CEIL (size, align) * align;
7831 emit_move_insn (change_address (registers, mode,
7832 plus_constant (XEXP (registers, 0),
7833 size)),
7834 gen_rtx (REG, mode, INCOMING_REGNO (regno)));
7835 size += GET_MODE_SIZE (mode);
7838 /* Save the arg pointer to the block. */
7839 emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)),
7840 copy_to_reg (virtual_incoming_args_rtx));
7841 size = GET_MODE_SIZE (Pmode);
7843 /* Save the structure value address unless this is passed as an
7844 "invisible" first argument. */
7845 if (struct_value_incoming_rtx)
7847 emit_move_insn (change_address (registers, Pmode,
7848 plus_constant (XEXP (registers, 0),
7849 size)),
7850 copy_to_reg (struct_value_incoming_rtx));
7851 size += GET_MODE_SIZE (Pmode);
7854 /* Return the address of the block. */
7855 return copy_addr_to_reg (XEXP (registers, 0));
7858 /* Perform an untyped call and save the state required to perform an
7859 untyped return of whatever value was returned by the given function. */
7861 static rtx
7862 expand_builtin_apply (function, arguments, argsize)
7863 rtx function, arguments, argsize;
7865 int size, align, regno;
7866 enum machine_mode mode;
7867 rtx incoming_args, result, reg, dest, call_insn;
7868 rtx old_stack_level = 0;
7869 rtx call_fusage = 0;
7871 /* Create a block where the return registers can be saved. */
7872 result = assign_stack_local (BLKmode, apply_result_size (), -1);
7874 /* ??? The argsize value should be adjusted here. */
7876 /* Fetch the arg pointer from the ARGUMENTS block. */
7877 incoming_args = gen_reg_rtx (Pmode);
7878 emit_move_insn (incoming_args,
7879 gen_rtx (MEM, Pmode, arguments));
7880 #ifndef STACK_GROWS_DOWNWARD
7881 incoming_args = expand_binop (Pmode, sub_optab, incoming_args, argsize,
7882 incoming_args, 0, OPTAB_LIB_WIDEN);
7883 #endif
7885 /* Perform postincrements before actually calling the function. */
7886 emit_queue ();
7888 /* Push a new argument block and copy the arguments. */
7889 do_pending_stack_adjust ();
7890 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
7892 /* Push a block of memory onto the stack to store the memory arguments.
7893 Save the address in a register, and copy the memory arguments. ??? I
7894 haven't figured out how the calling convention macros affect this,
7895 but it's likely that the source and/or destination addresses in
7896 the block copy will need updating in machine specific ways. */
7897 dest = copy_addr_to_reg (push_block (argsize, 0, 0));
7898 emit_block_move (gen_rtx (MEM, BLKmode, dest),
7899 gen_rtx (MEM, BLKmode, incoming_args),
7900 argsize,
7901 PARM_BOUNDARY / BITS_PER_UNIT);
7903 /* Refer to the argument block. */
7904 apply_args_size ();
7905 arguments = gen_rtx (MEM, BLKmode, arguments);
7907 /* Walk past the arg-pointer and structure value address. */
7908 size = GET_MODE_SIZE (Pmode);
7909 if (struct_value_rtx)
7910 size += GET_MODE_SIZE (Pmode);
7912 /* Restore each of the registers previously saved. Make USE insns
7913 for each of these registers for use in making the call. */
7914 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7915 if ((mode = apply_args_mode[regno]) != VOIDmode)
7917 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
7918 if (size % align != 0)
7919 size = CEIL (size, align) * align;
7920 reg = gen_rtx (REG, mode, regno);
7921 emit_move_insn (reg,
7922 change_address (arguments, mode,
7923 plus_constant (XEXP (arguments, 0),
7924 size)));
7926 use_reg (&call_fusage, reg);
7927 size += GET_MODE_SIZE (mode);
7930 /* Restore the structure value address unless this is passed as an
7931 "invisible" first argument. */
7932 size = GET_MODE_SIZE (Pmode);
7933 if (struct_value_rtx)
7935 rtx value = gen_reg_rtx (Pmode);
7936 emit_move_insn (value,
7937 change_address (arguments, Pmode,
7938 plus_constant (XEXP (arguments, 0),
7939 size)));
7940 emit_move_insn (struct_value_rtx, value);
7941 if (GET_CODE (struct_value_rtx) == REG)
7942 use_reg (&call_fusage, struct_value_rtx);
7943 size += GET_MODE_SIZE (Pmode);
7946 /* All arguments and registers used for the call are set up by now! */
7947 function = prepare_call_address (function, NULL_TREE, &call_fusage, 0);
7949 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
7950 and we don't want to load it into a register as an optimization,
7951 because prepare_call_address already did it if it should be done. */
7952 if (GET_CODE (function) != SYMBOL_REF)
7953 function = memory_address (FUNCTION_MODE, function);
7955 /* Generate the actual call instruction and save the return value. */
7956 #ifdef HAVE_untyped_call
7957 if (HAVE_untyped_call)
7958 emit_call_insn (gen_untyped_call (gen_rtx (MEM, FUNCTION_MODE, function),
7959 result, result_vector (1, result)));
7960 else
7961 #endif
7962 #ifdef HAVE_call_value
7963 if (HAVE_call_value)
7965 rtx valreg = 0;
7967 /* Locate the unique return register. It is not possible to
7968 express a call that sets more than one return register using
7969 call_value; use untyped_call for that. In fact, untyped_call
7970 only needs to save the return registers in the given block. */
7971 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7972 if ((mode = apply_result_mode[regno]) != VOIDmode)
7974 if (valreg)
7975 abort (); /* HAVE_untyped_call required. */
7976 valreg = gen_rtx (REG, mode, regno);
7979 emit_call_insn (gen_call_value (valreg,
7980 gen_rtx (MEM, FUNCTION_MODE, function),
7981 const0_rtx, NULL_RTX, const0_rtx));
7983 emit_move_insn (change_address (result, GET_MODE (valreg),
7984 XEXP (result, 0)),
7985 valreg);
7987 else
7988 #endif
7989 abort ();
7991 /* Find the CALL insn we just emitted. */
7992 for (call_insn = get_last_insn ();
7993 call_insn && GET_CODE (call_insn) != CALL_INSN;
7994 call_insn = PREV_INSN (call_insn))
7997 if (! call_insn)
7998 abort ();
8000 /* Put the register usage information on the CALL. If there is already
8001 some usage information, put ours at the end. */
8002 if (CALL_INSN_FUNCTION_USAGE (call_insn))
8004 rtx link;
8006 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
8007 link = XEXP (link, 1))
8010 XEXP (link, 1) = call_fusage;
8012 else
8013 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
8015 /* Restore the stack. */
8016 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
8018 /* Return the address of the result block. */
8019 return copy_addr_to_reg (XEXP (result, 0));
8022 /* Perform an untyped return. */
8024 static void
8025 expand_builtin_return (result)
8026 rtx result;
8028 int size, align, regno;
8029 enum machine_mode mode;
8030 rtx reg;
8031 rtx call_fusage = 0;
8033 apply_result_size ();
8034 result = gen_rtx (MEM, BLKmode, result);
8036 #ifdef HAVE_untyped_return
8037 if (HAVE_untyped_return)
8039 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
8040 emit_barrier ();
8041 return;
8043 #endif
8045 /* Restore the return value and note that each value is used. */
8046 size = 0;
8047 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
8048 if ((mode = apply_result_mode[regno]) != VOIDmode)
8050 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
8051 if (size % align != 0)
8052 size = CEIL (size, align) * align;
8053 reg = gen_rtx (REG, mode, INCOMING_REGNO (regno));
8054 emit_move_insn (reg,
8055 change_address (result, mode,
8056 plus_constant (XEXP (result, 0),
8057 size)));
8059 push_to_sequence (call_fusage);
8060 emit_insn (gen_rtx (USE, VOIDmode, reg));
8061 call_fusage = get_insns ();
8062 end_sequence ();
8063 size += GET_MODE_SIZE (mode);
8066 /* Put the USE insns before the return. */
8067 emit_insns (call_fusage);
8069 /* Return whatever value was restored by jumping directly to the end
8070 of the function. */
8071 expand_null_return ();
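/* User-level view of the three builtins implemented above (a sketch of
   the call-forwarding idiom they exist for, e.g. in the Objective-C
   runtime; the argument-block size 64 here is an arbitrary guess, cf.
   the ??? comment about argsize in expand_builtin_apply).  */
#if 0
void *target_function ();

void *
forwarder ()
{
  void *args = __builtin_apply_args ();
  void *result = __builtin_apply (target_function, args, 64);
  __builtin_return (result);
}
#endif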
8074 /* Expand code for a post- or pre- increment or decrement
8075 and return the RTX for the result.
8076 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
8078 static rtx
8079 expand_increment (exp, post)
8080 register tree exp;
8081 int post;
8083 register rtx op0, op1;
8084 register rtx temp, value;
8085 register tree incremented = TREE_OPERAND (exp, 0);
8086 optab this_optab = add_optab;
8087 int icode;
8088 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
8089 int op0_is_copy = 0;
8090 int single_insn = 0;
8091 /* 1 means we can't store into OP0 directly,
8092 because it is a subreg narrower than a word,
8093 and we don't dare clobber the rest of the word. */
8094 int bad_subreg = 0;
8096 if (output_bytecode)
8098 bc_expand_expr (exp);
8099 return NULL_RTX;
8102 /* Stabilize any component ref that might need to be
8103 evaluated more than once below. */
8104 if (!post
8105 || TREE_CODE (incremented) == BIT_FIELD_REF
8106 || (TREE_CODE (incremented) == COMPONENT_REF
8107 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
8108 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
8109 incremented = stabilize_reference (incremented);
8110 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
8111 ones into save exprs so that they don't accidentally get evaluated
8112 more than once by the code below. */
8113 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
8114 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
8115 incremented = save_expr (incremented);
8117 /* Compute the operands as RTX.
8118 Note whether OP0 is the actual lvalue or a copy of it:
8119 I believe it is a copy iff it is a register or subreg
8120 and insns were generated in computing it. */
8122 temp = get_last_insn ();
8123 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
8125 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
8126 in place but instead must do sign- or zero-extension during assignment,
8127 so we copy it into a new register and let the code below use it as
8128 a copy.
8130 Note that we can safely modify this SUBREG since it is known not to be
8131 shared (it was made by the expand_expr call above). */
8133 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
8135 if (post)
8136 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
8137 else
8138 bad_subreg = 1;
8140 else if (GET_CODE (op0) == SUBREG
8141 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
8143 /* We cannot increment this SUBREG in place. If we are
8144 post-incrementing, get a copy of the old value. Otherwise,
8145 just mark that we cannot increment in place. */
8146 if (post)
8147 op0 = copy_to_reg (op0);
8148 else
8149 bad_subreg = 1;
8152 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
8153 && temp != get_last_insn ());
8154 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8156 /* Decide whether incrementing or decrementing. */
8157 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
8158 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8159 this_optab = sub_optab;
8161 /* Convert decrement by a constant into a negative increment. */
8162 if (this_optab == sub_optab
8163 && GET_CODE (op1) == CONST_INT)
8165 op1 = GEN_INT (- INTVAL (op1));
8166 this_optab = add_optab;
8169 /* For a preincrement, see if we can do this with a single instruction. */
8170 if (!post)
8172 icode = (int) this_optab->handlers[(int) mode].insn_code;
8173 if (icode != (int) CODE_FOR_nothing
8174 /* Make sure that OP0 is valid for operands 0 and 1
8175 of the insn we want to queue. */
8176 && (*insn_operand_predicate[icode][0]) (op0, mode)
8177 && (*insn_operand_predicate[icode][1]) (op0, mode)
8178 && (*insn_operand_predicate[icode][2]) (op1, mode))
8179 single_insn = 1;
8182 /* If OP0 is not the actual lvalue, but rather a copy in a register,
8183 then we cannot just increment OP0. We must therefore contrive to
8184 increment the original value. Then, for postincrement, we can return
8185 OP0 since it is a copy of the old value. For preincrement, expand here
8186 unless we can do it with a single insn.
8188 Likewise if storing directly into OP0 would clobber high bits
8189 we need to preserve (bad_subreg). */
8190 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
8192 /* This is the easiest way to increment the value wherever it is.
8193 Problems with multiple evaluation of INCREMENTED are prevented
8194 because either (1) it is a component_ref or preincrement,
8195 in which case it was stabilized above, or (2) it is an array_ref
8196 with constant index in an array in a register, which is
8197 safe to reevaluate. */
8198 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
8199 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8200 ? MINUS_EXPR : PLUS_EXPR),
8201 TREE_TYPE (exp),
8202 incremented,
8203 TREE_OPERAND (exp, 1));
8204 temp = expand_assignment (incremented, newexp, ! post, 0);
8205 return post ? op0 : temp;
8208 if (post)
8210 /* We have a true reference to the value in OP0.
8211 If there is an insn to add or subtract in this mode, queue it.
8212 Queueing the increment insn avoids the register shuffling
8213 that often results if we must increment now and first save
8214 the old value for subsequent use. */
8216 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
8217 op0 = stabilize (op0);
8218 #endif
8220 icode = (int) this_optab->handlers[(int) mode].insn_code;
8221 if (icode != (int) CODE_FOR_nothing
8222 /* Make sure that OP0 is valid for operands 0 and 1
8223 of the insn we want to queue. */
8224 && (*insn_operand_predicate[icode][0]) (op0, mode)
8225 && (*insn_operand_predicate[icode][1]) (op0, mode))
8227 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
8228 op1 = force_reg (mode, op1);
8230 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
8234 /* Preincrement, or we can't increment with one simple insn. */
8235 if (post)
8236 /* Save a copy of the value before inc or dec, to return it later. */
8237 temp = value = copy_to_reg (op0);
8238 else
8239 /* Arrange to return the incremented value. */
8240 /* Copy the rtx because expand_binop will protect from the queue,
8241 and the results of that would be invalid for us to return
8242 if our caller does emit_queue before using our result. */
8243 temp = copy_rtx (value = op0);
8245 /* Increment however we can. */
8246 op1 = expand_binop (mode, this_optab, value, op1, op0,
8247 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
8248 /* Make sure the value is stored into OP0. */
8249 if (op1 != op0)
8250 emit_move_insn (op0, op1);
8252 return temp;
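/* The semantics implemented above, in source terms (sketch): a
   postincrement yields the old value, so a copy is taken (or the add is
   queued) before the store; a preincrement yields the stored result.  */
#if 0
int
post_vs_pre (p)
     int *p;
{
  int a = (*p)++;  /* a gets the old *p; the increment follows */
  int b = ++(*p);  /* *p is incremented first; b gets the new value */
  return a + b;
}
#endif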
8255 /* Expand all function calls contained within EXP, innermost ones first.
8256 But don't look within expressions that have sequence points.
8257 For each CALL_EXPR, record the rtx for its value
8258 in the CALL_EXPR_RTL field. */
8260 static void
8261 preexpand_calls (exp)
8262 tree exp;
8264 register int nops, i;
8265 int type = TREE_CODE_CLASS (TREE_CODE (exp));
8267 if (! do_preexpand_calls)
8268 return;
8270 /* Only expressions and references can contain calls. */
8272 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
8273 return;
8275 switch (TREE_CODE (exp))
8277 case CALL_EXPR:
8278 /* Do nothing if already expanded. */
8279 if (CALL_EXPR_RTL (exp) != 0)
8280 return;
8282 /* Do nothing to built-in functions. */
8283 if (TREE_CODE (TREE_OPERAND (exp, 0)) != ADDR_EXPR
8284 || TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != FUNCTION_DECL
8285 || ! DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8286 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
8287 return;
8289 case COMPOUND_EXPR:
8290 case COND_EXPR:
8291 case TRUTH_ANDIF_EXPR:
8292 case TRUTH_ORIF_EXPR:
8293 /* If we find one of these, then we can be sure
8294 the adjust will be done for it (since it makes jumps).
8295 Do it now, so that if this is inside an argument
8296 of a function, we don't get the stack adjustment
8297 after some other args have already been pushed. */
8298 do_pending_stack_adjust ();
8299 return;
8301 case BLOCK:
8302 case RTL_EXPR:
8303 case WITH_CLEANUP_EXPR:
8304 return;
8306 case SAVE_EXPR:
8307 if (SAVE_EXPR_RTL (exp) != 0)
8308 return;
8311 nops = tree_code_length[(int) TREE_CODE (exp)];
8312 for (i = 0; i < nops; i++)
8313 if (TREE_OPERAND (exp, i) != 0)
8315 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
8316 if (type == 'e' || type == '<' || type == '1' || type == '2'
8317 || type == 'r')
8318 preexpand_calls (TREE_OPERAND (exp, i));
8322 /* At the start of a function, record that we have no previously-pushed
8323 arguments waiting to be popped. */
8325 void
8326 init_pending_stack_adjust ()
8328 pending_stack_adjust = 0;
8331 /* When exiting from function, if safe, clear out any pending stack adjust
8332 so the adjustment won't get done. */
8334 void
8335 clear_pending_stack_adjust ()
8337 #ifdef EXIT_IGNORE_STACK
8338 if (! flag_omit_frame_pointer && EXIT_IGNORE_STACK
8339 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
8340 && ! flag_inline_functions)
8341 pending_stack_adjust = 0;
8342 #endif
8345 /* Pop any previously-pushed arguments that have not been popped yet. */
8347 void
8348 do_pending_stack_adjust ()
8350 if (inhibit_defer_pop == 0)
8352 if (pending_stack_adjust != 0)
8353 adjust_stack (GEN_INT (pending_stack_adjust));
8354 pending_stack_adjust = 0;
8358 /* Defer the expansion of all cleanups up to OLD_CLEANUPS.
8359 Returns the cleanups to be performed. */
8361 static tree
8362 defer_cleanups_to (old_cleanups)
8363 tree old_cleanups;
8365 tree new_cleanups = NULL_TREE;
8366 tree cleanups = cleanups_this_call;
8367 tree last = NULL_TREE;
8369 while (cleanups_this_call != old_cleanups)
8371 (*interim_eh_hook) (TREE_VALUE (cleanups_this_call));
8372 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
8375 if (last)
8377 /* Remove the list from the chain of cleanups. */
8378 TREE_CHAIN (last) = NULL_TREE;
8380 /* Reverse them so that we can build them in the right order. */
8381 cleanups = nreverse (cleanups);
8383 while (cleanups)
8385 if (new_cleanups)
8386 new_cleanups = build (COMPOUND_EXPR, TREE_TYPE (new_cleanups),
8387 TREE_VALUE (cleanups), new_cleanups);
8388 else
8389 new_cleanups = TREE_VALUE (cleanups);
8391 cleanups = TREE_CHAIN (cleanups);
8395 return new_cleanups;
8398 /* Expand all cleanups up to OLD_CLEANUPS.
8399 Needed here, and also for language-dependent calls. */
8401 void
8402 expand_cleanups_to (old_cleanups)
8403 tree old_cleanups;
8405 while (cleanups_this_call != old_cleanups)
8407 (*interim_eh_hook) (TREE_VALUE (cleanups_this_call));
8408 expand_expr (TREE_VALUE (cleanups_this_call), const0_rtx, VOIDmode, 0);
8409 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
8413 /* Expand conditional expressions. */
8415 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
8416 LABEL is an rtx of code CODE_LABEL, in this function and all the
8417 functions here. */
8419 void
8420 jumpifnot (exp, label)
8421 tree exp;
8422 rtx label;
8424 do_jump (exp, label, NULL_RTX);
8427 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
8429 void
8430 jumpif (exp, label)
8431 tree exp;
8432 rtx label;
8434 do_jump (exp, NULL_RTX, label);
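/* How a statement expander might use these entry points (sketch): for
   `if (cond) ... else ...' the front end does, in effect,  */
#if 0
  rtx else_label = gen_label_rtx ();
  jumpifnot (cond, else_label);   /* fall through into the THEN arm */
  /* ... expand the THEN clause, jump past the ELSE clause ... */
  emit_label (else_label);
#endif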
8437 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
8438 the result is zero, or IF_TRUE_LABEL if the result is one.
8439 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
8440 meaning fall through in that case.
8442 do_jump always does any pending stack adjust except when it does not
8443 actually perform a jump. An example where there is no jump
8444 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
8446 This function is responsible for optimizing cases such as
8447 &&, || and comparison operators in EXP. */
8449 void
8450 do_jump (exp, if_false_label, if_true_label)
8451 tree exp;
8452 rtx if_false_label, if_true_label;
8454 register enum tree_code code = TREE_CODE (exp);
8455 /* Some cases need to create a label to jump to
8456 in order to properly fall through.
8457 These cases set DROP_THROUGH_LABEL nonzero. */
8458 rtx drop_through_label = 0;
8459 rtx temp;
8460 rtx comparison = 0;
8461 int i;
8462 tree type;
8463 enum machine_mode mode;
8465 emit_queue ();
8467 switch (code)
8469 case ERROR_MARK:
8470 break;
8472 case INTEGER_CST:
8473 temp = integer_zerop (exp) ? if_false_label : if_true_label;
8474 if (temp)
8475 emit_jump (temp);
8476 break;
8478 #if 0
8479 /* This is not true with #pragma weak */
8480 case ADDR_EXPR:
8481 /* The address of something can never be zero. */
8482 if (if_true_label)
8483 emit_jump (if_true_label);
8484 break;
8485 #endif
8487 case NOP_EXPR:
8488 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
8489 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
8490 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
8491 goto normal;
8492 case CONVERT_EXPR:
8493 /* If we are narrowing the operand, we have to do the compare in the
8494 narrower mode. */
8495 if ((TYPE_PRECISION (TREE_TYPE (exp))
8496 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8497 goto normal;
8498 case NON_LVALUE_EXPR:
8499 case REFERENCE_EXPR:
8500 case ABS_EXPR:
8501 case NEGATE_EXPR:
8502 case LROTATE_EXPR:
8503 case RROTATE_EXPR:
8504 /* These cannot change zero->non-zero or vice versa. */
8505 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
8506 break;
8508 #if 0
8509 /* This is never less insns than evaluating the PLUS_EXPR followed by
8510 a test and can be longer if the test is eliminated. */
8511 case PLUS_EXPR:
8512 /* Reduce to minus. */
8513 exp = build (MINUS_EXPR, TREE_TYPE (exp),
8514 TREE_OPERAND (exp, 0),
8515 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
8516 TREE_OPERAND (exp, 1))));
8517 /* Process as MINUS. */
8518 #endif
8520 case MINUS_EXPR:
8521 /* Non-zero iff operands of minus differ. */
8522 comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
8523 TREE_OPERAND (exp, 0),
8524 TREE_OPERAND (exp, 1)),
8525 NE, NE);
8526 break;
8528 case BIT_AND_EXPR:
8529 /* If we are AND'ing with a small constant, do this comparison in the
8530 smallest type that fits. If the machine doesn't have comparisons
8531 that small, it will be converted back to the wider comparison.
8532 This helps if we are testing the sign bit of a narrower object.
8533 combine can't do this for us because it can't know whether a
8534 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
8536 if (! SLOW_BYTE_ACCESS
8537 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8538 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
8539 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
8540 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
8541 && (type = type_for_mode (mode, 1)) != 0
8542 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
8543 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
8544 != CODE_FOR_nothing))
8546 do_jump (convert (type, exp), if_false_label, if_true_label);
8547 break;
8549 goto normal;
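/* Illustration of the narrowing above (sketch): given `int x', a test
   like `x & 0x80' depends only on the low byte, so the comparison can be
   done in the byte-wide type from type_for_mode, where testing the sign
   bit may be a single one-byte compare.  */
#if 0
int
sign_bit_of_low_byte (x)
     int x;
{
  return (x & 0x80) != 0;  /* comparable in an 8-bit mode */
}
#endif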
8551 case TRUTH_NOT_EXPR:
8552 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
8553 break;
8555 case TRUTH_ANDIF_EXPR:
8556 if (if_false_label == 0)
8557 if_false_label = drop_through_label = gen_label_rtx ();
8558 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
8559 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
8560 break;
8562 case TRUTH_ORIF_EXPR:
8563 if (if_true_label == 0)
8564 if_true_label = drop_through_label = gen_label_rtx ();
8565 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
8566 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
8567 break;
8569 case COMPOUND_EXPR:
8570 push_temp_slots ();
8571 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8572 free_temp_slots ();
8573 pop_temp_slots ();
8574 emit_queue ();
8575 do_pending_stack_adjust ();
8576 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
8577 break;
8579 case COMPONENT_REF:
8580 case BIT_FIELD_REF:
8581 case ARRAY_REF:
8583 int bitsize, bitpos, unsignedp;
8584 enum machine_mode mode;
8585 tree type;
8586 tree offset;
8587 int volatilep = 0;
8589 /* Get description of this reference. We don't actually care
8590 about the underlying object here. */
8591 get_inner_reference (exp, &bitsize, &bitpos, &offset,
8592 &mode, &unsignedp, &volatilep);
8594 type = type_for_size (bitsize, unsignedp);
8595 if (! SLOW_BYTE_ACCESS
8596 && type != 0 && bitsize >= 0
8597 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
8598 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
8599 != CODE_FOR_nothing))
8601 do_jump (convert (type, exp), if_false_label, if_true_label);
8602 break;
8604 goto normal;
8607 case COND_EXPR:
8608 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
8609 if (integer_onep (TREE_OPERAND (exp, 1))
8610 && integer_zerop (TREE_OPERAND (exp, 2)))
8611 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
8613 else if (integer_zerop (TREE_OPERAND (exp, 1))
8614 && integer_onep (TREE_OPERAND (exp, 2)))
8615 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
8617 else
8619 register rtx label1 = gen_label_rtx ();
8620 drop_through_label = gen_label_rtx ();
8621 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
8622 /* Now the THEN-expression. */
8623 do_jump (TREE_OPERAND (exp, 1),
8624 if_false_label ? if_false_label : drop_through_label,
8625 if_true_label ? if_true_label : drop_through_label);
8626 /* In case the do_jump just above never jumps. */
8627 do_pending_stack_adjust ();
8628 emit_label (label1);
8629 /* Now the ELSE-expression. */
8630 do_jump (TREE_OPERAND (exp, 2),
8631 if_false_label ? if_false_label : drop_through_label,
8632 if_true_label ? if_true_label : drop_through_label);
8634 break;
8636 case EQ_EXPR:
8637 if (integer_zerop (TREE_OPERAND (exp, 1)))
8638 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
8639 else if (((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8640 == MODE_INT)
7641 &&
7642 !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8643 || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))) == MODE_COMPLEX_FLOAT
8644 || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))) == MODE_COMPLEX_INT)
8645 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
8646 else
8647 comparison = compare (exp, EQ, EQ);
8648 break;
8650 case NE_EXPR:
8651 if (integer_zerop (TREE_OPERAND (exp, 1)))
8652 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
8653 else if (((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8654 == MODE_INT)
7655 &&
7656 !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8657 || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))) == MODE_COMPLEX_FLOAT
8658 || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))) == MODE_COMPLEX_INT)
8659 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
8660 else
8661 comparison = compare (exp, NE, NE);
8662 break;
8664 case LT_EXPR:
8665 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8666 == MODE_INT)
8667 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8668 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
8669 else
8670 comparison = compare (exp, LT, LTU);
8671 break;
8673 case LE_EXPR:
8674 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8675 == MODE_INT)
8676 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8677 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
8678 else
8679 comparison = compare (exp, LE, LEU);
8680 break;
8682 case GT_EXPR:
8683 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8684 == MODE_INT)
8685 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8686 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
8687 else
8688 comparison = compare (exp, GT, GTU);
8689 break;
8691 case GE_EXPR:
8692 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8693 == MODE_INT)
8694 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8695 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
8696 else
8697 comparison = compare (exp, GE, GEU);
8698 break;
8700 default:
8701 normal:
8702 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
8703 #if 0
8704 /* This is not needed any more and causes poor code since it causes
8705 comparisons and tests from non-SI objects to have different code
8706 sequences. */
8707 /* Copy to register to avoid generating bad insns by cse
8708 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
8709 if (!cse_not_expected && GET_CODE (temp) == MEM)
8710 temp = copy_to_reg (temp);
8711 #endif
8712 do_pending_stack_adjust ();
8713 if (GET_CODE (temp) == CONST_INT)
8714 comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
8715 else if (GET_CODE (temp) == LABEL_REF)
8716 comparison = const_true_rtx;
8717 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
8718 && !can_compare_p (GET_MODE (temp)))
8719 /* Note swapping the labels gives us not-equal. */
8720 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
8721 else if (GET_MODE (temp) != VOIDmode)
8722 comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
8723 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
8724 GET_MODE (temp), NULL_RTX, 0);
8725 else
8726 abort ();
8729 /* Do any postincrements in the expression that was tested. */
8730 emit_queue ();
8732 /* If COMPARISON is nonzero here, it is an rtx that can be substituted
8733 straight into a conditional jump instruction as the jump condition.
8734 Otherwise, all the work has been done already. */
8736 if (comparison == const_true_rtx)
8738 if (if_true_label)
8739 emit_jump (if_true_label);
8741 else if (comparison == const0_rtx)
8743 if (if_false_label)
8744 emit_jump (if_false_label);
8746 else if (comparison)
8747 do_jump_for_compare (comparison, if_false_label, if_true_label);
8749 if (drop_through_label)
8751 /* If do_jump produces code that might be jumped around,
8752 do any stack adjusts from that code, before the place
8753 where control merges in. */
8754 do_pending_stack_adjust ();
8755 emit_label (drop_through_label);
8759 /* Given a comparison expression EXP for values too wide to be compared
8760 with one insn, test the comparison and jump to the appropriate label.
8761 The code of EXP is ignored; we always test GT if SWAP is 0,
8762 and LT if SWAP is 1. */
8764 static void
8765 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
8766 tree exp;
8767 int swap;
8768 rtx if_false_label, if_true_label;
8770 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
8771 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
8772 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
8773 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
8774 rtx drop_through_label = 0;
8775 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
8776 int i;
8778 if (! if_true_label || ! if_false_label)
8779 drop_through_label = gen_label_rtx ();
8780 if (! if_true_label)
8781 if_true_label = drop_through_label;
8782 if (! if_false_label)
8783 if_false_label = drop_through_label;
8785 /* Compare a word at a time, high order first. */
8786 for (i = 0; i < nwords; i++)
8788 rtx comp;
8789 rtx op0_word, op1_word;
8791 if (WORDS_BIG_ENDIAN)
8793 op0_word = operand_subword_force (op0, i, mode);
8794 op1_word = operand_subword_force (op1, i, mode);
8796 else
8798 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
8799 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
8802 /* All but high-order word must be compared as unsigned. */
8803 comp = compare_from_rtx (op0_word, op1_word,
8804 (unsignedp || i > 0) ? GTU : GT,
8805 unsignedp, word_mode, NULL_RTX, 0);
8806 if (comp == const_true_rtx)
8807 emit_jump (if_true_label);
8808 else if (comp != const0_rtx)
8809 do_jump_for_compare (comp, NULL_RTX, if_true_label);
8811 /* Consider lower words only if these are equal. */
8812 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
8813 NULL_RTX, 0);
8814 if (comp == const_true_rtx)
8815 emit_jump (if_false_label);
8816 else if (comp != const0_rtx)
8817 do_jump_for_compare (comp, NULL_RTX, if_false_label);
8820 if (if_false_label)
8821 emit_jump (if_false_label);
8822 if (drop_through_label)
8823 emit_label (drop_through_label);
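/* The word-at-a-time algorithm above, restated in C for a two-word
   unsigned value (sketch; the struct is hypothetical).  The high-order
   word decides when it differs; lower words are considered only when
   the higher ones are equal, and are always compared unsigned.  */
#if 0
struct dw { unsigned long hi, lo; };

static int
dw_greater (a, b)
     struct dw a, b;
{
  if (a.hi != b.hi)
    return a.hi > b.hi;
  return a.lo > b.lo;
}
#endif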
8826 /* Compare OP0 with OP1, word at a time, in mode MODE.
8827 UNSIGNEDP says to do unsigned comparison.
8828 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
8830 static void
8831 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
8832 enum machine_mode mode;
8833 int unsignedp;
8834 rtx op0, op1;
8835 rtx if_false_label, if_true_label;
8837 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
8838 rtx drop_through_label = 0;
8839 int i;
8841 if (! if_true_label || ! if_false_label)
8842 drop_through_label = gen_label_rtx ();
8843 if (! if_true_label)
8844 if_true_label = drop_through_label;
8845 if (! if_false_label)
8846 if_false_label = drop_through_label;
8848 /* Compare a word at a time, high order first. */
8849 for (i = 0; i < nwords; i++)
8851 rtx comp;
8852 rtx op0_word, op1_word;
8854 if (WORDS_BIG_ENDIAN)
8856 op0_word = operand_subword_force (op0, i, mode);
8857 op1_word = operand_subword_force (op1, i, mode);
8859 else
8861 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
8862 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
8865 /* All but high-order word must be compared as unsigned. */
8866 comp = compare_from_rtx (op0_word, op1_word,
8867 (unsignedp || i > 0) ? GTU : GT,
8868 unsignedp, word_mode, NULL_RTX, 0);
8869 if (comp == const_true_rtx)
8870 emit_jump (if_true_label);
8871 else if (comp != const0_rtx)
8872 do_jump_for_compare (comp, NULL_RTX, if_true_label);
8874 /* Consider lower words only if these are equal. */
8875 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
8876 NULL_RTX, 0);
8877 if (comp == const_true_rtx)
8878 emit_jump (if_false_label);
8879 else if (comp != const0_rtx)
8880 do_jump_for_compare (comp, NULL_RTX, if_false_label);
8883 if (if_false_label)
8884 emit_jump (if_false_label);
8885 if (drop_through_label)
8886 emit_label (drop_through_label);
8889 /* Given an EQ_EXPR expression EXP for values too wide to be compared
8890 with one insn, test the comparison and jump to the appropriate label. */
8892 static void
8893 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
8894 tree exp;
8895 rtx if_false_label, if_true_label;
8897 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8898 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8899 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
8900 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
8901 int i;
8902 rtx drop_through_label = 0;
8904 if (! if_false_label)
8905 drop_through_label = if_false_label = gen_label_rtx ();
8907 for (i = 0; i < nwords; i++)
8909 rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
8910 operand_subword_force (op1, i, mode),
8911 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
8912 word_mode, NULL_RTX, 0);
8913 if (comp == const_true_rtx)
8914 emit_jump (if_false_label);
8915 else if (comp != const0_rtx)
8916 do_jump_for_compare (comp, if_false_label, NULL_RTX);
8919 if (if_true_label)
8920 emit_jump (if_true_label);
8921 if (drop_through_label)
8922 emit_label (drop_through_label);
8925 /* Jump according to whether OP0 is 0.
8926 We assume that OP0 has an integer mode that is too wide
8927 for the available compare insns. */
8929 static void
8930 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
8931 rtx op0;
8932 rtx if_false_label, if_true_label;
8934 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
8935 int i;
8936 rtx drop_through_label = 0;
8938 if (! if_false_label)
8939 drop_through_label = if_false_label = gen_label_rtx ();
8941 for (i = 0; i < nwords; i++)
8943 rtx comp = compare_from_rtx (operand_subword_force (op0, i,
8944 GET_MODE (op0)),
8945 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
8946 if (comp == const_true_rtx)
8947 emit_jump (if_false_label);
8948 else if (comp != const0_rtx)
8949 do_jump_for_compare (comp, if_false_label, NULL_RTX);
8952 if (if_true_label)
8953 emit_jump (if_true_label);
8954 if (drop_through_label)
8955 emit_label (drop_through_label);
8958 /* Given a comparison expression in rtl form, output conditional branches to
8959 IF_TRUE_LABEL, IF_FALSE_LABEL, or both. */
8961 static void
8962 do_jump_for_compare (comparison, if_false_label, if_true_label)
8963 rtx comparison, if_false_label, if_true_label;
8965 if (if_true_label)
8967 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
8968 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
8969 else
8970 abort ();
8972 if (if_false_label)
8973 emit_jump (if_false_label);
8975 else if (if_false_label)
8977 rtx insn;
8978 rtx prev = get_last_insn ();
8979 rtx branch = 0;
8981 if (prev != 0)
8982 prev = PREV_INSN (prev);
8984 /* Output the branch with the opposite condition. Then try to invert
8985 what is generated. If more than one insn is a branch, or if the
8986 branch is not the last insn written, abort. If we can't invert
8987 the branch, make a true label, redirect this jump to that,
8988 emit a jump to the false label and define the true label. */
8990 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
8991 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_false_label));
8992 else
8993 abort ();
8995 /* Here we get the insn before what was just emitted.
8996 On some machines, emitting the branch can discard
8997 the previous compare insn and emit a replacement. */
8998 if (prev == 0)
8999 /* If there's only one preceding insn... */
9000 insn = get_insns ();
9001 else
9002 insn = NEXT_INSN (prev);
9004 for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
9005 if (GET_CODE (insn) == JUMP_INSN)
9007 if (branch)
9008 abort ();
9009 branch = insn;
9012 if (branch != get_last_insn ())
9013 abort ();
9015 JUMP_LABEL (branch) = if_false_label;
9016 if (! invert_jump (branch, if_false_label))
9018 if_true_label = gen_label_rtx ();
9019 redirect_jump (branch, if_true_label);
9020 emit_jump (if_false_label);
9021 emit_label (if_true_label);
9026 /* Generate code for a comparison expression EXP
9027 (including code to compute the values to be compared)
9028 and set (CC0) according to the result.
9029 SIGNED_CODE should be the rtx operation for this comparison for
9030 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
9032 We force a stack adjustment unless there are currently
9033 things pushed on the stack that aren't yet used. */
9035 static rtx
9036 compare (exp, signed_code, unsigned_code)
9037 register tree exp;
9038 enum rtx_code signed_code, unsigned_code;
9040 register rtx op0
9041 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9042 register rtx op1
9043 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9044 register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
9045 register enum machine_mode mode = TYPE_MODE (type);
9046 int unsignedp = TREE_UNSIGNED (type);
9047 enum rtx_code code = unsignedp ? unsigned_code : signed_code;
9049 return compare_from_rtx (op0, op1, code, unsignedp, mode,
9050 ((mode == BLKmode)
9051 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
9052 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
9055 /* Like compare but expects the values to compare as two rtx's.
9056 The decision as to signed or unsigned comparison must be made by the caller.
9058 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9059 compared.
9061 If ALIGN is non-zero, it is the alignment of this type; if zero, the
9062 size of MODE should be used. */
rtx
compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
     register rtx op0, op1;
     enum rtx_code code;
     int unsignedp;
     enum machine_mode mode;
     rtx size;
     int align;
{
  rtx tem;

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
      || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
      && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
    return tem;

#if 0
  /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines.  */

  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
	  && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
	op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
      unsignedp = 1;
    }
#endif

  emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);

  return gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
}

/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.  EXP is either a comparison
   or a TRUTH_NOT_EXPR whose operand is a comparison.

   If TARGET is nonzero, store the result there if convenient.

   If ONLY_CHEAP is non-zero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */
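
/* Illustrative only: on a target whose md file provides an scc pattern
   such as "seq", a source assignment like r = (a == b); can expand
   through this routine into roughly

	(set (cc0) (compare (reg:SI 70) (reg:SI 71)))
	(set (reg:SI 72) (eq:SI (cc0) (const_int 0)))

   rather than the set/jump/set sequence described above.  The register
   numbers are invented and the exact rtl depends on the target's scc
   patterns.  */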
static rtx
do_store_flag (exp, target, mode, only_cheap)
     tree exp;
     rtx target;
     enum machine_mode mode;
     int only_cheap;
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label, pattern, jump_pat;

  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
     result at the end.  We can't simply invert the test since it would
     have already been inverted if it were valid.  This case occurs for
     some floating-point comparisons.  */

  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
    invert = 1, exp = TREE_OPERAND (exp, 0);

  arg0 = TREE_OPERAND (exp, 0);
  arg1 = TREE_OPERAND (exp, 1);
  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TREE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */
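
  /* For example, a signed test X < 1 becomes X <= 0 here, and an unsigned
     test X >= 1 becomes X > 0 (GTU), so the checks that follow only need
     to recognize comparisons against zero.  */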
  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
	code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = LT;
      else
	code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = GE;
      else
	code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
	code = unsignedp ? GEU : GE;
      break;
    default:
      abort ();
    }

  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }

  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.  */
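
  /* A worked example, assuming 32-bit ints: (x & 8) != 0 becomes
     (x >> 3) & 1, and (x & 8) == 0 becomes ((x >> 3) & 1) ^ 1.  When the
     tested bit is the sign bit, the unsigned shift already leaves 0 or 1,
     so the final AND is omitted.  */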

  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1))
      && TYPE_PRECISION (type) <= HOST_BITS_PER_WIDE_INT)
    {
      tree inner = TREE_OPERAND (arg0, 0);
      int bitnum = exact_log2 (INTVAL (expand_expr (TREE_OPERAND (arg0, 1),
						    NULL_RTX, VOIDmode, 0)));
      int ops_unsignedp;

      /* If INNER is a right shift of a constant and it plus BITNUM does
	 not overflow, adjust BITNUM and INNER.  */

      if (TREE_CODE (inner) == RSHIFT_EXPR
	  && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
	  && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
	  && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
	      < TYPE_PRECISION (type)))
	{
	  bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
	  inner = TREE_OPERAND (inner, 0);
	}

      /* If we are going to be able to omit the AND below, we must do our
	 operations as unsigned.  If we must use the AND, we have a choice.
	 Normally unsigned is faster, but for some machines signed is.  */
      ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
#ifdef LOAD_EXTEND_OP
		       : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
#else
		       : 1
#endif
		       );

      if (subtarget == 0 || GET_CODE (subtarget) != REG
	  || GET_MODE (subtarget) != operand_mode
	  || ! safe_from_p (subtarget, inner))
	subtarget = 0;

      op0 = expand_expr (inner, subtarget, VOIDmode, 0);

      if (bitnum != 0)
	op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
			    size_int (bitnum), subtarget, ops_unsignedp);

      if (GET_MODE (op0) != mode)
	op0 = convert_to_mode (mode, op0, ops_unsignedp);

      if ((code == EQ && ! invert) || (code == NE && invert))
	op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
			    ops_unsignedp, OPTAB_LIB_WIDEN);

      /* Put the AND last so it can combine with more things.  */
      if (bitnum != TYPE_PRECISION (type) - 1)
	op0 = expand_and (op0, const1_rtx, subtarget);

      return op0;
    }

  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (operand_mode))
    return 0;
  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
    {
      /* We can only do this if it is one of the special cases that
	 can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
	  || (! only_cheap && code == GE && integer_zerop (arg1)))
	;
      else if (BRANCH_COST >= 0
	       && ! only_cheap && (code == NE || code == EQ)
	       && TREE_CODE (type) != REAL_TYPE
	       && ((abs_optab->handlers[(int) operand_mode].insn_code
		    != CODE_FOR_nothing)
		   || (ffs_optab->handlers[(int) operand_mode].insn_code
		       != CODE_FOR_nothing)))
	;
      else
	return 0;
    }

  preexpand_calls (exp);
  if (subtarget == 0 || GET_CODE (subtarget) != REG
      || GET_MODE (subtarget) != operand_mode
      || ! safe_from_p (subtarget, arg1))
    subtarget = 0;

  op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
  op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  /* Pass copies of OP0 and OP1 in case they contain a QUEUED.  This is safe
     because, if the emit_store_flag does anything it will succeed and
     OP0 and OP1 will not be used subsequently.  */

  result = emit_store_flag (target, code,
			    queued_subexp_p (op0) ? copy_rtx (op0) : op0,
			    queued_subexp_p (op1) ? copy_rtx (op1) : op1,
			    operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
	result = expand_binop (mode, xor_optab, result, const1_rtx,
			       result, 0, OPTAB_LIB_WIDEN);
      return result;
    }

  /* If this failed, we have to do this with set/compare/jump/set code.  */
  if (target == 0 || GET_CODE (target) != REG
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
			     operand_mode, NULL_RTX, 0);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
	     || (result != const0_rtx && invert))
	    ? const0_rtx : const1_rtx);

  label = gen_label_rtx ();
  if (bcc_gen_fctn[(int) code] == 0)
    abort ();

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}

/* Generate a tablejump instruction (used for switch statements).  */

#ifdef HAVE_tablejump

/* INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

void
do_tablejump (index, mode, range, table_label, default_label)
     rtx index, range, table_label, default_label;
     enum machine_mode mode;
{
  register rtx temp, vector;

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */
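
  /* A worked example: for a case range of 3 ... 10 the caller has already
     computed INDEX - 3, and RANGE is 7.  An original index of 2 yields
     (unsigned) -1, which compares greater than 7, so the single unsigned
     test catches both out-of-range directions.  */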

  emit_cmp_insn (index, range, GTU, NULL_RTX, mode, 1, 0);
  emit_jump_insn (gen_bgtu (default_label));

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip through, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && GET_CODE (index) != REG)
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx (PLUS, Pmode,
		   gen_rtx (MULT, Pmode, index,
			    GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
		   gen_rtx (LABEL_REF, Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx (MEM, CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

#ifndef CASE_VECTOR_PC_RELATIVE
  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! flag_pic)
    emit_barrier ();
#endif
}

#endif /* HAVE_tablejump */

/* Emit a suitable bytecode to load a value from memory, assuming a pointer
   to that value is on the top of the stack.  The resulting type is TYPE,
   and the source declaration is DECL.  */

void
bc_load_memory (type, decl)
     tree type, decl;
{
  enum bytecode_opcode opcode;

  /* Bit fields are special.  We only know about signed and
     unsigned ints, and enums.  The latter are treated as
     signed integers.  */

  if (DECL_BIT_FIELD (decl))
    if (TREE_CODE (type) == ENUMERAL_TYPE
	|| TREE_CODE (type) == INTEGER_TYPE)
      opcode = TREE_UNSIGNED (type) ? zxloadBI : sxloadBI;
    else
      abort ();
  else
    /* See corresponding comment in bc_store_memory ().  */
    if (TYPE_MODE (type) == BLKmode
	|| TYPE_MODE (type) == VOIDmode)
      return;
    else
      opcode = mode_to_load_map[(int) TYPE_MODE (type)];

  if (opcode == neverneverland)
    abort ();

  bc_emit_bytecode (opcode);

#ifdef DEBUG_PRINT_CODE
  fputc ('\n', stderr);
#endif
}

/* Store the contents of the second stack slot to the address in the
   top stack slot.  DECL is the declaration of the destination and is used
   to determine whether we're dealing with a bitfield.  */

void
bc_store_memory (type, decl)
     tree type, decl;
{
  enum bytecode_opcode opcode;

  if (DECL_BIT_FIELD (decl))
    {
      if (TREE_CODE (type) == ENUMERAL_TYPE
	  || TREE_CODE (type) == INTEGER_TYPE)
	opcode = sstoreBI;
      else
	abort ();
    }
  else
    if (TYPE_MODE (type) == BLKmode)
      {
	/* Copy structure.  This expands to a block copy instruction,
	   storeBLK.  In addition to the arguments expected by the other
	   store instructions, it also expects a type size (SImode) on top
	   of the stack, which is the structure size in size units (usually
	   bytes).  The first two arguments are already on the stack; so we
	   just put the size on level 1.  For some other languages, the
	   size may be variable, which is why we don't encode it as a
	   storeBLK literal, but rather treat it as a full-fledged
	   expression.  */
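
	/* Illustrative stack layout at this point, from deeper to
	   shallower: the value to store, the destination address, and
	   (once TYPE_SIZE has been expanded below) the size word that
	   storeBLK consumes first.  */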
	bc_expand_expr (TYPE_SIZE (type));
	opcode = storeBLK;
      }
    else
      opcode = mode_to_store_map[(int) TYPE_MODE (type)];

  if (opcode == neverneverland)
    abort ();

  bc_emit_bytecode (opcode);

#ifdef DEBUG_PRINT_CODE
  fputc ('\n', stderr);
#endif
}

/* Allocate local stack space sufficient to hold a value of the given
   SIZE at alignment boundary ALIGNMENT bits.  ALIGNMENT must be an
   integral power of 2.  A special case is locals of type VOID, which
   have size 0 and alignment 1 - any "voidish" SIZE or ALIGNMENT is
   remapped into the corresponding attribute of SI.  */

rtx
bc_allocate_local (size, alignment)
     int size, alignment;
{
  rtx retval;
  int byte_alignment;

  if (size < 0)
    abort ();

  /* Normalize size and alignment.  */
  if (!size)
    size = UNITS_PER_WORD;

  if (alignment < BITS_PER_UNIT)
    byte_alignment = 1 << (INT_ALIGN - 1);
  else
    /* Align.  */
    byte_alignment = alignment / BITS_PER_UNIT;

  /* Round the frame offset up to the next multiple of BYTE_ALIGNMENT.  */
  if (local_vars_size & (byte_alignment - 1))
    local_vars_size += byte_alignment - (local_vars_size & (byte_alignment - 1));

  retval = bc_gen_rtx ((char *) 0, local_vars_size, (struct bc_label *) 0);
  local_vars_size += size;

  return retval;
}
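
/* A minimal usage sketch (not compiled): a 4-byte local with 32-bit
   alignment.  If local_vars_size were 5 beforehand, the rounding above
   would bump it to 8, the returned rtx would name offset 8, and
   local_vars_size would end at 12.  SLOT is a hypothetical local.  */
#if 0
  slot = bc_allocate_local (4, 32);
#endif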

/* Allocate variable-sized local array.  Variable-sized arrays are
   actually pointers to the address in memory where they are stored.  */

rtx
bc_allocate_variable_array (size)
     tree size;
{
  rtx retval;
  const int ptralign = (1 << (PTR_ALIGN - 1));

  /* Align pointer.  */
  if (local_vars_size & ptralign)
    local_vars_size += ptralign - (local_vars_size & ptralign);

  /* Note down local space needed: pointer to block; also return
     dummy rtx.  */

  retval = bc_gen_rtx ((char *) 0, local_vars_size, (struct bc_label *) 0);
  local_vars_size += POINTER_SIZE / BITS_PER_UNIT;
  return retval;
}

/* Push the machine address for the given external variable offset.  */

void
bc_load_externaddr (externaddr)
     rtx externaddr;
{
  bc_emit_bytecode (constP);
  bc_emit_code_labelref (BYTECODE_LABEL (externaddr),
			 BYTECODE_BC_LABEL (externaddr)->offset);

#ifdef DEBUG_PRINT_CODE
  fputc ('\n', stderr);
#endif
}

/* Save a copy of string S in memory obtained with xmalloc.  */

static char *
bc_strdup (s)
     char *s;
{
  char *new = (char *) xmalloc ((strlen (s) + 1) * sizeof *s);

  strcpy (new, s);
  return new;
}

/* Like above, but expects an IDENTIFIER.  */

void
bc_load_externaddr_id (id, offset)
     tree id;
     int offset;
{
  if (!IDENTIFIER_POINTER (id))
    abort ();

  bc_emit_bytecode (constP);
  bc_emit_code_labelref (bc_strdup (IDENTIFIER_POINTER (id)), offset);

#ifdef DEBUG_PRINT_CODE
  fputc ('\n', stderr);
#endif
}

/* Push the machine address for the given local variable offset.  */

void
bc_load_localaddr (localaddr)
     rtx localaddr;
{
  bc_emit_instruction (localP, (HOST_WIDE_INT) BYTECODE_BC_LABEL (localaddr)->offset);
}


/* Push the machine address for the given parameter offset.
   NOTE: offset is in bits.  */

void
bc_load_parmaddr (parmaddr)
     rtx parmaddr;
{
  bc_emit_instruction (argP, ((HOST_WIDE_INT) BYTECODE_BC_LABEL (parmaddr)->offset
			      / BITS_PER_UNIT));
}

/* Convert a[i] into *(a + i).  */
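
/* For example, with 4-byte ints, a[i] is rebuilt below as *(&a + i * 4);
   the index is first widened to the size of a pointer so the multiply
   cannot overflow spuriously.  */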

tree
bc_canonicalize_array_ref (exp)
     tree exp;
{
  tree type = TREE_TYPE (exp);
  tree array_adr = build1 (ADDR_EXPR, TYPE_POINTER_TO (type),
			   TREE_OPERAND (exp, 0));
  tree index = TREE_OPERAND (exp, 1);

  /* Convert the integer argument to a type the same size as a pointer
     so the multiply won't overflow spuriously.  */

  if (TYPE_PRECISION (TREE_TYPE (index)) != POINTER_SIZE)
    index = convert (type_for_size (POINTER_SIZE, 0), index);

  /* The array address isn't volatile even if the array is.
     (Of course this isn't terribly relevant since the bytecode
     translator treats nearly everything as volatile anyway.)  */
  TREE_THIS_VOLATILE (array_adr) = 0;

  return build1 (INDIRECT_REF, type,
		 fold (build (PLUS_EXPR,
			      TYPE_POINTER_TO (type),
			      array_adr,
			      fold (build (MULT_EXPR,
					   TYPE_POINTER_TO (type),
					   index,
					   size_in_bytes (type))))));
}

/* Load the address of the component referenced by the given
   COMPONENT_REF expression.

   Returns innermost lvalue.  */

tree
bc_expand_component_address (exp)
     tree exp;
{
  tree tem, chain;
  enum machine_mode mode;
  int bitpos = 0;
  HOST_WIDE_INT SIval;

  tem = TREE_OPERAND (exp, 1);
  mode = DECL_MODE (tem);

  /* Compute cumulative bit offset for nested component refs
     and array refs, and find the ultimate containing object.  */

  for (tem = exp;; tem = TREE_OPERAND (tem, 0))
    {
      if (TREE_CODE (tem) == COMPONENT_REF)
	bitpos += TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (tem, 1)));
      else
	if (TREE_CODE (tem) == ARRAY_REF
	    && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
	    && TREE_CODE (TYPE_SIZE (TREE_TYPE (tem))) == INTEGER_CST)

	  bitpos += (TREE_INT_CST_LOW (TREE_OPERAND (tem, 1))
		     * TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (tem)))
		     /* * TYPE_SIZE_UNIT (TREE_TYPE (tem)) */);
	else
	  break;
    }

  bc_expand_expr (tem);

  /* For bitfields also push their offset and size.  */
  if (DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
    bc_push_offset_and_size (bitpos,
			     TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (exp, 1))));
  else
    if ((SIval = bitpos / BITS_PER_UNIT))
      bc_emit_instruction (addconstPSI, SIval);

  return (TREE_OPERAND (exp, 1));
}

/* Emit code to push two SI constants.  */

void
bc_push_offset_and_size (offset, size)
     HOST_WIDE_INT offset, size;
{
  bc_emit_instruction (constSI, offset);
  bc_emit_instruction (constSI, size);
}

/* Emit byte code to push the address of the given lvalue expression to
   the stack.  If it's a bit field, we also push offset and size info.

   Returns innermost component, which allows us to determine not only
   its type, but also whether it's a bitfield.  */

tree
bc_expand_address (exp)
     tree exp;
{
  /* Safeguard */
  if (!exp || TREE_CODE (exp) == ERROR_MARK)
    return (exp);

  switch (TREE_CODE (exp))
    {
    case ARRAY_REF:

      return (bc_expand_address (bc_canonicalize_array_ref (exp)));

    case COMPONENT_REF:

      return (bc_expand_component_address (exp));

    case INDIRECT_REF:

      bc_expand_expr (TREE_OPERAND (exp, 0));

      /* For variable-sized types: retrieve pointer.  Sometimes the
	 TYPE_SIZE tree is NULL.  Is this a bug or a feature?  Let's
	 also make sure we have an operand, just in case...  */

      if (TREE_OPERAND (exp, 0)
	  && TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)))
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)))) != INTEGER_CST)
	bc_emit_instruction (loadP);

      /* If packed, also return offset and size.  */
      if (DECL_BIT_FIELD (TREE_OPERAND (exp, 0)))
	bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (exp, 0))),
				 TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (exp, 0))));

      return (TREE_OPERAND (exp, 0));

    case FUNCTION_DECL:

      bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
			     BYTECODE_BC_LABEL (DECL_RTL (exp))->offset);
      break;

    case PARM_DECL:

      bc_load_parmaddr (DECL_RTL (exp));

      /* For variable-sized types: retrieve pointer.  */
      if (TYPE_SIZE (TREE_TYPE (exp))
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
	bc_emit_instruction (loadP);

      /* If packed, also return offset and size.  */
      if (DECL_BIT_FIELD (exp))
	bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp)),
				 TREE_INT_CST_LOW (DECL_SIZE (exp)));

      break;

    case RESULT_DECL:

      bc_emit_instruction (returnP);
      break;

    case VAR_DECL:

#if 0
      if (BYTECODE_LABEL (DECL_RTL (exp)))
	bc_load_externaddr (DECL_RTL (exp));
#endif

      if (DECL_EXTERNAL (exp))
	bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
			       (BYTECODE_BC_LABEL (DECL_RTL (exp)))->offset);
      else
	bc_load_localaddr (DECL_RTL (exp));

      /* For variable-sized types: retrieve pointer.  */
      if (TYPE_SIZE (TREE_TYPE (exp))
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
	bc_emit_instruction (loadP);

      /* If packed, also return offset and size.  */
      if (DECL_BIT_FIELD (exp))
	bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp)),
				 TREE_INT_CST_LOW (DECL_SIZE (exp)));

      break;

    case STRING_CST:
      {
	rtx r;

	bc_emit_bytecode (constP);
	r = output_constant_def (exp);
	bc_emit_code_labelref (BYTECODE_LABEL (r), BYTECODE_BC_LABEL (r)->offset);

#ifdef DEBUG_PRINT_CODE
	fputc ('\n', stderr);
#endif
      }
      break;

    default:

      abort ();
      break;
    }

  /* Most lvalues don't have components.  */
  return (exp);
}

/* Emit a type code to be used by the runtime support in handling
   parameter passing.  The type code consists of the machine mode
   plus the minimal alignment shifted left 8 bits.  */
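
/* A worked example with hypothetical values: if TYPE_MODE (type) were the
   enum value 12 and TYPE_ALIGN (type) were 32, the result would encode
   12 | (32 << 8), i.e. 0x200c.  */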

tree
bc_runtime_type_code (type)
     tree type;
{
  int val;

  switch (TREE_CODE (type))
    {
    case VOID_TYPE:
    case INTEGER_TYPE:
    case REAL_TYPE:
    case COMPLEX_TYPE:
    case ENUMERAL_TYPE:
    case POINTER_TYPE:
    case RECORD_TYPE:

      val = (int) TYPE_MODE (type) | TYPE_ALIGN (type) << 8;
      break;

    case ERROR_MARK:

      val = 0;
      break;

    default:

      abort ();
    }
  return build_int_2 (val, 0);
}

/* Generate constructor label.  */

char *
bc_gen_constr_label ()
{
  static int label_counter;
  static char label[20];

  sprintf (label, "*LR%d", label_counter++);

  return (obstack_copy0 (&permanent_obstack, label, strlen (label)));
}

/* Evaluate constructor CONSTR and return pointer to it on level one.  We
   expand the constructor data as static data, and push a pointer to it.
   The pointer is put in the pointer table and is retrieved by a constP
   bytecode instruction.  We then loop and store each constructor member in
   the corresponding component.  Finally, we return the original pointer on
   the stack.  */
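
/* Illustrative only: for a struct initializer that fills in every member,
   the emitted bytecode is expected to look roughly like a single
   "constP <ptr>" that pushes the address of the static copy, followed,
   once per member via bc_store_field, by the code for the member's value,
   an "over", and a store opcode.  The duplicate/constSI/clearBLK prefix
   below is emitted only when the initializer leaves some members out.  */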

void
bc_expand_constructor (constr)
     tree constr;
{
  char *l;
  HOST_WIDE_INT ptroffs;
  rtx constr_rtx;

  /* Literal constructors are handled as constants, whereas
     non-literals are evaluated and stored element by element
     into the data segment.  */

  /* Allocate space in proper segment and push pointer to space on
     stack.  */

  l = bc_gen_constr_label ();

  if (TREE_CONSTANT (constr))
    {
      text_section ();

      bc_emit_const_labeldef (l);
      bc_output_constructor (constr, int_size_in_bytes (TREE_TYPE (constr)));
    }
  else
    {
      data_section ();

      bc_emit_data_labeldef (l);
      bc_output_data_constructor (constr);
    }

  /* Add reference to pointer table and recall pointer to stack;
     this code is common for both types of constructors: literals
     and non-literals.  */

  ptroffs = bc_define_pointer (l);
  bc_emit_instruction (constP, ptroffs);

  /* This is all that has to be done if it's a literal.  */
  if (TREE_CONSTANT (constr))
    return;

  /* At this point, we have the pointer to the structure on top of the stack.
     Generate sequences of store_memory calls for the constructor.  */

  /* Constructor type is structure.  */
  if (TREE_CODE (TREE_TYPE (constr)) == RECORD_TYPE)
    {
      register tree elt;

      /* If the constructor has fewer fields than the structure,
	 clear the whole structure first.  */

      if (list_length (CONSTRUCTOR_ELTS (constr))
	  != list_length (TYPE_FIELDS (TREE_TYPE (constr))))
	{
	  bc_emit_instruction (duplicate);
	  bc_emit_instruction (constSI, (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr)));
	  bc_emit_instruction (clearBLK);
	}

      /* Store each element of the constructor into the corresponding
	 field of TARGET.  */

      for (elt = CONSTRUCTOR_ELTS (constr); elt; elt = TREE_CHAIN (elt))
	{
	  register tree field = TREE_PURPOSE (elt);
	  register enum machine_mode mode;
	  int bitsize;
	  int bitpos;
	  int unsignedp;

	  bitsize = TREE_INT_CST_LOW (DECL_SIZE (field)) /* * DECL_SIZE_UNIT (field) */;
	  mode = DECL_MODE (field);
	  unsignedp = TREE_UNSIGNED (field);

	  bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));

	  bc_store_field (elt, bitsize, bitpos, mode, TREE_VALUE (elt),
			  TREE_TYPE (TREE_VALUE (elt)),
			  /* The alignment of TARGET is
			     at least what its type requires.  */
			  VOIDmode, 0,
			  TYPE_ALIGN (TREE_TYPE (constr)) / BITS_PER_UNIT,
			  int_size_in_bytes (TREE_TYPE (constr)));
	}
    }
  else
    /* Constructor type is array.  */
    if (TREE_CODE (TREE_TYPE (constr)) == ARRAY_TYPE)
      {
	register tree elt;
	register int i;
	tree domain = TYPE_DOMAIN (TREE_TYPE (constr));
	int minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
	int maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
	tree elttype = TREE_TYPE (TREE_TYPE (constr));

	/* If the constructor has fewer elements than the array,
	   clear the whole array first.  */

	if (list_length (CONSTRUCTOR_ELTS (constr)) < maxelt - minelt + 1)
	  {
	    bc_emit_instruction (duplicate);
	    bc_emit_instruction (constSI, (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr)));
	    bc_emit_instruction (clearBLK);
	  }

	/* Store each element of the constructor into the corresponding
	   element of TARGET, determined by counting the elements.  */

	for (elt = CONSTRUCTOR_ELTS (constr), i = 0;
	     elt;
	     elt = TREE_CHAIN (elt), i++)
	  {
	    register enum machine_mode mode;
	    int bitsize;
	    int bitpos;
	    int unsignedp;

	    mode = TYPE_MODE (elttype);
	    bitsize = GET_MODE_BITSIZE (mode);
	    unsignedp = TREE_UNSIGNED (elttype);

	    bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype))
		      /* * TYPE_SIZE_UNIT (elttype) */ );

	    bc_store_field (elt, bitsize, bitpos, mode,
			    TREE_VALUE (elt), TREE_TYPE (TREE_VALUE (elt)),
			    /* The alignment of TARGET is
			       at least what its type requires.  */
			    VOIDmode, 0,
			    TYPE_ALIGN (TREE_TYPE (constr)) / BITS_PER_UNIT,
			    int_size_in_bytes (TREE_TYPE (constr)));
	  }
      }
}

/* Store the value of EXP (an expression tree) into member FIELD of
   structure at address on stack, which has type TYPE, mode MODE and
   occupies BITSIZE bits, starting BITPOS bits from the beginning of the
   structure.

   ALIGN is the alignment that TARGET is known to have, measured in bytes.
   TOTAL_SIZE is its size in bytes, or -1 if variable.  */

void
bc_store_field (field, bitsize, bitpos, mode, exp, type,
		value_mode, unsignedp, align, total_size)
     int bitsize, bitpos;
     enum machine_mode mode;
     tree field, exp, type;
     enum machine_mode value_mode;
     int unsignedp;
     int align;
     int total_size;
{
  /* Expand expression and copy pointer.  */
  bc_expand_expr (exp);
  bc_emit_instruction (over);

  /* If the component is a bit field, we cannot use addressing to access
     it.  Use bit-field techniques to store in it.  */

  if (DECL_BIT_FIELD (field))
    {
      bc_store_bit_field (bitpos, bitsize, unsignedp);
      return;
    }
  else
    /* Not bit field.  */
    {
      HOST_WIDE_INT offset = bitpos / BITS_PER_UNIT;

      /* Advance pointer to the desired member.  */
      if (offset)
	bc_emit_instruction (addconstPSI, offset);

      /* Store.  */
      bc_store_memory (type, field);
    }
}

/* Store SI/SU in bitfield.  */

void
bc_store_bit_field (offset, size, unsignedp)
     int offset, size, unsignedp;
{
  /* Push bitfield offset and size.  */
  bc_push_offset_and_size (offset, size);

  /* Store.  */
  bc_emit_instruction (sstoreBI);
}


/* Load SI/SU from bitfield.  */

void
bc_load_bit_field (offset, size, unsignedp)
     int offset, size, unsignedp;
{
  /* Push bitfield offset and size.  */
  bc_push_offset_and_size (offset, size);

  /* Load: sign-extend if signed, else zero-extend.  */
  bc_emit_instruction (unsignedp ? zxloadBI : sxloadBI);
}

/* Adjust interpreter stack by NLEVELS.  Positive means drop NLEVELS
   (adjust stack pointer upwards), negative means add that number of
   levels (adjust the stack pointer downwards).  Only positive values
   normally make sense.  */
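
/* Illustrative only: bc_adjust_stack (2) emits two drop instructions
   (case 2 deliberately falls through to case 1 below), while, say,
   bc_adjust_stack (5) is handled with a single adjstackSI instruction.  */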

void
bc_adjust_stack (nlevels)
     int nlevels;
{
  switch (nlevels)
    {
    case 0:
      break;

    case 2:
      bc_emit_instruction (drop);
      /* Fall through to emit the second drop.  */

    case 1:
      bc_emit_instruction (drop);
      break;

    default:

      bc_emit_instruction (adjstackSI, (HOST_WIDE_INT) nlevels);
      stack_depth -= nlevels;
    }

#if defined (VALIDATE_STACK_FOR_BC)
  VALIDATE_STACK_FOR_BC ();
#endif
}