1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "real.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "flags.h"
31 #include "regs.h"
32 #include "hard-reg-set.h"
33 #include "except.h"
34 #include "function.h"
35 #include "insn-config.h"
36 #include "insn-attr.h"
37 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
38 #include "expr.h"
39 #include "optabs.h"
40 #include "libfuncs.h"
41 #include "recog.h"
42 #include "reload.h"
43 #include "output.h"
44 #include "typeclass.h"
45 #include "toplev.h"
46 #include "ggc.h"
47 #include "langhooks.h"
48 #include "intl.h"
49 #include "tm_p.h"
50 #include "target.h"
52 /* Decide whether a function's arguments should be processed
53 from first to last or from last to first.
 55    They should be processed from last to first if the stack and args grow
 56    in opposite directions, but only if we have push insns.  */
58 #ifdef PUSH_ROUNDING
60 #ifndef PUSH_ARGS_REVERSED
61 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
62 #define PUSH_ARGS_REVERSED /* If it's last to first. */
63 #endif
64 #endif
66 #endif
68 #ifndef STACK_PUSH_CODE
69 #ifdef STACK_GROWS_DOWNWARD
70 #define STACK_PUSH_CODE PRE_DEC
71 #else
72 #define STACK_PUSH_CODE PRE_INC
73 #endif
74 #endif
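/* Illustrative sketch, not part of the original file: the RTL shape that
   STACK_PUSH_CODE describes.  On a downward-growing stack a push is a
   store through a PRE_DEC of the stack pointer.  The helper name and the
   single-word operand are hypothetical.  */
static void
example_push_word (rtx value)
{
  rtx slot = gen_rtx_MEM (word_mode,
                          gen_rtx_PRE_DEC (Pmode, stack_pointer_rtx));
  emit_move_insn (slot, value);
}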
76 /* Convert defined/undefined to boolean. */
77 #ifdef TARGET_MEM_FUNCTIONS
78 #undef TARGET_MEM_FUNCTIONS
79 #define TARGET_MEM_FUNCTIONS 1
80 #else
81 #define TARGET_MEM_FUNCTIONS 0
82 #endif
85 /* If this is nonzero, we do not bother generating VOLATILE
86 around volatile memory references, and we are willing to
87 output indirect addresses. If cse is to follow, we reject
88 indirect addresses so a useful potential cse is generated;
89 if it is used only once, instruction combination will produce
90 the same indirect address eventually. */
91 int cse_not_expected;
93 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */
94 tree placeholder_list = 0;
96 /* This structure is used by move_by_pieces to describe the move to
97 be performed. */
98 struct move_by_pieces
100 rtx to;
101 rtx to_addr;
102 int autinc_to;
103 int explicit_inc_to;
104 rtx from;
105 rtx from_addr;
106 int autinc_from;
107 int explicit_inc_from;
108 unsigned HOST_WIDE_INT len;
109 HOST_WIDE_INT offset;
110 int reverse;
113 /* This structure is used by store_by_pieces to describe the clear to
114 be performed. */
116 struct store_by_pieces
118 rtx to;
119 rtx to_addr;
120 int autinc_to;
121 int explicit_inc_to;
122 unsigned HOST_WIDE_INT len;
123 HOST_WIDE_INT offset;
124 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
125 void *constfundata;
126 int reverse;
129 static rtx enqueue_insn (rtx, rtx);
130 static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
131 unsigned int);
132 static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
133 struct move_by_pieces *);
134 static bool block_move_libcall_safe_for_call_parm (void);
135 static bool emit_block_move_via_movstr (rtx, rtx, rtx, unsigned);
136 static rtx emit_block_move_via_libcall (rtx, rtx, rtx);
137 static tree emit_block_move_libcall_fn (int);
138 static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
139 static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
140 static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
141 static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
142 static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
143 struct store_by_pieces *);
144 static bool clear_storage_via_clrstr (rtx, rtx, unsigned);
145 static rtx clear_storage_via_libcall (rtx, rtx);
146 static tree clear_storage_libcall_fn (int);
147 static rtx compress_float_constant (rtx, rtx);
148 static rtx get_subtarget (rtx);
149 static int is_zeros_p (tree);
150 static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
151 HOST_WIDE_INT, enum machine_mode,
152 tree, tree, int, int);
153 static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
154 static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
155 tree, enum machine_mode, int, tree, int);
156 static rtx var_rtx (tree);
158 static unsigned HOST_WIDE_INT highest_pow2_factor (tree);
159 static unsigned HOST_WIDE_INT highest_pow2_factor_for_type (tree, tree);
161 static int is_aligning_offset (tree, tree);
162 static rtx expand_increment (tree, int, int);
163 static void expand_operands (tree, tree, rtx, rtx*, rtx*,
164 enum expand_modifier);
165 static rtx do_store_flag (tree, rtx, enum machine_mode, int);
166 #ifdef PUSH_ROUNDING
167 static void emit_single_push_insn (enum machine_mode, rtx, tree);
168 #endif
169 static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
170 static rtx const_vector_from_tree (tree);
172 /* Record for each mode whether we can move a register directly to or
173 from an object of that mode in memory. If we can't, we won't try
174 to use that mode directly when accessing a field of that mode. */
176 static char direct_load[NUM_MACHINE_MODES];
177 static char direct_store[NUM_MACHINE_MODES];
179 /* Record for each mode whether we can float-extend from memory. */
181 static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
183 /* This macro is used to determine whether move_by_pieces should be called
184 to perform a structure copy. */
185 #ifndef MOVE_BY_PIECES_P
186 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
187 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
188 #endif
190 /* This macro is used to determine whether clear_by_pieces should be
191 called to clear storage. */
192 #ifndef CLEAR_BY_PIECES_P
193 #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
194 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) CLEAR_RATIO)
195 #endif
197 /* This macro is used to determine whether store_by_pieces should be
198 called to "memset" storage with byte values other than zero, or
199 to "memcpy" storage when the source is a constant string. */
200 #ifndef STORE_BY_PIECES_P
201 #define STORE_BY_PIECES_P(SIZE, ALIGN) MOVE_BY_PIECES_P (SIZE, ALIGN)
202 #endif
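/* Illustrative sketch, not part of the original file: how a caller
   typically uses these predicates, mirroring what emit_block_move does
   further down.  The helper and its arguments are hypothetical.  */
static rtx
example_copy_block (rtx dst, rtx src, rtx size, unsigned int align)
{
  if (GET_CODE (size) == CONST_INT
      && MOVE_BY_PIECES_P ((unsigned HOST_WIDE_INT) INTVAL (size), align))
    /* Small constant-sized copy: expand into a few move insns.  */
    return move_by_pieces (dst, src, INTVAL (size), align, 0);

  /* Otherwise let the general block-move machinery pick a movstr
     pattern, a library call, or an explicit loop.  */
  return emit_block_move (dst, src, size, BLOCK_OP_NORMAL);
}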
204 /* This array records the insn_code of insns to perform block moves. */
205 enum insn_code movstr_optab[NUM_MACHINE_MODES];
207 /* This array records the insn_code of insns to perform block clears. */
208 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
210 /* These arrays record the insn_code of two different kinds of insns
211 to perform block compares. */
212 enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
213 enum insn_code cmpmem_optab[NUM_MACHINE_MODES];
215 /* Stack of EXPR_WITH_FILE_LOCATION nested expressions. */
216 struct file_stack *expr_wfl_stack;
218 /* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow. */
220 #ifndef SLOW_UNALIGNED_ACCESS
221 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
222 #endif
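/* Illustrative sketch, not part of the original file: how the by-pieces
   code below consults SLOW_UNALIGNED_ACCESS.  When unaligned word
   accesses are cheap, the known alignment is bumped so that wider modes
   get used.  The helper name is hypothetical.  */
static unsigned int
example_effective_alignment (unsigned int align)
{
  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;
  return align;
}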
224 /* This is run once per compilation to set up which modes can be used
225 directly in memory and to initialize the block move optab. */
227 void
228 init_expr_once (void)
230 rtx insn, pat;
231 enum machine_mode mode;
232 int num_clobbers;
233 rtx mem, mem1;
234 rtx reg;
236 /* Try indexing by frame ptr and try by stack ptr.
237 It is known that on the Convex the stack ptr isn't a valid index.
238 With luck, one or the other is valid on any machine. */
239 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
240 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
242 /* A scratch register we can modify in-place below to avoid
243 useless RTL allocations. */
244 reg = gen_rtx_REG (VOIDmode, -1);
246 insn = rtx_alloc (INSN);
247 pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
248 PATTERN (insn) = pat;
250 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
251 mode = (enum machine_mode) ((int) mode + 1))
253 int regno;
255 direct_load[(int) mode] = direct_store[(int) mode] = 0;
256 PUT_MODE (mem, mode);
257 PUT_MODE (mem1, mode);
258 PUT_MODE (reg, mode);
260 /* See if there is some register that can be used in this mode and
261 directly loaded or stored from memory. */
263 if (mode != VOIDmode && mode != BLKmode)
264 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
265 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
266 regno++)
268 if (! HARD_REGNO_MODE_OK (regno, mode))
269 continue;
271 REGNO (reg) = regno;
273 SET_SRC (pat) = mem;
274 SET_DEST (pat) = reg;
275 if (recog (pat, insn, &num_clobbers) >= 0)
276 direct_load[(int) mode] = 1;
278 SET_SRC (pat) = mem1;
279 SET_DEST (pat) = reg;
280 if (recog (pat, insn, &num_clobbers) >= 0)
281 direct_load[(int) mode] = 1;
283 SET_SRC (pat) = reg;
284 SET_DEST (pat) = mem;
285 if (recog (pat, insn, &num_clobbers) >= 0)
286 direct_store[(int) mode] = 1;
288 SET_SRC (pat) = reg;
289 SET_DEST (pat) = mem1;
290 if (recog (pat, insn, &num_clobbers) >= 0)
291 direct_store[(int) mode] = 1;
295 mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
297 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
298 mode = GET_MODE_WIDER_MODE (mode))
300 enum machine_mode srcmode;
301 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
302 srcmode = GET_MODE_WIDER_MODE (srcmode))
304 enum insn_code ic;
306 ic = can_extend_p (mode, srcmode, 0);
307 if (ic == CODE_FOR_nothing)
308 continue;
310 PUT_MODE (mem, srcmode);
312 if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
313 float_extend_from_mem[mode][srcmode] = true;
318 /* This is run at the start of compiling a function. */
320 void
321 init_expr (void)
323 cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
326 /* Small sanity check that the queue is empty at the end of a function. */
328 void
329 finish_expr_for_function (void)
331 if (pending_chain)
332 abort ();
335 /* Manage the queue of increment instructions to be output
336 for POSTINCREMENT_EXPR expressions, etc. */
338 /* Queue up to increment (or change) VAR later. BODY says how:
339 BODY should be the same thing you would pass to emit_insn
340 to increment right away. It will go to emit_insn later on.
342 The value is a QUEUED expression to be used in place of VAR
343 where you want to guarantee the pre-incrementation value of VAR. */
345 static rtx
346 enqueue_insn (rtx var, rtx body)
348 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
349 body, pending_chain);
350 return pending_chain;
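/* Illustrative sketch, not part of the original file: how a queued
   post-increment might be set up.  The helper is hypothetical; compare
   expand_increment further down, which is the real client.  */
static rtx
example_queue_post_increment (rtx var)
{
  /* Queue "var = var + 1" for a later emit_queue.  */
  rtx inc = gen_rtx_PLUS (GET_MODE (var), var, const1_rtx);
  rtx q = enqueue_insn (var, gen_move_insn (var, inc));

  /* Uses that need the pre-increment value go through
     protect_from_queue, which copies VAR into a pseudo here.  */
  return protect_from_queue (q, 0);
}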
353 /* Use protect_from_queue to convert a QUEUED expression
354 into something that you can put immediately into an instruction.
355 If the queued incrementation has not happened yet,
356 protect_from_queue returns the variable itself.
357 If the incrementation has happened, protect_from_queue returns a temp
358 that contains a copy of the old value of the variable.
360 Any time an rtx which might possibly be a QUEUED is to be put
361 into an instruction, it must be passed through protect_from_queue first.
362 QUEUED expressions are not meaningful in instructions.
364 Do not pass a value through protect_from_queue and then hold
365 on to it for a while before putting it in an instruction!
366 If the queue is flushed in between, incorrect code will result. */
368 rtx
369 protect_from_queue (rtx x, int modify)
371 RTX_CODE code = GET_CODE (x);
373 #if 0 /* A QUEUED can hang around after the queue is forced out. */
374 /* Shortcut for most common case. */
375 if (pending_chain == 0)
376 return x;
377 #endif
379 if (code != QUEUED)
381 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
382 use of autoincrement. Make a copy of the contents of the memory
383 location rather than a copy of the address, but not if the value is
384 of mode BLKmode. Don't modify X in place since it might be
385 shared. */
386 if (code == MEM && GET_MODE (x) != BLKmode
387 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
389 rtx y = XEXP (x, 0);
390 rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));
392 if (QUEUED_INSN (y))
394 rtx temp = gen_reg_rtx (GET_MODE (x));
396 emit_insn_before (gen_move_insn (temp, new),
397 QUEUED_INSN (y));
398 return temp;
401 /* Copy the address into a pseudo, so that the returned value
402 remains correct across calls to emit_queue. */
403 return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
406 /* Otherwise, recursively protect the subexpressions of all
407 the kinds of rtx's that can contain a QUEUED. */
408 if (code == MEM)
410 rtx tem = protect_from_queue (XEXP (x, 0), 0);
411 if (tem != XEXP (x, 0))
413 x = copy_rtx (x);
414 XEXP (x, 0) = tem;
417 else if (code == PLUS || code == MULT)
419 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
420 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
421 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
423 x = copy_rtx (x);
424 XEXP (x, 0) = new0;
425 XEXP (x, 1) = new1;
428 return x;
430 /* If the increment has not happened, use the variable itself. Copy it
431 into a new pseudo so that the value remains correct across calls to
432 emit_queue. */
433 if (QUEUED_INSN (x) == 0)
434 return copy_to_reg (QUEUED_VAR (x));
435 /* If the increment has happened and a pre-increment copy exists,
436 use that copy. */
437 if (QUEUED_COPY (x) != 0)
438 return QUEUED_COPY (x);
439 /* The increment has happened but we haven't set up a pre-increment copy.
440 Set one up now, and use it. */
441 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
442 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
443 QUEUED_INSN (x));
444 return QUEUED_COPY (x);
447 /* Return nonzero if X contains a QUEUED expression:
448 if it contains anything that will be altered by a queued increment.
449 We handle only combinations of MEM, PLUS, MINUS and MULT operators
450 since memory addresses generally contain only those. */
452 int
453 queued_subexp_p (rtx x)
455 enum rtx_code code = GET_CODE (x);
456 switch (code)
458 case QUEUED:
459 return 1;
460 case MEM:
461 return queued_subexp_p (XEXP (x, 0));
462 case MULT:
463 case PLUS:
464 case MINUS:
465 return (queued_subexp_p (XEXP (x, 0))
466 || queued_subexp_p (XEXP (x, 1)));
467 default:
468 return 0;
472 /* Perform all the pending incrementations. */
474 void
475 emit_queue (void)
477 rtx p;
478 while ((p = pending_chain))
480 rtx body = QUEUED_BODY (p);
482 switch (GET_CODE (body))
484 case INSN:
485 case JUMP_INSN:
486 case CALL_INSN:
487 case CODE_LABEL:
488 case BARRIER:
489 case NOTE:
490 QUEUED_INSN (p) = body;
491 emit_insn (body);
492 break;
494 #ifdef ENABLE_CHECKING
495 case SEQUENCE:
496 abort ();
497 break;
498 #endif
500 default:
501 QUEUED_INSN (p) = emit_insn (body);
502 break;
505 pending_chain = QUEUED_NEXT (p);
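/* Illustrative sketch, not part of the original file: the usual pattern
   around the increment queue when expanding an expression.  EXP and
   TARGET are hypothetical.  */
static rtx
example_expand_then_flush (tree exp, rtx target)
{
  rtx op = expand_expr (exp, target, VOIDmode, 0);

  /* Flush any queued post-increments before the value is used in a
     context that must observe them.  */
  emit_queue ();
  return op;
}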
509 /* Copy data from FROM to TO, where the machine modes are not the same.
510 Both modes may be integer, or both may be floating.
511 UNSIGNEDP should be nonzero if FROM is an unsigned type.
512 This causes zero-extension instead of sign-extension. */
514 void
515 convert_move (rtx to, rtx from, int unsignedp)
517 enum machine_mode to_mode = GET_MODE (to);
518 enum machine_mode from_mode = GET_MODE (from);
519 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
520 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
521 enum insn_code code;
522 rtx libcall;
524 /* rtx code for making an equivalent value. */
525 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
526 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
528 to = protect_from_queue (to, 1);
529 from = protect_from_queue (from, 0);
531 if (to_real != from_real)
532 abort ();
534 /* If FROM is a SUBREG that indicates that we have already done at least
535 the required extension, strip it. We don't handle such SUBREGs as
536 TO here. */
538 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
539 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
540 >= GET_MODE_SIZE (to_mode))
541 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
542 from = gen_lowpart (to_mode, from), from_mode = to_mode;
544 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
545 abort ();
547 if (to_mode == from_mode
548 || (from_mode == VOIDmode && CONSTANT_P (from)))
550 emit_move_insn (to, from);
551 return;
554 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
556 if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
557 abort ();
559 if (VECTOR_MODE_P (to_mode))
560 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
561 else
562 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
564 emit_move_insn (to, from);
565 return;
568 if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
570 convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
571 convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
572 return;
575 if (to_real)
577 rtx value, insns;
578 convert_optab tab;
580 if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
581 tab = sext_optab;
582 else if (GET_MODE_PRECISION (from_mode) > GET_MODE_PRECISION (to_mode))
583 tab = trunc_optab;
584 else
585 abort ();
587 /* Try converting directly if the insn is supported. */
589 code = tab->handlers[to_mode][from_mode].insn_code;
590 if (code != CODE_FOR_nothing)
592 emit_unop_insn (code, to, from,
593 tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
594 return;
597 /* Otherwise use a libcall. */
598 libcall = tab->handlers[to_mode][from_mode].libfunc;
600 if (!libcall)
601 /* This conversion is not implemented yet. */
602 abort ();
604 start_sequence ();
605 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
606 1, from, from_mode);
607 insns = get_insns ();
608 end_sequence ();
609 emit_libcall_block (insns, to, value,
610 tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
611 from)
612 : gen_rtx_FLOAT_EXTEND (to_mode, from));
613 return;
616 /* Handle pointer conversion. */ /* SPEE 900220. */
617 /* Targets are expected to provide conversion insns between PxImode and
618 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
619 if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
621 enum machine_mode full_mode
622 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
624 if (trunc_optab->handlers[to_mode][full_mode].insn_code
625 == CODE_FOR_nothing)
626 abort ();
628 if (full_mode != from_mode)
629 from = convert_to_mode (full_mode, from, unsignedp);
630 emit_unop_insn (trunc_optab->handlers[to_mode][full_mode].insn_code,
631 to, from, UNKNOWN);
632 return;
634 if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
636 enum machine_mode full_mode
637 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
639 if (sext_optab->handlers[full_mode][from_mode].insn_code
640 == CODE_FOR_nothing)
641 abort ();
643 emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
644 to, from, UNKNOWN);
645 if (to_mode == full_mode)
646 return;
648 /* else proceed to integer conversions below */
649 from_mode = full_mode;
652 /* Now both modes are integers. */
654 /* Handle expanding beyond a word. */
655 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
656 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
658 rtx insns;
659 rtx lowpart;
660 rtx fill_value;
661 rtx lowfrom;
662 int i;
663 enum machine_mode lowpart_mode;
664 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
666 /* Try converting directly if the insn is supported. */
667 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
668 != CODE_FOR_nothing)
670 /* If FROM is a SUBREG, put it into a register. Do this
671 so that we always generate the same set of insns for
672 better cse'ing; if an intermediate assignment occurred,
673 we won't be doing the operation directly on the SUBREG. */
674 if (optimize > 0 && GET_CODE (from) == SUBREG)
675 from = force_reg (from_mode, from);
676 emit_unop_insn (code, to, from, equiv_code);
677 return;
679 /* Next, try converting via full word. */
680 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
681 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
682 != CODE_FOR_nothing))
684 if (GET_CODE (to) == REG)
685 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
686 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
687 emit_unop_insn (code, to,
688 gen_lowpart (word_mode, to), equiv_code);
689 return;
692 /* No special multiword conversion insn; do it by hand. */
693 start_sequence ();
695 /* Since we will turn this into a no conflict block, we must ensure
696 that the source does not overlap the target. */
698 if (reg_overlap_mentioned_p (to, from))
699 from = force_reg (from_mode, from);
701 /* Get a copy of FROM widened to a word, if necessary. */
702 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
703 lowpart_mode = word_mode;
704 else
705 lowpart_mode = from_mode;
707 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
709 lowpart = gen_lowpart (lowpart_mode, to);
710 emit_move_insn (lowpart, lowfrom);
712 /* Compute the value to put in each remaining word. */
713 if (unsignedp)
714 fill_value = const0_rtx;
715 else
717 #ifdef HAVE_slt
718 if (HAVE_slt
719 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
720 && STORE_FLAG_VALUE == -1)
722 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
723 lowpart_mode, 0);
724 fill_value = gen_reg_rtx (word_mode);
725 emit_insn (gen_slt (fill_value));
727 else
728 #endif
730 fill_value
731 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
732 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
733 NULL_RTX, 0);
734 fill_value = convert_to_mode (word_mode, fill_value, 1);
738 /* Fill the remaining words. */
739 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
741 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
742 rtx subword = operand_subword (to, index, 1, to_mode);
744 if (subword == 0)
745 abort ();
747 if (fill_value != subword)
748 emit_move_insn (subword, fill_value);
751 insns = get_insns ();
752 end_sequence ();
754 emit_no_conflict_block (insns, to, from, NULL_RTX,
755 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
756 return;
759 /* Truncating multi-word to a word or less. */
760 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
761 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
763 if (!((GET_CODE (from) == MEM
764 && ! MEM_VOLATILE_P (from)
765 && direct_load[(int) to_mode]
766 && ! mode_dependent_address_p (XEXP (from, 0)))
767 || GET_CODE (from) == REG
768 || GET_CODE (from) == SUBREG))
769 from = force_reg (from_mode, from);
770 convert_move (to, gen_lowpart (word_mode, from), 0);
771 return;
774 /* Now follow all the conversions between integers
775 no more than a word long. */
777 /* For truncation, usually we can just refer to FROM in a narrower mode. */
778 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
779 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
780 GET_MODE_BITSIZE (from_mode)))
782 if (!((GET_CODE (from) == MEM
783 && ! MEM_VOLATILE_P (from)
784 && direct_load[(int) to_mode]
785 && ! mode_dependent_address_p (XEXP (from, 0)))
786 || GET_CODE (from) == REG
787 || GET_CODE (from) == SUBREG))
788 from = force_reg (from_mode, from);
789 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
790 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
791 from = copy_to_reg (from);
792 emit_move_insn (to, gen_lowpart (to_mode, from));
793 return;
796 /* Handle extension. */
797 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
799 /* Convert directly if that works. */
800 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
801 != CODE_FOR_nothing)
803 if (flag_force_mem)
804 from = force_not_mem (from);
806 emit_unop_insn (code, to, from, equiv_code);
807 return;
809 else
811 enum machine_mode intermediate;
812 rtx tmp;
813 tree shift_amount;
815 /* Search for a mode to convert via. */
816 for (intermediate = from_mode; intermediate != VOIDmode;
817 intermediate = GET_MODE_WIDER_MODE (intermediate))
818 if (((can_extend_p (to_mode, intermediate, unsignedp)
819 != CODE_FOR_nothing)
820 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
821 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
822 GET_MODE_BITSIZE (intermediate))))
823 && (can_extend_p (intermediate, from_mode, unsignedp)
824 != CODE_FOR_nothing))
826 convert_move (to, convert_to_mode (intermediate, from,
827 unsignedp), unsignedp);
828 return;
831 /* No suitable intermediate mode.
832 Generate what we need with shifts. */
833 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
834 - GET_MODE_BITSIZE (from_mode), 0);
835 from = gen_lowpart (to_mode, force_reg (from_mode, from));
836 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
837 to, unsignedp);
838 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
839 to, unsignedp);
840 if (tmp != to)
841 emit_move_insn (to, tmp);
842 return;
846 /* Support special truncate insns for certain modes. */
847 if (trunc_optab->handlers[to_mode][from_mode].insn_code != CODE_FOR_nothing)
849 emit_unop_insn (trunc_optab->handlers[to_mode][from_mode].insn_code,
850 to, from, UNKNOWN);
851 return;
854 /* Handle truncation of volatile memrefs, and so on;
855 the things that couldn't be truncated directly,
856 and for which there was no special instruction.
858 ??? Code above formerly short-circuited this, for most integer
859 mode pairs, with a force_reg in from_mode followed by a recursive
860 call to this routine. Appears always to have been wrong. */
861 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
863 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
864 emit_move_insn (to, temp);
865 return;
868 /* Mode combination is not recognized. */
869 abort ();
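/* Illustrative sketch, not part of the original file: widening a value
   into an existing destination register with convert_move.  The operand
   names are hypothetical.  */
static void
example_zero_extend_into (rtx wide_dest, rtx narrow_src)
{
  /* UNSIGNEDP == 1 requests zero extension rather than sign extension.  */
  convert_move (wide_dest, narrow_src, 1);
}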
872 /* Return an rtx for a value that would result
873 from converting X to mode MODE.
874 Both X and MODE may be floating, or both integer.
875 UNSIGNEDP is nonzero if X is an unsigned value.
876 This can be done by referring to a part of X in place
877 or by copying to a new temporary with conversion.
879 This function *must not* call protect_from_queue
880 except when putting X into an insn (in which case convert_move does it). */
882 rtx
883 convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
885 return convert_modes (mode, VOIDmode, x, unsignedp);
888 /* Return an rtx for a value that would result
889 from converting X from mode OLDMODE to mode MODE.
890 Both modes may be floating, or both integer.
891 UNSIGNEDP is nonzero if X is an unsigned value.
893 This can be done by referring to a part of X in place
894 or by copying to a new temporary with conversion.
896 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
898 This function *must not* call protect_from_queue
899 except when putting X into an insn (in which case convert_move does it). */
901 rtx
902 convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
904 rtx temp;
906 /* If FROM is a SUBREG that indicates that we have already done at least
907 the required extension, strip it. */
909 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
910 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
911 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
912 x = gen_lowpart (mode, x);
914 if (GET_MODE (x) != VOIDmode)
915 oldmode = GET_MODE (x);
917 if (mode == oldmode)
918 return x;
920 /* There is one case that we must handle specially: If we are converting
921 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
922 we are to interpret the constant as unsigned, gen_lowpart will do
923    the wrong thing if the constant appears negative. What we want to do is
924 make the high-order word of the constant zero, not all ones. */
926 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
927 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
928 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
930 HOST_WIDE_INT val = INTVAL (x);
932 if (oldmode != VOIDmode
933 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
935 int width = GET_MODE_BITSIZE (oldmode);
937 /* We need to zero extend VAL. */
938 val &= ((HOST_WIDE_INT) 1 << width) - 1;
941 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
944 /* We can do this with a gen_lowpart if both desired and current modes
945 are integer, and this is either a constant integer, a register, or a
946 non-volatile MEM. Except for the constant case where MODE is no
947 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
949 if ((GET_CODE (x) == CONST_INT
950 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
951 || (GET_MODE_CLASS (mode) == MODE_INT
952 && GET_MODE_CLASS (oldmode) == MODE_INT
953 && (GET_CODE (x) == CONST_DOUBLE
954 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
955 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
956 && direct_load[(int) mode])
957 || (GET_CODE (x) == REG
958 && (! HARD_REGISTER_P (x)
959 || HARD_REGNO_MODE_OK (REGNO (x), mode))
960 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
961 GET_MODE_BITSIZE (GET_MODE (x)))))))))
963 /* ?? If we don't know OLDMODE, we have to assume here that
964 X does not need sign- or zero-extension. This may not be
965 the case, but it's the best we can do. */
966 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
967 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
969 HOST_WIDE_INT val = INTVAL (x);
970 int width = GET_MODE_BITSIZE (oldmode);
972 /* We must sign or zero-extend in this case. Start by
973 zero-extending, then sign extend if we need to. */
974 val &= ((HOST_WIDE_INT) 1 << width) - 1;
975 if (! unsignedp
976 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
977 val |= (HOST_WIDE_INT) (-1) << width;
979 return gen_int_mode (val, mode);
982 return gen_lowpart (mode, x);
985   /* Converting from an integer constant into MODE is always equivalent to
986      a subreg operation.  */
987 if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
989 if (GET_MODE_BITSIZE (mode) != GET_MODE_BITSIZE (oldmode))
990 abort ();
991 return simplify_gen_subreg (mode, x, oldmode, 0);
994 temp = gen_reg_rtx (mode);
995 convert_move (temp, x, unsignedp);
996 return temp;
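/* Illustrative sketch, not part of the original file: convert_modes with
   an explicit OLDMODE, the case that matters when X is a VOIDmode
   CONST_INT.  The constant is hypothetical.  */
static rtx
example_narrow_constant (void)
{
  /* Interpret 0x1234 as an unsigned HImode value and return a QImode rtx.  */
  return convert_modes (QImode, HImode, GEN_INT (0x1234), 1);
}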
999 /* STORE_MAX_PIECES is the number of bytes at a time that we can
1000 store efficiently. Due to internal GCC limitations, this is
1001 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
1002 for an immediate constant. */
1004 #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
1006 /* Determine whether the LEN bytes can be moved by using several move
1007 instructions. Return nonzero if a call to move_by_pieces should
1008 succeed. */
1010 int
1011 can_move_by_pieces (unsigned HOST_WIDE_INT len,
1012 unsigned int align ATTRIBUTE_UNUSED)
1014 return MOVE_BY_PIECES_P (len, align);
1017 /* Generate several move instructions to copy LEN bytes from block FROM to
1018 block TO. (These are MEM rtx's with BLKmode). The caller must pass FROM
1019 and TO through protect_from_queue before calling.
1021 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
1022 used to push FROM to the stack.
1024 ALIGN is maximum stack alignment we can assume.
1026    If ENDP is 0 return TO; if ENDP is 1 return the memory at the end, a la
1027    mempcpy; and if ENDP is 2 return the memory at the end minus one byte,
1028    a la stpcpy.  */
1030 rtx
1031 move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
1032 unsigned int align, int endp)
1034 struct move_by_pieces data;
1035 rtx to_addr, from_addr = XEXP (from, 0);
1036 unsigned int max_size = MOVE_MAX_PIECES + 1;
1037 enum machine_mode mode = VOIDmode, tmode;
1038 enum insn_code icode;
1040 align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
1042 data.offset = 0;
1043 data.from_addr = from_addr;
1044 if (to)
1046 to_addr = XEXP (to, 0);
1047 data.to = to;
1048 data.autinc_to
1049 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1050 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1051 data.reverse
1052 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1054 else
1056 to_addr = NULL_RTX;
1057 data.to = NULL_RTX;
1058 data.autinc_to = 1;
1059 #ifdef STACK_GROWS_DOWNWARD
1060 data.reverse = 1;
1061 #else
1062 data.reverse = 0;
1063 #endif
1065 data.to_addr = to_addr;
1066 data.from = from;
1067 data.autinc_from
1068 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1069 || GET_CODE (from_addr) == POST_INC
1070 || GET_CODE (from_addr) == POST_DEC);
1072 data.explicit_inc_from = 0;
1073 data.explicit_inc_to = 0;
1074 if (data.reverse) data.offset = len;
1075 data.len = len;
1077 /* If copying requires more than two move insns,
1078 copy addresses to registers (to make displacements shorter)
1079 and use post-increment if available. */
1080 if (!(data.autinc_from && data.autinc_to)
1081 && move_by_pieces_ninsns (len, align) > 2)
1083 /* Find the mode of the largest move... */
1084 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1085 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1086 if (GET_MODE_SIZE (tmode) < max_size)
1087 mode = tmode;
1089 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1091 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1092 data.autinc_from = 1;
1093 data.explicit_inc_from = -1;
1095 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1097 data.from_addr = copy_addr_to_reg (from_addr);
1098 data.autinc_from = 1;
1099 data.explicit_inc_from = 1;
1101 if (!data.autinc_from && CONSTANT_P (from_addr))
1102 data.from_addr = copy_addr_to_reg (from_addr);
1103 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1105 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1106 data.autinc_to = 1;
1107 data.explicit_inc_to = -1;
1109 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1111 data.to_addr = copy_addr_to_reg (to_addr);
1112 data.autinc_to = 1;
1113 data.explicit_inc_to = 1;
1115 if (!data.autinc_to && CONSTANT_P (to_addr))
1116 data.to_addr = copy_addr_to_reg (to_addr);
1119 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1120 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1121 align = MOVE_MAX * BITS_PER_UNIT;
1123 /* First move what we can in the largest integer mode, then go to
1124 successively smaller modes. */
1126 while (max_size > 1)
1128 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1129 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1130 if (GET_MODE_SIZE (tmode) < max_size)
1131 mode = tmode;
1133 if (mode == VOIDmode)
1134 break;
1136 icode = mov_optab->handlers[(int) mode].insn_code;
1137 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1138 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1140 max_size = GET_MODE_SIZE (mode);
1143 /* The code above should have handled everything. */
1144 if (data.len > 0)
1145 abort ();
1147 if (endp)
1149 rtx to1;
1151 if (data.reverse)
1152 abort ();
1153 if (data.autinc_to)
1155 if (endp == 2)
1157 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
1158 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
1159 else
1160 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
1161 -1));
1163 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
1164 data.offset);
1166 else
1168 if (endp == 2)
1169 --data.offset;
1170 to1 = adjust_address (data.to, QImode, data.offset);
1172 return to1;
1174 else
1175 return data.to;
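/* Illustrative sketch, not part of the original file: the ENDP argument
   in use.  ENDP == 1 asks move_by_pieces for the address just past the
   last byte written, a la mempcpy.  The helper is hypothetical.  */
static rtx
example_mempcpy_by_pieces (rtx dst, rtx src, unsigned HOST_WIDE_INT len,
                           unsigned int align)
{
  return move_by_pieces (dst, src, len, align, 1);
}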
1178 /* Return number of insns required to move L bytes by pieces.
1179 ALIGN (in bits) is maximum alignment we can assume. */
1181 static unsigned HOST_WIDE_INT
1182 move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align)
1184 unsigned HOST_WIDE_INT n_insns = 0;
1185 unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;
1187 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1188 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1189 align = MOVE_MAX * BITS_PER_UNIT;
1191 while (max_size > 1)
1193 enum machine_mode mode = VOIDmode, tmode;
1194 enum insn_code icode;
1196 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1197 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1198 if (GET_MODE_SIZE (tmode) < max_size)
1199 mode = tmode;
1201 if (mode == VOIDmode)
1202 break;
1204 icode = mov_optab->handlers[(int) mode].insn_code;
1205 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1206 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1208 max_size = GET_MODE_SIZE (mode);
1211 if (l)
1212 abort ();
1213 return n_insns;
1216 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1217 with move instructions for mode MODE. GENFUN is the gen_... function
1218 to make a move insn for that mode. DATA has all the other info. */
1220 static void
1221 move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
1222 struct move_by_pieces *data)
1224 unsigned int size = GET_MODE_SIZE (mode);
1225 rtx to1 = NULL_RTX, from1;
1227 while (data->len >= size)
1229 if (data->reverse)
1230 data->offset -= size;
1232 if (data->to)
1234 if (data->autinc_to)
1235 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1236 data->offset);
1237 else
1238 to1 = adjust_address (data->to, mode, data->offset);
1241 if (data->autinc_from)
1242 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1243 data->offset);
1244 else
1245 from1 = adjust_address (data->from, mode, data->offset);
1247 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1248 emit_insn (gen_add2_insn (data->to_addr,
1249 GEN_INT (-(HOST_WIDE_INT)size)));
1250 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1251 emit_insn (gen_add2_insn (data->from_addr,
1252 GEN_INT (-(HOST_WIDE_INT)size)));
1254 if (data->to)
1255 emit_insn ((*genfun) (to1, from1));
1256 else
1258 #ifdef PUSH_ROUNDING
1259 emit_single_push_insn (mode, from1, NULL);
1260 #else
1261 abort ();
1262 #endif
1265 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1266 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1267 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1268 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1270 if (! data->reverse)
1271 data->offset += size;
1273 data->len -= size;
1277 /* Emit code to move a block Y to a block X. This may be done with
1278 string-move instructions, with multiple scalar move instructions,
1279 or with a library call.
1281 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1282 SIZE is an rtx that says how long they are.
1283 ALIGN is the maximum alignment we can assume they have.
1284 METHOD describes what kind of copy this is, and what mechanisms may be used.
1286 Return the address of the new block, if memcpy is called and returns it,
1287 0 otherwise. */
1289 rtx
1290 emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
1292 bool may_use_call;
1293 rtx retval = 0;
1294 unsigned int align;
1296 switch (method)
1298 case BLOCK_OP_NORMAL:
1299 may_use_call = true;
1300 break;
1302 case BLOCK_OP_CALL_PARM:
1303 may_use_call = block_move_libcall_safe_for_call_parm ();
1305 /* Make inhibit_defer_pop nonzero around the library call
1306 to force it to pop the arguments right away. */
1307 NO_DEFER_POP;
1308 break;
1310 case BLOCK_OP_NO_LIBCALL:
1311 may_use_call = false;
1312 break;
1314 default:
1315 abort ();
1318 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1320 if (GET_MODE (x) != BLKmode)
1321 abort ();
1322 if (GET_MODE (y) != BLKmode)
1323 abort ();
1325 x = protect_from_queue (x, 1);
1326 y = protect_from_queue (y, 0);
1327 size = protect_from_queue (size, 0);
1329 if (GET_CODE (x) != MEM)
1330 abort ();
1331 if (GET_CODE (y) != MEM)
1332 abort ();
1333 if (size == 0)
1334 abort ();
1336 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1337 can be incorrect is coming from __builtin_memcpy. */
1338 if (GET_CODE (size) == CONST_INT)
1340 if (INTVAL (size) == 0)
1341 return 0;
1343 x = shallow_copy_rtx (x);
1344 y = shallow_copy_rtx (y);
1345 set_mem_size (x, size);
1346 set_mem_size (y, size);
1349 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1350 move_by_pieces (x, y, INTVAL (size), align, 0);
1351 else if (emit_block_move_via_movstr (x, y, size, align))
1353 else if (may_use_call)
1354 retval = emit_block_move_via_libcall (x, y, size);
1355 else
1356 emit_block_move_via_loop (x, y, size, align);
1358 if (method == BLOCK_OP_CALL_PARM)
1359 OK_DEFER_POP;
1361 return retval;
1364 /* A subroutine of emit_block_move. Returns true if calling the
1365 block move libcall will not clobber any parameters which may have
1366 already been placed on the stack. */
1368 static bool
1369 block_move_libcall_safe_for_call_parm (void)
1371 /* If arguments are pushed on the stack, then they're safe. */
1372 if (PUSH_ARGS)
1373 return true;
1375 /* If registers go on the stack anyway, any argument is sure to clobber
1376 an outgoing argument. */
1377 #if defined (REG_PARM_STACK_SPACE) && defined (OUTGOING_REG_PARM_STACK_SPACE)
1379 tree fn = emit_block_move_libcall_fn (false);
1380 (void) fn;
1381 if (REG_PARM_STACK_SPACE (fn) != 0)
1382 return false;
1384 #endif
1386 /* If any argument goes in memory, then it might clobber an outgoing
1387 argument. */
1389 CUMULATIVE_ARGS args_so_far;
1390 tree fn, arg;
1392 fn = emit_block_move_libcall_fn (false);
1393 INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);
1395 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1396 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1398 enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1399 rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
1400 if (!tmp || !REG_P (tmp))
1401 return false;
1402 #ifdef FUNCTION_ARG_PARTIAL_NREGS
1403 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode,
1404 NULL_TREE, 1))
1405 return false;
1406 #endif
1407 FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
1410 return true;
1413 /* A subroutine of emit_block_move. Expand a movstr pattern;
1414 return true if successful. */
1416 static bool
1417 emit_block_move_via_movstr (rtx x, rtx y, rtx size, unsigned int align)
1419 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1420 enum machine_mode mode;
1422 /* Since this is a move insn, we don't care about volatility. */
1423 volatile_ok = 1;
1425 /* Try the most limited insn first, because there's no point
1426 including more than one in the machine description unless
1427 the more limited one has some advantage. */
1429 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1430 mode = GET_MODE_WIDER_MODE (mode))
1432 enum insn_code code = movstr_optab[(int) mode];
1433 insn_operand_predicate_fn pred;
1435 if (code != CODE_FOR_nothing
1436 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1437 here because if SIZE is less than the mode mask, as it is
1438 returned by the macro, it will definitely be less than the
1439 actual mode mask. */
1440 && ((GET_CODE (size) == CONST_INT
1441 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1442 <= (GET_MODE_MASK (mode) >> 1)))
1443 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1444 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1445 || (*pred) (x, BLKmode))
1446 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1447 || (*pred) (y, BLKmode))
1448 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1449 || (*pred) (opalign, VOIDmode)))
1451 rtx op2;
1452 rtx last = get_last_insn ();
1453 rtx pat;
1455 op2 = convert_to_mode (mode, size, 1);
1456 pred = insn_data[(int) code].operand[2].predicate;
1457 if (pred != 0 && ! (*pred) (op2, mode))
1458 op2 = copy_to_mode_reg (mode, op2);
1460 /* ??? When called via emit_block_move_for_call, it'd be
1461 nice if there were some way to inform the backend, so
1462 that it doesn't fail the expansion because it thinks
1463 emitting the libcall would be more efficient. */
1465 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1466 if (pat)
1468 emit_insn (pat);
1469 volatile_ok = 0;
1470 return true;
1472 else
1473 delete_insns_since (last);
1477 volatile_ok = 0;
1478 return false;
1481 /* A subroutine of emit_block_move. Expand a call to memcpy or bcopy.
1482 Return the return value from memcpy, 0 otherwise. */
1484 static rtx
1485 emit_block_move_via_libcall (rtx dst, rtx src, rtx size)
1487 rtx dst_addr, src_addr;
1488 tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
1489 enum machine_mode size_mode;
1490 rtx retval;
1492 /* DST, SRC, or SIZE may have been passed through protect_from_queue.
1494 It is unsafe to save the value generated by protect_from_queue and reuse
1495 it later. Consider what happens if emit_queue is called before the
1496 return value from protect_from_queue is used.
1498 Expansion of the CALL_EXPR below will call emit_queue before we are
1499 finished emitting RTL for argument setup. So if we are not careful we
1500 could get the wrong value for an argument.
1502 To avoid this problem we go ahead and emit code to copy the addresses of
1503 DST and SRC and SIZE into new pseudos. We can then place those new
1504 pseudos into an RTL_EXPR and use them later, even after a call to
1505 emit_queue.
1507 Note this is not strictly needed for library calls since they do not call
1508 emit_queue before loading their arguments. However, we may need to have
1509 library calls call emit_queue in the future since failing to do so could
1510 cause problems for targets which define SMALL_REGISTER_CLASSES and pass
1511 arguments in registers. */
1513 dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
1514 src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));
1516 dst_addr = convert_memory_address (ptr_mode, dst_addr);
1517 src_addr = convert_memory_address (ptr_mode, src_addr);
1519 dst_tree = make_tree (ptr_type_node, dst_addr);
1520 src_tree = make_tree (ptr_type_node, src_addr);
1522 if (TARGET_MEM_FUNCTIONS)
1523 size_mode = TYPE_MODE (sizetype);
1524 else
1525 size_mode = TYPE_MODE (unsigned_type_node);
1527 size = convert_to_mode (size_mode, size, 1);
1528 size = copy_to_mode_reg (size_mode, size);
1530 /* It is incorrect to use the libcall calling conventions to call
1531 memcpy in this context. This could be a user call to memcpy and
1532 the user may wish to examine the return value from memcpy. For
1533 targets where libcalls and normal calls have different conventions
1534 for returning pointers, we could end up generating incorrect code.
1536 For convenience, we generate the call to bcopy this way as well. */
1538 if (TARGET_MEM_FUNCTIONS)
1539 size_tree = make_tree (sizetype, size);
1540 else
1541 size_tree = make_tree (unsigned_type_node, size);
1543 fn = emit_block_move_libcall_fn (true);
1544 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
1545 if (TARGET_MEM_FUNCTIONS)
1547 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1548 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
1550 else
1552 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
1553 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1556 /* Now we have to build up the CALL_EXPR itself. */
1557 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1558 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1559 call_expr, arg_list, NULL_TREE);
1561 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1563 /* If we are initializing a readonly value, show the above call clobbered
1564 it. Otherwise, a load from it may erroneously be hoisted from a loop, or
1565 the delay slot scheduler might overlook conflicts and take nasty
1566 decisions. */
1567 if (RTX_UNCHANGING_P (dst))
1568 add_function_usage_to
1569 (last_call_insn (), gen_rtx_EXPR_LIST (VOIDmode,
1570 gen_rtx_CLOBBER (VOIDmode, dst),
1571 NULL_RTX));
1573 return TARGET_MEM_FUNCTIONS ? retval : NULL_RTX;
1576 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1577 for the function we use for block copies. The first time FOR_CALL
1578 is true, we call assemble_external. */
1580 static GTY(()) tree block_move_fn;
1582 void
1583 init_block_move_fn (const char *asmspec)
1585 if (!block_move_fn)
1587 tree args, fn;
1589 if (TARGET_MEM_FUNCTIONS)
1591 fn = get_identifier ("memcpy");
1592 args = build_function_type_list (ptr_type_node, ptr_type_node,
1593 const_ptr_type_node, sizetype,
1594 NULL_TREE);
1596 else
1598 fn = get_identifier ("bcopy");
1599 args = build_function_type_list (void_type_node, const_ptr_type_node,
1600 ptr_type_node, unsigned_type_node,
1601 NULL_TREE);
1604 fn = build_decl (FUNCTION_DECL, fn, args);
1605 DECL_EXTERNAL (fn) = 1;
1606 TREE_PUBLIC (fn) = 1;
1607 DECL_ARTIFICIAL (fn) = 1;
1608 TREE_NOTHROW (fn) = 1;
1610 block_move_fn = fn;
1613 if (asmspec)
1615 SET_DECL_RTL (block_move_fn, NULL_RTX);
1616 SET_DECL_ASSEMBLER_NAME (block_move_fn, get_identifier (asmspec));
1620 static tree
1621 emit_block_move_libcall_fn (int for_call)
1623 static bool emitted_extern;
1625 if (!block_move_fn)
1626 init_block_move_fn (NULL);
1628 if (for_call && !emitted_extern)
1630 emitted_extern = true;
1631 make_decl_rtl (block_move_fn, NULL);
1632 assemble_external (block_move_fn);
1635 return block_move_fn;
1638 /* A subroutine of emit_block_move. Copy the data via an explicit
1639 loop. This is used only when libcalls are forbidden. */
1640 /* ??? It'd be nice to copy in hunks larger than QImode. */
1642 static void
1643 emit_block_move_via_loop (rtx x, rtx y, rtx size,
1644 unsigned int align ATTRIBUTE_UNUSED)
1646 rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
1647 enum machine_mode iter_mode;
1649 iter_mode = GET_MODE (size);
1650 if (iter_mode == VOIDmode)
1651 iter_mode = word_mode;
1653 top_label = gen_label_rtx ();
1654 cmp_label = gen_label_rtx ();
1655 iter = gen_reg_rtx (iter_mode);
1657 emit_move_insn (iter, const0_rtx);
1659 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1660 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1661 do_pending_stack_adjust ();
1663 emit_note (NOTE_INSN_LOOP_BEG);
1665 emit_jump (cmp_label);
1666 emit_label (top_label);
1668 tmp = convert_modes (Pmode, iter_mode, iter, true);
1669 x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
1670 y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
1671 x = change_address (x, QImode, x_addr);
1672 y = change_address (y, QImode, y_addr);
1674 emit_move_insn (x, y);
1676 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1677 true, OPTAB_LIB_WIDEN);
1678 if (tmp != iter)
1679 emit_move_insn (iter, tmp);
1681 emit_note (NOTE_INSN_LOOP_CONT);
1682 emit_label (cmp_label);
1684 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1685 true, top_label);
1687 emit_note (NOTE_INSN_LOOP_END);
1690 /* Copy all or part of a value X into registers starting at REGNO.
1691 The number of registers to be filled is NREGS. */
1693 void
1694 move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
1696 int i;
1697 #ifdef HAVE_load_multiple
1698 rtx pat;
1699 rtx last;
1700 #endif
1702 if (nregs == 0)
1703 return;
1705 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1706 x = validize_mem (force_const_mem (mode, x));
1708 /* See if the machine can do this with a load multiple insn. */
1709 #ifdef HAVE_load_multiple
1710 if (HAVE_load_multiple)
1712 last = get_last_insn ();
1713 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1714 GEN_INT (nregs));
1715 if (pat)
1717 emit_insn (pat);
1718 return;
1720 else
1721 delete_insns_since (last);
1723 #endif
1725 for (i = 0; i < nregs; i++)
1726 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1727 operand_subword_force (x, i, mode));
1730 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1731 The number of registers to be filled is NREGS. */
1733 void
1734 move_block_from_reg (int regno, rtx x, int nregs)
1736 int i;
1738 if (nregs == 0)
1739 return;
1741 /* See if the machine can do this with a store multiple insn. */
1742 #ifdef HAVE_store_multiple
1743 if (HAVE_store_multiple)
1745 rtx last = get_last_insn ();
1746 rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1747 GEN_INT (nregs));
1748 if (pat)
1750 emit_insn (pat);
1751 return;
1753 else
1754 delete_insns_since (last);
1756 #endif
1758 for (i = 0; i < nregs; i++)
1760 rtx tem = operand_subword (x, i, 1, BLKmode);
1762 if (tem == 0)
1763 abort ();
1765 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1769 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1770 ORIG, where ORIG is a non-consecutive group of registers represented by
1771 a PARALLEL. The clone is identical to the original except in that the
1772 original set of registers is replaced by a new set of pseudo registers.
1773 The new set has the same modes as the original set. */
1775 rtx
1776 gen_group_rtx (rtx orig)
1778 int i, length;
1779 rtx *tmps;
1781 if (GET_CODE (orig) != PARALLEL)
1782 abort ();
1784 length = XVECLEN (orig, 0);
1785 tmps = alloca (sizeof (rtx) * length);
1787 /* Skip a NULL entry in first slot. */
1788 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1790 if (i)
1791 tmps[0] = 0;
1793 for (; i < length; i++)
1795 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1796 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1798 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1801 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
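/* Illustrative sketch, not part of the original file: building the kind
   of PARALLEL that the group-move routines below expect -- one
   (EXPR_LIST reg offset) element per piece.  This one describes a value
   split across two word-sized hard registers at byte offsets 0 and
   UNITS_PER_WORD; the register numbers are hypothetical.  */
static rtx
example_two_reg_group (void)
{
  rtx r0 = gen_rtx_REG (word_mode, 0);
  rtx r1 = gen_rtx_REG (word_mode, 1);
  rtvec v = gen_rtvec (2,
                       gen_rtx_EXPR_LIST (VOIDmode, r0, const0_rtx),
                       gen_rtx_EXPR_LIST (VOIDmode, r1,
                                          GEN_INT (UNITS_PER_WORD)));
  return gen_rtx_PARALLEL (BLKmode, v);
}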
1804 /* Emit code to move a block ORIG_SRC of type TYPE to a block DST,
1805 where DST is non-consecutive registers represented by a PARALLEL.
1806 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
1807 if not known. */
1809 void
1810 emit_group_load (rtx dst, rtx orig_src, tree type ATTRIBUTE_UNUSED, int ssize)
1812 rtx *tmps, src;
1813 int start, i;
1815 if (GET_CODE (dst) != PARALLEL)
1816 abort ();
1818 /* Check for a NULL entry, used to indicate that the parameter goes
1819 both on the stack and in registers. */
1820 if (XEXP (XVECEXP (dst, 0, 0), 0))
1821 start = 0;
1822 else
1823 start = 1;
1825 tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));
1827 /* Process the pieces. */
1828 for (i = start; i < XVECLEN (dst, 0); i++)
1830 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1831 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1832 unsigned int bytelen = GET_MODE_SIZE (mode);
1833 int shift = 0;
1835 /* Handle trailing fragments that run over the size of the struct. */
1836 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1838 /* Arrange to shift the fragment to where it belongs.
1839 extract_bit_field loads to the lsb of the reg. */
1840 if (
1841 #ifdef BLOCK_REG_PADDING
1842 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1843 == (BYTES_BIG_ENDIAN ? upward : downward)
1844 #else
1845 BYTES_BIG_ENDIAN
1846 #endif
1848 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1849 bytelen = ssize - bytepos;
1850 if (bytelen <= 0)
1851 abort ();
1854 /* If we won't be loading directly from memory, protect the real source
1855 from strange tricks we might play; but make sure that the source can
1856 be loaded directly into the destination. */
1857 src = orig_src;
1858 if (GET_CODE (orig_src) != MEM
1859 && (!CONSTANT_P (orig_src)
1860 || (GET_MODE (orig_src) != mode
1861 && GET_MODE (orig_src) != VOIDmode)))
1863 if (GET_MODE (orig_src) == VOIDmode)
1864 src = gen_reg_rtx (mode);
1865 else
1866 src = gen_reg_rtx (GET_MODE (orig_src));
1868 emit_move_insn (src, orig_src);
1871 /* Optimize the access just a bit. */
1872 if (GET_CODE (src) == MEM
1873 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1874 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
1875 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1876 && bytelen == GET_MODE_SIZE (mode))
1878 tmps[i] = gen_reg_rtx (mode);
1879 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
1881 else if (GET_CODE (src) == CONCAT)
1883 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1884 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1886 if ((bytepos == 0 && bytelen == slen0)
1887 || (bytepos != 0 && bytepos + bytelen <= slen))
1889 /* The following assumes that the concatenated objects all
1890 have the same size. In this case, a simple calculation
1891 can be used to determine the object and the bit field
1892 to be extracted. */
1893 tmps[i] = XEXP (src, bytepos / slen0);
1894 if (! CONSTANT_P (tmps[i])
1895 && (GET_CODE (tmps[i]) != REG || GET_MODE (tmps[i]) != mode))
1896 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
1897 (bytepos % slen0) * BITS_PER_UNIT,
1898 1, NULL_RTX, mode, mode, ssize);
1900 else if (bytepos == 0)
1902 rtx mem = assign_stack_temp (GET_MODE (src), slen, 0);
1903 emit_move_insn (mem, src);
1904 tmps[i] = adjust_address (mem, mode, 0);
1906 else
1907 abort ();
1909 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1910 SIMD register, which is currently broken. While we get GCC
1911 to emit proper RTL for these cases, let's dump to memory. */
1912 else if (VECTOR_MODE_P (GET_MODE (dst))
1913 && GET_CODE (src) == REG)
1915 int slen = GET_MODE_SIZE (GET_MODE (src));
1916 rtx mem;
1918 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1919 emit_move_insn (mem, src);
1920 tmps[i] = adjust_address (mem, mode, (int) bytepos);
1922 else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
1923 && XVECLEN (dst, 0) > 1)
1924 tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
1925 else if (CONSTANT_P (src)
1926 || (GET_CODE (src) == REG && GET_MODE (src) == mode))
1927 tmps[i] = src;
1928 else
1929 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1930 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
1931 mode, mode, ssize);
1933 if (shift)
1934 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
1935 tmps[i], 0, OPTAB_WIDEN);
1938 emit_queue ();
1940 /* Copy the extracted pieces into the proper (probable) hard regs. */
1941 for (i = start; i < XVECLEN (dst, 0); i++)
1942 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
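/* An illustrative example of the trailing-fragment handling above
   (hypothetical sizes, big-endian target, no BLOCK_REG_PADDING override):
   loading a 10-byte block (SSIZE == 10) into DImode pieces at byte offsets
   0 and 8.  For the second piece bytepos == 8 and bytelen starts at 8, so
   it overruns the block; the code sets shift = (8 - (10 - 8)) * 8 == 48
   and bytelen = 2, extracts the two remaining bytes into the lsb of a
   DImode pseudo, and then shifts them left by 48 bits so they end up at
   the most significant end of the register, where big-endian padding
   rules expect them.  */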
1945 /* Emit code to move a block SRC to block DST, where SRC and DST are
1946 non-consecutive groups of registers, each represented by a PARALLEL. */
1948 void
1949 emit_group_move (rtx dst, rtx src)
1951 int i;
1953 if (GET_CODE (src) != PARALLEL
1954 || GET_CODE (dst) != PARALLEL
1955 || XVECLEN (src, 0) != XVECLEN (dst, 0))
1956 abort ();
1958 /* Skip first entry if NULL. */
1959 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1960 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1961 XEXP (XVECEXP (src, 0, i), 0));
1964 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1965 where SRC is non-consecutive registers represented by a PARALLEL.
1966 SSIZE represents the total size of block ORIG_DST, or -1 if not
1967 known. */
1969 void
1970 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
1972 rtx *tmps, dst;
1973 int start, i;
1975 if (GET_CODE (src) != PARALLEL)
1976 abort ();
1978 /* Check for a NULL entry, used to indicate that the parameter goes
1979 both on the stack and in registers. */
1980 if (XEXP (XVECEXP (src, 0, 0), 0))
1981 start = 0;
1982 else
1983 start = 1;
1985 tmps = alloca (sizeof (rtx) * XVECLEN (src, 0));
1987 /* Copy the (probable) hard regs into pseudos. */
1988 for (i = start; i < XVECLEN (src, 0); i++)
1990 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1991 tmps[i] = gen_reg_rtx (GET_MODE (reg));
1992 emit_move_insn (tmps[i], reg);
1994 emit_queue ();
1996 /* If we won't be storing directly into memory, protect the real destination
1997 from strange tricks we might play. */
1998 dst = orig_dst;
1999 if (GET_CODE (dst) == PARALLEL)
2001 rtx temp;
2003 /* We can get a PARALLEL dst if there is a conditional expression in
2004 a return statement. In that case, the dst and src are the same,
2005 so no action is necessary. */
2006 if (rtx_equal_p (dst, src))
2007 return;
2009 /* It is unclear if we can ever reach here, but we may as well handle
2010 it. Allocate a temporary, and split this into a store/load to/from
2011 the temporary. */
2013 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2014 emit_group_store (temp, src, type, ssize);
2015 emit_group_load (dst, temp, type, ssize);
2016 return;
2018 else if (GET_CODE (dst) != MEM && GET_CODE (dst) != CONCAT)
2020 dst = gen_reg_rtx (GET_MODE (orig_dst));
2021 /* Make life a bit easier for combine. */
2022 emit_move_insn (dst, CONST0_RTX (GET_MODE (orig_dst)));
2025 /* Process the pieces. */
2026 for (i = start; i < XVECLEN (src, 0); i++)
2028 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2029 enum machine_mode mode = GET_MODE (tmps[i]);
2030 unsigned int bytelen = GET_MODE_SIZE (mode);
2031 rtx dest = dst;
2033 /* Handle trailing fragments that run over the size of the struct. */
2034 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2036 /* store_bit_field always takes its value from the lsb.
2037 Move the fragment to the lsb if it's not already there. */
2038 if (
2039 #ifdef BLOCK_REG_PADDING
2040 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2041 == (BYTES_BIG_ENDIAN ? upward : downward)
2042 #else
2043 BYTES_BIG_ENDIAN
2044 #endif
2047 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2048 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2049 tmps[i], 0, OPTAB_WIDEN);
2051 bytelen = ssize - bytepos;
2054 if (GET_CODE (dst) == CONCAT)
2056 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2057 dest = XEXP (dst, 0);
2058 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2060 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2061 dest = XEXP (dst, 1);
2063 else if (bytepos == 0 && XVECLEN (src, 0))
2065 dest = assign_stack_temp (GET_MODE (dest),
2066 GET_MODE_SIZE (GET_MODE (dest)), 0);
2067 emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
2068 tmps[i]);
2069 dst = dest;
2070 break;
2072 else
2073 abort ();
2076 /* Optimize the access just a bit. */
2077 if (GET_CODE (dest) == MEM
2078 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2079 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2080 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2081 && bytelen == GET_MODE_SIZE (mode))
2082 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2083 else
2084 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2085 mode, tmps[i], ssize);
2088 emit_queue ();
2090 /* Copy from the pseudo into the (probable) hard reg. */
2091 if (orig_dst != dst)
2092 emit_move_insn (orig_dst, dst);
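/* The store side mirrors the load side above.  An illustrative example
   (hypothetical sizes, big-endian target): storing a single DImode piece
   into a 6-byte destination (SSIZE == 6) computes
   shift = (8 - (6 - 0)) * 8 == 16, shifts the register right by 16 bits so
   the six meaningful bytes reach the lsb, and then store_bit_field writes
   only 48 bits.  */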
2095 /* Generate code to copy a BLKmode object of TYPE out of a
2096 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2097 is null, a stack temporary is created. TGTBLK is returned.
2099 The purpose of this routine is to handle functions that return
2100 BLKmode structures in registers. Some machines (the PA for example)
2101 want to return all small structures in registers regardless of the
2102 structure's alignment. */
2105 copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
2107 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2108 rtx src = NULL, dst = NULL;
2109 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2110 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2112 if (tgtblk == 0)
2114 tgtblk = assign_temp (build_qualified_type (type,
2115 (TYPE_QUALS (type)
2116 | TYPE_QUAL_CONST)),
2117 0, 1, 1);
2118 preserve_temp_slots (tgtblk);
2121 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2122 into a new pseudo which is a full word. */
2124 if (GET_MODE (srcreg) != BLKmode
2125 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2126 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2128 /* If the structure doesn't take up a whole number of words, see whether
2129 SRCREG is padded on the left or on the right. If it's on the left,
2130 set PADDING_CORRECTION to the number of bits to skip.
2132 In most ABIs, the structure will be returned at the least significant end of
2133 the register, which translates to right padding on little-endian
2134 targets and left padding on big-endian targets. The opposite
2135 holds if the structure is returned at the most significant
2136 end of the register. */
2137 if (bytes % UNITS_PER_WORD != 0
2138 && (targetm.calls.return_in_msb (type)
2139 ? !BYTES_BIG_ENDIAN
2140 : BYTES_BIG_ENDIAN))
2141 padding_correction
2142 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2144 /* Copy the structure BITSIZE bits at a time.
2146 We could probably emit more efficient code for machines which do not use
2147 strict alignment, but it doesn't seem worth the effort at the current
2148 time. */
2149 for (bitpos = 0, xbitpos = padding_correction;
2150 bitpos < bytes * BITS_PER_UNIT;
2151 bitpos += bitsize, xbitpos += bitsize)
2153 /* We need a new source operand each time xbitpos is on a
2154 word boundary and when xbitpos == padding_correction
2155 (the first time through). */
2156 if (xbitpos % BITS_PER_WORD == 0
2157 || xbitpos == padding_correction)
2158 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2159 GET_MODE (srcreg));
2161 /* We need a new destination operand each time bitpos is on
2162 a word boundary. */
2163 if (bitpos % BITS_PER_WORD == 0)
2164 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2166 /* Use xbitpos for the source extraction (right justified) and
2167 bitpos for the destination store (left justified). */
2168 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2169 extract_bit_field (src, bitsize,
2170 xbitpos % BITS_PER_WORD, 1,
2171 NULL_RTX, word_mode, word_mode,
2172 BITS_PER_WORD),
2173 BITS_PER_WORD);
2176 return tgtblk;
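/* An illustrative example (hypothetical 32-bit big-endian target whose ABI
   returns structures at the least significant end of the register): for a
   6-byte structure, bytes % UNITS_PER_WORD == 2, so
   padding_correction = 32 - 2 * 8 == 16 and the bit-field copy above
   starts 16 bits into SRCREG, skipping the padding bits.  */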
2179 /* Add a USE expression for REG to the (possibly empty) list pointed
2180 to by CALL_FUSAGE. REG must denote a hard register. */
2182 void
2183 use_reg (rtx *call_fusage, rtx reg)
2185 if (GET_CODE (reg) != REG
2186 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2187 abort ();
2189 *call_fusage
2190 = gen_rtx_EXPR_LIST (VOIDmode,
2191 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2194 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2195 starting at REGNO. All of these registers must be hard registers. */
2197 void
2198 use_regs (rtx *call_fusage, int regno, int nregs)
2200 int i;
2202 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2203 abort ();
2205 for (i = 0; i < nregs; i++)
2206 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2209 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2210 PARALLEL REGS. This is for calls that pass values in multiple
2211 non-contiguous locations. The Irix 6 ABI has examples of this. */
2213 void
2214 use_group_regs (rtx *call_fusage, rtx regs)
2216 int i;
2218 for (i = 0; i < XVECLEN (regs, 0); i++)
2220 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2222 /* A NULL entry means the parameter goes both on the stack and in
2223 registers. This can also be a MEM for targets that pass values
2224 partially on the stack and partially in registers. */
2225 if (reg != 0 && GET_CODE (reg) == REG)
2226 use_reg (call_fusage, reg);
2231 /* Determine whether the LEN bytes generated by CONSTFUN can be
2232 stored to memory using several move instructions. CONSTFUNDATA is
2233 a pointer which will be passed as argument in every CONSTFUN call.
2234 ALIGN is maximum alignment we can assume. Return nonzero if a
2235 call to store_by_pieces should succeed. */
2238 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2239 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2240 void *constfundata, unsigned int align)
2242 unsigned HOST_WIDE_INT max_size, l;
2243 HOST_WIDE_INT offset = 0;
2244 enum machine_mode mode, tmode;
2245 enum insn_code icode;
2246 int reverse;
2247 rtx cst;
2249 if (len == 0)
2250 return 1;
2252 if (! STORE_BY_PIECES_P (len, align))
2253 return 0;
2255 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2256 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2257 align = MOVE_MAX * BITS_PER_UNIT;
2259 /* We would first store what we can in the largest integer mode, then go to
2260 successively smaller modes. */
2262 for (reverse = 0;
2263 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2264 reverse++)
2266 l = len;
2267 mode = VOIDmode;
2268 max_size = STORE_MAX_PIECES + 1;
2269 while (max_size > 1)
2271 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2272 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2273 if (GET_MODE_SIZE (tmode) < max_size)
2274 mode = tmode;
2276 if (mode == VOIDmode)
2277 break;
2279 icode = mov_optab->handlers[(int) mode].insn_code;
2280 if (icode != CODE_FOR_nothing
2281 && align >= GET_MODE_ALIGNMENT (mode))
2283 unsigned int size = GET_MODE_SIZE (mode);
2285 while (l >= size)
2287 if (reverse)
2288 offset -= size;
2290 cst = (*constfun) (constfundata, offset, mode);
2291 if (!LEGITIMATE_CONSTANT_P (cst))
2292 return 0;
2294 if (!reverse)
2295 offset += size;
2297 l -= size;
2301 max_size = GET_MODE_SIZE (mode);
2304 /* The code above should have handled everything. */
2305 if (l != 0)
2306 abort ();
2309 return 1;
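/* An illustrative walk through the loop above (hypothetical 32-bit target
   with aligned data and STORE_MAX_PIECES == 4): for LEN == 7 the widest
   usable mode is SImode, which covers 4 bytes (l = 3); max_size then drops
   to 4, so HImode covers 2 more bytes (l = 1); finally QImode covers the
   last byte.  Each candidate constant returned by CONSTFUN must also
   satisfy LEGITIMATE_CONSTANT_P, or the function answers 0.  */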
2312 /* Generate several move instructions to store LEN bytes generated by
2313 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2314 pointer which will be passed as argument in every CONSTFUN call.
2315 ALIGN is maximum alignment we can assume.
2316 If ENDP is 0 return TO, if ENDP is 1 return memory at the end a la
2317 mempcpy, and if ENDP is 2 return memory at the end minus one byte a la
2318 stpcpy. */
2321 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2322 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2323 void *constfundata, unsigned int align, int endp)
2325 struct store_by_pieces data;
2327 if (len == 0)
2329 if (endp == 2)
2330 abort ();
2331 return to;
2334 if (! STORE_BY_PIECES_P (len, align))
2335 abort ();
2336 to = protect_from_queue (to, 1);
2337 data.constfun = constfun;
2338 data.constfundata = constfundata;
2339 data.len = len;
2340 data.to = to;
2341 store_by_pieces_1 (&data, align);
2342 if (endp)
2344 rtx to1;
2346 if (data.reverse)
2347 abort ();
2348 if (data.autinc_to)
2350 if (endp == 2)
2352 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2353 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2354 else
2355 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2356 -1));
2358 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2359 data.offset);
2361 else
2363 if (endp == 2)
2364 --data.offset;
2365 to1 = adjust_address (data.to, QImode, data.offset);
2367 return to1;
2369 else
2370 return data.to;
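/* A hypothetical sketch of a CONSTFUN callback, along the lines of the
   readers builtins.c passes in when it expands stores of constant strings
   (the function name here is made up for illustration):

       static rtx
       example_read_str (void *data, HOST_WIDE_INT offset,
                         enum machine_mode mode)
       {
         const char *str = (const char *) data;
         return c_readstr (str + offset, mode);
       }

   A caller would normally check can_store_by_pieces with the same callback
   and CONSTFUNDATA before committing to store_by_pieces.  */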
2373 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2374 rtx with BLKmode). The caller must pass TO through protect_from_queue
2375 before calling. ALIGN is maximum alignment we can assume. */
2377 static void
2378 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2380 struct store_by_pieces data;
2382 if (len == 0)
2383 return;
2385 data.constfun = clear_by_pieces_1;
2386 data.constfundata = NULL;
2387 data.len = len;
2388 data.to = to;
2389 store_by_pieces_1 (&data, align);
2392 /* Callback routine for clear_by_pieces.
2393 Return const0_rtx unconditionally. */
2395 static rtx
2396 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2397 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2398 enum machine_mode mode ATTRIBUTE_UNUSED)
2400 return const0_rtx;
2403 /* Subroutine of clear_by_pieces and store_by_pieces.
2404 Generate several move instructions to store LEN bytes of block TO. (A MEM
2405 rtx with BLKmode). The caller must pass TO through protect_from_queue
2406 before calling. ALIGN is maximum alignment we can assume. */
2408 static void
2409 store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
2410 unsigned int align ATTRIBUTE_UNUSED)
2412 rtx to_addr = XEXP (data->to, 0);
2413 unsigned HOST_WIDE_INT max_size = STORE_MAX_PIECES + 1;
2414 enum machine_mode mode = VOIDmode, tmode;
2415 enum insn_code icode;
2417 data->offset = 0;
2418 data->to_addr = to_addr;
2419 data->autinc_to
2420 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2421 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2423 data->explicit_inc_to = 0;
2424 data->reverse
2425 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2426 if (data->reverse)
2427 data->offset = data->len;
2429 /* If storing requires more than two move insns,
2430 copy addresses to registers (to make displacements shorter)
2431 and use post-increment if available. */
2432 if (!data->autinc_to
2433 && move_by_pieces_ninsns (data->len, align) > 2)
2435 /* Determine the main mode we'll be using. */
2436 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2437 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2438 if (GET_MODE_SIZE (tmode) < max_size)
2439 mode = tmode;
2441 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2443 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2444 data->autinc_to = 1;
2445 data->explicit_inc_to = -1;
2448 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2449 && ! data->autinc_to)
2451 data->to_addr = copy_addr_to_reg (to_addr);
2452 data->autinc_to = 1;
2453 data->explicit_inc_to = 1;
2456 if ( !data->autinc_to && CONSTANT_P (to_addr))
2457 data->to_addr = copy_addr_to_reg (to_addr);
2460 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2461 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2462 align = MOVE_MAX * BITS_PER_UNIT;
2464 /* First store what we can in the largest integer mode, then go to
2465 successively smaller modes. */
2467 while (max_size > 1)
2469 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2470 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2471 if (GET_MODE_SIZE (tmode) < max_size)
2472 mode = tmode;
2474 if (mode == VOIDmode)
2475 break;
2477 icode = mov_optab->handlers[(int) mode].insn_code;
2478 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2479 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2481 max_size = GET_MODE_SIZE (mode);
2484 /* The code above should have handled everything. */
2485 if (data->len != 0)
2486 abort ();
2489 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2490 with move instructions for mode MODE. GENFUN is the gen_... function
2491 to make a move insn for that mode. DATA has all the other info. */
2493 static void
2494 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2495 struct store_by_pieces *data)
2497 unsigned int size = GET_MODE_SIZE (mode);
2498 rtx to1, cst;
2500 while (data->len >= size)
2502 if (data->reverse)
2503 data->offset -= size;
2505 if (data->autinc_to)
2506 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2507 data->offset);
2508 else
2509 to1 = adjust_address (data->to, mode, data->offset);
2511 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2512 emit_insn (gen_add2_insn (data->to_addr,
2513 GEN_INT (-(HOST_WIDE_INT) size)));
2515 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2516 emit_insn ((*genfun) (to1, cst));
2518 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2519 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2521 if (! data->reverse)
2522 data->offset += size;
2524 data->len -= size;
2528 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2529 its length in bytes. */
2532 clear_storage (rtx object, rtx size)
2534 rtx retval = 0;
2535 unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
2536 : GET_MODE_ALIGNMENT (GET_MODE (object)));
2538 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2539 just move a zero. Otherwise, do this a piece at a time. */
2540 if (GET_MODE (object) != BLKmode
2541 && GET_CODE (size) == CONST_INT
2542 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (object)))
2543 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2544 else
2546 object = protect_from_queue (object, 1);
2547 size = protect_from_queue (size, 0);
2549 if (size == const0_rtx)
2551 else if (GET_CODE (size) == CONST_INT
2552 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2553 clear_by_pieces (object, INTVAL (size), align);
2554 else if (clear_storage_via_clrstr (object, size, align))
2556 else
2557 retval = clear_storage_via_libcall (object, size);
2560 return retval;
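/* An illustrative example of the strategy choice above (hypothetical
   32-bit target): clearing a 16-byte, word-aligned BLKmode object with a
   constant size normally satisfies CLEAR_BY_PIECES_P and expands to four
   SImode stores of const0_rtx; a variable-sized clear instead tries the
   machine's clrstr pattern and finally falls back to a memset (or bzero)
   libcall.  */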
2563 /* A subroutine of clear_storage. Expand a clrstr pattern;
2564 return true if successful. */
2566 static bool
2567 clear_storage_via_clrstr (rtx object, rtx size, unsigned int align)
2569 /* Try the most limited insn first, because there's no point
2570 including more than one in the machine description unless
2571 the more limited one has some advantage. */
2573 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2574 enum machine_mode mode;
2576 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2577 mode = GET_MODE_WIDER_MODE (mode))
2579 enum insn_code code = clrstr_optab[(int) mode];
2580 insn_operand_predicate_fn pred;
2582 if (code != CODE_FOR_nothing
2583 /* We don't need MODE to be narrower than
2584 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2585 the mode mask, as it is returned by the macro, it will
2586 definitely be less than the actual mode mask. */
2587 && ((GET_CODE (size) == CONST_INT
2588 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2589 <= (GET_MODE_MASK (mode) >> 1)))
2590 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2591 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2592 || (*pred) (object, BLKmode))
2593 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2594 || (*pred) (opalign, VOIDmode)))
2596 rtx op1;
2597 rtx last = get_last_insn ();
2598 rtx pat;
2600 op1 = convert_to_mode (mode, size, 1);
2601 pred = insn_data[(int) code].operand[1].predicate;
2602 if (pred != 0 && ! (*pred) (op1, mode))
2603 op1 = copy_to_mode_reg (mode, op1);
2605 pat = GEN_FCN ((int) code) (object, op1, opalign);
2606 if (pat)
2608 emit_insn (pat);
2609 return true;
2611 else
2612 delete_insns_since (last);
2616 return false;
2619 /* A subroutine of clear_storage. Expand a call to memset or bzero.
2620 Return the return value of memset, 0 otherwise. */
2622 static rtx
2623 clear_storage_via_libcall (rtx object, rtx size)
2625 tree call_expr, arg_list, fn, object_tree, size_tree;
2626 enum machine_mode size_mode;
2627 rtx retval;
2629 /* OBJECT or SIZE may have been passed through protect_from_queue.
2631 It is unsafe to save the value generated by protect_from_queue
2632 and reuse it later. Consider what happens if emit_queue is
2633 called before the return value from protect_from_queue is used.
2635 Expansion of the CALL_EXPR below will call emit_queue before
2636 we are finished emitting RTL for argument setup. So if we are
2637 not careful we could get the wrong value for an argument.
2639 To avoid this problem we go ahead and emit code to copy OBJECT
2640 and SIZE into new pseudos. We can then place those new pseudos
2641 into an RTL_EXPR and use them later, even after a call to
2642 emit_queue.
2644 Note this is not strictly needed for library calls since they
2645 do not call emit_queue before loading their arguments. However,
2646 we may need to have library calls call emit_queue in the future
2647 since failing to do so could cause problems for targets which
2648 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2650 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2652 if (TARGET_MEM_FUNCTIONS)
2653 size_mode = TYPE_MODE (sizetype);
2654 else
2655 size_mode = TYPE_MODE (unsigned_type_node);
2656 size = convert_to_mode (size_mode, size, 1);
2657 size = copy_to_mode_reg (size_mode, size);
2659 /* It is incorrect to use the libcall calling conventions to call
2660 memset in this context. This could be a user call to memset and
2661 the user may wish to examine the return value from memset. For
2662 targets where libcalls and normal calls have different conventions
2663 for returning pointers, we could end up generating incorrect code.
2665 For convenience, we generate the call to bzero this way as well. */
2667 object_tree = make_tree (ptr_type_node, object);
2668 if (TARGET_MEM_FUNCTIONS)
2669 size_tree = make_tree (sizetype, size);
2670 else
2671 size_tree = make_tree (unsigned_type_node, size);
2673 fn = clear_storage_libcall_fn (true);
2674 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
2675 if (TARGET_MEM_FUNCTIONS)
2676 arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
2677 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
2679 /* Now we have to build up the CALL_EXPR itself. */
2680 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2681 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2682 call_expr, arg_list, NULL_TREE);
2684 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2686 /* If we are initializing a readonly value, show the above call
2687 clobbered it. Otherwise, a load from it may erroneously be
2688 hoisted from a loop. */
2689 if (RTX_UNCHANGING_P (object))
2690 emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
2692 return (TARGET_MEM_FUNCTIONS ? retval : NULL_RTX);
2695 /* A subroutine of clear_storage_via_libcall. Create the tree node
2696 for the function we use for block clears. The first time FOR_CALL
2697 is true, we call assemble_external. */
2699 static GTY(()) tree block_clear_fn;
2701 void
2702 init_block_clear_fn (const char *asmspec)
2704 if (!block_clear_fn)
2706 tree fn, args;
2708 if (TARGET_MEM_FUNCTIONS)
2710 fn = get_identifier ("memset");
2711 args = build_function_type_list (ptr_type_node, ptr_type_node,
2712 integer_type_node, sizetype,
2713 NULL_TREE);
2715 else
2717 fn = get_identifier ("bzero");
2718 args = build_function_type_list (void_type_node, ptr_type_node,
2719 unsigned_type_node, NULL_TREE);
2722 fn = build_decl (FUNCTION_DECL, fn, args);
2723 DECL_EXTERNAL (fn) = 1;
2724 TREE_PUBLIC (fn) = 1;
2725 DECL_ARTIFICIAL (fn) = 1;
2726 TREE_NOTHROW (fn) = 1;
2728 block_clear_fn = fn;
2731 if (asmspec)
2733 SET_DECL_RTL (block_clear_fn, NULL_RTX);
2734 SET_DECL_ASSEMBLER_NAME (block_clear_fn, get_identifier (asmspec));
2738 static tree
2739 clear_storage_libcall_fn (int for_call)
2741 static bool emitted_extern;
2743 if (!block_clear_fn)
2744 init_block_clear_fn (NULL);
2746 if (for_call && !emitted_extern)
2748 emitted_extern = true;
2749 make_decl_rtl (block_clear_fn, NULL);
2750 assemble_external (block_clear_fn);
2753 return block_clear_fn;
2756 /* Generate code to copy Y into X.
2757 Both Y and X must have the same mode, except that
2758 Y can be a constant with VOIDmode.
2759 This mode cannot be BLKmode; use emit_block_move for that.
2761 Return the last instruction emitted. */
2764 emit_move_insn (rtx x, rtx y)
2766 enum machine_mode mode = GET_MODE (x);
2767 rtx y_cst = NULL_RTX;
2768 rtx last_insn, set;
2770 x = protect_from_queue (x, 1);
2771 y = protect_from_queue (y, 0);
2773 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2774 abort ();
2776 /* Never force constant_p_rtx to memory. */
2777 if (GET_CODE (y) == CONSTANT_P_RTX)
2779 else if (CONSTANT_P (y))
2781 if (optimize
2782 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
2783 && (last_insn = compress_float_constant (x, y)))
2784 return last_insn;
2786 y_cst = y;
2788 if (!LEGITIMATE_CONSTANT_P (y))
2790 y = force_const_mem (mode, y);
2792 /* If the target's cannot_force_const_mem prevented the spill,
2793 assume that the target's move expanders will also take care
2794 of the non-legitimate constant. */
2795 if (!y)
2796 y = y_cst;
2800 /* If X or Y are memory references, verify that their addresses are valid
2801 for the machine. */
2802 if (GET_CODE (x) == MEM
2803 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2804 && ! push_operand (x, GET_MODE (x)))
2805 || (flag_force_addr
2806 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2807 x = validize_mem (x);
2809 if (GET_CODE (y) == MEM
2810 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2811 || (flag_force_addr
2812 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2813 y = validize_mem (y);
2815 if (mode == BLKmode)
2816 abort ();
2818 last_insn = emit_move_insn_1 (x, y);
2820 if (y_cst && GET_CODE (x) == REG
2821 && (set = single_set (last_insn)) != NULL_RTX
2822 && SET_DEST (set) == x
2823 && ! rtx_equal_p (y_cst, SET_SRC (set)))
2824 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
2826 return last_insn;
2829 /* Low level part of emit_move_insn.
2830 Called just like emit_move_insn, but assumes X and Y
2831 are basically valid. */
2834 emit_move_insn_1 (rtx x, rtx y)
2836 enum machine_mode mode = GET_MODE (x);
2837 enum machine_mode submode;
2838 enum mode_class class = GET_MODE_CLASS (mode);
2840 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
2841 abort ();
2843 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2844 return
2845 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2847 /* Expand complex moves by moving real part and imag part, if possible. */
2848 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2849 && BLKmode != (submode = GET_MODE_INNER (mode))
2850 && (mov_optab->handlers[(int) submode].insn_code
2851 != CODE_FOR_nothing))
2853 /* Don't split destination if it is a stack push. */
2854 int stack = push_operand (x, GET_MODE (x));
2856 #ifdef PUSH_ROUNDING
2857 /* In case we output to the stack, but the size is smaller than what the
2858 machine can push exactly, we need to use move instructions. */
2859 if (stack
2860 && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
2861 != GET_MODE_SIZE (submode)))
2863 rtx temp;
2864 HOST_WIDE_INT offset1, offset2;
2866 /* Do not use anti_adjust_stack, since we don't want to update
2867 stack_pointer_delta. */
2868 temp = expand_binop (Pmode,
2869 #ifdef STACK_GROWS_DOWNWARD
2870 sub_optab,
2871 #else
2872 add_optab,
2873 #endif
2874 stack_pointer_rtx,
2875 GEN_INT
2876 (PUSH_ROUNDING
2877 (GET_MODE_SIZE (GET_MODE (x)))),
2878 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
2880 if (temp != stack_pointer_rtx)
2881 emit_move_insn (stack_pointer_rtx, temp);
2883 #ifdef STACK_GROWS_DOWNWARD
2884 offset1 = 0;
2885 offset2 = GET_MODE_SIZE (submode);
2886 #else
2887 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
2888 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
2889 + GET_MODE_SIZE (submode));
2890 #endif
2892 emit_move_insn (change_address (x, submode,
2893 gen_rtx_PLUS (Pmode,
2894 stack_pointer_rtx,
2895 GEN_INT (offset1))),
2896 gen_realpart (submode, y));
2897 emit_move_insn (change_address (x, submode,
2898 gen_rtx_PLUS (Pmode,
2899 stack_pointer_rtx,
2900 GEN_INT (offset2))),
2901 gen_imagpart (submode, y));
2903 else
2904 #endif
2905 /* If this is a stack push, push the high part first, so the parts
2906 will be in the argument order.
2908 In that case, change_address is used only to convert
2909 the mode, not to change the address. */
2910 if (stack)
2912 /* Note that the real part always precedes the imag part in memory
2913 regardless of machine's endianness. */
2914 #ifdef STACK_GROWS_DOWNWARD
2915 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2916 gen_imagpart (submode, y));
2917 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2918 gen_realpart (submode, y));
2919 #else
2920 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2921 gen_realpart (submode, y));
2922 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2923 gen_imagpart (submode, y));
2924 #endif
2926 else
2928 rtx realpart_x, realpart_y;
2929 rtx imagpart_x, imagpart_y;
2931 /* If this is a complex value with each part being smaller than a
2932 word, the usual calling sequence will likely pack the pieces into
2933 a single register. Unfortunately, SUBREG of hard registers only
2934 deals in terms of words, so we have a problem converting input
2935 arguments to the CONCAT of two registers that is used elsewhere
2936 for complex values. If this is before reload, we can copy it into
2937 memory and reload. FIXME, we should see about using extract and
2938 insert on integer registers, but complex short and complex char
2939 variables should be rarely used. */
2940 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2941 && (reload_in_progress | reload_completed) == 0)
2943 int packed_dest_p
2944 = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2945 int packed_src_p
2946 = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2948 if (packed_dest_p || packed_src_p)
2950 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2951 ? MODE_FLOAT : MODE_INT);
2953 enum machine_mode reg_mode
2954 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2956 if (reg_mode != BLKmode)
2958 rtx mem = assign_stack_temp (reg_mode,
2959 GET_MODE_SIZE (mode), 0);
2960 rtx cmem = adjust_address (mem, mode, 0);
2962 cfun->cannot_inline
2963 = N_("function using short complex types cannot be inline");
2965 if (packed_dest_p)
2967 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2969 emit_move_insn_1 (cmem, y);
2970 return emit_move_insn_1 (sreg, mem);
2972 else
2974 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2976 emit_move_insn_1 (mem, sreg);
2977 return emit_move_insn_1 (x, cmem);
2983 realpart_x = gen_realpart (submode, x);
2984 realpart_y = gen_realpart (submode, y);
2985 imagpart_x = gen_imagpart (submode, x);
2986 imagpart_y = gen_imagpart (submode, y);
2988 /* Show the output dies here. This is necessary for SUBREGs
2989 of pseudos since we cannot track their lifetimes correctly;
2990 hard regs shouldn't appear here except as return values.
2991 We never want to emit such a clobber after reload. */
2992 if (x != y
2993 && ! (reload_in_progress || reload_completed)
2994 && (GET_CODE (realpart_x) == SUBREG
2995 || GET_CODE (imagpart_x) == SUBREG))
2996 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2998 emit_move_insn (realpart_x, realpart_y);
2999 emit_move_insn (imagpart_x, imagpart_y);
3002 return get_last_insn ();
3005 /* Handle MODE_CC modes: If we don't have a special move insn for this mode,
3006 find a mode to do it in. If we have a movcc, use it. Otherwise,
3007 find the MODE_INT mode of the same width. */
3008 else if (GET_MODE_CLASS (mode) == MODE_CC
3009 && mov_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
3011 enum insn_code insn_code;
3012 enum machine_mode tmode = VOIDmode;
3013 rtx x1 = x, y1 = y;
3015 if (mode != CCmode
3016 && mov_optab->handlers[(int) CCmode].insn_code != CODE_FOR_nothing)
3017 tmode = CCmode;
3018 else
3019 for (tmode = QImode; tmode != VOIDmode;
3020 tmode = GET_MODE_WIDER_MODE (tmode))
3021 if (GET_MODE_SIZE (tmode) == GET_MODE_SIZE (mode))
3022 break;
3024 if (tmode == VOIDmode)
3025 abort ();
3027 /* Get X and Y in TMODE. We can't use gen_lowpart here because it
3028 may call change_address which is not appropriate if we were
3029 called when a reload was in progress. We don't have to worry
3030 about changing the address since the size in bytes is supposed to
3031 be the same. Copy the MEM to change the mode and move any
3032 substitutions from the old MEM to the new one. */
3034 if (reload_in_progress)
3036 x = gen_lowpart_common (tmode, x1);
3037 if (x == 0 && GET_CODE (x1) == MEM)
3039 x = adjust_address_nv (x1, tmode, 0);
3040 copy_replacements (x1, x);
3043 y = gen_lowpart_common (tmode, y1);
3044 if (y == 0 && GET_CODE (y1) == MEM)
3046 y = adjust_address_nv (y1, tmode, 0);
3047 copy_replacements (y1, y);
3050 else
3052 x = gen_lowpart (tmode, x);
3053 y = gen_lowpart (tmode, y);
3056 insn_code = mov_optab->handlers[(int) tmode].insn_code;
3057 return emit_insn (GEN_FCN (insn_code) (x, y));
3060 /* Try using a move pattern for the corresponding integer mode. This is
3061 only safe when simplify_subreg can convert MODE constants into integer
3062 constants. At present, it can only do this reliably if the value
3063 fits within a HOST_WIDE_INT. */
3064 else if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3065 && (submode = int_mode_for_mode (mode)) != BLKmode
3066 && mov_optab->handlers[submode].insn_code != CODE_FOR_nothing)
3067 return emit_insn (GEN_FCN (mov_optab->handlers[submode].insn_code)
3068 (simplify_gen_subreg (submode, x, mode, 0),
3069 simplify_gen_subreg (submode, y, mode, 0)));
3071 /* This will handle any multi-word or full-word mode that lacks a move_insn
3072 pattern. However, you will get better code if you define such patterns,
3073 even if they must turn into multiple assembler instructions. */
3074 else if (GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
3076 rtx last_insn = 0;
3077 rtx seq, inner;
3078 int need_clobber;
3079 int i;
3081 #ifdef PUSH_ROUNDING
3083 /* If X is a push on the stack, do the push now and replace
3084 X with a reference to the stack pointer. */
3085 if (push_operand (x, GET_MODE (x)))
3087 rtx temp;
3088 enum rtx_code code;
3090 /* Do not use anti_adjust_stack, since we don't want to update
3091 stack_pointer_delta. */
3092 temp = expand_binop (Pmode,
3093 #ifdef STACK_GROWS_DOWNWARD
3094 sub_optab,
3095 #else
3096 add_optab,
3097 #endif
3098 stack_pointer_rtx,
3099 GEN_INT
3100 (PUSH_ROUNDING
3101 (GET_MODE_SIZE (GET_MODE (x)))),
3102 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3104 if (temp != stack_pointer_rtx)
3105 emit_move_insn (stack_pointer_rtx, temp);
3107 code = GET_CODE (XEXP (x, 0));
3109 /* Just hope that small offsets off SP are OK. */
3110 if (code == POST_INC)
3111 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3112 GEN_INT (-((HOST_WIDE_INT)
3113 GET_MODE_SIZE (GET_MODE (x)))));
3114 else if (code == POST_DEC)
3115 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3116 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3117 else
3118 temp = stack_pointer_rtx;
3120 x = change_address (x, VOIDmode, temp);
3122 #endif
3124 /* If we are in reload, see if either operand is a MEM whose address
3125 is scheduled for replacement. */
3126 if (reload_in_progress && GET_CODE (x) == MEM
3127 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3128 x = replace_equiv_address_nv (x, inner);
3129 if (reload_in_progress && GET_CODE (y) == MEM
3130 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3131 y = replace_equiv_address_nv (y, inner);
3133 start_sequence ();
3135 need_clobber = 0;
3136 for (i = 0;
3137 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3138 i++)
3140 rtx xpart = operand_subword (x, i, 1, mode);
3141 rtx ypart = operand_subword (y, i, 1, mode);
3143 /* If we can't get a part of Y, put Y into memory if it is a
3144 constant. Otherwise, force it into a register. If we still
3145 can't get a part of Y, abort. */
3146 if (ypart == 0 && CONSTANT_P (y))
3148 y = force_const_mem (mode, y);
3149 ypart = operand_subword (y, i, 1, mode);
3151 else if (ypart == 0)
3152 ypart = operand_subword_force (y, i, mode);
3154 if (xpart == 0 || ypart == 0)
3155 abort ();
3157 need_clobber |= (GET_CODE (xpart) == SUBREG);
3159 last_insn = emit_move_insn (xpart, ypart);
3162 seq = get_insns ();
3163 end_sequence ();
3165 /* Show the output dies here. This is necessary for SUBREGs
3166 of pseudos since we cannot track their lifetimes correctly;
3167 hard regs shouldn't appear here except as return values.
3168 We never want to emit such a clobber after reload. */
3169 if (x != y
3170 && ! (reload_in_progress || reload_completed)
3171 && need_clobber != 0)
3172 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3174 emit_insn (seq);
3176 return last_insn;
3178 else
3179 abort ();
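/* An illustrative example of the multi-word fallback above (hypothetical
   32-bit target with no movti pattern): moving a 16-byte TImode value
   emits four SImode subword moves, preceded by a CLOBBER of the
   destination when some destination subword is a SUBREG of a pseudo, so
   that lifetimes remain trackable.  */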
3182 /* If Y is representable exactly in a narrower mode, and the target can
3183 perform the extension directly from constant or memory, then emit the
3184 move as an extension. */
3186 static rtx
3187 compress_float_constant (rtx x, rtx y)
3189 enum machine_mode dstmode = GET_MODE (x);
3190 enum machine_mode orig_srcmode = GET_MODE (y);
3191 enum machine_mode srcmode;
3192 REAL_VALUE_TYPE r;
3194 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3196 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3197 srcmode != orig_srcmode;
3198 srcmode = GET_MODE_WIDER_MODE (srcmode))
3200 enum insn_code ic;
3201 rtx trunc_y, last_insn;
3203 /* Skip if the target can't extend this way. */
3204 ic = can_extend_p (dstmode, srcmode, 0);
3205 if (ic == CODE_FOR_nothing)
3206 continue;
3208 /* Skip if the narrowed value isn't exact. */
3209 if (! exact_real_truncate (srcmode, &r))
3210 continue;
3212 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3214 if (LEGITIMATE_CONSTANT_P (trunc_y))
3216 /* Skip if the target needs extra instructions to perform
3217 the extension. */
3218 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3219 continue;
3221 else if (float_extend_from_mem[dstmode][srcmode])
3222 trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
3223 else
3224 continue;
3226 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3227 last_insn = get_last_insn ();
3229 if (GET_CODE (x) == REG)
3230 set_unique_reg_note (last_insn, REG_EQUAL, y);
3232 return last_insn;
3235 return NULL_RTX;
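/* An illustrative example: if Y is the DFmode constant 0.5, the value is
   exactly representable in SFmode, so on a target that can extend SFmode
   to DFmode directly from a constant or from memory the move is emitted
   as a float_extend of the SFmode constant, avoiding a double-precision
   constant pool entry.  */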
3238 /* Pushing data onto the stack. */
3240 /* Push a block of length SIZE (perhaps variable)
3241 and return an rtx to address the beginning of the block.
3242 Note that it is not possible for the value returned to be a QUEUED.
3243 The value may be virtual_outgoing_args_rtx.
3245 EXTRA is the number of bytes of padding to push in addition to SIZE.
3246 BELOW nonzero means this padding comes at low addresses;
3247 otherwise, the padding comes at high addresses. */
3250 push_block (rtx size, int extra, int below)
3252 rtx temp;
3254 size = convert_modes (Pmode, ptr_mode, size, 1);
3255 if (CONSTANT_P (size))
3256 anti_adjust_stack (plus_constant (size, extra));
3257 else if (GET_CODE (size) == REG && extra == 0)
3258 anti_adjust_stack (size);
3259 else
3261 temp = copy_to_mode_reg (Pmode, size);
3262 if (extra != 0)
3263 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3264 temp, 0, OPTAB_LIB_WIDEN);
3265 anti_adjust_stack (temp);
3268 #ifndef STACK_GROWS_DOWNWARD
3269 if (0)
3270 #else
3271 if (1)
3272 #endif
3274 temp = virtual_outgoing_args_rtx;
3275 if (extra != 0 && below)
3276 temp = plus_constant (temp, extra);
3278 else
3280 if (GET_CODE (size) == CONST_INT)
3281 temp = plus_constant (virtual_outgoing_args_rtx,
3282 -INTVAL (size) - (below ? 0 : extra));
3283 else if (extra != 0 && !below)
3284 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3285 negate_rtx (Pmode, plus_constant (size, extra)));
3286 else
3287 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3288 negate_rtx (Pmode, size));
3291 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3294 #ifdef PUSH_ROUNDING
3296 /* Emit single push insn. */
3298 static void
3299 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3301 rtx dest_addr;
3302 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3303 rtx dest;
3304 enum insn_code icode;
3305 insn_operand_predicate_fn pred;
3307 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3308 /* If there is a push pattern, use it. Otherwise fall back to the old way
3309 of handing a MEM that represents the push operation to the move expander. */
3310 icode = push_optab->handlers[(int) mode].insn_code;
3311 if (icode != CODE_FOR_nothing)
3313 if (((pred = insn_data[(int) icode].operand[0].predicate)
3314 && !((*pred) (x, mode))))
3315 x = force_reg (mode, x);
3316 emit_insn (GEN_FCN (icode) (x));
3317 return;
3319 if (GET_MODE_SIZE (mode) == rounded_size)
3320 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3321 /* If we are to pad downward, adjust the stack pointer first and
3322 then store X into the stack location using an offset. This is
3323 because emit_move_insn does not know how to pad; it does not have
3324 access to type. */
3325 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3327 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3328 HOST_WIDE_INT offset;
3330 emit_move_insn (stack_pointer_rtx,
3331 expand_binop (Pmode,
3332 #ifdef STACK_GROWS_DOWNWARD
3333 sub_optab,
3334 #else
3335 add_optab,
3336 #endif
3337 stack_pointer_rtx,
3338 GEN_INT (rounded_size),
3339 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3341 offset = (HOST_WIDE_INT) padding_size;
3342 #ifdef STACK_GROWS_DOWNWARD
3343 if (STACK_PUSH_CODE == POST_DEC)
3344 /* We have already decremented the stack pointer, so get the
3345 previous value. */
3346 offset += (HOST_WIDE_INT) rounded_size;
3347 #else
3348 if (STACK_PUSH_CODE == POST_INC)
3349 /* We have already incremented the stack pointer, so get the
3350 previous value. */
3351 offset -= (HOST_WIDE_INT) rounded_size;
3352 #endif
3353 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3355 else
3357 #ifdef STACK_GROWS_DOWNWARD
3358 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3359 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3360 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3361 #else
3362 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3363 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3364 GEN_INT (rounded_size));
3365 #endif
3366 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3369 dest = gen_rtx_MEM (mode, dest_addr);
3371 if (type != 0)
3373 set_mem_attributes (dest, type, 1);
3375 if (flag_optimize_sibling_calls)
3376 /* Function incoming arguments may overlap with sibling call
3377 outgoing arguments and we cannot allow reordering of reads
3378 from function arguments with stores to outgoing arguments
3379 of sibling calls. */
3380 set_mem_alias_set (dest, 0);
3382 emit_move_insn (dest, x);
3384 #endif
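/* An illustrative example of the downward-padding case above (hypothetical
   target where PUSH_ROUNDING rounds every push to 4 bytes): pushing an
   HImode argument that the ABI pads downward gives rounded_size == 4 and
   padding_size == 2, so the stack pointer is adjusted by 4 and the 2-byte
   value is stored 2 bytes above the new stack pointer (further corrected
   when STACK_PUSH_CODE is POST_DEC or POST_INC, as noted in the code).  */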
3386 /* Generate code to push X onto the stack, assuming it has mode MODE and
3387 type TYPE.
3388 MODE is redundant except when X is a CONST_INT (since they don't
3389 carry mode info).
3390 SIZE is an rtx for the size of data to be copied (in bytes),
3391 needed only if X is BLKmode.
3393 ALIGN (in bits) is maximum alignment we can assume.
3395 If PARTIAL and REG are both nonzero, then copy that many of the first
3396 words of X into registers starting with REG, and push the rest of X.
3397 The amount of space pushed is decreased by PARTIAL words,
3398 rounded *down* to a multiple of PARM_BOUNDARY.
3399 REG must be a hard register in this case.
3400 If REG is zero but PARTIAL is not, take all other actions for an
3401 argument partially in registers, but do not actually load any
3402 registers.
3404 EXTRA is the amount in bytes of extra space to leave next to this arg.
3405 This is ignored if an argument block has already been allocated.
3407 On a machine that lacks real push insns, ARGS_ADDR is the address of
3408 the bottom of the argument block for this call. We use indexing off there
3409 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3410 argument block has not been preallocated.
3412 ARGS_SO_FAR is the size of args previously pushed for this call.
3414 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3415 for arguments passed in registers. If nonzero, it will be the number
3416 of bytes required. */
3418 void
3419 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3420 unsigned int align, int partial, rtx reg, int extra,
3421 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3422 rtx alignment_pad)
3424 rtx xinner;
3425 enum direction stack_direction
3426 #ifdef STACK_GROWS_DOWNWARD
3427 = downward;
3428 #else
3429 = upward;
3430 #endif
3432 /* Decide where to pad the argument: `downward' for below,
3433 `upward' for above, or `none' for don't pad it.
3434 Default is below for small data on big-endian machines; else above. */
3435 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3437 /* Invert direction if stack is post-decrement.
3438 FIXME: why? */
3439 if (STACK_PUSH_CODE == POST_DEC)
3440 if (where_pad != none)
3441 where_pad = (where_pad == downward ? upward : downward);
3443 xinner = x = protect_from_queue (x, 0);
3445 if (mode == BLKmode)
3447 /* Copy a block into the stack, entirely or partially. */
3449 rtx temp;
3450 int used = partial * UNITS_PER_WORD;
3451 int offset;
3452 int skip;
3454 if (reg && GET_CODE (reg) == PARALLEL)
3456 /* Use the size of the elt to compute offset. */
3457 rtx elt = XEXP (XVECEXP (reg, 0, 0), 0);
3458 used = partial * GET_MODE_SIZE (GET_MODE (elt));
3459 offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3461 else
3462 offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3464 if (size == 0)
3465 abort ();
3467 used -= offset;
3469 /* USED is now the # of bytes we need not copy to the stack
3470 because registers will take care of them. */
3472 if (partial != 0)
3473 xinner = adjust_address (xinner, BLKmode, used);
3475 /* If the partial register-part of the arg counts in its stack size,
3476 skip the part of stack space corresponding to the registers.
3477 Otherwise, start copying to the beginning of the stack space,
3478 by setting SKIP to 0. */
3479 skip = (reg_parm_stack_space == 0) ? 0 : used;
3481 #ifdef PUSH_ROUNDING
3482 /* Do it with several push insns if that doesn't take lots of insns
3483 and if there is no difficulty with push insns that skip bytes
3484 on the stack for alignment purposes. */
3485 if (args_addr == 0
3486 && PUSH_ARGS
3487 && GET_CODE (size) == CONST_INT
3488 && skip == 0
3489 && MEM_ALIGN (xinner) >= align
3490 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3491 /* Here we avoid the case of a structure whose weak alignment
3492 forces many pushes of a small amount of data,
3493 and such small pushes do rounding that causes trouble. */
3494 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3495 || align >= BIGGEST_ALIGNMENT
3496 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3497 == (align / BITS_PER_UNIT)))
3498 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3500 /* Push padding now if padding above and stack grows down,
3501 or if padding below and stack grows up.
3502 But if space already allocated, this has already been done. */
3503 if (extra && args_addr == 0
3504 && where_pad != none && where_pad != stack_direction)
3505 anti_adjust_stack (GEN_INT (extra));
3507 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3509 else
3510 #endif /* PUSH_ROUNDING */
3512 rtx target;
3514 /* Otherwise make space on the stack and copy the data
3515 to the address of that space. */
3517 /* Deduct words put into registers from the size we must copy. */
3518 if (partial != 0)
3520 if (GET_CODE (size) == CONST_INT)
3521 size = GEN_INT (INTVAL (size) - used);
3522 else
3523 size = expand_binop (GET_MODE (size), sub_optab, size,
3524 GEN_INT (used), NULL_RTX, 0,
3525 OPTAB_LIB_WIDEN);
3528 /* Get the address of the stack space.
3529 In this case, we do not deal with EXTRA separately.
3530 A single stack adjust will do. */
3531 if (! args_addr)
3533 temp = push_block (size, extra, where_pad == downward);
3534 extra = 0;
3536 else if (GET_CODE (args_so_far) == CONST_INT)
3537 temp = memory_address (BLKmode,
3538 plus_constant (args_addr,
3539 skip + INTVAL (args_so_far)));
3540 else
3541 temp = memory_address (BLKmode,
3542 plus_constant (gen_rtx_PLUS (Pmode,
3543 args_addr,
3544 args_so_far),
3545 skip));
3547 if (!ACCUMULATE_OUTGOING_ARGS)
3549 /* If the source is referenced relative to the stack pointer,
3550 copy it to another register to stabilize it. We do not need
3551 to do this if we know that we won't be changing sp. */
3553 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3554 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3555 temp = copy_to_reg (temp);
3558 target = gen_rtx_MEM (BLKmode, temp);
3560 if (type != 0)
3562 set_mem_attributes (target, type, 1);
3563 /* Function incoming arguments may overlap with sibling call
3564 outgoing arguments and we cannot allow reordering of reads
3565 from function arguments with stores to outgoing arguments
3566 of sibling calls. */
3567 set_mem_alias_set (target, 0);
3570 /* ALIGN may well be stricter than TYPE's alignment, e.g. due to
3571 PARM_BOUNDARY. Assume the caller isn't lying. */
3572 set_mem_align (target, align);
3574 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3577 else if (partial > 0)
3579 /* Scalar partly in registers. */
3581 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3582 int i;
3583 int not_stack;
3584 /* # words of start of argument
3585 that we must make space for but need not store. */
3586 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3587 int args_offset = INTVAL (args_so_far);
3588 int skip;
3590 /* Push padding now if padding above and stack grows down,
3591 or if padding below and stack grows up.
3592 But if space already allocated, this has already been done. */
3593 if (extra && args_addr == 0
3594 && where_pad != none && where_pad != stack_direction)
3595 anti_adjust_stack (GEN_INT (extra));
3597 /* If we make space by pushing it, we might as well push
3598 the real data. Otherwise, we can leave OFFSET nonzero
3599 and leave the space uninitialized. */
3600 if (args_addr == 0)
3601 offset = 0;
3603 /* Now NOT_STACK gets the number of words that we don't need to
3604 allocate on the stack. */
3605 not_stack = partial - offset;
3607 /* If the partial register-part of the arg counts in its stack size,
3608 skip the part of stack space corresponding to the registers.
3609 Otherwise, start copying to the beginning of the stack space,
3610 by setting SKIP to 0. */
3611 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3613 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3614 x = validize_mem (force_const_mem (mode, x));
3616 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3617 SUBREGs of such registers are not allowed. */
3618 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3619 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3620 x = copy_to_reg (x);
3622 /* Loop over all the words allocated on the stack for this arg. */
3623 /* We can do it by words, because any scalar bigger than a word
3624 has a size a multiple of a word. */
3625 #ifndef PUSH_ARGS_REVERSED
3626 for (i = not_stack; i < size; i++)
3627 #else
3628 for (i = size - 1; i >= not_stack; i--)
3629 #endif
3630 if (i >= not_stack + offset)
3631 emit_push_insn (operand_subword_force (x, i, mode),
3632 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3633 0, args_addr,
3634 GEN_INT (args_offset + ((i - not_stack + skip)
3635 * UNITS_PER_WORD)),
3636 reg_parm_stack_space, alignment_pad);
3638 else
3640 rtx addr;
3641 rtx dest;
3643 /* Push padding now if padding above and stack grows down,
3644 or if padding below and stack grows up.
3645 But if space already allocated, this has already been done. */
3646 if (extra && args_addr == 0
3647 && where_pad != none && where_pad != stack_direction)
3648 anti_adjust_stack (GEN_INT (extra));
3650 #ifdef PUSH_ROUNDING
3651 if (args_addr == 0 && PUSH_ARGS)
3652 emit_single_push_insn (mode, x, type);
3653 else
3654 #endif
3656 if (GET_CODE (args_so_far) == CONST_INT)
3657 addr
3658 = memory_address (mode,
3659 plus_constant (args_addr,
3660 INTVAL (args_so_far)));
3661 else
3662 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3663 args_so_far));
3664 dest = gen_rtx_MEM (mode, addr);
3665 if (type != 0)
3667 set_mem_attributes (dest, type, 1);
3668 /* Function incoming arguments may overlap with sibling call
3669 outgoing arguments and we cannot allow reordering of reads
3670 from function arguments with stores to outgoing arguments
3671 of sibling calls. */
3672 set_mem_alias_set (dest, 0);
3675 emit_move_insn (dest, x);
3679 /* If part should go in registers, copy that part
3680 into the appropriate registers. Do this now, at the end,
3681 since mem-to-mem copies above may do function calls. */
3682 if (partial > 0 && reg != 0)
3684 /* Handle calls that pass values in multiple non-contiguous locations.
3685 The Irix 6 ABI has examples of this. */
3686 if (GET_CODE (reg) == PARALLEL)
3687 emit_group_load (reg, x, type, -1);
3688 else
3689 move_block_to_reg (REGNO (reg), x, partial, mode);
3692 if (extra && args_addr == 0 && where_pad == stack_direction)
3693 anti_adjust_stack (GEN_INT (extra));
3695 if (alignment_pad && args_addr == 0)
3696 anti_adjust_stack (alignment_pad);
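/* An illustrative example of the PARTIAL bookkeeping in the BLKmode path
   above (hypothetical 32-bit target, PARM_BOUNDARY == 64): with
   PARTIAL == 3 words, used starts at 12 bytes; offset = 12 % 8 == 4 of
   those bytes must still be copied to the stack because the pushed size
   is only reduced by a whole multiple of PARM_BOUNDARY, so used drops to
   8 and the block copy starts 8 bytes into the argument.  */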
3699 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3700 operations. */
3702 static rtx
3703 get_subtarget (rtx x)
3705 return ((x == 0
3706 /* Only registers can be subtargets. */
3707 || GET_CODE (x) != REG
3708 /* If the register is readonly, it can't be set more than once. */
3709 || RTX_UNCHANGING_P (x)
3710 /* Don't use hard regs to avoid extending their life. */
3711 || REGNO (x) < FIRST_PSEUDO_REGISTER
3712 /* Avoid subtargets inside loops,
3713 since they hide some invariant expressions. */
3714 || preserve_subexpressions_p ())
3715 ? 0 : x);
3718 /* Expand an assignment that stores the value of FROM into TO.
3719 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3720 (This may contain a QUEUED rtx;
3721 if the value is constant, this rtx is a constant.)
3722 Otherwise, the returned value is NULL_RTX. */
3724 rtx
3725 expand_assignment (tree to, tree from, int want_value)
3727 rtx to_rtx = 0;
3728 rtx result;
3730 /* Don't crash if the lhs of the assignment was erroneous. */
3732 if (TREE_CODE (to) == ERROR_MARK)
3734 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3735 return want_value ? result : NULL_RTX;
3738 /* Assignment of a structure component needs special treatment
3739 if the structure component's rtx is not simply a MEM.
3740 Assignment of an array element at a constant index, and assignment of
3741 an array element in an unaligned packed structure field, have the same
3742 problem. */
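/* For example: x.f = v; a[3] = v; or storing into an element of a packed
   structure's array field. */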
3744 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3745 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF
3746 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
3748 enum machine_mode mode1;
3749 HOST_WIDE_INT bitsize, bitpos;
3750 rtx orig_to_rtx;
3751 tree offset;
3752 int unsignedp;
3753 int volatilep = 0;
3754 tree tem;
3756 push_temp_slots ();
3757 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3758 &unsignedp, &volatilep);
3760 /* If we are going to use store_bit_field and extract_bit_field,
3761 make sure to_rtx will be safe for multiple use. */
3763 if (mode1 == VOIDmode && want_value)
3764 tem = stabilize_reference (tem);
3766 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
3768 if (offset != 0)
3770 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
3772 if (GET_CODE (to_rtx) != MEM)
3773 abort ();
3775 #ifdef POINTERS_EXTEND_UNSIGNED
3776 if (GET_MODE (offset_rtx) != Pmode)
3777 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
3778 #else
3779 if (GET_MODE (offset_rtx) != ptr_mode)
3780 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3781 #endif
3783 /* A constant address in TO_RTX can have VOIDmode; we must not try
3784 to call force_reg for that case. Avoid that case. */
3785 if (GET_CODE (to_rtx) == MEM
3786 && GET_MODE (to_rtx) == BLKmode
3787 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3788 && bitsize > 0
3789 && (bitpos % bitsize) == 0
3790 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3791 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
3793 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
3794 bitpos = 0;
3797 to_rtx = offset_address (to_rtx, offset_rtx,
3798 highest_pow2_factor_for_type (TREE_TYPE (to),
3799 offset));
3802 if (GET_CODE (to_rtx) == MEM)
3804 /* If the field is at offset zero, we could have been given the
3805 DECL_RTX of the parent struct. Don't munge it. */
3806 to_rtx = shallow_copy_rtx (to_rtx);
3808 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
3811 /* Deal with volatile and readonly fields. The former is only done
3812 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
3813 if (volatilep && GET_CODE (to_rtx) == MEM)
3815 if (to_rtx == orig_to_rtx)
3816 to_rtx = copy_rtx (to_rtx);
3817 MEM_VOLATILE_P (to_rtx) = 1;
3820 if (TREE_CODE (to) == COMPONENT_REF
3821 && TREE_READONLY (TREE_OPERAND (to, 1))
3822 /* We can't assert that a MEM won't be set more than once
3823 if the component is not addressable because another
3824 non-addressable component may be referenced by the same MEM. */
3825 && ! (GET_CODE (to_rtx) == MEM && ! can_address_p (to)))
3827 if (to_rtx == orig_to_rtx)
3828 to_rtx = copy_rtx (to_rtx);
3829 RTX_UNCHANGING_P (to_rtx) = 1;
3832 if (GET_CODE (to_rtx) == MEM && ! can_address_p (to))
3834 if (to_rtx == orig_to_rtx)
3835 to_rtx = copy_rtx (to_rtx);
3836 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
3839 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3840 (want_value
3841 /* Spurious cast for HPUX compiler. */
3842 ? ((enum machine_mode)
3843 TYPE_MODE (TREE_TYPE (to)))
3844 : VOIDmode),
3845 unsignedp, TREE_TYPE (tem), get_alias_set (to));
3847 preserve_temp_slots (result);
3848 free_temp_slots ();
3849 pop_temp_slots ();
3851 /* If the value is meaningful, convert RESULT to the proper mode.
3852 Otherwise, return nothing. */
3853 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3854 TYPE_MODE (TREE_TYPE (from)),
3855 result,
3856 TREE_UNSIGNED (TREE_TYPE (to)))
3857 : NULL_RTX);
3860 /* If the rhs is a function call and its value is not an aggregate,
3861 call the function before we start to compute the lhs.
3862 This is needed for correct code for cases such as
3863 val = setjmp (buf) on machines where reference to val
3864 requires loading up part of an address in a separate insn.
3866 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3867 since it might be a promoted variable where the zero- or sign-extension
3868 needs to be done. Handling this in the normal way is safe because no
3869 computation is done before the call. */
3870 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
3871 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3872 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3873 && GET_CODE (DECL_RTL (to)) == REG))
3875 rtx value;
3877 push_temp_slots ();
3878 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3879 if (to_rtx == 0)
3880 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3882 /* Handle calls that return values in multiple non-contiguous locations.
3883 The Irix 6 ABI has examples of this. */
3884 if (GET_CODE (to_rtx) == PARALLEL)
3885 emit_group_load (to_rtx, value, TREE_TYPE (from),
3886 int_size_in_bytes (TREE_TYPE (from)));
3887 else if (GET_MODE (to_rtx) == BLKmode)
3888 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
3889 else
3891 if (POINTER_TYPE_P (TREE_TYPE (to)))
3892 value = convert_memory_address (GET_MODE (to_rtx), value);
3893 emit_move_insn (to_rtx, value);
3895 preserve_temp_slots (to_rtx);
3896 free_temp_slots ();
3897 pop_temp_slots ();
3898 return want_value ? to_rtx : NULL_RTX;
3901 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3902 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3904 if (to_rtx == 0)
3905 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3907 /* Don't move directly into a return register. */
3908 if (TREE_CODE (to) == RESULT_DECL
3909 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3911 rtx temp;
3913 push_temp_slots ();
3914 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3916 if (GET_CODE (to_rtx) == PARALLEL)
3917 emit_group_load (to_rtx, temp, TREE_TYPE (from),
3918 int_size_in_bytes (TREE_TYPE (from)));
3919 else
3920 emit_move_insn (to_rtx, temp);
3922 preserve_temp_slots (to_rtx);
3923 free_temp_slots ();
3924 pop_temp_slots ();
3925 return want_value ? to_rtx : NULL_RTX;
3928 /* In case we are returning the contents of an object which overlaps
3929 the place the value is being stored, use a safe function when copying
3930 a value through a pointer into a structure value return block. */
3931 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3932 && current_function_returns_struct
3933 && !current_function_returns_pcc_struct)
3935 rtx from_rtx, size;
3937 push_temp_slots ();
3938 size = expr_size (from);
3939 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
3941 if (TARGET_MEM_FUNCTIONS)
3942 emit_library_call (memmove_libfunc, LCT_NORMAL,
3943 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3944 XEXP (from_rtx, 0), Pmode,
3945 convert_to_mode (TYPE_MODE (sizetype),
3946 size, TREE_UNSIGNED (sizetype)),
3947 TYPE_MODE (sizetype));
3948 else
3949 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3950 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3951 XEXP (to_rtx, 0), Pmode,
3952 convert_to_mode (TYPE_MODE (integer_type_node),
3953 size,
3954 TREE_UNSIGNED (integer_type_node)),
3955 TYPE_MODE (integer_type_node));
3957 preserve_temp_slots (to_rtx);
3958 free_temp_slots ();
3959 pop_temp_slots ();
3960 return want_value ? to_rtx : NULL_RTX;
3963 /* Compute FROM and store the value in the rtx we got. */
3965 push_temp_slots ();
3966 result = store_expr (from, to_rtx, want_value);
3967 preserve_temp_slots (result);
3968 free_temp_slots ();
3969 pop_temp_slots ();
3970 return want_value ? result : NULL_RTX;
3973 /* Generate code for computing expression EXP,
3974 and storing the value into TARGET.
3975 TARGET may contain a QUEUED rtx.
3977 If WANT_VALUE & 1 is nonzero, return a copy of the value
3978 not in TARGET, so that we can be sure to use the proper
3979 value in a containing expression even if TARGET has something
3980 else stored in it. If possible, we copy the value through a pseudo
3981 and return that pseudo. Or, if the value is constant, we try to
3982 return the constant. In some cases, we return a pseudo
3983 copied *from* TARGET.
3985 If the mode is BLKmode then we may return TARGET itself.
3986 It turns out that in BLKmode it doesn't cause a problem,
3987 because C has no operators that could combine two different
3988 assignments into the same BLKmode object with different values
3989 with no sequence point. Will other languages need this to
3990 be more thorough?
3992 If WANT_VALUE & 1 is 0, we return NULL, to make sure
3993 to catch quickly any cases where the caller uses the value
3994 and fails to set WANT_VALUE.
3996 If WANT_VALUE & 2 is set, this is a store into a call param on the
3997 stack, and block moves may need to be treated specially. */
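/* Thus WANT_VALUE == 1 asks only for the value, WANT_VALUE == 2 marks a store
   into a stack call parameter without returning a value, and WANT_VALUE == 3
   combines both. */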
3999 rtx
4000 store_expr (tree exp, rtx target, int want_value)
4002 rtx temp;
4003 rtx alt_rtl = NULL_RTX;
4004 int dont_return_target = 0;
4005 int dont_store_target = 0;
4007 if (VOID_TYPE_P (TREE_TYPE (exp)))
4009 /* C++ can generate ?: expressions with a throw expression in one
4010 branch and an rvalue in the other. Here, we resolve attempts to
4011 store the throw expression's nonexistent result. */
4012 if (want_value)
4013 abort ();
4014 expand_expr (exp, const0_rtx, VOIDmode, 0);
4015 return NULL_RTX;
4017 if (TREE_CODE (exp) == COMPOUND_EXPR)
4019 /* Perform first part of compound expression, then assign from second
4020 part. */
4021 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4022 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4023 emit_queue ();
4024 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
4026 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4028 /* For conditional expression, get safe form of the target. Then
4029 test the condition, doing the appropriate assignment on either
4030 side. This avoids the creation of unnecessary temporaries.
4031 For non-BLKmode, it is more efficient not to do this. */
4033 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4035 emit_queue ();
4036 target = protect_from_queue (target, 1);
4038 do_pending_stack_adjust ();
4039 NO_DEFER_POP;
4040 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4041 start_cleanup_deferral ();
4042 store_expr (TREE_OPERAND (exp, 1), target, want_value & 2);
4043 end_cleanup_deferral ();
4044 emit_queue ();
4045 emit_jump_insn (gen_jump (lab2));
4046 emit_barrier ();
4047 emit_label (lab1);
4048 start_cleanup_deferral ();
4049 store_expr (TREE_OPERAND (exp, 2), target, want_value & 2);
4050 end_cleanup_deferral ();
4051 emit_queue ();
4052 emit_label (lab2);
4053 OK_DEFER_POP;
4055 return want_value & 1 ? target : NULL_RTX;
4057 else if (queued_subexp_p (target))
4058 /* If target contains a postincrement, let's not risk
4059 using it as the place to generate the rhs. */
4061 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
4063 /* Expand EXP into a new pseudo. */
4064 temp = gen_reg_rtx (GET_MODE (target));
4065 temp = expand_expr (exp, temp, GET_MODE (target),
4066 (want_value & 2
4067 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4069 else
4070 temp = expand_expr (exp, NULL_RTX, GET_MODE (target),
4071 (want_value & 2
4072 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4074 /* If target is volatile, ANSI requires accessing the value
4075 *from* the target, if it is accessed. So make that happen.
4076 In no case return the target itself. */
4077 if (! MEM_VOLATILE_P (target) && (want_value & 1) != 0)
4078 dont_return_target = 1;
4080 else if ((want_value & 1) != 0
4081 && GET_CODE (target) == MEM
4082 && ! MEM_VOLATILE_P (target)
4083 && GET_MODE (target) != BLKmode)
4084 /* If target is in memory and caller wants value in a register instead,
4085 arrange that. Pass TARGET as target for expand_expr so that,
4086 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4087 We know expand_expr will not use the target in that case.
4088 Don't do this if TARGET is volatile because we are supposed
4089 to write it and then read it. */
4091 temp = expand_expr (exp, target, GET_MODE (target),
4092 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4093 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
4095 /* If TEMP is already in the desired TARGET, only copy it from
4096 memory and don't store it there again. */
4097 if (temp == target
4098 || (rtx_equal_p (temp, target)
4099 && ! side_effects_p (temp) && ! side_effects_p (target)))
4100 dont_store_target = 1;
4101 temp = copy_to_reg (temp);
4103 dont_return_target = 1;
4105 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4106 /* If this is a scalar in a register that is stored in a wider mode
4107 than the declared mode, compute the result into its declared mode
4108 and then convert to the wider mode. Our value is the computed
4109 expression. */
4111 rtx inner_target = 0;
4113 /* If we don't want a value, we can do the conversion inside EXP,
4114 which will often result in some optimizations. Do the conversion
4115 in two steps: first change the signedness, if needed, then
4116 the extend. But don't do this if the type of EXP is a subtype
4117 of something else since then the conversion might involve
4118 more than just converting modes. */
4119 if ((want_value & 1) == 0
4120 && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4121 && TREE_TYPE (TREE_TYPE (exp)) == 0)
4123 if (TREE_UNSIGNED (TREE_TYPE (exp))
4124 != SUBREG_PROMOTED_UNSIGNED_P (target))
4125 exp = convert
4126 ((*lang_hooks.types.signed_or_unsigned_type)
4127 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
4129 exp = convert ((*lang_hooks.types.type_for_mode)
4130 (GET_MODE (SUBREG_REG (target)),
4131 SUBREG_PROMOTED_UNSIGNED_P (target)),
4132 exp);
4134 inner_target = SUBREG_REG (target);
4137 temp = expand_expr (exp, inner_target, VOIDmode,
4138 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4140 /* If TEMP is a MEM and we want a result value, make the access
4141 now so it gets done only once. Strictly speaking, this is
4142 only necessary if the MEM is volatile, or if the address
4143 overlaps TARGET. But not performing the load twice also
4144 reduces the amount of rtl we generate and then have to CSE. */
4145 if (GET_CODE (temp) == MEM && (want_value & 1) != 0)
4146 temp = copy_to_reg (temp);
4148 /* If TEMP is a VOIDmode constant, use convert_modes to make
4149 sure that we properly convert it. */
4150 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4152 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4153 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4154 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4155 GET_MODE (target), temp,
4156 SUBREG_PROMOTED_UNSIGNED_P (target));
4159 convert_move (SUBREG_REG (target), temp,
4160 SUBREG_PROMOTED_UNSIGNED_P (target));
4162 /* If we promoted a constant, change the mode back down to match
4163 target. Otherwise, the caller might get confused by a result whose
4164 mode is larger than expected. */
4166 if ((want_value & 1) != 0 && GET_MODE (temp) != GET_MODE (target))
4168 if (GET_MODE (temp) != VOIDmode)
4170 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4171 SUBREG_PROMOTED_VAR_P (temp) = 1;
4172 SUBREG_PROMOTED_UNSIGNED_SET (temp,
4173 SUBREG_PROMOTED_UNSIGNED_P (target));
4175 else
4176 temp = convert_modes (GET_MODE (target),
4177 GET_MODE (SUBREG_REG (target)),
4178 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4181 return want_value & 1 ? temp : NULL_RTX;
4183 else
4185 temp = expand_expr_real (exp, target, GET_MODE (target),
4186 (want_value & 2
4187 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
4188 &alt_rtl);
4189 /* Return TARGET if it's a specified hardware register.
4190 If TARGET is a volatile mem ref, either return TARGET
4191 or return a reg copied *from* TARGET; ANSI requires this.
4193 Otherwise, if TEMP is not TARGET, return TEMP
4194 if it is constant (for efficiency),
4195 or if we really want the correct value. */
4196 if (!(target && GET_CODE (target) == REG
4197 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4198 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4199 && ! rtx_equal_p (temp, target)
4200 && (CONSTANT_P (temp) || (want_value & 1) != 0))
4201 dont_return_target = 1;
4204 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4205 the same as that of TARGET, adjust the constant. This is needed, for
4206 example, in case it is a CONST_DOUBLE and we want only a word-sized
4207 value. */
4208 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4209 && TREE_CODE (exp) != ERROR_MARK
4210 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4211 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4212 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4214 /* If value was not generated in the target, store it there.
4215 Convert the value to TARGET's type first if necessary.
4216 If TEMP and TARGET compare equal according to rtx_equal_p, but
4217 one or both of them are volatile memory refs, we have to distinguish
4218 two cases:
4219 - expand_expr has used TARGET. In this case, we must not generate
4220 another copy. This can be detected by TARGET being equal according
4221 to == .
4222 - expand_expr has not used TARGET - that means that the source just
4223 happens to have the same RTX form. Since temp will have been created
4224 by expand_expr, it will compare unequal according to == .
4225 We must generate a copy in this case, to reach the correct number
4226 of volatile memory references. */
4228 if ((! rtx_equal_p (temp, target)
4229 || (temp != target && (side_effects_p (temp)
4230 || side_effects_p (target))))
4231 && TREE_CODE (exp) != ERROR_MARK
4232 && ! dont_store_target
4233 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4234 but TARGET is not valid memory reference, TEMP will differ
4235 from TARGET although it is really the same location. */
4236 && !(alt_rtl && rtx_equal_p (alt_rtl, target))
4237 /* If there's nothing to copy, don't bother. Don't call expr_size
4238 unless necessary, because the expr_size hook of some front ends (C++)
4239 aborts on objects that are not supposed to be bit-copied or
4240 bit-initialized. */
4241 && expr_size (exp) != const0_rtx)
4243 target = protect_from_queue (target, 1);
4244 if (GET_MODE (temp) != GET_MODE (target)
4245 && GET_MODE (temp) != VOIDmode)
4247 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4248 if (dont_return_target)
4250 /* In this case, we will return TEMP,
4251 so make sure it has the proper mode.
4252 But don't forget to store the value into TARGET. */
4253 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4254 emit_move_insn (target, temp);
4256 else
4257 convert_move (target, temp, unsignedp);
4260 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4262 /* Handle copying a string constant into an array. The string
4263 constant may be shorter than the array. So copy just the string's
4264 actual length, and clear the rest. First get the size of the data
4265 type of the string, which is actually the size of the target. */
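/* For example, with char buf[8] = "ab"; the STRING_CST typically supplies
   3 bytes (including the terminating NUL); those are copied and the
   remaining 5 bytes of BUF are cleared. */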
4266 rtx size = expr_size (exp);
4268 if (GET_CODE (size) == CONST_INT
4269 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4270 emit_block_move (target, temp, size,
4271 (want_value & 2
4272 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4273 else
4275 /* Compute the size of the data to copy from the string. */
4276 tree copy_size
4277 = size_binop (MIN_EXPR,
4278 make_tree (sizetype, size),
4279 size_int (TREE_STRING_LENGTH (exp)));
4280 rtx copy_size_rtx
4281 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4282 (want_value & 2
4283 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4284 rtx label = 0;
4286 /* Copy that much. */
4287 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4288 TREE_UNSIGNED (sizetype));
4289 emit_block_move (target, temp, copy_size_rtx,
4290 (want_value & 2
4291 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4293 /* Figure out how much is left in TARGET that we have to clear.
4294 Do all calculations in ptr_mode. */
4295 if (GET_CODE (copy_size_rtx) == CONST_INT)
4297 size = plus_constant (size, -INTVAL (copy_size_rtx));
4298 target = adjust_address (target, BLKmode,
4299 INTVAL (copy_size_rtx));
4301 else
4303 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4304 copy_size_rtx, NULL_RTX, 0,
4305 OPTAB_LIB_WIDEN);
4307 #ifdef POINTERS_EXTEND_UNSIGNED
4308 if (GET_MODE (copy_size_rtx) != Pmode)
4309 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4310 TREE_UNSIGNED (sizetype));
4311 #endif
4313 target = offset_address (target, copy_size_rtx,
4314 highest_pow2_factor (copy_size));
4315 label = gen_label_rtx ();
4316 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4317 GET_MODE (size), 0, label);
4320 if (size != const0_rtx)
4321 clear_storage (target, size);
4323 if (label)
4324 emit_label (label);
4327 /* Handle calls that return values in multiple non-contiguous locations.
4328 The Irix 6 ABI has examples of this. */
4329 else if (GET_CODE (target) == PARALLEL)
4330 emit_group_load (target, temp, TREE_TYPE (exp),
4331 int_size_in_bytes (TREE_TYPE (exp)));
4332 else if (GET_MODE (temp) == BLKmode)
4333 emit_block_move (target, temp, expr_size (exp),
4334 (want_value & 2
4335 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4336 else
4338 temp = force_operand (temp, target);
4339 if (temp != target)
4340 emit_move_insn (target, temp);
4344 /* If we don't want a value, return NULL_RTX. */
4345 if ((want_value & 1) == 0)
4346 return NULL_RTX;
4348 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4349 ??? The latter test doesn't seem to make sense. */
4350 else if (dont_return_target && GET_CODE (temp) != MEM)
4351 return temp;
4353 /* Return TARGET itself if it is a hard register. */
4354 else if ((want_value & 1) != 0
4355 && GET_MODE (target) != BLKmode
4356 && ! (GET_CODE (target) == REG
4357 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4358 return copy_to_reg (target);
4360 else
4361 return target;
4364 /* Return 1 if EXP just contains zeros. FIXME merge with initializer_zerop. */
4366 static int
4367 is_zeros_p (tree exp)
4369 tree elt;
4371 switch (TREE_CODE (exp))
4373 case CONVERT_EXPR:
4374 case NOP_EXPR:
4375 case NON_LVALUE_EXPR:
4376 case VIEW_CONVERT_EXPR:
4377 return is_zeros_p (TREE_OPERAND (exp, 0));
4379 case INTEGER_CST:
4380 return integer_zerop (exp);
4382 case COMPLEX_CST:
4383 return
4384 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4386 case REAL_CST:
4387 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4389 case VECTOR_CST:
4390 for (elt = TREE_VECTOR_CST_ELTS (exp); elt;
4391 elt = TREE_CHAIN (elt))
4392 if (!is_zeros_p (TREE_VALUE (elt)))
4393 return 0;
4395 return 1;
4397 case CONSTRUCTOR:
4398 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4399 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4400 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4401 if (! is_zeros_p (TREE_VALUE (elt)))
4402 return 0;
4404 return 1;
4406 default:
4407 return 0;
4411 /* Return 1 if EXP contains mostly (3/4) zeros. */
4413 static int
4414 mostly_zeros_p (tree exp)
4416 if (TREE_CODE (exp) == CONSTRUCTOR)
4418 int elts = 0, zeros = 0;
4419 tree elt = CONSTRUCTOR_ELTS (exp);
4420 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4422 /* If there are no ranges of true bits, it is all zero. */
4423 return elt == NULL_TREE;
4425 for (; elt; elt = TREE_CHAIN (elt))
4427 /* We do not handle the case where the index is a RANGE_EXPR,
4428 so the statistic will be somewhat inaccurate.
4429 We do make a more accurate count in store_constructor itself,
4430 so since this function is only used for nested array elements,
4431 this should be close enough. */
4432 if (mostly_zeros_p (TREE_VALUE (elt)))
4433 zeros++;
4434 elts++;
4437 return 4 * zeros >= 3 * elts;
4440 return is_zeros_p (exp);
4443 /* Helper function for store_constructor.
4444 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4445 TYPE is the type of the CONSTRUCTOR, not the element type.
4446 CLEARED is as for store_constructor.
4447 ALIAS_SET is the alias set to use for any stores.
4449 This provides a recursive shortcut back to store_constructor when it isn't
4450 necessary to go through store_field. This is so that we can pass through
4451 the cleared field to let store_constructor know that we may not have to
4452 clear a substructure if the outer structure has already been cleared. */
4454 static void
4455 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
4456 HOST_WIDE_INT bitpos, enum machine_mode mode,
4457 tree exp, tree type, int cleared, int alias_set)
4459 if (TREE_CODE (exp) == CONSTRUCTOR
4460 && bitpos % BITS_PER_UNIT == 0
4461 /* If we have a nonzero bitpos for a register target, then we just
4462 let store_field do the bitfield handling. This is unlikely to
4463 generate unnecessary clear instructions anyway. */
4464 && (bitpos == 0 || GET_CODE (target) == MEM))
4466 if (GET_CODE (target) == MEM)
4467 target
4468 = adjust_address (target,
4469 GET_MODE (target) == BLKmode
4470 || 0 != (bitpos
4471 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4472 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4475 /* Update the alias set, if required. */
4476 if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
4477 && MEM_ALIAS_SET (target) != 0)
4479 target = copy_rtx (target);
4480 set_mem_alias_set (target, alias_set);
4483 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4485 else
4486 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
4487 alias_set);
4490 /* Store the value of constructor EXP into the rtx TARGET.
4491 TARGET is either a REG or a MEM; we know it cannot conflict, since
4492 safe_from_p has been called.
4493 CLEARED is true if TARGET is known to have been zero'd.
4494 SIZE is the number of bytes of TARGET we are allowed to modify: this
4495 may not be the same as the size of EXP if we are assigning to a field
4496 which has been packed to exclude padding bits. */
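/* Three kinds of constructor are handled below: RECORD, UNION and QUAL_UNION
   types; ARRAY and VECTOR types; and SET_TYPE constructors.  Anything else
   aborts. */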
4498 static void
4499 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
4501 tree type = TREE_TYPE (exp);
4502 #ifdef WORD_REGISTER_OPERATIONS
4503 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4504 #endif
4506 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4507 || TREE_CODE (type) == QUAL_UNION_TYPE)
4509 tree elt;
4511 /* If size is zero or the target is already cleared, do nothing. */
4512 if (size == 0 || cleared)
4513 cleared = 1;
4514 /* We either clear the aggregate or indicate the value is dead. */
4515 else if ((TREE_CODE (type) == UNION_TYPE
4516 || TREE_CODE (type) == QUAL_UNION_TYPE)
4517 && ! CONSTRUCTOR_ELTS (exp))
4518 /* If the constructor is empty, clear the union. */
4520 clear_storage (target, expr_size (exp));
4521 cleared = 1;
4524 /* If we are building a static constructor into a register,
4525 set the initial value as zero so we can fold the value into
4526 a constant. But if more than one register is involved,
4527 this probably loses. */
4528 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4529 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4531 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4532 cleared = 1;
4535 /* If the constructor has fewer fields than the structure
4536 or if we are initializing the structure to mostly zeros,
4537 clear the whole structure first. Don't do this if TARGET is a
4538 register whose mode size isn't equal to SIZE since clear_storage
4539 can't handle this case. */
4540 else if (((list_length (CONSTRUCTOR_ELTS (exp)) != fields_length (type))
4541 || mostly_zeros_p (exp))
4542 && (GET_CODE (target) != REG
4543 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4544 == size)))
4546 rtx xtarget = target;
4548 if (readonly_fields_p (type))
4550 xtarget = copy_rtx (xtarget);
4551 RTX_UNCHANGING_P (xtarget) = 1;
4554 clear_storage (xtarget, GEN_INT (size));
4555 cleared = 1;
4558 if (! cleared)
4559 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4561 /* Store each element of the constructor into
4562 the corresponding field of TARGET. */
4564 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4566 tree field = TREE_PURPOSE (elt);
4567 tree value = TREE_VALUE (elt);
4568 enum machine_mode mode;
4569 HOST_WIDE_INT bitsize;
4570 HOST_WIDE_INT bitpos = 0;
4571 tree offset;
4572 rtx to_rtx = target;
4574 /* Just ignore missing fields.
4575 We cleared the whole structure, above,
4576 if any fields are missing. */
4577 if (field == 0)
4578 continue;
4580 if (cleared && is_zeros_p (value))
4581 continue;
4583 if (host_integerp (DECL_SIZE (field), 1))
4584 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4585 else
4586 bitsize = -1;
4588 mode = DECL_MODE (field);
4589 if (DECL_BIT_FIELD (field))
4590 mode = VOIDmode;
4592 offset = DECL_FIELD_OFFSET (field);
4593 if (host_integerp (offset, 0)
4594 && host_integerp (bit_position (field), 0))
4596 bitpos = int_bit_position (field);
4597 offset = 0;
4599 else
4600 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4602 if (offset)
4604 rtx offset_rtx;
4606 if (CONTAINS_PLACEHOLDER_P (offset))
4607 offset = build (WITH_RECORD_EXPR, sizetype,
4608 offset, make_tree (TREE_TYPE (exp), target));
4610 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4611 if (GET_CODE (to_rtx) != MEM)
4612 abort ();
4614 #ifdef POINTERS_EXTEND_UNSIGNED
4615 if (GET_MODE (offset_rtx) != Pmode)
4616 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4617 #else
4618 if (GET_MODE (offset_rtx) != ptr_mode)
4619 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4620 #endif
4622 to_rtx = offset_address (to_rtx, offset_rtx,
4623 highest_pow2_factor (offset));
4626 if (TREE_READONLY (field))
4628 if (GET_CODE (to_rtx) == MEM)
4629 to_rtx = copy_rtx (to_rtx);
4631 RTX_UNCHANGING_P (to_rtx) = 1;
4634 #ifdef WORD_REGISTER_OPERATIONS
4635 /* If this initializes a field that is smaller than a word, at the
4636 start of a word, try to widen it to a full word.
4637 This special case allows us to output C++ member function
4638 initializations in a form that the optimizers can understand. */
4639 if (GET_CODE (target) == REG
4640 && bitsize < BITS_PER_WORD
4641 && bitpos % BITS_PER_WORD == 0
4642 && GET_MODE_CLASS (mode) == MODE_INT
4643 && TREE_CODE (value) == INTEGER_CST
4644 && exp_size >= 0
4645 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4647 tree type = TREE_TYPE (value);
4649 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4651 type = (*lang_hooks.types.type_for_size)
4652 (BITS_PER_WORD, TREE_UNSIGNED (type));
4653 value = convert (type, value);
4656 if (BYTES_BIG_ENDIAN)
4657 value
4658 = fold (build (LSHIFT_EXPR, type, value,
4659 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4660 bitsize = BITS_PER_WORD;
4661 mode = word_mode;
4663 #endif
4665 if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4666 && DECL_NONADDRESSABLE_P (field))
4668 to_rtx = copy_rtx (to_rtx);
4669 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4672 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4673 value, type, cleared,
4674 get_alias_set (TREE_TYPE (field)));
4677 else if (TREE_CODE (type) == ARRAY_TYPE
4678 || TREE_CODE (type) == VECTOR_TYPE)
4680 tree elt;
4681 int i;
4682 int need_to_clear;
4683 tree domain = TYPE_DOMAIN (type);
4684 tree elttype = TREE_TYPE (type);
4685 int const_bounds_p;
4686 HOST_WIDE_INT minelt = 0;
4687 HOST_WIDE_INT maxelt = 0;
4688 int icode = 0;
4689 rtx *vector = NULL;
4690 int elt_size = 0;
4691 unsigned n_elts = 0;
4693 /* Vectors are like arrays, but the domain is stored via an array
4694 type indirectly. */
4695 if (TREE_CODE (type) == VECTOR_TYPE)
4697 /* Note that although TYPE_DEBUG_REPRESENTATION_TYPE uses
4698 the same field as TYPE_DOMAIN, we are not guaranteed that
4699 it always will. */
4700 domain = TYPE_DEBUG_REPRESENTATION_TYPE (type);
4701 domain = TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain)));
4702 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
4704 enum machine_mode mode = GET_MODE (target);
4706 icode = (int) vec_init_optab->handlers[mode].insn_code;
4707 if (icode != CODE_FOR_nothing)
4709 unsigned int i;
4711 elt_size = GET_MODE_SIZE (GET_MODE_INNER (mode));
4712 n_elts = (GET_MODE_SIZE (mode) / elt_size);
4713 vector = alloca (n_elts * sizeof (rtx));
4714 for (i = 0; i < n_elts; i++)
4715 vector [i] = CONST0_RTX (GET_MODE_INNER (mode));
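/* The constructor's element values are collected into VECTOR below; the
   whole vector is then initialized with a single vec_init insn via the
   GEN_FCN (icode) call near the end of this branch. */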
4720 const_bounds_p = (TYPE_MIN_VALUE (domain)
4721 && TYPE_MAX_VALUE (domain)
4722 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4723 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4725 /* If we have constant bounds for the range of the type, get them. */
4726 if (const_bounds_p)
4728 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4729 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4732 /* If the constructor has fewer elements than the array,
4733 clear the whole array first. Similarly if this is
4734 static constructor of a non-BLKmode object. */
4735 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4736 need_to_clear = 1;
4737 else
4739 HOST_WIDE_INT count = 0, zero_count = 0;
4740 need_to_clear = ! const_bounds_p;
4742 /* This loop is a more accurate version of the loop in
4743 mostly_zeros_p (it handles RANGE_EXPR in an index).
4744 It is also needed to check for missing elements. */
4745 for (elt = CONSTRUCTOR_ELTS (exp);
4746 elt != NULL_TREE && ! need_to_clear;
4747 elt = TREE_CHAIN (elt))
4749 tree index = TREE_PURPOSE (elt);
4750 HOST_WIDE_INT this_node_count;
4752 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4754 tree lo_index = TREE_OPERAND (index, 0);
4755 tree hi_index = TREE_OPERAND (index, 1);
4757 if (! host_integerp (lo_index, 1)
4758 || ! host_integerp (hi_index, 1))
4760 need_to_clear = 1;
4761 break;
4764 this_node_count = (tree_low_cst (hi_index, 1)
4765 - tree_low_cst (lo_index, 1) + 1);
4767 else
4768 this_node_count = 1;
4770 count += this_node_count;
4771 if (mostly_zeros_p (TREE_VALUE (elt)))
4772 zero_count += this_node_count;
4775 /* Clear the entire array first if there are any missing elements,
4776 or if the incidence of zero elements is >= 75%. */
4777 if (! need_to_clear
4778 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
4779 need_to_clear = 1;
4782 if (need_to_clear && size > 0 && !vector)
4784 if (! cleared)
4786 if (REG_P (target))
4787 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4788 else
4789 clear_storage (target, GEN_INT (size));
4791 cleared = 1;
4793 else if (REG_P (target))
4794 /* Inform later passes that the old value is dead. */
4795 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4797 /* Store each element of the constructor into
4798 the corresponding element of TARGET, determined
4799 by counting the elements. */
4800 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4801 elt;
4802 elt = TREE_CHAIN (elt), i++)
4804 enum machine_mode mode;
4805 HOST_WIDE_INT bitsize;
4806 HOST_WIDE_INT bitpos;
4807 int unsignedp;
4808 tree value = TREE_VALUE (elt);
4809 tree index = TREE_PURPOSE (elt);
4810 rtx xtarget = target;
4812 if (cleared && is_zeros_p (value))
4813 continue;
4815 unsignedp = TREE_UNSIGNED (elttype);
4816 mode = TYPE_MODE (elttype);
4817 if (mode == BLKmode)
4818 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4819 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4820 : -1);
4821 else
4822 bitsize = GET_MODE_BITSIZE (mode);
4824 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4826 tree lo_index = TREE_OPERAND (index, 0);
4827 tree hi_index = TREE_OPERAND (index, 1);
4828 rtx index_r, pos_rtx, loop_end;
4829 struct nesting *loop;
4830 HOST_WIDE_INT lo, hi, count;
4831 tree position;
4833 if (vector)
4834 abort ();
4836 /* If the range is constant and "small", unroll the loop. */
4837 if (const_bounds_p
4838 && host_integerp (lo_index, 0)
4839 && host_integerp (hi_index, 0)
4840 && (lo = tree_low_cst (lo_index, 0),
4841 hi = tree_low_cst (hi_index, 0),
4842 count = hi - lo + 1,
4843 (GET_CODE (target) != MEM
4844 || count <= 2
4845 || (host_integerp (TYPE_SIZE (elttype), 1)
4846 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4847 <= 40 * 8)))))
4849 lo -= minelt; hi -= minelt;
4850 for (; lo <= hi; lo++)
4852 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4854 if (GET_CODE (target) == MEM
4855 && !MEM_KEEP_ALIAS_SET_P (target)
4856 && TREE_CODE (type) == ARRAY_TYPE
4857 && TYPE_NONALIASED_COMPONENT (type))
4859 target = copy_rtx (target);
4860 MEM_KEEP_ALIAS_SET_P (target) = 1;
4863 store_constructor_field
4864 (target, bitsize, bitpos, mode, value, type, cleared,
4865 get_alias_set (elttype));
4868 else
4870 expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4871 loop_end = gen_label_rtx ();
4873 unsignedp = TREE_UNSIGNED (domain);
4875 index = build_decl (VAR_DECL, NULL_TREE, domain);
4877 index_r
4878 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4879 &unsignedp, 0));
4880 SET_DECL_RTL (index, index_r);
4881 if (TREE_CODE (value) == SAVE_EXPR
4882 && SAVE_EXPR_RTL (value) == 0)
4884 /* Make sure value gets expanded once before the
4885 loop. */
4886 expand_expr (value, const0_rtx, VOIDmode, 0);
4887 emit_queue ();
4889 store_expr (lo_index, index_r, 0);
4890 loop = expand_start_loop (0);
4892 /* Assign value to element index. */
4893 position
4894 = convert (ssizetype,
4895 fold (build (MINUS_EXPR, TREE_TYPE (index),
4896 index, TYPE_MIN_VALUE (domain))));
4897 position = size_binop (MULT_EXPR, position,
4898 convert (ssizetype,
4899 TYPE_SIZE_UNIT (elttype)));
4901 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4902 xtarget = offset_address (target, pos_rtx,
4903 highest_pow2_factor (position));
4904 xtarget = adjust_address (xtarget, mode, 0);
4905 if (TREE_CODE (value) == CONSTRUCTOR)
4906 store_constructor (value, xtarget, cleared,
4907 bitsize / BITS_PER_UNIT);
4908 else
4909 store_expr (value, xtarget, 0);
4911 expand_exit_loop_if_false (loop,
4912 build (LT_EXPR, integer_type_node,
4913 index, hi_index));
4915 expand_increment (build (PREINCREMENT_EXPR,
4916 TREE_TYPE (index),
4917 index, integer_one_node), 0, 0);
4918 expand_end_loop ();
4919 emit_label (loop_end);
4922 else if ((index != 0 && ! host_integerp (index, 0))
4923 || ! host_integerp (TYPE_SIZE (elttype), 1))
4925 tree position;
4927 if (vector)
4928 abort ();
4930 if (index == 0)
4931 index = ssize_int (1);
4933 if (minelt)
4934 index = convert (ssizetype,
4935 fold (build (MINUS_EXPR, TREE_TYPE (index), index,
4936 TYPE_MIN_VALUE (domain))));
4938 position = size_binop (MULT_EXPR, index,
4939 convert (ssizetype,
4940 TYPE_SIZE_UNIT (elttype)));
4941 xtarget = offset_address (target,
4942 expand_expr (position, 0, VOIDmode, 0),
4943 highest_pow2_factor (position));
4944 xtarget = adjust_address (xtarget, mode, 0);
4945 store_expr (value, xtarget, 0);
4947 else if (vector)
4949 int pos;
4951 if (index != 0)
4952 pos = tree_low_cst (index, 0) - minelt;
4953 else
4954 pos = i;
4955 vector[pos] = expand_expr (value, NULL_RTX, VOIDmode, 0);
4957 else
4959 if (index != 0)
4960 bitpos = ((tree_low_cst (index, 0) - minelt)
4961 * tree_low_cst (TYPE_SIZE (elttype), 1));
4962 else
4963 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4965 if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
4966 && TREE_CODE (type) == ARRAY_TYPE
4967 && TYPE_NONALIASED_COMPONENT (type))
4969 target = copy_rtx (target);
4970 MEM_KEEP_ALIAS_SET_P (target) = 1;
4972 store_constructor_field (target, bitsize, bitpos, mode, value,
4973 type, cleared, get_alias_set (elttype));
4976 if (vector)
4978 emit_insn (GEN_FCN (icode) (target,
4979 gen_rtx_PARALLEL (GET_MODE (target),
4980 gen_rtvec_v (n_elts, vector))));
4984 /* Set constructor assignments. */
4985 else if (TREE_CODE (type) == SET_TYPE)
4987 tree elt = CONSTRUCTOR_ELTS (exp);
4988 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
4989 tree domain = TYPE_DOMAIN (type);
4990 tree domain_min, domain_max, bitlength;
4992 /* The default implementation strategy is to extract the constant
4993 parts of the constructor, use that to initialize the target,
4994 and then "or" in whatever non-constant ranges we need in addition.
4996 If a large set is all zero or all ones, it is
4997 probably better to set it using memset (if available) or bzero.
4998 Also, if a large set has just a single range, it may also be
4999 better to first clear the set (using
5000 bzero/memset), and then set the bits we want. */
5002 /* Check for all zeros. */
5003 if (elt == NULL_TREE && size > 0)
5005 if (!cleared)
5006 clear_storage (target, GEN_INT (size));
5007 return;
5010 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
5011 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
5012 bitlength = size_binop (PLUS_EXPR,
5013 size_diffop (domain_max, domain_min),
5014 ssize_int (1));
5016 nbits = tree_low_cst (bitlength, 1);
5018 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
5019 are "complicated" (more than one range), initialize (the
5020 constant parts) by copying from a constant. */
5021 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
5022 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
5024 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
5025 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
5026 char *bit_buffer = alloca (nbits);
5027 HOST_WIDE_INT word = 0;
5028 unsigned int bit_pos = 0;
5029 unsigned int ibit = 0;
5030 unsigned int offset = 0; /* In bytes from beginning of set. */
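/* Roughly: gather the constant bits into SET_WORD_SIZE-bit words; each
   completed word is stored into the target unless it is zero and the
   target has already been cleared. */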
5032 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
5033 for (;;)
5035 if (bit_buffer[ibit])
5037 if (BYTES_BIG_ENDIAN)
5038 word |= (1 << (set_word_size - 1 - bit_pos));
5039 else
5040 word |= 1 << bit_pos;
5043 bit_pos++; ibit++;
5044 if (bit_pos >= set_word_size || ibit == nbits)
5046 if (word != 0 || ! cleared)
5048 rtx datum = GEN_INT (word);
5049 rtx to_rtx;
5051 /* The assumption here is that it is safe to use
5052 XEXP if the set is multi-word, but not if
5053 it's single-word. */
5054 if (GET_CODE (target) == MEM)
5055 to_rtx = adjust_address (target, mode, offset);
5056 else if (offset == 0)
5057 to_rtx = target;
5058 else
5059 abort ();
5060 emit_move_insn (to_rtx, datum);
5063 if (ibit == nbits)
5064 break;
5065 word = 0;
5066 bit_pos = 0;
5067 offset += set_word_size / BITS_PER_UNIT;
5071 else if (!cleared)
5072 /* Don't bother clearing storage if the set is all ones. */
5073 if (TREE_CHAIN (elt) != NULL_TREE
5074 || (TREE_PURPOSE (elt) == NULL_TREE
5075 ? nbits != 1
5076 : ( ! host_integerp (TREE_VALUE (elt), 0)
5077 || ! host_integerp (TREE_PURPOSE (elt), 0)
5078 || (tree_low_cst (TREE_VALUE (elt), 0)
5079 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5080 != (HOST_WIDE_INT) nbits))))
5081 clear_storage (target, expr_size (exp));
5083 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
5085 /* Start of range of element or NULL. */
5086 tree startbit = TREE_PURPOSE (elt);
5087 /* End of range of element, or element value. */
5088 tree endbit = TREE_VALUE (elt);
5089 HOST_WIDE_INT startb, endb;
5090 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
5092 bitlength_rtx = expand_expr (bitlength,
5093 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
5095 /* Handle non-range tuple element like [ expr ]. */
5096 if (startbit == NULL_TREE)
5098 startbit = save_expr (endbit);
5099 endbit = startbit;
5102 startbit = convert (sizetype, startbit);
5103 endbit = convert (sizetype, endbit);
5104 if (! integer_zerop (domain_min))
5106 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5107 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5109 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
5110 EXPAND_CONST_ADDRESS);
5111 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
5112 EXPAND_CONST_ADDRESS);
5114 if (REG_P (target))
5116 targetx
5117 = assign_temp
5118 ((build_qualified_type ((*lang_hooks.types.type_for_mode)
5119 (GET_MODE (target), 0),
5120 TYPE_QUAL_CONST)),
5121 0, 1, 1);
5122 emit_move_insn (targetx, target);
5125 else if (GET_CODE (target) == MEM)
5126 targetx = target;
5127 else
5128 abort ();
5130 /* Optimization: If startbit and endbit are constants divisible
5131 by BITS_PER_UNIT, call memset instead. */
5132 if (TARGET_MEM_FUNCTIONS
5133 && TREE_CODE (startbit) == INTEGER_CST
5134 && TREE_CODE (endbit) == INTEGER_CST
5135 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
5136 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
5138 emit_library_call (memset_libfunc, LCT_NORMAL,
5139 VOIDmode, 3,
5140 plus_constant (XEXP (targetx, 0),
5141 startb / BITS_PER_UNIT),
5142 Pmode,
5143 constm1_rtx, TYPE_MODE (integer_type_node),
5144 GEN_INT ((endb - startb) / BITS_PER_UNIT),
5145 TYPE_MODE (sizetype));
5147 else
5148 emit_library_call (setbits_libfunc, LCT_NORMAL,
5149 VOIDmode, 4, XEXP (targetx, 0),
5150 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5151 startbit_rtx, TYPE_MODE (sizetype),
5152 endbit_rtx, TYPE_MODE (sizetype));
5154 if (REG_P (target))
5155 emit_move_insn (target, targetx);
5159 else
5160 abort ();
5163 /* Store the value of EXP (an expression tree)
5164 into a subfield of TARGET which has mode MODE and occupies
5165 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5166 If MODE is VOIDmode, it means that we are storing into a bit-field.
5168 If VALUE_MODE is VOIDmode, return nothing in particular.
5169 UNSIGNEDP is not used in this case.
5171 Otherwise, return an rtx for the value stored. This rtx
5172 has mode VALUE_MODE if that is convenient to do.
5173 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5175 TYPE is the type of the underlying object,
5177 ALIAS_SET is the alias set for the destination. This value will
5178 (in general) be different from that for TARGET, since TARGET is a
5179 reference to the containing structure. */
5181 static rtx
5182 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5183 enum machine_mode mode, tree exp, enum machine_mode value_mode,
5184 int unsignedp, tree type, int alias_set)
5186 HOST_WIDE_INT width_mask = 0;
5188 if (TREE_CODE (exp) == ERROR_MARK)
5189 return const0_rtx;
5191 /* If we have nothing to store, do nothing unless the expression has
5192 side-effects. */
5193 if (bitsize == 0)
5194 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5195 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5196 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
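/* For example, a BITSIZE of 5 yields a WIDTH_MASK of 0x1f. */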
5198 /* If we are storing into an unaligned field of an aligned union that is
5199 in a register, we may have the mode of TARGET being an integer mode but
5200 MODE == BLKmode. In that case, get an aligned object whose size and
5201 alignment are the same as TARGET and store TARGET into it (we can avoid
5202 the store if the field being stored is the entire width of TARGET). Then
5203 call ourselves recursively to store the field into a BLKmode version of
5204 that object. Finally, load from the object into TARGET. This is not
5205 very efficient in general, but should only be slightly more expensive
5206 than the otherwise-required unaligned accesses. Perhaps this can be
5207 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5208 twice, once with emit_move_insn and once via store_field. */
5210 if (mode == BLKmode
5211 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5213 rtx object = assign_temp (type, 0, 1, 1);
5214 rtx blk_object = adjust_address (object, BLKmode, 0);
5216 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5217 emit_move_insn (object, target);
5219 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
5220 alias_set);
5222 emit_move_insn (target, object);
5224 /* We want to return the BLKmode version of the data. */
5225 return blk_object;
5228 if (GET_CODE (target) == CONCAT)
5230 /* We're storing into a struct containing a single __complex. */
5232 if (bitpos != 0)
5233 abort ();
5234 return store_expr (exp, target, 0);
5237 /* If the structure is in a register or if the component
5238 is a bit field, we cannot use addressing to access it.
5239 Use bit-field techniques or SUBREG to store in it. */
5241 if (mode == VOIDmode
5242 || (mode != BLKmode && ! direct_store[(int) mode]
5243 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5244 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5245 || GET_CODE (target) == REG
5246 || GET_CODE (target) == SUBREG
5247 /* If the field isn't aligned enough to store as an ordinary memref,
5248 store it as a bit field. */
5249 || (mode != BLKmode
5250 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5251 || bitpos % GET_MODE_ALIGNMENT (mode))
5252 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5253 || (bitpos % BITS_PER_UNIT != 0)))
5254 /* If the RHS and field are a constant size and the size of the
5255 RHS isn't the same size as the bitfield, we must use bitfield
5256 operations. */
5257 || (bitsize >= 0
5258 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5259 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5261 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5263 /* If BITSIZE is narrower than the size of the type of EXP
5264 we will be narrowing TEMP. Normally, what's wanted are the
5265 low-order bits. However, if EXP's type is a record and this is
5266 a big-endian machine, we want the upper BITSIZE bits.
5267 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5268 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5269 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5270 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5271 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5272 - bitsize),
5273 NULL_RTX, 1);
5275 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5276 MODE. */
5277 if (mode != VOIDmode && mode != BLKmode
5278 && mode != TYPE_MODE (TREE_TYPE (exp)))
5279 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5281 /* If the modes of TARGET and TEMP are both BLKmode, both
5282 must be in memory and BITPOS must be aligned on a byte
5283 boundary. If so, we simply do a block copy. */
5284 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5286 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5287 || bitpos % BITS_PER_UNIT != 0)
5288 abort ();
5290 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5291 emit_block_move (target, temp,
5292 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5293 / BITS_PER_UNIT),
5294 BLOCK_OP_NORMAL);
5296 return value_mode == VOIDmode ? const0_rtx : target;
5299 /* Store the value in the bitfield. */
5300 store_bit_field (target, bitsize, bitpos, mode, temp,
5301 int_size_in_bytes (type));
5303 if (value_mode != VOIDmode)
5305 /* The caller wants an rtx for the value.
5306 If possible, avoid refetching from the bitfield itself. */
5307 if (width_mask != 0
5308 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5310 tree count;
5311 enum machine_mode tmode;
5313 tmode = GET_MODE (temp);
5314 if (tmode == VOIDmode)
5315 tmode = value_mode;
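/* For an unsigned field, an AND with WIDTH_MASK recovers the stored value;
   for a signed field, shift left and then arithmetically right by the same
   amount to sign-extend it. */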
5317 if (unsignedp)
5318 return expand_and (tmode, temp,
5319 gen_int_mode (width_mask, tmode),
5320 NULL_RTX);
5322 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5323 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5324 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5327 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5328 NULL_RTX, value_mode, VOIDmode,
5329 int_size_in_bytes (type));
5331 return const0_rtx;
5333 else
5335 rtx addr = XEXP (target, 0);
5336 rtx to_rtx = target;
5338 /* If a value is wanted, it must be the lhs;
5339 so make the address stable for multiple use. */
5341 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5342 && ! CONSTANT_ADDRESS_P (addr)
5343 /* A frame-pointer reference is already stable. */
5344 && ! (GET_CODE (addr) == PLUS
5345 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5346 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5347 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5348 to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
5350 /* Now build a reference to just the desired component. */
5352 to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5354 if (to_rtx == target)
5355 to_rtx = copy_rtx (to_rtx);
5357 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5358 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5359 set_mem_alias_set (to_rtx, alias_set);
5361 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5365 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5366 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5367 codes and find the ultimate containing object, which we return.
5369 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5370 bit position, and *PUNSIGNEDP to the signedness of the field.
5371 If the position of the field is variable, we store a tree
5372 giving the variable offset (in units) in *POFFSET.
5373 This offset is in addition to the bit position.
5374 If the position is not variable, we store 0 in *POFFSET.
5376 If any of the extraction expressions is volatile,
5377 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5379 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5380 is a mode that can be used to access the field. In that case, *PBITSIZE
5381 is redundant.
5383 If the field describes a variable-sized object, *PMODE is set to
5384 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5385 this case, but the address of the object can be found. */
5387 tree
5388 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5389 HOST_WIDE_INT *pbitpos, tree *poffset,
5390 enum machine_mode *pmode, int *punsignedp,
5391 int *pvolatilep)
5393 tree size_tree = 0;
5394 enum machine_mode mode = VOIDmode;
5395 tree offset = size_zero_node;
5396 tree bit_offset = bitsize_zero_node;
5397 tree placeholder_ptr = 0;
5398 tree tem;
5400 /* First get the mode, signedness, and size. We do this from just the
5401 outermost expression. */
5402 if (TREE_CODE (exp) == COMPONENT_REF)
5404 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5405 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5406 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5408 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5410 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5412 size_tree = TREE_OPERAND (exp, 1);
5413 *punsignedp = TREE_UNSIGNED (exp);
5415 else
5417 mode = TYPE_MODE (TREE_TYPE (exp));
5418 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5420 if (mode == BLKmode)
5421 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5422 else
5423 *pbitsize = GET_MODE_BITSIZE (mode);
5426 if (size_tree != 0)
5428 if (! host_integerp (size_tree, 1))
5429 mode = BLKmode, *pbitsize = -1;
5430 else
5431 *pbitsize = tree_low_cst (size_tree, 1);
5434 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5435 and find the ultimate containing object. */
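/* For example, for a.b.c with constant field positions, the bit offsets of
   .b and .c accumulate in BIT_OFFSET and 'a' is returned; a variable array
   index, as in a.b[i], instead contributes i * element-size to OFFSET. */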
5436 while (1)
5438 if (TREE_CODE (exp) == BIT_FIELD_REF)
5439 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5440 else if (TREE_CODE (exp) == COMPONENT_REF)
5442 tree field = TREE_OPERAND (exp, 1);
5443 tree this_offset = DECL_FIELD_OFFSET (field);
5445 /* If this field hasn't been filled in yet, don't go
5446 past it. This should only happen when folding expressions
5447 made during type construction. */
5448 if (this_offset == 0)
5449 break;
5450 else if (CONTAINS_PLACEHOLDER_P (this_offset))
5451 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5453 offset = size_binop (PLUS_EXPR, offset, this_offset);
5454 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5455 DECL_FIELD_BIT_OFFSET (field));
5457 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5460 else if (TREE_CODE (exp) == ARRAY_REF
5461 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5463 tree index = TREE_OPERAND (exp, 1);
5464 tree array = TREE_OPERAND (exp, 0);
5465 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5466 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5467 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
5469 /* We assume all arrays have sizes that are a multiple of a byte.
5470 First subtract the lower bound, if any, in the type of the
5471 index, then convert to sizetype and multiply by the size of the
5472 array element. */
5473 if (low_bound != 0 && ! integer_zerop (low_bound))
5474 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5475 index, low_bound));
5477 /* If the index has a self-referential type, pass it to a
5478 WITH_RECORD_EXPR; if the component size does, pass our
5479 component to one. */
5480 if (CONTAINS_PLACEHOLDER_P (index))
5481 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5482 if (CONTAINS_PLACEHOLDER_P (unit_size))
5483 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
5485 offset = size_binop (PLUS_EXPR, offset,
5486 size_binop (MULT_EXPR,
5487 convert (sizetype, index),
5488 unit_size));
5491 else if (TREE_CODE (exp) == PLACEHOLDER_EXPR)
5493 tree new = find_placeholder (exp, &placeholder_ptr);
5495 /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
5496 We might have been called from tree optimization where we
5497 haven't set up an object yet. */
5498 if (new == 0)
5499 break;
5500 else
5501 exp = new;
5503 continue;
5506 /* We can go inside most conversions: all NON_LVALUE_EXPRs, all normal
5507 conversions that don't change the mode, and all view conversions
5508 except those that need to "step up" the alignment. */
5509 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5510 && ! (TREE_CODE (exp) == VIEW_CONVERT_EXPR
5511 && ! ((TYPE_ALIGN (TREE_TYPE (exp))
5512 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
5513 && STRICT_ALIGNMENT
5514 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5515 < BIGGEST_ALIGNMENT)
5516 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5517 || TYPE_ALIGN_OK (TREE_TYPE
5518 (TREE_OPERAND (exp, 0))))))
5519 && ! ((TREE_CODE (exp) == NOP_EXPR
5520 || TREE_CODE (exp) == CONVERT_EXPR)
5521 && (TYPE_MODE (TREE_TYPE (exp))
5522 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5523 break;
5525 /* If any reference in the chain is volatile, the effect is volatile. */
5526 if (TREE_THIS_VOLATILE (exp))
5527 *pvolatilep = 1;
5529 exp = TREE_OPERAND (exp, 0);
5532 /* If OFFSET is constant, see if we can return the whole thing as a
5533 constant bit position. Otherwise, split it up. */
5534 if (host_integerp (offset, 0)
5535 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5536 bitsize_unit_node))
5537 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5538 && host_integerp (tem, 0))
5539 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5540 else
5541 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5543 *pmode = mode;
5544 return exp;
5547 /* Return 1 if T is an expression that get_inner_reference handles. */
5549 int
5550 handled_component_p (tree t)
5552 switch (TREE_CODE (t))
5554 case BIT_FIELD_REF:
5555 case COMPONENT_REF:
5556 case ARRAY_REF:
5557 case ARRAY_RANGE_REF:
5558 case NON_LVALUE_EXPR:
5559 case VIEW_CONVERT_EXPR:
5560 return 1;
5562 /* ??? Sure they are handled, but get_inner_reference may return
5563 a different PBITSIZE, depending upon whether the expression is
5564 wrapped up in a NOP_EXPR or not, e.g. for bitfields. */
5565 case NOP_EXPR:
5566 case CONVERT_EXPR:
5567 return (TYPE_MODE (TREE_TYPE (t))
5568 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5570 default:
5571 return 0;
5575 /* Given an rtx VALUE that may contain additions and multiplications, return
5576 an equivalent value that just refers to a register, memory, or constant.
5577 This is done by generating instructions to perform the arithmetic and
5578 returning a pseudo-register containing the value.
5580 The returned value may be a REG, SUBREG, MEM or constant. */
5582 rtx
5583 force_operand (rtx value, rtx target)
5585 rtx op1, op2;
5586 /* Use subtarget as the target for operand 0 of a binary operation. */
5587 rtx subtarget = get_subtarget (target);
5588 enum rtx_code code = GET_CODE (value);
5590 /* Check for a PIC address load. */
5591 if ((code == PLUS || code == MINUS)
5592 && XEXP (value, 0) == pic_offset_table_rtx
5593 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5594 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5595 || GET_CODE (XEXP (value, 1)) == CONST))
5597 if (!subtarget)
5598 subtarget = gen_reg_rtx (GET_MODE (value));
5599 emit_move_insn (subtarget, value);
5600 return subtarget;
5603 if (code == ZERO_EXTEND || code == SIGN_EXTEND)
5605 if (!target)
5606 target = gen_reg_rtx (GET_MODE (value));
5607 convert_move (target, force_operand (XEXP (value, 0), NULL),
5608 code == ZERO_EXTEND);
5609 return target;
5612 if (GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c')
5614 op2 = XEXP (value, 1);
5615 if (!CONSTANT_P (op2) && !(GET_CODE (op2) == REG && op2 != subtarget))
5616 subtarget = 0;
5617 if (code == MINUS && GET_CODE (op2) == CONST_INT)
5619 code = PLUS;
5620 op2 = negate_rtx (GET_MODE (value), op2);
5623 /* Check for an addition with OP2 a constant integer and our first
5624 operand a PLUS of a virtual register and something else. In that
5625 case, we want to emit the sum of the virtual register and the
5626 constant first and then add the other value. This allows virtual
5627 register instantiation to simply modify the constant rather than
5628 creating another one around this addition. */
5629 if (code == PLUS && GET_CODE (op2) == CONST_INT
5630 && GET_CODE (XEXP (value, 0)) == PLUS
5631 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5632 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5633 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5635 rtx temp = expand_simple_binop (GET_MODE (value), code,
5636 XEXP (XEXP (value, 0), 0), op2,
5637 subtarget, 0, OPTAB_LIB_WIDEN);
5638 return expand_simple_binop (GET_MODE (value), code, temp,
5639 force_operand (XEXP (XEXP (value,
5640 0), 1), 0),
5641 target, 0, OPTAB_LIB_WIDEN);
5644 op1 = force_operand (XEXP (value, 0), subtarget);
5645 op2 = force_operand (op2, NULL_RTX);
5646 switch (code)
5648 case MULT:
5649 return expand_mult (GET_MODE (value), op1, op2, target, 1);
5650 case DIV:
5651 if (!INTEGRAL_MODE_P (GET_MODE (value)))
5652 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5653 target, 1, OPTAB_LIB_WIDEN);
5654 else
5655 return expand_divmod (0,
5656 FLOAT_MODE_P (GET_MODE (value))
5657 ? RDIV_EXPR : TRUNC_DIV_EXPR,
5658 GET_MODE (value), op1, op2, target, 0);
5659 break;
5660 case MOD:
5661 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5662 target, 0);
5663 break;
5664 case UDIV:
5665 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
5666 target, 1);
5667 break;
5668 case UMOD:
5669 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5670 target, 1);
5671 break;
5672 case ASHIFTRT:
5673 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5674 target, 0, OPTAB_LIB_WIDEN);
5675 break;
5676 default:
5677 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5678 target, 1, OPTAB_LIB_WIDEN);
5681 if (GET_RTX_CLASS (code) == '1')
5683 op1 = force_operand (XEXP (value, 0), NULL_RTX);
5684 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
5687 #ifdef INSN_SCHEDULING
5688 /* On machines that have insn scheduling, we want all memory references to be
5689 explicit, so we need to deal with such paradoxical SUBREGs. */
5690 if (GET_CODE (value) == SUBREG && GET_CODE (SUBREG_REG (value)) == MEM
5691 && (GET_MODE_SIZE (GET_MODE (value))
5692 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5693 value
5694 = simplify_gen_subreg (GET_MODE (value),
5695 force_reg (GET_MODE (SUBREG_REG (value)),
5696 force_operand (SUBREG_REG (value),
5697 NULL_RTX)),
5698 GET_MODE (SUBREG_REG (value)),
5699 SUBREG_BYTE (value));
5700 #endif
5702 return value;
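/* A minimal illustrative sketch of how force_operand is used (BASE_REG is
   a hypothetical pseudo, not a name from this file): given a computed
   address such as (plus (reg) (const_int 8)),

       rtx addr = gen_rtx_PLUS (Pmode, base_reg, GEN_INT (8));
       rtx op = force_operand (addr, NULL_RTX);

   OP is then a REG, SUBREG, MEM or constant, as documented above.  */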
5705 /* Subroutine of expand_expr: return nonzero iff there is no way that
5706 EXP can reference X, which is being modified. TOP_P is nonzero if this
5707 call is going to be used to determine whether we need a temporary
5708 for EXP, as opposed to a recursive call to this function.
5710 It is always safe for this routine to return zero since it merely
5711 searches for optimization opportunities. */
5713 static int
5714 safe_from_p (rtx x, tree exp, int top_p)
5716 rtx exp_rtl = 0;
5717 int i, nops;
5718 static tree save_expr_list;
5720 if (x == 0
5721 /* If EXP has varying size, we MUST use a target since we currently
5722 have no way of allocating temporaries of variable size
5723 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5724 So we assume here that something at a higher level has prevented a
5725 clash. This is somewhat bogus, but the best we can do. Only
5726 do this when X is BLKmode and when we are at the top level. */
5727 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5728 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5729 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5730 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5731 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5732 != INTEGER_CST)
5733 && GET_MODE (x) == BLKmode)
5734 /* If X is in the outgoing argument area, it is always safe. */
5735 || (GET_CODE (x) == MEM
5736 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5737 || (GET_CODE (XEXP (x, 0)) == PLUS
5738 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5739 return 1;
5741 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5742 find the underlying pseudo. */
5743 if (GET_CODE (x) == SUBREG)
5745 x = SUBREG_REG (x);
5746 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5747 return 0;
5750 /* A SAVE_EXPR might appear many times in the expression passed to the
5751 top-level safe_from_p call, and if it has a complex subexpression,
5752 examining it multiple times could result in a combinatorial explosion.
5753 E.g. on an Alpha running at least 200MHz, a Fortran testcase compiled
5754 with optimization took about 28 minutes to compile -- even though it was
5755 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
5756 and turn that off when we are done. We keep a list of the SAVE_EXPRs
5757 we have processed. Note that the only test of top_p was above. */
5759 if (top_p)
5761 int rtn;
5762 tree t;
5764 save_expr_list = 0;
5766 rtn = safe_from_p (x, exp, 0);
5768 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
5769 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
5771 return rtn;
5774 /* Now look at our tree code and possibly recurse. */
5775 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5777 case 'd':
5778 exp_rtl = DECL_RTL_IF_SET (exp);
5779 break;
5781 case 'c':
5782 return 1;
5784 case 'x':
5785 if (TREE_CODE (exp) == TREE_LIST)
5787 while (1)
5789 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
5790 return 0;
5791 exp = TREE_CHAIN (exp);
5792 if (!exp)
5793 return 1;
5794 if (TREE_CODE (exp) != TREE_LIST)
5795 return safe_from_p (x, exp, 0);
5798 else if (TREE_CODE (exp) == ERROR_MARK)
5799 return 1; /* An already-visited SAVE_EXPR? */
5800 else
5801 return 0;
5803 case '2':
5804 case '<':
5805 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
5806 return 0;
5807 /* Fall through. */
5809 case '1':
5810 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5812 case 'e':
5813 case 'r':
5814 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5815 the expression. If it is set, we conflict iff we are that rtx or
5816 both are in memory. Otherwise, we check all operands of the
5817 expression recursively. */
5819 switch (TREE_CODE (exp))
5821 case ADDR_EXPR:
5822 /* If the operand is static or we are static, we can't conflict.
5823 Likewise if we don't conflict with the operand at all. */
5824 if (staticp (TREE_OPERAND (exp, 0))
5825 || TREE_STATIC (exp)
5826 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5827 return 1;
5829 /* Otherwise, the only way this can conflict is if we are taking
5830 the address of a DECL whose address is part of X, which is
5831 very rare. */
5832 exp = TREE_OPERAND (exp, 0);
5833 if (DECL_P (exp))
5835 if (!DECL_RTL_SET_P (exp)
5836 || GET_CODE (DECL_RTL (exp)) != MEM)
5837 return 0;
5838 else
5839 exp_rtl = XEXP (DECL_RTL (exp), 0);
5841 break;
5843 case INDIRECT_REF:
5844 if (GET_CODE (x) == MEM
5845 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5846 get_alias_set (exp)))
5847 return 0;
5848 break;
5850 case CALL_EXPR:
5851 /* Assume that the call will clobber all hard registers and
5852 all of memory. */
5853 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5854 || GET_CODE (x) == MEM)
5855 return 0;
5856 break;
5858 case RTL_EXPR:
5859 /* If a sequence exists, we would have to scan every instruction
5860 in the sequence to see if it was safe. This is probably not
5861 worthwhile. */
5862 if (RTL_EXPR_SEQUENCE (exp))
5863 return 0;
5865 exp_rtl = RTL_EXPR_RTL (exp);
5866 break;
5868 case WITH_CLEANUP_EXPR:
5869 exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
5870 break;
5872 case CLEANUP_POINT_EXPR:
5873 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5875 case SAVE_EXPR:
5876 exp_rtl = SAVE_EXPR_RTL (exp);
5877 if (exp_rtl)
5878 break;
5880 /* If we've already scanned this, don't do it again. Otherwise,
5881 show we've scanned it and record for clearing the flag if we're
5882 going on. */
5883 if (TREE_PRIVATE (exp))
5884 return 1;
5886 TREE_PRIVATE (exp) = 1;
5887 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5889 TREE_PRIVATE (exp) = 0;
5890 return 0;
5893 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
5894 return 1;
5896 case BIND_EXPR:
5897 /* The only operand we look at is operand 1. The rest aren't
5898 part of the expression. */
5899 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5901 default:
5902 break;
5905 /* If we have an rtx, we do not need to scan our operands. */
5906 if (exp_rtl)
5907 break;
5909 nops = first_rtl_op (TREE_CODE (exp));
5910 for (i = 0; i < nops; i++)
5911 if (TREE_OPERAND (exp, i) != 0
5912 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5913 return 0;
5915 /* If this is a language-specific tree code, it may require
5916 special handling. */
5917 if ((unsigned int) TREE_CODE (exp)
5918 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5919 && !(*lang_hooks.safe_from_p) (x, exp))
5920 return 0;
5923 /* If we have an rtl, find any enclosed object. Then see if we conflict
5924 with it. */
5925 if (exp_rtl)
5927 if (GET_CODE (exp_rtl) == SUBREG)
5929 exp_rtl = SUBREG_REG (exp_rtl);
5930 if (GET_CODE (exp_rtl) == REG
5931 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5932 return 0;
5935 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5936 are memory and they conflict. */
5937 return ! (rtx_equal_p (x, exp_rtl)
5938 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5939 && true_dependence (exp_rtl, VOIDmode, x,
5940 rtx_addr_varies_p)));
5943 /* If we reach here, it is safe. */
5944 return 1;
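/* Illustrative only: the usual call pattern, as in expand_operands below,
   is

       if (! safe_from_p (target, exp1, 1))
         target = 0;

   i.e. drop a suggested TARGET when the other operand might refer to it.  */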
5947 /* Subroutine of expand_expr: return rtx if EXP is a
5948 variable or parameter; else return 0. */
5950 static rtx
5951 var_rtx (tree exp)
5953 STRIP_NOPS (exp);
5954 switch (TREE_CODE (exp))
5956 case PARM_DECL:
5957 case VAR_DECL:
5958 return DECL_RTL (exp);
5959 default:
5960 return 0;
5964 /* Return the highest power of two that EXP is known to be a multiple of.
5965 This is used in updating alignment of MEMs in array references. */
5967 static unsigned HOST_WIDE_INT
5968 highest_pow2_factor (tree exp)
5970 unsigned HOST_WIDE_INT c0, c1;
5972 switch (TREE_CODE (exp))
5974 case INTEGER_CST:
5975 /* We can find the lowest bit that's a one. If the low
5976 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
5977 We need to handle this case since we can find it in a COND_EXPR,
5978 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
5979 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
5980 later ICE. */
5981 if (TREE_CONSTANT_OVERFLOW (exp))
5982 return BIGGEST_ALIGNMENT;
5983 else
5985 /* Note: tree_low_cst is intentionally not used here,
5986 we don't care about the upper bits. */
5987 c0 = TREE_INT_CST_LOW (exp);
5988 c0 &= -c0;
5989 return c0 ? c0 : BIGGEST_ALIGNMENT;
5991 break;
5993 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
5994 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5995 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5996 return MIN (c0, c1);
5998 case MULT_EXPR:
5999 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6000 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6001 return c0 * c1;
6003 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6004 case CEIL_DIV_EXPR:
6005 if (integer_pow2p (TREE_OPERAND (exp, 1))
6006 && host_integerp (TREE_OPERAND (exp, 1), 1))
6008 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6009 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6010 return MAX (1, c0 / c1);
6012 break;
6014 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6015 case SAVE_EXPR: case WITH_RECORD_EXPR:
6016 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6018 case COMPOUND_EXPR:
6019 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6021 case COND_EXPR:
6022 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6023 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6024 return MIN (c0, c1);
6026 default:
6027 break;
6030 return 1;
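/* Worked example (illustrative): for the expression I * 12 + 4 with I a
   plain variable, the MULT_EXPR contributes 1 * 4 (4 being the low set bit
   of 12), the INTEGER_CST 4 contributes 4, and the PLUS_EXPR takes the
   minimum of the two, so the result is 4: the value is known to be a
   multiple of 4.  */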
6033 /* Similar, except that it is known that the expression must be a multiple
6034 of the alignment of TYPE. */
6036 static unsigned HOST_WIDE_INT
6037 highest_pow2_factor_for_type (tree type, tree exp)
6039 unsigned HOST_WIDE_INT type_align, factor;
6041 factor = highest_pow2_factor (exp);
6042 type_align = TYPE_ALIGN (type) / BITS_PER_UNIT;
6043 return MAX (factor, type_align);
6046 /* Return an object on the placeholder list that matches EXP, a
6047 PLACEHOLDER_EXPR. An object "matches" if it is of the type of the
6048 PLACEHOLDER_EXPR or a pointer type to it. For further information, see
6049 tree.def. If no such object is found, return 0. If PLIST is nonzero, it
6050 is a location which initially points to a starting location in the
6051 placeholder list (zero means start of the list) and where a pointer into
6052 the placeholder list at which the object is found is placed. */
6054 tree
6055 find_placeholder (tree exp, tree *plist)
6057 tree type = TREE_TYPE (exp);
6058 tree placeholder_expr;
6060 for (placeholder_expr
6061 = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list;
6062 placeholder_expr != 0;
6063 placeholder_expr = TREE_CHAIN (placeholder_expr))
6065 tree need_type = TYPE_MAIN_VARIANT (type);
6066 tree elt;
6068 /* Find the outermost reference that is of the type we want. If none,
6069 see if any object has a type that is a pointer to the type we
6070 want. */
6071 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6072 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
6073 || TREE_CODE (elt) == COND_EXPR)
6074 ? TREE_OPERAND (elt, 1)
6075 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6076 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6077 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6078 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6079 ? TREE_OPERAND (elt, 0) : 0))
6080 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6082 if (plist)
6083 *plist = placeholder_expr;
6084 return elt;
6087 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6088 elt
6089 = ((TREE_CODE (elt) == COMPOUND_EXPR
6090 || TREE_CODE (elt) == COND_EXPR)
6091 ? TREE_OPERAND (elt, 1)
6092 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6093 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6094 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6095 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6096 ? TREE_OPERAND (elt, 0) : 0))
6097 if (POINTER_TYPE_P (TREE_TYPE (elt))
6098 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6099 == need_type))
6101 if (plist)
6102 *plist = placeholder_expr;
6103 return build1 (INDIRECT_REF, need_type, elt);
6107 return 0;
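/* Illustrative usage: the PLACEHOLDER_EXPR case of expand_expr below calls
   this as

       tree placeholder_expr = 0;
       exp = find_placeholder (exp, &placeholder_expr);

   and aborts if nothing on the placeholder list matched.  */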
6110 /* Subroutine of expand_expr. Expand the two operands of a binary
6111 expression EXP0 and EXP1 placing the results in OP0 and OP1.
6112 The value may be stored in TARGET if TARGET is nonzero. The
6113 MODIFIER argument is as documented by expand_expr. */
6115 static void
6116 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
6117 enum expand_modifier modifier)
6119 if (! safe_from_p (target, exp1, 1))
6120 target = 0;
6121 if (operand_equal_p (exp0, exp1, 0))
6123 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6124 *op1 = copy_rtx (*op0);
6126 else
6128 /* If we need to preserve evaluation order, copy exp0 into its own
6129 temporary variable so that it can't be clobbered by exp1. */
6130 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
6131 exp0 = save_expr (exp0);
6132 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6133 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
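/* A hedged sketch of typical use (operand names are illustrative, not taken
   from this section): a binary-operator case of expand_expr can expand both
   operands with

       expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
                        subtarget, &op0, &op1, EXPAND_NORMAL);

   before handing OP0 and OP1 to expand_binop or a similar helper.  */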
6138 /* expand_expr: generate code for computing expression EXP.
6139 An rtx for the computed value is returned. The value is never null.
6140 In the case of a void EXP, const0_rtx is returned.
6142 The value may be stored in TARGET if TARGET is nonzero.
6143 TARGET is just a suggestion; callers must assume that
6144 the rtx returned may not be the same as TARGET.
6146 If TARGET is CONST0_RTX, it means that the value will be ignored.
6148 If TMODE is not VOIDmode, it suggests generating the
6149 result in mode TMODE. But this is done only when convenient.
6150 Otherwise, TMODE is ignored and the value is generated in its natural mode.
6151 TMODE is just a suggestion; callers must assume that
6152 the rtx returned may not have mode TMODE.
6154 Note that TARGET may have neither TMODE nor MODE. In that case, it
6155 probably will not be used.
6157 If MODIFIER is EXPAND_SUM then when EXP is an addition
6158 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6159 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6160 products as above, or REG or MEM, or constant.
6161 Ordinarily in such cases we would output mul or add instructions
6162 and then return a pseudo reg containing the sum.
6164 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6165 it also marks a label as absolutely required (it can't be dead).
6166 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6167 This is used for outputting expressions used in initializers.
6169 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6170 with a constant address even if that address is not normally legitimate.
6171 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6173 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6174 a call parameter. Such targets require special care as we haven't yet
6175 marked TARGET so that it's safe from being trashed by libcalls. We
6176 don't want to use TARGET for anything but the final result;
6177 Intermediate values must go elsewhere. Additionally, calls to
6178 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
6180 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
6181 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
6182 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
6183 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
6184 recursively. */
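/* Illustrative examples of the modifiers described above: an ordinary
   caller typically uses

       op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
                          EXPAND_NORMAL);

   while code computing an address that may be folded into an addressing
   mode passes EXPAND_SUM instead, as the INDIRECT_REF case below does.  */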
6186 rtx
6187 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
6188 enum expand_modifier modifier, rtx *alt_rtl)
6190 rtx op0, op1, temp;
6191 tree type = TREE_TYPE (exp);
6192 int unsignedp = TREE_UNSIGNED (type);
6193 enum machine_mode mode;
6194 enum tree_code code = TREE_CODE (exp);
6195 optab this_optab;
6196 rtx subtarget, original_target;
6197 int ignore;
6198 tree context;
6200 /* Handle ERROR_MARK before anybody tries to access its type. */
6201 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
6203 op0 = CONST0_RTX (tmode);
6204 if (op0 != 0)
6205 return op0;
6206 return const0_rtx;
6209 mode = TYPE_MODE (type);
6210 /* Use subtarget as the target for operand 0 of a binary operation. */
6211 subtarget = get_subtarget (target);
6212 original_target = target;
6213 ignore = (target == const0_rtx
6214 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6215 || code == CONVERT_EXPR || code == REFERENCE_EXPR
6216 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
6217 && TREE_CODE (type) == VOID_TYPE));
6219 /* If we are going to ignore this result, we need only do something
6220 if there is a side-effect somewhere in the expression. If there
6221 is, short-circuit the most common cases here. Note that we must
6222 not call expand_expr with anything but const0_rtx in case this
6223 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6225 if (ignore)
6227 if (! TREE_SIDE_EFFECTS (exp))
6228 return const0_rtx;
6230 /* Ensure we reference a volatile object even if value is ignored, but
6231 don't do this if all we are doing is taking its address. */
6232 if (TREE_THIS_VOLATILE (exp)
6233 && TREE_CODE (exp) != FUNCTION_DECL
6234 && mode != VOIDmode && mode != BLKmode
6235 && modifier != EXPAND_CONST_ADDRESS)
6237 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6238 if (GET_CODE (temp) == MEM)
6239 temp = copy_to_reg (temp);
6240 return const0_rtx;
6243 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6244 || code == INDIRECT_REF || code == BUFFER_REF)
6245 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6246 modifier);
6248 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6249 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6251 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6252 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6253 return const0_rtx;
6255 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6256 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6257 /* If the second operand has no side effects, just evaluate
6258 the first. */
6259 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6260 modifier);
6261 else if (code == BIT_FIELD_REF)
6263 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6264 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6265 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6266 return const0_rtx;
6269 target = 0;
6272 /* If we will do cse, generate all results into pseudo registers
6273 since 1) that allows cse to find more things
6274 and 2) otherwise cse could produce an insn the machine
6275 cannot support. An exception is a CONSTRUCTOR into a multi-word
6276 MEM: that's much more likely to be most efficient into the MEM.
6277 Another is a CALL_EXPR which must return in memory. */
6279 if (! cse_not_expected && mode != BLKmode && target
6280 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER)
6281 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
6282 && ! (code == CALL_EXPR && aggregate_value_p (exp, exp)))
6283 target = 0;
6285 switch (code)
6287 case LABEL_DECL:
6289 tree function = decl_function_context (exp);
6290 /* Labels in containing functions, or labels used from initializers,
6291 must be forced. */
6292 if (modifier == EXPAND_INITIALIZER
6293 || (function != current_function_decl
6294 && function != inline_function_decl
6295 && function != 0))
6296 temp = force_label_rtx (exp);
6297 else
6298 temp = label_rtx (exp);
6300 temp = gen_rtx_MEM (FUNCTION_MODE, gen_rtx_LABEL_REF (Pmode, temp));
6301 if (function != current_function_decl
6302 && function != inline_function_decl && function != 0)
6303 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6304 return temp;
6307 case PARM_DECL:
6308 if (!DECL_RTL_SET_P (exp))
6310 error ("%Jprior parameter's size depends on '%D'", exp, exp);
6311 return CONST0_RTX (mode);
6314 /* ... fall through ... */
6316 case VAR_DECL:
6317 /* If a static var's type was incomplete when the decl was written,
6318 but the type is complete now, lay out the decl now. */
6319 if (DECL_SIZE (exp) == 0
6320 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
6321 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6322 layout_decl (exp, 0);
6324 /* ... fall through ... */
6326 case FUNCTION_DECL:
6327 case RESULT_DECL:
6328 if (DECL_RTL (exp) == 0)
6329 abort ();
6331 /* Ensure the variable is marked as used even if it doesn't go through
6332 a parser. If it hasn't been used yet, write out an external
6333 definition. */
6334 if (! TREE_USED (exp))
6336 assemble_external (exp);
6337 TREE_USED (exp) = 1;
6340 /* Show we haven't gotten RTL for this yet. */
6341 temp = 0;
6343 /* Handle variables inherited from containing functions. */
6344 context = decl_function_context (exp);
6346 /* We treat inline_function_decl as an alias for the current function
6347 because that is the inline function whose vars, types, etc.
6348 are being merged into the current function.
6349 See expand_inline_function. */
6351 if (context != 0 && context != current_function_decl
6352 && context != inline_function_decl
6353 /* If var is static, we don't need a static chain to access it. */
6354 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6355 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6357 rtx addr;
6359 /* Mark as non-local and addressable. */
6360 DECL_NONLOCAL (exp) = 1;
6361 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6362 abort ();
6363 (*lang_hooks.mark_addressable) (exp);
6364 if (GET_CODE (DECL_RTL (exp)) != MEM)
6365 abort ();
6366 addr = XEXP (DECL_RTL (exp), 0);
6367 if (GET_CODE (addr) == MEM)
6368 addr
6369 = replace_equiv_address (addr,
6370 fix_lexical_addr (XEXP (addr, 0), exp));
6371 else
6372 addr = fix_lexical_addr (addr, exp);
6374 temp = replace_equiv_address (DECL_RTL (exp), addr);
6377 /* This is the case of an array whose size is to be determined
6378 from its initializer, while the initializer is still being parsed.
6379 See expand_decl. */
6381 else if (GET_CODE (DECL_RTL (exp)) == MEM
6382 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6383 temp = validize_mem (DECL_RTL (exp));
6385 /* If DECL_RTL is memory, we are in the normal case; if either
6386 the address is not valid or it is not a register and -fforce-addr
6387 is specified, get the address into a register. */
6389 else if (GET_CODE (DECL_RTL (exp)) == MEM
6390 && modifier != EXPAND_CONST_ADDRESS
6391 && modifier != EXPAND_SUM
6392 && modifier != EXPAND_INITIALIZER
6393 && (! memory_address_p (DECL_MODE (exp),
6394 XEXP (DECL_RTL (exp), 0))
6395 || (flag_force_addr
6396 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6398 if (alt_rtl)
6399 *alt_rtl = DECL_RTL (exp);
6400 temp = replace_equiv_address (DECL_RTL (exp),
6401 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6404 /* If we got something, return it. But first, set the alignment
6405 if the address is a register. */
6406 if (temp != 0)
6408 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6409 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6411 return temp;
6414 /* If the mode of DECL_RTL does not match that of the decl, it
6415 must be a promoted value. We return a SUBREG of the wanted mode,
6416 but mark it so that we know that it was already extended. */
6418 if (GET_CODE (DECL_RTL (exp)) == REG
6419 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6421 /* Get the signedness used for this variable. Ensure we get the
6422 same mode we got when the variable was declared. */
6423 if (GET_MODE (DECL_RTL (exp))
6424 != promote_mode (type, DECL_MODE (exp), &unsignedp,
6425 (TREE_CODE (exp) == RESULT_DECL ? 1 : 0)))
6426 abort ();
6428 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6429 SUBREG_PROMOTED_VAR_P (temp) = 1;
6430 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6431 return temp;
6434 return DECL_RTL (exp);
6436 case INTEGER_CST:
6437 temp = immed_double_const (TREE_INT_CST_LOW (exp),
6438 TREE_INT_CST_HIGH (exp), mode);
6440 /* ??? If overflow is set, fold will have done an incomplete job,
6441 which can result in (plus xx (const_int 0)), which can get
6442 simplified by validate_replace_rtx during virtual register
6443 instantiation, which can result in unrecognizable insns.
6444 Avoid this by forcing all overflows into registers. */
6445 if (TREE_CONSTANT_OVERFLOW (exp)
6446 && modifier != EXPAND_INITIALIZER)
6447 temp = force_reg (mode, temp);
6449 return temp;
6451 case VECTOR_CST:
6452 return const_vector_from_tree (exp);
6454 case CONST_DECL:
6455 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
6457 case REAL_CST:
6458 /* If optimized, generate immediate CONST_DOUBLE
6459 which will be turned into memory by reload if necessary.
6461 We used to force a register so that loop.c could see it. But
6462 this does not allow gen_* patterns to perform optimizations with
6463 the constants. It also produces two insns in cases like "x = 1.0;".
6464 On most machines, floating-point constants are not permitted in
6465 many insns, so we'd end up copying it to a register in any case.
6467 Now, we do the copying in expand_binop, if appropriate. */
6468 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6469 TYPE_MODE (TREE_TYPE (exp)));
6471 case COMPLEX_CST:
6472 /* Handle evaluating a complex constant in a CONCAT target. */
6473 if (original_target && GET_CODE (original_target) == CONCAT)
6475 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6476 rtx rtarg, itarg;
6478 rtarg = XEXP (original_target, 0);
6479 itarg = XEXP (original_target, 1);
6481 /* Move the real and imaginary parts separately. */
6482 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, 0);
6483 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, 0);
6485 if (op0 != rtarg)
6486 emit_move_insn (rtarg, op0);
6487 if (op1 != itarg)
6488 emit_move_insn (itarg, op1);
6490 return original_target;
6493 /* ... fall through ... */
6495 case STRING_CST:
6496 temp = output_constant_def (exp, 1);
6498 /* temp contains a constant address.
6499 On RISC machines where a constant address isn't valid,
6500 make some insns to get that address into a register. */
6501 if (modifier != EXPAND_CONST_ADDRESS
6502 && modifier != EXPAND_INITIALIZER
6503 && modifier != EXPAND_SUM
6504 && (! memory_address_p (mode, XEXP (temp, 0))
6505 || flag_force_addr))
6506 return replace_equiv_address (temp,
6507 copy_rtx (XEXP (temp, 0)));
6508 return temp;
6510 case EXPR_WITH_FILE_LOCATION:
6512 rtx to_return;
6513 struct file_stack fs;
6515 fs.location = input_location;
6516 fs.next = expr_wfl_stack;
6517 input_filename = EXPR_WFL_FILENAME (exp);
6518 input_line = EXPR_WFL_LINENO (exp);
6519 expr_wfl_stack = &fs;
6520 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6521 emit_line_note (input_location);
6522 /* Possibly avoid switching back and forth here. */
6523 to_return = expand_expr (EXPR_WFL_NODE (exp),
6524 (ignore ? const0_rtx : target),
6525 tmode, modifier);
6526 if (expr_wfl_stack != &fs)
6527 abort ();
6528 input_location = fs.location;
6529 expr_wfl_stack = fs.next;
6530 return to_return;
6533 case SAVE_EXPR:
6534 context = decl_function_context (exp);
6536 /* If this SAVE_EXPR was at global context, assume we are an
6537 initialization function and move it into our context. */
6538 if (context == 0)
6539 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6541 /* We treat inline_function_decl as an alias for the current function
6542 because that is the inline function whose vars, types, etc.
6543 are being merged into the current function.
6544 See expand_inline_function. */
6545 if (context == current_function_decl || context == inline_function_decl)
6546 context = 0;
6548 /* If this is non-local, handle it. */
6549 if (context)
6551 /* The following call just exists to abort if the context is
6552 not of a containing function. */
6553 find_function_data (context);
6555 temp = SAVE_EXPR_RTL (exp);
6556 if (temp && GET_CODE (temp) == REG)
6558 put_var_into_stack (exp, /*rescan=*/true);
6559 temp = SAVE_EXPR_RTL (exp);
6561 if (temp == 0 || GET_CODE (temp) != MEM)
6562 abort ();
6563 return
6564 replace_equiv_address (temp,
6565 fix_lexical_addr (XEXP (temp, 0), exp));
6567 if (SAVE_EXPR_RTL (exp) == 0)
6569 if (mode == VOIDmode)
6570 temp = const0_rtx;
6571 else
6572 temp = assign_temp (build_qualified_type (type,
6573 (TYPE_QUALS (type)
6574 | TYPE_QUAL_CONST)),
6575 3, 0, 0);
6577 SAVE_EXPR_RTL (exp) = temp;
6578 if (!optimize && GET_CODE (temp) == REG)
6579 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6580 save_expr_regs);
6582 /* If the mode of TEMP does not match that of the expression, it
6583 must be a promoted value. We pass store_expr a SUBREG of the
6584 wanted mode but mark it so that we know that it was already
6585 extended. */
6587 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6589 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6590 promote_mode (type, mode, &unsignedp, 0);
6591 SUBREG_PROMOTED_VAR_P (temp) = 1;
6592 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6595 if (temp == const0_rtx)
6596 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6597 else
6598 store_expr (TREE_OPERAND (exp, 0), temp,
6599 modifier == EXPAND_STACK_PARM ? 2 : 0);
6601 TREE_USED (exp) = 1;
6604 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6605 must be a promoted value. We return a SUBREG of the wanted mode,
6606 but mark it so that we know that it was already extended. */
6608 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6609 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6611 /* Compute the signedness and make the proper SUBREG. */
6612 promote_mode (type, mode, &unsignedp, 0);
6613 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6614 SUBREG_PROMOTED_VAR_P (temp) = 1;
6615 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6616 return temp;
6619 return SAVE_EXPR_RTL (exp);
6621 case UNSAVE_EXPR:
6623 rtx temp;
6624 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6625 TREE_OPERAND (exp, 0)
6626 = (*lang_hooks.unsave_expr_now) (TREE_OPERAND (exp, 0));
6627 return temp;
6630 case PLACEHOLDER_EXPR:
6632 tree old_list = placeholder_list;
6633 tree placeholder_expr = 0;
6635 exp = find_placeholder (exp, &placeholder_expr);
6636 if (exp == 0)
6637 abort ();
6639 placeholder_list = TREE_CHAIN (placeholder_expr);
6640 temp = expand_expr (exp, original_target, tmode, modifier);
6641 placeholder_list = old_list;
6642 return temp;
6645 case WITH_RECORD_EXPR:
6646 /* Put the object on the placeholder list, expand our first operand,
6647 and pop the list. */
6648 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6649 placeholder_list);
6650 target = expand_expr (TREE_OPERAND (exp, 0), original_target, tmode,
6651 modifier);
6652 placeholder_list = TREE_CHAIN (placeholder_list);
6653 return target;
6655 case GOTO_EXPR:
6656 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6657 expand_goto (TREE_OPERAND (exp, 0));
6658 else
6659 expand_computed_goto (TREE_OPERAND (exp, 0));
6660 return const0_rtx;
6662 case EXIT_EXPR:
6663 expand_exit_loop_if_false (NULL,
6664 invert_truthvalue (TREE_OPERAND (exp, 0)));
6665 return const0_rtx;
6667 case LABELED_BLOCK_EXPR:
6668 if (LABELED_BLOCK_BODY (exp))
6669 expand_expr_stmt_value (LABELED_BLOCK_BODY (exp), 0, 1);
6670 /* Should perhaps use expand_label, but this is simpler and safer. */
6671 do_pending_stack_adjust ();
6672 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6673 return const0_rtx;
6675 case EXIT_BLOCK_EXPR:
6676 if (EXIT_BLOCK_RETURN (exp))
6677 sorry ("returned value in block_exit_expr");
6678 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6679 return const0_rtx;
6681 case LOOP_EXPR:
6682 push_temp_slots ();
6683 expand_start_loop (1);
6684 expand_expr_stmt_value (TREE_OPERAND (exp, 0), 0, 1);
6685 expand_end_loop ();
6686 pop_temp_slots ();
6688 return const0_rtx;
6690 case BIND_EXPR:
6692 tree vars = TREE_OPERAND (exp, 0);
6694 /* Need to open a binding contour here because
6695 if there are any cleanups they must be contained here. */
6696 expand_start_bindings (2);
6698 /* Mark the corresponding BLOCK for output in its proper place. */
6699 if (TREE_OPERAND (exp, 2) != 0
6700 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6701 (*lang_hooks.decls.insert_block) (TREE_OPERAND (exp, 2));
6703 /* If VARS have not yet been expanded, expand them now. */
6704 while (vars)
6706 if (!DECL_RTL_SET_P (vars))
6707 expand_decl (vars);
6708 expand_decl_init (vars);
6709 vars = TREE_CHAIN (vars);
6712 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
6714 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6716 return temp;
6719 case RTL_EXPR:
6720 if (RTL_EXPR_SEQUENCE (exp))
6722 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6723 abort ();
6724 emit_insn (RTL_EXPR_SEQUENCE (exp));
6725 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6727 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6728 free_temps_for_rtl_expr (exp);
6729 if (alt_rtl)
6730 *alt_rtl = RTL_EXPR_ALT_RTL (exp);
6731 return RTL_EXPR_RTL (exp);
6733 case CONSTRUCTOR:
6734 /* If we don't need the result, just ensure we evaluate any
6735 subexpressions. */
6736 if (ignore)
6738 tree elt;
6740 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6741 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
6743 return const0_rtx;
6746 /* All elts simple constants => refer to a constant in memory. But
6747 if this is a non-BLKmode mode, let it store a field at a time
6748 since that should make a CONST_INT or CONST_DOUBLE when we
6749 fold. Likewise, if we have a target we can use, it is best to
6750 store directly into the target unless the type is large enough
6751 that memcpy will be used. If we are making an initializer and
6752 all operands are constant, put it in memory as well.
6754 FIXME: Avoid trying to fill vector constructors piece-meal.
6755 Output them with output_constant_def below unless we're sure
6756 they're zeros. This should go away when vector initializers
6757 are treated like VECTOR_CST instead of arrays.
6759 else if ((TREE_STATIC (exp)
6760 && ((mode == BLKmode
6761 && ! (target != 0 && safe_from_p (target, exp, 1)))
6762 || TREE_ADDRESSABLE (exp)
6763 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6764 && (! MOVE_BY_PIECES_P
6765 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6766 TYPE_ALIGN (type)))
6767 && ((TREE_CODE (type) == VECTOR_TYPE
6768 && !is_zeros_p (exp))
6769 || ! mostly_zeros_p (exp)))))
6770 || ((modifier == EXPAND_INITIALIZER
6771 || modifier == EXPAND_CONST_ADDRESS)
6772 && TREE_CONSTANT (exp)))
6774 rtx constructor = output_constant_def (exp, 1);
6776 if (modifier != EXPAND_CONST_ADDRESS
6777 && modifier != EXPAND_INITIALIZER
6778 && modifier != EXPAND_SUM)
6779 constructor = validize_mem (constructor);
6781 return constructor;
6783 else
6785 /* Handle calls that pass values in multiple non-contiguous
6786 locations. The Irix 6 ABI has examples of this. */
6787 if (target == 0 || ! safe_from_p (target, exp, 1)
6788 || GET_CODE (target) == PARALLEL
6789 || modifier == EXPAND_STACK_PARM)
6790 target
6791 = assign_temp (build_qualified_type (type,
6792 (TYPE_QUALS (type)
6793 | (TREE_READONLY (exp)
6794 * TYPE_QUAL_CONST))),
6795 0, TREE_ADDRESSABLE (exp), 1);
6797 store_constructor (exp, target, 0, int_expr_size (exp));
6798 return target;
6801 case INDIRECT_REF:
6803 tree exp1 = TREE_OPERAND (exp, 0);
6804 tree index;
6805 tree string = string_constant (exp1, &index);
6807 /* Try to optimize reads from const strings. */
6808 if (string
6809 && TREE_CODE (string) == STRING_CST
6810 && TREE_CODE (index) == INTEGER_CST
6811 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
6812 && GET_MODE_CLASS (mode) == MODE_INT
6813 && GET_MODE_SIZE (mode) == 1
6814 && modifier != EXPAND_WRITE)
6815 return gen_int_mode (TREE_STRING_POINTER (string)
6816 [TREE_INT_CST_LOW (index)], mode);
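/* E.g. (illustrative): a QImode read of *("abc" + 1) satisfies the test
   above and simply returns the constant 'b', with no MEM ever generated.  */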
6818 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6819 op0 = memory_address (mode, op0);
6820 temp = gen_rtx_MEM (mode, op0);
6821 set_mem_attributes (temp, exp, 0);
6823 /* If we are writing to this object and its type is a record with
6824 readonly fields, we must mark it as readonly so it will
6825 conflict with readonly references to those fields. */
6826 if (modifier == EXPAND_WRITE && readonly_fields_p (type))
6827 RTX_UNCHANGING_P (temp) = 1;
6829 return temp;
6832 case ARRAY_REF:
6833 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6834 abort ();
6837 tree array = TREE_OPERAND (exp, 0);
6838 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6839 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6840 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6841 HOST_WIDE_INT i;
6843 /* Optimize the special-case of a zero lower bound.
6845 We convert the low_bound to sizetype to avoid some problems
6846 with constant folding. (E.g. suppose the lower bound is 1,
6847 and its mode is QI. Without the conversion, (ARRAY
6848 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6849 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6851 if (! integer_zerop (low_bound))
6852 index = size_diffop (index, convert (sizetype, low_bound));
6854 /* Fold an expression like: "foo"[2].
6855 This is not done in fold so it won't happen inside &.
6856 Don't fold if this is for wide characters since it's too
6857 difficult to do correctly and this is a very rare case. */
6859 if (modifier != EXPAND_CONST_ADDRESS
6860 && modifier != EXPAND_INITIALIZER
6861 && modifier != EXPAND_MEMORY
6862 && TREE_CODE (array) == STRING_CST
6863 && TREE_CODE (index) == INTEGER_CST
6864 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
6865 && GET_MODE_CLASS (mode) == MODE_INT
6866 && GET_MODE_SIZE (mode) == 1)
6867 return gen_int_mode (TREE_STRING_POINTER (array)
6868 [TREE_INT_CST_LOW (index)], mode);
6870 /* If this is a constant index into a constant array,
6871 just get the value from the array. Handle both the cases when
6872 we have an explicit constructor and when our operand is a variable
6873 that was declared const. */
6875 if (modifier != EXPAND_CONST_ADDRESS
6876 && modifier != EXPAND_INITIALIZER
6877 && modifier != EXPAND_MEMORY
6878 && TREE_CODE (array) == CONSTRUCTOR
6879 && ! TREE_SIDE_EFFECTS (array)
6880 && TREE_CODE (index) == INTEGER_CST
6881 && 0 > compare_tree_int (index,
6882 list_length (CONSTRUCTOR_ELTS
6883 (TREE_OPERAND (exp, 0)))))
6885 tree elem;
6887 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6888 i = TREE_INT_CST_LOW (index);
6889 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6892 if (elem)
6893 return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
6894 modifier);
6897 else if (optimize >= 1
6898 && modifier != EXPAND_CONST_ADDRESS
6899 && modifier != EXPAND_INITIALIZER
6900 && modifier != EXPAND_MEMORY
6901 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6902 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6903 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
6904 && targetm.binds_local_p (array))
6906 if (TREE_CODE (index) == INTEGER_CST)
6908 tree init = DECL_INITIAL (array);
6910 if (TREE_CODE (init) == CONSTRUCTOR)
6912 tree elem;
6914 for (elem = CONSTRUCTOR_ELTS (init);
6915 (elem
6916 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6917 elem = TREE_CHAIN (elem))
6920 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
6921 return expand_expr (fold (TREE_VALUE (elem)), target,
6922 tmode, modifier);
6924 else if (TREE_CODE (init) == STRING_CST
6925 && 0 > compare_tree_int (index,
6926 TREE_STRING_LENGTH (init)))
6928 tree type = TREE_TYPE (TREE_TYPE (init));
6929 enum machine_mode mode = TYPE_MODE (type);
6931 if (GET_MODE_CLASS (mode) == MODE_INT
6932 && GET_MODE_SIZE (mode) == 1)
6933 return gen_int_mode (TREE_STRING_POINTER (init)
6934 [TREE_INT_CST_LOW (index)], mode);
6939 goto normal_inner_ref;
6941 case COMPONENT_REF:
6942 /* If the operand is a CONSTRUCTOR, we can just extract the
6943 appropriate field if it is present. */
6944 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
6946 tree elt;
6948 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6949 elt = TREE_CHAIN (elt))
6950 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6951 /* We can normally use the value of the field in the
6952 CONSTRUCTOR. However, if this is a bitfield in
6953 an integral mode that we can fit in a HOST_WIDE_INT,
6954 we must mask only the number of bits in the bitfield,
6955 since this is done implicitly by the constructor. If
6956 the bitfield does not meet either of those conditions,
6957 we can't do this optimization. */
6958 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6959 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6960 == MODE_INT)
6961 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6962 <= HOST_BITS_PER_WIDE_INT))))
6964 if (DECL_BIT_FIELD (TREE_PURPOSE (elt))
6965 && modifier == EXPAND_STACK_PARM)
6966 target = 0;
6967 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6968 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6970 HOST_WIDE_INT bitsize
6971 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6972 enum machine_mode imode
6973 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6975 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6977 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6978 op0 = expand_and (imode, op0, op1, target);
6980 else
6982 tree count
6983 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6986 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6987 target, 0);
6988 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6989 target, 0);
6993 return op0;
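/* Worked example (illustrative; assumes a 32-bit SImode field type): for a
   3-bit field whose CONSTRUCTOR value is 5, the unsigned branch masks with
   (1 << 3) - 1 = 7 and yields 5, while the signed branch shifts left by
   32 - 3 = 29 and arithmetically right by 29, yielding the sign-extended
   value -3.  */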
6996 goto normal_inner_ref;
6998 case BIT_FIELD_REF:
6999 case ARRAY_RANGE_REF:
7000 normal_inner_ref:
7002 enum machine_mode mode1;
7003 HOST_WIDE_INT bitsize, bitpos;
7004 tree offset;
7005 int volatilep = 0;
7006 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7007 &mode1, &unsignedp, &volatilep);
7008 rtx orig_op0;
7010 /* If we got back the original object, something is wrong. Perhaps
7011 we are evaluating an expression too early. In any event, don't
7012 infinitely recurse. */
7013 if (tem == exp)
7014 abort ();
7016 /* If TEM's type is a union of variable size, pass TARGET to the inner
7017 computation, since it will need a temporary and TARGET is known
7018 to be usable as one. This occurs in unchecked conversion in Ada. */
7020 orig_op0 = op0
7021 = expand_expr (tem,
7022 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7023 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7024 != INTEGER_CST)
7025 && modifier != EXPAND_STACK_PARM
7026 ? target : NULL_RTX),
7027 VOIDmode,
7028 (modifier == EXPAND_INITIALIZER
7029 || modifier == EXPAND_CONST_ADDRESS
7030 || modifier == EXPAND_STACK_PARM)
7031 ? modifier : EXPAND_NORMAL);
7033 /* If this is a constant, put it into a register if it is a
7034 legitimate constant and OFFSET is 0, and into memory if it isn't. */
7035 if (CONSTANT_P (op0))
7037 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7038 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7039 && offset == 0)
7040 op0 = force_reg (mode, op0);
7041 else
7042 op0 = validize_mem (force_const_mem (mode, op0));
7045 /* Otherwise, if this object is not in memory and we either have an
7046 offset or a BLKmode result, put it there. This case can't occur in
7047 C, but can in Ada if we have unchecked conversion of an expression
7048 from a scalar type to an array or record type or for an
7049 ARRAY_RANGE_REF whose type is BLKmode. */
7050 else if (GET_CODE (op0) != MEM
7051 && (offset != 0
7052 || (code == ARRAY_RANGE_REF && mode == BLKmode)))
7054 /* If the operand is a SAVE_EXPR, we can deal with this by
7055 forcing the SAVE_EXPR into memory. */
7056 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
7058 put_var_into_stack (TREE_OPERAND (exp, 0),
7059 /*rescan=*/true);
7060 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
7062 else
7064 tree nt
7065 = build_qualified_type (TREE_TYPE (tem),
7066 (TYPE_QUALS (TREE_TYPE (tem))
7067 | TYPE_QUAL_CONST));
7068 rtx memloc = assign_temp (nt, 1, 1, 1);
7070 emit_move_insn (memloc, op0);
7071 op0 = memloc;
7075 if (offset != 0)
7077 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
7078 EXPAND_SUM);
7080 if (GET_CODE (op0) != MEM)
7081 abort ();
7083 #ifdef POINTERS_EXTEND_UNSIGNED
7084 if (GET_MODE (offset_rtx) != Pmode)
7085 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
7086 #else
7087 if (GET_MODE (offset_rtx) != ptr_mode)
7088 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7089 #endif
7091 if (GET_MODE (op0) == BLKmode
7092 /* A constant address in OP0 can have VOIDmode, we must
7093 not try to call force_reg in that case. */
7094 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7095 && bitsize != 0
7096 && (bitpos % bitsize) == 0
7097 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7098 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7100 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7101 bitpos = 0;
7104 op0 = offset_address (op0, offset_rtx,
7105 highest_pow2_factor (offset));
7108 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7109 record its alignment as BIGGEST_ALIGNMENT. */
7110 if (GET_CODE (op0) == MEM && bitpos == 0 && offset != 0
7111 && is_aligning_offset (offset, tem))
7112 set_mem_align (op0, BIGGEST_ALIGNMENT);
7114 /* Don't forget about volatility even if this is a bitfield. */
7115 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
7117 if (op0 == orig_op0)
7118 op0 = copy_rtx (op0);
7120 MEM_VOLATILE_P (op0) = 1;
7123 /* The following code doesn't handle CONCAT.
7124 Assume only bitpos == 0 can be used for CONCAT, due to
7125 one-element arrays having the same mode as their element. */
7126 if (GET_CODE (op0) == CONCAT)
7128 if (bitpos != 0 || bitsize != GET_MODE_BITSIZE (GET_MODE (op0)))
7129 abort ();
7130 return op0;
7133 /* In cases where an aligned union has an unaligned object
7134 as a field, we might be extracting a BLKmode value from
7135 an integer-mode (e.g., SImode) object. Handle this case
7136 by doing the extract into an object as wide as the field
7137 (which we know to be the width of a basic mode), then
7138 storing into memory, and changing the mode to BLKmode. */
7139 if (mode1 == VOIDmode
7140 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7141 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7142 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7143 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7144 && modifier != EXPAND_CONST_ADDRESS
7145 && modifier != EXPAND_INITIALIZER)
7146 /* If the field isn't aligned enough to fetch as a memref,
7147 fetch it as a bit field. */
7148 || (mode1 != BLKmode
7149 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
7150 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
7151 || (GET_CODE (op0) == MEM
7152 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
7153 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
7154 && ((modifier == EXPAND_CONST_ADDRESS
7155 || modifier == EXPAND_INITIALIZER)
7156 ? STRICT_ALIGNMENT
7157 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
7158 || (bitpos % BITS_PER_UNIT != 0)))
7159 /* If the type and the field are a constant size and the
7160 size of the type isn't the same size as the bitfield,
7161 we must use bitfield operations. */
7162 || (bitsize >= 0
7163 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7164 == INTEGER_CST)
7165 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7166 bitsize)))
7168 enum machine_mode ext_mode = mode;
7170 if (ext_mode == BLKmode
7171 && ! (target != 0 && GET_CODE (op0) == MEM
7172 && GET_CODE (target) == MEM
7173 && bitpos % BITS_PER_UNIT == 0))
7174 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7176 if (ext_mode == BLKmode)
7178 if (target == 0)
7179 target = assign_temp (type, 0, 1, 1);
7181 if (bitsize == 0)
7182 return target;
7184 /* In this case, BITPOS must start at a byte boundary and
7185 TARGET, if specified, must be a MEM. */
7186 if (GET_CODE (op0) != MEM
7187 || (target != 0 && GET_CODE (target) != MEM)
7188 || bitpos % BITS_PER_UNIT != 0)
7189 abort ();
7191 emit_block_move (target,
7192 adjust_address (op0, VOIDmode,
7193 bitpos / BITS_PER_UNIT),
7194 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7195 / BITS_PER_UNIT),
7196 (modifier == EXPAND_STACK_PARM
7197 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7199 return target;
7202 op0 = validize_mem (op0);
7204 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
7205 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7207 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7208 (modifier == EXPAND_STACK_PARM
7209 ? NULL_RTX : target),
7210 ext_mode, ext_mode,
7211 int_size_in_bytes (TREE_TYPE (tem)));
7213 /* If the result is a record type and BITSIZE is narrower than
7214 the mode of OP0, an integral mode, and this is a big endian
7215 machine, we must put the field into the high-order bits. */
7216 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7217 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7218 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7219 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7220 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7221 - bitsize),
7222 op0, 1);
7224 if (mode == BLKmode)
7226 rtx new = assign_temp (build_qualified_type
7227 ((*lang_hooks.types.type_for_mode)
7228 (ext_mode, 0),
7229 TYPE_QUAL_CONST), 0, 1, 1);
7231 emit_move_insn (new, op0);
7232 op0 = copy_rtx (new);
7233 PUT_MODE (op0, BLKmode);
7234 set_mem_attributes (op0, exp, 1);
7237 return op0;
7240 /* If the result is BLKmode, use that to access the object
7241 now as well. */
7242 if (mode == BLKmode)
7243 mode1 = BLKmode;
7245 /* Get a reference to just this component. */
7246 if (modifier == EXPAND_CONST_ADDRESS
7247 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7248 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7249 else
7250 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7252 if (op0 == orig_op0)
7253 op0 = copy_rtx (op0);
7255 set_mem_attributes (op0, exp, 0);
7256 if (GET_CODE (XEXP (op0, 0)) == REG)
7257 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7259 MEM_VOLATILE_P (op0) |= volatilep;
7260 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7261 || modifier == EXPAND_CONST_ADDRESS
7262 || modifier == EXPAND_INITIALIZER)
7263 return op0;
7264 else if (target == 0)
7265 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7267 convert_move (target, op0, unsignedp);
7268 return target;
7271 case VTABLE_REF:
7273 rtx insn, before = get_last_insn (), vtbl_ref;
7275 /* Evaluate the interior expression. */
7276 subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
7277 tmode, modifier);
7279 /* Get or create an instruction off which to hang a note. */
7280 if (REG_P (subtarget))
7282 target = subtarget;
7283 insn = get_last_insn ();
7284 if (insn == before)
7285 abort ();
7286 if (! INSN_P (insn))
7287 insn = prev_nonnote_insn (insn);
7289 else
7291 target = gen_reg_rtx (GET_MODE (subtarget));
7292 insn = emit_move_insn (target, subtarget);
7295 /* Collect the data for the note. */
7296 vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
7297 vtbl_ref = plus_constant (vtbl_ref,
7298 tree_low_cst (TREE_OPERAND (exp, 2), 0));
7299 /* Discard the initial CONST that was added. */
7300 vtbl_ref = XEXP (vtbl_ref, 0);
7302 REG_NOTES (insn)
7303 = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));
7305 return target;
7308 /* Intended for a reference to a buffer of a file-object in Pascal.
7309 But it's not certain that a special tree code will really be
7310 necessary for these. INDIRECT_REF might work for them. */
7311 case BUFFER_REF:
7312 abort ();
7314 case IN_EXPR:
7316 /* Pascal set IN expression.
7318 Algorithm:
7319 rlo = set_low - (set_low%bits_per_word);
7320 the_word = set [ (index - rlo)/bits_per_word ];
7321 bit_index = index % bits_per_word;
7322 bitmask = 1 << bit_index;
7323 return !!(the_word & bitmask); */
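/* The algorithm above, restated roughly with byte (BITS_PER_UNIT)
   rather than word granularity and with hypothetical names:

       diff   = i - rlow;                  bias by the set's low bound
       w      = set_bytes[diff / 8];       byte that holds the bit
       result = (w >> (i % 8)) & 1;        test the membership bit

   guarded by range checks that force the result to 0 whenever I lies
   outside [set_low, set_high].  */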
7325 tree set = TREE_OPERAND (exp, 0);
7326 tree index = TREE_OPERAND (exp, 1);
7327 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7328 tree set_type = TREE_TYPE (set);
7329 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7330 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7331 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7332 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7333 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7334 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7335 rtx setaddr = XEXP (setval, 0);
7336 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7337 rtx rlow;
7338 rtx diff, quo, rem, addr, bit, result;
7340 /* If domain is empty, answer is no. Likewise if index is constant
7341 and out of bounds. */
7342 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7343 && TREE_CODE (set_low_bound) == INTEGER_CST
7344 && tree_int_cst_lt (set_high_bound, set_low_bound))
7345 || (TREE_CODE (index) == INTEGER_CST
7346 && TREE_CODE (set_low_bound) == INTEGER_CST
7347 && tree_int_cst_lt (index, set_low_bound))
7348 || (TREE_CODE (set_high_bound) == INTEGER_CST
7349 && TREE_CODE (index) == INTEGER_CST
7350 && tree_int_cst_lt (set_high_bound, index))))
7351 return const0_rtx;
7353 if (target == 0)
7354 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7356 /* If we get here, we have to generate the code for both cases
7357 (in range and out of range). */
7359 op0 = gen_label_rtx ();
7360 op1 = gen_label_rtx ();
7362 if (! (GET_CODE (index_val) == CONST_INT
7363 && GET_CODE (lo_r) == CONST_INT))
7364 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7365 GET_MODE (index_val), iunsignedp, op1);
7367 if (! (GET_CODE (index_val) == CONST_INT
7368 && GET_CODE (hi_r) == CONST_INT))
7369 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7370 GET_MODE (index_val), iunsignedp, op1);
7372 /* Calculate the element number of bit zero in the first word
7373 of the set. */
7374 if (GET_CODE (lo_r) == CONST_INT)
7375 rlow = GEN_INT (INTVAL (lo_r)
7376 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7377 else
7378 rlow = expand_binop (index_mode, and_optab, lo_r,
7379 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7380 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7382 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7383 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7385 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7386 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7387 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7388 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7390 addr = memory_address (byte_mode,
7391 expand_binop (index_mode, add_optab, diff,
7392 setaddr, NULL_RTX, iunsignedp,
7393 OPTAB_LIB_WIDEN));
7395 /* Extract the bit we want to examine. */
7396 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7397 gen_rtx_MEM (byte_mode, addr),
7398 make_tree (TREE_TYPE (index), rem),
7399 NULL_RTX, 1);
7400 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7401 GET_MODE (target) == byte_mode ? target : 0,
7402 1, OPTAB_LIB_WIDEN);
7404 if (result != target)
7405 convert_move (target, result, 1);
7407 /* Output the code to handle the out-of-range case. */
7408 emit_jump (op0);
7409 emit_label (op1);
7410 emit_move_insn (target, const0_rtx);
7411 emit_label (op0);
7412 return target;
7415 case WITH_CLEANUP_EXPR:
7416 if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
7418 WITH_CLEANUP_EXPR_RTL (exp)
7419 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7420 expand_decl_cleanup_eh (NULL_TREE, TREE_OPERAND (exp, 1),
7421 CLEANUP_EH_ONLY (exp));
7423 /* That's it for this cleanup. */
7424 TREE_OPERAND (exp, 1) = 0;
7426 return WITH_CLEANUP_EXPR_RTL (exp);
7428 case CLEANUP_POINT_EXPR:
7430 /* Start a new binding layer that will keep track of all cleanup
7431 actions to be performed. */
7432 expand_start_bindings (2);
7434 target_temp_slot_level = temp_slot_level;
7436 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7437 /* If we're going to use this value, load it up now. */
7438 if (! ignore)
7439 op0 = force_not_mem (op0);
7440 preserve_temp_slots (op0);
7441 expand_end_bindings (NULL_TREE, 0, 0);
7443 return op0;
7445 case CALL_EXPR:
7446 /* Check for a built-in function. */
7447 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7448 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7449 == FUNCTION_DECL)
7450 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7452 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7453 == BUILT_IN_FRONTEND)
7454 return (*lang_hooks.expand_expr) (exp, original_target,
7455 tmode, modifier,
7456 alt_rtl);
7457 else
7458 return expand_builtin (exp, target, subtarget, tmode, ignore);
7461 return expand_call (exp, target, ignore);
7463 case NON_LVALUE_EXPR:
7464 case NOP_EXPR:
7465 case CONVERT_EXPR:
7466 case REFERENCE_EXPR:
7467 if (TREE_OPERAND (exp, 0) == error_mark_node)
7468 return const0_rtx;
7470 if (TREE_CODE (type) == UNION_TYPE)
7472 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7474 /* If both input and output are BLKmode, this conversion isn't doing
7475 anything except possibly changing memory attributes. */
7476 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7478 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7479 modifier);
7481 result = copy_rtx (result);
7482 set_mem_attributes (result, exp, 0);
7483 return result;
7486 if (target == 0)
7488 if (TYPE_MODE (type) != BLKmode)
7489 target = gen_reg_rtx (TYPE_MODE (type));
7490 else
7491 target = assign_temp (type, 0, 1, 1);
7494 if (GET_CODE (target) == MEM)
7495 /* Store data into beginning of memory target. */
7496 store_expr (TREE_OPERAND (exp, 0),
7497 adjust_address (target, TYPE_MODE (valtype), 0),
7498 modifier == EXPAND_STACK_PARM ? 2 : 0);
7500 else if (GET_CODE (target) == REG)
7501 /* Store this field into a union of the proper type. */
7502 store_field (target,
7503 MIN ((int_size_in_bytes (TREE_TYPE
7504 (TREE_OPERAND (exp, 0)))
7505 * BITS_PER_UNIT),
7506 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7507 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7508 VOIDmode, 0, type, 0);
7509 else
7510 abort ();
7512 /* Return the entire union. */
7513 return target;
7516 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7518 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7519 modifier);
7521 /* If the signedness of the conversion differs and OP0 is
7522 a promoted SUBREG, clear that indication since we now
7523 have to do the proper extension. */
7524 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7525 && GET_CODE (op0) == SUBREG)
7526 SUBREG_PROMOTED_VAR_P (op0) = 0;
7528 return op0;
7531 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7532 if (GET_MODE (op0) == mode)
7533 return op0;
7535 /* If OP0 is a constant, just convert it into the proper mode. */
7536 if (CONSTANT_P (op0))
7538 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7539 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7541 if (modifier == EXPAND_INITIALIZER)
7542 return simplify_gen_subreg (mode, op0, inner_mode,
7543 subreg_lowpart_offset (mode,
7544 inner_mode));
7545 else
7546 return convert_modes (mode, inner_mode, op0,
7547 TREE_UNSIGNED (inner_type));
7550 if (modifier == EXPAND_INITIALIZER)
7551 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7553 if (target == 0)
7554 return
7555 convert_to_mode (mode, op0,
7556 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7557 else
7558 convert_move (target, op0,
7559 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7560 return target;
7562 case VIEW_CONVERT_EXPR:
7563 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7565 /* If the input and output modes are both the same, we are done.
7566 Otherwise, if neither mode is BLKmode and both are integral and within
7567 a word, we can use gen_lowpart. If neither is true, make sure the
7568 operand is in memory and convert the MEM to the new mode. */
7569 if (TYPE_MODE (type) == GET_MODE (op0))
7571 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7572 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7573 && GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT
7574 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7575 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7576 op0 = gen_lowpart (TYPE_MODE (type), op0);
7577 else if (GET_CODE (op0) != MEM)
7579 /* If the operand is not a MEM, force it into memory. Since we
7580 are going to be changing the mode of the MEM, don't call
7581 force_const_mem for constants because we don't allow pool
7582 constants to change mode. */
7583 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7585 if (TREE_ADDRESSABLE (exp))
7586 abort ();
7588 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7589 target
7590 = assign_stack_temp_for_type
7591 (TYPE_MODE (inner_type),
7592 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7594 emit_move_insn (target, op0);
7595 op0 = target;
7598 /* At this point, OP0 is in the correct mode. If the output type is such
7599 that the operand is known to be aligned, indicate that it is.
7600 Otherwise, we need only be concerned about alignment for non-BLKmode
7601 results. */
7602 if (GET_CODE (op0) == MEM)
7604 op0 = copy_rtx (op0);
7606 if (TYPE_ALIGN_OK (type))
7607 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7608 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7609 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7611 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7612 HOST_WIDE_INT temp_size
7613 = MAX (int_size_in_bytes (inner_type),
7614 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7615 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7616 temp_size, 0, type);
7617 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7619 if (TREE_ADDRESSABLE (exp))
7620 abort ();
7622 if (GET_MODE (op0) == BLKmode)
7623 emit_block_move (new_with_op0_mode, op0,
7624 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
7625 (modifier == EXPAND_STACK_PARM
7626 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7627 else
7628 emit_move_insn (new_with_op0_mode, op0);
7630 op0 = new;
7633 op0 = adjust_address (op0, TYPE_MODE (type), 0);
7636 return op0;
7638 case PLUS_EXPR:
7639 this_optab = ! unsignedp && flag_trapv
7640 && (GET_MODE_CLASS (mode) == MODE_INT)
7641 ? addv_optab : add_optab;
7643 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7644 something else, make sure we add the register to the constant and
7645 then to the other thing. This case can occur during strength
7646 reduction and doing it this way will produce better code if the
7647 frame pointer or argument pointer is eliminated.
7649 fold-const.c will ensure that the constant is always in the inner
7650 PLUS_EXPR, so the only case we need to do anything about is if
7651 sp, ap, or fp is our second argument, in which case we must swap
7652 the innermost first argument and our second argument. */
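/* A concrete sketch: fold-const.c may hand us (x + 4) + fp.  Swapping
   FP with the innermost first argument gives (fp + 4) + x, so the
   register and the constant can fold into a single address once the
   frame pointer is eliminated.  */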
7654 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7655 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7656 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7657 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7658 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7659 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7661 tree t = TREE_OPERAND (exp, 1);
7663 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7664 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7667 /* If the result is to be ptr_mode and we are adding an integer to
7668 something, we might be forming a constant. So try to use
7669 plus_constant. If it produces a sum and we can't accept it,
7670 use force_operand. This allows P = &ARR[const] to generate
7671 efficient code on machines where a SYMBOL_REF is not a valid
7672 address.
7674 If this is an EXPAND_SUM call, always return the sum. */
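/* For instance (a sketch, assuming 4-byte ints): for a static array
   ARR, the initializer &ARR[3] reaches this point as a symbol plus 12,
   and plus_constant folds it into a single
   (const (plus (symbol_ref "ARR") (const_int 12)))
   instead of emitting an add instruction.  */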
7675 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7676 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7678 if (modifier == EXPAND_STACK_PARM)
7679 target = 0;
7680 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7681 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7682 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7684 rtx constant_part;
7686 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7687 EXPAND_SUM);
7688 /* Use immed_double_const to ensure that the constant is
7689 truncated according to the mode of OP1, then sign extended
7690 to a HOST_WIDE_INT. Using the constant directly can result
7691 in non-canonical RTL in a 64x32 cross compile. */
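/* E.g. (a sketch of the 64x32 case): with a 64-bit HOST_WIDE_INT and a
   32-bit target, the tree constant 0xffffffff must be represented as
   (const_int -1) in SImode to be canonical RTL; immed_double_const
   performs exactly that truncation and sign extension rather than
   using the raw host value.  */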
7692 constant_part
7693 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7694 (HOST_WIDE_INT) 0,
7695 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7696 op1 = plus_constant (op1, INTVAL (constant_part));
7697 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7698 op1 = force_operand (op1, target);
7699 return op1;
7702 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7703 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7704 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7706 rtx constant_part;
7708 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7709 (modifier == EXPAND_INITIALIZER
7710 ? EXPAND_INITIALIZER : EXPAND_SUM));
7711 if (! CONSTANT_P (op0))
7713 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7714 VOIDmode, modifier);
7715 /* Return a PLUS if modifier says it's OK. */
7716 if (modifier == EXPAND_SUM
7717 || modifier == EXPAND_INITIALIZER)
7718 return simplify_gen_binary (PLUS, mode, op0, op1);
7719 goto binop2;
7721 /* Use immed_double_const to ensure that the constant is
7722 truncated according to the mode of OP0, then sign extended
7723 to a HOST_WIDE_INT. Using the constant directly can result
7724 in non-canonical RTL in a 64x32 cross compile. */
7725 constant_part
7726 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7727 (HOST_WIDE_INT) 0,
7728 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7729 op0 = plus_constant (op0, INTVAL (constant_part));
7730 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7731 op0 = force_operand (op0, target);
7732 return op0;
7736 /* No sense saving up arithmetic to be done
7737 if it's all in the wrong mode to form part of an address.
7738 And force_operand won't know whether to sign-extend or
7739 zero-extend. */
7740 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7741 || mode != ptr_mode)
7743 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7744 subtarget, &op0, &op1, 0);
7745 if (op0 == const0_rtx)
7746 return op1;
7747 if (op1 == const0_rtx)
7748 return op0;
7749 goto binop2;
7752 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7753 subtarget, &op0, &op1, modifier);
7754 return simplify_gen_binary (PLUS, mode, op0, op1);
7756 case MINUS_EXPR:
7757 /* For initializers, we are allowed to return a MINUS of two
7758 symbolic constants. Here we handle all cases when both operands
7759 are constant. */
7760 /* Handle difference of two symbolic constants,
7761 for the sake of an initializer. */
7762 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7763 && really_constant_p (TREE_OPERAND (exp, 0))
7764 && really_constant_p (TREE_OPERAND (exp, 1)))
7766 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7767 NULL_RTX, &op0, &op1, modifier);
7769 /* If the last operand is a CONST_INT, use plus_constant of
7770 the negated constant. Else make the MINUS. */
7771 if (GET_CODE (op1) == CONST_INT)
7772 return plus_constant (op0, - INTVAL (op1));
7773 else
7774 return gen_rtx_MINUS (mode, op0, op1);
7777 this_optab = ! unsignedp && flag_trapv
7778 && (GET_MODE_CLASS(mode) == MODE_INT)
7779 ? subv_optab : sub_optab;
7781 /* No sense saving up arithmetic to be done
7782 if it's all in the wrong mode to form part of an address.
7783 And force_operand won't know whether to sign-extend or
7784 zero-extend. */
7785 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7786 || mode != ptr_mode)
7787 goto binop;
7789 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7790 subtarget, &op0, &op1, modifier);
7792 /* Convert A - const to A + (-const). */
7793 if (GET_CODE (op1) == CONST_INT)
7795 op1 = negate_rtx (mode, op1);
7796 return simplify_gen_binary (PLUS, mode, op0, op1);
7799 goto binop2;
7801 case MULT_EXPR:
7802 /* If first operand is constant, swap them.
7803 Thus the following special case checks need only
7804 check the second operand. */
7805 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7807 tree t1 = TREE_OPERAND (exp, 0);
7808 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7809 TREE_OPERAND (exp, 1) = t1;
7812 /* Attempt to return something suitable for generating an
7813 indexed address, for machines that support that. */
7815 if (modifier == EXPAND_SUM && mode == ptr_mode
7816 && host_integerp (TREE_OPERAND (exp, 1), 0))
7818 tree exp1 = TREE_OPERAND (exp, 1);
7820 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7821 EXPAND_SUM);
7823 if (GET_CODE (op0) != REG)
7824 op0 = force_operand (op0, NULL_RTX);
7825 if (GET_CODE (op0) != REG)
7826 op0 = copy_to_mode_reg (mode, op0);
7828 return gen_rtx_MULT (mode, op0,
7829 gen_int_mode (tree_low_cst (exp1, 0),
7830 TYPE_MODE (TREE_TYPE (exp1))));
7833 if (modifier == EXPAND_STACK_PARM)
7834 target = 0;
7836 /* Check for multiplying things that have been extended
7837 from a narrower type. If this machine supports multiplying
7838 in that narrower type with a result in the desired type,
7839 do it that way, and avoid the explicit type-conversion. */
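/* For example (a sketch, assuming 32-bit int and a 64-bit result type):
   (long long) a * (long long) b, with A and B both int, can be emitted
   as a single widening 32x32->64 multiply (the smul_widen_optab /
   umul_widen_optab handlers tested below) instead of extending both
   operands to 64 bits and doing a full 64x64 multiply.  */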
7840 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7841 && TREE_CODE (type) == INTEGER_TYPE
7842 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7843 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7844 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7845 && int_fits_type_p (TREE_OPERAND (exp, 1),
7846 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7847 /* Don't use a widening multiply if a shift will do. */
7848 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7849 > HOST_BITS_PER_WIDE_INT)
7850 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7851 ||
7852 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7853 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7854 ==
7855 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7856 /* If both operands are extended, they must either both
7857 be zero-extended or both be sign-extended. */
7858 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7859 ==
7860 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7862 tree op0type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
7863 enum machine_mode innermode = TYPE_MODE (op0type);
7864 bool zextend_p = TREE_UNSIGNED (op0type);
7865 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
7866 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
7868 if (mode == GET_MODE_WIDER_MODE (innermode))
7870 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7872 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7873 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7874 TREE_OPERAND (exp, 1),
7875 NULL_RTX, &op0, &op1, 0);
7876 else
7877 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7878 TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7879 NULL_RTX, &op0, &op1, 0);
7880 goto binop2;
7882 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7883 && innermode == word_mode)
7885 rtx htem, hipart;
7886 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7887 NULL_RTX, VOIDmode, 0);
7888 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7889 op1 = convert_modes (innermode, mode,
7890 expand_expr (TREE_OPERAND (exp, 1),
7891 NULL_RTX, VOIDmode, 0),
7892 unsignedp);
7893 else
7894 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7895 NULL_RTX, VOIDmode, 0);
7896 temp = expand_binop (mode, other_optab, op0, op1, target,
7897 unsignedp, OPTAB_LIB_WIDEN);
7898 hipart = gen_highpart (innermode, temp);
7899 htem = expand_mult_highpart_adjust (innermode, hipart,
7900 op0, op1, hipart,
7901 zextend_p);
7902 if (htem != hipart)
7903 emit_move_insn (hipart, htem);
7904 return temp;
7908 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7909 subtarget, &op0, &op1, 0);
7910 return expand_mult (mode, op0, op1, target, unsignedp);
7912 case TRUNC_DIV_EXPR:
7913 case FLOOR_DIV_EXPR:
7914 case CEIL_DIV_EXPR:
7915 case ROUND_DIV_EXPR:
7916 case EXACT_DIV_EXPR:
7917 if (modifier == EXPAND_STACK_PARM)
7918 target = 0;
7919 /* Possible optimization: compute the dividend with EXPAND_SUM
7920 then if the divisor is constant can optimize the case
7921 where some terms of the dividend have coeffs divisible by it. */
7922 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7923 subtarget, &op0, &op1, 0);
7924 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7926 case RDIV_EXPR:
7927 /* Emit a/b as a*(1/b). Later, CSE may manage to share the reciprocal,
7928 saving the expensive divide. If not, combine will rebuild the original
7929 computation. */
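/* For instance (a sketch): with -funsafe-math-optimizations, x/y and
   z/y in the same expression can both become multiplications by the
   common subexpression 1/y, so only one division is emitted; when the
   reciprocal is not reused, combine folds x*(1/y) back into x/y.  */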
7930 if (flag_unsafe_math_optimizations && optimize && !optimize_size
7931 && TREE_CODE (type) == REAL_TYPE
7932 && !real_onep (TREE_OPERAND (exp, 0)))
7933 return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
7934 build (RDIV_EXPR, type,
7935 build_real (type, dconst1),
7936 TREE_OPERAND (exp, 1))),
7937 target, tmode, modifier);
7938 this_optab = sdiv_optab;
7939 goto binop;
7941 case TRUNC_MOD_EXPR:
7942 case FLOOR_MOD_EXPR:
7943 case CEIL_MOD_EXPR:
7944 case ROUND_MOD_EXPR:
7945 if (modifier == EXPAND_STACK_PARM)
7946 target = 0;
7947 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7948 subtarget, &op0, &op1, 0);
7949 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7951 case FIX_ROUND_EXPR:
7952 case FIX_FLOOR_EXPR:
7953 case FIX_CEIL_EXPR:
7954 abort (); /* Not used for C. */
7956 case FIX_TRUNC_EXPR:
7957 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7958 if (target == 0 || modifier == EXPAND_STACK_PARM)
7959 target = gen_reg_rtx (mode);
7960 expand_fix (target, op0, unsignedp);
7961 return target;
7963 case FLOAT_EXPR:
7964 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7965 if (target == 0 || modifier == EXPAND_STACK_PARM)
7966 target = gen_reg_rtx (mode);
7967 /* expand_float can't figure out what to do if FROM has VOIDmode.
7968 So give it the correct mode. With -O, cse will optimize this. */
7969 if (GET_MODE (op0) == VOIDmode)
7970 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7971 op0);
7972 expand_float (target, op0,
7973 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7974 return target;
7976 case NEGATE_EXPR:
7977 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7978 if (modifier == EXPAND_STACK_PARM)
7979 target = 0;
7980 temp = expand_unop (mode,
7981 ! unsignedp && flag_trapv
7982 && (GET_MODE_CLASS(mode) == MODE_INT)
7983 ? negv_optab : neg_optab, op0, target, 0);
7984 if (temp == 0)
7985 abort ();
7986 return temp;
7988 case ABS_EXPR:
7989 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7990 if (modifier == EXPAND_STACK_PARM)
7991 target = 0;
7993 /* ABS_EXPR is not valid for complex arguments. */
7994 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7995 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7996 abort ();
7998 /* Unsigned abs is simply the operand. Testing here means we don't
7999 risk generating incorrect code below. */
8000 if (TREE_UNSIGNED (type))
8001 return op0;
8003 return expand_abs (mode, op0, target, unsignedp,
8004 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
8006 case MAX_EXPR:
8007 case MIN_EXPR:
8008 target = original_target;
8009 if (target == 0
8010 || modifier == EXPAND_STACK_PARM
8011 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
8012 || GET_MODE (target) != mode
8013 || (GET_CODE (target) == REG
8014 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8015 target = gen_reg_rtx (mode);
8016 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8017 target, &op0, &op1, 0);
8019 /* First try to do it with a special MIN or MAX instruction.
8020 If that does not win, use a conditional jump to select the proper
8021 value. */
8022 this_optab = (TREE_UNSIGNED (type)
8023 ? (code == MIN_EXPR ? umin_optab : umax_optab)
8024 : (code == MIN_EXPR ? smin_optab : smax_optab));
8026 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8027 OPTAB_WIDEN);
8028 if (temp != 0)
8029 return temp;
8031 /* At this point, a MEM target is no longer useful; we will get better
8032 code without it. */
8034 if (GET_CODE (target) == MEM)
8035 target = gen_reg_rtx (mode);
8037 /* If op1 was placed in target, swap op0 and op1. */
8038 if (target != op0 && target == op1)
8040 rtx tem = op0;
8041 op0 = op1;
8042 op1 = tem;
8045 if (target != op0)
8046 emit_move_insn (target, op0);
8048 op0 = gen_label_rtx ();
8050 /* If this mode is an integer too wide to compare properly,
8051 compare word by word. Rely on cse to optimize constant cases. */
8052 if (GET_MODE_CLASS (mode) == MODE_INT
8053 && ! can_compare_p (GE, mode, ccp_jump))
8055 if (code == MAX_EXPR)
8056 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8057 target, op1, NULL_RTX, op0);
8058 else
8059 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8060 op1, target, NULL_RTX, op0);
8062 else
8064 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
8065 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
8066 unsignedp, mode, NULL_RTX, NULL_RTX,
8067 op0);
8069 emit_move_insn (target, op1);
8070 emit_label (op0);
8071 return target;
8073 case BIT_NOT_EXPR:
8074 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8075 if (modifier == EXPAND_STACK_PARM)
8076 target = 0;
8077 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8078 if (temp == 0)
8079 abort ();
8080 return temp;
8082 /* ??? Can optimize bitwise operations with one arg constant.
8083 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8084 and (a bitwise1 b) bitwise2 b (etc)
8085 but that is probably not worthwhile. */
8087 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8088 boolean values when we want in all cases to compute both of them. In
8089 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8090 as actual zero-or-1 values and then bitwise anding. In cases where
8091 there cannot be any side effects, better code would be made by
8092 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8093 how to recognize those cases. */
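/* Roughly: TRUTH_AND_EXPR on two zero-or-one values is expanded just
   like BIT_AND_EXPR below, i.e. both operands are evaluated and the
   results bitwise ANDed, whereas TRUTH_ANDIF_EXPR (handled further
   down) emits a conditional jump and skips the second operand when the
   first is false.  */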
8095 case TRUTH_AND_EXPR:
8096 case BIT_AND_EXPR:
8097 this_optab = and_optab;
8098 goto binop;
8100 case TRUTH_OR_EXPR:
8101 case BIT_IOR_EXPR:
8102 this_optab = ior_optab;
8103 goto binop;
8105 case TRUTH_XOR_EXPR:
8106 case BIT_XOR_EXPR:
8107 this_optab = xor_optab;
8108 goto binop;
8110 case LSHIFT_EXPR:
8111 case RSHIFT_EXPR:
8112 case LROTATE_EXPR:
8113 case RROTATE_EXPR:
8114 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8115 subtarget = 0;
8116 if (modifier == EXPAND_STACK_PARM)
8117 target = 0;
8118 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8119 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8120 unsignedp);
8122 /* Could determine the answer when only additive constants differ. Also,
8123 the addition of one can be handled by changing the condition. */
8124 case LT_EXPR:
8125 case LE_EXPR:
8126 case GT_EXPR:
8127 case GE_EXPR:
8128 case EQ_EXPR:
8129 case NE_EXPR:
8130 case UNORDERED_EXPR:
8131 case ORDERED_EXPR:
8132 case UNLT_EXPR:
8133 case UNLE_EXPR:
8134 case UNGT_EXPR:
8135 case UNGE_EXPR:
8136 case UNEQ_EXPR:
8137 temp = do_store_flag (exp,
8138 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8139 tmode != VOIDmode ? tmode : mode, 0);
8140 if (temp != 0)
8141 return temp;
8143 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8144 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8145 && original_target
8146 && GET_CODE (original_target) == REG
8147 && (GET_MODE (original_target)
8148 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8150 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8151 VOIDmode, 0);
8153 /* If temp is constant, we can just compute the result. */
8154 if (GET_CODE (temp) == CONST_INT)
8156 if (INTVAL (temp) != 0)
8157 emit_move_insn (target, const1_rtx);
8158 else
8159 emit_move_insn (target, const0_rtx);
8161 return target;
8164 if (temp != original_target)
8166 enum machine_mode mode1 = GET_MODE (temp);
8167 if (mode1 == VOIDmode)
8168 mode1 = tmode != VOIDmode ? tmode : mode;
8170 temp = copy_to_mode_reg (mode1, temp);
8173 op1 = gen_label_rtx ();
8174 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8175 GET_MODE (temp), unsignedp, op1);
8176 emit_move_insn (temp, const1_rtx);
8177 emit_label (op1);
8178 return temp;
8181 /* If no set-flag instruction, must generate a conditional
8182 store into a temporary variable. Drop through
8183 and handle this like && and ||. */
8185 case TRUTH_ANDIF_EXPR:
8186 case TRUTH_ORIF_EXPR:
8187 if (! ignore
8188 && (target == 0
8189 || modifier == EXPAND_STACK_PARM
8190 || ! safe_from_p (target, exp, 1)
8191 /* Make sure we don't have a hard reg (such as function's return
8192 value) live across basic blocks, if not optimizing. */
8193 || (!optimize && GET_CODE (target) == REG
8194 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8195 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8197 if (target)
8198 emit_clr_insn (target);
8200 op1 = gen_label_rtx ();
8201 jumpifnot (exp, op1);
8203 if (target)
8204 emit_0_to_1_insn (target);
8206 emit_label (op1);
8207 return ignore ? const0_rtx : target;
8209 case TRUTH_NOT_EXPR:
8210 if (modifier == EXPAND_STACK_PARM)
8211 target = 0;
8212 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8213 /* The parser is careful to generate TRUTH_NOT_EXPR
8214 only with operands that are always zero or one. */
8215 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8216 target, 1, OPTAB_LIB_WIDEN);
8217 if (temp == 0)
8218 abort ();
8219 return temp;
8221 case COMPOUND_EXPR:
8222 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8223 emit_queue ();
8224 return expand_expr_real (TREE_OPERAND (exp, 1),
8225 (ignore ? const0_rtx : target),
8226 VOIDmode, modifier, alt_rtl);
8228 case COND_EXPR:
8229 /* If we would have a "singleton" (see below) were it not for a
8230 conversion in each arm, bring that conversion back out. */
8231 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8232 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8233 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8234 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8236 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8237 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8239 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8240 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8241 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8242 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8243 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8244 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8245 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8246 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
8247 return expand_expr (build1 (NOP_EXPR, type,
8248 build (COND_EXPR, TREE_TYPE (iftrue),
8249 TREE_OPERAND (exp, 0),
8250 iftrue, iffalse)),
8251 target, tmode, modifier);
8255 /* Note that COND_EXPRs whose type is a structure or union
8256 are required to be constructed to contain assignments of
8257 a temporary variable, so that we can evaluate them here
8258 for side effect only. If type is void, we must do likewise. */
8260 /* If an arm of the branch requires a cleanup,
8261 only that cleanup is performed. */
8263 tree singleton = 0;
8264 tree binary_op = 0, unary_op = 0;
8266 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8267 convert it to our mode, if necessary. */
8268 if (integer_onep (TREE_OPERAND (exp, 1))
8269 && integer_zerop (TREE_OPERAND (exp, 2))
8270 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8272 if (ignore)
8274 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8275 modifier);
8276 return const0_rtx;
8279 if (modifier == EXPAND_STACK_PARM)
8280 target = 0;
8281 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
8282 if (GET_MODE (op0) == mode)
8283 return op0;
8285 if (target == 0)
8286 target = gen_reg_rtx (mode);
8287 convert_move (target, op0, unsignedp);
8288 return target;
8291 /* Check for X ? A + B : A. If we have this, we can copy A to the
8292 output and conditionally add B. Similarly for unary operations.
8293 Don't do this if X has side-effects because those side effects
8294 might affect A or B and the "?" operation is a sequence point in
8295 ANSI. (operand_equal_p tests for side effects.) */
8297 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8298 && operand_equal_p (TREE_OPERAND (exp, 2),
8299 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8300 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8301 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8302 && operand_equal_p (TREE_OPERAND (exp, 1),
8303 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8304 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8305 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8306 && operand_equal_p (TREE_OPERAND (exp, 2),
8307 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8308 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8309 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8310 && operand_equal_p (TREE_OPERAND (exp, 1),
8311 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8312 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8314 /* If we are not to produce a result, we have no target. Otherwise,
8315 if a target was specified use it; it will not be used as an
8316 intermediate target unless it is safe. If no target, use a
8317 temporary. */
8319 if (ignore)
8320 temp = 0;
8321 else if (modifier == EXPAND_STACK_PARM)
8322 temp = assign_temp (type, 0, 0, 1);
8323 else if (original_target
8324 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8325 || (singleton && GET_CODE (original_target) == REG
8326 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8327 && original_target == var_rtx (singleton)))
8328 && GET_MODE (original_target) == mode
8329 #ifdef HAVE_conditional_move
8330 && (! can_conditionally_move_p (mode)
8331 || GET_CODE (original_target) == REG
8332 || TREE_ADDRESSABLE (type))
8333 #endif
8334 && (GET_CODE (original_target) != MEM
8335 || TREE_ADDRESSABLE (type)))
8336 temp = original_target;
8337 else if (TREE_ADDRESSABLE (type))
8338 abort ();
8339 else
8340 temp = assign_temp (type, 0, 0, 1);
8342 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8343 do the test of X as a store-flag operation, do this as
8344 A + ((X != 0) << log C). Similarly for other simple binary
8345 operators. Only do for C == 1 if BRANCH_COST is low. */
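/* For example (a sketch): "x ? a + 4 : a" can be emitted as
   a + ((x != 0) << 2), replacing the branch with a store-flag insn and
   a shift; when C == 1 the shift disappears entirely, which is why that
   form is allowed even when BRANCH_COST is low.  */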
8346 if (temp && singleton && binary_op
8347 && (TREE_CODE (binary_op) == PLUS_EXPR
8348 || TREE_CODE (binary_op) == MINUS_EXPR
8349 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8350 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8351 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8352 : integer_onep (TREE_OPERAND (binary_op, 1)))
8353 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8355 rtx result;
8356 tree cond;
8357 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8358 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8359 ? addv_optab : add_optab)
8360 : TREE_CODE (binary_op) == MINUS_EXPR
8361 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8362 ? subv_optab : sub_optab)
8363 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8364 : xor_optab);
8366 /* If we had X ? A : A + 1, do this as A + (X == 0). */
8367 if (singleton == TREE_OPERAND (exp, 1))
8368 cond = invert_truthvalue (TREE_OPERAND (exp, 0));
8369 else
8370 cond = TREE_OPERAND (exp, 0);
8372 result = do_store_flag (cond, (safe_from_p (temp, singleton, 1)
8373 ? temp : NULL_RTX),
8374 mode, BRANCH_COST <= 1);
8376 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8377 result = expand_shift (LSHIFT_EXPR, mode, result,
8378 build_int_2 (tree_log2
8379 (TREE_OPERAND
8380 (binary_op, 1)),
8381 0),
8382 (safe_from_p (temp, singleton, 1)
8383 ? temp : NULL_RTX), 0);
8385 if (result)
8387 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8388 return expand_binop (mode, boptab, op1, result, temp,
8389 unsignedp, OPTAB_LIB_WIDEN);
8393 do_pending_stack_adjust ();
8394 NO_DEFER_POP;
8395 op0 = gen_label_rtx ();
8397 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8399 if (temp != 0)
8401 /* If the target conflicts with the other operand of the
8402 binary op, we can't use it. Also, we can't use the target
8403 if it is a hard register, because evaluating the condition
8404 might clobber it. */
8405 if ((binary_op
8406 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8407 || (GET_CODE (temp) == REG
8408 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8409 temp = gen_reg_rtx (mode);
8410 store_expr (singleton, temp,
8411 modifier == EXPAND_STACK_PARM ? 2 : 0);
8413 else
8414 expand_expr (singleton,
8415 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8416 if (singleton == TREE_OPERAND (exp, 1))
8417 jumpif (TREE_OPERAND (exp, 0), op0);
8418 else
8419 jumpifnot (TREE_OPERAND (exp, 0), op0);
8421 start_cleanup_deferral ();
8422 if (binary_op && temp == 0)
8423 /* Just touch the other operand. */
8424 expand_expr (TREE_OPERAND (binary_op, 1),
8425 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8426 else if (binary_op)
8427 store_expr (build (TREE_CODE (binary_op), type,
8428 make_tree (type, temp),
8429 TREE_OPERAND (binary_op, 1)),
8430 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
8431 else
8432 store_expr (build1 (TREE_CODE (unary_op), type,
8433 make_tree (type, temp)),
8434 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
8435 op1 = op0;
8437 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8438 comparison operator. If we have one of these cases, set the
8439 output to A, branch on A (cse will merge these two references),
8440 then set the output to FOO. */
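/* Concretely (a sketch): for "x > 0 ? x : foo", store X into the
   target, branch on the same X (cse merges the two references), and
   only on the fall-through path overwrite the target with FOO, so the
   taken path needs no extra copy.  */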
8441 else if (temp
8442 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8443 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8444 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8445 TREE_OPERAND (exp, 1), 0)
8446 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8447 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8448 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8450 if (GET_CODE (temp) == REG
8451 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8452 temp = gen_reg_rtx (mode);
8453 store_expr (TREE_OPERAND (exp, 1), temp,
8454 modifier == EXPAND_STACK_PARM ? 2 : 0);
8455 jumpif (TREE_OPERAND (exp, 0), op0);
8457 start_cleanup_deferral ();
8458 if (TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8459 store_expr (TREE_OPERAND (exp, 2), temp,
8460 modifier == EXPAND_STACK_PARM ? 2 : 0);
8461 else
8462 expand_expr (TREE_OPERAND (exp, 2),
8463 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8464 op1 = op0;
8466 else if (temp
8467 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8468 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8469 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8470 TREE_OPERAND (exp, 2), 0)
8471 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8472 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8473 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8475 if (GET_CODE (temp) == REG
8476 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8477 temp = gen_reg_rtx (mode);
8478 store_expr (TREE_OPERAND (exp, 2), temp,
8479 modifier == EXPAND_STACK_PARM ? 2 : 0);
8480 jumpifnot (TREE_OPERAND (exp, 0), op0);
8482 start_cleanup_deferral ();
8483 if (TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8484 store_expr (TREE_OPERAND (exp, 1), temp,
8485 modifier == EXPAND_STACK_PARM ? 2 : 0);
8486 else
8487 expand_expr (TREE_OPERAND (exp, 1),
8488 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8489 op1 = op0;
8491 else
8493 op1 = gen_label_rtx ();
8494 jumpifnot (TREE_OPERAND (exp, 0), op0);
8496 start_cleanup_deferral ();
8498 /* One branch of the cond can be void, if it never returns. For
8499 example, A ? throw : E. */
8500 if (temp != 0
8501 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8502 store_expr (TREE_OPERAND (exp, 1), temp,
8503 modifier == EXPAND_STACK_PARM ? 2 : 0);
8504 else
8505 expand_expr (TREE_OPERAND (exp, 1),
8506 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8507 end_cleanup_deferral ();
8508 emit_queue ();
8509 emit_jump_insn (gen_jump (op1));
8510 emit_barrier ();
8511 emit_label (op0);
8512 start_cleanup_deferral ();
8513 if (temp != 0
8514 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8515 store_expr (TREE_OPERAND (exp, 2), temp,
8516 modifier == EXPAND_STACK_PARM ? 2 : 0);
8517 else
8518 expand_expr (TREE_OPERAND (exp, 2),
8519 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8522 end_cleanup_deferral ();
8524 emit_queue ();
8525 emit_label (op1);
8526 OK_DEFER_POP;
8528 return temp;
8531 case TARGET_EXPR:
8533 /* Something needs to be initialized, but we didn't know
8534 where that thing was when building the tree. For example,
8535 it could be the return value of a function, or a parameter
8536 to a function which is laid out on the stack, or a temporary
8537 variable which must be passed by reference.
8539 We guarantee that the expression will either be constructed
8540 or copied into our original target. */
8542 tree slot = TREE_OPERAND (exp, 0);
8543 tree cleanups = NULL_TREE;
8544 tree exp1;
8546 if (TREE_CODE (slot) != VAR_DECL)
8547 abort ();
8549 if (! ignore)
8550 target = original_target;
8552 /* Set this here so that if we get a target that refers to a
8553 register variable that's already been used, put_reg_into_stack
8554 knows that it should fix up those uses. */
8555 TREE_USED (slot) = 1;
8557 if (target == 0)
8559 if (DECL_RTL_SET_P (slot))
8561 target = DECL_RTL (slot);
8562 /* If we have already expanded the slot, don't do
8563 it again. (mrs) */
8564 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8565 return target;
8567 else
8569 target = assign_temp (type, 2, 0, 1);
8570 /* All temp slots at this level must not conflict. */
8571 preserve_temp_slots (target);
8572 SET_DECL_RTL (slot, target);
8573 if (TREE_ADDRESSABLE (slot))
8574 put_var_into_stack (slot, /*rescan=*/false);
8576 /* Since SLOT is not known to the called function
8577 to belong to its stack frame, we must build an explicit
8578 cleanup. This case occurs when we must build up a reference
8579 to pass the reference as an argument. In this case,
8580 it is very likely that such a reference need not be
8581 built here. */
8583 if (TREE_OPERAND (exp, 2) == 0)
8584 TREE_OPERAND (exp, 2)
8585 = (*lang_hooks.maybe_build_cleanup) (slot);
8586 cleanups = TREE_OPERAND (exp, 2);
8589 else
8591 /* This case does occur, when expanding a parameter which
8592 needs to be constructed on the stack. The target
8593 is the actual stack address that we want to initialize.
8594 The function we call will perform the cleanup in this case. */
8596 /* If we have already assigned it space, use that space,
8597 not the target we were passed in, as our target
8598 parameter is only a hint. */
8599 if (DECL_RTL_SET_P (slot))
8601 target = DECL_RTL (slot);
8602 /* If we have already expanded the slot, don't do
8603 it again. (mrs) */
8604 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8605 return target;
8607 else
8609 SET_DECL_RTL (slot, target);
8610 /* If we must have an addressable slot, then make sure that
8611 the RTL that we just stored in slot is OK. */
8612 if (TREE_ADDRESSABLE (slot))
8613 put_var_into_stack (slot, /*rescan=*/true);
8617 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8618 /* Mark it as expanded. */
8619 TREE_OPERAND (exp, 1) = NULL_TREE;
8621 store_expr (exp1, target, modifier == EXPAND_STACK_PARM ? 2 : 0);
8623 expand_decl_cleanup_eh (NULL_TREE, cleanups, CLEANUP_EH_ONLY (exp));
8625 return target;
8628 case INIT_EXPR:
8630 tree lhs = TREE_OPERAND (exp, 0);
8631 tree rhs = TREE_OPERAND (exp, 1);
8633 temp = expand_assignment (lhs, rhs, ! ignore);
8634 return temp;
8637 case MODIFY_EXPR:
8639 /* If lhs is complex, expand calls in rhs before computing it.
8640 That's so we don't compute a pointer and save it over a
8641 call. If lhs is simple, compute it first so we can give it
8642 as a target if the rhs is just a call. This avoids an
8643 extra temp and copy, and prevents a partial subsumption
8644 that makes bad code. Actually we could treat
8645 component_ref's of vars like vars. */
8647 tree lhs = TREE_OPERAND (exp, 0);
8648 tree rhs = TREE_OPERAND (exp, 1);
8650 temp = 0;
8652 /* Check for |= or &= of a bitfield of size one into another bitfield
8653 of size one. In this case, (unless we need the result of the
8654 assignment) we can do this more efficiently with a
8655 test followed by an assignment, if necessary.
8657 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8658 things change so we do, this code should be enhanced to
8659 support it. */
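/* For instance (a sketch): with one-bit fields, "s.a |= s.b;" whose
   value is not needed can be emitted as "if (s.b) s.a = 1;", i.e. a
   test and a conditional store instead of a read-modify-write of the
   destination bit-field.  */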
8660 if (ignore
8661 && TREE_CODE (lhs) == COMPONENT_REF
8662 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8663 || TREE_CODE (rhs) == BIT_AND_EXPR)
8664 && TREE_OPERAND (rhs, 0) == lhs
8665 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8666 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8667 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8669 rtx label = gen_label_rtx ();
8671 do_jump (TREE_OPERAND (rhs, 1),
8672 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8673 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8674 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8675 (TREE_CODE (rhs) == BIT_IOR_EXPR
8676 ? integer_one_node
8677 : integer_zero_node)),
8678 0);
8679 do_pending_stack_adjust ();
8680 emit_label (label);
8681 return const0_rtx;
8684 temp = expand_assignment (lhs, rhs, ! ignore);
8686 return temp;
8689 case RETURN_EXPR:
8690 if (!TREE_OPERAND (exp, 0))
8691 expand_null_return ();
8692 else
8693 expand_return (TREE_OPERAND (exp, 0));
8694 return const0_rtx;
8696 case PREINCREMENT_EXPR:
8697 case PREDECREMENT_EXPR:
8698 return expand_increment (exp, 0, ignore);
8700 case POSTINCREMENT_EXPR:
8701 case POSTDECREMENT_EXPR:
8702 /* Faster to treat as pre-increment if result is not used. */
8703 return expand_increment (exp, ! ignore, ignore);
8705 case ADDR_EXPR:
8706 if (modifier == EXPAND_STACK_PARM)
8707 target = 0;
8708 /* Are we taking the address of a nested function? */
8709 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8710 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8711 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8712 && ! TREE_STATIC (exp))
8714 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8715 op0 = force_operand (op0, target);
8717 /* If we are taking the address of something erroneous, just
8718 return a zero. */
8719 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8720 return const0_rtx;
8721 /* If we are taking the address of a constant and are at the
8722 top level, we have to use output_constant_def since we can't
8723 call force_const_mem at top level. */
8724 else if (cfun == 0
8725 && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8726 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
8727 == 'c')))
8728 op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
8729 else
8731 /* We make sure to pass const0_rtx down if we came in with
8732 ignore set, to avoid doing the cleanups twice for something. */
8733 op0 = expand_expr (TREE_OPERAND (exp, 0),
8734 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8735 (modifier == EXPAND_INITIALIZER
8736 ? modifier : EXPAND_CONST_ADDRESS));
8738 /* If we are going to ignore the result, OP0 will have been set
8739 to const0_rtx, so just return it. Don't get confused and
8740 think we are taking the address of the constant. */
8741 if (ignore)
8742 return op0;
8744 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
8745 clever and return a REG when given a MEM. */
8746 op0 = protect_from_queue (op0, 1);
8748 /* We would like the object in memory. If it is a constant, we can
8749 have it be statically allocated into memory. For a non-constant,
8750 we need to allocate some memory and store the value into it. */
8752 if (CONSTANT_P (op0))
8753 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8754 op0);
8755 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8756 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
8757 || GET_CODE (op0) == PARALLEL || GET_CODE (op0) == LO_SUM)
8759 /* If the operand is a SAVE_EXPR, we can deal with this by
8760 forcing the SAVE_EXPR into memory. */
8761 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
8763 put_var_into_stack (TREE_OPERAND (exp, 0),
8764 /*rescan=*/true);
8765 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
8767 else
8769 /* If this object is in a register, it can't be BLKmode. */
8770 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8771 rtx memloc = assign_temp (inner_type, 1, 1, 1);
8773 if (GET_CODE (op0) == PARALLEL)
8774 /* Handle calls that pass values in multiple
8775 non-contiguous locations. The Irix 6 ABI has examples
8776 of this. */
8777 emit_group_store (memloc, op0, inner_type,
8778 int_size_in_bytes (inner_type));
8779 else
8780 emit_move_insn (memloc, op0);
8782 op0 = memloc;
8786 if (GET_CODE (op0) != MEM)
8787 abort ();
8789 mark_temp_addr_taken (op0);
8790 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8792 op0 = XEXP (op0, 0);
8793 if (GET_MODE (op0) == Pmode && mode == ptr_mode)
8794 op0 = convert_memory_address (ptr_mode, op0);
8795 return op0;
8798 /* If OP0 is not aligned at least as much as the type requires, we
8799 need to make a temporary, copy OP0 to it, and take the address of
8800 the temporary. We want to use the alignment of the type, not of
8801 the operand. Note that this is incorrect for FUNCTION_TYPE, but
8802 the test for BLKmode means that can't happen. The test for
8803 BLKmode is because we never make mis-aligned MEMs with
8804 non-BLKmode.
8806 We don't need to do this at all if the machine doesn't have
8807 strict alignment. */
8808 if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
8809 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
8810 > MEM_ALIGN (op0))
8811 && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
8813 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8814 rtx new;
8816 if (TYPE_ALIGN_OK (inner_type))
8817 abort ();
8819 if (TREE_ADDRESSABLE (inner_type))
8821 /* We can't make a bitwise copy of this object, so fail. */
8822 error ("cannot take the address of an unaligned member");
8823 return const0_rtx;
8826 new = assign_stack_temp_for_type
8827 (TYPE_MODE (inner_type),
8828 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
8829 : int_size_in_bytes (inner_type),
8830 1, build_qualified_type (inner_type,
8831 (TYPE_QUALS (inner_type)
8832 | TYPE_QUAL_CONST)));
8834 emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)),
8835 (modifier == EXPAND_STACK_PARM
8836 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
8838 op0 = new;
8841 op0 = force_operand (XEXP (op0, 0), target);
8844 if (flag_force_addr
8845 && GET_CODE (op0) != REG
8846 && modifier != EXPAND_CONST_ADDRESS
8847 && modifier != EXPAND_INITIALIZER
8848 && modifier != EXPAND_SUM)
8849 op0 = force_reg (Pmode, op0);
8851 if (GET_CODE (op0) == REG
8852 && ! REG_USERVAR_P (op0))
8853 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
8855 if (GET_MODE (op0) == Pmode && mode == ptr_mode)
8856 op0 = convert_memory_address (ptr_mode, op0);
8858 return op0;
8860 case ENTRY_VALUE_EXPR:
8861 abort ();
8863 /* COMPLEX type for Extended Pascal & Fortran */
8864 case COMPLEX_EXPR:
8866 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8867 rtx insns;
8869 /* Get the rtx code of the operands. */
8870 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8871 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8873 if (! target)
8874 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8876 start_sequence ();
8878 /* Move the real (op0) and imaginary (op1) parts to their location. */
8879 emit_move_insn (gen_realpart (mode, target), op0);
8880 emit_move_insn (gen_imagpart (mode, target), op1);
8882 insns = get_insns ();
8883 end_sequence ();
8885 /* Complex construction should appear as a single unit. */
8886 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8887 each with a separate pseudo as destination.
8888 It's not correct for flow to treat them as a unit. */
8889 if (GET_CODE (target) != CONCAT)
8890 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8891 else
8892 emit_insn (insns);
8894 return target;
8897 case REALPART_EXPR:
8898 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8899 return gen_realpart (mode, op0);
8901 case IMAGPART_EXPR:
8902 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8903 return gen_imagpart (mode, op0);
8905 case CONJ_EXPR:
8907 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8908 rtx imag_t;
8909 rtx insns;
8911 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8913 if (! target)
8914 target = gen_reg_rtx (mode);
8916 start_sequence ();
8918 /* Store the realpart and the negated imagpart to target. */
8919 emit_move_insn (gen_realpart (partmode, target),
8920 gen_realpart (partmode, op0));
8922 imag_t = gen_imagpart (partmode, target);
8923 temp = expand_unop (partmode,
8924 ! unsignedp && flag_trapv
8925 && (GET_MODE_CLASS(partmode) == MODE_INT)
8926 ? negv_optab : neg_optab,
8927 gen_imagpart (partmode, op0), imag_t, 0);
8928 if (temp != imag_t)
8929 emit_move_insn (imag_t, temp);
8931 insns = get_insns ();
8932 end_sequence ();
8934 /* Conjugate should appear as a single unit.
8935 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8936 each with a separate pseudo as destination.
8937 It's not correct for flow to treat them as a unit. */
8938 if (GET_CODE (target) != CONCAT)
8939 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8940 else
8941 emit_insn (insns);
8943 return target;
8946 case TRY_CATCH_EXPR:
8948 tree handler = TREE_OPERAND (exp, 1);
8950 expand_eh_region_start ();
8952 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8954 expand_eh_region_end_cleanup (handler);
8956 return op0;
8959 case TRY_FINALLY_EXPR:
8961 tree try_block = TREE_OPERAND (exp, 0);
8962 tree finally_block = TREE_OPERAND (exp, 1);
8964 if (!optimize || unsafe_for_reeval (finally_block) > 1)
8966 /* In this case, wrapping FINALLY_BLOCK in an UNSAVE_EXPR
8967 is not sufficient, so we cannot expand the block twice.
8968 So we play games with GOTO_SUBROUTINE_EXPR to let us
8969 expand the thing only once. */
8970 /* When not optimizing, we go ahead with this form since
8971 (1) user breakpoints operate more predictably without
8972 code duplication, and
8973 (2) we're not running any of the global optimizers
8974 that would explode in time/space with the highly
8975 connected CFG created by the indirect branching. */
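	    /* Schematically, for `try { BODY } finally { CLEANUP }' this
	       path emits roughly (label names are illustrative only):

		   BODY
		   return_link = &resume; goto finally;   resume:
		   goto done;
		 finally:
		   CLEANUP
		   jump *return_link;
		 done:

	       so CLEANUP is expanded exactly once and is reached both on
	       the fall-through path and through the cleanup machinery.  */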
8977 rtx finally_label = gen_label_rtx ();
8978 rtx done_label = gen_label_rtx ();
8979 rtx return_link = gen_reg_rtx (Pmode);
8980 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8981 (tree) finally_label, (tree) return_link);
8982 TREE_SIDE_EFFECTS (cleanup) = 1;
8984 /* Start a new binding layer that will keep track of all cleanup
8985 actions to be performed. */
8986 expand_start_bindings (2);
8987 target_temp_slot_level = temp_slot_level;
8989 expand_decl_cleanup (NULL_TREE, cleanup);
8990 op0 = expand_expr (try_block, target, tmode, modifier);
8992 preserve_temp_slots (op0);
8993 expand_end_bindings (NULL_TREE, 0, 0);
8994 emit_jump (done_label);
8995 emit_label (finally_label);
8996 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8997 emit_indirect_jump (return_link);
8998 emit_label (done_label);
9000 else
9002 expand_start_bindings (2);
9003 target_temp_slot_level = temp_slot_level;
9005 expand_decl_cleanup (NULL_TREE, finally_block);
9006 op0 = expand_expr (try_block, target, tmode, modifier);
9008 preserve_temp_slots (op0);
9009 expand_end_bindings (NULL_TREE, 0, 0);
9012 return op0;
9015 case GOTO_SUBROUTINE_EXPR:
9017 rtx subr = (rtx) TREE_OPERAND (exp, 0);
9018 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
9019 rtx return_address = gen_label_rtx ();
9020 emit_move_insn (return_link,
9021 gen_rtx_LABEL_REF (Pmode, return_address));
9022 emit_jump (subr);
9023 emit_label (return_address);
9024 return const0_rtx;
9027 case VA_ARG_EXPR:
9028 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
9030 case EXC_PTR_EXPR:
9031 return get_exception_pointer (cfun);
9033 case FDESC_EXPR:
9034 /* Function descriptors are not valid except for as
9035 initialization constants, and should not be expanded. */
9036 abort ();
9038 default:
9039 return (*lang_hooks.expand_expr) (exp, original_target, tmode, modifier,
9040 alt_rtl);
9043 /* Here to do an ordinary binary operator, generating an instruction
9044 from the optab already placed in `this_optab'. */
9045 binop:
9046 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
9047 subtarget, &op0, &op1, 0);
9048 binop2:
9049 if (modifier == EXPAND_STACK_PARM)
9050 target = 0;
9051 temp = expand_binop (mode, this_optab, op0, op1, target,
9052 unsignedp, OPTAB_LIB_WIDEN);
9053 if (temp == 0)
9054 abort ();
9055 return temp;
9058 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that,
9059 when applied to the address of EXP, produces an address known to be
9060 aligned more than BIGGEST_ALIGNMENT. */
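/* For example, an offset of the form

       (- (ADDR_EXPR of EXP)) & (ALIGN - 1)

   with ALIGN a power of 2 greater than BIGGEST_ALIGNMENT rounds the
   address of EXP up to the next ALIGN boundary, so an access at that
   offset from EXP is known to be ALIGN-aligned.  That is the shape the
   checks below look for.  */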
9062 static int
9063 is_aligning_offset (tree offset, tree exp)
9065 /* Strip off any conversions and WITH_RECORD_EXPR nodes. */
9066 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9067 || TREE_CODE (offset) == NOP_EXPR
9068 || TREE_CODE (offset) == CONVERT_EXPR
9069 || TREE_CODE (offset) == WITH_RECORD_EXPR)
9070 offset = TREE_OPERAND (offset, 0);
9072 /* We must now have a BIT_AND_EXPR with a constant that is one less than
9073 a power of 2 and which is larger than BIGGEST_ALIGNMENT. */
9074 if (TREE_CODE (offset) != BIT_AND_EXPR
9075 || !host_integerp (TREE_OPERAND (offset, 1), 1)
9076 || compare_tree_int (TREE_OPERAND (offset, 1), BIGGEST_ALIGNMENT) <= 0
9077 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
9078 return 0;
9080 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9081 It must be NEGATE_EXPR. Then strip any more conversions. */
9082 offset = TREE_OPERAND (offset, 0);
9083 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9084 || TREE_CODE (offset) == NOP_EXPR
9085 || TREE_CODE (offset) == CONVERT_EXPR)
9086 offset = TREE_OPERAND (offset, 0);
9088 if (TREE_CODE (offset) != NEGATE_EXPR)
9089 return 0;
9091 offset = TREE_OPERAND (offset, 0);
9092 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9093 || TREE_CODE (offset) == NOP_EXPR
9094 || TREE_CODE (offset) == CONVERT_EXPR)
9095 offset = TREE_OPERAND (offset, 0);
9097 /* This must now be the address either of EXP or of a PLACEHOLDER_EXPR
9098 whose type is the same as EXP. */
9099 return (TREE_CODE (offset) == ADDR_EXPR
9100 && (TREE_OPERAND (offset, 0) == exp
9101 || (TREE_CODE (TREE_OPERAND (offset, 0)) == PLACEHOLDER_EXPR
9102 && (TREE_TYPE (TREE_OPERAND (offset, 0))
9103 == TREE_TYPE (exp)))));
9106 /* Return the tree node if ARG corresponds to a string constant or zero
9107 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
9108 in bytes within the string that ARG is accessing. The type of the
9109 offset will be `sizetype'. */
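/* For instance, if ARG is an ADDR_EXPR of the STRING_CST "hello", the
   result is that STRING_CST with *PTR_OFFSET set to zero; if ARG is
   that ADDR_EXPR plus some expression N, the result is the same
   STRING_CST with *PTR_OFFSET set to N converted to sizetype.  */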
9111 tree
9112 string_constant (tree arg, tree *ptr_offset)
9114 STRIP_NOPS (arg);
9116 if (TREE_CODE (arg) == ADDR_EXPR
9117 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9119 *ptr_offset = size_zero_node;
9120 return TREE_OPERAND (arg, 0);
9122 else if (TREE_CODE (arg) == PLUS_EXPR)
9124 tree arg0 = TREE_OPERAND (arg, 0);
9125 tree arg1 = TREE_OPERAND (arg, 1);
9127 STRIP_NOPS (arg0);
9128 STRIP_NOPS (arg1);
9130 if (TREE_CODE (arg0) == ADDR_EXPR
9131 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
9133 *ptr_offset = convert (sizetype, arg1);
9134 return TREE_OPERAND (arg0, 0);
9136 else if (TREE_CODE (arg1) == ADDR_EXPR
9137 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
9139 *ptr_offset = convert (sizetype, arg0);
9140 return TREE_OPERAND (arg1, 0);
9144 return 0;
9147 /* Expand code for a post- or pre- increment or decrement
9148 and return the RTX for the result.
9149 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
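/* For a postincrement such as V++, the value returned must be the value
   of V from before the increment, so a copy of the old value is kept
   (or the add is queued) while V itself is updated to V + 1; for a
   preincrement ++V the updated value itself is returned.  */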
9151 static rtx
9152 expand_increment (tree exp, int post, int ignore)
9154 rtx op0, op1;
9155 rtx temp, value;
9156 tree incremented = TREE_OPERAND (exp, 0);
9157 optab this_optab = add_optab;
9158 int icode;
9159 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9160 int op0_is_copy = 0;
9161 int single_insn = 0;
9162 /* 1 means we can't store into OP0 directly,
9163 because it is a subreg narrower than a word,
9164 and we don't dare clobber the rest of the word. */
9165 int bad_subreg = 0;
9167 /* Stabilize any component ref that might need to be
9168 evaluated more than once below. */
9169 if (!post
9170 || TREE_CODE (incremented) == BIT_FIELD_REF
9171 || (TREE_CODE (incremented) == COMPONENT_REF
9172 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9173 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9174 incremented = stabilize_reference (incremented);
9175 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9176 ones into save exprs so that they don't accidentally get evaluated
9177 more than once by the code below. */
9178 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9179 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9180 incremented = save_expr (incremented);
9182 /* Compute the operands as RTX.
9183 Note whether OP0 is the actual lvalue or a copy of it:
9184 I believe it is a copy iff it is a register or subreg
9185 and insns were generated in computing it. */
9187 temp = get_last_insn ();
9188 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
9190 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9191 in place but instead must do sign- or zero-extension during assignment,
9192 so we copy it into a new register and let the code below use it as
9193 a copy.
9195 Note that we can safely modify this SUBREG since it is known not to be
9196 shared (it was made by the expand_expr call above). */
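  /* For instance, on a target that promotes a `short' variable to a full
     SImode register, OP0 here is a (subreg:HI (reg:SI N) ...) with
     SUBREG_PROMOTED_VAR_P set; any store to the variable must redo the
     sign- or zero-extension, so the subreg cannot simply be incremented
     in place.  The code below therefore works on a copy (postincrement)
     or marks bad_subreg (preincrement).  */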
9198 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9200 if (post)
9201 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9202 else
9203 bad_subreg = 1;
9205 else if (GET_CODE (op0) == SUBREG
9206 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9208 /* We cannot increment this SUBREG in place. If we are
9209 post-incrementing, get a copy of the old value. Otherwise,
9210 just mark that we cannot increment in place. */
9211 if (post)
9212 op0 = copy_to_reg (op0);
9213 else
9214 bad_subreg = 1;
9217 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9218 && temp != get_last_insn ());
9219 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9221 /* Decide whether incrementing or decrementing. */
9222 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9223 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9224 this_optab = sub_optab;
9226 /* Convert decrement by a constant into a negative increment. */
9227 if (this_optab == sub_optab
9228 && GET_CODE (op1) == CONST_INT)
9230 op1 = GEN_INT (-INTVAL (op1));
9231 this_optab = add_optab;
9234 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9235 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9237 /* For a preincrement, see if we can do this with a single instruction. */
9238 if (!post)
9240 icode = (int) this_optab->handlers[(int) mode].insn_code;
9241 if (icode != (int) CODE_FOR_nothing
9242 /* Make sure that OP0 is valid for operands 0 and 1
9243 of the insn we want to queue. */
9244 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9245 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9246 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9247 single_insn = 1;
9250 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9251 then we cannot just increment OP0. We must therefore contrive to
9252 increment the original value. Then, for postincrement, we can return
9253 OP0 since it is a copy of the old value. For preincrement, expand here
9254 unless we can do it with a single insn.
9256 Likewise if storing directly into OP0 would clobber high bits
9257 we need to preserve (bad_subreg). */
9258 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9260 /* This is the easiest way to increment the value wherever it is.
9261 Problems with multiple evaluation of INCREMENTED are prevented
9262 because either (1) it is a component_ref or preincrement,
9263 in which case it was stabilized above, or (2) it is an array_ref
9264 with constant index in an array in a register, which is
9265 safe to reevaluate. */
9266 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9267 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9268 ? MINUS_EXPR : PLUS_EXPR),
9269 TREE_TYPE (exp),
9270 incremented,
9271 TREE_OPERAND (exp, 1));
9273 while (TREE_CODE (incremented) == NOP_EXPR
9274 || TREE_CODE (incremented) == CONVERT_EXPR)
9276 newexp = convert (TREE_TYPE (incremented), newexp);
9277 incremented = TREE_OPERAND (incremented, 0);
9280 temp = expand_assignment (incremented, newexp, ! post && ! ignore);
9281 return post ? op0 : temp;
9284 if (post)
9286 /* We have a true reference to the value in OP0.
9287 If there is an insn to add or subtract in this mode, queue it.
9288 Queuing the increment insn avoids the register shuffling
9289 that often results if we must increment now and first save
9290 the old value for subsequent use. */
9292 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9293 op0 = stabilize (op0);
9294 #endif
9296 icode = (int) this_optab->handlers[(int) mode].insn_code;
9297 if (icode != (int) CODE_FOR_nothing
9298 /* Make sure that OP0 is valid for operands 0 and 1
9299 of the insn we want to queue. */
9300 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9301 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9303 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9304 op1 = force_reg (mode, op1);
9306 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9308 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9310 rtx addr = (general_operand (XEXP (op0, 0), mode)
9311 ? force_reg (Pmode, XEXP (op0, 0))
9312 : copy_to_reg (XEXP (op0, 0)));
9313 rtx temp, result;
9315 op0 = replace_equiv_address (op0, addr);
9316 temp = force_reg (GET_MODE (op0), op0);
9317 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9318 op1 = force_reg (mode, op1);
9320 /* The increment queue is LIFO, thus we have to `queue'
9321 the instructions in reverse order. */
9322 enqueue_insn (op0, gen_move_insn (op0, temp));
9323 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9324 return result;
9328 /* Preincrement, or we can't increment with one simple insn. */
9329 if (post)
9330 /* Save a copy of the value before inc or dec, to return it later. */
9331 temp = value = copy_to_reg (op0);
9332 else
9333 /* Arrange to return the incremented value. */
9334 /* Copy the rtx because expand_binop will protect from the queue,
9335 and the results of that would be invalid for us to return
9336 if our caller does emit_queue before using our result. */
9337 temp = copy_rtx (value = op0);
9339 /* Increment however we can. */
9340 op1 = expand_binop (mode, this_optab, value, op1, op0,
9341 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9343 /* Make sure the value is stored into OP0. */
9344 if (op1 != op0)
9345 emit_move_insn (op0, op1);
9347 return temp;
9350 /* Generate code to calculate EXP using a store-flag instruction
9351 and return an rtx for the result. EXP is either a comparison
9352 or a TRUTH_NOT_EXPR whose operand is a comparison.
9354 If TARGET is nonzero, store the result there if convenient.
9356 If ONLY_CHEAP is nonzero, only do this if it is likely to be very
9357 cheap.
9359 Return zero if there is no suitable set-flag instruction
9360 available on this machine.
9362 Once expand_expr has been called on the arguments of the comparison,
9363 we are committed to doing the store flag, since it is not safe to
9364 re-evaluate the expression. We emit the store-flag insn by calling
9365 emit_store_flag, but only expand the arguments if we have a reason
9366 to believe that emit_store_flag will be successful. If we think that
9367 it will, but it isn't, we have to simulate the store-flag with a
9368 set/jump/set sequence. */
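/* For example, for EXP of the form (X == 0) on a machine with a `seq'
   pattern, the operands are expanded and emit_store_flag is asked to
   emit something like (set TARGET (eq:M X (const_int 0))); if that is
   not possible, the set/jump/set fallback at the end of this function
   is used.  */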
9370 static rtx
9371 do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
9373 enum rtx_code code;
9374 tree arg0, arg1, type;
9375 tree tem;
9376 enum machine_mode operand_mode;
9377 int invert = 0;
9378 int unsignedp;
9379 rtx op0, op1;
9380 enum insn_code icode;
9381 rtx subtarget = target;
9382 rtx result, label;
9384 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9385 result at the end. We can't simply invert the test since it would
9386 have already been inverted if it were valid. This case occurs for
9387 some floating-point comparisons. */
9389 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
9390 invert = 1, exp = TREE_OPERAND (exp, 0);
9392 arg0 = TREE_OPERAND (exp, 0);
9393 arg1 = TREE_OPERAND (exp, 1);
9395 /* Don't crash if the comparison was erroneous. */
9396 if (arg0 == error_mark_node || arg1 == error_mark_node)
9397 return const0_rtx;
9399 type = TREE_TYPE (arg0);
9400 operand_mode = TYPE_MODE (type);
9401 unsignedp = TREE_UNSIGNED (type);
9403 /* We won't bother with BLKmode store-flag operations because it would mean
9404 passing a lot of information to emit_store_flag. */
9405 if (operand_mode == BLKmode)
9406 return 0;
9408 /* We won't bother with store-flag operations involving function pointers
9409 when function pointers must be canonicalized before comparisons. */
9410 #ifdef HAVE_canonicalize_funcptr_for_compare
9411 if (HAVE_canonicalize_funcptr_for_compare
9412 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9413 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9414 == FUNCTION_TYPE))
9415 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9416 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9417 == FUNCTION_TYPE))))
9418 return 0;
9419 #endif
9421 STRIP_NOPS (arg0);
9422 STRIP_NOPS (arg1);
9424 /* Get the rtx comparison code to use. We know that EXP is a comparison
9425 operation of some type. Some comparisons against 1 and -1 can be
9426 converted to comparisons with zero. Do so here so that the tests
9427 below will be aware that we have a comparison with zero. These
9428 tests will not catch constants in the first operand, but constants
9429 are rarely passed as the first operand. */
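/* For example, a signed test X < 1 becomes X <= 0 and an unsigned test
   X >= 1 becomes X > 0 (GTU), so the zero-comparison special cases
   further down can apply.  */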
9431 switch (TREE_CODE (exp))
9433 case EQ_EXPR:
9434 code = EQ;
9435 break;
9436 case NE_EXPR:
9437 code = NE;
9438 break;
9439 case LT_EXPR:
9440 if (integer_onep (arg1))
9441 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
9442 else
9443 code = unsignedp ? LTU : LT;
9444 break;
9445 case LE_EXPR:
9446 if (! unsignedp && integer_all_onesp (arg1))
9447 arg1 = integer_zero_node, code = LT;
9448 else
9449 code = unsignedp ? LEU : LE;
9450 break;
9451 case GT_EXPR:
9452 if (! unsignedp && integer_all_onesp (arg1))
9453 arg1 = integer_zero_node, code = GE;
9454 else
9455 code = unsignedp ? GTU : GT;
9456 break;
9457 case GE_EXPR:
9458 if (integer_onep (arg1))
9459 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
9460 else
9461 code = unsignedp ? GEU : GE;
9462 break;
9464 case UNORDERED_EXPR:
9465 code = UNORDERED;
9466 break;
9467 case ORDERED_EXPR:
9468 code = ORDERED;
9469 break;
9470 case UNLT_EXPR:
9471 code = UNLT;
9472 break;
9473 case UNLE_EXPR:
9474 code = UNLE;
9475 break;
9476 case UNGT_EXPR:
9477 code = UNGT;
9478 break;
9479 case UNGE_EXPR:
9480 code = UNGE;
9481 break;
9482 case UNEQ_EXPR:
9483 code = UNEQ;
9484 break;
9486 default:
9487 abort ();
9490 /* Put a constant second. */
9491 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
9493 tem = arg0; arg0 = arg1; arg1 = tem;
9494 code = swap_condition (code);
9497 /* If this is an equality or inequality test of a single bit, we can
9498 do this by shifting the bit being tested to the low-order bit and
9499 masking the result with the constant 1. If the condition was EQ,
9500 we xor it with 1. This does not require an scc insn and is faster
9501 than an scc insn even if we have it.
9503 The code to make this transformation was moved into fold_single_bit_test,
9504 so we just call into the folder and expand its result. */
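/* For example, (X & 4) != 0 becomes (X >> 2) & 1, needing only a shift
   and a mask; for (X & 4) == 0 the result is additionally XORed with 1.  */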
9506 if ((code == NE || code == EQ)
9507 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
9508 && integer_pow2p (TREE_OPERAND (arg0, 1)))
9510 tree type = (*lang_hooks.types.type_for_mode) (mode, unsignedp);
9511 return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
9512 arg0, arg1, type),
9513 target, VOIDmode, EXPAND_NORMAL);
9516 /* Now see if we are likely to be able to do this. Return if not. */
9517 if (! can_compare_p (code, operand_mode, ccp_store_flag))
9518 return 0;
9520 icode = setcc_gen_code[(int) code];
9521 if (icode == CODE_FOR_nothing
9522 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
9524 /* We can only do this if it is one of the special cases that
9525 can be handled without an scc insn. */
9526 if ((code == LT && integer_zerop (arg1))
9527 || (! only_cheap && code == GE && integer_zerop (arg1)))
9529 else if (BRANCH_COST >= 0
9530 && ! only_cheap && (code == NE || code == EQ)
9531 && TREE_CODE (type) != REAL_TYPE
9532 && ((abs_optab->handlers[(int) operand_mode].insn_code
9533 != CODE_FOR_nothing)
9534 || (ffs_optab->handlers[(int) operand_mode].insn_code
9535 != CODE_FOR_nothing)))
9537 else
9538 return 0;
9541 if (! get_subtarget (target)
9542 || GET_MODE (subtarget) != operand_mode)
9543 subtarget = 0;
9545 expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);
9547 if (target == 0)
9548 target = gen_reg_rtx (mode);
9550 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
9551 because, if emit_store_flag does anything, it will succeed and
9552 OP0 and OP1 will not be used subsequently. */
9554 result = emit_store_flag (target, code,
9555 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
9556 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
9557 operand_mode, unsignedp, 1);
9559 if (result)
9561 if (invert)
9562 result = expand_binop (mode, xor_optab, result, const1_rtx,
9563 result, 0, OPTAB_LIB_WIDEN);
9564 return result;
9567 /* If this failed, we have to do this with set/compare/jump/set code. */
9568 if (GET_CODE (target) != REG
9569 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
9570 target = gen_reg_rtx (GET_MODE (target));
9572 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
9573 result = compare_from_rtx (op0, op1, code, unsignedp,
9574 operand_mode, NULL_RTX);
9575 if (GET_CODE (result) == CONST_INT)
9576 return (((result == const0_rtx && ! invert)
9577 || (result != const0_rtx && invert))
9578 ? const0_rtx : const1_rtx);
9580 /* The code of RESULT may not match CODE if compare_from_rtx
9581 decided to swap its operands and reverse the original code.
9583 We know that compare_from_rtx returns either a CONST_INT or
9584 a new comparison code, so it is safe to just extract the
9585 code from RESULT. */
9586 code = GET_CODE (result);
9588 label = gen_label_rtx ();
9589 if (bcc_gen_fctn[(int) code] == 0)
9590 abort ();
9592 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
9593 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
9594 emit_label (label);
9596 return target;
9600 /* Stubs in case we haven't got a casesi insn. */
9601 #ifndef HAVE_casesi
9602 # define HAVE_casesi 0
9603 # define gen_casesi(a, b, c, d, e) (0)
9604 # define CODE_FOR_casesi CODE_FOR_nothing
9605 #endif
9607 /* If the machine does not have a case insn that compares the bounds,
9608 this means extra overhead for dispatch tables, which raises the
9609 threshold for using them. */
9610 #ifndef CASE_VALUES_THRESHOLD
9611 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
9612 #endif /* CASE_VALUES_THRESHOLD */
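/* So, roughly, a switch needs at least 4 distinct case values (5 without
   a casesi pattern) before a dispatch table is considered; smaller
   switches are compiled to compare-and-branch sequences instead.  */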
9614 unsigned int
9615 case_values_threshold (void)
9617 return CASE_VALUES_THRESHOLD;
9620 /* Attempt to generate a casesi instruction. Returns 1 if successful,
9621 0 otherwise (i.e. if there is no casesi instruction). */
9622 int
9623 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
9624 rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
9626 enum machine_mode index_mode = SImode;
9627 int index_bits = GET_MODE_BITSIZE (index_mode);
9628 rtx op1, op2, index;
9629 enum machine_mode op_mode;
9631 if (! HAVE_casesi)
9632 return 0;
9634 /* Convert the index to SImode. */
9635 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
9637 enum machine_mode omode = TYPE_MODE (index_type);
9638 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
9640 /* We must handle the endpoints in the original mode. */
9641 index_expr = build (MINUS_EXPR, index_type,
9642 index_expr, minval);
9643 minval = integer_zero_node;
9644 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9645 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
9646 omode, 1, default_label);
9647 /* Now we can safely truncate. */
9648 index = convert_to_mode (index_mode, index, 0);
9650 else
9652 if (TYPE_MODE (index_type) != index_mode)
9654 index_expr = convert ((*lang_hooks.types.type_for_size)
9655 (index_bits, 0), index_expr);
9656 index_type = TREE_TYPE (index_expr);
9659 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9661 emit_queue ();
9662 index = protect_from_queue (index, 0);
9663 do_pending_stack_adjust ();
9665 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
9666 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
9667 (index, op_mode))
9668 index = copy_to_mode_reg (op_mode, index);
9670 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
9672 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
9673 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
9674 op1, TREE_UNSIGNED (TREE_TYPE (minval)));
9675 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
9676 (op1, op_mode))
9677 op1 = copy_to_mode_reg (op_mode, op1);
9679 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
9681 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
9682 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
9683 op2, TREE_UNSIGNED (TREE_TYPE (range)));
9684 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
9685 (op2, op_mode))
9686 op2 = copy_to_mode_reg (op_mode, op2);
9688 emit_jump_insn (gen_casesi (index, op1, op2,
9689 table_label, default_label));
9690 return 1;
9693 /* Attempt to generate a tablejump instruction; same concept. */
9694 #ifndef HAVE_tablejump
9695 #define HAVE_tablejump 0
9696 #define gen_tablejump(x, y) (0)
9697 #endif
9699 /* Subroutine of the next function.
9701 INDEX is the value being switched on, with the lowest value
9702 in the table already subtracted.
9703 MODE is its expected mode (needed if INDEX is constant).
9704 RANGE is the length of the jump table.
9705 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
9707 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
9708 index value is out of range. */
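/* For example, for a switch whose case values run from 10 to 20 the
   caller passes INDEX = i - 10 and RANGE = 10; the single unsigned
   comparison (unsigned) (i - 10) > 10 below then catches both i < 10
   and i > 20 and branches to DEFAULT_LABEL.  */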
9710 static void
9711 do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
9712 rtx default_label)
9714 rtx temp, vector;
9716 if (INTVAL (range) > cfun->max_jumptable_ents)
9717 cfun->max_jumptable_ents = INTVAL (range);
9719 /* Do an unsigned comparison (in the proper mode) between the index
9720 expression and the value which represents the length of the range.
9721 Since we just finished subtracting the lower bound of the range
9722 from the index expression, this comparison allows us to simultaneously
9723 check that the original index expression value is both greater than
9724 or equal to the minimum value of the range and less than or equal to
9725 the maximum value of the range. */
9727 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
9728 default_label);
9730 /* If index is in range, it must fit in Pmode.
9731 Convert to Pmode so we can index with it. */
9732 if (mode != Pmode)
9733 index = convert_to_mode (Pmode, index, 1);
9735 /* Don't let a MEM slip through, because then INDEX that comes
9736 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
9737 and break_out_memory_refs will go to work on it and mess it up. */
9738 #ifdef PIC_CASE_VECTOR_ADDRESS
9739 if (flag_pic && GET_CODE (index) != REG)
9740 index = copy_to_mode_reg (Pmode, index);
9741 #endif
9743 /* If flag_force_addr were to affect this address
9744 it could interfere with the tricky assumptions made
9745 about addresses that contain label-refs,
9746 which may be valid only very near the tablejump itself. */
9747 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
9748 GET_MODE_SIZE, because this indicates how large insns are. The other
9749 uses should all be Pmode, because they are addresses. This code
9750 could fail if addresses and insns are not the same size. */
9751 index = gen_rtx_PLUS (Pmode,
9752 gen_rtx_MULT (Pmode, index,
9753 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
9754 gen_rtx_LABEL_REF (Pmode, table_label));
9755 #ifdef PIC_CASE_VECTOR_ADDRESS
9756 if (flag_pic)
9757 index = PIC_CASE_VECTOR_ADDRESS (index);
9758 else
9759 #endif
9760 index = memory_address_noforce (CASE_VECTOR_MODE, index);
9761 temp = gen_reg_rtx (CASE_VECTOR_MODE);
9762 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
9763 RTX_UNCHANGING_P (vector) = 1;
9764 MEM_NOTRAP_P (vector) = 1;
9765 convert_move (temp, vector, 0);
9767 emit_jump_insn (gen_tablejump (temp, table_label));
9769 /* If we are generating PIC code or if the table is PC-relative, the
9770 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
9771 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
9772 emit_barrier ();
9775 int
9776 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
9777 rtx table_label, rtx default_label)
9779 rtx index;
9781 if (! HAVE_tablejump)
9782 return 0;
9784 index_expr = fold (build (MINUS_EXPR, index_type,
9785 convert (index_type, index_expr),
9786 convert (index_type, minval)));
9787 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9788 emit_queue ();
9789 index = protect_from_queue (index, 0);
9790 do_pending_stack_adjust ();
9792 do_tablejump (index, TYPE_MODE (index_type),
9793 convert_modes (TYPE_MODE (index_type),
9794 TYPE_MODE (TREE_TYPE (range)),
9795 expand_expr (range, NULL_RTX,
9796 VOIDmode, 0),
9797 TREE_UNSIGNED (TREE_TYPE (range))),
9798 table_label, default_label);
9799 return 1;
9802 /* Nonzero if the mode is a valid vector mode for this architecture.
9803 This returns nonzero even if there is no hardware support for the
9804 vector mode, but we can emulate with narrower modes. */
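/* For example, V2SImode is valid either when the target supports it
   directly (VECTOR_MODE_SUPPORTED_P) or, failing that, when plain
   SImode moves exist, since the vector can then be emulated as a pair
   of SImode values.  */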
9806 int
9807 vector_mode_valid_p (enum machine_mode mode)
9809 enum mode_class class = GET_MODE_CLASS (mode);
9810 enum machine_mode innermode;
9812 /* Doh! What's going on? */
9813 if (class != MODE_VECTOR_INT
9814 && class != MODE_VECTOR_FLOAT)
9815 return 0;
9817 /* Hardware support. Woo hoo! */
9818 if (VECTOR_MODE_SUPPORTED_P (mode))
9819 return 1;
9821 innermode = GET_MODE_INNER (mode);
9823 /* We should probably return 1 if requesting V4DI and we have no DI,
9824 but do have V2DI; that case is probably very unlikely, though. */
9826 /* If we have support for the inner mode, we can safely emulate it.
9827 We may not have V2DI, but we can emulate with a pair of DIs. */
9828 return mov_optab->handlers[innermode].insn_code != CODE_FOR_nothing;
9831 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
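/* For example, a V4SImode VECTOR_CST listing the elements 1, 2 and 3
   becomes roughly (const_vector:V4SI [1 2 3 0]): elements missing from
   the list are filled with zero, and an all-zeros constant is returned
   directly as CONST0_RTX (V4SImode).  */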
9832 static rtx
9833 const_vector_from_tree (tree exp)
9835 rtvec v;
9836 int units, i;
9837 tree link, elt;
9838 enum machine_mode inner, mode;
9840 mode = TYPE_MODE (TREE_TYPE (exp));
9842 if (is_zeros_p (exp))
9843 return CONST0_RTX (mode);
9845 units = GET_MODE_NUNITS (mode);
9846 inner = GET_MODE_INNER (mode);
9848 v = rtvec_alloc (units);
9850 link = TREE_VECTOR_CST_ELTS (exp);
9851 for (i = 0; link; link = TREE_CHAIN (link), ++i)
9853 elt = TREE_VALUE (link);
9855 if (TREE_CODE (elt) == REAL_CST)
9856 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
9857 inner);
9858 else
9859 RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
9860 TREE_INT_CST_HIGH (elt),
9861 inner);
9864 /* Initialize remaining elements to 0. */
9865 for (; i < units; ++i)
9866 RTVEC_ELT (v, i) = CONST0_RTX (inner);
9868 return gen_rtx_raw_CONST_VECTOR (mode, v);
9871 #include "gt-expr.h"