/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "machmode.h"
#include "real.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "except.h"
#include "function.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "recog.h"
#include "reload.h"
#include "output.h"
#include "typeclass.h"
#include "toplev.h"
#include "ggc.h"
#include "langhooks.h"
#include "intl.h"
#include "tm_p.h"
#include "target.h"
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#ifndef PUSH_ARGS_REVERSED
#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED  /* If it's last to first.  */
#endif
#endif

#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

/* Convert defined/undefined to boolean.  */
#ifdef TARGET_MEM_FUNCTIONS
#undef TARGET_MEM_FUNCTIONS
#define TARGET_MEM_FUNCTIONS 1
#else
#define TARGET_MEM_FUNCTIONS 0
#endif

/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* Chain of pending expressions for PLACEHOLDER_EXPR to replace.  */
tree placeholder_list = 0;
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};

/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */

struct store_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
  void *constfundata;
  int reverse;
};
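
/* Illustrative sketch, not part of the original file: the shape of a
   CONSTFUN callback as described by the struct above.  It receives the
   opaque CONSTFUNDATA pointer, a byte OFFSET into the block, and the MODE
   of the piece being stored, and returns an rtx for the constant to store
   there.  clear_by_pieces_1 below is the real in-tree instance; the
   function name here is hypothetical.  */
#if 0
static rtx
example_constfun (void *data ATTRIBUTE_UNUSED, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
                  enum machine_mode mode ATTRIBUTE_UNUSED)
{
  /* Store zero bytes regardless of the piece's mode, exactly as
     clear_by_pieces does through clear_by_pieces_1.  */
  return const0_rtx;
}
#endif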
static rtx enqueue_insn (rtx, rtx);
static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
                                                     unsigned int);
static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
                              struct move_by_pieces *);
static bool block_move_libcall_safe_for_call_parm (void);
static bool emit_block_move_via_movstr (rtx, rtx, rtx, unsigned);
static rtx emit_block_move_via_libcall (rtx, rtx, rtx);
static tree emit_block_move_libcall_fn (int);
static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
                               struct store_by_pieces *);
static bool clear_storage_via_clrstr (rtx, rtx, unsigned);
static rtx clear_storage_via_libcall (rtx, rtx);
static tree clear_storage_libcall_fn (int);
static rtx compress_float_constant (rtx, rtx);
static rtx get_subtarget (rtx);
static int is_zeros_p (tree);
static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
                                     HOST_WIDE_INT, enum machine_mode,
                                     tree, tree, int, int);
static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
                        tree, enum machine_mode, int, tree, int);
static rtx var_rtx (tree);

static unsigned HOST_WIDE_INT highest_pow2_factor (tree);
static unsigned HOST_WIDE_INT highest_pow2_factor_for_type (tree, tree);

static int is_aligning_offset (tree, tree);
static rtx expand_increment (tree, int, int);
static void expand_operands (tree, tree, rtx, rtx*, rtx*,
                             enum expand_modifier);
static rtx do_store_flag (tree, rtx, enum machine_mode, int);
#ifdef PUSH_ROUNDING
static void emit_single_push_insn (enum machine_mode, rtx, tree);
#endif
static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
static rtx const_vector_from_tree (tree);
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* Record for each mode whether we can float-extend from memory.  */

static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];

/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
#endif

/* This macro is used to determine whether clear_by_pieces should be
   called to clear storage.  */
#ifndef CLEAR_BY_PIECES_P
#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) CLEAR_RATIO)
#endif

/* This macro is used to determine whether store_by_pieces should be
   called to "memset" storage with byte values other than zero, or
   to "memcpy" storage when the source is a constant string.  */
#ifndef STORE_BY_PIECES_P
#define STORE_BY_PIECES_P(SIZE, ALIGN) MOVE_BY_PIECES_P (SIZE, ALIGN)
#endif
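
/* Illustrative sketch, not part of the original file: the defaults above
   compare the insn count from move_by_pieces_ninsns against MOVE_RATIO /
   CLEAR_RATIO.  A target can pre-empt them by defining the macro itself in
   its target header; the threshold of two insns below is assumed purely
   for illustration.  */
#if 0
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) <= 2)
#endif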
/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];

/* These arrays record the insn_code of two different kinds of insns
   to perform block compares.  */
enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
enum insn_code cmpmem_optab[NUM_MACHINE_MODES];

/* Stack of EXPR_WITH_FILE_LOCATION nested expressions.  */
struct file_stack *expr_wfl_stack;

/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once (void)
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;
  rtx reg;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (VOIDmode, -1);

  insn = rtx_alloc (INSN);
  pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);
      PUT_MODE (reg, mode);

      /* See if there is some register that can be used in this mode and
         directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
        for (regno = 0; regno < FIRST_PSEUDO_REGISTER
             && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
             regno++)
          {
            if (! HARD_REGNO_MODE_OK (regno, mode))
              continue;

            REGNO (reg) = regno;

            SET_SRC (pat) = mem;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = mem1;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem1;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;
          }
    }

  mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum machine_mode srcmode;
      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
           srcmode = GET_MODE_WIDER_MODE (srcmode))
        {
          enum insn_code ic;

          ic = can_extend_p (mode, srcmode, 0);
          if (ic == CODE_FOR_nothing)
            continue;

          PUT_MODE (mem, srcmode);

          if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
            float_extend_from_mem[mode][srcmode] = true;
        }
    }
}
/* This is run at the start of compiling a function.  */

void
init_expr (void)
{
  cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
}

/* Small sanity check that the queue is empty at the end of a function.  */

void
finish_expr_for_function (void)
{
  if (pending_chain)
    abort ();
}
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (rtx var, rtx body)
{
  pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
                                  body, pending_chain);
  return pending_chain;
}
/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */

rtx
protect_from_queue (rtx x, int modify)
{
  RTX_CODE code = GET_CODE (x);

#if 0  /* A QUEUED can hang around after the queue is forced out.  */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
         use of autoincrement.  Make a copy of the contents of the memory
         location rather than a copy of the address, but not if the value is
         of mode BLKmode.  Don't modify X in place since it might be
         shared.  */
      if (code == MEM && GET_MODE (x) != BLKmode
          && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
        {
          rtx y = XEXP (x, 0);
          rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));

          if (QUEUED_INSN (y))
            {
              rtx temp = gen_reg_rtx (GET_MODE (x));

              emit_insn_before (gen_move_insn (temp, new),
                                QUEUED_INSN (y));
              return temp;
            }

          /* Copy the address into a pseudo, so that the returned value
             remains correct across calls to emit_queue.  */
          return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
        }

      /* Otherwise, recursively protect the subexpressions of all
         the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
        {
          rtx tem = protect_from_queue (XEXP (x, 0), 0);
          if (tem != XEXP (x, 0))
            {
              x = copy_rtx (x);
              XEXP (x, 0) = tem;
            }
        }
      else if (code == PLUS || code == MULT)
        {
          rtx new0 = protect_from_queue (XEXP (x, 0), 0);
          rtx new1 = protect_from_queue (XEXP (x, 1), 0);
          if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
            {
              x = copy_rtx (x);
              XEXP (x, 0) = new0;
              XEXP (x, 1) = new1;
            }
        }
      return x;
    }
  /* If the increment has not happened, use the variable itself.  Copy it
     into a new pseudo so that the value remains correct across calls to
     emit_queue.  */
  if (QUEUED_INSN (x) == 0)
    return copy_to_reg (QUEUED_VAR (x));
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
                    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}
/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

int
queued_subexp_p (rtx x)
{
  enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return (queued_subexp_p (XEXP (x, 0))
              || queued_subexp_p (XEXP (x, 1)));
    default:
      return 0;
    }
}
/* Perform all the pending incrementations.  */

void
emit_queue (void)
{
  rtx p;
  while ((p = pending_chain))
    {
      rtx body = QUEUED_BODY (p);

      switch (GET_CODE (body))
        {
        case INSN:
        case JUMP_INSN:
        case CALL_INSN:
        case CODE_LABEL:
        case BARRIER:
        case NOTE:
          QUEUED_INSN (p) = body;
          emit_insn (body);
          break;

#ifdef ENABLE_CHECKING
        case SEQUENCE:
          abort ();
          break;
#endif

        default:
          QUEUED_INSN (p) = emit_insn (body);
          break;
        }

      pending_chain = QUEUED_NEXT (p);
    }
}
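
/* Illustrative sketch, not part of the original file: the intended calling
   pattern for the queue machinery above when expanding something like
   "a[i++]".  The increment is queued, the pre-increment value is obtained
   through protect_from_queue, and emit_queue flushes the pending increment
   afterwards.  The helper name and the exact call sequence are assumptions
   for illustration only.  */
#if 0
static void
example_queued_increment (rtx i_rtx)
{
  /* Queue "I = I + 1" for later; the returned QUEUED rtx stands for I.  */
  rtx queued = enqueue_insn (i_rtx, gen_add2_insn (i_rtx, const1_rtx));
  /* Any use of the (pre-increment) value must be protected first.  */
  rtx pre_value = protect_from_queue (queued, 0);
  emit_move_insn (gen_reg_rtx (GET_MODE (i_rtx)), pre_value);
  /* Flush the queue; the increment insn is emitted here.  */
  emit_queue ();
}
#endif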
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (rtx to, rtx from, int unsignedp)
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
                              : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
          >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
        abort ();

      if (VECTOR_MODE_P (to_mode))
        from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
        to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);
      return;
    }

  if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
    {
      convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
      convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
      return;
    }

  if (to_real)
    {
      rtx value, insns;
      convert_optab tab;

      if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
        tab = sext_optab;
      else if (GET_MODE_PRECISION (from_mode) > GET_MODE_PRECISION (to_mode))
        tab = trunc_optab;
      else
        abort ();

      /* Try converting directly if the insn is supported.  */

      code = tab->handlers[to_mode][from_mode].insn_code;
      if (code != CODE_FOR_nothing)
        {
          emit_unop_insn (code, to, from,
                          tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
          return;
        }

      /* Otherwise use a libcall.  */
      libcall = tab->handlers[to_mode][from_mode].libfunc;

      if (!libcall)
        /* This conversion is not implemented yet.  */
        abort ();

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
                                       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value,
                          tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
                                                                       from)
                          : gen_rtx_FLOAT_EXTEND (to_mode, from));
      return;
    }

  /* Handle pointer conversion.  */                     /* SPEE 900220.  */
  /* Targets are expected to provide conversion insns between PxImode and
     xImode for all MODE_PARTIAL_INT modes they use, but no others.  */
  if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
    {
      enum machine_mode full_mode
        = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);

      if (trunc_optab->handlers[to_mode][full_mode].insn_code
          == CODE_FOR_nothing)
        abort ();

      if (full_mode != from_mode)
        from = convert_to_mode (full_mode, from, unsignedp);
      emit_unop_insn (trunc_optab->handlers[to_mode][full_mode].insn_code,
                      to, from, UNKNOWN);
      return;
    }
  if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
    {
      enum machine_mode full_mode
        = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);

      if (sext_optab->handlers[full_mode][from_mode].insn_code
          == CODE_FOR_nothing)
        abort ();

      emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
                      to, from, UNKNOWN);
      if (to_mode == full_mode)
        return;

      /* else proceed to integer conversions below.  */
      from_mode = full_mode;
    }

  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          /* If FROM is a SUBREG, put it into a register.  Do this
             so that we always generate the same set of insns for
             better cse'ing; if an intermediate assignment occurred,
             we won't be doing the operation directly on the SUBREG.  */
          if (optimize > 0 && GET_CODE (from) == SUBREG)
            from = force_reg (from_mode, from);
          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
               && ((code = can_extend_p (to_mode, word_mode, unsignedp))
                   != CODE_FOR_nothing))
        {
          if (GET_CODE (to) == REG)
            emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
          convert_move (gen_lowpart (word_mode, to), from, unsignedp);
          emit_unop_insn (code, to,
                          gen_lowpart (word_mode, to), equiv_code);
          return;
        }

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
         that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
        from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
        lowpart_mode = word_mode;
      else
        lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
        fill_value = const0_rtx;
      else
        {
#ifdef HAVE_slt
          if (HAVE_slt
              && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
              && STORE_FLAG_VALUE == -1)
            {
              emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
                             lowpart_mode, 0);
              fill_value = gen_reg_rtx (word_mode);
              emit_insn (gen_slt (fill_value));
            }
          else
#endif
            {
              fill_value
                = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
                                size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
                                NULL_RTX, 0);
              fill_value = convert_to_mode (word_mode, fill_value, 1);
            }
        }

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
        {
          int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
          rtx subword = operand_subword (to, index, 1, to_mode);

          if (subword == 0)
            abort ();

          if (fill_value != subword)
            emit_move_insn (subword, fill_value);
        }

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
                              gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
      return;
    }

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((GET_CODE (from) == MEM
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0)))
            || GET_CODE (from) == REG
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
                                GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0)))
            || GET_CODE (from) == REG
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
          && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
        from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          if (flag_force_mem)
            from = force_not_mem (from);

          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      else
        {
          enum machine_mode intermediate;
          rtx tmp;
          tree shift_amount;

          /* Search for a mode to convert via.  */
          for (intermediate = from_mode; intermediate != VOIDmode;
               intermediate = GET_MODE_WIDER_MODE (intermediate))
            if (((can_extend_p (to_mode, intermediate, unsignedp)
                  != CODE_FOR_nothing)
                 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
                     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
                                               GET_MODE_BITSIZE (intermediate))))
                && (can_extend_p (intermediate, from_mode, unsignedp)
                    != CODE_FOR_nothing))
              {
                convert_move (to, convert_to_mode (intermediate, from,
                                                   unsignedp), unsignedp);
                return;
              }

          /* No suitable intermediate mode.
             Generate what we need with shifts.  */
          shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
                                      - GET_MODE_BITSIZE (from_mode), 0);
          from = gen_lowpart (to_mode, force_reg (from_mode, from));
          tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
                              to, unsignedp);
          tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
                              to, unsignedp);
          if (tmp != to)
            emit_move_insn (to, tmp);
          return;
        }
    }

  /* Support special truncate insns for certain modes.  */
  if (trunc_optab->handlers[to_mode][from_mode].insn_code != CODE_FOR_nothing)
    {
      emit_unop_insn (trunc_optab->handlers[to_mode][from_mode].insn_code,
                      to, from, UNKNOWN);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.

     ??? Code above formerly short-circuited this, for most integer
     mode pairs, with a force_reg in from_mode followed by a recursive
     call to this routine.  Appears always to have been wrong.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}
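
/* Illustrative sketch, not part of the original file: a typical use of
   convert_move, widening a HImode pseudo into an SImode pseudo.  The helper
   name is hypothetical.  */
#if 0
static rtx
example_widen_hi_to_si (rtx hi_reg)
{
  rtx si_reg = gen_reg_rtx (SImode);
  /* UNSIGNEDP == 0 requests sign extension rather than zero extension.  */
  convert_move (si_reg, hi_reg, 0);
  return si_reg;
}
#endif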
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}
/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
{
  rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
          && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
        {
          int width = GET_MODE_BITSIZE (oldmode);

          /* We need to zero extend VAL.  */
          val &= ((HOST_WIDE_INT) 1 << width) - 1;
        }

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
          && GET_MODE_CLASS (oldmode) == MODE_INT
          && (GET_CODE (x) == CONST_DOUBLE
              || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
                  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
                       && direct_load[(int) mode])
                      || (GET_CODE (x) == REG
                          && (! HARD_REGISTER_P (x)
                              || HARD_REGNO_MODE_OK (REGNO (x), mode))
                          && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
                                                    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
         X does not need sign- or zero-extension.  This may not be
         the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
          && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
        {
          HOST_WIDE_INT val = INTVAL (x);
          int width = GET_MODE_BITSIZE (oldmode);

          /* We must sign or zero-extend in this case.  Start by
             zero-extending, then sign extend if we need to.  */
          val &= ((HOST_WIDE_INT) 1 << width) - 1;
          if (! unsignedp
              && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
            val |= (HOST_WIDE_INT) (-1) << width;

          return gen_int_mode (val, mode);
        }

      return gen_lowpart (mode, x);
    }

  /* Converting from an integer constant into mode is always equivalent to a
     subreg operation.  */
  if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
    {
      if (GET_MODE_BITSIZE (mode) != GET_MODE_BITSIZE (oldmode))
        abort ();
      return simplify_gen_subreg (mode, x, oldmode, 0);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
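
/* Illustrative sketch, not part of the original file: convert_modes is the
   form to use when X may be a mode-less CONST_INT; OLDMODE says how the
   constant should be interpreted.  Here 0xff is zero-extended from QImode
   into an SImode value.  The helper name is hypothetical.  */
#if 0
static rtx
example_zero_extend_byte_constant (void)
{
  return convert_modes (SImode, QImode, GEN_INT (0xff), 1);
}
#endif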
/* STORE_MAX_PIECES is the number of bytes at a time that we can
   store efficiently.  Due to internal GCC limitations, this is
   MOVE_MAX_PIECES limited by the number of bytes GCC can represent
   for an immediate constant.  */

#define STORE_MAX_PIECES  MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))

/* Determine whether the LEN bytes can be moved by using several move
   instructions.  Return nonzero if a call to move_by_pieces should
   succeed.  */

int
can_move_by_pieces (unsigned HOST_WIDE_INT len,
                    unsigned int align ATTRIBUTE_UNUSED)
{
  return MOVE_BY_PIECES_P (len, align);
}
/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).  The caller must pass FROM
   and TO through protect_from_queue before calling.

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum stack alignment we can assume.

   If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
   stpcpy.  */

rtx
move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
                unsigned int align, int endp)
{
  struct move_by_pieces data;
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));

  data.offset = 0;
  data.from_addr = from_addr;
  if (to)
    {
      to_addr = XEXP (to, 0);
      data.to = to;
      data.autinc_to
        = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
           || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
      data.reverse
        = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
    }
  else
    {
      to_addr = NULL_RTX;
      data.to = NULL_RTX;
      data.autinc_to = 1;
#ifdef STACK_GROWS_DOWNWARD
      data.reverse = 1;
#else
      data.reverse = 0;
#endif
    }
  data.to_addr = to_addr;
  data.from = from;
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
        {
          data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
          data.autinc_from = 1;
          data.explicit_inc_from = -1;
        }
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
        {
          data.from_addr = copy_addr_to_reg (from_addr);
          data.autinc_from = 1;
          data.explicit_inc_from = 1;
        }
      if (!data.autinc_from && CONSTANT_P (from_addr))
        data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
          data.autinc_to = 1;
          data.explicit_inc_to = -1;
        }
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_addr_to_reg (to_addr);
          data.autinc_to = 1;
          data.explicit_inc_to = 1;
        }
      if (!data.autinc_to && CONSTANT_P (to_addr))
        data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len > 0)
    abort ();

  if (endp)
    {
      rtx to1;

      if (data.reverse)
        abort ();
      if (data.autinc_to)
        {
          if (endp == 2)
            {
              if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
                emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
              else
                data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
                                                                -1));
            }
          to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
                                           data.offset);
        }
      else
        {
          if (endp == 2)
            --data.offset;
          to1 = adjust_address (data.to, QImode, data.offset);
        }
      return to1;
    }
  else
    return data.to;
}
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */

static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align)
{
  unsigned HOST_WIDE_INT n_insns = 0;
  unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  if (l)
    abort ();
  return n_insns;
}
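
/* Worked example, not part of the original file: on a typical 32-bit target
   with MOVE_MAX == 4 and sufficient alignment, move_by_pieces_ninsns (11, 32)
   counts 2 SImode moves (8 bytes), then 1 HImode move (2 bytes), then 1
   QImode move (1 byte), i.e. 4 insns in total; MOVE_BY_PIECES_P then
   compares that count against MOVE_RATIO.  The numbers are assumed for
   illustration only.  */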
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
                  struct move_by_pieces *data)
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
    {
      if (data->reverse)
        data->offset -= size;

      if (data->to)
        {
          if (data->autinc_to)
            to1 = adjust_automodify_address (data->to, mode, data->to_addr,
                                             data->offset);
          else
            to1 = adjust_address (data->to, mode, data->offset);
        }

      if (data->autinc_from)
        from1 = adjust_automodify_address (data->from, mode, data->from_addr,
                                           data->offset);
      else
        from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
        emit_insn (gen_add2_insn (data->to_addr,
                                  GEN_INT (-(HOST_WIDE_INT)size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
        emit_insn (gen_add2_insn (data->from_addr,
                                  GEN_INT (-(HOST_WIDE_INT)size)));

      if (data->to)
        emit_insn ((*genfun) (to1, from1));
      else
        {
#ifdef PUSH_ROUNDING
          emit_single_push_insn (mode, from1, NULL);
#else
          abort ();
#endif
        }

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
        emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
        data->offset += size;

      data->len -= size;
    }
}
/* Emit code to move a block Y to a block X.  This may be done with
   string-move instructions, with multiple scalar move instructions,
   or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.
   METHOD describes what kind of copy this is, and what mechanisms may be used.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
{
  bool may_use_call;
  rtx retval = 0;
  unsigned int align;

  switch (method)
    {
    case BLOCK_OP_NORMAL:
      may_use_call = true;
      break;

    case BLOCK_OP_CALL_PARM:
      may_use_call = block_move_libcall_safe_for_call_parm ();

      /* Make inhibit_defer_pop nonzero around the library call
         to force it to pop the arguments right away.  */
      NO_DEFER_POP;
      break;

    case BLOCK_OP_NO_LIBCALL:
      may_use_call = false;
      break;

    default:
      abort ();
    }

  align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));

  if (GET_MODE (x) != BLKmode)
    abort ();
  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();
  if (size == 0)
    abort ();

  /* Set MEM_SIZE as appropriate for this block copy.  The main place this
     can be incorrect is coming from __builtin_memcpy.  */
  if (GET_CODE (size) == CONST_INT)
    {
      if (INTVAL (size) == 0)
        return 0;

      x = shallow_copy_rtx (x);
      y = shallow_copy_rtx (y);
      set_mem_size (x, size);
      set_mem_size (y, size);
    }

  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align, 0);
  else if (emit_block_move_via_movstr (x, y, size, align))
    ;
  else if (may_use_call)
    retval = emit_block_move_via_libcall (x, y, size);
  else
    emit_block_move_via_loop (x, y, size, align);

  if (method == BLOCK_OP_CALL_PARM)
    OK_DEFER_POP;

  return retval;
}
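
/* Illustrative sketch, not part of the original file: a typical caller of
   emit_block_move, copying NBYTES between two BLKmode MEMs.  The helper
   name is hypothetical.  */
#if 0
static void
example_copy_block (rtx dst_mem, rtx src_mem, HOST_WIDE_INT nbytes)
{
  /* BLOCK_OP_NORMAL allows any strategy: move_by_pieces, a movstr
     pattern, or a memcpy/bcopy libcall.  */
  emit_block_move (dst_mem, src_mem, GEN_INT (nbytes), BLOCK_OP_NORMAL);
}
#endif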
/* A subroutine of emit_block_move.  Returns true if calling the
   block move libcall will not clobber any parameters which may have
   already been placed on the stack.  */

static bool
block_move_libcall_safe_for_call_parm (void)
{
  /* If arguments are pushed on the stack, then they're safe.  */
  if (PUSH_ARGS)
    return true;

  /* If registers go on the stack anyway, any argument is sure to clobber
     an outgoing argument.  */
#if defined (REG_PARM_STACK_SPACE) && defined (OUTGOING_REG_PARM_STACK_SPACE)
  {
    tree fn = emit_block_move_libcall_fn (false);
    (void) fn;
    if (REG_PARM_STACK_SPACE (fn) != 0)
      return false;
  }
#endif

  /* If any argument goes in memory, then it might clobber an outgoing
     argument.  */
  {
    CUMULATIVE_ARGS args_so_far;
    tree fn, arg;

    fn = emit_block_move_libcall_fn (false);
    INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);

    arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
    for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
      {
        enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
        rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
        if (!tmp || !REG_P (tmp))
          return false;
#ifdef FUNCTION_ARG_PARTIAL_NREGS
        if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode,
                                        NULL_TREE, 1))
          return false;
#endif
        FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
      }
  }
  return true;
}
/* A subroutine of emit_block_move.  Expand a movstr pattern;
   return true if successful.  */

static bool
emit_block_move_via_movstr (rtx x, rtx y, rtx size, unsigned int align)
{
  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
  int save_volatile_ok = volatile_ok;
  enum machine_mode mode;

  /* Since this is a move insn, we don't care about volatility.  */
  volatile_ok = 1;

  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum insn_code code = movstr_optab[(int) mode];
      insn_operand_predicate_fn pred;

      if (code != CODE_FOR_nothing
          /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
             here because if SIZE is less than the mode mask, as it is
             returned by the macro, it will definitely be less than the
             actual mode mask.  */
          && ((GET_CODE (size) == CONST_INT
               && ((unsigned HOST_WIDE_INT) INTVAL (size)
                   <= (GET_MODE_MASK (mode) >> 1)))
              || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
          && ((pred = insn_data[(int) code].operand[0].predicate) == 0
              || (*pred) (x, BLKmode))
          && ((pred = insn_data[(int) code].operand[1].predicate) == 0
              || (*pred) (y, BLKmode))
          && ((pred = insn_data[(int) code].operand[3].predicate) == 0
              || (*pred) (opalign, VOIDmode)))
        {
          rtx op2;
          rtx last = get_last_insn ();
          rtx pat;

          op2 = convert_to_mode (mode, size, 1);
          pred = insn_data[(int) code].operand[2].predicate;
          if (pred != 0 && ! (*pred) (op2, mode))
            op2 = copy_to_mode_reg (mode, op2);

          /* ??? When called via emit_block_move_for_call, it'd be
             nice if there were some way to inform the backend, so
             that it doesn't fail the expansion because it thinks
             emitting the libcall would be more efficient.  */

          pat = GEN_FCN ((int) code) (x, y, op2, opalign);
          if (pat)
            {
              emit_insn (pat);
              volatile_ok = save_volatile_ok;
              return true;
            }
          else
            delete_insns_since (last);
        }
    }

  volatile_ok = save_volatile_ok;
  return false;
}
/* A subroutine of emit_block_move.  Expand a call to memcpy or bcopy.
   Return the return value from memcpy, 0 otherwise.  */

static rtx
emit_block_move_via_libcall (rtx dst, rtx src, rtx size)
{
  rtx dst_addr, src_addr;
  tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
  enum machine_mode size_mode;
  rtx retval;

  /* DST, SRC, or SIZE may have been passed through protect_from_queue.

     It is unsafe to save the value generated by protect_from_queue and reuse
     it later.  Consider what happens if emit_queue is called before the
     return value from protect_from_queue is used.

     Expansion of the CALL_EXPR below will call emit_queue before we are
     finished emitting RTL for argument setup.  So if we are not careful we
     could get the wrong value for an argument.

     To avoid this problem we go ahead and emit code to copy the addresses of
     DST and SRC and SIZE into new pseudos.  We can then place those new
     pseudos into an RTL_EXPR and use them later, even after a call to
     emit_queue.

     Note this is not strictly needed for library calls since they do not call
     emit_queue before loading their arguments.  However, we may need to have
     library calls call emit_queue in the future since failing to do so could
     cause problems for targets which define SMALL_REGISTER_CLASSES and pass
     arguments in registers.  */

  dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
  src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));

  dst_addr = convert_memory_address (ptr_mode, dst_addr);
  src_addr = convert_memory_address (ptr_mode, src_addr);

  dst_tree = make_tree (ptr_type_node, dst_addr);
  src_tree = make_tree (ptr_type_node, src_addr);

  if (TARGET_MEM_FUNCTIONS)
    size_mode = TYPE_MODE (sizetype);
  else
    size_mode = TYPE_MODE (unsigned_type_node);

  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memcpy in this context.  This could be a user call to memcpy and
     the user may wish to examine the return value from memcpy.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.

     For convenience, we generate the call to bcopy this way as well.  */

  if (TARGET_MEM_FUNCTIONS)
    size_tree = make_tree (sizetype, size);
  else
    size_tree = make_tree (unsigned_type_node, size);

  fn = emit_block_move_libcall_fn (true);
  arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
  if (TARGET_MEM_FUNCTIONS)
    {
      arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
      arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
    }
  else
    {
      arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
      arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
    }

  /* Now we have to build up the CALL_EXPR itself.  */
  call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
  call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
                     call_expr, arg_list, NULL_TREE);

  retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);

  /* If we are initializing a readonly value, show the above call clobbered
     it.  Otherwise, a load from it may erroneously be hoisted from a loop, or
     the delay slot scheduler might overlook conflicts and take nasty
     decisions.  */
  if (RTX_UNCHANGING_P (dst))
    add_function_usage_to
      (last_call_insn (), gen_rtx_EXPR_LIST (VOIDmode,
                                             gen_rtx_CLOBBER (VOIDmode, dst),
                                             NULL_RTX));

  return TARGET_MEM_FUNCTIONS ? retval : NULL_RTX;
}
/* A subroutine of emit_block_move_via_libcall.  Create the tree node
   for the function we use for block copies.  The first time FOR_CALL
   is true, we call assemble_external.  */

static GTY(()) tree block_move_fn;

void
init_block_move_fn (const char *asmspec)
{
  if (!block_move_fn)
    {
      tree args, fn;

      if (TARGET_MEM_FUNCTIONS)
        {
          fn = get_identifier ("memcpy");
          args = build_function_type_list (ptr_type_node, ptr_type_node,
                                           const_ptr_type_node, sizetype,
                                           NULL_TREE);
        }
      else
        {
          fn = get_identifier ("bcopy");
          args = build_function_type_list (void_type_node, const_ptr_type_node,
                                           ptr_type_node, unsigned_type_node,
                                           NULL_TREE);
        }

      fn = build_decl (FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;

      block_move_fn = fn;
    }

  if (asmspec)
    {
      SET_DECL_RTL (block_move_fn, NULL_RTX);
      SET_DECL_ASSEMBLER_NAME (block_move_fn, get_identifier (asmspec));
    }
}

static tree
emit_block_move_libcall_fn (int for_call)
{
  static bool emitted_extern;

  if (!block_move_fn)
    init_block_move_fn (NULL);

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (block_move_fn, NULL);
      assemble_external (block_move_fn);
    }

  return block_move_fn;
}
/* A subroutine of emit_block_move.  Copy the data via an explicit
   loop.  This is used only when libcalls are forbidden.  */
/* ??? It'd be nice to copy in hunks larger than QImode.  */

static void
emit_block_move_via_loop (rtx x, rtx y, rtx size,
                          unsigned int align ATTRIBUTE_UNUSED)
{
  rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
  enum machine_mode iter_mode;

  iter_mode = GET_MODE (size);
  if (iter_mode == VOIDmode)
    iter_mode = word_mode;

  top_label = gen_label_rtx ();
  cmp_label = gen_label_rtx ();
  iter = gen_reg_rtx (iter_mode);

  emit_move_insn (iter, const0_rtx);

  x_addr = force_operand (XEXP (x, 0), NULL_RTX);
  y_addr = force_operand (XEXP (y, 0), NULL_RTX);
  do_pending_stack_adjust ();

  emit_note (NOTE_INSN_LOOP_BEG);

  emit_jump (cmp_label);
  emit_label (top_label);

  tmp = convert_modes (Pmode, iter_mode, iter, true);
  x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
  y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
  x = change_address (x, QImode, x_addr);
  y = change_address (y, QImode, y_addr);

  emit_move_insn (x, y);

  tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
                             true, OPTAB_LIB_WIDEN);
  if (tmp != iter)
    emit_move_insn (iter, tmp);

  emit_note (NOTE_INSN_LOOP_CONT);
  emit_label (cmp_label);

  emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
                           true, top_label);

  emit_note (NOTE_INSN_LOOP_END);
}
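
/* Illustrative sketch, not part of the original file: the RTL emitted by
   emit_block_move_via_loop corresponds roughly to this C, with ITER kept
   in ITER_MODE and all accesses byte-sized:

       iter = 0;
       goto cmp;
     top:
       x[iter] = y[iter];
       iter = iter + 1;
     cmp:
       if (iter < size)
         goto top;
*/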
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
{
  int i;
#ifdef HAVE_load_multiple
  rtx pat;
  rtx last;
#endif

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
                               GEN_INT (nregs));
      if (pat)
        {
          emit_insn (pat);
          return;
        }
      else
        delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
                    operand_subword_force (x, i, mode));
}
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_from_reg (int regno, rtx x, int nregs)
{
  int i;

  if (nregs == 0)
    return;

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      rtx last = get_last_insn ();
      rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
                                    GEN_INT (nregs));
      if (pat)
        {
          emit_insn (pat);
          return;
        }
      else
        delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      if (tem == 0)
        abort ();

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}
/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
   ORIG, where ORIG is a non-consecutive group of registers represented by
   a PARALLEL.  The clone is identical to the original except in that the
   original set of registers is replaced by a new set of pseudo registers.
   The new set has the same modes as the original set.  */

rtx
gen_group_rtx (rtx orig)
{
  int i, length;
  rtx *tmps;

  if (GET_CODE (orig) != PARALLEL)
    abort ();

  length = XVECLEN (orig, 0);
  tmps = alloca (sizeof (rtx) * length);

  /* Skip a NULL entry in first slot.  */
  i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;

  if (i)
    tmps[0] = 0;

  for (; i < length; i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
      rtx offset = XEXP (XVECEXP (orig, 0, i), 1);

      tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
    }

  return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
}
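
/* Illustrative sketch, not part of the original file: the PARALLEL shape
   these group routines work with.  A 12-byte structure returned in two
   registers might be described as (register numbers and modes assumed for
   illustration only):

       (parallel [(expr_list (reg:DI 3) (const_int 0))
                  (expr_list (reg:SI 5) (const_int 8))])

   Each element pairs a register with the byte offset it covers in the
   original block; a null first operand in element 0 marks a value that
   also lives partly on the stack.  gen_group_rtx rebuilds such a PARALLEL
   with fresh pseudos in place of the hard registers.  */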
1805 /* Emit code to move a block ORIG_SRC of type TYPE to a block DST,
1806 where DST is non-consecutive registers represented by a PARALLEL.
1807 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
1808 if not known. */
1810 void
1811 emit_group_load (rtx dst, rtx orig_src, tree type ATTRIBUTE_UNUSED, int ssize)
1813 rtx *tmps, src;
1814 int start, i;
1816 if (GET_CODE (dst) != PARALLEL)
1817 abort ();
1819 /* Check for a NULL entry, used to indicate that the parameter goes
1820 both on the stack and in registers. */
1821 if (XEXP (XVECEXP (dst, 0, 0), 0))
1822 start = 0;
1823 else
1824 start = 1;
1826 tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));
1828 /* Process the pieces. */
1829 for (i = start; i < XVECLEN (dst, 0); i++)
1831 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1832 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1833 unsigned int bytelen = GET_MODE_SIZE (mode);
1834 int shift = 0;
1836 /* Handle trailing fragments that run over the size of the struct. */
1837 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1839 /* Arrange to shift the fragment to where it belongs.
1840 extract_bit_field loads to the lsb of the reg. */
1841 if (
1842 #ifdef BLOCK_REG_PADDING
1843 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1844 == (BYTES_BIG_ENDIAN ? upward : downward)
1845 #else
1846 BYTES_BIG_ENDIAN
1847 #endif
1849 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1850 bytelen = ssize - bytepos;
1851 if (bytelen <= 0)
1852 abort ();
1855 /* If we won't be loading directly from memory, protect the real source
1856 from strange tricks we might play; but make sure that the source can
1857 be loaded directly into the destination. */
1858 src = orig_src;
1859 if (GET_CODE (orig_src) != MEM
1860 && (!CONSTANT_P (orig_src)
1861 || (GET_MODE (orig_src) != mode
1862 && GET_MODE (orig_src) != VOIDmode)))
1864 if (GET_MODE (orig_src) == VOIDmode)
1865 src = gen_reg_rtx (mode);
1866 else
1867 src = gen_reg_rtx (GET_MODE (orig_src));
1869 emit_move_insn (src, orig_src);
1872 /* Optimize the access just a bit. */
1873 if (GET_CODE (src) == MEM
1874 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1875 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
1876 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1877 && bytelen == GET_MODE_SIZE (mode))
1879 tmps[i] = gen_reg_rtx (mode);
1880 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
1882 else if (GET_CODE (src) == CONCAT)
1884 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1885 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1887 if ((bytepos == 0 && bytelen == slen0)
1888 || (bytepos != 0 && bytepos + bytelen <= slen))
1890 /* The following assumes that the concatenated objects all
1891 have the same size. In this case, a simple calculation
1892 can be used to determine the object and the bit field
1893 to be extracted. */
1894 tmps[i] = XEXP (src, bytepos / slen0);
1895 if (! CONSTANT_P (tmps[i])
1896 && (GET_CODE (tmps[i]) != REG || GET_MODE (tmps[i]) != mode))
1897 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
1898 (bytepos % slen0) * BITS_PER_UNIT,
1899 1, NULL_RTX, mode, mode, ssize);
1901 else if (bytepos == 0)
1903 rtx mem = assign_stack_temp (GET_MODE (src), slen, 0);
1904 emit_move_insn (mem, src);
1905 tmps[i] = adjust_address (mem, mode, 0);
1907 else
1908 abort ();
1910 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1911 SIMD register, which is currently broken. Until we get GCC
1912 to emit proper RTL for these cases, let's dump to memory. */
1913 else if (VECTOR_MODE_P (GET_MODE (dst))
1914 && GET_CODE (src) == REG)
1916 int slen = GET_MODE_SIZE (GET_MODE (src));
1917 rtx mem;
1919 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1920 emit_move_insn (mem, src);
1921 tmps[i] = adjust_address (mem, mode, (int) bytepos);
1923 else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
1924 && XVECLEN (dst, 0) > 1)
1925 tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
1926 else if (CONSTANT_P (src)
1927 || (GET_CODE (src) == REG && GET_MODE (src) == mode))
1928 tmps[i] = src;
1929 else
1930 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1931 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
1932 mode, mode, ssize);
1934 if (shift)
1935 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
1936 tmps[i], 0, OPTAB_WIDEN);
1939 emit_queue ();
1941 /* Copy the extracted pieces into the proper (probable) hard regs. */
1942 for (i = start; i < XVECLEN (dst, 0); i++)
1943 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
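/* Editor's note -- not part of the original file.  A worked example of the
   trailing-fragment handling above, assuming a big-endian target without
   BLOCK_REG_PADDING: for a 6-byte block (SSIZE == 6) whose second piece is
   SImode at BYTEPOS == 4, BYTELEN starts at 4 and overruns the block by 2
   bytes, so SHIFT becomes (4 - (6 - 4)) * BITS_PER_UNIT == 16 and BYTELEN
   is clipped to 2; the two bytes that extract_bit_field loaded at the lsb
   are then shifted left 16 bits into the more significant end of the
   piece.  */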
1946 /* Emit code to move a block SRC to block DST, where SRC and DST are
1947 non-consecutive groups of registers, each represented by a PARALLEL. */
1949 void
1950 emit_group_move (rtx dst, rtx src)
1952 int i;
1954 if (GET_CODE (src) != PARALLEL
1955 || GET_CODE (dst) != PARALLEL
1956 || XVECLEN (src, 0) != XVECLEN (dst, 0))
1957 abort ();
1959 /* Skip first entry if NULL. */
1960 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1961 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1962 XEXP (XVECEXP (src, 0, i), 0));
1965 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1966 where SRC is non-consecutive registers represented by a PARALLEL.
1967 SSIZE represents the total size of block ORIG_DST, or -1 if not
1968 known. */
1970 void
1971 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
1973 rtx *tmps, dst;
1974 int start, i;
1976 if (GET_CODE (src) != PARALLEL)
1977 abort ();
1979 /* Check for a NULL entry, used to indicate that the parameter goes
1980 both on the stack and in registers. */
1981 if (XEXP (XVECEXP (src, 0, 0), 0))
1982 start = 0;
1983 else
1984 start = 1;
1986 tmps = alloca (sizeof (rtx) * XVECLEN (src, 0));
1988 /* Copy the (probable) hard regs into pseudos. */
1989 for (i = start; i < XVECLEN (src, 0); i++)
1991 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1992 tmps[i] = gen_reg_rtx (GET_MODE (reg));
1993 emit_move_insn (tmps[i], reg);
1995 emit_queue ();
1997 /* If we won't be storing directly into memory, protect the real destination
1998 from strange tricks we might play. */
1999 dst = orig_dst;
2000 if (GET_CODE (dst) == PARALLEL)
2002 rtx temp;
2004 /* We can get a PARALLEL dst if there is a conditional expression in
2005 a return statement. In that case, the dst and src are the same,
2006 so no action is necessary. */
2007 if (rtx_equal_p (dst, src))
2008 return;
2010 /* It is unclear if we can ever reach here, but we may as well handle
2011 it. Allocate a temporary, and split this into a store/load to/from
2012 the temporary. */
2014 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2015 emit_group_store (temp, src, type, ssize);
2016 emit_group_load (dst, temp, type, ssize);
2017 return;
2019 else if (GET_CODE (dst) != MEM && GET_CODE (dst) != CONCAT)
2021 dst = gen_reg_rtx (GET_MODE (orig_dst));
2022 /* Make life a bit easier for combine. */
2023 emit_move_insn (dst, CONST0_RTX (GET_MODE (orig_dst)));
2026 /* Process the pieces. */
2027 for (i = start; i < XVECLEN (src, 0); i++)
2029 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2030 enum machine_mode mode = GET_MODE (tmps[i]);
2031 unsigned int bytelen = GET_MODE_SIZE (mode);
2032 rtx dest = dst;
2034 /* Handle trailing fragments that run over the size of the struct. */
2035 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2037 /* store_bit_field always takes its value from the lsb.
2038 Move the fragment to the lsb if it's not already there. */
2039 if (
2040 #ifdef BLOCK_REG_PADDING
2041 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2042 == (BYTES_BIG_ENDIAN ? upward : downward)
2043 #else
2044 BYTES_BIG_ENDIAN
2045 #endif
2048 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2049 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2050 tmps[i], 0, OPTAB_WIDEN);
2052 bytelen = ssize - bytepos;
2055 if (GET_CODE (dst) == CONCAT)
2057 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2058 dest = XEXP (dst, 0);
2059 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2061 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2062 dest = XEXP (dst, 1);
2064 else if (bytepos == 0 && XVECLEN (src, 0))
2066 dest = assign_stack_temp (GET_MODE (dest),
2067 GET_MODE_SIZE (GET_MODE (dest)), 0);
2068 emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
2069 tmps[i]);
2070 dst = dest;
2071 break;
2073 else
2074 abort ();
2077 /* Optimize the access just a bit. */
2078 if (GET_CODE (dest) == MEM
2079 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2080 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2081 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2082 && bytelen == GET_MODE_SIZE (mode))
2083 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2084 else
2085 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2086 mode, tmps[i], ssize);
2089 emit_queue ();
2091 /* Copy from the pseudo into the (probable) hard reg. */
2092 if (orig_dst != dst)
2093 emit_move_insn (orig_dst, dst);
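/* Editor's illustration -- not part of the original file.  A minimal,
   hypothetical use of emit_group_store: spilling a PARALLEL return value
   into a BLKmode stack temporary of the value's type.  */
#if 0
static rtx
example_spill_group (rtx parallel_retval, tree type)
{
  HOST_WIDE_INT size = int_size_in_bytes (type);
  rtx slot = assign_stack_temp (BLKmode, size, 0);

  emit_group_store (slot, parallel_retval, type, (int) size);
  return slot;
}
#endif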
2096 /* Generate code to copy a BLKmode object of TYPE out of a
2097 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2098 is null, a stack temporary is created. TGTBLK is returned.
2100 The purpose of this routine is to handle functions that return
2101 BLKmode structures in registers. Some machines (the PA for example)
2102 want to return all small structures in registers regardless of the
2103 structure's alignment. */
2106 copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
2108 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2109 rtx src = NULL, dst = NULL;
2110 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2111 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2113 if (tgtblk == 0)
2115 tgtblk = assign_temp (build_qualified_type (type,
2116 (TYPE_QUALS (type)
2117 | TYPE_QUAL_CONST)),
2118 0, 1, 1);
2119 preserve_temp_slots (tgtblk);
2122 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2123 into a new pseudo which is a full word. */
2125 if (GET_MODE (srcreg) != BLKmode
2126 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2127 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2129 /* If the structure doesn't take up a whole number of words, see whether
2130 SRCREG is padded on the left or on the right. If it's on the left,
2131 set PADDING_CORRECTION to the number of bits to skip.
2133 In most ABIs, the structure will be returned at the least significant end of
2134 the register, which translates to right padding on little-endian
2135 targets and left padding on big-endian targets. The opposite
2136 holds if the structure is returned at the most significant
2137 end of the register. */
2138 if (bytes % UNITS_PER_WORD != 0
2139 && (targetm.calls.return_in_msb (type)
2140 ? !BYTES_BIG_ENDIAN
2141 : BYTES_BIG_ENDIAN))
2142 padding_correction
2143 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2145 /* Copy the structure BITSIZE bits at a time.
2147 We could probably emit more efficient code for machines which do not use
2148 strict alignment, but it doesn't seem worth the effort at the current
2149 time. */
2150 for (bitpos = 0, xbitpos = padding_correction;
2151 bitpos < bytes * BITS_PER_UNIT;
2152 bitpos += bitsize, xbitpos += bitsize)
2154 /* We need a new source operand each time xbitpos is on a
2155 word boundary and when xbitpos == padding_correction
2156 (the first time through). */
2157 if (xbitpos % BITS_PER_WORD == 0
2158 || xbitpos == padding_correction)
2159 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2160 GET_MODE (srcreg));
2162 /* We need a new destination operand each time bitpos is on
2163 a word boundary. */
2164 if (bitpos % BITS_PER_WORD == 0)
2165 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2167 /* Use xbitpos for the source extraction (right justified) and
2168 bitpos for the destination store (left justified). */
2169 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2170 extract_bit_field (src, bitsize,
2171 xbitpos % BITS_PER_WORD, 1,
2172 NULL_RTX, word_mode, word_mode,
2173 BITS_PER_WORD),
2174 BITS_PER_WORD);
2177 return tgtblk;
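/* Editor's note -- not part of the original file.  A worked example of the
   padding correction above: with 32-bit words (UNITS_PER_WORD == 4), a
   6-byte structure returned at the least significant end of its registers
   is left-padded on a big-endian target, so PADDING_CORRECTION is
   32 - (6 % 4) * 8 == 16; the copy loop skips those 16 padding bits in the
   source while filling TGTBLK starting at bit 0.  */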
2180 /* Add a USE expression for REG to the (possibly empty) list pointed
2181 to by CALL_FUSAGE. REG must denote a hard register. */
2183 void
2184 use_reg (rtx *call_fusage, rtx reg)
2186 if (GET_CODE (reg) != REG
2187 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2188 abort ();
2190 *call_fusage
2191 = gen_rtx_EXPR_LIST (VOIDmode,
2192 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2195 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2196 starting at REGNO. All of these registers must be hard registers. */
2198 void
2199 use_regs (rtx *call_fusage, int regno, int nregs)
2201 int i;
2203 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2204 abort ();
2206 for (i = 0; i < nregs; i++)
2207 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2210 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2211 PARALLEL REGS. This is for calls that pass values in multiple
2212 non-contiguous locations. The Irix 6 ABI has examples of this. */
2214 void
2215 use_group_regs (rtx *call_fusage, rtx regs)
2217 int i;
2219 for (i = 0; i < XVECLEN (regs, 0); i++)
2221 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2223 /* A NULL entry means the parameter goes both on the stack and in
2224 registers. This can also be a MEM for targets that pass values
2225 partially on the stack and partially in registers. */
2226 if (reg != 0 && GET_CODE (reg) == REG)
2227 use_reg (call_fusage, reg);
2232 /* Determine whether the LEN bytes generated by CONSTFUN can be
2233 stored to memory using several move instructions. CONSTFUNDATA is
2234 a pointer which will be passed as argument in every CONSTFUN call.
2235 ALIGN is maximum alignment we can assume. Return nonzero if a
2236 call to store_by_pieces should succeed. */
2239 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2240 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2241 void *constfundata, unsigned int align)
2243 unsigned HOST_WIDE_INT max_size, l;
2244 HOST_WIDE_INT offset = 0;
2245 enum machine_mode mode, tmode;
2246 enum insn_code icode;
2247 int reverse;
2248 rtx cst;
2250 if (len == 0)
2251 return 1;
2253 if (! STORE_BY_PIECES_P (len, align))
2254 return 0;
2256 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2257 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2258 align = MOVE_MAX * BITS_PER_UNIT;
2260 /* We would first store what we can in the largest integer mode, then go to
2261 successively smaller modes. */
2263 for (reverse = 0;
2264 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2265 reverse++)
2267 l = len;
2268 mode = VOIDmode;
2269 max_size = STORE_MAX_PIECES + 1;
2270 while (max_size > 1)
2272 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2273 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2274 if (GET_MODE_SIZE (tmode) < max_size)
2275 mode = tmode;
2277 if (mode == VOIDmode)
2278 break;
2280 icode = mov_optab->handlers[(int) mode].insn_code;
2281 if (icode != CODE_FOR_nothing
2282 && align >= GET_MODE_ALIGNMENT (mode))
2284 unsigned int size = GET_MODE_SIZE (mode);
2286 while (l >= size)
2288 if (reverse)
2289 offset -= size;
2291 cst = (*constfun) (constfundata, offset, mode);
2292 if (!LEGITIMATE_CONSTANT_P (cst))
2293 return 0;
2295 if (!reverse)
2296 offset += size;
2298 l -= size;
2302 max_size = GET_MODE_SIZE (mode);
2305 /* The code above should have handled everything. */
2306 if (l != 0)
2307 abort ();
2310 return 1;
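/* Editor's illustration -- not part of the original file.  A hypothetical
   CONSTFUN of the shape can_store_by_pieces and store_by_pieces expect,
   producing a repeated fill byte for whatever mode the caller asks for.
   The in-tree callers pass their own CONSTFUNs; this one is purely
   illustrative.  */
#if 0
static rtx
example_fill_byte_constfun (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
                            enum machine_mode mode)
{
  unsigned char byte = *(unsigned char *) data;
  unsigned HOST_WIDE_INT val = 0;
  unsigned int i;

  /* Replicate BYTE across all GET_MODE_SIZE (mode) bytes.  */
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    val = (val << BITS_PER_UNIT) | byte;

  return GEN_INT (trunc_int_for_mode ((HOST_WIDE_INT) val, mode));
}
#endif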
2313 /* Generate several move instructions to store LEN bytes generated by
2314 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2315 pointer which will be passed as argument in every CONSTFUN call.
2316 ALIGN is maximum alignment we can assume.
2317 If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
2318 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
2319 stpcpy. */
2322 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2323 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2324 void *constfundata, unsigned int align, int endp)
2326 struct store_by_pieces data;
2328 if (len == 0)
2330 if (endp == 2)
2331 abort ();
2332 return to;
2335 if (! STORE_BY_PIECES_P (len, align))
2336 abort ();
2337 to = protect_from_queue (to, 1);
2338 data.constfun = constfun;
2339 data.constfundata = constfundata;
2340 data.len = len;
2341 data.to = to;
2342 store_by_pieces_1 (&data, align);
2343 if (endp)
2345 rtx to1;
2347 if (data.reverse)
2348 abort ();
2349 if (data.autinc_to)
2351 if (endp == 2)
2353 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2354 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2355 else
2356 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2357 -1));
2359 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2360 data.offset);
2362 else
2364 if (endp == 2)
2365 --data.offset;
2366 to1 = adjust_address (data.to, QImode, data.offset);
2368 return to1;
2370 else
2371 return data.to;
2374 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2375 rtx with BLKmode). The caller must pass TO through protect_from_queue
2376 before calling. ALIGN is maximum alignment we can assume. */
2378 static void
2379 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2381 struct store_by_pieces data;
2383 if (len == 0)
2384 return;
2386 data.constfun = clear_by_pieces_1;
2387 data.constfundata = NULL;
2388 data.len = len;
2389 data.to = to;
2390 store_by_pieces_1 (&data, align);
2393 /* Callback routine for clear_by_pieces.
2394 Return const0_rtx unconditionally. */
2396 static rtx
2397 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2398 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2399 enum machine_mode mode ATTRIBUTE_UNUSED)
2401 return const0_rtx;
2404 /* Subroutine of clear_by_pieces and store_by_pieces.
2405 Generate several move instructions to store LEN bytes of block TO. (A MEM
2406 rtx with BLKmode). The caller must pass TO through protect_from_queue
2407 before calling. ALIGN is maximum alignment we can assume. */
2409 static void
2410 store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
2411 unsigned int align ATTRIBUTE_UNUSED)
2413 rtx to_addr = XEXP (data->to, 0);
2414 unsigned HOST_WIDE_INT max_size = STORE_MAX_PIECES + 1;
2415 enum machine_mode mode = VOIDmode, tmode;
2416 enum insn_code icode;
2418 data->offset = 0;
2419 data->to_addr = to_addr;
2420 data->autinc_to
2421 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2422 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2424 data->explicit_inc_to = 0;
2425 data->reverse
2426 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2427 if (data->reverse)
2428 data->offset = data->len;
2430 /* If storing requires more than two move insns,
2431 copy addresses to registers (to make displacements shorter)
2432 and use post-increment if available. */
2433 if (!data->autinc_to
2434 && move_by_pieces_ninsns (data->len, align) > 2)
2436 /* Determine the main mode we'll be using. */
2437 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2438 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2439 if (GET_MODE_SIZE (tmode) < max_size)
2440 mode = tmode;
2442 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2444 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2445 data->autinc_to = 1;
2446 data->explicit_inc_to = -1;
2449 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2450 && ! data->autinc_to)
2452 data->to_addr = copy_addr_to_reg (to_addr);
2453 data->autinc_to = 1;
2454 data->explicit_inc_to = 1;
2457 if ( !data->autinc_to && CONSTANT_P (to_addr))
2458 data->to_addr = copy_addr_to_reg (to_addr);
2461 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2462 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2463 align = MOVE_MAX * BITS_PER_UNIT;
2465 /* First store what we can in the largest integer mode, then go to
2466 successively smaller modes. */
2468 while (max_size > 1)
2470 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2471 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2472 if (GET_MODE_SIZE (tmode) < max_size)
2473 mode = tmode;
2475 if (mode == VOIDmode)
2476 break;
2478 icode = mov_optab->handlers[(int) mode].insn_code;
2479 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2480 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2482 max_size = GET_MODE_SIZE (mode);
2485 /* The code above should have handled everything. */
2486 if (data->len != 0)
2487 abort ();
2490 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2491 with move instructions for mode MODE. GENFUN is the gen_... function
2492 to make a move insn for that mode. DATA has all the other info. */
2494 static void
2495 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2496 struct store_by_pieces *data)
2498 unsigned int size = GET_MODE_SIZE (mode);
2499 rtx to1, cst;
2501 while (data->len >= size)
2503 if (data->reverse)
2504 data->offset -= size;
2506 if (data->autinc_to)
2507 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2508 data->offset);
2509 else
2510 to1 = adjust_address (data->to, mode, data->offset);
2512 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2513 emit_insn (gen_add2_insn (data->to_addr,
2514 GEN_INT (-(HOST_WIDE_INT) size)));
2516 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2517 emit_insn ((*genfun) (to1, cst));
2519 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2520 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2522 if (! data->reverse)
2523 data->offset += size;
2525 data->len -= size;
2529 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2530 its length in bytes. */
2533 clear_storage (rtx object, rtx size)
2535 rtx retval = 0;
2536 unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
2537 : GET_MODE_ALIGNMENT (GET_MODE (object)));
2539 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2540 just move a zero. Otherwise, do this a piece at a time. */
2541 if (GET_MODE (object) != BLKmode
2542 && GET_CODE (size) == CONST_INT
2543 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (object)))
2544 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2545 else
2547 object = protect_from_queue (object, 1);
2548 size = protect_from_queue (size, 0);
2550 if (size == const0_rtx)
2552 else if (GET_CODE (size) == CONST_INT
2553 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2554 clear_by_pieces (object, INTVAL (size), align);
2555 else if (clear_storage_via_clrstr (object, size, align))
2557 else
2558 retval = clear_storage_via_libcall (object, size);
2561 return retval;
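/* Editor's illustration -- not part of the original file.  A minimal,
   hypothetical call that clears a 64-byte BLKmode stack temporary via
   clear_storage, which then picks between a plain move, clear_by_pieces,
   a clrstr pattern or a libcall as coded above.  */
#if 0
static void
example_clear_temp (void)
{
  rtx mem = assign_stack_temp (BLKmode, 64, 0);

  clear_storage (mem, GEN_INT (64));
}
#endif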
2564 /* A subroutine of clear_storage. Expand a clrstr pattern;
2565 return true if successful. */
2567 static bool
2568 clear_storage_via_clrstr (rtx object, rtx size, unsigned int align)
2570 /* Try the most limited insn first, because there's no point
2571 including more than one in the machine description unless
2572 the more limited one has some advantage. */
2574 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2575 enum machine_mode mode;
2577 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2578 mode = GET_MODE_WIDER_MODE (mode))
2580 enum insn_code code = clrstr_optab[(int) mode];
2581 insn_operand_predicate_fn pred;
2583 if (code != CODE_FOR_nothing
2584 /* We don't need MODE to be narrower than
2585 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2586 the mode mask, as it is returned by the macro, it will
2587 definitely be less than the actual mode mask. */
2588 && ((GET_CODE (size) == CONST_INT
2589 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2590 <= (GET_MODE_MASK (mode) >> 1)))
2591 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2592 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2593 || (*pred) (object, BLKmode))
2594 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2595 || (*pred) (opalign, VOIDmode)))
2597 rtx op1;
2598 rtx last = get_last_insn ();
2599 rtx pat;
2601 op1 = convert_to_mode (mode, size, 1);
2602 pred = insn_data[(int) code].operand[1].predicate;
2603 if (pred != 0 && ! (*pred) (op1, mode))
2604 op1 = copy_to_mode_reg (mode, op1);
2606 pat = GEN_FCN ((int) code) (object, op1, opalign);
2607 if (pat)
2609 emit_insn (pat);
2610 return true;
2612 else
2613 delete_insns_since (last);
2617 return false;
2620 /* A subroutine of clear_storage. Expand a call to memset or bzero.
2621 Return the return value of memset, 0 otherwise. */
2623 static rtx
2624 clear_storage_via_libcall (rtx object, rtx size)
2626 tree call_expr, arg_list, fn, object_tree, size_tree;
2627 enum machine_mode size_mode;
2628 rtx retval;
2630 /* OBJECT or SIZE may have been passed through protect_from_queue.
2632 It is unsafe to save the value generated by protect_from_queue
2633 and reuse it later. Consider what happens if emit_queue is
2634 called before the return value from protect_from_queue is used.
2636 Expansion of the CALL_EXPR below will call emit_queue before
2637 we are finished emitting RTL for argument setup. So if we are
2638 not careful we could get the wrong value for an argument.
2640 To avoid this problem we go ahead and emit code to copy OBJECT
2641 and SIZE into new pseudos. We can then place those new pseudos
2642 into an RTL_EXPR and use them later, even after a call to
2643 emit_queue.
2645 Note this is not strictly needed for library calls since they
2646 do not call emit_queue before loading their arguments. However,
2647 we may need to have library calls call emit_queue in the future
2648 since failing to do so could cause problems for targets which
2649 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2651 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2653 if (TARGET_MEM_FUNCTIONS)
2654 size_mode = TYPE_MODE (sizetype);
2655 else
2656 size_mode = TYPE_MODE (unsigned_type_node);
2657 size = convert_to_mode (size_mode, size, 1);
2658 size = copy_to_mode_reg (size_mode, size);
2660 /* It is incorrect to use the libcall calling conventions to call
2661 memset in this context. This could be a user call to memset and
2662 the user may wish to examine the return value from memset. For
2663 targets where libcalls and normal calls have different conventions
2664 for returning pointers, we could end up generating incorrect code.
2666 For convenience, we generate the call to bzero this way as well. */
2668 object_tree = make_tree (ptr_type_node, object);
2669 if (TARGET_MEM_FUNCTIONS)
2670 size_tree = make_tree (sizetype, size);
2671 else
2672 size_tree = make_tree (unsigned_type_node, size);
2674 fn = clear_storage_libcall_fn (true);
2675 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
2676 if (TARGET_MEM_FUNCTIONS)
2677 arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
2678 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
2680 /* Now we have to build up the CALL_EXPR itself. */
2681 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2682 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2683 call_expr, arg_list, NULL_TREE);
2685 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2687 /* If we are initializing a readonly value, show the above call
2688 clobbered it. Otherwise, a load from it may erroneously be
2689 hoisted from a loop. */
2690 if (RTX_UNCHANGING_P (object))
2691 emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
2693 return (TARGET_MEM_FUNCTIONS ? retval : NULL_RTX);
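/* Editor's note -- not part of the original file.  The CALL_EXPR built
   above corresponds to the source-level call
       memset (object, 0, size)    when TARGET_MEM_FUNCTIONS, or
       bzero (object, size)        otherwise,
   with OBJECT and SIZE first copied into fresh pseudos so that a later
   emit_queue cannot invalidate them, and with memset's return value
   propagated to the caller only in the memset case.  */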
2696 /* A subroutine of clear_storage_via_libcall. Create the tree node
2697 for the function we use for block clears. The first time FOR_CALL
2698 is true, we call assemble_external. */
2700 static GTY(()) tree block_clear_fn;
2702 void
2703 init_block_clear_fn (const char *asmspec)
2705 if (!block_clear_fn)
2707 tree fn, args;
2709 if (TARGET_MEM_FUNCTIONS)
2711 fn = get_identifier ("memset");
2712 args = build_function_type_list (ptr_type_node, ptr_type_node,
2713 integer_type_node, sizetype,
2714 NULL_TREE);
2716 else
2718 fn = get_identifier ("bzero");
2719 args = build_function_type_list (void_type_node, ptr_type_node,
2720 unsigned_type_node, NULL_TREE);
2723 fn = build_decl (FUNCTION_DECL, fn, args);
2724 DECL_EXTERNAL (fn) = 1;
2725 TREE_PUBLIC (fn) = 1;
2726 DECL_ARTIFICIAL (fn) = 1;
2727 TREE_NOTHROW (fn) = 1;
2729 block_clear_fn = fn;
2732 if (asmspec)
2734 SET_DECL_RTL (block_clear_fn, NULL_RTX);
2735 SET_DECL_ASSEMBLER_NAME (block_clear_fn, get_identifier (asmspec));
2739 static tree
2740 clear_storage_libcall_fn (int for_call)
2742 static bool emitted_extern;
2744 if (!block_clear_fn)
2745 init_block_clear_fn (NULL);
2747 if (for_call && !emitted_extern)
2749 emitted_extern = true;
2750 make_decl_rtl (block_clear_fn, NULL);
2751 assemble_external (block_clear_fn);
2754 return block_clear_fn;
2757 /* Generate code to copy Y into X.
2758 Both Y and X must have the same mode, except that
2759 Y can be a constant with VOIDmode.
2760 This mode cannot be BLKmode; use emit_block_move for that.
2762 Return the last instruction emitted. */
2765 emit_move_insn (rtx x, rtx y)
2767 enum machine_mode mode = GET_MODE (x);
2768 rtx y_cst = NULL_RTX;
2769 rtx last_insn, set;
2771 x = protect_from_queue (x, 1);
2772 y = protect_from_queue (y, 0);
2774 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2775 abort ();
2777 /* Never force constant_p_rtx to memory. */
2778 if (GET_CODE (y) == CONSTANT_P_RTX)
2780 else if (CONSTANT_P (y))
2782 if (optimize
2783 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
2784 && (last_insn = compress_float_constant (x, y)))
2785 return last_insn;
2787 y_cst = y;
2789 if (!LEGITIMATE_CONSTANT_P (y))
2791 y = force_const_mem (mode, y);
2793 /* If the target's cannot_force_const_mem prevented the spill,
2794 assume that the target's move expanders will also take care
2795 of the non-legitimate constant. */
2796 if (!y)
2797 y = y_cst;
2801 /* If X or Y are memory references, verify that their addresses are valid
2802 for the machine. */
2803 if (GET_CODE (x) == MEM
2804 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2805 && ! push_operand (x, GET_MODE (x)))
2806 || (flag_force_addr
2807 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2808 x = validize_mem (x);
2810 if (GET_CODE (y) == MEM
2811 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2812 || (flag_force_addr
2813 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2814 y = validize_mem (y);
2816 if (mode == BLKmode)
2817 abort ();
2819 last_insn = emit_move_insn_1 (x, y);
2821 if (y_cst && GET_CODE (x) == REG
2822 && (set = single_set (last_insn)) != NULL_RTX
2823 && SET_DEST (set) == x
2824 && ! rtx_equal_p (y_cst, SET_SRC (set)))
2825 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
2827 return last_insn;
2830 /* Low level part of emit_move_insn.
2831 Called just like emit_move_insn, but assumes X and Y
2832 are basically valid. */
2835 emit_move_insn_1 (rtx x, rtx y)
2837 enum machine_mode mode = GET_MODE (x);
2838 enum machine_mode submode;
2839 enum mode_class class = GET_MODE_CLASS (mode);
2841 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
2842 abort ();
2844 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2845 return
2846 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2848 /* Expand complex moves by moving real part and imag part, if possible. */
2849 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2850 && BLKmode != (submode = GET_MODE_INNER (mode))
2851 && (mov_optab->handlers[(int) submode].insn_code
2852 != CODE_FOR_nothing))
2854 /* Don't split destination if it is a stack push. */
2855 int stack = push_operand (x, GET_MODE (x));
2857 #ifdef PUSH_ROUNDING
2858 /* In case we output to the stack, but the size is smaller than what the
2859 machine can push exactly, we need to use move instructions. */
2860 if (stack
2861 && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
2862 != GET_MODE_SIZE (submode)))
2864 rtx temp;
2865 HOST_WIDE_INT offset1, offset2;
2867 /* Do not use anti_adjust_stack, since we don't want to update
2868 stack_pointer_delta. */
2869 temp = expand_binop (Pmode,
2870 #ifdef STACK_GROWS_DOWNWARD
2871 sub_optab,
2872 #else
2873 add_optab,
2874 #endif
2875 stack_pointer_rtx,
2876 GEN_INT
2877 (PUSH_ROUNDING
2878 (GET_MODE_SIZE (GET_MODE (x)))),
2879 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
2881 if (temp != stack_pointer_rtx)
2882 emit_move_insn (stack_pointer_rtx, temp);
2884 #ifdef STACK_GROWS_DOWNWARD
2885 offset1 = 0;
2886 offset2 = GET_MODE_SIZE (submode);
2887 #else
2888 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
2889 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
2890 + GET_MODE_SIZE (submode));
2891 #endif
2893 emit_move_insn (change_address (x, submode,
2894 gen_rtx_PLUS (Pmode,
2895 stack_pointer_rtx,
2896 GEN_INT (offset1))),
2897 gen_realpart (submode, y));
2898 emit_move_insn (change_address (x, submode,
2899 gen_rtx_PLUS (Pmode,
2900 stack_pointer_rtx,
2901 GEN_INT (offset2))),
2902 gen_imagpart (submode, y));
2904 else
2905 #endif
2906 /* If this is a stack push, push the highpart first, so it
2907 will be in the argument order.
2909 In that case, change_address is used only to convert
2910 the mode, not to change the address. */
2911 if (stack)
2913 /* Note that the real part always precedes the imag part in memory
2914 regardless of machine's endianness. */
2915 #ifdef STACK_GROWS_DOWNWARD
2916 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2917 gen_imagpart (submode, y));
2918 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2919 gen_realpart (submode, y));
2920 #else
2921 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2922 gen_realpart (submode, y));
2923 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2924 gen_imagpart (submode, y));
2925 #endif
2927 else
2929 rtx realpart_x, realpart_y;
2930 rtx imagpart_x, imagpart_y;
2932 /* If this is a complex value with each part being smaller than a
2933 word, the usual calling sequence will likely pack the pieces into
2934 a single register. Unfortunately, SUBREG of hard registers only
2935 deals in terms of words, so we have a problem converting input
2936 arguments to the CONCAT of two registers that is used elsewhere
2937 for complex values. If this is before reload, we can copy it into
2938 memory and reload. FIXME, we should see about using extract and
2939 insert on integer registers, but complex short and complex char
2940 variables should be rarely used. */
2941 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2942 && (reload_in_progress | reload_completed) == 0)
2944 int packed_dest_p
2945 = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2946 int packed_src_p
2947 = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2949 if (packed_dest_p || packed_src_p)
2951 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2952 ? MODE_FLOAT : MODE_INT);
2954 enum machine_mode reg_mode
2955 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2957 if (reg_mode != BLKmode)
2959 rtx mem = assign_stack_temp (reg_mode,
2960 GET_MODE_SIZE (mode), 0);
2961 rtx cmem = adjust_address (mem, mode, 0);
2963 cfun->cannot_inline
2964 = N_("function using short complex types cannot be inline");
2966 if (packed_dest_p)
2968 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2970 emit_move_insn_1 (cmem, y);
2971 return emit_move_insn_1 (sreg, mem);
2973 else
2975 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2977 emit_move_insn_1 (mem, sreg);
2978 return emit_move_insn_1 (x, cmem);
2984 realpart_x = gen_realpart (submode, x);
2985 realpart_y = gen_realpart (submode, y);
2986 imagpart_x = gen_imagpart (submode, x);
2987 imagpart_y = gen_imagpart (submode, y);
2989 /* Show the output dies here. This is necessary for SUBREGs
2990 of pseudos since we cannot track their lifetimes correctly;
2991 hard regs shouldn't appear here except as return values.
2992 We never want to emit such a clobber after reload. */
2993 if (x != y
2994 && ! (reload_in_progress || reload_completed)
2995 && (GET_CODE (realpart_x) == SUBREG
2996 || GET_CODE (imagpart_x) == SUBREG))
2997 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2999 emit_move_insn (realpart_x, realpart_y);
3000 emit_move_insn (imagpart_x, imagpart_y);
3003 return get_last_insn ();
3006 /* Handle MODE_CC modes: If we don't have a special move insn for this mode,
3007 find a mode to do it in. If we have a movcc, use it. Otherwise,
3008 find the MODE_INT mode of the same width. */
3009 else if (GET_MODE_CLASS (mode) == MODE_CC
3010 && mov_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
3012 enum insn_code insn_code;
3013 enum machine_mode tmode = VOIDmode;
3014 rtx x1 = x, y1 = y;
3016 if (mode != CCmode
3017 && mov_optab->handlers[(int) CCmode].insn_code != CODE_FOR_nothing)
3018 tmode = CCmode;
3019 else
3020 for (tmode = QImode; tmode != VOIDmode;
3021 tmode = GET_MODE_WIDER_MODE (tmode))
3022 if (GET_MODE_SIZE (tmode) == GET_MODE_SIZE (mode))
3023 break;
3025 if (tmode == VOIDmode)
3026 abort ();
3028 /* Get X and Y in TMODE. We can't use gen_lowpart here because it
3029 may call change_address which is not appropriate if we were
3030 called when a reload was in progress. We don't have to worry
3031 about changing the address since the size in bytes is supposed to
3032 be the same. Copy the MEM to change the mode and move any
3033 substitutions from the old MEM to the new one. */
3035 if (reload_in_progress)
3037 x = gen_lowpart_common (tmode, x1);
3038 if (x == 0 && GET_CODE (x1) == MEM)
3040 x = adjust_address_nv (x1, tmode, 0);
3041 copy_replacements (x1, x);
3044 y = gen_lowpart_common (tmode, y1);
3045 if (y == 0 && GET_CODE (y1) == MEM)
3047 y = adjust_address_nv (y1, tmode, 0);
3048 copy_replacements (y1, y);
3051 else
3053 x = gen_lowpart (tmode, x);
3054 y = gen_lowpart (tmode, y);
3057 insn_code = mov_optab->handlers[(int) tmode].insn_code;
3058 return emit_insn (GEN_FCN (insn_code) (x, y));
3061 /* Try using a move pattern for the corresponding integer mode. This is
3062 only safe when simplify_subreg can convert MODE constants into integer
3063 constants. At present, it can only do this reliably if the value
3064 fits within a HOST_WIDE_INT. */
3065 else if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3066 && (submode = int_mode_for_mode (mode)) != BLKmode
3067 && mov_optab->handlers[submode].insn_code != CODE_FOR_nothing)
3068 return emit_insn (GEN_FCN (mov_optab->handlers[submode].insn_code)
3069 (simplify_gen_subreg (submode, x, mode, 0),
3070 simplify_gen_subreg (submode, y, mode, 0)));
3072 /* This will handle any multi-word or full-word mode that lacks a move_insn
3073 pattern. However, you will get better code if you define such patterns,
3074 even if they must turn into multiple assembler instructions. */
3075 else if (GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
3077 rtx last_insn = 0;
3078 rtx seq, inner;
3079 int need_clobber;
3080 int i;
3082 #ifdef PUSH_ROUNDING
3084 /* If X is a push on the stack, do the push now and replace
3085 X with a reference to the stack pointer. */
3086 if (push_operand (x, GET_MODE (x)))
3088 rtx temp;
3089 enum rtx_code code;
3091 /* Do not use anti_adjust_stack, since we don't want to update
3092 stack_pointer_delta. */
3093 temp = expand_binop (Pmode,
3094 #ifdef STACK_GROWS_DOWNWARD
3095 sub_optab,
3096 #else
3097 add_optab,
3098 #endif
3099 stack_pointer_rtx,
3100 GEN_INT
3101 (PUSH_ROUNDING
3102 (GET_MODE_SIZE (GET_MODE (x)))),
3103 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3105 if (temp != stack_pointer_rtx)
3106 emit_move_insn (stack_pointer_rtx, temp);
3108 code = GET_CODE (XEXP (x, 0));
3110 /* Just hope that small offsets off SP are OK. */
3111 if (code == POST_INC)
3112 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3113 GEN_INT (-((HOST_WIDE_INT)
3114 GET_MODE_SIZE (GET_MODE (x)))));
3115 else if (code == POST_DEC)
3116 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3117 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3118 else
3119 temp = stack_pointer_rtx;
3121 x = change_address (x, VOIDmode, temp);
3123 #endif
3125 /* If we are in reload, see if either operand is a MEM whose address
3126 is scheduled for replacement. */
3127 if (reload_in_progress && GET_CODE (x) == MEM
3128 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3129 x = replace_equiv_address_nv (x, inner);
3130 if (reload_in_progress && GET_CODE (y) == MEM
3131 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3132 y = replace_equiv_address_nv (y, inner);
3134 start_sequence ();
3136 need_clobber = 0;
3137 for (i = 0;
3138 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3139 i++)
3141 rtx xpart = operand_subword (x, i, 1, mode);
3142 rtx ypart = operand_subword (y, i, 1, mode);
3144 /* If we can't get a part of Y, put Y into memory if it is a
3145 constant. Otherwise, force it into a register. If we still
3146 can't get a part of Y, abort. */
3147 if (ypart == 0 && CONSTANT_P (y))
3149 y = force_const_mem (mode, y);
3150 ypart = operand_subword (y, i, 1, mode);
3152 else if (ypart == 0)
3153 ypart = operand_subword_force (y, i, mode);
3155 if (xpart == 0 || ypart == 0)
3156 abort ();
3158 need_clobber |= (GET_CODE (xpart) == SUBREG);
3160 last_insn = emit_move_insn (xpart, ypart);
3163 seq = get_insns ();
3164 end_sequence ();
3166 /* Show the output dies here. This is necessary for SUBREGs
3167 of pseudos since we cannot track their lifetimes correctly;
3168 hard regs shouldn't appear here except as return values.
3169 We never want to emit such a clobber after reload. */
3170 if (x != y
3171 && ! (reload_in_progress || reload_completed)
3172 && need_clobber != 0)
3173 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3175 emit_insn (seq);
3177 return last_insn;
3179 else
3180 abort ();
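/* Editor's note -- not part of the original file.  As a concrete example of
   the multi-word fallback above: moving a TImode value on a 32-bit target
   with no movti pattern is emitted as four word_mode moves, one per
   operand_subword, preceded by a CLOBBER of the destination when any
   destination word had to be taken as a SUBREG.  */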
3183 /* If Y is representable exactly in a narrower mode, and the target can
3184 perform the extension directly from constant or memory, then emit the
3185 move as an extension. */
3187 static rtx
3188 compress_float_constant (rtx x, rtx y)
3190 enum machine_mode dstmode = GET_MODE (x);
3191 enum machine_mode orig_srcmode = GET_MODE (y);
3192 enum machine_mode srcmode;
3193 REAL_VALUE_TYPE r;
3195 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3197 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3198 srcmode != orig_srcmode;
3199 srcmode = GET_MODE_WIDER_MODE (srcmode))
3201 enum insn_code ic;
3202 rtx trunc_y, last_insn;
3204 /* Skip if the target can't extend this way. */
3205 ic = can_extend_p (dstmode, srcmode, 0);
3206 if (ic == CODE_FOR_nothing)
3207 continue;
3209 /* Skip if the narrowed value isn't exact. */
3210 if (! exact_real_truncate (srcmode, &r))
3211 continue;
3213 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3215 if (LEGITIMATE_CONSTANT_P (trunc_y))
3217 /* Skip if the target needs extra instructions to perform
3218 the extension. */
3219 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3220 continue;
3222 else if (float_extend_from_mem[dstmode][srcmode])
3223 trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
3224 else
3225 continue;
3227 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3228 last_insn = get_last_insn ();
3230 if (GET_CODE (x) == REG)
3231 set_unique_reg_note (last_insn, REG_EQUAL, y);
3233 return last_insn;
3236 return NULL_RTX;
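/* Editor's note -- not part of the original file.  A concrete example of
   the optimization above: moving the DFmode constant 1.0 into a register
   can, on a target whose extendsfdf2 accepts a memory source, be emitted
   as an SFmode constant-pool load plus an extension, because 1.0 truncates
   to SFmode exactly; a constant such as 0.1 does not, so it falls through
   to the ordinary DFmode move.  */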
3239 /* Pushing data onto the stack. */
3241 /* Push a block of length SIZE (perhaps variable)
3242 and return an rtx to address the beginning of the block.
3243 Note that it is not possible for the value returned to be a QUEUED.
3244 The value may be virtual_outgoing_args_rtx.
3246 EXTRA is the number of bytes of padding to push in addition to SIZE.
3247 BELOW nonzero means this padding comes at low addresses;
3248 otherwise, the padding comes at high addresses. */
3251 push_block (rtx size, int extra, int below)
3253 rtx temp;
3255 size = convert_modes (Pmode, ptr_mode, size, 1);
3256 if (CONSTANT_P (size))
3257 anti_adjust_stack (plus_constant (size, extra));
3258 else if (GET_CODE (size) == REG && extra == 0)
3259 anti_adjust_stack (size);
3260 else
3262 temp = copy_to_mode_reg (Pmode, size);
3263 if (extra != 0)
3264 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3265 temp, 0, OPTAB_LIB_WIDEN);
3266 anti_adjust_stack (temp);
3269 #ifndef STACK_GROWS_DOWNWARD
3270 if (0)
3271 #else
3272 if (1)
3273 #endif
3275 temp = virtual_outgoing_args_rtx;
3276 if (extra != 0 && below)
3277 temp = plus_constant (temp, extra);
3279 else
3281 if (GET_CODE (size) == CONST_INT)
3282 temp = plus_constant (virtual_outgoing_args_rtx,
3283 -INTVAL (size) - (below ? 0 : extra));
3284 else if (extra != 0 && !below)
3285 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3286 negate_rtx (Pmode, plus_constant (size, extra)));
3287 else
3288 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3289 negate_rtx (Pmode, size));
3292 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
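/* Editor's note -- not part of the original file.  A worked example of the
   address returned above: with EXTRA == 0 and a constant SIZE of 16,
   push_block adjusts the stack by 16 bytes and, on a downward-growing
   stack, returns virtual_outgoing_args_rtx (the low end of the new block);
   on an upward-growing stack it returns virtual_outgoing_args_rtx - 16
   instead.  */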
3295 #ifdef PUSH_ROUNDING
3297 /* Emit single push insn. */
3299 static void
3300 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3302 rtx dest_addr;
3303 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3304 rtx dest;
3305 enum insn_code icode;
3306 insn_operand_predicate_fn pred;
3308 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3309 /* If there is a push pattern, use it. Otherwise try the old way of throwing a
3310 MEM representing the push operation to the move expander. */
3311 icode = push_optab->handlers[(int) mode].insn_code;
3312 if (icode != CODE_FOR_nothing)
3314 if (((pred = insn_data[(int) icode].operand[0].predicate)
3315 && !((*pred) (x, mode))))
3316 x = force_reg (mode, x);
3317 emit_insn (GEN_FCN (icode) (x));
3318 return;
3320 if (GET_MODE_SIZE (mode) == rounded_size)
3321 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3322 /* If we are to pad downward, adjust the stack pointer first and
3323 then store X into the stack location using an offset. This is
3324 because emit_move_insn does not know how to pad; it does not have
3325 access to type. */
3326 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3328 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3329 HOST_WIDE_INT offset;
3331 emit_move_insn (stack_pointer_rtx,
3332 expand_binop (Pmode,
3333 #ifdef STACK_GROWS_DOWNWARD
3334 sub_optab,
3335 #else
3336 add_optab,
3337 #endif
3338 stack_pointer_rtx,
3339 GEN_INT (rounded_size),
3340 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3342 offset = (HOST_WIDE_INT) padding_size;
3343 #ifdef STACK_GROWS_DOWNWARD
3344 if (STACK_PUSH_CODE == POST_DEC)
3345 /* We have already decremented the stack pointer, so get the
3346 previous value. */
3347 offset += (HOST_WIDE_INT) rounded_size;
3348 #else
3349 if (STACK_PUSH_CODE == POST_INC)
3350 /* We have already incremented the stack pointer, so get the
3351 previous value. */
3352 offset -= (HOST_WIDE_INT) rounded_size;
3353 #endif
3354 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3356 else
3358 #ifdef STACK_GROWS_DOWNWARD
3359 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3360 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3361 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3362 #else
3363 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3364 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3365 GEN_INT (rounded_size));
3366 #endif
3367 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3370 dest = gen_rtx_MEM (mode, dest_addr);
3372 if (type != 0)
3374 set_mem_attributes (dest, type, 1);
3376 if (flag_optimize_sibling_calls)
3377 /* Function incoming arguments may overlap with sibling call
3378 outgoing arguments and we cannot allow reordering of reads
3379 from function arguments with stores to outgoing arguments
3380 of sibling calls. */
3381 set_mem_alias_set (dest, 0);
3383 emit_move_insn (dest, x);
3385 #endif
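/* Editor's note -- not part of the original file.  A worked example of the
   downward-padding path above: pushing an HImode value (2 bytes) when
   PUSH_ROUNDING rounds pushes up to 4 bytes gives PADDING_SIZE == 2; the
   stack pointer is adjusted by the full 4 bytes first and, on a
   downward-growing stack with a pre-decrement push code, the value is then
   stored at sp + 2, leaving the 2 padding bytes below the data.  */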
3387 /* Generate code to push X onto the stack, assuming it has mode MODE and
3388 type TYPE.
3389 MODE is redundant except when X is a CONST_INT (since they don't
3390 carry mode info).
3391 SIZE is an rtx for the size of data to be copied (in bytes),
3392 needed only if X is BLKmode.
3394 ALIGN (in bits) is maximum alignment we can assume.
3396 If PARTIAL and REG are both nonzero, then copy that many of the first
3397 words of X into registers starting with REG, and push the rest of X.
3398 The amount of space pushed is decreased by PARTIAL words,
3399 rounded *down* to a multiple of PARM_BOUNDARY.
3400 REG must be a hard register in this case.
3401 If REG is zero but PARTIAL is not, take all other actions for an
3402 argument partially in registers, but do not actually load any
3403 registers.
3405 EXTRA is the amount in bytes of extra space to leave next to this arg.
3406 This is ignored if an argument block has already been allocated.
3408 On a machine that lacks real push insns, ARGS_ADDR is the address of
3409 the bottom of the argument block for this call. We use indexing off there
3410 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3411 argument block has not been preallocated.
3413 ARGS_SO_FAR is the size of args previously pushed for this call.
3415 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3416 for arguments passed in registers. If nonzero, it will be the number
3417 of bytes required. */
3419 void
3420 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3421 unsigned int align, int partial, rtx reg, int extra,
3422 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3423 rtx alignment_pad)
3425 rtx xinner;
3426 enum direction stack_direction
3427 #ifdef STACK_GROWS_DOWNWARD
3428 = downward;
3429 #else
3430 = upward;
3431 #endif
3433 /* Decide where to pad the argument: `downward' for below,
3434 `upward' for above, or `none' for don't pad it.
3435 Default is below for small data on big-endian machines; else above. */
3436 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3438 /* Invert direction if stack is post-decrement.
3439 FIXME: why? */
3440 if (STACK_PUSH_CODE == POST_DEC)
3441 if (where_pad != none)
3442 where_pad = (where_pad == downward ? upward : downward);
3444 xinner = x = protect_from_queue (x, 0);
3446 if (mode == BLKmode)
3448 /* Copy a block into the stack, entirely or partially. */
3450 rtx temp;
3451 int used = partial * UNITS_PER_WORD;
3452 int offset;
3453 int skip;
3455 if (reg && GET_CODE (reg) == PARALLEL)
3457 /* Use the size of the elt to compute offset. */
3458 rtx elt = XEXP (XVECEXP (reg, 0, 0), 0);
3459 used = partial * GET_MODE_SIZE (GET_MODE (elt));
3460 offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3462 else
3463 offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3465 if (size == 0)
3466 abort ();
3468 used -= offset;
3470 /* USED is now the # of bytes we need not copy to the stack
3471 because registers will take care of them. */
3473 if (partial != 0)
3474 xinner = adjust_address (xinner, BLKmode, used);
3476 /* If the partial register-part of the arg counts in its stack size,
3477 skip the part of stack space corresponding to the registers.
3478 Otherwise, start copying to the beginning of the stack space,
3479 by setting SKIP to 0. */
3480 skip = (reg_parm_stack_space == 0) ? 0 : used;
3482 #ifdef PUSH_ROUNDING
3483 /* Do it with several push insns if that doesn't take lots of insns
3484 and if there is no difficulty with push insns that skip bytes
3485 on the stack for alignment purposes. */
3486 if (args_addr == 0
3487 && PUSH_ARGS
3488 && GET_CODE (size) == CONST_INT
3489 && skip == 0
3490 && MEM_ALIGN (xinner) >= align
3491 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3492 /* Here we avoid the case of a structure whose weak alignment
3493 forces many pushes of a small amount of data,
3494 and such small pushes do rounding that causes trouble. */
3495 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3496 || align >= BIGGEST_ALIGNMENT
3497 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3498 == (align / BITS_PER_UNIT)))
3499 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3501 /* Push padding now if padding above and stack grows down,
3502 or if padding below and stack grows up.
3503 But if space already allocated, this has already been done. */
3504 if (extra && args_addr == 0
3505 && where_pad != none && where_pad != stack_direction)
3506 anti_adjust_stack (GEN_INT (extra));
3508 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3510 else
3511 #endif /* PUSH_ROUNDING */
3513 rtx target;
3515 /* Otherwise make space on the stack and copy the data
3516 to the address of that space. */
3518 /* Deduct words put into registers from the size we must copy. */
3519 if (partial != 0)
3521 if (GET_CODE (size) == CONST_INT)
3522 size = GEN_INT (INTVAL (size) - used);
3523 else
3524 size = expand_binop (GET_MODE (size), sub_optab, size,
3525 GEN_INT (used), NULL_RTX, 0,
3526 OPTAB_LIB_WIDEN);
3529 /* Get the address of the stack space.
3530 In this case, we do not deal with EXTRA separately.
3531 A single stack adjust will do. */
3532 if (! args_addr)
3534 temp = push_block (size, extra, where_pad == downward);
3535 extra = 0;
3537 else if (GET_CODE (args_so_far) == CONST_INT)
3538 temp = memory_address (BLKmode,
3539 plus_constant (args_addr,
3540 skip + INTVAL (args_so_far)));
3541 else
3542 temp = memory_address (BLKmode,
3543 plus_constant (gen_rtx_PLUS (Pmode,
3544 args_addr,
3545 args_so_far),
3546 skip));
3548 if (!ACCUMULATE_OUTGOING_ARGS)
3550 /* If the source is referenced relative to the stack pointer,
3551 copy it to another register to stabilize it. We do not need
3552 to do this if we know that we won't be changing sp. */
3554 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3555 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3556 temp = copy_to_reg (temp);
3559 target = gen_rtx_MEM (BLKmode, temp);
3561 if (type != 0)
3563 set_mem_attributes (target, type, 1);
3564 /* Function incoming arguments may overlap with sibling call
3565 outgoing arguments and we cannot allow reordering of reads
3566 from function arguments with stores to outgoing arguments
3567 of sibling calls. */
3568 set_mem_alias_set (target, 0);
3571 /* ALIGN may well be better aligned than TYPE, e.g. due to
3572 PARM_BOUNDARY. Assume the caller isn't lying. */
3573 set_mem_align (target, align);
3575 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3578 else if (partial > 0)
3580 /* Scalar partly in registers. */
3582 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3583 int i;
3584 int not_stack;
3585 /* # words of start of argument
3586 that we must make space for but need not store. */
3587 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3588 int args_offset = INTVAL (args_so_far);
3589 int skip;
3591 /* Push padding now if padding above and stack grows down,
3592 or if padding below and stack grows up.
3593 But if space already allocated, this has already been done. */
3594 if (extra && args_addr == 0
3595 && where_pad != none && where_pad != stack_direction)
3596 anti_adjust_stack (GEN_INT (extra));
3598 /* If we make space by pushing it, we might as well push
3599 the real data. Otherwise, we can leave OFFSET nonzero
3600 and leave the space uninitialized. */
3601 if (args_addr == 0)
3602 offset = 0;
3604 /* Now NOT_STACK gets the number of words that we don't need to
3605 allocate on the stack. */
3606 not_stack = partial - offset;
3608 /* If the partial register-part of the arg counts in its stack size,
3609 skip the part of stack space corresponding to the registers.
3610 Otherwise, start copying to the beginning of the stack space,
3611 by setting SKIP to 0. */
3612 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3614 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3615 x = validize_mem (force_const_mem (mode, x));
3617 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3618 SUBREGs of such registers are not allowed. */
3619 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3620 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3621 x = copy_to_reg (x);
3623 /* Loop over all the words allocated on the stack for this arg. */
3624 /* We can do it by words, because any scalar bigger than a word
3625 has a size a multiple of a word. */
3626 #ifndef PUSH_ARGS_REVERSED
3627 for (i = not_stack; i < size; i++)
3628 #else
3629 for (i = size - 1; i >= not_stack; i--)
3630 #endif
3631 if (i >= not_stack + offset)
3632 emit_push_insn (operand_subword_force (x, i, mode),
3633 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3634 0, args_addr,
3635 GEN_INT (args_offset + ((i - not_stack + skip)
3636 * UNITS_PER_WORD)),
3637 reg_parm_stack_space, alignment_pad);
3639 else
3641 rtx addr;
3642 rtx dest;
3644 /* Push padding now if padding above and stack grows down,
3645 or if padding below and stack grows up.
3646 But if space already allocated, this has already been done. */
3647 if (extra && args_addr == 0
3648 && where_pad != none && where_pad != stack_direction)
3649 anti_adjust_stack (GEN_INT (extra));
3651 #ifdef PUSH_ROUNDING
3652 if (args_addr == 0 && PUSH_ARGS)
3653 emit_single_push_insn (mode, x, type);
3654 else
3655 #endif
3657 if (GET_CODE (args_so_far) == CONST_INT)
3658 addr
3659 = memory_address (mode,
3660 plus_constant (args_addr,
3661 INTVAL (args_so_far)));
3662 else
3663 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3664 args_so_far));
3665 dest = gen_rtx_MEM (mode, addr);
3666 if (type != 0)
3668 set_mem_attributes (dest, type, 1);
3669 /* Function incoming arguments may overlap with sibling call
3670 outgoing arguments and we cannot allow reordering of reads
3671 from function arguments with stores to outgoing arguments
3672 of sibling calls. */
3673 set_mem_alias_set (dest, 0);
3676 emit_move_insn (dest, x);
3680 /* If part should go in registers, copy that part
3681 into the appropriate registers. Do this now, at the end,
3682 since mem-to-mem copies above may do function calls. */
3683 if (partial > 0 && reg != 0)
3685 /* Handle calls that pass values in multiple non-contiguous locations.
3686 The Irix 6 ABI has examples of this. */
3687 if (GET_CODE (reg) == PARALLEL)
3688 emit_group_load (reg, x, type, -1);
3689 else
3690 move_block_to_reg (REGNO (reg), x, partial, mode);
3693 if (extra && args_addr == 0 && where_pad == stack_direction)
3694 anti_adjust_stack (GEN_INT (extra));
3696 if (alignment_pad && args_addr == 0)
3697 anti_adjust_stack (alignment_pad);
3700 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3701 operations. */
3703 static rtx
3704 get_subtarget (rtx x)
3706 return ((x == 0
3707 /* Only registers can be subtargets. */
3708 || GET_CODE (x) != REG
3709 /* If the register is readonly, it can't be set more than once. */
3710 || RTX_UNCHANGING_P (x)
3711 /* Don't use hard regs to avoid extending their life. */
3712 || REGNO (x) < FIRST_PSEUDO_REGISTER
3713 /* Avoid subtargets inside loops,
3714 since they hide some invariant expressions. */
3715 || preserve_subexpressions_p ())
3716 ? 0 : x);
3719 /* Expand an assignment that stores the value of FROM into TO.
3720 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3721 (This may contain a QUEUED rtx;
3722 if the value is constant, this rtx is a constant.)
3723 Otherwise, the returned value is NULL_RTX. */
3725 rtx
3726 expand_assignment (tree to, tree from, int want_value)
3728 rtx to_rtx = 0;
3729 rtx result;
3731 /* Don't crash if the lhs of the assignment was erroneous. */
3733 if (TREE_CODE (to) == ERROR_MARK)
3735 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3736 return want_value ? result : NULL_RTX;
3739 /* Assignment of a structure component needs special treatment
3740 if the structure component's rtx is not simply a MEM.
3741 Assignment of an array element at a constant index, and assignment of
3742 an array element in an unaligned packed structure field, has the same
3743 problem. */
3745 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3746 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF
3747 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
3749 enum machine_mode mode1;
3750 HOST_WIDE_INT bitsize, bitpos;
3751 rtx orig_to_rtx;
3752 tree offset;
3753 int unsignedp;
3754 int volatilep = 0;
3755 tree tem;
3757 push_temp_slots ();
3758 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3759 &unsignedp, &volatilep);
3761 /* If we are going to use store_bit_field and extract_bit_field,
3762 make sure to_rtx will be safe for multiple use. */
3764 if (mode1 == VOIDmode && want_value)
3765 tem = stabilize_reference (tem);
3767 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
3769 if (offset != 0)
3771 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
3773 if (GET_CODE (to_rtx) != MEM)
3774 abort ();
3776 #ifdef POINTERS_EXTEND_UNSIGNED
3777 if (GET_MODE (offset_rtx) != Pmode)
3778 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
3779 #else
3780 if (GET_MODE (offset_rtx) != ptr_mode)
3781 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3782 #endif
3784 /* A constant address in TO_RTX can have VOIDmode; we must not try
3785 to call force_reg in that case, so avoid it. */
3786 if (GET_CODE (to_rtx) == MEM
3787 && GET_MODE (to_rtx) == BLKmode
3788 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3789 && bitsize > 0
3790 && (bitpos % bitsize) == 0
3791 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3792 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
3794 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
3795 bitpos = 0;
3798 to_rtx = offset_address (to_rtx, offset_rtx,
3799 highest_pow2_factor_for_type (TREE_TYPE (to),
3800 offset));
3803 if (GET_CODE (to_rtx) == MEM)
3805 /* If the field is at offset zero, we could have been given the
3806 DECL_RTX of the parent struct. Don't munge it. */
3807 to_rtx = shallow_copy_rtx (to_rtx);
3809 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
3812 /* Deal with volatile and readonly fields. The former is only done
3813 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
3814 if (volatilep && GET_CODE (to_rtx) == MEM)
3816 if (to_rtx == orig_to_rtx)
3817 to_rtx = copy_rtx (to_rtx);
3818 MEM_VOLATILE_P (to_rtx) = 1;
3821 if (TREE_CODE (to) == COMPONENT_REF
3822 && TREE_READONLY (TREE_OPERAND (to, 1))
3823 /* We can't assert that a MEM won't be set more than once
3824 if the component is not addressable because another
3825 non-addressable component may be referenced by the same MEM. */
3826 && ! (GET_CODE (to_rtx) == MEM && ! can_address_p (to)))
3828 if (to_rtx == orig_to_rtx)
3829 to_rtx = copy_rtx (to_rtx);
3830 RTX_UNCHANGING_P (to_rtx) = 1;
3833 if (GET_CODE (to_rtx) == MEM && ! can_address_p (to))
3835 if (to_rtx == orig_to_rtx)
3836 to_rtx = copy_rtx (to_rtx);
3837 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
3840 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3841 (want_value
3842 /* Spurious cast for HPUX compiler. */
3843 ? ((enum machine_mode)
3844 TYPE_MODE (TREE_TYPE (to)))
3845 : VOIDmode),
3846 unsignedp, TREE_TYPE (tem), get_alias_set (to));
3848 preserve_temp_slots (result);
3849 free_temp_slots ();
3850 pop_temp_slots ();
3852 /* If the value is meaningful, convert RESULT to the proper mode.
3853 Otherwise, return nothing. */
3854 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3855 TYPE_MODE (TREE_TYPE (from)),
3856 result,
3857 TREE_UNSIGNED (TREE_TYPE (to)))
3858 : NULL_RTX);
3861 /* If the rhs is a function call and its value is not an aggregate,
3862 call the function before we start to compute the lhs.
3863 This is needed for correct code for cases such as
3864 val = setjmp (buf) on machines where reference to val
3865 requires loading up part of an address in a separate insn.
3867 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3868 since it might be a promoted variable where the zero- or sign- extension
3869 needs to be done. Handling this in the normal way is safe because no
3870 computation is done before the call. */
3871 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
3872 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3873 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3874 && GET_CODE (DECL_RTL (to)) == REG))
3876 rtx value;
3878 push_temp_slots ();
3879 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3880 if (to_rtx == 0)
3881 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3883 /* Handle calls that return values in multiple non-contiguous locations.
3884 The Irix 6 ABI has examples of this. */
3885 if (GET_CODE (to_rtx) == PARALLEL)
3886 emit_group_load (to_rtx, value, TREE_TYPE (from),
3887 int_size_in_bytes (TREE_TYPE (from)));
3888 else if (GET_MODE (to_rtx) == BLKmode)
3889 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
3890 else
3892 if (POINTER_TYPE_P (TREE_TYPE (to)))
3893 value = convert_memory_address (GET_MODE (to_rtx), value);
3894 emit_move_insn (to_rtx, value);
3896 preserve_temp_slots (to_rtx);
3897 free_temp_slots ();
3898 pop_temp_slots ();
3899 return want_value ? to_rtx : NULL_RTX;
3902 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3903 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3905 if (to_rtx == 0)
3906 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3908 /* Don't move directly into a return register. */
3909 if (TREE_CODE (to) == RESULT_DECL
3910 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3912 rtx temp;
3914 push_temp_slots ();
3915 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3917 if (GET_CODE (to_rtx) == PARALLEL)
3918 emit_group_load (to_rtx, temp, TREE_TYPE (from),
3919 int_size_in_bytes (TREE_TYPE (from)));
3920 else
3921 emit_move_insn (to_rtx, temp);
3923 preserve_temp_slots (to_rtx);
3924 free_temp_slots ();
3925 pop_temp_slots ();
3926 return want_value ? to_rtx : NULL_RTX;
3929 /* In case we are returning the contents of an object which overlaps
3930 the place the value is being stored, use a safe function when copying
3931 a value through a pointer into a structure value return block. */
3932 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3933 && current_function_returns_struct
3934 && !current_function_returns_pcc_struct)
3936 rtx from_rtx, size;
3938 push_temp_slots ();
3939 size = expr_size (from);
3940 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
3942 if (TARGET_MEM_FUNCTIONS)
3943 emit_library_call (memmove_libfunc, LCT_NORMAL,
3944 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3945 XEXP (from_rtx, 0), Pmode,
3946 convert_to_mode (TYPE_MODE (sizetype),
3947 size, TREE_UNSIGNED (sizetype)),
3948 TYPE_MODE (sizetype));
3949 else
3950 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3951 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3952 XEXP (to_rtx, 0), Pmode,
3953 convert_to_mode (TYPE_MODE (integer_type_node),
3954 size,
3955 TREE_UNSIGNED (integer_type_node)),
3956 TYPE_MODE (integer_type_node));
3958 preserve_temp_slots (to_rtx);
3959 free_temp_slots ();
3960 pop_temp_slots ();
3961 return want_value ? to_rtx : NULL_RTX;
3964 /* Compute FROM and store the value in the rtx we got. */
3966 push_temp_slots ();
3967 result = store_expr (from, to_rtx, want_value);
3968 preserve_temp_slots (result);
3969 free_temp_slots ();
3970 pop_temp_slots ();
3971 return want_value ? result : NULL_RTX;
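/* A rough sketch of how this is reached (illustrative only): expand_expr
   handles assignment trees by calling expand_assignment, approximately

     case MODIFY_EXPR:
       return expand_assignment (TREE_OPERAND (exp, 0),
                                 TREE_OPERAND (exp, 1),
                                 ! ignore);

   so "a = b" first obtains an rtx for the lhs and then routes the rhs
   through store_field or store_expr as chosen above.  */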
3974 /* Generate code for computing expression EXP,
3975 and storing the value into TARGET.
3976 TARGET may contain a QUEUED rtx.
3978 If WANT_VALUE & 1 is nonzero, return a copy of the value
3979 not in TARGET, so that we can be sure to use the proper
3980 value in a containing expression even if TARGET has something
3981 else stored in it. If possible, we copy the value through a pseudo
3982 and return that pseudo. Or, if the value is constant, we try to
3983 return the constant. In some cases, we return a pseudo
3984 copied *from* TARGET.
3986 If the mode is BLKmode then we may return TARGET itself.
3987 It turns out that in BLKmode it doesn't cause a problem,
3988 because C has no operators that could combine two different
3989 assignments into the same BLKmode object with different values
3990 with no sequence point. Will other languages need this to
3991 be more thorough?
3993 If WANT_VALUE & 1 is 0, we return NULL, to make sure
3994 to catch quickly any cases where the caller uses the value
3995 and fails to set WANT_VALUE.
3997 If WANT_VALUE & 2 is set, this is a store into a call param on the
3998 stack, and block moves may need to be treated specially. */
4000 rtx
4001 store_expr (tree exp, rtx target, int want_value)
4003 rtx temp;
4004 rtx alt_rtl = NULL_RTX;
4005 int dont_return_target = 0;
4006 int dont_store_target = 0;
4008 if (VOID_TYPE_P (TREE_TYPE (exp)))
4010 /* C++ can generate ?: expressions with a throw expression in one
4011 branch and an rvalue in the other. Here, we resolve attempts to
4012 store the throw expression's nonexistent result. */
4013 if (want_value)
4014 abort ();
4015 expand_expr (exp, const0_rtx, VOIDmode, 0);
4016 return NULL_RTX;
4018 if (TREE_CODE (exp) == COMPOUND_EXPR)
4020 /* Perform first part of compound expression, then assign from second
4021 part. */
4022 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4023 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4024 emit_queue ();
4025 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
4027 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4029 /* For conditional expression, get safe form of the target. Then
4030 test the condition, doing the appropriate assignment on either
4031 side. This avoids the creation of unnecessary temporaries.
4032 For non-BLKmode, it is more efficient not to do this. */
4034 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4036 emit_queue ();
4037 target = protect_from_queue (target, 1);
4039 do_pending_stack_adjust ();
4040 NO_DEFER_POP;
4041 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4042 start_cleanup_deferral ();
4043 store_expr (TREE_OPERAND (exp, 1), target, want_value & 2);
4044 end_cleanup_deferral ();
4045 emit_queue ();
4046 emit_jump_insn (gen_jump (lab2));
4047 emit_barrier ();
4048 emit_label (lab1);
4049 start_cleanup_deferral ();
4050 store_expr (TREE_OPERAND (exp, 2), target, want_value & 2);
4051 end_cleanup_deferral ();
4052 emit_queue ();
4053 emit_label (lab2);
4054 OK_DEFER_POP;
4056 return want_value & 1 ? target : NULL_RTX;
4058 else if (queued_subexp_p (target))
4059 /* If target contains a postincrement, let's not risk
4060 using it as the place to generate the rhs. */
4062 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
4064 /* Expand EXP into a new pseudo. */
4065 temp = gen_reg_rtx (GET_MODE (target));
4066 temp = expand_expr (exp, temp, GET_MODE (target),
4067 (want_value & 2
4068 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4070 else
4071 temp = expand_expr (exp, NULL_RTX, GET_MODE (target),
4072 (want_value & 2
4073 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4075 /* If target is volatile, ANSI requires accessing the value
4076 *from* the target, if it is accessed. So make that happen.
4077 In no case return the target itself. */
4078 if (! MEM_VOLATILE_P (target) && (want_value & 1) != 0)
4079 dont_return_target = 1;
4081 else if ((want_value & 1) != 0
4082 && GET_CODE (target) == MEM
4083 && ! MEM_VOLATILE_P (target)
4084 && GET_MODE (target) != BLKmode)
4085 /* If target is in memory and caller wants value in a register instead,
4086 arrange that. Pass TARGET as target for expand_expr so that,
4087 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4088 We know expand_expr will not use the target in that case.
4089 Don't do this if TARGET is volatile because we are supposed
4090 to write it and then read it. */
4092 temp = expand_expr (exp, target, GET_MODE (target),
4093 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4094 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
4096 /* If TEMP is already in the desired TARGET, only copy it from
4097 memory and don't store it there again. */
4098 if (temp == target
4099 || (rtx_equal_p (temp, target)
4100 && ! side_effects_p (temp) && ! side_effects_p (target)))
4101 dont_store_target = 1;
4102 temp = copy_to_reg (temp);
4104 dont_return_target = 1;
4106 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4107 /* If this is a scalar in a register that is stored in a wider mode
4108 than the declared mode, compute the result into its declared mode
4109 and then convert to the wider mode. Our value is the computed
4110 expression. */
4112 rtx inner_target = 0;
4114 /* If we don't want a value, we can do the conversion inside EXP,
4115 which will often result in some optimizations. Do the conversion
4116 in two steps: first change the signedness, if needed, then
4117 the extend. But don't do this if the type of EXP is a subtype
4118 of something else since then the conversion might involve
4119 more than just converting modes. */
4120 if ((want_value & 1) == 0
4121 && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4122 && TREE_TYPE (TREE_TYPE (exp)) == 0)
4124 if (TREE_UNSIGNED (TREE_TYPE (exp))
4125 != SUBREG_PROMOTED_UNSIGNED_P (target))
4126 exp = convert
4127 ((*lang_hooks.types.signed_or_unsigned_type)
4128 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
4130 exp = convert ((*lang_hooks.types.type_for_mode)
4131 (GET_MODE (SUBREG_REG (target)),
4132 SUBREG_PROMOTED_UNSIGNED_P (target)),
4133 exp);
4135 inner_target = SUBREG_REG (target);
4138 temp = expand_expr (exp, inner_target, VOIDmode,
4139 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4141 /* If TEMP is a MEM and we want a result value, make the access
4142 now so it gets done only once. Strictly speaking, this is
4143 only necessary if the MEM is volatile, or if the address
4144 overlaps TARGET. But not performing the load twice also
4145 reduces the amount of rtl we generate and then have to CSE. */
4146 if (GET_CODE (temp) == MEM && (want_value & 1) != 0)
4147 temp = copy_to_reg (temp);
4149 /* If TEMP is a VOIDmode constant, use convert_modes to make
4150 sure that we properly convert it. */
4151 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4153 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4154 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4155 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4156 GET_MODE (target), temp,
4157 SUBREG_PROMOTED_UNSIGNED_P (target));
4160 convert_move (SUBREG_REG (target), temp,
4161 SUBREG_PROMOTED_UNSIGNED_P (target));
4163 /* If we promoted a constant, change the mode back down to match
4164 target. Otherwise, the caller might get confused by a result whose
4165 mode is larger than expected. */
4167 if ((want_value & 1) != 0 && GET_MODE (temp) != GET_MODE (target))
4169 if (GET_MODE (temp) != VOIDmode)
4171 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4172 SUBREG_PROMOTED_VAR_P (temp) = 1;
4173 SUBREG_PROMOTED_UNSIGNED_SET (temp,
4174 SUBREG_PROMOTED_UNSIGNED_P (target));
4176 else
4177 temp = convert_modes (GET_MODE (target),
4178 GET_MODE (SUBREG_REG (target)),
4179 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4182 return want_value & 1 ? temp : NULL_RTX;
4184 else
4186 temp = expand_expr_real (exp, target, GET_MODE (target),
4187 (want_value & 2
4188 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
4189 &alt_rtl);
4190 /* Return TARGET if it's a specified hardware register.
4191 If TARGET is a volatile mem ref, either return TARGET
4192 or return a reg copied *from* TARGET; ANSI requires this.
4194 Otherwise, if TEMP is not TARGET, return TEMP
4195 if it is constant (for efficiency),
4196 or if we really want the correct value. */
4197 if (!(target && GET_CODE (target) == REG
4198 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4199 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4200 && ! rtx_equal_p (temp, target)
4201 && (CONSTANT_P (temp) || (want_value & 1) != 0))
4202 dont_return_target = 1;
4205 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4206 the same as that of TARGET, adjust the constant. This is needed, for
4207 example, in case it is a CONST_DOUBLE and we want only a word-sized
4208 value. */
4209 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4210 && TREE_CODE (exp) != ERROR_MARK
4211 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4212 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4213 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4215 /* If value was not generated in the target, store it there.
4216 Convert the value to TARGET's type first if necessary.
4217 If TEMP and TARGET compare equal according to rtx_equal_p, but
4218 one or both of them are volatile memory refs, we have to distinguish
4219 two cases:
4220 - expand_expr has used TARGET. In this case, we must not generate
4221 another copy. This can be detected by TARGET being equal according
4222 to == .
4223 - expand_expr has not used TARGET - that means that the source just
4224 happens to have the same RTX form. Since temp will have been created
4225 by expand_expr, it will compare unequal according to == .
4226 We must generate a copy in this case, to reach the correct number
4227 of volatile memory references. */
4229 if ((! rtx_equal_p (temp, target)
4230 || (temp != target && (side_effects_p (temp)
4231 || side_effects_p (target))))
4232 && TREE_CODE (exp) != ERROR_MARK
4233 && ! dont_store_target
4234 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4235 but TARGET is not a valid memory reference, TEMP will differ
4236 from TARGET although it is really the same location. */
4237 && !(alt_rtl && rtx_equal_p (alt_rtl, target))
4238 /* If there's nothing to copy, don't bother. Don't call expr_size
4239 unless necessary, because some front ends' (C++) expr_size hook
4240 aborts on objects that are not supposed to be bit-copied or
4241 bit-initialized. */
4242 && expr_size (exp) != const0_rtx)
4244 target = protect_from_queue (target, 1);
4245 if (GET_MODE (temp) != GET_MODE (target)
4246 && GET_MODE (temp) != VOIDmode)
4248 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4249 if (dont_return_target)
4251 /* In this case, we will return TEMP,
4252 so make sure it has the proper mode.
4253 But don't forget to store the value into TARGET. */
4254 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4255 emit_move_insn (target, temp);
4257 else
4258 convert_move (target, temp, unsignedp);
4261 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4263 /* Handle copying a string constant into an array. The string
4264 constant may be shorter than the array. So copy just the string's
4265 actual length, and clear the rest. First get the size of the data
4266 type of the string, which is actually the size of the target. */
4267 rtx size = expr_size (exp);
4269 if (GET_CODE (size) == CONST_INT
4270 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4271 emit_block_move (target, temp, size,
4272 (want_value & 2
4273 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4274 else
4276 /* Compute the size of the data to copy from the string. */
4277 tree copy_size
4278 = size_binop (MIN_EXPR,
4279 make_tree (sizetype, size),
4280 size_int (TREE_STRING_LENGTH (exp)));
4281 rtx copy_size_rtx
4282 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4283 (want_value & 2
4284 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4285 rtx label = 0;
4287 /* Copy that much. */
4288 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4289 TREE_UNSIGNED (sizetype));
4290 emit_block_move (target, temp, copy_size_rtx,
4291 (want_value & 2
4292 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4294 /* Figure out how much is left in TARGET that we have to clear.
4295 Do all calculations in ptr_mode. */
4296 if (GET_CODE (copy_size_rtx) == CONST_INT)
4298 size = plus_constant (size, -INTVAL (copy_size_rtx));
4299 target = adjust_address (target, BLKmode,
4300 INTVAL (copy_size_rtx));
4302 else
4304 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4305 copy_size_rtx, NULL_RTX, 0,
4306 OPTAB_LIB_WIDEN);
4308 #ifdef POINTERS_EXTEND_UNSIGNED
4309 if (GET_MODE (copy_size_rtx) != Pmode)
4310 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4311 TREE_UNSIGNED (sizetype));
4312 #endif
4314 target = offset_address (target, copy_size_rtx,
4315 highest_pow2_factor (copy_size));
4316 label = gen_label_rtx ();
4317 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4318 GET_MODE (size), 0, label);
4321 if (size != const0_rtx)
4322 clear_storage (target, size);
4324 if (label)
4325 emit_label (label);
4328 /* Handle calls that return values in multiple non-contiguous locations.
4329 The Irix 6 ABI has examples of this. */
4330 else if (GET_CODE (target) == PARALLEL)
4331 emit_group_load (target, temp, TREE_TYPE (exp),
4332 int_size_in_bytes (TREE_TYPE (exp)));
4333 else if (GET_MODE (temp) == BLKmode)
4334 emit_block_move (target, temp, expr_size (exp),
4335 (want_value & 2
4336 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4337 else
4339 temp = force_operand (temp, target);
4340 if (temp != target)
4341 emit_move_insn (target, temp);
4345 /* If we don't want a value, return NULL_RTX. */
4346 if ((want_value & 1) == 0)
4347 return NULL_RTX;
4349 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4350 ??? The latter test doesn't seem to make sense. */
4351 else if (dont_return_target && GET_CODE (temp) != MEM)
4352 return temp;
4354 /* Return TARGET itself if it is a hard register. */
4355 else if ((want_value & 1) != 0
4356 && GET_MODE (target) != BLKmode
4357 && ! (GET_CODE (target) == REG
4358 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4359 return copy_to_reg (target);
4361 else
4362 return target;
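/* Illustrative note: WANT_VALUE is used as a bit mask rather than a plain
   boolean.  Two representative calls, assuming an ordinary assignment and a
   store into an outgoing call argument respectively:

     result = store_expr (rhs, target, 1);    the 1 bit asks for an rtx back
     store_expr (arg, stack_slot, 2);         the 2 bit flags a stack parm

   The 2 bit is what selects EXPAND_STACK_PARM and BLOCK_OP_CALL_PARM in the
   code above; with bit 1 clear the function returns NULL_RTX.  */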
4365 /* Return 1 if EXP just contains zeros. FIXME merge with initializer_zerop. */
4367 static int
4368 is_zeros_p (tree exp)
4370 tree elt;
4372 switch (TREE_CODE (exp))
4374 case CONVERT_EXPR:
4375 case NOP_EXPR:
4376 case NON_LVALUE_EXPR:
4377 case VIEW_CONVERT_EXPR:
4378 return is_zeros_p (TREE_OPERAND (exp, 0));
4380 case INTEGER_CST:
4381 return integer_zerop (exp);
4383 case COMPLEX_CST:
4384 return
4385 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4387 case REAL_CST:
4388 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4390 case VECTOR_CST:
4391 for (elt = TREE_VECTOR_CST_ELTS (exp); elt;
4392 elt = TREE_CHAIN (elt))
4393 if (!is_zeros_p (TREE_VALUE (elt)))
4394 return 0;
4396 return 1;
4398 case CONSTRUCTOR:
4399 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4400 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4401 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4402 if (! is_zeros_p (TREE_VALUE (elt)))
4403 return 0;
4405 return 1;
4407 default:
4408 return 0;
4412 /* Return 1 if EXP contains mostly (3/4) zeros. */
4414 static int
4415 mostly_zeros_p (tree exp)
4417 if (TREE_CODE (exp) == CONSTRUCTOR)
4419 int elts = 0, zeros = 0;
4420 tree elt = CONSTRUCTOR_ELTS (exp);
4421 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4423 /* If there are no ranges of true bits, it is all zero. */
4424 return elt == NULL_TREE;
4426 for (; elt; elt = TREE_CHAIN (elt))
4428 /* We do not handle the case where the index is a RANGE_EXPR,
4429 so the statistic will be somewhat inaccurate.
4430 We do make a more accurate count in store_constructor itself,
4431 so since this function is only used for nested array elements,
4432 this should be close enough. */
4433 if (mostly_zeros_p (TREE_VALUE (elt)))
4434 zeros++;
4435 elts++;
4438 return 4 * zeros >= 3 * elts;
4441 return is_zeros_p (exp);
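/* Worked example of the heuristic above: "4 * zeros >= 3 * elts" is the
   integer form of zeros / elts >= 3/4.  A constructor with 16 elements of
   which 12 are zero gives 4 * 12 = 48 >= 3 * 16 = 48, so the whole object is
   cleared first and only the four nonzero elements are stored individually.  */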
4444 /* Helper function for store_constructor.
4445 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4446 TYPE is the type of the CONSTRUCTOR, not the element type.
4447 CLEARED is as for store_constructor.
4448 ALIAS_SET is the alias set to use for any stores.
4450 This provides a recursive shortcut back to store_constructor when it isn't
4451 necessary to go through store_field. This is so that we can pass through
4452 the cleared field to let store_constructor know that we may not have to
4453 clear a substructure if the outer structure has already been cleared. */
4455 static void
4456 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
4457 HOST_WIDE_INT bitpos, enum machine_mode mode,
4458 tree exp, tree type, int cleared, int alias_set)
4460 if (TREE_CODE (exp) == CONSTRUCTOR
4461 && bitpos % BITS_PER_UNIT == 0
4462 /* If we have a nonzero bitpos for a register target, then we just
4463 let store_field do the bitfield handling. This is unlikely to
4464 generate unnecessary clear instructions anyway. */
4465 && (bitpos == 0 || GET_CODE (target) == MEM))
4467 if (GET_CODE (target) == MEM)
4468 target
4469 = adjust_address (target,
4470 GET_MODE (target) == BLKmode
4471 || 0 != (bitpos
4472 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4473 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4476 /* Update the alias set, if required. */
4477 if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
4478 && MEM_ALIAS_SET (target) != 0)
4480 target = copy_rtx (target);
4481 set_mem_alias_set (target, alias_set);
4484 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4486 else
4487 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
4488 alias_set);
4491 /* Store the value of constructor EXP into the rtx TARGET.
4492 TARGET is either a REG or a MEM; we know it cannot conflict, since
4493 safe_from_p has been called.
4494 CLEARED is true if TARGET is known to have been zero'd.
4495 SIZE is the number of bytes of TARGET we are allowed to modify: this
4496 may not be the same as the size of EXP if we are assigning to a field
4497 which has been packed to exclude padding bits. */
4499 static void
4500 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
4502 tree type = TREE_TYPE (exp);
4503 #ifdef WORD_REGISTER_OPERATIONS
4504 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4505 #endif
4507 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4508 || TREE_CODE (type) == QUAL_UNION_TYPE)
4510 tree elt;
4512 /* If size is zero or the target is already cleared, do nothing. */
4513 if (size == 0 || cleared)
4514 cleared = 1;
4515 /* We either clear the aggregate or indicate the value is dead. */
4516 else if ((TREE_CODE (type) == UNION_TYPE
4517 || TREE_CODE (type) == QUAL_UNION_TYPE)
4518 && ! CONSTRUCTOR_ELTS (exp))
4519 /* If the constructor is empty, clear the union. */
4521 clear_storage (target, expr_size (exp));
4522 cleared = 1;
4525 /* If we are building a static constructor into a register,
4526 set the initial value as zero so we can fold the value into
4527 a constant. But if more than one register is involved,
4528 this probably loses. */
4529 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4530 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4532 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4533 cleared = 1;
4536 /* If the constructor has fewer fields than the structure
4537 or if we are initializing the structure to mostly zeros,
4538 clear the whole structure first. Don't do this if TARGET is a
4539 register whose mode size isn't equal to SIZE since clear_storage
4540 can't handle this case. */
4541 else if (((list_length (CONSTRUCTOR_ELTS (exp)) != fields_length (type))
4542 || mostly_zeros_p (exp))
4543 && (GET_CODE (target) != REG
4544 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4545 == size)))
4547 rtx xtarget = target;
4549 if (readonly_fields_p (type))
4551 xtarget = copy_rtx (xtarget);
4552 RTX_UNCHANGING_P (xtarget) = 1;
4555 clear_storage (xtarget, GEN_INT (size));
4556 cleared = 1;
4559 if (! cleared)
4560 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4562 /* Store each element of the constructor into
4563 the corresponding field of TARGET. */
4565 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4567 tree field = TREE_PURPOSE (elt);
4568 tree value = TREE_VALUE (elt);
4569 enum machine_mode mode;
4570 HOST_WIDE_INT bitsize;
4571 HOST_WIDE_INT bitpos = 0;
4572 tree offset;
4573 rtx to_rtx = target;
4575 /* Just ignore missing fields.
4576 We cleared the whole structure, above,
4577 if any fields are missing. */
4578 if (field == 0)
4579 continue;
4581 if (cleared && is_zeros_p (value))
4582 continue;
4584 if (host_integerp (DECL_SIZE (field), 1))
4585 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4586 else
4587 bitsize = -1;
4589 mode = DECL_MODE (field);
4590 if (DECL_BIT_FIELD (field))
4591 mode = VOIDmode;
4593 offset = DECL_FIELD_OFFSET (field);
4594 if (host_integerp (offset, 0)
4595 && host_integerp (bit_position (field), 0))
4597 bitpos = int_bit_position (field);
4598 offset = 0;
4600 else
4601 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4603 if (offset)
4605 rtx offset_rtx;
4607 if (CONTAINS_PLACEHOLDER_P (offset))
4608 offset = build (WITH_RECORD_EXPR, sizetype,
4609 offset, make_tree (TREE_TYPE (exp), target));
4611 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4612 if (GET_CODE (to_rtx) != MEM)
4613 abort ();
4615 #ifdef POINTERS_EXTEND_UNSIGNED
4616 if (GET_MODE (offset_rtx) != Pmode)
4617 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4618 #else
4619 if (GET_MODE (offset_rtx) != ptr_mode)
4620 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4621 #endif
4623 to_rtx = offset_address (to_rtx, offset_rtx,
4624 highest_pow2_factor (offset));
4627 if (TREE_READONLY (field))
4629 if (GET_CODE (to_rtx) == MEM)
4630 to_rtx = copy_rtx (to_rtx);
4632 RTX_UNCHANGING_P (to_rtx) = 1;
4635 #ifdef WORD_REGISTER_OPERATIONS
4636 /* If this initializes a field that is smaller than a word, at the
4637 start of a word, try to widen it to a full word.
4638 This special case allows us to output C++ member function
4639 initializations in a form that the optimizers can understand. */
4640 if (GET_CODE (target) == REG
4641 && bitsize < BITS_PER_WORD
4642 && bitpos % BITS_PER_WORD == 0
4643 && GET_MODE_CLASS (mode) == MODE_INT
4644 && TREE_CODE (value) == INTEGER_CST
4645 && exp_size >= 0
4646 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4648 tree type = TREE_TYPE (value);
4650 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4652 type = (*lang_hooks.types.type_for_size)
4653 (BITS_PER_WORD, TREE_UNSIGNED (type));
4654 value = convert (type, value);
4657 if (BYTES_BIG_ENDIAN)
4658 value
4659 = fold (build (LSHIFT_EXPR, type, value,
4660 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4661 bitsize = BITS_PER_WORD;
4662 mode = word_mode;
4664 #endif
4666 if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4667 && DECL_NONADDRESSABLE_P (field))
4669 to_rtx = copy_rtx (to_rtx);
4670 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4673 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4674 value, type, cleared,
4675 get_alias_set (TREE_TYPE (field)));
4678 else if (TREE_CODE (type) == ARRAY_TYPE
4679 || TREE_CODE (type) == VECTOR_TYPE)
4681 tree elt;
4682 int i;
4683 int need_to_clear;
4684 tree domain = TYPE_DOMAIN (type);
4685 tree elttype = TREE_TYPE (type);
4686 int const_bounds_p;
4687 HOST_WIDE_INT minelt = 0;
4688 HOST_WIDE_INT maxelt = 0;
4689 int icode = 0;
4690 rtx *vector = NULL;
4691 int elt_size = 0;
4692 unsigned n_elts = 0;
4694 /* Vectors are like arrays, but the domain is stored via an array
4695 type indirectly. */
4696 if (TREE_CODE (type) == VECTOR_TYPE)
4698 /* Note that although TYPE_DEBUG_REPRESENTATION_TYPE uses
4699 the same field as TYPE_DOMAIN, we are not guaranteed that
4700 it always will. */
4701 domain = TYPE_DEBUG_REPRESENTATION_TYPE (type);
4702 domain = TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain)));
4703 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
4705 enum machine_mode mode = GET_MODE (target);
4707 icode = (int) vec_init_optab->handlers[mode].insn_code;
4708 if (icode != CODE_FOR_nothing)
4710 unsigned int i;
4712 elt_size = GET_MODE_SIZE (GET_MODE_INNER (mode));
4713 n_elts = (GET_MODE_SIZE (mode) / elt_size);
4714 vector = alloca (n_elts);
4715 for (i = 0; i < n_elts; i++)
4716 vector [i] = CONST0_RTX (GET_MODE_INNER (mode));
4721 const_bounds_p = (TYPE_MIN_VALUE (domain)
4722 && TYPE_MAX_VALUE (domain)
4723 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4724 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4726 /* If we have constant bounds for the range of the type, get them. */
4727 if (const_bounds_p)
4729 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4730 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4733 /* If the constructor has fewer elements than the array,
4734 clear the whole array first. Similarly if this is
4735 a static constructor of a non-BLKmode object. */
4736 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4737 need_to_clear = 1;
4738 else
4740 HOST_WIDE_INT count = 0, zero_count = 0;
4741 need_to_clear = ! const_bounds_p;
4743 /* This loop is a more accurate version of the loop in
4744 mostly_zeros_p (it handles RANGE_EXPR in an index).
4745 It is also needed to check for missing elements. */
4746 for (elt = CONSTRUCTOR_ELTS (exp);
4747 elt != NULL_TREE && ! need_to_clear;
4748 elt = TREE_CHAIN (elt))
4750 tree index = TREE_PURPOSE (elt);
4751 HOST_WIDE_INT this_node_count;
4753 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4755 tree lo_index = TREE_OPERAND (index, 0);
4756 tree hi_index = TREE_OPERAND (index, 1);
4758 if (! host_integerp (lo_index, 1)
4759 || ! host_integerp (hi_index, 1))
4761 need_to_clear = 1;
4762 break;
4765 this_node_count = (tree_low_cst (hi_index, 1)
4766 - tree_low_cst (lo_index, 1) + 1);
4768 else
4769 this_node_count = 1;
4771 count += this_node_count;
4772 if (mostly_zeros_p (TREE_VALUE (elt)))
4773 zero_count += this_node_count;
4776 /* Clear the entire array first if there are any missing elements,
4777 or if the incidence of zero elements is >= 75%. */
4778 if (! need_to_clear
4779 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
4780 need_to_clear = 1;
4783 if (need_to_clear && size > 0 && !vector)
4785 if (! cleared)
4787 if (REG_P (target))
4788 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4789 else
4790 clear_storage (target, GEN_INT (size));
4792 cleared = 1;
4794 else if (REG_P (target))
4795 /* Inform later passes that the old value is dead. */
4796 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4798 /* Store each element of the constructor into
4799 the corresponding element of TARGET, determined
4800 by counting the elements. */
4801 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4802 elt;
4803 elt = TREE_CHAIN (elt), i++)
4805 enum machine_mode mode;
4806 HOST_WIDE_INT bitsize;
4807 HOST_WIDE_INT bitpos;
4808 int unsignedp;
4809 tree value = TREE_VALUE (elt);
4810 tree index = TREE_PURPOSE (elt);
4811 rtx xtarget = target;
4813 if (cleared && is_zeros_p (value))
4814 continue;
4816 unsignedp = TREE_UNSIGNED (elttype);
4817 mode = TYPE_MODE (elttype);
4818 if (mode == BLKmode)
4819 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4820 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4821 : -1);
4822 else
4823 bitsize = GET_MODE_BITSIZE (mode);
4825 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4827 tree lo_index = TREE_OPERAND (index, 0);
4828 tree hi_index = TREE_OPERAND (index, 1);
4829 rtx index_r, pos_rtx, loop_end;
4830 struct nesting *loop;
4831 HOST_WIDE_INT lo, hi, count;
4832 tree position;
4834 if (vector)
4835 abort ();
4837 /* If the range is constant and "small", unroll the loop. */
4838 if (const_bounds_p
4839 && host_integerp (lo_index, 0)
4840 && host_integerp (hi_index, 0)
4841 && (lo = tree_low_cst (lo_index, 0),
4842 hi = tree_low_cst (hi_index, 0),
4843 count = hi - lo + 1,
4844 (GET_CODE (target) != MEM
4845 || count <= 2
4846 || (host_integerp (TYPE_SIZE (elttype), 1)
4847 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4848 <= 40 * 8)))))
4850 lo -= minelt; hi -= minelt;
4851 for (; lo <= hi; lo++)
4853 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4855 if (GET_CODE (target) == MEM
4856 && !MEM_KEEP_ALIAS_SET_P (target)
4857 && TREE_CODE (type) == ARRAY_TYPE
4858 && TYPE_NONALIASED_COMPONENT (type))
4860 target = copy_rtx (target);
4861 MEM_KEEP_ALIAS_SET_P (target) = 1;
4864 store_constructor_field
4865 (target, bitsize, bitpos, mode, value, type, cleared,
4866 get_alias_set (elttype));
4869 else
4871 expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4872 loop_end = gen_label_rtx ();
4874 unsignedp = TREE_UNSIGNED (domain);
4876 index = build_decl (VAR_DECL, NULL_TREE, domain);
4878 index_r
4879 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4880 &unsignedp, 0));
4881 SET_DECL_RTL (index, index_r);
4882 if (TREE_CODE (value) == SAVE_EXPR
4883 && SAVE_EXPR_RTL (value) == 0)
4885 /* Make sure value gets expanded once before the
4886 loop. */
4887 expand_expr (value, const0_rtx, VOIDmode, 0);
4888 emit_queue ();
4890 store_expr (lo_index, index_r, 0);
4891 loop = expand_start_loop (0);
4893 /* Assign value to element index. */
4894 position
4895 = convert (ssizetype,
4896 fold (build (MINUS_EXPR, TREE_TYPE (index),
4897 index, TYPE_MIN_VALUE (domain))));
4898 position = size_binop (MULT_EXPR, position,
4899 convert (ssizetype,
4900 TYPE_SIZE_UNIT (elttype)));
4902 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4903 xtarget = offset_address (target, pos_rtx,
4904 highest_pow2_factor (position));
4905 xtarget = adjust_address (xtarget, mode, 0);
4906 if (TREE_CODE (value) == CONSTRUCTOR)
4907 store_constructor (value, xtarget, cleared,
4908 bitsize / BITS_PER_UNIT);
4909 else
4910 store_expr (value, xtarget, 0);
4912 expand_exit_loop_if_false (loop,
4913 build (LT_EXPR, integer_type_node,
4914 index, hi_index));
4916 expand_increment (build (PREINCREMENT_EXPR,
4917 TREE_TYPE (index),
4918 index, integer_one_node), 0, 0);
4919 expand_end_loop ();
4920 emit_label (loop_end);
4923 else if ((index != 0 && ! host_integerp (index, 0))
4924 || ! host_integerp (TYPE_SIZE (elttype), 1))
4926 tree position;
4928 if (vector)
4929 abort ();
4931 if (index == 0)
4932 index = ssize_int (1);
4934 if (minelt)
4935 index = convert (ssizetype,
4936 fold (build (MINUS_EXPR, index,
4937 TYPE_MIN_VALUE (domain))));
4939 position = size_binop (MULT_EXPR, index,
4940 convert (ssizetype,
4941 TYPE_SIZE_UNIT (elttype)));
4942 xtarget = offset_address (target,
4943 expand_expr (position, 0, VOIDmode, 0),
4944 highest_pow2_factor (position));
4945 xtarget = adjust_address (xtarget, mode, 0);
4946 store_expr (value, xtarget, 0);
4948 else if (vector)
4950 int pos;
4952 if (index != 0)
4953 pos = tree_low_cst (index, 0) - minelt;
4954 else
4955 pos = i;
4956 vector[pos] = expand_expr (value, NULL_RTX, VOIDmode, 0);
4958 else
4960 if (index != 0)
4961 bitpos = ((tree_low_cst (index, 0) - minelt)
4962 * tree_low_cst (TYPE_SIZE (elttype), 1));
4963 else
4964 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4966 if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
4967 && TREE_CODE (type) == ARRAY_TYPE
4968 && TYPE_NONALIASED_COMPONENT (type))
4970 target = copy_rtx (target);
4971 MEM_KEEP_ALIAS_SET_P (target) = 1;
4973 store_constructor_field (target, bitsize, bitpos, mode, value,
4974 type, cleared, get_alias_set (elttype));
4977 if (vector)
4979 emit_insn (GEN_FCN (icode) (target,
4980 gen_rtx_PARALLEL (GET_MODE (target),
4981 gen_rtvec_v (n_elts, vector))));
4985 /* Set constructor assignments. */
4986 else if (TREE_CODE (type) == SET_TYPE)
4988 tree elt = CONSTRUCTOR_ELTS (exp);
4989 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
4990 tree domain = TYPE_DOMAIN (type);
4991 tree domain_min, domain_max, bitlength;
4993 /* The default implementation strategy is to extract the constant
4994 parts of the constructor, use that to initialize the target,
4995 and then "or" in whatever non-constant ranges we need in addition.
4997 If a large set is all zero or all ones, it is
4998 probably better to set it using memset (if available) or bzero.
4999 Also, if a large set has just a single range, it may also be
5000 better to first clear the whole set (using
5001 bzero/memset), and then set the bits we want. */
5003 /* Check for all zeros. */
5004 if (elt == NULL_TREE && size > 0)
5006 if (!cleared)
5007 clear_storage (target, GEN_INT (size));
5008 return;
5011 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
5012 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
5013 bitlength = size_binop (PLUS_EXPR,
5014 size_diffop (domain_max, domain_min),
5015 ssize_int (1));
5017 nbits = tree_low_cst (bitlength, 1);
5019 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
5020 are "complicated" (more than one range), initialize (the
5021 constant parts) by copying from a constant. */
5022 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
5023 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
5025 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
5026 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
5027 char *bit_buffer = alloca (nbits);
5028 HOST_WIDE_INT word = 0;
5029 unsigned int bit_pos = 0;
5030 unsigned int ibit = 0;
5031 unsigned int offset = 0; /* In bytes from beginning of set. */
5033 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
5034 for (;;)
5036 if (bit_buffer[ibit])
5038 if (BYTES_BIG_ENDIAN)
5039 word |= (1 << (set_word_size - 1 - bit_pos));
5040 else
5041 word |= 1 << bit_pos;
5044 bit_pos++; ibit++;
5045 if (bit_pos >= set_word_size || ibit == nbits)
5047 if (word != 0 || ! cleared)
5049 rtx datum = GEN_INT (word);
5050 rtx to_rtx;
5052 /* The assumption here is that it is safe to use
5053 XEXP if the set is multi-word, but not if
5054 it's single-word. */
5055 if (GET_CODE (target) == MEM)
5056 to_rtx = adjust_address (target, mode, offset);
5057 else if (offset == 0)
5058 to_rtx = target;
5059 else
5060 abort ();
5061 emit_move_insn (to_rtx, datum);
5064 if (ibit == nbits)
5065 break;
5066 word = 0;
5067 bit_pos = 0;
5068 offset += set_word_size / BITS_PER_UNIT;
5072 else if (!cleared)
5073 /* Don't bother clearing storage if the set is all ones. */
5074 if (TREE_CHAIN (elt) != NULL_TREE
5075 || (TREE_PURPOSE (elt) == NULL_TREE
5076 ? nbits != 1
5077 : ( ! host_integerp (TREE_VALUE (elt), 0)
5078 || ! host_integerp (TREE_PURPOSE (elt), 0)
5079 || (tree_low_cst (TREE_VALUE (elt), 0)
5080 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5081 != (HOST_WIDE_INT) nbits))))
5082 clear_storage (target, expr_size (exp));
5084 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
5086 /* Start of range of element or NULL. */
5087 tree startbit = TREE_PURPOSE (elt);
5088 /* End of range of element, or element value. */
5089 tree endbit = TREE_VALUE (elt);
5090 HOST_WIDE_INT startb, endb;
5091 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
5093 bitlength_rtx = expand_expr (bitlength,
5094 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
5096 /* Handle non-range tuple element like [ expr ]. */
5097 if (startbit == NULL_TREE)
5099 startbit = save_expr (endbit);
5100 endbit = startbit;
5103 startbit = convert (sizetype, startbit);
5104 endbit = convert (sizetype, endbit);
5105 if (! integer_zerop (domain_min))
5107 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5108 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5110 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
5111 EXPAND_CONST_ADDRESS);
5112 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
5113 EXPAND_CONST_ADDRESS);
5115 if (REG_P (target))
5117 targetx
5118 = assign_temp
5119 ((build_qualified_type ((*lang_hooks.types.type_for_mode)
5120 (GET_MODE (target), 0),
5121 TYPE_QUAL_CONST)),
5122 0, 1, 1);
5123 emit_move_insn (targetx, target);
5126 else if (GET_CODE (target) == MEM)
5127 targetx = target;
5128 else
5129 abort ();
5131 /* Optimization: If startbit and endbit are constants divisible
5132 by BITS_PER_UNIT, call memset instead. */
5133 if (TARGET_MEM_FUNCTIONS
5134 && TREE_CODE (startbit) == INTEGER_CST
5135 && TREE_CODE (endbit) == INTEGER_CST
5136 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
5137 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
5139 emit_library_call (memset_libfunc, LCT_NORMAL,
5140 VOIDmode, 3,
5141 plus_constant (XEXP (targetx, 0),
5142 startb / BITS_PER_UNIT),
5143 Pmode,
5144 constm1_rtx, TYPE_MODE (integer_type_node),
5145 GEN_INT ((endb - startb) / BITS_PER_UNIT),
5146 TYPE_MODE (sizetype));
5148 else
5149 emit_library_call (setbits_libfunc, LCT_NORMAL,
5150 VOIDmode, 4, XEXP (targetx, 0),
5151 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5152 startbit_rtx, TYPE_MODE (sizetype),
5153 endbit_rtx, TYPE_MODE (sizetype));
5155 if (REG_P (target))
5156 emit_move_insn (target, targetx);
5160 else
5161 abort ();
5164 /* Store the value of EXP (an expression tree)
5165 into a subfield of TARGET which has mode MODE and occupies
5166 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5167 If MODE is VOIDmode, it means that we are storing into a bit-field.
5169 If VALUE_MODE is VOIDmode, return nothing in particular.
5170 UNSIGNEDP is not used in this case.
5172 Otherwise, return an rtx for the value stored. This rtx
5173 has mode VALUE_MODE if that is convenient to do.
5174 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5176 TYPE is the type of the underlying object,
5178 ALIAS_SET is the alias set for the destination. This value will
5179 (in general) be different from that for TARGET, since TARGET is a
5180 reference to the containing structure. */
5182 static rtx
5183 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5184 enum machine_mode mode, tree exp, enum machine_mode value_mode,
5185 int unsignedp, tree type, int alias_set)
5187 HOST_WIDE_INT width_mask = 0;
5189 if (TREE_CODE (exp) == ERROR_MARK)
5190 return const0_rtx;
5192 /* If we have nothing to store, do nothing unless the expression has
5193 side-effects. */
5194 if (bitsize == 0)
5195 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5196 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5197 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5199 /* If we are storing into an unaligned field of an aligned union that is
5200 in a register, we may have the mode of TARGET being an integer mode but
5201 MODE == BLKmode. In that case, get an aligned object whose size and
5202 alignment are the same as TARGET and store TARGET into it (we can avoid
5203 the store if the field being stored is the entire width of TARGET). Then
5204 call ourselves recursively to store the field into a BLKmode version of
5205 that object. Finally, load from the object into TARGET. This is not
5206 very efficient in general, but should only be slightly more expensive
5207 than the otherwise-required unaligned accesses. Perhaps this can be
5208 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5209 twice, once with emit_move_insn and once via store_field. */
5211 if (mode == BLKmode
5212 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5214 rtx object = assign_temp (type, 0, 1, 1);
5215 rtx blk_object = adjust_address (object, BLKmode, 0);
5217 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5218 emit_move_insn (object, target);
5220 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
5221 alias_set);
5223 emit_move_insn (target, object);
5225 /* We want to return the BLKmode version of the data. */
5226 return blk_object;
5229 if (GET_CODE (target) == CONCAT)
5231 /* We're storing into a struct containing a single __complex. */
5233 if (bitpos != 0)
5234 abort ();
5235 return store_expr (exp, target, 0);
5238 /* If the structure is in a register or if the component
5239 is a bit field, we cannot use addressing to access it.
5240 Use bit-field techniques or SUBREG to store in it. */
5242 if (mode == VOIDmode
5243 || (mode != BLKmode && ! direct_store[(int) mode]
5244 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5245 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5246 || GET_CODE (target) == REG
5247 || GET_CODE (target) == SUBREG
5248 /* If the field isn't aligned enough to store as an ordinary memref,
5249 store it as a bit field. */
5250 || (mode != BLKmode
5251 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5252 || bitpos % GET_MODE_ALIGNMENT (mode))
5253 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5254 || (bitpos % BITS_PER_UNIT != 0)))
5255 /* If the RHS and field are a constant size and the size of the
5256 RHS isn't the same size as the bitfield, we must use bitfield
5257 operations. */
5258 || (bitsize >= 0
5259 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5260 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5262 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5264 /* If BITSIZE is narrower than the size of the type of EXP
5265 we will be narrowing TEMP. Normally, what's wanted are the
5266 low-order bits. However, if EXP's type is a record and this is
5267 a big-endian machine, we want the upper BITSIZE bits.
5268 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5269 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5270 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5271 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5272 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5273 - bitsize),
5274 NULL_RTX, 1);
5276 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5277 MODE. */
5278 if (mode != VOIDmode && mode != BLKmode
5279 && mode != TYPE_MODE (TREE_TYPE (exp)))
5280 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5282 /* If the modes of TARGET and TEMP are both BLKmode, both
5283 must be in memory and BITPOS must be aligned on a byte
5284 boundary. If so, we simply do a block copy. */
5285 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5287 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5288 || bitpos % BITS_PER_UNIT != 0)
5289 abort ();
5291 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5292 emit_block_move (target, temp,
5293 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5294 / BITS_PER_UNIT),
5295 BLOCK_OP_NORMAL);
5297 return value_mode == VOIDmode ? const0_rtx : target;
5300 /* Store the value in the bitfield. */
5301 store_bit_field (target, bitsize, bitpos, mode, temp,
5302 int_size_in_bytes (type));
5304 if (value_mode != VOIDmode)
5306 /* The caller wants an rtx for the value.
5307 If possible, avoid refetching from the bitfield itself. */
5308 if (width_mask != 0
5309 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5311 tree count;
5312 enum machine_mode tmode;
5314 tmode = GET_MODE (temp);
5315 if (tmode == VOIDmode)
5316 tmode = value_mode;
5318 if (unsignedp)
5319 return expand_and (tmode, temp,
5320 gen_int_mode (width_mask, tmode),
5321 NULL_RTX);
5323 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5324 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5325 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5328 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5329 NULL_RTX, value_mode, VOIDmode,
5330 int_size_in_bytes (type));
5332 return const0_rtx;
5334 else
5336 rtx addr = XEXP (target, 0);
5337 rtx to_rtx = target;
5339 /* If a value is wanted, it must be the lhs;
5340 so make the address stable for multiple use. */
5342 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5343 && ! CONSTANT_ADDRESS_P (addr)
5344 /* A frame-pointer reference is already stable. */
5345 && ! (GET_CODE (addr) == PLUS
5346 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5347 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5348 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5349 to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
5351 /* Now build a reference to just the desired component. */
5353 to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5355 if (to_rtx == target)
5356 to_rtx = copy_rtx (to_rtx);
5358 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5359 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5360 set_mem_alias_set (to_rtx, alias_set);
5362 return store_expr (exp, to_rtx, value_mode != VOIDmode);
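/* Worked example of the value read-back above: for a bit-field store with
   BITSIZE == 5, WIDTH_MASK is ((HOST_WIDE_INT) 1 << 5) - 1 == 0x1f.  An
   unsigned value is then recovered as (temp & 0x1f); a signed value is
   recovered by shifting left by GET_MODE_BITSIZE (tmode) - 5 and shifting
   back right, which is the expand_shift pair in the VALUE_MODE branch.  */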
5366 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5367 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5368 codes and find the ultimate containing object, which we return.
5370 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5371 bit position, and *PUNSIGNEDP to the signedness of the field.
5372 If the position of the field is variable, we store a tree
5373 giving the variable offset (in units) in *POFFSET.
5374 This offset is in addition to the bit position.
5375 If the position is not variable, we store 0 in *POFFSET.
5377 If any of the extraction expressions is volatile,
5378 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5380 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5381 is a mode that can be used to access the field. In that case, *PBITSIZE
5382 is redundant.
5384 If the field describes a variable-sized object, *PMODE is set to
5385 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5386 this case, but the address of the object can be found. */
5388 tree
5389 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5390 HOST_WIDE_INT *pbitpos, tree *poffset,
5391 enum machine_mode *pmode, int *punsignedp,
5392 int *pvolatilep)
5394 tree size_tree = 0;
5395 enum machine_mode mode = VOIDmode;
5396 tree offset = size_zero_node;
5397 tree bit_offset = bitsize_zero_node;
5398 tree placeholder_ptr = 0;
5399 tree tem;
5401 /* First get the mode, signedness, and size. We do this from just the
5402 outermost expression. */
5403 if (TREE_CODE (exp) == COMPONENT_REF)
5405 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5406 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5407 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5409 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5411 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5413 size_tree = TREE_OPERAND (exp, 1);
5414 *punsignedp = TREE_UNSIGNED (exp);
5416 else
5418 mode = TYPE_MODE (TREE_TYPE (exp));
5419 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5421 if (mode == BLKmode)
5422 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5423 else
5424 *pbitsize = GET_MODE_BITSIZE (mode);
5427 if (size_tree != 0)
5429 if (! host_integerp (size_tree, 1))
5430 mode = BLKmode, *pbitsize = -1;
5431 else
5432 *pbitsize = tree_low_cst (size_tree, 1);
5435 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5436 and find the ultimate containing object. */
5437 while (1)
5439 if (TREE_CODE (exp) == BIT_FIELD_REF)
5440 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5441 else if (TREE_CODE (exp) == COMPONENT_REF)
5443 tree field = TREE_OPERAND (exp, 1);
5444 tree this_offset = DECL_FIELD_OFFSET (field);
5446 /* If this field hasn't been filled in yet, don't go
5447 past it. This should only happen when folding expressions
5448 made during type construction. */
5449 if (this_offset == 0)
5450 break;
5451 else if (CONTAINS_PLACEHOLDER_P (this_offset))
5452 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5454 offset = size_binop (PLUS_EXPR, offset, this_offset);
5455 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5456 DECL_FIELD_BIT_OFFSET (field));
5458 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5461 else if (TREE_CODE (exp) == ARRAY_REF
5462 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5464 tree index = TREE_OPERAND (exp, 1);
5465 tree array = TREE_OPERAND (exp, 0);
5466 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5467 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5468 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
5470 /* We assume all arrays have sizes that are a multiple of a byte.
5471 First subtract the lower bound, if any, in the type of the
5472 index, then convert to sizetype and multiply by the size of the
5473 array element. */
5474 if (low_bound != 0 && ! integer_zerop (low_bound))
5475 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5476 index, low_bound));
5478 /* If the index has a self-referential type, pass it to a
5479 WITH_RECORD_EXPR; if the component size does, pass the
5480 containing object to one. */
5481 if (CONTAINS_PLACEHOLDER_P (index))
5482 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5483 if (CONTAINS_PLACEHOLDER_P (unit_size))
5484 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
5486 offset = size_binop (PLUS_EXPR, offset,
5487 size_binop (MULT_EXPR,
5488 convert (sizetype, index),
5489 unit_size));
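/* Editorial example, not part of the original source: for a[3] where A is
   an array of 4-byte ints with a zero lower bound (assumed sizes), this
   step adds 3 * 4 = 12 units to OFFSET; any sub-byte position stays in
   BIT_OFFSET.  */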
5492 else if (TREE_CODE (exp) == PLACEHOLDER_EXPR)
5494 tree new = find_placeholder (exp, &placeholder_ptr);
5496 /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
5497 We might have been called from tree optimization where we
5498 haven't set up an object yet. */
5499 if (new == 0)
5500 break;
5501 else
5502 exp = new;
5504 continue;
5507 /* We can go inside most conversions: all NON_LVALUE_EXPRs, all normal
5508 conversions that don't change the mode, and all view conversions
5509 except those that need to "step up" the alignment. */
5510 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5511 && ! (TREE_CODE (exp) == VIEW_CONVERT_EXPR
5512 && ! ((TYPE_ALIGN (TREE_TYPE (exp))
5513 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
5514 && STRICT_ALIGNMENT
5515 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5516 < BIGGEST_ALIGNMENT)
5517 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5518 || TYPE_ALIGN_OK (TREE_TYPE
5519 (TREE_OPERAND (exp, 0))))))
5520 && ! ((TREE_CODE (exp) == NOP_EXPR
5521 || TREE_CODE (exp) == CONVERT_EXPR)
5522 && (TYPE_MODE (TREE_TYPE (exp))
5523 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5524 break;
5526 /* If any reference in the chain is volatile, the effect is volatile. */
5527 if (TREE_THIS_VOLATILE (exp))
5528 *pvolatilep = 1;
5530 exp = TREE_OPERAND (exp, 0);
5533 /* If OFFSET is constant, see if we can return the whole thing as a
5534 constant bit position. Otherwise, split it up. */
5535 if (host_integerp (offset, 0)
5536 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5537 bitsize_unit_node))
5538 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5539 && host_integerp (tem, 0))
5540 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5541 else
5542 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
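/* Editorial example, not part of the original source: a constant byte
   offset of 4 with a BIT_OFFSET of 0 folds into *PBITPOS = 4 *
   BITS_PER_UNIT = 32 (assuming 8-bit units) with *POFFSET = 0, whereas a
   variable offset is left in *POFFSET and only BIT_OFFSET goes into
   *PBITPOS.  */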
5544 *pmode = mode;
5545 return exp;
5548 /* Return 1 if T is an expression that get_inner_reference handles. */
5550 int
5551 handled_component_p (tree t)
5553 switch (TREE_CODE (t))
5555 case BIT_FIELD_REF:
5556 case COMPONENT_REF:
5557 case ARRAY_REF:
5558 case ARRAY_RANGE_REF:
5559 case NON_LVALUE_EXPR:
5560 case VIEW_CONVERT_EXPR:
5561 return 1;
5563 /* ??? Sure they are handled, but get_inner_reference may return
5564 a different PBITSIZE, depending upon whether the expression is
5565 wrapped up in a NOP_EXPR or not, e.g. for bitfields. */
5566 case NOP_EXPR:
5567 case CONVERT_EXPR:
5568 return (TYPE_MODE (TREE_TYPE (t))
5569 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5571 default:
5572 return 0;
5576 /* Given an rtx VALUE that may contain additions and multiplications, return
5577 an equivalent value that just refers to a register, memory, or constant.
5578 This is done by generating instructions to perform the arithmetic and
5579 returning a pseudo-register containing the value.
5581 The returned value may be a REG, SUBREG, MEM or constant. */
5583 rtx
5584 force_operand (rtx value, rtx target)
5586 rtx op1, op2;
5587 /* Use subtarget as the target for operand 0 of a binary operation. */
5588 rtx subtarget = get_subtarget (target);
5589 enum rtx_code code = GET_CODE (value);
5591 /* Check for subreg applied to an expression produced by loop optimizer. */
5592 if (code == SUBREG
5593 && GET_CODE (SUBREG_REG (value)) != REG
5594 && GET_CODE (SUBREG_REG (value)) != MEM)
5596 value = simplify_gen_subreg (GET_MODE (value),
5597 force_reg (GET_MODE (SUBREG_REG (value)),
5598 force_operand (SUBREG_REG (value),
5599 NULL_RTX)),
5600 GET_MODE (SUBREG_REG (value)),
5601 SUBREG_BYTE (value));
5602 code = GET_CODE (value);
5605 /* Check for a PIC address load. */
5606 if ((code == PLUS || code == MINUS)
5607 && XEXP (value, 0) == pic_offset_table_rtx
5608 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5609 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5610 || GET_CODE (XEXP (value, 1)) == CONST))
5612 if (!subtarget)
5613 subtarget = gen_reg_rtx (GET_MODE (value));
5614 emit_move_insn (subtarget, value);
5615 return subtarget;
5618 if (code == ZERO_EXTEND || code == SIGN_EXTEND)
5620 if (!target)
5621 target = gen_reg_rtx (GET_MODE (value));
5622 convert_move (target, force_operand (XEXP (value, 0), NULL),
5623 code == ZERO_EXTEND);
5624 return target;
5627 if (ARITHMETIC_P (value))
5629 op2 = XEXP (value, 1);
5630 if (!CONSTANT_P (op2) && !(GET_CODE (op2) == REG && op2 != subtarget))
5631 subtarget = 0;
5632 if (code == MINUS && GET_CODE (op2) == CONST_INT)
5634 code = PLUS;
5635 op2 = negate_rtx (GET_MODE (value), op2);
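/* Editorial example, not part of the original source: (minus (reg 100)
   (const_int 4)) is handled from here on as (plus (reg 100) (const_int -4)),
   so the PLUS paths below apply.  */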
5638 /* Check for an addition with OP2 a constant integer and our first
5639 operand a PLUS of a virtual register and something else. In that
5640 case, we want to emit the sum of the virtual register and the
5641 constant first and then add the other value. This allows virtual
5642 register instantiation to simply modify the constant rather than
5643 creating another one around this addition. */
5644 if (code == PLUS && GET_CODE (op2) == CONST_INT
5645 && GET_CODE (XEXP (value, 0)) == PLUS
5646 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5647 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5648 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5650 rtx temp = expand_simple_binop (GET_MODE (value), code,
5651 XEXP (XEXP (value, 0), 0), op2,
5652 subtarget, 0, OPTAB_LIB_WIDEN);
5653 return expand_simple_binop (GET_MODE (value), code, temp,
5654 force_operand (XEXP (XEXP (value,
5655 0), 1), 0),
5656 target, 0, OPTAB_LIB_WIDEN);
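/* Editorial example, not part of the original source: for a VALUE such as
   (plus (plus (reg virtual-stack-vars) (reg 101)) (const_int 8)), the code
   above first computes virtual-stack-vars + 8, so that virtual register
   instantiation only has to adjust that constant, and then adds (reg 101)
   to the result.  */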
5659 op1 = force_operand (XEXP (value, 0), subtarget);
5660 op2 = force_operand (op2, NULL_RTX);
5661 switch (code)
5663 case MULT:
5664 return expand_mult (GET_MODE (value), op1, op2, target, 1);
5665 case DIV:
5666 if (!INTEGRAL_MODE_P (GET_MODE (value)))
5667 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5668 target, 1, OPTAB_LIB_WIDEN);
5669 else
5670 return expand_divmod (0,
5671 FLOAT_MODE_P (GET_MODE (value))
5672 ? RDIV_EXPR : TRUNC_DIV_EXPR,
5673 GET_MODE (value), op1, op2, target, 0);
5674 break;
5675 case MOD:
5676 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5677 target, 0);
5678 break;
5679 case UDIV:
5680 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
5681 target, 1);
5682 break;
5683 case UMOD:
5684 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5685 target, 1);
5686 break;
5687 case ASHIFTRT:
5688 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5689 target, 0, OPTAB_LIB_WIDEN);
5690 break;
5691 default:
5692 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5693 target, 1, OPTAB_LIB_WIDEN);
5696 if (UNARY_P (value))
5698 op1 = force_operand (XEXP (value, 0), NULL_RTX);
5699 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
5702 #ifdef INSN_SCHEDULING
5703 /* On machines that have insn scheduling, we want all memory references to be
5704 explicit, so we need to deal with such paradoxical SUBREGs. */
5705 if (GET_CODE (value) == SUBREG && GET_CODE (SUBREG_REG (value)) == MEM
5706 && (GET_MODE_SIZE (GET_MODE (value))
5707 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5708 value
5709 = simplify_gen_subreg (GET_MODE (value),
5710 force_reg (GET_MODE (SUBREG_REG (value)),
5711 force_operand (SUBREG_REG (value),
5712 NULL_RTX)),
5713 GET_MODE (SUBREG_REG (value)),
5714 SUBREG_BYTE (value));
5715 #endif
5717 return value;
5720 /* Subroutine of expand_expr: return nonzero iff there is no way that
5721 EXP can reference X, which is being modified. TOP_P is nonzero if this
5722 call is going to be used to determine whether we need a temporary
5723 for EXP, as opposed to a recursive call to this function.
5725 It is always safe for this routine to return zero since it merely
5726 searches for optimization opportunities. */
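/* Editorial example, not part of the original source: if X is the pseudo
   register holding a variable `a', then an EXP of `b + 1' is safe (nonzero
   is returned), while an EXP that reads `a' itself is not, since its
   DECL_RTL is X.  */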
5728 static int
5729 safe_from_p (rtx x, tree exp, int top_p)
5731 rtx exp_rtl = 0;
5732 int i, nops;
5733 static tree save_expr_list;
5735 if (x == 0
5736 /* If EXP has varying size, we MUST use a target since we currently
5737 have no way of allocating temporaries of variable size
5738 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5739 So we assume here that something at a higher level has prevented a
5740 clash. This is somewhat bogus, but the best we can do. Only
5741 do this when X is BLKmode and when we are at the top level. */
5742 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5743 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5744 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5745 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5746 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5747 != INTEGER_CST)
5748 && GET_MODE (x) == BLKmode)
5749 /* If X is in the outgoing argument area, it is always safe. */
5750 || (GET_CODE (x) == MEM
5751 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5752 || (GET_CODE (XEXP (x, 0)) == PLUS
5753 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5754 return 1;
5756 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5757 find the underlying pseudo. */
5758 if (GET_CODE (x) == SUBREG)
5760 x = SUBREG_REG (x);
5761 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5762 return 0;
5765 /* A SAVE_EXPR might appear many times in the expression passed to the
5766 top-level safe_from_p call, and if it has a complex subexpression,
5767 examining it multiple times could result in a combinatorial explosion.
5768 E.g. on an Alpha running at least 200MHz, a Fortran testcase compiled
5769 with optimization took about 28 minutes to compile -- even though it was
5770 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
5771 and turn that off when we are done. We keep a list of the SAVE_EXPRs
5772 we have processed. Note that the only test of top_p was above. */
5774 if (top_p)
5776 int rtn;
5777 tree t;
5779 save_expr_list = 0;
5781 rtn = safe_from_p (x, exp, 0);
5783 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
5784 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
5786 return rtn;
5789 /* Now look at our tree code and possibly recurse. */
5790 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5792 case 'd':
5793 exp_rtl = DECL_RTL_IF_SET (exp);
5794 break;
5796 case 'c':
5797 return 1;
5799 case 'x':
5800 if (TREE_CODE (exp) == TREE_LIST)
5802 while (1)
5804 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
5805 return 0;
5806 exp = TREE_CHAIN (exp);
5807 if (!exp)
5808 return 1;
5809 if (TREE_CODE (exp) != TREE_LIST)
5810 return safe_from_p (x, exp, 0);
5813 else if (TREE_CODE (exp) == ERROR_MARK)
5814 return 1; /* An already-visited SAVE_EXPR? */
5815 else
5816 return 0;
5818 case '2':
5819 case '<':
5820 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
5821 return 0;
5822 /* Fall through. */
5824 case '1':
5825 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5827 case 'e':
5828 case 'r':
5829 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5830 the expression. If it is set, we conflict iff we are that rtx or
5831 both are in memory. Otherwise, we check all operands of the
5832 expression recursively. */
5834 switch (TREE_CODE (exp))
5836 case ADDR_EXPR:
5837 /* If the operand is static or we are static, we can't conflict.
5838 Likewise if we don't conflict with the operand at all. */
5839 if (staticp (TREE_OPERAND (exp, 0))
5840 || TREE_STATIC (exp)
5841 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5842 return 1;
5844 /* Otherwise, the only way this can conflict is if we are taking
5845 the address of a DECL whose address is part of X, which is
5846 very rare. */
5847 exp = TREE_OPERAND (exp, 0);
5848 if (DECL_P (exp))
5850 if (!DECL_RTL_SET_P (exp)
5851 || GET_CODE (DECL_RTL (exp)) != MEM)
5852 return 0;
5853 else
5854 exp_rtl = XEXP (DECL_RTL (exp), 0);
5856 break;
5858 case INDIRECT_REF:
5859 if (GET_CODE (x) == MEM
5860 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5861 get_alias_set (exp)))
5862 return 0;
5863 break;
5865 case CALL_EXPR:
5866 /* Assume that the call will clobber all hard registers and
5867 all of memory. */
5868 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5869 || GET_CODE (x) == MEM)
5870 return 0;
5871 break;
5873 case RTL_EXPR:
5874 /* If a sequence exists, we would have to scan every instruction
5875 in the sequence to see if it was safe. This is probably not
5876 worthwhile. */
5877 if (RTL_EXPR_SEQUENCE (exp))
5878 return 0;
5880 exp_rtl = RTL_EXPR_RTL (exp);
5881 break;
5883 case WITH_CLEANUP_EXPR:
5884 exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
5885 break;
5887 case CLEANUP_POINT_EXPR:
5888 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5890 case SAVE_EXPR:
5891 exp_rtl = SAVE_EXPR_RTL (exp);
5892 if (exp_rtl)
5893 break;
5895 /* If we've already scanned this, don't do it again. Otherwise,
5896 show we've scanned it and record for clearing the flag if we're
5897 going on. */
5898 if (TREE_PRIVATE (exp))
5899 return 1;
5901 TREE_PRIVATE (exp) = 1;
5902 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5904 TREE_PRIVATE (exp) = 0;
5905 return 0;
5908 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
5909 return 1;
5911 case BIND_EXPR:
5912 /* The only operand we look at is operand 1. The rest aren't
5913 part of the expression. */
5914 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5916 default:
5917 break;
5920 /* If we have an rtx, we do not need to scan our operands. */
5921 if (exp_rtl)
5922 break;
5924 nops = first_rtl_op (TREE_CODE (exp));
5925 for (i = 0; i < nops; i++)
5926 if (TREE_OPERAND (exp, i) != 0
5927 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5928 return 0;
5930 /* If this is a language-specific tree code, it may require
5931 special handling. */
5932 if ((unsigned int) TREE_CODE (exp)
5933 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5934 && !(*lang_hooks.safe_from_p) (x, exp))
5935 return 0;
5938 /* If we have an rtl, find any enclosed object. Then see if we conflict
5939 with it. */
5940 if (exp_rtl)
5942 if (GET_CODE (exp_rtl) == SUBREG)
5944 exp_rtl = SUBREG_REG (exp_rtl);
5945 if (GET_CODE (exp_rtl) == REG
5946 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5947 return 0;
5950 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5951 are memory and they conflict. */
5952 return ! (rtx_equal_p (x, exp_rtl)
5953 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5954 && true_dependence (exp_rtl, VOIDmode, x,
5955 rtx_addr_varies_p)));
5958 /* If we reach here, it is safe. */
5959 return 1;
5962 /* Subroutine of expand_expr: return rtx if EXP is a
5963 variable or parameter; else return 0. */
5965 static rtx
5966 var_rtx (tree exp)
5968 STRIP_NOPS (exp);
5969 switch (TREE_CODE (exp))
5971 case PARM_DECL:
5972 case VAR_DECL:
5973 return DECL_RTL (exp);
5974 default:
5975 return 0;
5979 /* Return the highest power of two that EXP is known to be a multiple of.
5980 This is used in updating alignment of MEMs in array references. */
5982 static unsigned HOST_WIDE_INT
5983 highest_pow2_factor (tree exp)
5985 unsigned HOST_WIDE_INT c0, c1;
5987 switch (TREE_CODE (exp))
5989 case INTEGER_CST:
5990 /* We can find the lowest bit that's a one. If the low
5991 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
5992 We need to handle this case since we can find it in a COND_EXPR,
5993 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
5994 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
5995 later ICE. */
5996 if (TREE_CONSTANT_OVERFLOW (exp))
5997 return BIGGEST_ALIGNMENT;
5998 else
6000 /* Note: tree_low_cst is intentionally not used here,
6001 we don't care about the upper bits. */
6002 c0 = TREE_INT_CST_LOW (exp);
6003 c0 &= -c0;
6004 return c0 ? c0 : BIGGEST_ALIGNMENT;
6006 break;
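/* Editorial example, not part of the original source: for the constant 24
   (binary 11000), c0 & -c0 isolates the lowest set bit, giving 8, which is
   indeed the largest power of two dividing 24.  */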
6008 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6009 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6010 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6011 return MIN (c0, c1);
6013 case MULT_EXPR:
6014 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6015 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6016 return c0 * c1;
6018 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6019 case CEIL_DIV_EXPR:
6020 if (integer_pow2p (TREE_OPERAND (exp, 1))
6021 && host_integerp (TREE_OPERAND (exp, 1), 1))
6023 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6024 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6025 return MAX (1, c0 / c1);
6027 break;
6029 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6030 case SAVE_EXPR: case WITH_RECORD_EXPR:
6031 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6033 case COMPOUND_EXPR:
6034 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6036 case COND_EXPR:
6037 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6038 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6039 return MIN (c0, c1);
6041 default:
6042 break;
6045 return 1;
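/* Editorial example, not part of the original source: for i * 12 + 16 with
   I a variable, the MULT case gives 1 * 4 = 4 (a variable contributes the
   default factor of 1, the constant 12 contributes 4), the constant 16
   contributes 16, and the PLUS case returns MIN (4, 16) = 4.  */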
6048 /* Similar, except that it is known that the expression must be a multiple
6049 of the alignment of TYPE. */
6051 static unsigned HOST_WIDE_INT
6052 highest_pow2_factor_for_type (tree type, tree exp)
6054 unsigned HOST_WIDE_INT type_align, factor;
6056 factor = highest_pow2_factor (exp);
6057 type_align = TYPE_ALIGN (type) / BITS_PER_UNIT;
6058 return MAX (factor, type_align);
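/* Editorial example, not part of the original source: if EXP alone only
   guarantees a factor of 4 but TYPE is 8-byte aligned, the result is
   MAX (4, 8) = 8.  */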
6061 /* Return an object on the placeholder list that matches EXP, a
6062 PLACEHOLDER_EXPR. An object "matches" if it is of the type of the
6063 PLACEHOLDER_EXPR or a pointer type to it. For further information, see
6064 tree.def. If no such object is found, return 0. If PLIST is nonzero, it
6065 is a location which initially points to a starting location in the
6066 placeholder list (zero means start of the list) and where a pointer into
6067 the placeholder list at which the object is found is placed. */
6069 tree
6070 find_placeholder (tree exp, tree *plist)
6072 tree type = TREE_TYPE (exp);
6073 tree placeholder_expr;
6075 for (placeholder_expr
6076 = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list;
6077 placeholder_expr != 0;
6078 placeholder_expr = TREE_CHAIN (placeholder_expr))
6080 tree need_type = TYPE_MAIN_VARIANT (type);
6081 tree elt;
6083 /* Find the outermost reference that is of the type we want. If none,
6084 see if any object has a type that is a pointer to the type we
6085 want. */
6086 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6087 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
6088 || TREE_CODE (elt) == COND_EXPR)
6089 ? TREE_OPERAND (elt, 1)
6090 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6091 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6092 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6093 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6094 ? TREE_OPERAND (elt, 0) : 0))
6095 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6097 if (plist)
6098 *plist = placeholder_expr;
6099 return elt;
6102 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6104 = ((TREE_CODE (elt) == COMPOUND_EXPR
6105 || TREE_CODE (elt) == COND_EXPR)
6106 ? TREE_OPERAND (elt, 1)
6107 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6108 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6109 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6110 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6111 ? TREE_OPERAND (elt, 0) : 0))
6112 if (POINTER_TYPE_P (TREE_TYPE (elt))
6113 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6114 == need_type))
6116 if (plist)
6117 *plist = placeholder_expr;
6118 return build1 (INDIRECT_REF, need_type, elt);
6122 return 0;
6125 /* Subroutine of expand_expr. Expand the two operands of a binary
6126 expression EXP0 and EXP1 placing the results in OP0 and OP1.
6127 The value may be stored in TARGET if TARGET is nonzero. The
6128 MODIFIER argument is as documented by expand_expr. */
6130 static void
6131 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
6132 enum expand_modifier modifier)
6134 if (! safe_from_p (target, exp1, 1))
6135 target = 0;
6136 if (operand_equal_p (exp0, exp1, 0))
6138 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6139 *op1 = copy_rtx (*op0);
6141 else
6143 /* If we need to preserve evaluation order, copy exp0 into its own
6144 temporary variable so that it can't be clobbered by exp1. */
6145 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
6146 exp0 = save_expr (exp0);
6147 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6148 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
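/* Editorial note, not part of the original source: with flag_evaluation_order
   set (a front end that requires left-to-right evaluation -- an assumption
   about how the flag is used) and EXP1 having side effects, EXP0 is wrapped
   in a SAVE_EXPR above; e.g. for `a + (a = 5)' the original value of `a'
   cannot be clobbered by expanding the assignment in EXP1.  */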
6153 /* expand_expr: generate code for computing expression EXP.
6154 An rtx for the computed value is returned. The value is never null.
6155 In the case of a void EXP, const0_rtx is returned.
6157 The value may be stored in TARGET if TARGET is nonzero.
6158 TARGET is just a suggestion; callers must assume that
6159 the rtx returned may not be the same as TARGET.
6161 If TARGET is CONST0_RTX, it means that the value will be ignored.
6163 If TMODE is not VOIDmode, it suggests generating the
6164 result in mode TMODE. But this is done only when convenient.
6165 Otherwise, TMODE is ignored and the value is generated in its natural mode.
6166 TMODE is just a suggestion; callers must assume that
6167 the rtx returned may not have mode TMODE.
6169 Note that TARGET may have neither TMODE nor MODE. In that case, it
6170 probably will not be used.
6172 If MODIFIER is EXPAND_SUM then when EXP is an addition
6173 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6174 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6175 products as above, or REG or MEM, or constant.
6176 Ordinarily in such cases we would output mul or add instructions
6177 and then return a pseudo reg containing the sum.
6179 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6180 it also marks a label as absolutely required (it can't be dead).
6181 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6182 This is used for outputting expressions used in initializers.
6184 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6185 with a constant address even if that address is not normally legitimate.
6186 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6188 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6189 a call parameter. Such targets require special care as we haven't yet
6190 marked TARGET so that it's safe from being trashed by libcalls. We
6191 don't want to use TARGET for anything but the final result;
6192 intermediate values must go elsewhere. Additionally, calls to
6193 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
6195 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
6196 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
6197 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
6198 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
6199 recursively. */
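/* Editorial example, not part of the original source: with EXPAND_SUM, the
   address computation for a[i] with 4-byte elements may come back as
   something like (plus (reg A) (mult (reg I) (const_int 4))) rather than as
   a single pseudo holding the already-computed sum.  */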
6201 rtx
6202 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
6203 enum expand_modifier modifier, rtx *alt_rtl)
6205 rtx op0, op1, temp;
6206 tree type = TREE_TYPE (exp);
6207 int unsignedp = TREE_UNSIGNED (type);
6208 enum machine_mode mode;
6209 enum tree_code code = TREE_CODE (exp);
6210 optab this_optab;
6211 rtx subtarget, original_target;
6212 int ignore;
6213 tree context;
6215 /* Handle ERROR_MARK before anybody tries to access its type. */
6216 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
6218 op0 = CONST0_RTX (tmode);
6219 if (op0 != 0)
6220 return op0;
6221 return const0_rtx;
6224 mode = TYPE_MODE (type);
6225 /* Use subtarget as the target for operand 0 of a binary operation. */
6226 subtarget = get_subtarget (target);
6227 original_target = target;
6228 ignore = (target == const0_rtx
6229 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6230 || code == CONVERT_EXPR || code == REFERENCE_EXPR
6231 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
6232 && TREE_CODE (type) == VOID_TYPE));
6234 /* If we are going to ignore this result, we need only do something
6235 if there is a side-effect somewhere in the expression. If there
6236 is, short-circuit the most common cases here. Note that we must
6237 not call expand_expr with anything but const0_rtx in case this
6238 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6240 if (ignore)
6242 if (! TREE_SIDE_EFFECTS (exp))
6243 return const0_rtx;
6245 /* Ensure we reference a volatile object even if value is ignored, but
6246 don't do this if all we are doing is taking its address. */
6247 if (TREE_THIS_VOLATILE (exp)
6248 && TREE_CODE (exp) != FUNCTION_DECL
6249 && mode != VOIDmode && mode != BLKmode
6250 && modifier != EXPAND_CONST_ADDRESS)
6252 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6253 if (GET_CODE (temp) == MEM)
6254 temp = copy_to_reg (temp);
6255 return const0_rtx;
6258 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6259 || code == INDIRECT_REF || code == BUFFER_REF)
6260 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6261 modifier);
6263 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6264 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6266 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6267 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6268 return const0_rtx;
6270 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6271 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6272 /* If the second operand has no side effects, just evaluate
6273 the first. */
6274 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6275 modifier);
6276 else if (code == BIT_FIELD_REF)
6278 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6279 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6280 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6281 return const0_rtx;
6284 target = 0;
6287 /* If we will do cse, generate all results into pseudo registers
6288 since 1) that allows cse to find more things
6289 and 2) otherwise cse could produce an insn the machine
6290 cannot support. An exception is a CONSTRUCTOR into a multi-word
6291 MEM: that's much more likely to be most efficient into the MEM.
6292 Another is a CALL_EXPR which must return in memory. */
6294 if (! cse_not_expected && mode != BLKmode && target
6295 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER)
6296 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
6297 && ! (code == CALL_EXPR && aggregate_value_p (exp, exp)))
6298 target = 0;
6300 switch (code)
6302 case LABEL_DECL:
6304 tree function = decl_function_context (exp);
6305 /* Labels in containing functions, or labels used from initializers,
6306 must be forced. */
6307 if (modifier == EXPAND_INITIALIZER
6308 || (function != current_function_decl
6309 && function != inline_function_decl
6310 && function != 0))
6311 temp = force_label_rtx (exp);
6312 else
6313 temp = label_rtx (exp);
6315 temp = gen_rtx_MEM (FUNCTION_MODE, gen_rtx_LABEL_REF (Pmode, temp));
6316 if (function != current_function_decl
6317 && function != inline_function_decl && function != 0)
6318 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6319 return temp;
6322 case PARM_DECL:
6323 if (!DECL_RTL_SET_P (exp))
6325 error ("%Jprior parameter's size depends on '%D'", exp, exp);
6326 return CONST0_RTX (mode);
6329 /* ... fall through ... */
6331 case VAR_DECL:
6332 /* If a static var's type was incomplete when the decl was written,
6333 but the type is complete now, lay out the decl now. */
6334 if (DECL_SIZE (exp) == 0
6335 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
6336 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6337 layout_decl (exp, 0);
6339 /* ... fall through ... */
6341 case FUNCTION_DECL:
6342 case RESULT_DECL:
6343 if (DECL_RTL (exp) == 0)
6344 abort ();
6346 /* Ensure the variable is marked as used even if it doesn't go through
6347 a parser. If it hasn't been used yet, write out an external
6348 definition. */
6349 if (! TREE_USED (exp))
6351 assemble_external (exp);
6352 TREE_USED (exp) = 1;
6355 /* Show we haven't gotten RTL for this yet. */
6356 temp = 0;
6358 /* Handle variables inherited from containing functions. */
6359 context = decl_function_context (exp);
6361 /* We treat inline_function_decl as an alias for the current function
6362 because that is the inline function whose vars, types, etc.
6363 are being merged into the current function.
6364 See expand_inline_function. */
6366 if (context != 0 && context != current_function_decl
6367 && context != inline_function_decl
6368 /* If var is static, we don't need a static chain to access it. */
6369 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6370 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6372 rtx addr;
6374 /* Mark as non-local and addressable. */
6375 DECL_NONLOCAL (exp) = 1;
6376 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6377 abort ();
6378 (*lang_hooks.mark_addressable) (exp);
6379 if (GET_CODE (DECL_RTL (exp)) != MEM)
6380 abort ();
6381 addr = XEXP (DECL_RTL (exp), 0);
6382 if (GET_CODE (addr) == MEM)
6383 addr
6384 = replace_equiv_address (addr,
6385 fix_lexical_addr (XEXP (addr, 0), exp));
6386 else
6387 addr = fix_lexical_addr (addr, exp);
6389 temp = replace_equiv_address (DECL_RTL (exp), addr);
6392 /* This is the case of an array whose size is to be determined
6393 from its initializer, while the initializer is still being parsed.
6394 See expand_decl. */
6396 else if (GET_CODE (DECL_RTL (exp)) == MEM
6397 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6398 temp = validize_mem (DECL_RTL (exp));
6400 /* If DECL_RTL is memory, we are in the normal case: if the
6401 address is not valid, or it is not a register and -fforce-addr
6402 is specified, get the address into a register. */
6404 else if (GET_CODE (DECL_RTL (exp)) == MEM
6405 && modifier != EXPAND_CONST_ADDRESS
6406 && modifier != EXPAND_SUM
6407 && modifier != EXPAND_INITIALIZER
6408 && (! memory_address_p (DECL_MODE (exp),
6409 XEXP (DECL_RTL (exp), 0))
6410 || (flag_force_addr
6411 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6413 if (alt_rtl)
6414 *alt_rtl = DECL_RTL (exp);
6415 temp = replace_equiv_address (DECL_RTL (exp),
6416 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6419 /* If we got something, return it. But first, set the alignment
6420 if the address is a register. */
6421 if (temp != 0)
6423 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6424 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6426 return temp;
6429 /* If the mode of DECL_RTL does not match that of the decl, it
6430 must be a promoted value. We return a SUBREG of the wanted mode,
6431 but mark it so that we know that it was already extended. */
6433 if (GET_CODE (DECL_RTL (exp)) == REG
6434 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6436 /* Get the signedness used for this variable. Ensure we get the
6437 same mode we got when the variable was declared. */
6438 if (GET_MODE (DECL_RTL (exp))
6439 != promote_mode (type, DECL_MODE (exp), &unsignedp,
6440 (TREE_CODE (exp) == RESULT_DECL ? 1 : 0)))
6441 abort ();
6443 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6444 SUBREG_PROMOTED_VAR_P (temp) = 1;
6445 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6446 return temp;
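/* Editorial example, not part of the original source: on a little-endian
   target that promotes SImode scalars to DImode registers (an assumption),
   DECL_RTL is a DImode pseudo and the code above returns
   (subreg:SI (reg:DI N) 0) with SUBREG_PROMOTED_VAR_P set, so later users
   know the value is already extended.  */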
6449 return DECL_RTL (exp);
6451 case INTEGER_CST:
6452 temp = immed_double_const (TREE_INT_CST_LOW (exp),
6453 TREE_INT_CST_HIGH (exp), mode);
6455 /* ??? If overflow is set, fold will have done an incomplete job,
6456 which can result in (plus xx (const_int 0)), which can get
6457 simplified by validate_replace_rtx during virtual register
6458 instantiation, which can result in unrecognizable insns.
6459 Avoid this by forcing all overflows into registers. */
6460 if (TREE_CONSTANT_OVERFLOW (exp)
6461 && modifier != EXPAND_INITIALIZER)
6462 temp = force_reg (mode, temp);
6464 return temp;
6466 case VECTOR_CST:
6467 return const_vector_from_tree (exp);
6469 case CONST_DECL:
6470 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
6472 case REAL_CST:
6473 /* If optimized, generate immediate CONST_DOUBLE
6474 which will be turned into memory by reload if necessary.
6476 We used to force a register so that loop.c could see it. But
6477 this does not allow gen_* patterns to perform optimizations with
6478 the constants. It also produces two insns in cases like "x = 1.0;".
6479 On most machines, floating-point constants are not permitted in
6480 many insns, so we'd end up copying it to a register in any case.
6482 Now, we do the copying in expand_binop, if appropriate. */
6483 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6484 TYPE_MODE (TREE_TYPE (exp)));
6486 case COMPLEX_CST:
6487 /* Handle evaluating a complex constant in a CONCAT target. */
6488 if (original_target && GET_CODE (original_target) == CONCAT)
6490 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6491 rtx rtarg, itarg;
6493 rtarg = XEXP (original_target, 0);
6494 itarg = XEXP (original_target, 1);
6496 /* Move the real and imaginary parts separately. */
6497 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, 0);
6498 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, 0);
6500 if (op0 != rtarg)
6501 emit_move_insn (rtarg, op0);
6502 if (op1 != itarg)
6503 emit_move_insn (itarg, op1);
6505 return original_target;
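/* Editorial example, not part of the original source: if ORIGINAL_TARGET is
   (concat:SC (reg:SF R) (reg:SF I)), expanding the constant 1.0 + 2.0i just
   moves 1.0 into R and 2.0 into I and returns the CONCAT itself.  */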
6508 /* ... fall through ... */
6510 case STRING_CST:
6511 temp = output_constant_def (exp, 1);
6513 /* temp contains a constant address.
6514 On RISC machines where a constant address isn't valid,
6515 make some insns to get that address into a register. */
6516 if (modifier != EXPAND_CONST_ADDRESS
6517 && modifier != EXPAND_INITIALIZER
6518 && modifier != EXPAND_SUM
6519 && (! memory_address_p (mode, XEXP (temp, 0))
6520 || flag_force_addr))
6521 return replace_equiv_address (temp,
6522 copy_rtx (XEXP (temp, 0)));
6523 return temp;
6525 case EXPR_WITH_FILE_LOCATION:
6527 rtx to_return;
6528 struct file_stack fs;
6530 fs.location = input_location;
6531 fs.next = expr_wfl_stack;
6532 input_filename = EXPR_WFL_FILENAME (exp);
6533 input_line = EXPR_WFL_LINENO (exp);
6534 expr_wfl_stack = &fs;
6535 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6536 emit_line_note (input_location);
6537 /* Possibly avoid switching back and forth here. */
6538 to_return = expand_expr (EXPR_WFL_NODE (exp),
6539 (ignore ? const0_rtx : target),
6540 tmode, modifier);
6541 if (expr_wfl_stack != &fs)
6542 abort ();
6543 input_location = fs.location;
6544 expr_wfl_stack = fs.next;
6545 return to_return;
6548 case SAVE_EXPR:
6549 context = decl_function_context (exp);
6551 /* If this SAVE_EXPR was at global context, assume we are an
6552 initialization function and move it into our context. */
6553 if (context == 0)
6554 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6556 /* We treat inline_function_decl as an alias for the current function
6557 because that is the inline function whose vars, types, etc.
6558 are being merged into the current function.
6559 See expand_inline_function. */
6560 if (context == current_function_decl || context == inline_function_decl)
6561 context = 0;
6563 /* If this is non-local, handle it. */
6564 if (context)
6566 /* The following call just exists to abort if the context is
6567 not of a containing function. */
6568 find_function_data (context);
6570 temp = SAVE_EXPR_RTL (exp);
6571 if (temp && GET_CODE (temp) == REG)
6573 put_var_into_stack (exp, /*rescan=*/true);
6574 temp = SAVE_EXPR_RTL (exp);
6576 if (temp == 0 || GET_CODE (temp) != MEM)
6577 abort ();
6578 return
6579 replace_equiv_address (temp,
6580 fix_lexical_addr (XEXP (temp, 0), exp));
6582 if (SAVE_EXPR_RTL (exp) == 0)
6584 if (mode == VOIDmode)
6585 temp = const0_rtx;
6586 else
6587 temp = assign_temp (build_qualified_type (type,
6588 (TYPE_QUALS (type)
6589 | TYPE_QUAL_CONST)),
6590 3, 0, 0);
6592 SAVE_EXPR_RTL (exp) = temp;
6593 if (!optimize && GET_CODE (temp) == REG)
6594 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6595 save_expr_regs);
6597 /* If the mode of TEMP does not match that of the expression, it
6598 must be a promoted value. We pass store_expr a SUBREG of the
6599 wanted mode but mark it so that we know that it was already
6600 extended. */
6602 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6604 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6605 promote_mode (type, mode, &unsignedp, 0);
6606 SUBREG_PROMOTED_VAR_P (temp) = 1;
6607 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6610 if (temp == const0_rtx)
6611 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6612 else
6613 store_expr (TREE_OPERAND (exp, 0), temp,
6614 modifier == EXPAND_STACK_PARM ? 2 : 0);
6616 TREE_USED (exp) = 1;
6619 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6620 must be a promoted value. We return a SUBREG of the wanted mode,
6621 but mark it so that we know that it was already extended. */
6623 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6624 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6626 /* Compute the signedness and make the proper SUBREG. */
6627 promote_mode (type, mode, &unsignedp, 0);
6628 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6629 SUBREG_PROMOTED_VAR_P (temp) = 1;
6630 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6631 return temp;
6634 return SAVE_EXPR_RTL (exp);
6636 case UNSAVE_EXPR:
6638 rtx temp;
6639 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6640 TREE_OPERAND (exp, 0)
6641 = (*lang_hooks.unsave_expr_now) (TREE_OPERAND (exp, 0));
6642 return temp;
6645 case PLACEHOLDER_EXPR:
6647 tree old_list = placeholder_list;
6648 tree placeholder_expr = 0;
6650 exp = find_placeholder (exp, &placeholder_expr);
6651 if (exp == 0)
6652 abort ();
6654 placeholder_list = TREE_CHAIN (placeholder_expr);
6655 temp = expand_expr (exp, original_target, tmode, modifier);
6656 placeholder_list = old_list;
6657 return temp;
6660 case WITH_RECORD_EXPR:
6661 /* Put the object on the placeholder list, expand our first operand,
6662 and pop the list. */
6663 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6664 placeholder_list);
6665 target = expand_expr (TREE_OPERAND (exp, 0), original_target, tmode,
6666 modifier);
6667 placeholder_list = TREE_CHAIN (placeholder_list);
6668 return target;
6670 case GOTO_EXPR:
6671 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6672 expand_goto (TREE_OPERAND (exp, 0));
6673 else
6674 expand_computed_goto (TREE_OPERAND (exp, 0));
6675 return const0_rtx;
6677 case EXIT_EXPR:
6678 expand_exit_loop_if_false (NULL,
6679 invert_truthvalue (TREE_OPERAND (exp, 0)));
6680 return const0_rtx;
6682 case LABELED_BLOCK_EXPR:
6683 if (LABELED_BLOCK_BODY (exp))
6684 expand_expr_stmt_value (LABELED_BLOCK_BODY (exp), 0, 1);
6685 /* Should perhaps use expand_label, but this is simpler and safer. */
6686 do_pending_stack_adjust ();
6687 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6688 return const0_rtx;
6690 case EXIT_BLOCK_EXPR:
6691 if (EXIT_BLOCK_RETURN (exp))
6692 sorry ("returned value in block_exit_expr");
6693 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6694 return const0_rtx;
6696 case LOOP_EXPR:
6697 push_temp_slots ();
6698 expand_start_loop (1);
6699 expand_expr_stmt_value (TREE_OPERAND (exp, 0), 0, 1);
6700 expand_end_loop ();
6701 pop_temp_slots ();
6703 return const0_rtx;
6705 case BIND_EXPR:
6707 tree vars = TREE_OPERAND (exp, 0);
6709 /* Need to open a binding contour here because
6710 if there are any cleanups they must be contained here. */
6711 expand_start_bindings (2);
6713 /* Mark the corresponding BLOCK for output in its proper place. */
6714 if (TREE_OPERAND (exp, 2) != 0
6715 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6716 (*lang_hooks.decls.insert_block) (TREE_OPERAND (exp, 2));
6718 /* If VARS have not yet been expanded, expand them now. */
6719 while (vars)
6721 if (!DECL_RTL_SET_P (vars))
6722 expand_decl (vars);
6723 expand_decl_init (vars);
6724 vars = TREE_CHAIN (vars);
6727 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
6729 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6731 return temp;
6734 case RTL_EXPR:
6735 if (RTL_EXPR_SEQUENCE (exp))
6737 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6738 abort ();
6739 emit_insn (RTL_EXPR_SEQUENCE (exp));
6740 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6742 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6743 free_temps_for_rtl_expr (exp);
6744 if (alt_rtl)
6745 *alt_rtl = RTL_EXPR_ALT_RTL (exp);
6746 return RTL_EXPR_RTL (exp);
6748 case CONSTRUCTOR:
6749 /* If we don't need the result, just ensure we evaluate any
6750 subexpressions. */
6751 if (ignore)
6753 tree elt;
6755 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6756 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
6758 return const0_rtx;
6761 /* All elts simple constants => refer to a constant in memory. But
6762 if this is a non-BLKmode mode, let it store a field at a time
6763 since that should make a CONST_INT or CONST_DOUBLE when we
6764 fold. Likewise, if we have a target we can use, it is best to
6765 store directly into the target unless the type is large enough
6766 that memcpy will be used. If we are making an initializer and
6767 all operands are constant, put it in memory as well.
6769 FIXME: Avoid trying to fill vector constructors piece-meal.
6770 Output them with output_constant_def below unless we're sure
6771 they're zeros. This should go away when vector initializers
6772 are treated like VECTOR_CST instead of arrays.
6774 else if ((TREE_STATIC (exp)
6775 && ((mode == BLKmode
6776 && ! (target != 0 && safe_from_p (target, exp, 1)))
6777 || TREE_ADDRESSABLE (exp)
6778 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6779 && (! MOVE_BY_PIECES_P
6780 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6781 TYPE_ALIGN (type)))
6782 && ((TREE_CODE (type) == VECTOR_TYPE
6783 && !is_zeros_p (exp))
6784 || ! mostly_zeros_p (exp)))))
6785 || ((modifier == EXPAND_INITIALIZER
6786 || modifier == EXPAND_CONST_ADDRESS)
6787 && TREE_CONSTANT (exp)))
6789 rtx constructor = output_constant_def (exp, 1);
6791 if (modifier != EXPAND_CONST_ADDRESS
6792 && modifier != EXPAND_INITIALIZER
6793 && modifier != EXPAND_SUM)
6794 constructor = validize_mem (constructor);
6796 return constructor;
6798 else
6800 /* Handle calls that pass values in multiple non-contiguous
6801 locations. The Irix 6 ABI has examples of this. */
6802 if (target == 0 || ! safe_from_p (target, exp, 1)
6803 || GET_CODE (target) == PARALLEL
6804 || modifier == EXPAND_STACK_PARM)
6805 target
6806 = assign_temp (build_qualified_type (type,
6807 (TYPE_QUALS (type)
6808 | (TREE_READONLY (exp)
6809 * TYPE_QUAL_CONST))),
6810 0, TREE_ADDRESSABLE (exp), 1);
6812 store_constructor (exp, target, 0, int_expr_size (exp));
6813 return target;
6816 case INDIRECT_REF:
6818 tree exp1 = TREE_OPERAND (exp, 0);
6819 tree index;
6820 tree string = string_constant (exp1, &index);
6822 /* Try to optimize reads from const strings. */
6823 if (string
6824 && TREE_CODE (string) == STRING_CST
6825 && TREE_CODE (index) == INTEGER_CST
6826 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
6827 && GET_MODE_CLASS (mode) == MODE_INT
6828 && GET_MODE_SIZE (mode) == 1
6829 && modifier != EXPAND_WRITE)
6830 return gen_int_mode (TREE_STRING_POINTER (string)
6831 [TREE_INT_CST_LOW (index)], mode);
6833 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6834 op0 = memory_address (mode, op0);
6835 temp = gen_rtx_MEM (mode, op0);
6836 set_mem_attributes (temp, exp, 0);
6838 /* If we are writing to this object and its type is a record with
6839 readonly fields, we must mark it as readonly so it will
6840 conflict with readonly references to those fields. */
6841 if (modifier == EXPAND_WRITE && readonly_fields_p (type))
6842 RTX_UNCHANGING_P (temp) = 1;
6844 return temp;
6847 case ARRAY_REF:
6848 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6849 abort ();
6852 tree array = TREE_OPERAND (exp, 0);
6853 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6854 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6855 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6856 HOST_WIDE_INT i;
6858 /* Optimize the special-case of a zero lower bound.
6860 We convert the low_bound to sizetype to avoid some problems
6861 with constant folding. (E.g. suppose the lower bound is 1,
6862 and its mode is QI. Without the conversion, (ARRAY
6863 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6864 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6866 if (! integer_zerop (low_bound))
6867 index = size_diffop (index, convert (sizetype, low_bound));
6869 /* Fold an expression like: "foo"[2].
6870 This is not done in fold so it won't happen inside &.
6871 Don't fold if this is for wide characters since it's too
6872 difficult to do correctly and this is a very rare case. */
6874 if (modifier != EXPAND_CONST_ADDRESS
6875 && modifier != EXPAND_INITIALIZER
6876 && modifier != EXPAND_MEMORY
6877 && TREE_CODE (array) == STRING_CST
6878 && TREE_CODE (index) == INTEGER_CST
6879 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
6880 && GET_MODE_CLASS (mode) == MODE_INT
6881 && GET_MODE_SIZE (mode) == 1)
6882 return gen_int_mode (TREE_STRING_POINTER (array)
6883 [TREE_INT_CST_LOW (index)], mode);
6885 /* If this is a constant index into a constant array,
6886 just get the value from the array. Handle both the cases when
6887 we have an explicit constructor and when our operand is a variable
6888 that was declared const. */
6890 if (modifier != EXPAND_CONST_ADDRESS
6891 && modifier != EXPAND_INITIALIZER
6892 && modifier != EXPAND_MEMORY
6893 && TREE_CODE (array) == CONSTRUCTOR
6894 && ! TREE_SIDE_EFFECTS (array)
6895 && TREE_CODE (index) == INTEGER_CST
6896 && 0 > compare_tree_int (index,
6897 list_length (CONSTRUCTOR_ELTS
6898 (TREE_OPERAND (exp, 0)))))
6900 tree elem;
6902 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6903 i = TREE_INT_CST_LOW (index);
6904 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6907 if (elem)
6908 return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
6909 modifier);
6912 else if (optimize >= 1
6913 && modifier != EXPAND_CONST_ADDRESS
6914 && modifier != EXPAND_INITIALIZER
6915 && modifier != EXPAND_MEMORY
6916 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6917 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6918 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
6919 && targetm.binds_local_p (array))
6921 if (TREE_CODE (index) == INTEGER_CST)
6923 tree init = DECL_INITIAL (array);
6925 if (TREE_CODE (init) == CONSTRUCTOR)
6927 tree elem;
6929 for (elem = CONSTRUCTOR_ELTS (init);
6930 (elem
6931 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6932 elem = TREE_CHAIN (elem))
6935 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
6936 return expand_expr (fold (TREE_VALUE (elem)), target,
6937 tmode, modifier);
6939 else if (TREE_CODE (init) == STRING_CST
6940 && 0 > compare_tree_int (index,
6941 TREE_STRING_LENGTH (init)))
6943 tree type = TREE_TYPE (TREE_TYPE (init));
6944 enum machine_mode mode = TYPE_MODE (type);
6946 if (GET_MODE_CLASS (mode) == MODE_INT
6947 && GET_MODE_SIZE (mode) == 1)
6948 return gen_int_mode (TREE_STRING_POINTER (init)
6949 [TREE_INT_CST_LOW (index)], mode);
6954 goto normal_inner_ref;
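/* Editorial example, not part of the original source: given
   static const int t[3] = { 10, 20, 30 };, a read of t[1] can expand
   directly to the constant 20 when optimizing, provided T binds locally
   and its initializer is available as sketched above.  */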
6956 case COMPONENT_REF:
6957 /* If the operand is a CONSTRUCTOR, we can just extract the
6958 appropriate field if it is present. */
6959 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
6961 tree elt;
6963 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6964 elt = TREE_CHAIN (elt))
6965 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6966 /* We can normally use the value of the field in the
6967 CONSTRUCTOR. However, if this is a bitfield in
6968 an integral mode that we can fit in a HOST_WIDE_INT,
6969 we must mask only the number of bits in the bitfield,
6970 since this is done implicitly by the constructor. If
6971 the bitfield does not meet either of those conditions,
6972 we can't do this optimization. */
6973 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6974 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6975 == MODE_INT)
6976 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6977 <= HOST_BITS_PER_WIDE_INT))))
6979 if (DECL_BIT_FIELD (TREE_PURPOSE (elt))
6980 && modifier == EXPAND_STACK_PARM)
6981 target = 0;
6982 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6983 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6985 HOST_WIDE_INT bitsize
6986 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6987 enum machine_mode imode
6988 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6990 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6992 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6993 op0 = expand_and (imode, op0, op1, target);
6995 else
6997 tree count
6998 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
7001 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
7002 target, 0);
7003 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
7004 target, 0);
7008 return op0;
7011 goto normal_inner_ref;
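/* Editorial example, not part of the original source: for a 5-bit bitfield
   taken from a CONSTRUCTOR in SImode (32 bits assumed), an unsigned field
   is masked with (1 << 5) - 1 = 0x1f, while a signed field is shifted left
   by 32 - 5 = 27 and then arithmetically shifted right by 27 to
   sign-extend.  */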
7013 case BIT_FIELD_REF:
7014 case ARRAY_RANGE_REF:
7015 normal_inner_ref:
7017 enum machine_mode mode1;
7018 HOST_WIDE_INT bitsize, bitpos;
7019 tree offset;
7020 int volatilep = 0;
7021 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7022 &mode1, &unsignedp, &volatilep);
7023 rtx orig_op0;
7025 /* If we got back the original object, something is wrong. Perhaps
7026 we are evaluating an expression too early. In any event, don't
7027 infinitely recurse. */
7028 if (tem == exp)
7029 abort ();
7031 /* If TEM's type is a union of variable size, pass TARGET to the inner
7032 computation, since it will need a temporary and TARGET is known
7033 to be usable for that. This occurs in unchecked conversion in Ada. */
7035 orig_op0 = op0
7036 = expand_expr (tem,
7037 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7038 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7039 != INTEGER_CST)
7040 && modifier != EXPAND_STACK_PARM
7041 ? target : NULL_RTX),
7042 VOIDmode,
7043 (modifier == EXPAND_INITIALIZER
7044 || modifier == EXPAND_CONST_ADDRESS
7045 || modifier == EXPAND_STACK_PARM)
7046 ? modifier : EXPAND_NORMAL);
7048 /* If this is a constant, put it into a register if it is a
7049 legitimate constant and OFFSET is 0, and into memory if it isn't. */
7050 if (CONSTANT_P (op0))
7052 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7053 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7054 && offset == 0)
7055 op0 = force_reg (mode, op0);
7056 else
7057 op0 = validize_mem (force_const_mem (mode, op0));
7060 /* Otherwise, if this object is not in memory and we either have an
7061 offset or a BLKmode result, put it there. This case can't occur in
7062 C, but can in Ada if we have unchecked conversion of an expression
7063 from a scalar type to an array or record type or for an
7064 ARRAY_RANGE_REF whose type is BLKmode. */
7065 else if (GET_CODE (op0) != MEM
7066 && (offset != 0
7067 || (code == ARRAY_RANGE_REF && mode == BLKmode)))
7069 /* If the operand is a SAVE_EXPR, we can deal with this by
7070 forcing the SAVE_EXPR into memory. */
7071 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
7073 put_var_into_stack (TREE_OPERAND (exp, 0),
7074 /*rescan=*/true);
7075 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
7077 else
7079 tree nt
7080 = build_qualified_type (TREE_TYPE (tem),
7081 (TYPE_QUALS (TREE_TYPE (tem))
7082 | TYPE_QUAL_CONST));
7083 rtx memloc = assign_temp (nt, 1, 1, 1);
7085 emit_move_insn (memloc, op0);
7086 op0 = memloc;
7090 if (offset != 0)
7092 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
7093 EXPAND_SUM);
7095 if (GET_CODE (op0) != MEM)
7096 abort ();
7098 #ifdef POINTERS_EXTEND_UNSIGNED
7099 if (GET_MODE (offset_rtx) != Pmode)
7100 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
7101 #else
7102 if (GET_MODE (offset_rtx) != ptr_mode)
7103 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7104 #endif
7106 if (GET_MODE (op0) == BLKmode
7107 /* A constant address in OP0 can have VOIDmode; we must
7108 not try to call force_reg in that case. */
7109 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7110 && bitsize != 0
7111 && (bitpos % bitsize) == 0
7112 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7113 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7115 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7116 bitpos = 0;
7119 op0 = offset_address (op0, offset_rtx,
7120 highest_pow2_factor (offset));
7123 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7124 record its alignment as BIGGEST_ALIGNMENT. */
7125 if (GET_CODE (op0) == MEM && bitpos == 0 && offset != 0
7126 && is_aligning_offset (offset, tem))
7127 set_mem_align (op0, BIGGEST_ALIGNMENT);
7129 /* Don't forget about volatility even if this is a bitfield. */
7130 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
7132 if (op0 == orig_op0)
7133 op0 = copy_rtx (op0);
7135 MEM_VOLATILE_P (op0) = 1;
7138 /* The following code doesn't handle CONCAT.
7139 Assume only bitpos == 0 can be used for CONCAT, due to
7140 one-element arrays having the same mode as their element. */
7141 if (GET_CODE (op0) == CONCAT)
7143 if (bitpos != 0 || bitsize != GET_MODE_BITSIZE (GET_MODE (op0)))
7144 abort ();
7145 return op0;
7148 /* In cases where an aligned union has an unaligned object
7149 as a field, we might be extracting a BLKmode value from
7150 an integer-mode (e.g., SImode) object. Handle this case
7151 by doing the extract into an object as wide as the field
7152 (which we know to be the width of a basic mode), then
7153 storing into memory, and changing the mode to BLKmode. */
7154 if (mode1 == VOIDmode
7155 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7156 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7157 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7158 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7159 && modifier != EXPAND_CONST_ADDRESS
7160 && modifier != EXPAND_INITIALIZER)
7161 /* If the field isn't aligned enough to fetch as a memref,
7162 fetch it as a bit field. */
7163 || (mode1 != BLKmode
7164 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
7165 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
7166 || (GET_CODE (op0) == MEM
7167 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
7168 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
7169 && ((modifier == EXPAND_CONST_ADDRESS
7170 || modifier == EXPAND_INITIALIZER)
7171 ? STRICT_ALIGNMENT
7172 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
7173 || (bitpos % BITS_PER_UNIT != 0)))
7174 /* If the type and the field are a constant size and the
7175 size of the type isn't the same size as the bitfield,
7176 we must use bitfield operations. */
7177 || (bitsize >= 0
7178 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7179 == INTEGER_CST)
7180 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7181 bitsize)))
7183 enum machine_mode ext_mode = mode;
7185 if (ext_mode == BLKmode
7186 && ! (target != 0 && GET_CODE (op0) == MEM
7187 && GET_CODE (target) == MEM
7188 && bitpos % BITS_PER_UNIT == 0))
7189 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7191 if (ext_mode == BLKmode)
7193 if (target == 0)
7194 target = assign_temp (type, 0, 1, 1);
7196 if (bitsize == 0)
7197 return target;
7199 /* In this case, BITPOS must start at a byte boundary and
7200 TARGET, if specified, must be a MEM. */
7201 if (GET_CODE (op0) != MEM
7202 || (target != 0 && GET_CODE (target) != MEM)
7203 || bitpos % BITS_PER_UNIT != 0)
7204 abort ();
7206 emit_block_move (target,
7207 adjust_address (op0, VOIDmode,
7208 bitpos / BITS_PER_UNIT),
7209 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7210 / BITS_PER_UNIT),
7211 (modifier == EXPAND_STACK_PARM
7212 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7214 return target;
7217 op0 = validize_mem (op0);
7219 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
7220 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7222 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7223 (modifier == EXPAND_STACK_PARM
7224 ? NULL_RTX : target),
7225 ext_mode, ext_mode,
7226 int_size_in_bytes (TREE_TYPE (tem)));
7228 /* If the result is a record type and BITSIZE is narrower than
7229 the mode of OP0, an integral mode, and this is a big endian
7230 machine, we must put the field into the high-order bits. */
7231 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7232 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7233 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7234 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7235 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7236 - bitsize),
7237 op0, 1);
7239 if (mode == BLKmode)
7241 rtx new = assign_temp (build_qualified_type
7242 ((*lang_hooks.types.type_for_mode)
7243 (ext_mode, 0),
7244 TYPE_QUAL_CONST), 0, 1, 1);
7246 emit_move_insn (new, op0);
7247 op0 = copy_rtx (new);
7248 PUT_MODE (op0, BLKmode);
7249 set_mem_attributes (op0, exp, 1);
7252 return op0;
7255 /* If the result is BLKmode, use that to access the object
7256 now as well. */
7257 if (mode == BLKmode)
7258 mode1 = BLKmode;
7260 /* Get a reference to just this component. */
7261 if (modifier == EXPAND_CONST_ADDRESS
7262 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7263 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7264 else
7265 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7267 if (op0 == orig_op0)
7268 op0 = copy_rtx (op0);
7270 set_mem_attributes (op0, exp, 0);
7271 if (GET_CODE (XEXP (op0, 0)) == REG)
7272 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7274 MEM_VOLATILE_P (op0) |= volatilep;
7275 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7276 || modifier == EXPAND_CONST_ADDRESS
7277 || modifier == EXPAND_INITIALIZER)
7278 return op0;
7279 else if (target == 0)
7280 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7282 convert_move (target, op0, unsignedp);
7283 return target;
7286 case VTABLE_REF:
7288 rtx insn, before = get_last_insn (), vtbl_ref;
7290 /* Evaluate the interior expression. */
7291 subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
7292 tmode, modifier);
7294 /* Get or create an instruction off which to hang a note. */
7295 if (REG_P (subtarget))
7297 target = subtarget;
7298 insn = get_last_insn ();
7299 if (insn == before)
7300 abort ();
7301 if (! INSN_P (insn))
7302 insn = prev_nonnote_insn (insn);
7304 else
7306 target = gen_reg_rtx (GET_MODE (subtarget));
7307 insn = emit_move_insn (target, subtarget);
7310 /* Collect the data for the note. */
7311 vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
7312 vtbl_ref = plus_constant (vtbl_ref,
7313 tree_low_cst (TREE_OPERAND (exp, 2), 0));
7314 /* Discard the initial CONST that was added. */
7315 vtbl_ref = XEXP (vtbl_ref, 0);
7317 REG_NOTES (insn)
7318 = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));
7320 return target;
7323 /* Intended for a reference to a buffer of a file-object in Pascal.
7324 But it's not certain that a special tree code will really be
7325 necessary for these. INDIRECT_REF might work for them. */
7326 case BUFFER_REF:
7327 abort ();
7329 case IN_EXPR:
7331 /* Pascal set IN expression.
7333 Algorithm:
7334 rlo = set_low - (set_low%bits_per_word);
7335 the_word = set [ (index - rlo)/bits_per_word ];
7336 bit_index = index % bits_per_word;
7337 bitmask = 1 << bit_index;
7338 return !!(the_word & bitmask); */
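 /* Worked example (illustrative values only): for a set declared over
 10..30 with bits_per_word == 8 and index == 17:
 rlo = 10 - (10 % 8) = 8
 the_word = set[(17 - 8) / 8] = set[1]
 bit_index = 17 % 8 = 1
 bitmask = 1 << 1
 so the membership test reduces to (set[1] >> 1) & 1. */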
7340 tree set = TREE_OPERAND (exp, 0);
7341 tree index = TREE_OPERAND (exp, 1);
7342 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7343 tree set_type = TREE_TYPE (set);
7344 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7345 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7346 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7347 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7348 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7349 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7350 rtx setaddr = XEXP (setval, 0);
7351 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7352 rtx rlow;
7353 rtx diff, quo, rem, addr, bit, result;
7355 /* If domain is empty, answer is no. Likewise if index is constant
7356 and out of bounds. */
7357 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7358 && TREE_CODE (set_low_bound) == INTEGER_CST
7359 && tree_int_cst_lt (set_high_bound, set_low_bound))
7360 || (TREE_CODE (index) == INTEGER_CST
7361 && TREE_CODE (set_low_bound) == INTEGER_CST
7362 && tree_int_cst_lt (index, set_low_bound))
7363 || (TREE_CODE (set_high_bound) == INTEGER_CST
7364 && TREE_CODE (index) == INTEGER_CST
7365 && tree_int_cst_lt (set_high_bound, index))))
7366 return const0_rtx;
7368 if (target == 0)
7369 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7371 /* If we get here, we have to generate the code for both cases
7372 (in range and out of range). */
7374 op0 = gen_label_rtx ();
7375 op1 = gen_label_rtx ();
7377 if (! (GET_CODE (index_val) == CONST_INT
7378 && GET_CODE (lo_r) == CONST_INT))
7379 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7380 GET_MODE (index_val), iunsignedp, op1);
7382 if (! (GET_CODE (index_val) == CONST_INT
7383 && GET_CODE (hi_r) == CONST_INT))
7384 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7385 GET_MODE (index_val), iunsignedp, op1);
7387 /* Calculate the element number of bit zero in the first word
7388 of the set. */
7389 if (GET_CODE (lo_r) == CONST_INT)
7390 rlow = GEN_INT (INTVAL (lo_r)
7391 & ~((HOST_WIDE_INT) (BITS_PER_UNIT - 1)));
7392 else
7393 rlow = expand_binop (index_mode, and_optab, lo_r,
7394 GEN_INT (~((HOST_WIDE_INT) (BITS_PER_UNIT - 1))),
7395 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7397 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7398 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7400 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7401 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7402 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7403 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7405 addr = memory_address (byte_mode,
7406 expand_binop (index_mode, add_optab, diff,
7407 setaddr, NULL_RTX, iunsignedp,
7408 OPTAB_LIB_WIDEN));
7410 /* Extract the bit we want to examine. */
7411 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7412 gen_rtx_MEM (byte_mode, addr),
7413 make_tree (TREE_TYPE (index), rem),
7414 NULL_RTX, 1);
7415 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7416 GET_MODE (target) == byte_mode ? target : 0,
7417 1, OPTAB_LIB_WIDEN);
7419 if (result != target)
7420 convert_move (target, result, 1);
7422 /* Output the code to handle the out-of-range case. */
7423 emit_jump (op0);
7424 emit_label (op1);
7425 emit_move_insn (target, const0_rtx);
7426 emit_label (op0);
7427 return target;
7430 case WITH_CLEANUP_EXPR:
7431 if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
7433 WITH_CLEANUP_EXPR_RTL (exp)
7434 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7435 expand_decl_cleanup_eh (NULL_TREE, TREE_OPERAND (exp, 1),
7436 CLEANUP_EH_ONLY (exp));
7438 /* That's it for this cleanup. */
7439 TREE_OPERAND (exp, 1) = 0;
7441 return WITH_CLEANUP_EXPR_RTL (exp);
7443 case CLEANUP_POINT_EXPR:
7445 /* Start a new binding layer that will keep track of all cleanup
7446 actions to be performed. */
7447 expand_start_bindings (2);
7449 target_temp_slot_level = temp_slot_level;
7451 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7452 /* If we're going to use this value, load it up now. */
7453 if (! ignore)
7454 op0 = force_not_mem (op0);
7455 preserve_temp_slots (op0);
7456 expand_end_bindings (NULL_TREE, 0, 0);
7458 return op0;
7460 case CALL_EXPR:
7461 /* Check for a built-in function. */
7462 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7463 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7464 == FUNCTION_DECL)
7465 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7467 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7468 == BUILT_IN_FRONTEND)
7469 return (*lang_hooks.expand_expr) (exp, original_target,
7470 tmode, modifier,
7471 alt_rtl);
7472 else
7473 return expand_builtin (exp, target, subtarget, tmode, ignore);
7476 return expand_call (exp, target, ignore);
7478 case NON_LVALUE_EXPR:
7479 case NOP_EXPR:
7480 case CONVERT_EXPR:
7481 case REFERENCE_EXPR:
7482 if (TREE_OPERAND (exp, 0) == error_mark_node)
7483 return const0_rtx;
7485 if (TREE_CODE (type) == UNION_TYPE)
7487 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7489 /* If both input and output are BLKmode, this conversion isn't doing
7490 anything except possibly changing memory attribute. */
7491 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7493 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7494 modifier);
7496 result = copy_rtx (result);
7497 set_mem_attributes (result, exp, 0);
7498 return result;
7501 if (target == 0)
7503 if (TYPE_MODE (type) != BLKmode)
7504 target = gen_reg_rtx (TYPE_MODE (type));
7505 else
7506 target = assign_temp (type, 0, 1, 1);
7509 if (GET_CODE (target) == MEM)
7510 /* Store data into beginning of memory target. */
7511 store_expr (TREE_OPERAND (exp, 0),
7512 adjust_address (target, TYPE_MODE (valtype), 0),
7513 modifier == EXPAND_STACK_PARM ? 2 : 0);
7515 else if (GET_CODE (target) == REG)
7516 /* Store this field into a union of the proper type. */
7517 store_field (target,
7518 MIN ((int_size_in_bytes (TREE_TYPE
7519 (TREE_OPERAND (exp, 0)))
7520 * BITS_PER_UNIT),
7521 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7522 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7523 VOIDmode, 0, type, 0);
7524 else
7525 abort ();
7527 /* Return the entire union. */
7528 return target;
7531 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7533 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7534 modifier);
7536 /* If the signedness of the conversion differs and OP0 is
7537 a promoted SUBREG, clear that indication since we now
7538 have to do the proper extension. */
7539 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7540 && GET_CODE (op0) == SUBREG)
7541 SUBREG_PROMOTED_VAR_P (op0) = 0;
7543 return op0;
7546 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7547 if (GET_MODE (op0) == mode)
7548 return op0;
7550 /* If OP0 is a constant, just convert it into the proper mode. */
7551 if (CONSTANT_P (op0))
7553 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7554 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7556 if (modifier == EXPAND_INITIALIZER)
7557 return simplify_gen_subreg (mode, op0, inner_mode,
7558 subreg_lowpart_offset (mode,
7559 inner_mode));
7560 else
7561 return convert_modes (mode, inner_mode, op0,
7562 TREE_UNSIGNED (inner_type));
7565 if (modifier == EXPAND_INITIALIZER)
7566 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7568 if (target == 0)
7569 return
7570 convert_to_mode (mode, op0,
7571 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7572 else
7573 convert_move (target, op0,
7574 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7575 return target;
7577 case VIEW_CONVERT_EXPR:
7578 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7580 /* If the input and output modes are both the same, we are done.
7581 Otherwise, if neither mode is BLKmode and both are integral and within
7582 a word, we can use gen_lowpart. If neither is true, make sure the
7583 operand is in memory and convert the MEM to the new mode. */
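 /* Illustration (hypothetical types): reinterpreting a float as an
 unsigned int is a VIEW_CONVERT_EXPR between SFmode and SImode; since
 SFmode is not an integer mode, the gen_lowpart shortcut does not
 apply, so the operand is forced into memory and re-read in SImode by
 the code below. */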
7584 if (TYPE_MODE (type) == GET_MODE (op0))
7586 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7587 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7588 && GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT
7589 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7590 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7591 op0 = gen_lowpart (TYPE_MODE (type), op0);
7592 else if (GET_CODE (op0) != MEM)
7594 /* If the operand is not a MEM, force it into memory. Since we
7595 are going to be changing the mode of the MEM, don't call
7596 force_const_mem for constants because we don't allow pool
7597 constants to change mode. */
7598 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7600 if (TREE_ADDRESSABLE (exp))
7601 abort ();
7603 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7604 target
7605 = assign_stack_temp_for_type
7606 (TYPE_MODE (inner_type),
7607 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7609 emit_move_insn (target, op0);
7610 op0 = target;
7613 /* At this point, OP0 is in the correct mode. If the output type is such
7614 that the operand is known to be aligned, indicate that it is.
7615 Otherwise, we need only be concerned about alignment for non-BLKmode
7616 results. */
7617 if (GET_CODE (op0) == MEM)
7619 op0 = copy_rtx (op0);
7621 if (TYPE_ALIGN_OK (type))
7622 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7623 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7624 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7626 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7627 HOST_WIDE_INT temp_size
7628 = MAX (int_size_in_bytes (inner_type),
7629 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7630 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7631 temp_size, 0, type);
7632 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7634 if (TREE_ADDRESSABLE (exp))
7635 abort ();
7637 if (GET_MODE (op0) == BLKmode)
7638 emit_block_move (new_with_op0_mode, op0,
7639 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
7640 (modifier == EXPAND_STACK_PARM
7641 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7642 else
7643 emit_move_insn (new_with_op0_mode, op0);
7645 op0 = new;
7648 op0 = adjust_address (op0, TYPE_MODE (type), 0);
7651 return op0;
7653 case PLUS_EXPR:
7654 this_optab = ! unsignedp && flag_trapv
7655 && (GET_MODE_CLASS (mode) == MODE_INT)
7656 ? addv_optab : add_optab;
7658 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7659 something else, make sure we add the register to the constant and
7660 then to the other thing. This case can occur during strength
7661 reduction and doing it this way will produce better code if the
7662 frame pointer or argument pointer is eliminated.
7664 fold-const.c will ensure that the constant is always in the inner
7665 PLUS_EXPR, so the only case we need to do anything about is if
7666 sp, ap, or fp is our second argument, in which case we must swap
7667 the innermost first argument and our second argument. */
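 /* For illustration (hypothetical operands): given (i + 8) + fp, the
 swap below yields (fp + 8) + i, so fp + 8 can fold into a single
 offset once the frame pointer is eliminated, instead of leaving the
 constant attached to i. */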
7669 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7670 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7671 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7672 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7673 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7674 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7676 tree t = TREE_OPERAND (exp, 1);
7678 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7679 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7682 /* If the result is to be ptr_mode and we are adding an integer to
7683 something, we might be forming a constant. So try to use
7684 plus_constant. If it produces a sum and we can't accept it,
7685 use force_operand. This allows P = &ARR[const] to generate
7686 efficient code on machines where a SYMBOL_REF is not a valid
7687 address.
7689 If this is an EXPAND_SUM call, always return the sum. */
7690 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7691 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7693 if (modifier == EXPAND_STACK_PARM)
7694 target = 0;
7695 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7696 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7697 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7699 rtx constant_part;
7701 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7702 EXPAND_SUM);
7703 /* Use immed_double_const to ensure that the constant is
7704 truncated according to the mode of OP1, then sign extended
7705 to a HOST_WIDE_INT. Using the constant directly can result
7706 in non-canonical RTL in a 64x32 cross compile. */
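 /* E.g. (illustrative): on a 64-bit host targeting a 32-bit machine,
 an SImode constant with all 32 bits set reads from TREE_INT_CST_LOW
 as 0xffffffff; the canonical SImode CONST_INT is -1, which is what
 immed_double_const produces after truncating and sign-extending. */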
7707 constant_part
7708 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7709 (HOST_WIDE_INT) 0,
7710 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7711 op1 = plus_constant (op1, INTVAL (constant_part));
7712 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7713 op1 = force_operand (op1, target);
7714 return op1;
7717 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7718 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7719 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7721 rtx constant_part;
7723 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7724 (modifier == EXPAND_INITIALIZER
7725 ? EXPAND_INITIALIZER : EXPAND_SUM));
7726 if (! CONSTANT_P (op0))
7728 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7729 VOIDmode, modifier);
7730 /* Return a PLUS if modifier says it's OK. */
7731 if (modifier == EXPAND_SUM
7732 || modifier == EXPAND_INITIALIZER)
7733 return simplify_gen_binary (PLUS, mode, op0, op1);
7734 goto binop2;
7736 /* Use immed_double_const to ensure that the constant is
7737 truncated according to the mode of OP1, then sign extended
7738 to a HOST_WIDE_INT. Using the constant directly can result
7739 in non-canonical RTL in a 64x32 cross compile. */
7740 constant_part
7741 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7742 (HOST_WIDE_INT) 0,
7743 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7744 op0 = plus_constant (op0, INTVAL (constant_part));
7745 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7746 op0 = force_operand (op0, target);
7747 return op0;
7751 /* No sense saving up arithmetic to be done
7752 if it's all in the wrong mode to form part of an address.
7753 And force_operand won't know whether to sign-extend or
7754 zero-extend. */
7755 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7756 || mode != ptr_mode)
7758 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7759 subtarget, &op0, &op1, 0);
7760 if (op0 == const0_rtx)
7761 return op1;
7762 if (op1 == const0_rtx)
7763 return op0;
7764 goto binop2;
7767 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7768 subtarget, &op0, &op1, modifier);
7769 return simplify_gen_binary (PLUS, mode, op0, op1);
7771 case MINUS_EXPR:
7772 /* For initializers, we are allowed to return a MINUS of two
7773 symbolic constants. Here we handle all cases when both operands
7774 are constant. */
7775 /* Handle difference of two symbolic constants,
7776 for the sake of an initializer. */
7777 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7778 && really_constant_p (TREE_OPERAND (exp, 0))
7779 && really_constant_p (TREE_OPERAND (exp, 1)))
7781 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7782 NULL_RTX, &op0, &op1, modifier);
7784 /* If the last operand is a CONST_INT, use plus_constant of
7785 the negated constant. Else make the MINUS. */
7786 if (GET_CODE (op1) == CONST_INT)
7787 return plus_constant (op0, - INTVAL (op1));
7788 else
7789 return gen_rtx_MINUS (mode, op0, op1);
7792 this_optab = ! unsignedp && flag_trapv
7793 && (GET_MODE_CLASS(mode) == MODE_INT)
7794 ? subv_optab : sub_optab;
7796 /* No sense saving up arithmetic to be done
7797 if it's all in the wrong mode to form part of an address.
7798 And force_operand won't know whether to sign-extend or
7799 zero-extend. */
7800 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7801 || mode != ptr_mode)
7802 goto binop;
7804 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7805 subtarget, &op0, &op1, modifier);
7807 /* Convert A - const to A + (-const). */
7808 if (GET_CODE (op1) == CONST_INT)
7810 op1 = negate_rtx (mode, op1);
7811 return simplify_gen_binary (PLUS, mode, op0, op1);
7814 goto binop2;
7816 case MULT_EXPR:
7817 /* If first operand is constant, swap them.
7818 Thus the following special case checks need only
7819 check the second operand. */
7820 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7822 tree t1 = TREE_OPERAND (exp, 0);
7823 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7824 TREE_OPERAND (exp, 1) = t1;
7827 /* Attempt to return something suitable for generating an
7828 indexed address, for machines that support that. */
7830 if (modifier == EXPAND_SUM && mode == ptr_mode
7831 && host_integerp (TREE_OPERAND (exp, 1), 0))
7833 tree exp1 = TREE_OPERAND (exp, 1);
7835 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7836 EXPAND_SUM);
7838 if (GET_CODE (op0) != REG)
7839 op0 = force_operand (op0, NULL_RTX);
7840 if (GET_CODE (op0) != REG)
7841 op0 = copy_to_mode_reg (mode, op0);
7843 return gen_rtx_MULT (mode, op0,
7844 gen_int_mode (tree_low_cst (exp1, 0),
7845 TYPE_MODE (TREE_TYPE (exp1))));
7848 if (modifier == EXPAND_STACK_PARM)
7849 target = 0;
7851 /* Check for multiplying things that have been extended
7852 from a narrower type. If this machine supports multiplying
7853 in that narrower type with a result in the desired type,
7854 do it that way, and avoid the explicit type-conversion. */
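 /* For example (hypothetical types): (int) a * (int) b with a and b of
 type short can use a widening HImode x HImode -> SImode multiply
 (a mulhisi3-style pattern) where the target provides one, instead of
 sign-extending both operands and doing a full SImode multiply. */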
7855 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7856 && TREE_CODE (type) == INTEGER_TYPE
7857 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7858 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7859 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7860 && int_fits_type_p (TREE_OPERAND (exp, 1),
7861 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7862 /* Don't use a widening multiply if a shift will do. */
7863 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7864 > HOST_BITS_PER_WIDE_INT)
7865 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7867 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7868 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7870 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7871 /* If both operands are extended, they must either both
7872 be zero-extended or both be sign-extended. */
7873 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7875 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7877 tree op0type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
7878 enum machine_mode innermode = TYPE_MODE (op0type);
7879 bool zextend_p = TREE_UNSIGNED (op0type);
7880 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
7881 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
7883 if (mode == GET_MODE_WIDER_MODE (innermode))
7885 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7887 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7888 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7889 TREE_OPERAND (exp, 1),
7890 NULL_RTX, &op0, &op1, 0);
7891 else
7892 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7893 TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7894 NULL_RTX, &op0, &op1, 0);
7895 goto binop2;
7897 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7898 && innermode == word_mode)
7900 rtx htem, hipart;
7901 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7902 NULL_RTX, VOIDmode, 0);
7903 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7904 op1 = convert_modes (innermode, mode,
7905 expand_expr (TREE_OPERAND (exp, 1),
7906 NULL_RTX, VOIDmode, 0),
7907 unsignedp);
7908 else
7909 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7910 NULL_RTX, VOIDmode, 0);
7911 temp = expand_binop (mode, other_optab, op0, op1, target,
7912 unsignedp, OPTAB_LIB_WIDEN);
7913 hipart = gen_highpart (innermode, temp);
7914 htem = expand_mult_highpart_adjust (innermode, hipart,
7915 op0, op1, hipart,
7916 zextend_p);
7917 if (htem != hipart)
7918 emit_move_insn (hipart, htem);
7919 return temp;
7923 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7924 subtarget, &op0, &op1, 0);
7925 return expand_mult (mode, op0, op1, target, unsignedp);
7927 case TRUNC_DIV_EXPR:
7928 case FLOOR_DIV_EXPR:
7929 case CEIL_DIV_EXPR:
7930 case ROUND_DIV_EXPR:
7931 case EXACT_DIV_EXPR:
7932 if (modifier == EXPAND_STACK_PARM)
7933 target = 0;
7934 /* Possible optimization: compute the dividend with EXPAND_SUM;
7935 then, if the divisor is constant, we can optimize the case
7936 where some terms of the dividend have coefficients divisible by it. */
7937 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7938 subtarget, &op0, &op1, 0);
7939 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7941 case RDIV_EXPR:
7942 /* Emit a/b as a*(1/b). Later we may be able to CSE the reciprocal,
7943 saving an expensive divide. If not, combine will rebuild the original
7944 computation. */
7945 if (flag_unsafe_math_optimizations && optimize && !optimize_size
7946 && TREE_CODE (type) == REAL_TYPE
7947 && !real_onep (TREE_OPERAND (exp, 0)))
7948 return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
7949 build (RDIV_EXPR, type,
7950 build_real (type, dconst1),
7951 TREE_OPERAND (exp, 1))),
7952 target, tmode, modifier);
7953 this_optab = sdiv_optab;
7954 goto binop;
7956 case TRUNC_MOD_EXPR:
7957 case FLOOR_MOD_EXPR:
7958 case CEIL_MOD_EXPR:
7959 case ROUND_MOD_EXPR:
7960 if (modifier == EXPAND_STACK_PARM)
7961 target = 0;
7962 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7963 subtarget, &op0, &op1, 0);
7964 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7966 case FIX_ROUND_EXPR:
7967 case FIX_FLOOR_EXPR:
7968 case FIX_CEIL_EXPR:
7969 abort (); /* Not used for C. */
7971 case FIX_TRUNC_EXPR:
7972 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7973 if (target == 0 || modifier == EXPAND_STACK_PARM)
7974 target = gen_reg_rtx (mode);
7975 expand_fix (target, op0, unsignedp);
7976 return target;
7978 case FLOAT_EXPR:
7979 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7980 if (target == 0 || modifier == EXPAND_STACK_PARM)
7981 target = gen_reg_rtx (mode);
7982 /* expand_float can't figure out what to do if FROM has VOIDmode.
7983 So give it the correct mode. With -O, cse will optimize this. */
7984 if (GET_MODE (op0) == VOIDmode)
7985 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7986 op0);
7987 expand_float (target, op0,
7988 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7989 return target;
7991 case NEGATE_EXPR:
7992 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7993 if (modifier == EXPAND_STACK_PARM)
7994 target = 0;
7995 temp = expand_unop (mode,
7996 ! unsignedp && flag_trapv
7997 && (GET_MODE_CLASS(mode) == MODE_INT)
7998 ? negv_optab : neg_optab, op0, target, 0);
7999 if (temp == 0)
8000 abort ();
8001 return temp;
8003 case ABS_EXPR:
8004 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8005 if (modifier == EXPAND_STACK_PARM)
8006 target = 0;
8008 /* ABS_EXPR is not valid for complex arguments. */
8009 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
8010 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
8011 abort ();
8013 /* Unsigned abs is simply the operand. Testing here means we don't
8014 risk generating incorrect code below. */
8015 if (TREE_UNSIGNED (type))
8016 return op0;
8018 return expand_abs (mode, op0, target, unsignedp,
8019 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
8021 case MAX_EXPR:
8022 case MIN_EXPR:
8023 target = original_target;
8024 if (target == 0
8025 || modifier == EXPAND_STACK_PARM
8026 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
8027 || GET_MODE (target) != mode
8028 || (GET_CODE (target) == REG
8029 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8030 target = gen_reg_rtx (mode);
8031 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8032 target, &op0, &op1, 0);
8034 /* First try to do it with a special MIN or MAX instruction.
8035 If that does not win, use a conditional jump to select the proper
8036 value. */
8037 this_optab = (unsignedp
8038 ? (code == MIN_EXPR ? umin_optab : umax_optab)
8039 : (code == MIN_EXPR ? smin_optab : smax_optab));
8041 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8042 OPTAB_WIDEN);
8043 if (temp != 0)
8044 return temp;
8046 /* At this point, a MEM target is no longer useful; we will get better
8047 code without it. */
8049 if (GET_CODE (target) == MEM)
8050 target = gen_reg_rtx (mode);
8052 /* If op1 was placed in target, swap op0 and op1. */
8053 if (target != op0 && target == op1)
8055 rtx tem = op0;
8056 op0 = op1;
8057 op1 = tem;
8060 if (target != op0)
8061 emit_move_insn (target, op0);
8063 op0 = gen_label_rtx ();
8065 /* If this mode is an integer too wide to compare properly,
8066 compare word by word. Rely on cse to optimize constant cases. */
8067 if (GET_MODE_CLASS (mode) == MODE_INT
8068 && ! can_compare_p (GE, mode, ccp_jump))
8070 if (code == MAX_EXPR)
8071 do_jump_by_parts_greater_rtx (mode, unsignedp, target, op1,
8072 NULL_RTX, op0);
8073 else
8074 do_jump_by_parts_greater_rtx (mode, unsignedp, op1, target,
8075 NULL_RTX, op0);
8077 else
8079 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
8080 unsignedp, mode, NULL_RTX, NULL_RTX, op0);
8082 emit_move_insn (target, op1);
8083 emit_label (op0);
8084 return target;
8086 case BIT_NOT_EXPR:
8087 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8088 if (modifier == EXPAND_STACK_PARM)
8089 target = 0;
8090 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8091 if (temp == 0)
8092 abort ();
8093 return temp;
8095 /* ??? Can optimize bitwise operations with one arg constant.
8096 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8097 and (a bitwise1 b) bitwise2 b (etc)
8098 but that is probably not worthwhile. */
8100 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8101 boolean values when we want in all cases to compute both of them. In
8102 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8103 as actual zero-or-1 values and then bitwise anding. In cases where
8104 there cannot be any side effects, better code would be made by
8105 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8106 how to recognize those cases. */
8108 case TRUTH_AND_EXPR:
8109 case BIT_AND_EXPR:
8110 this_optab = and_optab;
8111 goto binop;
8113 case TRUTH_OR_EXPR:
8114 case BIT_IOR_EXPR:
8115 this_optab = ior_optab;
8116 goto binop;
8118 case TRUTH_XOR_EXPR:
8119 case BIT_XOR_EXPR:
8120 this_optab = xor_optab;
8121 goto binop;
8123 case LSHIFT_EXPR:
8124 case RSHIFT_EXPR:
8125 case LROTATE_EXPR:
8126 case RROTATE_EXPR:
8127 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8128 subtarget = 0;
8129 if (modifier == EXPAND_STACK_PARM)
8130 target = 0;
8131 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8132 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8133 unsignedp);
8135 /* Could determine the answer when only additive constants differ. Also,
8136 the addition of one can be handled by changing the condition. */
8137 case LT_EXPR:
8138 case LE_EXPR:
8139 case GT_EXPR:
8140 case GE_EXPR:
8141 case EQ_EXPR:
8142 case NE_EXPR:
8143 case UNORDERED_EXPR:
8144 case ORDERED_EXPR:
8145 case UNLT_EXPR:
8146 case UNLE_EXPR:
8147 case UNGT_EXPR:
8148 case UNGE_EXPR:
8149 case UNEQ_EXPR:
8150 temp = do_store_flag (exp,
8151 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8152 tmode != VOIDmode ? tmode : mode, 0);
8153 if (temp != 0)
8154 return temp;
8156 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8157 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8158 && original_target
8159 && GET_CODE (original_target) == REG
8160 && (GET_MODE (original_target)
8161 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8163 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8164 VOIDmode, 0);
8166 /* If temp is constant, we can just compute the result. */
8167 if (GET_CODE (temp) == CONST_INT)
8169 if (INTVAL (temp) != 0)
8170 emit_move_insn (target, const1_rtx);
8171 else
8172 emit_move_insn (target, const0_rtx);
8174 return target;
8177 if (temp != original_target)
8179 enum machine_mode mode1 = GET_MODE (temp);
8180 if (mode1 == VOIDmode)
8181 mode1 = tmode != VOIDmode ? tmode : mode;
8183 temp = copy_to_mode_reg (mode1, temp);
8186 op1 = gen_label_rtx ();
8187 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8188 GET_MODE (temp), unsignedp, op1);
8189 emit_move_insn (temp, const1_rtx);
8190 emit_label (op1);
8191 return temp;
8194 /* If no set-flag instruction, must generate a conditional
8195 store into a temporary variable. Drop through
8196 and handle this like && and ||. */
8198 case TRUTH_ANDIF_EXPR:
8199 case TRUTH_ORIF_EXPR:
8200 if (! ignore
8201 && (target == 0
8202 || modifier == EXPAND_STACK_PARM
8203 || ! safe_from_p (target, exp, 1)
8204 /* Make sure we don't have a hard reg (such as function's return
8205 value) live across basic blocks, if not optimizing. */
8206 || (!optimize && GET_CODE (target) == REG
8207 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8208 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8210 if (target)
8211 emit_clr_insn (target);
8213 op1 = gen_label_rtx ();
8214 jumpifnot (exp, op1);
8216 if (target)
8217 emit_0_to_1_insn (target);
8219 emit_label (op1);
8220 return ignore ? const0_rtx : target;
8222 case TRUTH_NOT_EXPR:
8223 if (modifier == EXPAND_STACK_PARM)
8224 target = 0;
8225 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8226 /* The parser is careful to generate TRUTH_NOT_EXPR
8227 only with operands that are always zero or one. */
8228 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8229 target, 1, OPTAB_LIB_WIDEN);
8230 if (temp == 0)
8231 abort ();
8232 return temp;
8234 case COMPOUND_EXPR:
8235 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8236 emit_queue ();
8237 return expand_expr_real (TREE_OPERAND (exp, 1),
8238 (ignore ? const0_rtx : target),
8239 VOIDmode, modifier, alt_rtl);
8241 case COND_EXPR:
8242 /* If we would have a "singleton" (see below) were it not for a
8243 conversion in each arm, bring that conversion back out. */
8244 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8245 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8246 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8247 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8249 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8250 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8252 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8253 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8254 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8255 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8256 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8257 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8258 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8259 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
8260 return expand_expr (build1 (NOP_EXPR, type,
8261 build (COND_EXPR, TREE_TYPE (iftrue),
8262 TREE_OPERAND (exp, 0),
8263 iftrue, iffalse)),
8264 target, tmode, modifier);
8268 /* Note that COND_EXPRs whose type is a structure or union
8269 are required to be constructed to contain assignments of
8270 a temporary variable, so that we can evaluate them here
8271 for side effect only. If type is void, we must do likewise. */
8273 /* If an arm of the branch requires a cleanup,
8274 only that cleanup is performed. */
8276 tree singleton = 0;
8277 tree binary_op = 0, unary_op = 0;
8279 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8280 convert it to our mode, if necessary. */
8281 if (integer_onep (TREE_OPERAND (exp, 1))
8282 && integer_zerop (TREE_OPERAND (exp, 2))
8283 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8285 if (ignore)
8287 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8288 modifier);
8289 return const0_rtx;
8292 if (modifier == EXPAND_STACK_PARM)
8293 target = 0;
8294 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
8295 if (GET_MODE (op0) == mode)
8296 return op0;
8298 if (target == 0)
8299 target = gen_reg_rtx (mode);
8300 convert_move (target, op0, unsignedp);
8301 return target;
8304 /* Check for X ? A + B : A. If we have this, we can copy A to the
8305 output and conditionally add B. Similarly for unary operations.
8306 Don't do this if X has side-effects because those side effects
8307 might affect A or B and the "?" operation is a sequence point in
8308 ANSI. (operand_equal_p tests for side effects.) */
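 /* Illustration (hypothetical source): for flag ? count + 1 : count
 the singleton is count and binary_op is count + 1; the code below
 arranges to copy count to the result and add 1 only when flag is
 true, or to use a store-flag sequence (see just below). */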
8310 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8311 && operand_equal_p (TREE_OPERAND (exp, 2),
8312 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8313 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8314 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8315 && operand_equal_p (TREE_OPERAND (exp, 1),
8316 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8317 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8318 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8319 && operand_equal_p (TREE_OPERAND (exp, 2),
8320 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8321 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8322 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8323 && operand_equal_p (TREE_OPERAND (exp, 1),
8324 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8325 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8327 /* If we are not to produce a result, we have no target. Otherwise,
8328 if a target was specified use it; it will not be used as an
8329 intermediate target unless it is safe. If no target, use a
8330 temporary. */
8332 if (ignore)
8333 temp = 0;
8334 else if (modifier == EXPAND_STACK_PARM)
8335 temp = assign_temp (type, 0, 0, 1);
8336 else if (original_target
8337 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8338 || (singleton && GET_CODE (original_target) == REG
8339 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8340 && original_target == var_rtx (singleton)))
8341 && GET_MODE (original_target) == mode
8342 #ifdef HAVE_conditional_move
8343 && (! can_conditionally_move_p (mode)
8344 || GET_CODE (original_target) == REG
8345 || TREE_ADDRESSABLE (type))
8346 #endif
8347 && (GET_CODE (original_target) != MEM
8348 || TREE_ADDRESSABLE (type)))
8349 temp = original_target;
8350 else if (TREE_ADDRESSABLE (type))
8351 abort ();
8352 else
8353 temp = assign_temp (type, 0, 0, 1);
8355 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8356 do the test of X as a store-flag operation, do this as
8357 A + ((X != 0) << log C). Similarly for other simple binary
8358 operators. Only do for C == 1 if BRANCH_COST is low. */
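 /* Worked example (illustrative): x ? a + 4 : a, with C == 4, becomes
 a + ((x != 0) << 2); the store-flag result 0 or 1 is shifted by
 log2 (4) == 2 and added unconditionally, so no branch is needed. */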
8359 if (temp && singleton && binary_op
8360 && (TREE_CODE (binary_op) == PLUS_EXPR
8361 || TREE_CODE (binary_op) == MINUS_EXPR
8362 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8363 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8364 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8365 : integer_onep (TREE_OPERAND (binary_op, 1)))
8366 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8368 rtx result;
8369 tree cond;
8370 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8371 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8372 ? addv_optab : add_optab)
8373 : TREE_CODE (binary_op) == MINUS_EXPR
8374 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8375 ? subv_optab : sub_optab)
8376 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8377 : xor_optab);
8379 /* If we had X ? A : A + 1, do this as A + (X == 0). */
8380 if (singleton == TREE_OPERAND (exp, 1))
8381 cond = invert_truthvalue (TREE_OPERAND (exp, 0));
8382 else
8383 cond = TREE_OPERAND (exp, 0);
8385 result = do_store_flag (cond, (safe_from_p (temp, singleton, 1)
8386 ? temp : NULL_RTX),
8387 mode, BRANCH_COST <= 1);
8389 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8390 result = expand_shift (LSHIFT_EXPR, mode, result,
8391 build_int_2 (tree_log2
8392 (TREE_OPERAND
8393 (binary_op, 1)),
8395 (safe_from_p (temp, singleton, 1)
8396 ? temp : NULL_RTX), 0);
8398 if (result)
8400 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8401 return expand_binop (mode, boptab, op1, result, temp,
8402 unsignedp, OPTAB_LIB_WIDEN);
8406 do_pending_stack_adjust ();
8407 NO_DEFER_POP;
8408 op0 = gen_label_rtx ();
8410 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8412 if (temp != 0)
8414 /* If the target conflicts with the other operand of the
8415 binary op, we can't use it. Also, we can't use the target
8416 if it is a hard register, because evaluating the condition
8417 might clobber it. */
8418 if ((binary_op
8419 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8420 || (GET_CODE (temp) == REG
8421 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8422 temp = gen_reg_rtx (mode);
8423 store_expr (singleton, temp,
8424 modifier == EXPAND_STACK_PARM ? 2 : 0);
8426 else
8427 expand_expr (singleton,
8428 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8429 if (singleton == TREE_OPERAND (exp, 1))
8430 jumpif (TREE_OPERAND (exp, 0), op0);
8431 else
8432 jumpifnot (TREE_OPERAND (exp, 0), op0);
8434 start_cleanup_deferral ();
8435 if (binary_op && temp == 0)
8436 /* Just touch the other operand. */
8437 expand_expr (TREE_OPERAND (binary_op, 1),
8438 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8439 else if (binary_op)
8440 store_expr (build (TREE_CODE (binary_op), type,
8441 make_tree (type, temp),
8442 TREE_OPERAND (binary_op, 1)),
8443 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
8444 else
8445 store_expr (build1 (TREE_CODE (unary_op), type,
8446 make_tree (type, temp)),
8447 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
8448 op1 = op0;
8450 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8451 comparison operator. If we have one of these cases, set the
8452 output to A, branch on A (cse will merge these two references),
8453 then set the output to FOO. */
8454 else if (temp
8455 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8456 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8457 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8458 TREE_OPERAND (exp, 1), 0)
8459 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8460 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8461 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8463 if (GET_CODE (temp) == REG
8464 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8465 temp = gen_reg_rtx (mode);
8466 store_expr (TREE_OPERAND (exp, 1), temp,
8467 modifier == EXPAND_STACK_PARM ? 2 : 0);
8468 jumpif (TREE_OPERAND (exp, 0), op0);
8470 start_cleanup_deferral ();
8471 if (TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8472 store_expr (TREE_OPERAND (exp, 2), temp,
8473 modifier == EXPAND_STACK_PARM ? 2 : 0);
8474 else
8475 expand_expr (TREE_OPERAND (exp, 2),
8476 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8477 op1 = op0;
8479 else if (temp
8480 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8481 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8482 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8483 TREE_OPERAND (exp, 2), 0)
8484 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8485 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8486 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8488 if (GET_CODE (temp) == REG
8489 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8490 temp = gen_reg_rtx (mode);
8491 store_expr (TREE_OPERAND (exp, 2), temp,
8492 modifier == EXPAND_STACK_PARM ? 2 : 0);
8493 jumpifnot (TREE_OPERAND (exp, 0), op0);
8495 start_cleanup_deferral ();
8496 if (TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8497 store_expr (TREE_OPERAND (exp, 1), temp,
8498 modifier == EXPAND_STACK_PARM ? 2 : 0);
8499 else
8500 expand_expr (TREE_OPERAND (exp, 1),
8501 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8502 op1 = op0;
8504 else
8506 op1 = gen_label_rtx ();
8507 jumpifnot (TREE_OPERAND (exp, 0), op0);
8509 start_cleanup_deferral ();
8511 /* One branch of the cond can be void, if it never returns. For
8512 example A ? throw : E */
8513 if (temp != 0
8514 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8515 store_expr (TREE_OPERAND (exp, 1), temp,
8516 modifier == EXPAND_STACK_PARM ? 2 : 0);
8517 else
8518 expand_expr (TREE_OPERAND (exp, 1),
8519 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8520 end_cleanup_deferral ();
8521 emit_queue ();
8522 emit_jump_insn (gen_jump (op1));
8523 emit_barrier ();
8524 emit_label (op0);
8525 start_cleanup_deferral ();
8526 if (temp != 0
8527 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8528 store_expr (TREE_OPERAND (exp, 2), temp,
8529 modifier == EXPAND_STACK_PARM ? 2 : 0);
8530 else
8531 expand_expr (TREE_OPERAND (exp, 2),
8532 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8535 end_cleanup_deferral ();
8537 emit_queue ();
8538 emit_label (op1);
8539 OK_DEFER_POP;
8541 return temp;
8544 case TARGET_EXPR:
8546 /* Something needs to be initialized, but we didn't know
8547 where that thing was when building the tree. For example,
8548 it could be the return value of a function, or a parameter
8549 to a function which is laid out on the stack, or a temporary
8550 variable which must be passed by reference.
8552 We guarantee that the expression will either be constructed
8553 or copied into our original target. */
8555 tree slot = TREE_OPERAND (exp, 0);
8556 tree cleanups = NULL_TREE;
8557 tree exp1;
8559 if (TREE_CODE (slot) != VAR_DECL)
8560 abort ();
8562 if (! ignore)
8563 target = original_target;
8565 /* Set this here so that if we get a target that refers to a
8566 register variable that's already been used, put_reg_into_stack
8567 knows that it should fix up those uses. */
8568 TREE_USED (slot) = 1;
8570 if (target == 0)
8572 if (DECL_RTL_SET_P (slot))
8574 target = DECL_RTL (slot);
8575 /* If we have already expanded the slot, don't do
8576 it again. (mrs) */
8577 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8578 return target;
8580 else
8582 target = assign_temp (type, 2, 0, 1);
8583 /* All temp slots at this level must not conflict. */
8584 preserve_temp_slots (target);
8585 SET_DECL_RTL (slot, target);
8586 if (TREE_ADDRESSABLE (slot))
8587 put_var_into_stack (slot, /*rescan=*/false);
8589 /* Since SLOT is not known to the called function
8590 to belong to its stack frame, we must build an explicit
8591 cleanup. This case occurs when we must build up a reference
8592 to pass the reference as an argument. In this case,
8593 it is very likely that such a reference need not be
8594 built here. */
8596 if (TREE_OPERAND (exp, 2) == 0)
8597 TREE_OPERAND (exp, 2)
8598 = (*lang_hooks.maybe_build_cleanup) (slot);
8599 cleanups = TREE_OPERAND (exp, 2);
8602 else
8604 /* This case does occur, when expanding a parameter which
8605 needs to be constructed on the stack. The target
8606 is the actual stack address that we want to initialize.
8607 The function we call will perform the cleanup in this case. */
8609 /* If we have already assigned it space, use that space,
8610 not the target that we were passed in, as our target
8611 parameter is only a hint. */
8612 if (DECL_RTL_SET_P (slot))
8614 target = DECL_RTL (slot);
8615 /* If we have already expanded the slot, don't do
8616 it again. (mrs) */
8617 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8618 return target;
8620 else
8622 SET_DECL_RTL (slot, target);
8623 /* If we must have an addressable slot, then make sure that
8624 the RTL that we just stored in slot is OK. */
8625 if (TREE_ADDRESSABLE (slot))
8626 put_var_into_stack (slot, /*rescan=*/true);
8630 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8631 /* Mark it as expanded. */
8632 TREE_OPERAND (exp, 1) = NULL_TREE;
8634 store_expr (exp1, target, modifier == EXPAND_STACK_PARM ? 2 : 0);
8636 expand_decl_cleanup_eh (NULL_TREE, cleanups, CLEANUP_EH_ONLY (exp));
8638 return target;
8641 case INIT_EXPR:
8643 tree lhs = TREE_OPERAND (exp, 0);
8644 tree rhs = TREE_OPERAND (exp, 1);
8646 temp = expand_assignment (lhs, rhs, ! ignore);
8647 return temp;
8650 case MODIFY_EXPR:
8652 /* If lhs is complex, expand calls in rhs before computing it.
8653 That's so we don't compute a pointer and save it over a
8654 call. If lhs is simple, compute it first so we can give it
8655 as a target if the rhs is just a call. This avoids an
8656 extra temp and copy, and prevents a partial subsumption
8657 that makes bad code. Actually we could treat
8658 component_ref's of vars like vars. */
8660 tree lhs = TREE_OPERAND (exp, 0);
8661 tree rhs = TREE_OPERAND (exp, 1);
8663 temp = 0;
8665 /* Check for |= or &= of a bitfield of size one into another bitfield
8666 of size one. In this case, (unless we need the result of the
8667 assignment) we can do this more efficiently with a
8668 test followed by an assignment, if necessary.
8670 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8671 things change so we do, this code should be enhanced to
8672 support it. */
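 /* For illustration (hypothetical fields): with one-bit bitfields a and
 b, s.a |= s.b is emitted as if (s.b) s.a = 1; and s.a &= s.b
 as if (!s.b) s.a = 0; avoiding a read-modify-write of the
 destination bitfield. */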
8673 if (ignore
8674 && TREE_CODE (lhs) == COMPONENT_REF
8675 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8676 || TREE_CODE (rhs) == BIT_AND_EXPR)
8677 && TREE_OPERAND (rhs, 0) == lhs
8678 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8679 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8680 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8682 rtx label = gen_label_rtx ();
8684 do_jump (TREE_OPERAND (rhs, 1),
8685 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8686 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8687 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8688 (TREE_CODE (rhs) == BIT_IOR_EXPR
8689 ? integer_one_node
8690 : integer_zero_node)),
8692 do_pending_stack_adjust ();
8693 emit_label (label);
8694 return const0_rtx;
8697 temp = expand_assignment (lhs, rhs, ! ignore);
8699 return temp;
8702 case RETURN_EXPR:
8703 if (!TREE_OPERAND (exp, 0))
8704 expand_null_return ();
8705 else
8706 expand_return (TREE_OPERAND (exp, 0));
8707 return const0_rtx;
8709 case PREINCREMENT_EXPR:
8710 case PREDECREMENT_EXPR:
8711 return expand_increment (exp, 0, ignore);
8713 case POSTINCREMENT_EXPR:
8714 case POSTDECREMENT_EXPR:
8715 /* Faster to treat as pre-increment if result is not used. */
8716 return expand_increment (exp, ! ignore, ignore);
8718 case ADDR_EXPR:
8719 if (modifier == EXPAND_STACK_PARM)
8720 target = 0;
8721 /* Are we taking the address of a nested function? */
8722 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8723 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8724 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8725 && ! TREE_STATIC (exp))
8727 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8728 op0 = force_operand (op0, target);
8730 /* If we are taking the address of something erroneous, just
8731 return a zero. */
8732 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8733 return const0_rtx;
8734 /* If we are taking the address of a constant and are at the
8735 top level, we have to use output_constant_def since we can't
8736 call force_const_mem at top level. */
8737 else if (cfun == 0
8738 && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8739 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
8740 == 'c')))
8741 op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
8742 else
8744 /* We make sure to pass const0_rtx down if we came in with
8745 ignore set, to avoid doing the cleanups twice for something. */
8746 op0 = expand_expr (TREE_OPERAND (exp, 0),
8747 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8748 (modifier == EXPAND_INITIALIZER
8749 ? modifier : EXPAND_CONST_ADDRESS));
8751 /* If we are going to ignore the result, OP0 will have been set
8752 to const0_rtx, so just return it. Don't get confused and
8753 think we are taking the address of the constant. */
8754 if (ignore)
8755 return op0;
8757 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
8758 clever and return a REG when given a MEM. */
8759 op0 = protect_from_queue (op0, 1);
8761 /* We would like the object in memory. If it is a constant, we can
8762 have it be statically allocated into memory. For a non-constant,
8763 we need to allocate some memory and store the value into it. */
8765 if (CONSTANT_P (op0))
8766 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8767 op0);
8768 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8769 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
8770 || GET_CODE (op0) == PARALLEL || GET_CODE (op0) == LO_SUM)
8772 /* If the operand is a SAVE_EXPR, we can deal with this by
8773 forcing the SAVE_EXPR into memory. */
8774 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
8776 put_var_into_stack (TREE_OPERAND (exp, 0),
8777 /*rescan=*/true);
8778 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
8780 else
8782 /* If this object is in a register, it can't be BLKmode. */
8783 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8784 rtx memloc = assign_temp (inner_type, 1, 1, 1);
8786 if (GET_CODE (op0) == PARALLEL)
8787 /* Handle calls that pass values in multiple
8788 non-contiguous locations. The Irix 6 ABI has examples
8789 of this. */
8790 emit_group_store (memloc, op0, inner_type,
8791 int_size_in_bytes (inner_type));
8792 else
8793 emit_move_insn (memloc, op0);
8795 op0 = memloc;
8799 if (GET_CODE (op0) != MEM)
8800 abort ();
8802 mark_temp_addr_taken (op0);
8803 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8805 op0 = XEXP (op0, 0);
8806 if (GET_MODE (op0) == Pmode && mode == ptr_mode)
8807 op0 = convert_memory_address (ptr_mode, op0);
8808 return op0;
8811 /* If OP0 is not aligned at least as much as the type requires, we
8812 need to make a temporary, copy OP0 to it, and take the address of
8813 the temporary. We want to use the alignment of the type, not of
8814 the operand. Note that this is incorrect for FUNCTION_TYPE, but
8815 the test for BLKmode means that can't happen. The test for
8816 BLKmode is because we never make mis-aligned MEMs with
8817 non-BLKmode.
8819 We don't need to do this at all if the machine doesn't have
8820 strict alignment. */
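 /* Illustration (hypothetical source): on a strict-alignment target,
 taking the address of a BLKmode member of a packed struct may find
 only byte alignment; the member is then block-copied into a stack
 temporary aligned for its type and the temporary's address is used
 instead. */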
8821 if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
8822 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
8823 > MEM_ALIGN (op0))
8824 && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
8826 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8827 rtx new;
8829 if (TYPE_ALIGN_OK (inner_type))
8830 abort ();
8832 if (TREE_ADDRESSABLE (inner_type))
8834 /* We can't make a bitwise copy of this object, so fail. */
8835 error ("cannot take the address of an unaligned member");
8836 return const0_rtx;
8839 new = assign_stack_temp_for_type
8840 (TYPE_MODE (inner_type),
8841 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
8842 : int_size_in_bytes (inner_type),
8843 1, build_qualified_type (inner_type,
8844 (TYPE_QUALS (inner_type)
8845 | TYPE_QUAL_CONST)));
8847 emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)),
8848 (modifier == EXPAND_STACK_PARM
8849 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
8851 op0 = new;
8854 op0 = force_operand (XEXP (op0, 0), target);
8857 if (flag_force_addr
8858 && GET_CODE (op0) != REG
8859 && modifier != EXPAND_CONST_ADDRESS
8860 && modifier != EXPAND_INITIALIZER
8861 && modifier != EXPAND_SUM)
8862 op0 = force_reg (Pmode, op0);
8864 if (GET_CODE (op0) == REG
8865 && ! REG_USERVAR_P (op0))
8866 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
8868 if (GET_MODE (op0) == Pmode && mode == ptr_mode)
8869 op0 = convert_memory_address (ptr_mode, op0);
8871 return op0;
8873 case ENTRY_VALUE_EXPR:
8874 abort ();
8876 /* COMPLEX type for Extended Pascal & Fortran */
8877 case COMPLEX_EXPR:
8879 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8880 rtx insns;
8882 /* Get the rtx code of the operands. */
8883 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8884 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8886 if (! target)
8887 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8889 start_sequence ();
8891 /* Move the real (op0) and imaginary (op1) parts to their location. */
8892 emit_move_insn (gen_realpart (mode, target), op0);
8893 emit_move_insn (gen_imagpart (mode, target), op1);
8895 insns = get_insns ();
8896 end_sequence ();
8898 /* Complex construction should appear as a single unit. */
8899 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8900 each with a separate pseudo as destination.
8901 It's not correct for flow to treat them as a unit. */
8902 if (GET_CODE (target) != CONCAT)
8903 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8904 else
8905 emit_insn (insns);
8907 return target;
8910 case REALPART_EXPR:
8911 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8912 return gen_realpart (mode, op0);
8914 case IMAGPART_EXPR:
8915 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8916 return gen_imagpart (mode, op0);
8918 case CONJ_EXPR:
8920 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8921 rtx imag_t;
8922 rtx insns;
8924 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8926 if (! target)
8927 target = gen_reg_rtx (mode);
8929 start_sequence ();
8931 /* Store the realpart and the negated imagpart to target. */
8932 emit_move_insn (gen_realpart (partmode, target),
8933 gen_realpart (partmode, op0));
8935 imag_t = gen_imagpart (partmode, target);
8936 temp = expand_unop (partmode,
8937 ! unsignedp && flag_trapv
8938 && (GET_MODE_CLASS(partmode) == MODE_INT)
8939 ? negv_optab : neg_optab,
8940 gen_imagpart (partmode, op0), imag_t, 0);
8941 if (temp != imag_t)
8942 emit_move_insn (imag_t, temp);
8944 insns = get_insns ();
8945 end_sequence ();
8947 /* Conjugate should appear as a single unit.
8948 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8949 each with a separate pseudo as destination.
8950 It's not correct for flow to treat them as a unit. */
8951 if (GET_CODE (target) != CONCAT)
8952 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8953 else
8954 emit_insn (insns);
8956 return target;
8959 case TRY_CATCH_EXPR:
8961 tree handler = TREE_OPERAND (exp, 1);
8963 expand_eh_region_start ();
8965 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8967 expand_eh_region_end_cleanup (handler);
8969 return op0;
8972 case TRY_FINALLY_EXPR:
8974 tree try_block = TREE_OPERAND (exp, 0);
8975 tree finally_block = TREE_OPERAND (exp, 1);
8977 if (!optimize || unsafe_for_reeval (finally_block) > 1)
8979 /* In this case, wrapping FINALLY_BLOCK in an UNSAVE_EXPR
8980 is not sufficient, so we cannot expand the block twice.
8981 So we play games with GOTO_SUBROUTINE_EXPR to let us
8982 expand the thing only once. */
8983 /* When not optimizing, we go ahead with this form since
8984 (1) user breakpoints operate more predictably without
8985 code duplication, and
8986 (2) we're not running any of the global optimizers
8987 that would explode in time/space with the highly
8988 connected CFG created by the indirect branching. */
8990 rtx finally_label = gen_label_rtx ();
8991 rtx done_label = gen_label_rtx ();
8992 rtx return_link = gen_reg_rtx (Pmode);
8993 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8994 (tree) finally_label, (tree) return_link);
8995 TREE_SIDE_EFFECTS (cleanup) = 1;
8997 /* Start a new binding layer that will keep track of all cleanup
8998 actions to be performed. */
8999 expand_start_bindings (2);
9000 target_temp_slot_level = temp_slot_level;
9002 expand_decl_cleanup (NULL_TREE, cleanup);
9003 op0 = expand_expr (try_block, target, tmode, modifier);
9005 preserve_temp_slots (op0);
9006 expand_end_bindings (NULL_TREE, 0, 0);
9007 emit_jump (done_label);
9008 emit_label (finally_label);
9009 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
9010 emit_indirect_jump (return_link);
9011 emit_label (done_label);
9013 else
9015 expand_start_bindings (2);
9016 target_temp_slot_level = temp_slot_level;
9018 expand_decl_cleanup (NULL_TREE, finally_block);
9019 op0 = expand_expr (try_block, target, tmode, modifier);
9021 preserve_temp_slots (op0);
9022 expand_end_bindings (NULL_TREE, 0, 0);
9025 return op0;
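/* Added control-flow sketch of the non-duplicating form above
   (illustrative only):

       <try_block>
       goto done;
     finally:
       <finally_block>
       jump *return_link;
     done:

   Each cleanup edge reaches `finally' through a GOTO_SUBROUTINE_EXPR,
   which first stores its return address in RETURN_LINK; see the
   GOTO_SUBROUTINE_EXPR case below.  */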
9028 case GOTO_SUBROUTINE_EXPR:
9030 rtx subr = (rtx) TREE_OPERAND (exp, 0);
9031 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
9032 rtx return_address = gen_label_rtx ();
9033 emit_move_insn (return_link,
9034 gen_rtx_LABEL_REF (Pmode, return_address));
9035 emit_jump (subr);
9036 emit_label (return_address);
9037 return const0_rtx;
9040 case VA_ARG_EXPR:
9041 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
9043 case EXC_PTR_EXPR:
9044 return get_exception_pointer (cfun);
9046 case FDESC_EXPR:
9047 /* Function descriptors are not valid except as
9048 initialization constants, and should not be expanded. */
9049 abort ();
9051 default:
9052 return (*lang_hooks.expand_expr) (exp, original_target, tmode, modifier,
9053 alt_rtl);
9056 /* Here to do an ordinary binary operator, generating an instruction
9057 from the optab already placed in `this_optab'. */
9058 binop:
9059 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
9060 subtarget, &op0, &op1, 0);
9061 binop2:
9062 if (modifier == EXPAND_STACK_PARM)
9063 target = 0;
9064 temp = expand_binop (mode, this_optab, op0, op1, target,
9065 unsignedp, OPTAB_LIB_WIDEN);
9066 if (temp == 0)
9067 abort ();
9068 return temp;
9071 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
9072 when applied to the address of EXP produces an address known to be
9073 aligned more than BIGGEST_ALIGNMENT. */
9075 static int
9076 is_aligning_offset (tree offset, tree exp)
9078 /* Strip off any conversions and WITH_RECORD_EXPR nodes. */
9079 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9080 || TREE_CODE (offset) == NOP_EXPR
9081 || TREE_CODE (offset) == CONVERT_EXPR
9082 || TREE_CODE (offset) == WITH_RECORD_EXPR)
9083 offset = TREE_OPERAND (offset, 0);
9085 /* We must now have a BIT_AND_EXPR with a constant that is one less than
9086 a power of 2 and which is larger than BIGGEST_ALIGNMENT. */
9087 if (TREE_CODE (offset) != BIT_AND_EXPR
9088 || !host_integerp (TREE_OPERAND (offset, 1), 1)
9089 || compare_tree_int (TREE_OPERAND (offset, 1),
9090 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
9091 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
9092 return 0;
9094 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9095 It must be NEGATE_EXPR. Then strip any more conversions. */
9096 offset = TREE_OPERAND (offset, 0);
9097 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9098 || TREE_CODE (offset) == NOP_EXPR
9099 || TREE_CODE (offset) == CONVERT_EXPR)
9100 offset = TREE_OPERAND (offset, 0);
9102 if (TREE_CODE (offset) != NEGATE_EXPR)
9103 return 0;
9105 offset = TREE_OPERAND (offset, 0);
9106 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9107 || TREE_CODE (offset) == NOP_EXPR
9108 || TREE_CODE (offset) == CONVERT_EXPR)
9109 offset = TREE_OPERAND (offset, 0);
9111 /* This must now be the address either of EXP or of a PLACEHOLDER_EXPR
9112 whose type is the same as EXP. */
9113 return (TREE_CODE (offset) == ADDR_EXPR
9114 && (TREE_OPERAND (offset, 0) == exp
9115 || (TREE_CODE (TREE_OPERAND (offset, 0)) == PLACEHOLDER_EXPR
9116 && (TREE_TYPE (TREE_OPERAND (offset, 0))
9117 == TREE_TYPE (exp)))));
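/* Added note (illustrative only): the shape accepted above corresponds to
   an offset of the form

       (- (ADDR_EXPR exp)) & (ALIGN - 1)

   where ALIGN is a power of 2 larger than BIGGEST_ALIGNMENT, i.e. the
   amount needed to round the address of EXP up to an ALIGN boundary.  */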
9120 /* Return the STRING_CST node if ARG corresponds to a string constant,
9121 or zero if it does not. If we return nonzero, set *PTR_OFFSET to the
9122 offset in bytes within the string that ARG is accessing. The type of
9123 the offset will be `sizetype'. */
9125 tree
9126 string_constant (tree arg, tree *ptr_offset)
9128 STRIP_NOPS (arg);
9130 if (TREE_CODE (arg) == ADDR_EXPR
9131 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9133 *ptr_offset = size_zero_node;
9134 return TREE_OPERAND (arg, 0);
9136 else if (TREE_CODE (arg) == PLUS_EXPR)
9138 tree arg0 = TREE_OPERAND (arg, 0);
9139 tree arg1 = TREE_OPERAND (arg, 1);
9141 STRIP_NOPS (arg0);
9142 STRIP_NOPS (arg1);
9144 if (TREE_CODE (arg0) == ADDR_EXPR
9145 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
9147 *ptr_offset = convert (sizetype, arg1);
9148 return TREE_OPERAND (arg0, 0);
9150 else if (TREE_CODE (arg1) == ADDR_EXPR
9151 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
9153 *ptr_offset = convert (sizetype, arg0);
9154 return TREE_OPERAND (arg1, 0);
9158 return 0;
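/* Added examples (illustrative only) of the two forms handled above:

       "abc"       ADDR_EXPR of a STRING_CST; *PTR_OFFSET becomes 0.
       "abc" + i   PLUS_EXPR; the STRING_CST operand is returned and
                   *PTR_OFFSET is set to the other operand, converted
                   to sizetype.  */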
9161 /* Expand code for a post- or pre- increment or decrement
9162 and return the RTX for the result.
9163 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
9165 static rtx
9166 expand_increment (tree exp, int post, int ignore)
9168 rtx op0, op1;
9169 rtx temp, value;
9170 tree incremented = TREE_OPERAND (exp, 0);
9171 optab this_optab = add_optab;
9172 int icode;
9173 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9174 int op0_is_copy = 0;
9175 int single_insn = 0;
9176 /* 1 means we can't store into OP0 directly,
9177 because it is a subreg narrower than a word,
9178 and we don't dare clobber the rest of the word. */
9179 int bad_subreg = 0;
9181 /* Stabilize any component ref that might need to be
9182 evaluated more than once below. */
9183 if (!post
9184 || TREE_CODE (incremented) == BIT_FIELD_REF
9185 || (TREE_CODE (incremented) == COMPONENT_REF
9186 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9187 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9188 incremented = stabilize_reference (incremented);
9189 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9190 ones into save exprs so that they don't accidentally get evaluated
9191 more than once by the code below. */
9192 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9193 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9194 incremented = save_expr (incremented);
9196 /* Compute the operands as RTX.
9197 Note whether OP0 is the actual lvalue or a copy of it:
9198 I believe it is a copy iff it is a register or subreg
9199 and insns were generated in computing it. */
9201 temp = get_last_insn ();
9202 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
9204 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9205 in place but instead must do sign- or zero-extension during assignment,
9206 so we copy it into a new register and let the code below use it as
9207 a copy.
9209 Note that we can safely modify this SUBREG since it is known not to be
9210 shared (it was made by the expand_expr call above). */
9212 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9214 if (post)
9215 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9216 else
9217 bad_subreg = 1;
9219 else if (GET_CODE (op0) == SUBREG
9220 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9222 /* We cannot increment this SUBREG in place. If we are
9223 post-incrementing, get a copy of the old value. Otherwise,
9224 just mark that we cannot increment in place. */
9225 if (post)
9226 op0 = copy_to_reg (op0);
9227 else
9228 bad_subreg = 1;
9231 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9232 && temp != get_last_insn ());
9233 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9235 /* Decide whether incrementing or decrementing. */
9236 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9237 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9238 this_optab = sub_optab;
9240 /* Convert decrement by a constant into a negative increment. */
9241 if (this_optab == sub_optab
9242 && GET_CODE (op1) == CONST_INT)
9244 op1 = GEN_INT (-INTVAL (op1));
9245 this_optab = add_optab;
9248 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9249 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9251 /* For a preincrement, see if we can do this with a single instruction. */
9252 if (!post)
9254 icode = (int) this_optab->handlers[(int) mode].insn_code;
9255 if (icode != (int) CODE_FOR_nothing
9256 /* Make sure that OP0 is valid for operands 0 and 1
9257 of the insn we want to queue. */
9258 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9259 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9260 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9261 single_insn = 1;
9264 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9265 then we cannot just increment OP0. We must therefore contrive to
9266 increment the original value. Then, for postincrement, we can return
9267 OP0 since it is a copy of the old value. For preincrement, expand here
9268 unless we can do it with a single insn.
9270 Likewise if storing directly into OP0 would clobber high bits
9271 we need to preserve (bad_subreg). */
9272 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9274 /* This is the easiest way to increment the value wherever it is.
9275 Problems with multiple evaluation of INCREMENTED are prevented
9276 because either (1) it is a component_ref or preincrement,
9277 in which case it was stabilized above, or (2) it is an array_ref
9278 with constant index in an array in a register, which is
9279 safe to reevaluate. */
9280 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9281 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9282 ? MINUS_EXPR : PLUS_EXPR),
9283 TREE_TYPE (exp),
9284 incremented,
9285 TREE_OPERAND (exp, 1));
9287 while (TREE_CODE (incremented) == NOP_EXPR
9288 || TREE_CODE (incremented) == CONVERT_EXPR)
9290 newexp = convert (TREE_TYPE (incremented), newexp);
9291 incremented = TREE_OPERAND (incremented, 0);
9294 temp = expand_assignment (incremented, newexp, ! post && ! ignore);
9295 return post ? op0 : temp;
9298 if (post)
9300 /* We have a true reference to the value in OP0.
9301 If there is an insn to add or subtract in this mode, queue it.
9302 Queuing the increment insn avoids the register shuffling
9303 that often results if we must increment now and first save
9304 the old value for subsequent use. */
9306 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9307 op0 = stabilize (op0);
9308 #endif
9310 icode = (int) this_optab->handlers[(int) mode].insn_code;
9311 if (icode != (int) CODE_FOR_nothing
9312 /* Make sure that OP0 is valid for operands 0 and 1
9313 of the insn we want to queue. */
9314 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9315 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9317 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9318 op1 = force_reg (mode, op1);
9320 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9322 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9324 rtx addr = (general_operand (XEXP (op0, 0), mode)
9325 ? force_reg (Pmode, XEXP (op0, 0))
9326 : copy_to_reg (XEXP (op0, 0)));
9327 rtx temp, result;
9329 op0 = replace_equiv_address (op0, addr);
9330 temp = force_reg (GET_MODE (op0), op0);
9331 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9332 op1 = force_reg (mode, op1);
9334 /* The increment queue is LIFO, thus we have to `queue'
9335 the instructions in reverse order. */
9336 enqueue_insn (op0, gen_move_insn (op0, temp));
9337 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9338 return result;
9342 /* Preincrement, or we can't increment with one simple insn. */
9343 if (post)
9344 /* Save a copy of the value before inc or dec, to return it later. */
9345 temp = value = copy_to_reg (op0);
9346 else
9347 /* Arrange to return the incremented value. */
9348 /* Copy the rtx because expand_binop will protect from the queue,
9349 and the results of that would be invalid for us to return
9350 if our caller does emit_queue before using our result. */
9351 temp = copy_rtx (value = op0);
9353 /* Increment however we can. */
9354 op1 = expand_binop (mode, this_optab, value, op1, op0,
9355 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9357 /* Make sure the value is stored into OP0. */
9358 if (op1 != op0)
9359 emit_move_insn (op0, op1);
9361 return temp;
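/* Added note (illustrative only): typical source forms reaching the code
   above are `i++' and `--i'.  A post-increment whose mode has a matching
   add pattern is queued via enqueue_insn so the old value can be returned
   cheaply; otherwise the expand_assignment or expand_binop paths above
   are used.  */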
9364 /* Generate code to calculate EXP using a store-flag instruction
9365 and return an rtx for the result. EXP is either a comparison
9366 or a TRUTH_NOT_EXPR whose operand is a comparison.
9368 If TARGET is nonzero, store the result there if convenient.
9370 If ONLY_CHEAP is nonzero, only do this if it is likely to be very
9371 cheap.
9373 Return zero if there is no suitable set-flag instruction
9374 available on this machine.
9376 Once expand_expr has been called on the arguments of the comparison,
9377 we are committed to doing the store flag, since it is not safe to
9378 re-evaluate the expression. We emit the store-flag insn by calling
9379 emit_store_flag, but only expand the arguments if we have a reason
9380 to believe that emit_store_flag will be successful. If we think that
9381 it will, but it isn't, we have to simulate the store-flag with a
9382 set/jump/set sequence. */
9384 static rtx
9385 do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
9387 enum rtx_code code;
9388 tree arg0, arg1, type;
9389 tree tem;
9390 enum machine_mode operand_mode;
9391 int invert = 0;
9392 int unsignedp;
9393 rtx op0, op1;
9394 enum insn_code icode;
9395 rtx subtarget = target;
9396 rtx result, label;
9398 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9399 result at the end. We can't simply invert the test since it would
9400 have already been inverted if it were valid. This case occurs for
9401 some floating-point comparisons. */
9403 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
9404 invert = 1, exp = TREE_OPERAND (exp, 0);
9406 arg0 = TREE_OPERAND (exp, 0);
9407 arg1 = TREE_OPERAND (exp, 1);
9409 /* Don't crash if the comparison was erroneous. */
9410 if (arg0 == error_mark_node || arg1 == error_mark_node)
9411 return const0_rtx;
9413 type = TREE_TYPE (arg0);
9414 operand_mode = TYPE_MODE (type);
9415 unsignedp = TREE_UNSIGNED (type);
9417 /* We won't bother with BLKmode store-flag operations because it would mean
9418 passing a lot of information to emit_store_flag. */
9419 if (operand_mode == BLKmode)
9420 return 0;
9422 /* We won't bother with store-flag operations involving function pointers
9423 when function pointers must be canonicalized before comparisons. */
9424 #ifdef HAVE_canonicalize_funcptr_for_compare
9425 if (HAVE_canonicalize_funcptr_for_compare
9426 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9427 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9428 == FUNCTION_TYPE))
9429 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9430 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9431 == FUNCTION_TYPE))))
9432 return 0;
9433 #endif
9435 STRIP_NOPS (arg0);
9436 STRIP_NOPS (arg1);
9438 /* Get the rtx comparison code to use. We know that EXP is a comparison
9439 operation of some type. Some comparisons against 1 and -1 can be
9440 converted to comparisons with zero. Do so here so that the tests
9441 below will be aware that we have a comparison with zero. These
9442 tests will not catch constants in the first operand, but constants
9443 are rarely passed as the first operand. */
9445 switch (TREE_CODE (exp))
9447 case EQ_EXPR:
9448 code = EQ;
9449 break;
9450 case NE_EXPR:
9451 code = NE;
9452 break;
9453 case LT_EXPR:
9454 if (integer_onep (arg1))
9455 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
9456 else
9457 code = unsignedp ? LTU : LT;
9458 break;
9459 case LE_EXPR:
9460 if (! unsignedp && integer_all_onesp (arg1))
9461 arg1 = integer_zero_node, code = LT;
9462 else
9463 code = unsignedp ? LEU : LE;
9464 break;
9465 case GT_EXPR:
9466 if (! unsignedp && integer_all_onesp (arg1))
9467 arg1 = integer_zero_node, code = GE;
9468 else
9469 code = unsignedp ? GTU : GT;
9470 break;
9471 case GE_EXPR:
9472 if (integer_onep (arg1))
9473 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
9474 else
9475 code = unsignedp ? GEU : GE;
9476 break;
9478 case UNORDERED_EXPR:
9479 code = UNORDERED;
9480 break;
9481 case ORDERED_EXPR:
9482 code = ORDERED;
9483 break;
9484 case UNLT_EXPR:
9485 code = UNLT;
9486 break;
9487 case UNLE_EXPR:
9488 code = UNLE;
9489 break;
9490 case UNGT_EXPR:
9491 code = UNGT;
9492 break;
9493 case UNGE_EXPR:
9494 code = UNGE;
9495 break;
9496 case UNEQ_EXPR:
9497 code = UNEQ;
9498 break;
9500 default:
9501 abort ();
9504 /* Put a constant second. */
9505 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
9507 tem = arg0; arg0 = arg1; arg1 = tem;
9508 code = swap_condition (code);
9511 /* If this is an equality or inequality test of a single bit, we can
9512 do this by shifting the bit being tested to the low-order bit and
9513 masking the result with the constant 1. If the condition was EQ,
9514 we xor it with 1. This does not require an scc insn and is faster
9515 than an scc insn even if we have it.
9517 The code to make this transformation was moved into fold_single_bit_test,
9518 so we just call into the folder and expand its result. */
9520 if ((code == NE || code == EQ)
9521 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
9522 && integer_pow2p (TREE_OPERAND (arg0, 1)))
9524 tree type = (*lang_hooks.types.type_for_mode) (mode, unsignedp);
9525 return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
9526 arg0, arg1, type),
9527 target, VOIDmode, EXPAND_NORMAL);
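/* Added example (illustrative only): a test such as `(x & 8) != 0' is
   rewritten by fold_single_bit_test into a shift-and-mask form, roughly
   `(x >> 3) & 1', with an additional XOR against 1 when the original
   comparison was EQ.  */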
9530 /* Now see if we are likely to be able to do this. Return if not. */
9531 if (! can_compare_p (code, operand_mode, ccp_store_flag))
9532 return 0;
9534 icode = setcc_gen_code[(int) code];
9535 if (icode == CODE_FOR_nothing
9536 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
9538 /* We can only do this if it is one of the special cases that
9539 can be handled without an scc insn. */
9540 if ((code == LT && integer_zerop (arg1))
9541 || (! only_cheap && code == GE && integer_zerop (arg1)))
9542 ;
9543 else if (BRANCH_COST >= 0
9544 && ! only_cheap && (code == NE || code == EQ)
9545 && TREE_CODE (type) != REAL_TYPE
9546 && ((abs_optab->handlers[(int) operand_mode].insn_code
9547 != CODE_FOR_nothing)
9548 || (ffs_optab->handlers[(int) operand_mode].insn_code
9549 != CODE_FOR_nothing)))
9550 ;
9551 else
9552 return 0;
9555 if (! get_subtarget (target)
9556 || GET_MODE (subtarget) != operand_mode)
9557 subtarget = 0;
9559 expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);
9561 if (target == 0)
9562 target = gen_reg_rtx (mode);
9564 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
9565 because, if emit_store_flag does anything, it will succeed and OP0 and
9566 OP1 will not be used subsequently. */
9568 result = emit_store_flag (target, code,
9569 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
9570 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
9571 operand_mode, unsignedp, 1);
9573 if (result)
9575 if (invert)
9576 result = expand_binop (mode, xor_optab, result, const1_rtx,
9577 result, 0, OPTAB_LIB_WIDEN);
9578 return result;
9581 /* If this failed, we have to do this with set/compare/jump/set code. */
9582 if (GET_CODE (target) != REG
9583 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
9584 target = gen_reg_rtx (GET_MODE (target));
9586 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
9587 result = compare_from_rtx (op0, op1, code, unsignedp,
9588 operand_mode, NULL_RTX);
9589 if (GET_CODE (result) == CONST_INT)
9590 return (((result == const0_rtx && ! invert)
9591 || (result != const0_rtx && invert))
9592 ? const0_rtx : const1_rtx);
9594 /* The code of RESULT may not match CODE if compare_from_rtx
9595 decided to swap its operands and reverse the original code.
9597 We know that compare_from_rtx returns either a CONST_INT or
9598 a new comparison code, so it is safe to just extract the
9599 code from RESULT. */
9600 code = GET_CODE (result);
9602 label = gen_label_rtx ();
9603 if (bcc_gen_fctn[(int) code] == 0)
9604 abort ();
9606 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
9607 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
9608 emit_label (label);
9610 return target;
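/* Added example (illustrative only): for a comparison like `x < 0' on a
   target with an scc pattern, the code above emits a single store-flag
   insn; when emit_store_flag cannot handle the case, the
   set/compare/jump/set sequence just above is emitted instead.  */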
9614 /* Stubs in case we haven't got a casesi insn. */
9615 #ifndef HAVE_casesi
9616 # define HAVE_casesi 0
9617 # define gen_casesi(a, b, c, d, e) (0)
9618 # define CODE_FOR_casesi CODE_FOR_nothing
9619 #endif
9621 /* If the machine does not have a case insn that compares the bounds,
9622 this means extra overhead for dispatch tables, which raises the
9623 threshold for using them. */
9624 #ifndef CASE_VALUES_THRESHOLD
9625 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
9626 #endif /* CASE_VALUES_THRESHOLD */
9628 unsigned int
9629 case_values_threshold (void)
9631 return CASE_VALUES_THRESHOLD;
9634 /* Attempt to generate a casesi instruction. Returns 1 if successful,
9635 0 otherwise (i.e. if there is no casesi instruction). */
9636 int
9637 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
9638 rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
9640 enum machine_mode index_mode = SImode;
9641 int index_bits = GET_MODE_BITSIZE (index_mode);
9642 rtx op1, op2, index;
9643 enum machine_mode op_mode;
9645 if (! HAVE_casesi)
9646 return 0;
9648 /* Convert the index to SImode. */
9649 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
9651 enum machine_mode omode = TYPE_MODE (index_type);
9652 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
9654 /* We must handle the endpoints in the original mode. */
9655 index_expr = build (MINUS_EXPR, index_type,
9656 index_expr, minval);
9657 minval = integer_zero_node;
9658 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9659 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
9660 omode, 1, default_label);
9661 /* Now we can safely truncate. */
9662 index = convert_to_mode (index_mode, index, 0);
9664 else
9666 if (TYPE_MODE (index_type) != index_mode)
9668 index_expr = convert ((*lang_hooks.types.type_for_size)
9669 (index_bits, 0), index_expr);
9670 index_type = TREE_TYPE (index_expr);
9673 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9675 emit_queue ();
9676 index = protect_from_queue (index, 0);
9677 do_pending_stack_adjust ();
9679 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
9680 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
9681 (index, op_mode))
9682 index = copy_to_mode_reg (op_mode, index);
9684 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
9686 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
9687 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
9688 op1, TREE_UNSIGNED (TREE_TYPE (minval)));
9689 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
9690 (op1, op_mode))
9691 op1 = copy_to_mode_reg (op_mode, op1);
9693 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
9695 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
9696 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
9697 op2, TREE_UNSIGNED (TREE_TYPE (range)));
9698 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
9699 (op2, op_mode))
9700 op2 = copy_to_mode_reg (op_mode, op2);
9702 emit_jump_insn (gen_casesi (index, op1, op2,
9703 table_label, default_label));
9704 return 1;
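/* Added note (illustrative only): a reasonably dense `switch', e.g. over
   the characters 'a'..'e', can be dispatched by one casesi insn taking
   the SImode index, the lower bound, the range, the table label and the
   default label prepared above.  */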
9707 /* Attempt to generate a tablejump instruction; same concept. */
9708 #ifndef HAVE_tablejump
9709 #define HAVE_tablejump 0
9710 #define gen_tablejump(x, y) (0)
9711 #endif
9713 /* Subroutine of the next function.
9715 INDEX is the value being switched on, with the lowest value
9716 in the table already subtracted.
9717 MODE is its expected mode (needed if INDEX is constant).
9718 RANGE is the length of the jump table.
9719 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
9721 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
9722 index value is out of range. */
9724 static void
9725 do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
9726 rtx default_label)
9728 rtx temp, vector;
9730 if (INTVAL (range) > cfun->max_jumptable_ents)
9731 cfun->max_jumptable_ents = INTVAL (range);
9733 /* Do an unsigned comparison (in the proper mode) between the index
9734 expression and the value which represents the length of the range.
9735 Since we just finished subtracting the lower bound of the range
9736 from the index expression, this comparison allows us to simultaneously
9737 check that the original index expression value is both greater than
9738 or equal to the minimum value of the range and less than or equal to
9739 the maximum value of the range. */
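/* Added worked example (illustrative only): with a case range of 10..15,
   INDEX arrives here as the original index minus 10 and RANGE is 5.  Any
   original index below 10 wraps around to a huge unsigned value, so the
   single GTU comparison below rejects both too-small and too-large
   indices at once.  */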
9741 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
9742 default_label);
9744 /* If index is in range, it must fit in Pmode.
9745 Convert to Pmode so we can index with it. */
9746 if (mode != Pmode)
9747 index = convert_to_mode (Pmode, index, 1);
9749 /* Don't let a MEM slip through, because then INDEX that comes
9750 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
9751 and break_out_memory_refs will go to work on it and mess it up. */
9752 #ifdef PIC_CASE_VECTOR_ADDRESS
9753 if (flag_pic && GET_CODE (index) != REG)
9754 index = copy_to_mode_reg (Pmode, index);
9755 #endif
9757 /* If flag_force_addr were to affect this address
9758 it could interfere with the tricky assumptions made
9759 about addresses that contain label-refs,
9760 which may be valid only very near the tablejump itself. */
9761 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
9762 GET_MODE_SIZE, because this indicates how large insns are. The other
9763 uses should all be Pmode, because they are addresses. This code
9764 could fail if addresses and insns are not the same size. */
9765 index = gen_rtx_PLUS (Pmode,
9766 gen_rtx_MULT (Pmode, index,
9767 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
9768 gen_rtx_LABEL_REF (Pmode, table_label));
9769 #ifdef PIC_CASE_VECTOR_ADDRESS
9770 if (flag_pic)
9771 index = PIC_CASE_VECTOR_ADDRESS (index);
9772 else
9773 #endif
9774 index = memory_address_noforce (CASE_VECTOR_MODE, index);
9775 temp = gen_reg_rtx (CASE_VECTOR_MODE);
9776 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
9777 RTX_UNCHANGING_P (vector) = 1;
9778 MEM_NOTRAP_P (vector) = 1;
9779 convert_move (temp, vector, 0);
9781 emit_jump_insn (gen_tablejump (temp, table_label));
9783 /* If we are generating PIC code or if the table is PC-relative, the
9784 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
9785 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
9786 emit_barrier ();
9789 int
9790 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
9791 rtx table_label, rtx default_label)
9793 rtx index;
9795 if (! HAVE_tablejump)
9796 return 0;
9798 index_expr = fold (build (MINUS_EXPR, index_type,
9799 convert (index_type, index_expr),
9800 convert (index_type, minval)));
9801 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9802 emit_queue ();
9803 index = protect_from_queue (index, 0);
9804 do_pending_stack_adjust ();
9806 do_tablejump (index, TYPE_MODE (index_type),
9807 convert_modes (TYPE_MODE (index_type),
9808 TYPE_MODE (TREE_TYPE (range)),
9809 expand_expr (range, NULL_RTX,
9810 VOIDmode, 0),
9811 TREE_UNSIGNED (TREE_TYPE (range))),
9812 table_label, default_label);
9813 return 1;
9816 /* Nonzero if the mode is a valid vector mode for this architecture.
9817 This returns nonzero even if there is no hardware support for the
9818 vector mode, but we can emulate with narrower modes. */
9820 int
9821 vector_mode_valid_p (enum machine_mode mode)
9823 enum mode_class class = GET_MODE_CLASS (mode);
9824 enum machine_mode innermode;
9826 /* Doh! What's going on? */
9827 if (class != MODE_VECTOR_INT
9828 && class != MODE_VECTOR_FLOAT)
9829 return 0;
9831 /* Hardware support. Woo hoo! */
9832 if (VECTOR_MODE_SUPPORTED_P (mode))
9833 return 1;
9835 innermode = GET_MODE_INNER (mode);
9837 /* We should probably return 1 if requesting V4DI and we have no DI
9838 but do have V2DI; however, this is probably very unlikely. */
9840 /* If we have support for the inner mode, we can safely emulate it.
9841 We may not have V2DI, but we can emulate with a pair of DIs. */
9842 return mov_optab->handlers[innermode].insn_code != CODE_FOR_nothing;
9845 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
9846 static rtx
9847 const_vector_from_tree (tree exp)
9849 rtvec v;
9850 int units, i;
9851 tree link, elt;
9852 enum machine_mode inner, mode;
9854 mode = TYPE_MODE (TREE_TYPE (exp));
9856 if (is_zeros_p (exp))
9857 return CONST0_RTX (mode);
9859 units = GET_MODE_NUNITS (mode);
9860 inner = GET_MODE_INNER (mode);
9862 v = rtvec_alloc (units);
9864 link = TREE_VECTOR_CST_ELTS (exp);
9865 for (i = 0; link; link = TREE_CHAIN (link), ++i)
9867 elt = TREE_VALUE (link);
9869 if (TREE_CODE (elt) == REAL_CST)
9870 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
9871 inner);
9872 else
9873 RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
9874 TREE_INT_CST_HIGH (elt),
9875 inner);
9878 /* Initialize remaining elements to 0. */
9879 for (; i < units; ++i)
9880 RTVEC_ELT (v, i) = CONST0_RTX (inner);
9882 return gen_rtx_raw_CONST_VECTOR (mode, v);
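/* Added example (illustrative only): a VECTOR_CST such as {1, 2, 3, 4}
   with a V4SImode type becomes roughly (const_vector:V4SI [1 2 3 4]);
   any elements missing from TREE_VECTOR_CST_ELTS are filled with zero
   as above.  */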
9885 #include "gt-expr.h"