[official-gcc.git] / gcc / expr.c
blob a359e067a9390c7807a834884a7df5e0e02cf112
1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "real.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "flags.h"
31 #include "regs.h"
32 #include "hard-reg-set.h"
33 #include "except.h"
34 #include "function.h"
35 #include "insn-config.h"
36 #include "insn-attr.h"
37 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
38 #include "expr.h"
39 #include "optabs.h"
40 #include "libfuncs.h"
41 #include "recog.h"
42 #include "reload.h"
43 #include "output.h"
44 #include "typeclass.h"
45 #include "toplev.h"
46 #include "ggc.h"
47 #include "langhooks.h"
48 #include "intl.h"
49 #include "tm_p.h"
50 #include "tree-iterator.h"
51 #include "tree-pass.h"
52 #include "tree-flow.h"
53 #include "target.h"
54 #include "timevar.h"
56 /* Decide whether a function's arguments should be processed
57 from first to last or from last to first.
59 They should if the stack and args grow in opposite directions, but
60 only if we have push insns. */
62 #ifdef PUSH_ROUNDING
64 #ifndef PUSH_ARGS_REVERSED
65 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
66 #define PUSH_ARGS_REVERSED /* If it's last to first. */
67 #endif
68 #endif
70 #endif
72 #ifndef STACK_PUSH_CODE
73 #ifdef STACK_GROWS_DOWNWARD
74 #define STACK_PUSH_CODE PRE_DEC
75 #else
76 #define STACK_PUSH_CODE PRE_INC
77 #endif
78 #endif
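/* Usage sketch (illustrative, assuming a STACK_GROWS_DOWNWARD target with
   pointer mode Pmode): a single push of X in mode MODE is emitted roughly as

     dest = gen_rtx_MEM (mode,
                         gen_rtx_PRE_DEC (Pmode, stack_pointer_rtx));
     emit_move_insn (dest, x);

   i.e. STACK_PUSH_CODE (PRE_DEC here) selects the auto-modification applied
   to the stack pointer by emit_single_push_insn.  */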
81 /* If this is nonzero, we do not bother generating VOLATILE
82 around volatile memory references, and we are willing to
83 output indirect addresses. If cse is to follow, we reject
84 indirect addresses so a useful potential cse is generated;
85 if it is used only once, instruction combination will produce
86 the same indirect address eventually. */
87 int cse_not_expected;
89 /* This structure is used by move_by_pieces to describe the move to
90 be performed. */
91 struct move_by_pieces
93 rtx to;
94 rtx to_addr;
95 int autinc_to;
96 int explicit_inc_to;
97 rtx from;
98 rtx from_addr;
99 int autinc_from;
100 int explicit_inc_from;
101 unsigned HOST_WIDE_INT len;
102 HOST_WIDE_INT offset;
103 int reverse;
106 /* This structure is used by store_by_pieces to describe the clear to
107 be performed. */
109 struct store_by_pieces
111 rtx to;
112 rtx to_addr;
113 int autinc_to;
114 int explicit_inc_to;
115 unsigned HOST_WIDE_INT len;
116 HOST_WIDE_INT offset;
117 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
118 void *constfundata;
119 int reverse;
122 static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
123 unsigned int,
124 unsigned int);
125 static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
126 struct move_by_pieces *);
127 static bool block_move_libcall_safe_for_call_parm (void);
128 static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned);
129 static rtx emit_block_move_via_libcall (rtx, rtx, rtx, bool);
130 static tree emit_block_move_libcall_fn (int);
131 static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
132 static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
133 static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
134 static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
135 static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
136 struct store_by_pieces *);
137 static bool clear_storage_via_clrmem (rtx, rtx, unsigned);
138 static rtx clear_storage_via_libcall (rtx, rtx, bool);
139 static tree clear_storage_libcall_fn (int);
140 static rtx compress_float_constant (rtx, rtx);
141 static rtx get_subtarget (rtx);
142 static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
143 HOST_WIDE_INT, enum machine_mode,
144 tree, tree, int, int);
145 static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
146 static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
147 tree, tree, int);
149 static unsigned HOST_WIDE_INT highest_pow2_factor (tree);
150 static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (tree, tree);
152 static int is_aligning_offset (tree, tree);
153 static void expand_operands (tree, tree, rtx, rtx*, rtx*,
154 enum expand_modifier);
155 static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
156 static rtx do_store_flag (tree, rtx, enum machine_mode, int);
157 #ifdef PUSH_ROUNDING
158 static void emit_single_push_insn (enum machine_mode, rtx, tree);
159 #endif
160 static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
161 static rtx const_vector_from_tree (tree);
162 static void write_complex_part (rtx, rtx, bool);
164 /* Record for each mode whether we can move a register directly to or
165 from an object of that mode in memory. If we can't, we won't try
166 to use that mode directly when accessing a field of that mode. */
168 static char direct_load[NUM_MACHINE_MODES];
169 static char direct_store[NUM_MACHINE_MODES];
171 /* Record for each mode whether we can float-extend from memory. */
173 static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
175 /* This macro is used to determine whether move_by_pieces should be called
176 to perform a structure copy. */
177 #ifndef MOVE_BY_PIECES_P
178 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
179 (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
180 < (unsigned int) MOVE_RATIO)
181 #endif
183 /* This macro is used to determine whether clear_by_pieces should be
184 called to clear storage. */
185 #ifndef CLEAR_BY_PIECES_P
186 #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
187 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
188 < (unsigned int) CLEAR_RATIO)
189 #endif
191 /* This macro is used to determine whether store_by_pieces should be
192 called to "memset" storage with byte values other than zero, or
193 to "memcpy" storage when the source is a constant string. */
194 #ifndef STORE_BY_PIECES_P
195 #define STORE_BY_PIECES_P(SIZE, ALIGN) \
196 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
197 < (unsigned int) MOVE_RATIO)
198 #endif
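/* Usage sketch (illustrative): emit_block_move below gates the inline
   expansion on this estimate when the length is a compile-time constant:

     if (GET_CODE (size) == CONST_INT
         && MOVE_BY_PIECES_P (INTVAL (size), align))
       move_by_pieces (x, y, INTVAL (size), align, 0);

   so these macros act as cost gates: expand piecewise only when the
   estimated insn count stays below MOVE_RATIO (or CLEAR_RATIO).  */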
200 /* This array records the insn_code of insns to perform block moves. */
201 enum insn_code movmem_optab[NUM_MACHINE_MODES];
203 /* This array records the insn_code of insns to perform block clears. */
204 enum insn_code clrmem_optab[NUM_MACHINE_MODES];
206 /* These arrays record the insn_code of two different kinds of insns
207 to perform block compares. */
208 enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
209 enum insn_code cmpmem_optab[NUM_MACHINE_MODES];
211 /* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow. */
213 #ifndef SLOW_UNALIGNED_ACCESS
214 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
215 #endif
217 /* This is run once per compilation to set up which modes can be used
218 directly in memory and to initialize the block move optab. */
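/* For example (illustrative): on a target whose movsi pattern accepts a
   general_operand source, the recog calls below recognize

     (set (reg:SI r) (mem:SI (reg:SI sp)))

   and direct_load[(int) SImode] is set to 1; the reverse SET likewise
   sets direct_store[(int) SImode].  */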
220 void
221 init_expr_once (void)
223 rtx insn, pat;
224 enum machine_mode mode;
225 int num_clobbers;
226 rtx mem, mem1;
227 rtx reg;
229 /* Try indexing by frame ptr and try by stack ptr.
230 It is known that on the Convex the stack ptr isn't a valid index.
231 With luck, one or the other is valid on any machine. */
232 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
233 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
235 /* A scratch register we can modify in-place below to avoid
236 useless RTL allocations. */
237 reg = gen_rtx_REG (VOIDmode, -1);
239 insn = rtx_alloc (INSN);
240 pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
241 PATTERN (insn) = pat;
243 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
244 mode = (enum machine_mode) ((int) mode + 1))
246 int regno;
248 direct_load[(int) mode] = direct_store[(int) mode] = 0;
249 PUT_MODE (mem, mode);
250 PUT_MODE (mem1, mode);
251 PUT_MODE (reg, mode);
253 /* See if there is some register that can be used in this mode and
254 directly loaded or stored from memory. */
256 if (mode != VOIDmode && mode != BLKmode)
257 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
258 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
259 regno++)
261 if (! HARD_REGNO_MODE_OK (regno, mode))
262 continue;
264 REGNO (reg) = regno;
266 SET_SRC (pat) = mem;
267 SET_DEST (pat) = reg;
268 if (recog (pat, insn, &num_clobbers) >= 0)
269 direct_load[(int) mode] = 1;
271 SET_SRC (pat) = mem1;
272 SET_DEST (pat) = reg;
273 if (recog (pat, insn, &num_clobbers) >= 0)
274 direct_load[(int) mode] = 1;
276 SET_SRC (pat) = reg;
277 SET_DEST (pat) = mem;
278 if (recog (pat, insn, &num_clobbers) >= 0)
279 direct_store[(int) mode] = 1;
281 SET_SRC (pat) = reg;
282 SET_DEST (pat) = mem1;
283 if (recog (pat, insn, &num_clobbers) >= 0)
284 direct_store[(int) mode] = 1;
288 mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
290 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
291 mode = GET_MODE_WIDER_MODE (mode))
293 enum machine_mode srcmode;
294 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
295 srcmode = GET_MODE_WIDER_MODE (srcmode))
297 enum insn_code ic;
299 ic = can_extend_p (mode, srcmode, 0);
300 if (ic == CODE_FOR_nothing)
301 continue;
303 PUT_MODE (mem, srcmode);
305 if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
306 float_extend_from_mem[mode][srcmode] = true;
311 /* This is run at the start of compiling a function. */
313 void
314 init_expr (void)
316 cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
319 /* Copy data from FROM to TO, where the machine modes are not the same.
320 Both modes may be integer, or both may be floating.
321 UNSIGNEDP should be nonzero if FROM is an unsigned type.
322 This causes zero-extension instead of sign-extension. */
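/* Usage sketch (illustrative): a typical caller widens a value into a fresh
   pseudo of the destination mode, e.g. with NARROW_SI being some SImode rtx:

     rtx wide = gen_reg_rtx (DImode);
     convert_move (wide, narrow_si, 1);   // nonzero UNSIGNEDP => zero-extend

   which reduces to a single extension insn when the target provides one,
   and otherwise falls through to the multiword code below.  */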
324 void
325 convert_move (rtx to, rtx from, int unsignedp)
327 enum machine_mode to_mode = GET_MODE (to);
328 enum machine_mode from_mode = GET_MODE (from);
329 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
330 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
331 enum insn_code code;
332 rtx libcall;
334 /* rtx code for making an equivalent value. */
335 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
336 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
339 gcc_assert (to_real == from_real);
341 /* If the source and destination are already the same, then there's
342 nothing to do. */
343 if (to == from)
344 return;
346 /* If FROM is a SUBREG that indicates that we have already done at least
347 the required extension, strip it. We don't handle such SUBREGs as
348 TO here. */
350 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
351 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
352 >= GET_MODE_SIZE (to_mode))
353 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
354 from = gen_lowpart (to_mode, from), from_mode = to_mode;
356 gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));
358 if (to_mode == from_mode
359 || (from_mode == VOIDmode && CONSTANT_P (from)))
361 emit_move_insn (to, from);
362 return;
365 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
367 gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));
369 if (VECTOR_MODE_P (to_mode))
370 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
371 else
372 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
374 emit_move_insn (to, from);
375 return;
378 if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
380 convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
381 convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
382 return;
385 if (to_real)
387 rtx value, insns;
388 convert_optab tab;
390 gcc_assert (GET_MODE_PRECISION (from_mode)
391 != GET_MODE_PRECISION (to_mode));
393 if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
394 tab = sext_optab;
395 else
396 tab = trunc_optab;
398 /* Try converting directly if the insn is supported. */
400 code = tab->handlers[to_mode][from_mode].insn_code;
401 if (code != CODE_FOR_nothing)
403 emit_unop_insn (code, to, from,
404 tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
405 return;
408 /* Otherwise use a libcall. */
409 libcall = tab->handlers[to_mode][from_mode].libfunc;
411 /* Is this conversion implemented yet? */
412 gcc_assert (libcall);
414 start_sequence ();
415 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
416 1, from, from_mode);
417 insns = get_insns ();
418 end_sequence ();
419 emit_libcall_block (insns, to, value,
420 tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
421 from)
422 : gen_rtx_FLOAT_EXTEND (to_mode, from));
423 return;
426 /* Handle pointer conversion. */ /* SPEE 900220. */
427 /* Targets are expected to provide conversion insns between PxImode and
428 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
429 if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
431 enum machine_mode full_mode
432 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
434 gcc_assert (trunc_optab->handlers[to_mode][full_mode].insn_code
435 != CODE_FOR_nothing);
437 if (full_mode != from_mode)
438 from = convert_to_mode (full_mode, from, unsignedp);
439 emit_unop_insn (trunc_optab->handlers[to_mode][full_mode].insn_code,
440 to, from, UNKNOWN);
441 return;
443 if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
445 enum machine_mode full_mode
446 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
448 gcc_assert (sext_optab->handlers[full_mode][from_mode].insn_code
449 != CODE_FOR_nothing);
451 emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
452 to, from, UNKNOWN);
453 if (to_mode == full_mode)
454 return;
456 /* else proceed to integer conversions below. */
457 from_mode = full_mode;
460 /* Now both modes are integers. */
462 /* Handle expanding beyond a word. */
463 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
464 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
466 rtx insns;
467 rtx lowpart;
468 rtx fill_value;
469 rtx lowfrom;
470 int i;
471 enum machine_mode lowpart_mode;
472 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
474 /* Try converting directly if the insn is supported. */
475 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
476 != CODE_FOR_nothing)
478 /* If FROM is a SUBREG, put it into a register. Do this
479 so that we always generate the same set of insns for
480 better cse'ing; if an intermediate assignment occurred,
481 we won't be doing the operation directly on the SUBREG. */
482 if (optimize > 0 && GET_CODE (from) == SUBREG)
483 from = force_reg (from_mode, from);
484 emit_unop_insn (code, to, from, equiv_code);
485 return;
487 /* Next, try converting via full word. */
488 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
489 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
490 != CODE_FOR_nothing))
492 if (REG_P (to))
494 if (reg_overlap_mentioned_p (to, from))
495 from = force_reg (from_mode, from);
496 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
498 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
499 emit_unop_insn (code, to,
500 gen_lowpart (word_mode, to), equiv_code);
501 return;
504 /* No special multiword conversion insn; do it by hand. */
505 start_sequence ();
507 /* Since we will turn this into a no conflict block, we must ensure
508 that the source does not overlap the target. */
510 if (reg_overlap_mentioned_p (to, from))
511 from = force_reg (from_mode, from);
513 /* Get a copy of FROM widened to a word, if necessary. */
514 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
515 lowpart_mode = word_mode;
516 else
517 lowpart_mode = from_mode;
519 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
521 lowpart = gen_lowpart (lowpart_mode, to);
522 emit_move_insn (lowpart, lowfrom);
524 /* Compute the value to put in each remaining word. */
525 if (unsignedp)
526 fill_value = const0_rtx;
527 else
529 #ifdef HAVE_slt
530 if (HAVE_slt
531 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
532 && STORE_FLAG_VALUE == -1)
534 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
535 lowpart_mode, 0);
536 fill_value = gen_reg_rtx (word_mode);
537 emit_insn (gen_slt (fill_value));
539 else
540 #endif
542 fill_value
543 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
544 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
545 NULL_RTX, 0);
546 fill_value = convert_to_mode (word_mode, fill_value, 1);
550 /* Fill the remaining words. */
551 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
553 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
554 rtx subword = operand_subword (to, index, 1, to_mode);
556 gcc_assert (subword);
558 if (fill_value != subword)
559 emit_move_insn (subword, fill_value);
562 insns = get_insns ();
563 end_sequence ();
565 emit_no_conflict_block (insns, to, from, NULL_RTX,
566 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
567 return;
570 /* Truncating multi-word to a word or less. */
571 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
572 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
574 if (!((MEM_P (from)
575 && ! MEM_VOLATILE_P (from)
576 && direct_load[(int) to_mode]
577 && ! mode_dependent_address_p (XEXP (from, 0)))
578 || REG_P (from)
579 || GET_CODE (from) == SUBREG))
580 from = force_reg (from_mode, from);
581 convert_move (to, gen_lowpart (word_mode, from), 0);
582 return;
585 /* Now follow all the conversions between integers
586 no more than a word long. */
588 /* For truncation, usually we can just refer to FROM in a narrower mode. */
589 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
590 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
591 GET_MODE_BITSIZE (from_mode)))
593 if (!((MEM_P (from)
594 && ! MEM_VOLATILE_P (from)
595 && direct_load[(int) to_mode]
596 && ! mode_dependent_address_p (XEXP (from, 0)))
597 || REG_P (from)
598 || GET_CODE (from) == SUBREG))
599 from = force_reg (from_mode, from);
600 if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
601 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
602 from = copy_to_reg (from);
603 emit_move_insn (to, gen_lowpart (to_mode, from));
604 return;
607 /* Handle extension. */
608 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
610 /* Convert directly if that works. */
611 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
612 != CODE_FOR_nothing)
614 if (flag_force_mem)
615 from = force_not_mem (from);
617 emit_unop_insn (code, to, from, equiv_code);
618 return;
620 else
622 enum machine_mode intermediate;
623 rtx tmp;
624 tree shift_amount;
626 /* Search for a mode to convert via. */
627 for (intermediate = from_mode; intermediate != VOIDmode;
628 intermediate = GET_MODE_WIDER_MODE (intermediate))
629 if (((can_extend_p (to_mode, intermediate, unsignedp)
630 != CODE_FOR_nothing)
631 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
632 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
633 GET_MODE_BITSIZE (intermediate))))
634 && (can_extend_p (intermediate, from_mode, unsignedp)
635 != CODE_FOR_nothing))
637 convert_move (to, convert_to_mode (intermediate, from,
638 unsignedp), unsignedp);
639 return;
642 /* No suitable intermediate mode.
643 Generate what we need with shifts. */
644 shift_amount = build_int_cst (NULL_TREE,
645 GET_MODE_BITSIZE (to_mode)
646 - GET_MODE_BITSIZE (from_mode));
647 from = gen_lowpart (to_mode, force_reg (from_mode, from));
648 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
649 to, unsignedp);
650 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
651 to, unsignedp);
652 if (tmp != to)
653 emit_move_insn (to, tmp);
654 return;
658 /* Support special truncate insns for certain modes. */
659 if (trunc_optab->handlers[to_mode][from_mode].insn_code != CODE_FOR_nothing)
661 emit_unop_insn (trunc_optab->handlers[to_mode][from_mode].insn_code,
662 to, from, UNKNOWN);
663 return;
666 /* Handle truncation of volatile memrefs, and so on;
667 the things that couldn't be truncated directly,
668 and for which there was no special instruction.
670 ??? Code above formerly short-circuited this, for most integer
671 mode pairs, with a force_reg in from_mode followed by a recursive
672 call to this routine. Appears always to have been wrong. */
673 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
675 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
676 emit_move_insn (to, temp);
677 return;
680 /* Mode combination is not recognized. */
681 gcc_unreachable ();
684 /* Return an rtx for a value that would result
685 from converting X to mode MODE.
686 Both X and MODE may be floating, or both integer.
687 UNSIGNEDP is nonzero if X is an unsigned value.
688 This can be done by referring to a part of X in place
689 or by copying to a new temporary with conversion. */
692 convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
694 return convert_modes (mode, VOIDmode, x, unsignedp);
697 /* Return an rtx for a value that would result
698 from converting X from mode OLDMODE to mode MODE.
699 Both modes may be floating, or both integer.
700 UNSIGNEDP is nonzero if X is an unsigned value.
702 This can be done by referring to a part of X in place
703 or by copying to a new temporary with conversion.
705 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode. */
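/* Usage sketch (illustrative): emit_block_move_via_movmem below uses the
   convert_to_mode wrapper to coerce the byte count into the mode the
   movmem pattern expects:

     op2 = convert_to_mode (mode, size, 1);

   convert_modes is the general form for when the caller knows the current
   mode of X (OLDMODE) even if X itself is a VOIDmode constant.  */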
708 convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
710 rtx temp;
712 /* If X is a SUBREG that indicates that we have already done at least
713 the required extension, strip it. */
715 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
716 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
717 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
718 x = gen_lowpart (mode, x);
720 if (GET_MODE (x) != VOIDmode)
721 oldmode = GET_MODE (x);
723 if (mode == oldmode)
724 return x;
726 /* There is one case that we must handle specially: If we are converting
727 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
728 we are to interpret the constant as unsigned, gen_lowpart will do
729 the wrong thing if the constant appears negative. What we want to do is
730 make the high-order word of the constant zero, not all ones. */
732 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
733 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
734 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
736 HOST_WIDE_INT val = INTVAL (x);
738 if (oldmode != VOIDmode
739 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
741 int width = GET_MODE_BITSIZE (oldmode);
743 /* We need to zero extend VAL. */
744 val &= ((HOST_WIDE_INT) 1 << width) - 1;
747 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
750 /* We can do this with a gen_lowpart if both desired and current modes
751 are integer, and this is either a constant integer, a register, or a
752 non-volatile MEM. Except for the constant case where MODE is no
753 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
755 if ((GET_CODE (x) == CONST_INT
756 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
757 || (GET_MODE_CLASS (mode) == MODE_INT
758 && GET_MODE_CLASS (oldmode) == MODE_INT
759 && (GET_CODE (x) == CONST_DOUBLE
760 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
761 && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
762 && direct_load[(int) mode])
763 || (REG_P (x)
764 && (! HARD_REGISTER_P (x)
765 || HARD_REGNO_MODE_OK (REGNO (x), mode))
766 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
767 GET_MODE_BITSIZE (GET_MODE (x)))))))))
769 /* ??? If we don't know OLDMODE, we have to assume here that
770 X does not need sign- or zero-extension. This may not be
771 the case, but it's the best we can do. */
772 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
773 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
775 HOST_WIDE_INT val = INTVAL (x);
776 int width = GET_MODE_BITSIZE (oldmode);
778 /* We must sign or zero-extend in this case. Start by
779 zero-extending, then sign extend if we need to. */
780 val &= ((HOST_WIDE_INT) 1 << width) - 1;
781 if (! unsignedp
782 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
783 val |= (HOST_WIDE_INT) (-1) << width;
785 return gen_int_mode (val, mode);
788 return gen_lowpart (mode, x);
791 /* Converting an integer constant into a vector mode is always equivalent to a
792 subreg operation. */
793 if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
795 gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
796 return simplify_gen_subreg (mode, x, oldmode, 0);
799 temp = gen_reg_rtx (mode);
800 convert_move (temp, x, unsignedp);
801 return temp;
804 /* STORE_MAX_PIECES is the number of bytes at a time that we can
805 store efficiently. Due to internal GCC limitations, this is
806 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
807 for an immediate constant. */
809 #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
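/* Worked example (illustrative): on a 64-bit host (HOST_WIDE_INT of 8 bytes)
   with MOVE_MAX_PIECES of 8, STORE_MAX_PIECES is MIN (8, 16) = 8, so stores
   of up to 8 bytes at a time may be emitted by pieces.  */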
811 /* Determine whether the LEN bytes can be moved by using several move
812 instructions. Return nonzero if a call to move_by_pieces should
813 succeed. */
816 can_move_by_pieces (unsigned HOST_WIDE_INT len,
817 unsigned int align ATTRIBUTE_UNUSED)
819 return MOVE_BY_PIECES_P (len, align);
822 /* Generate several move instructions to copy LEN bytes from block FROM to
823 block TO. (These are MEM rtx's with BLKmode).
825 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
826 used to push FROM to the stack.
828 ALIGN is maximum stack alignment we can assume.
830 If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
831 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
832 stpcpy. */
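/* Usage sketch (illustrative): emit_block_move below calls this with
   ENDP == 0 for a plain copy,

     move_by_pieces (x, y, INTVAL (size), align, 0);

   while the builtin expanders are expected to pass ENDP == 1 (mempcpy)
   or ENDP == 2 (stpcpy) when they need the end address back.  */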
835 move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
836 unsigned int align, int endp)
838 struct move_by_pieces data;
839 rtx to_addr, from_addr = XEXP (from, 0);
840 unsigned int max_size = MOVE_MAX_PIECES + 1;
841 enum machine_mode mode = VOIDmode, tmode;
842 enum insn_code icode;
844 align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
846 data.offset = 0;
847 data.from_addr = from_addr;
848 if (to)
850 to_addr = XEXP (to, 0);
851 data.to = to;
852 data.autinc_to
853 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
854 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
855 data.reverse
856 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
858 else
860 to_addr = NULL_RTX;
861 data.to = NULL_RTX;
862 data.autinc_to = 1;
863 #ifdef STACK_GROWS_DOWNWARD
864 data.reverse = 1;
865 #else
866 data.reverse = 0;
867 #endif
869 data.to_addr = to_addr;
870 data.from = from;
871 data.autinc_from
872 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
873 || GET_CODE (from_addr) == POST_INC
874 || GET_CODE (from_addr) == POST_DEC);
876 data.explicit_inc_from = 0;
877 data.explicit_inc_to = 0;
878 if (data.reverse) data.offset = len;
879 data.len = len;
881 /* If copying requires more than two move insns,
882 copy addresses to registers (to make displacements shorter)
883 and use post-increment if available. */
884 if (!(data.autinc_from && data.autinc_to)
885 && move_by_pieces_ninsns (len, align, max_size) > 2)
887 /* Find the mode of the largest move... */
888 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
889 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
890 if (GET_MODE_SIZE (tmode) < max_size)
891 mode = tmode;
893 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
895 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
896 data.autinc_from = 1;
897 data.explicit_inc_from = -1;
899 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
901 data.from_addr = copy_addr_to_reg (from_addr);
902 data.autinc_from = 1;
903 data.explicit_inc_from = 1;
905 if (!data.autinc_from && CONSTANT_P (from_addr))
906 data.from_addr = copy_addr_to_reg (from_addr);
907 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
909 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
910 data.autinc_to = 1;
911 data.explicit_inc_to = -1;
913 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
915 data.to_addr = copy_addr_to_reg (to_addr);
916 data.autinc_to = 1;
917 data.explicit_inc_to = 1;
919 if (!data.autinc_to && CONSTANT_P (to_addr))
920 data.to_addr = copy_addr_to_reg (to_addr);
923 tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
924 if (align >= GET_MODE_ALIGNMENT (tmode))
925 align = GET_MODE_ALIGNMENT (tmode);
926 else
928 enum machine_mode xmode;
930 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
931 tmode != VOIDmode;
932 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
933 if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
934 || SLOW_UNALIGNED_ACCESS (tmode, align))
935 break;
937 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
940 /* First move what we can in the largest integer mode, then go to
941 successively smaller modes. */
943 while (max_size > 1)
945 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
946 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
947 if (GET_MODE_SIZE (tmode) < max_size)
948 mode = tmode;
950 if (mode == VOIDmode)
951 break;
953 icode = mov_optab->handlers[(int) mode].insn_code;
954 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
955 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
957 max_size = GET_MODE_SIZE (mode);
960 /* The code above should have handled everything. */
961 gcc_assert (!data.len);
963 if (endp)
965 rtx to1;
967 gcc_assert (!data.reverse);
968 if (data.autinc_to)
970 if (endp == 2)
972 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
973 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
974 else
975 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
976 -1));
978 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
979 data.offset);
981 else
983 if (endp == 2)
984 --data.offset;
985 to1 = adjust_address (data.to, QImode, data.offset);
987 return to1;
989 else
990 return data.to;
993 /* Return number of insns required to move L bytes by pieces.
994 ALIGN (in bits) is maximum alignment we can assume. */
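/* Worked example (illustrative, assuming MOVE_MAX_PIECES of 4 and a fully
   aligned block): for L = 7 the loop below counts one SImode move (4 bytes),
   one HImode move (2 bytes) and one QImode move (1 byte), so the result is
   3 insns.  */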
996 static unsigned HOST_WIDE_INT
997 move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
998 unsigned int max_size)
1000 unsigned HOST_WIDE_INT n_insns = 0;
1001 enum machine_mode tmode;
1003 tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
1004 if (align >= GET_MODE_ALIGNMENT (tmode))
1005 align = GET_MODE_ALIGNMENT (tmode);
1006 else
1008 enum machine_mode tmode, xmode;
1010 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
1011 tmode != VOIDmode;
1012 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
1013 if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
1014 || SLOW_UNALIGNED_ACCESS (tmode, align))
1015 break;
1017 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
1020 while (max_size > 1)
1022 enum machine_mode mode = VOIDmode;
1023 enum insn_code icode;
1025 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1026 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1027 if (GET_MODE_SIZE (tmode) < max_size)
1028 mode = tmode;
1030 if (mode == VOIDmode)
1031 break;
1033 icode = mov_optab->handlers[(int) mode].insn_code;
1034 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1035 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1037 max_size = GET_MODE_SIZE (mode);
1040 gcc_assert (!l);
1041 return n_insns;
1044 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1045 with move instructions for mode MODE. GENFUN is the gen_... function
1046 to make a move insn for that mode. DATA has all the other info. */
1048 static void
1049 move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
1050 struct move_by_pieces *data)
1052 unsigned int size = GET_MODE_SIZE (mode);
1053 rtx to1 = NULL_RTX, from1;
1055 while (data->len >= size)
1057 if (data->reverse)
1058 data->offset -= size;
1060 if (data->to)
1062 if (data->autinc_to)
1063 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1064 data->offset);
1065 else
1066 to1 = adjust_address (data->to, mode, data->offset);
1069 if (data->autinc_from)
1070 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1071 data->offset);
1072 else
1073 from1 = adjust_address (data->from, mode, data->offset);
1075 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1076 emit_insn (gen_add2_insn (data->to_addr,
1077 GEN_INT (-(HOST_WIDE_INT)size)));
1078 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1079 emit_insn (gen_add2_insn (data->from_addr,
1080 GEN_INT (-(HOST_WIDE_INT)size)));
1082 if (data->to)
1083 emit_insn ((*genfun) (to1, from1));
1084 else
1086 #ifdef PUSH_ROUNDING
1087 emit_single_push_insn (mode, from1, NULL);
1088 #else
1089 gcc_unreachable ();
1090 #endif
1093 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1094 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1095 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1096 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1098 if (! data->reverse)
1099 data->offset += size;
1101 data->len -= size;
1105 /* Emit code to move a block Y to a block X. This may be done with
1106 string-move instructions, with multiple scalar move instructions,
1107 or with a library call.
1109 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1110 SIZE is an rtx that says how long they are.
1111 ALIGN is the maximum alignment we can assume they have.
1112 METHOD describes what kind of copy this is, and what mechanisms may be used.
1114 Return the address of the new block, if memcpy is called and returns it,
1115 0 otherwise. */
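/* Usage sketch (illustrative): a typical caller copies an aggregate of
   known size with the default method, e.g.

     emit_block_move (target, temp,
                      GEN_INT (int_size_in_bytes (type)),
                      BLOCK_OP_NORMAL);

   BLOCK_OP_CALL_PARM is used when the copy feeds an outgoing argument and a
   libcall might clobber pushed parameters; BLOCK_OP_NO_LIBCALL forbids the
   memcpy fallback altogether.  */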
1118 emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
1120 bool may_use_call;
1121 rtx retval = 0;
1122 unsigned int align;
1124 switch (method)
1126 case BLOCK_OP_NORMAL:
1127 case BLOCK_OP_TAILCALL:
1128 may_use_call = true;
1129 break;
1131 case BLOCK_OP_CALL_PARM:
1132 may_use_call = block_move_libcall_safe_for_call_parm ();
1134 /* Make inhibit_defer_pop nonzero around the library call
1135 to force it to pop the arguments right away. */
1136 NO_DEFER_POP;
1137 break;
1139 case BLOCK_OP_NO_LIBCALL:
1140 may_use_call = false;
1141 break;
1143 default:
1144 gcc_unreachable ();
1147 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1149 gcc_assert (MEM_P (x));
1150 gcc_assert (MEM_P (y));
1151 gcc_assert (size);
1153 /* Make sure we've got BLKmode addresses; store_one_arg can decide that
1154 block copy is more efficient for other large modes, e.g. DCmode. */
1155 x = adjust_address (x, BLKmode, 0);
1156 y = adjust_address (y, BLKmode, 0);
1158 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1159 can be incorrect is a size coming from __builtin_memcpy. */
1160 if (GET_CODE (size) == CONST_INT)
1162 if (INTVAL (size) == 0)
1163 return 0;
1165 x = shallow_copy_rtx (x);
1166 y = shallow_copy_rtx (y);
1167 set_mem_size (x, size);
1168 set_mem_size (y, size);
1171 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1172 move_by_pieces (x, y, INTVAL (size), align, 0);
1173 else if (emit_block_move_via_movmem (x, y, size, align))
1175 else if (may_use_call)
1176 retval = emit_block_move_via_libcall (x, y, size,
1177 method == BLOCK_OP_TAILCALL);
1178 else
1179 emit_block_move_via_loop (x, y, size, align);
1181 if (method == BLOCK_OP_CALL_PARM)
1182 OK_DEFER_POP;
1184 return retval;
1187 /* A subroutine of emit_block_move. Returns true if calling the
1188 block move libcall will not clobber any parameters which may have
1189 already been placed on the stack. */
1191 static bool
1192 block_move_libcall_safe_for_call_parm (void)
1194 /* If arguments are pushed on the stack, then they're safe. */
1195 if (PUSH_ARGS)
1196 return true;
1198 /* If registers go on the stack anyway, any argument is sure to clobber
1199 an outgoing argument. */
1200 #if defined (REG_PARM_STACK_SPACE) && defined (OUTGOING_REG_PARM_STACK_SPACE)
1202 tree fn = emit_block_move_libcall_fn (false);
1203 (void) fn;
1204 if (REG_PARM_STACK_SPACE (fn) != 0)
1205 return false;
1207 #endif
1209 /* If any argument goes in memory, then it might clobber an outgoing
1210 argument. */
1212 CUMULATIVE_ARGS args_so_far;
1213 tree fn, arg;
1215 fn = emit_block_move_libcall_fn (false);
1216 INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);
1218 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1219 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1221 enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1222 rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
1223 if (!tmp || !REG_P (tmp))
1224 return false;
1225 if (targetm.calls.arg_partial_bytes (&args_so_far, mode, NULL, 1))
1226 return false;
1227 FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
1230 return true;
1233 /* A subroutine of emit_block_move. Expand a movmem pattern;
1234 return true if successful. */
1236 static bool
1237 emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align)
1239 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1240 int save_volatile_ok = volatile_ok;
1241 enum machine_mode mode;
1243 /* Since this is a move insn, we don't care about volatility. */
1244 volatile_ok = 1;
1246 /* Try the most limited insn first, because there's no point
1247 including more than one in the machine description unless
1248 the more limited one has some advantage. */
1250 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1251 mode = GET_MODE_WIDER_MODE (mode))
1253 enum insn_code code = movmem_optab[(int) mode];
1254 insn_operand_predicate_fn pred;
1256 if (code != CODE_FOR_nothing
1257 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1258 here because if SIZE is less than the mode mask, as it is
1259 returned by the macro, it will definitely be less than the
1260 actual mode mask. */
1261 && ((GET_CODE (size) == CONST_INT
1262 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1263 <= (GET_MODE_MASK (mode) >> 1)))
1264 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1265 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1266 || (*pred) (x, BLKmode))
1267 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1268 || (*pred) (y, BLKmode))
1269 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1270 || (*pred) (opalign, VOIDmode)))
1272 rtx op2;
1273 rtx last = get_last_insn ();
1274 rtx pat;
1276 op2 = convert_to_mode (mode, size, 1);
1277 pred = insn_data[(int) code].operand[2].predicate;
1278 if (pred != 0 && ! (*pred) (op2, mode))
1279 op2 = copy_to_mode_reg (mode, op2);
1281 /* ??? When called via emit_block_move_for_call, it'd be
1282 nice if there were some way to inform the backend, so
1283 that it doesn't fail the expansion because it thinks
1284 emitting the libcall would be more efficient. */
1286 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1287 if (pat)
1289 emit_insn (pat);
1290 volatile_ok = save_volatile_ok;
1291 return true;
1293 else
1294 delete_insns_since (last);
1298 volatile_ok = save_volatile_ok;
1299 return false;
1302 /* A subroutine of emit_block_move. Expand a call to memcpy.
1303 Return the return value from memcpy, 0 otherwise. */
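/* In effect (illustrative), the code below builds and expands the tree
   equivalent of the C call

     retval = memcpy (dst, src, size);

   using the real memcpy calling convention rather than a bare libcall,
   so that the returned pointer is usable by the caller.  */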
1305 static rtx
1306 emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
1308 rtx dst_addr, src_addr;
1309 tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
1310 enum machine_mode size_mode;
1311 rtx retval;
1313 /* Emit code to copy the addresses of DST and SRC and SIZE into new
1314 pseudos. We can then place those new pseudos into a VAR_DECL and
1315 use them later. */
1317 dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
1318 src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));
1320 dst_addr = convert_memory_address (ptr_mode, dst_addr);
1321 src_addr = convert_memory_address (ptr_mode, src_addr);
1323 dst_tree = make_tree (ptr_type_node, dst_addr);
1324 src_tree = make_tree (ptr_type_node, src_addr);
1326 size_mode = TYPE_MODE (sizetype);
1328 size = convert_to_mode (size_mode, size, 1);
1329 size = copy_to_mode_reg (size_mode, size);
1331 /* It is incorrect to use the libcall calling conventions to call
1332 memcpy in this context. This could be a user call to memcpy and
1333 the user may wish to examine the return value from memcpy. For
1334 targets where libcalls and normal calls have different conventions
1335 for returning pointers, we could end up generating incorrect code. */
1337 size_tree = make_tree (sizetype, size);
1339 fn = emit_block_move_libcall_fn (true);
1340 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
1341 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1342 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
1344 /* Now we have to build up the CALL_EXPR itself. */
1345 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1346 call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1347 call_expr, arg_list, NULL_TREE);
1348 CALL_EXPR_TAILCALL (call_expr) = tailcall;
1350 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1352 return retval;
1355 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1356 for the function we use for block copies. The first time FOR_CALL
1357 is true, we call assemble_external. */
1359 static GTY(()) tree block_move_fn;
1361 void
1362 init_block_move_fn (const char *asmspec)
1364 if (!block_move_fn)
1366 tree args, fn;
1368 fn = get_identifier ("memcpy");
1369 args = build_function_type_list (ptr_type_node, ptr_type_node,
1370 const_ptr_type_node, sizetype,
1371 NULL_TREE);
1373 fn = build_decl (FUNCTION_DECL, fn, args);
1374 DECL_EXTERNAL (fn) = 1;
1375 TREE_PUBLIC (fn) = 1;
1376 DECL_ARTIFICIAL (fn) = 1;
1377 TREE_NOTHROW (fn) = 1;
1378 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
1379 DECL_VISIBILITY_SPECIFIED (fn) = 1;
1381 block_move_fn = fn;
1384 if (asmspec)
1385 set_user_assembler_name (block_move_fn, asmspec);
1388 static tree
1389 emit_block_move_libcall_fn (int for_call)
1391 static bool emitted_extern;
1393 if (!block_move_fn)
1394 init_block_move_fn (NULL);
1396 if (for_call && !emitted_extern)
1398 emitted_extern = true;
1399 make_decl_rtl (block_move_fn);
1400 assemble_external (block_move_fn);
1403 return block_move_fn;
1406 /* A subroutine of emit_block_move. Copy the data via an explicit
1407 loop. This is used only when libcalls are forbidden. */
1408 /* ??? It'd be nice to copy in hunks larger than QImode. */
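/* The emitted RTL is equivalent to this C loop (illustrative):

     for (i = 0; i < size; i++)
       ((char *) x)[i] = ((char *) y)[i];

   with I kept in a register whose mode is taken from SIZE (word_mode if SIZE
   is a constant) and compared with an unsigned less-than branch back to the
   top of the loop.  */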
1410 static void
1411 emit_block_move_via_loop (rtx x, rtx y, rtx size,
1412 unsigned int align ATTRIBUTE_UNUSED)
1414 rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
1415 enum machine_mode iter_mode;
1417 iter_mode = GET_MODE (size);
1418 if (iter_mode == VOIDmode)
1419 iter_mode = word_mode;
1421 top_label = gen_label_rtx ();
1422 cmp_label = gen_label_rtx ();
1423 iter = gen_reg_rtx (iter_mode);
1425 emit_move_insn (iter, const0_rtx);
1427 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1428 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1429 do_pending_stack_adjust ();
1431 emit_jump (cmp_label);
1432 emit_label (top_label);
1434 tmp = convert_modes (Pmode, iter_mode, iter, true);
1435 x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
1436 y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
1437 x = change_address (x, QImode, x_addr);
1438 y = change_address (y, QImode, y_addr);
1440 emit_move_insn (x, y);
1442 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1443 true, OPTAB_LIB_WIDEN);
1444 if (tmp != iter)
1445 emit_move_insn (iter, tmp);
1447 emit_label (cmp_label);
1449 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1450 true, top_label);
1453 /* Copy all or part of a value X into registers starting at REGNO.
1454 The number of registers to be filled is NREGS. */
1456 void
1457 move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
1459 int i;
1460 #ifdef HAVE_load_multiple
1461 rtx pat;
1462 rtx last;
1463 #endif
1465 if (nregs == 0)
1466 return;
1468 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1469 x = validize_mem (force_const_mem (mode, x));
1471 /* See if the machine can do this with a load multiple insn. */
1472 #ifdef HAVE_load_multiple
1473 if (HAVE_load_multiple)
1475 last = get_last_insn ();
1476 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1477 GEN_INT (nregs));
1478 if (pat)
1480 emit_insn (pat);
1481 return;
1483 else
1484 delete_insns_since (last);
1486 #endif
1488 for (i = 0; i < nregs; i++)
1489 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1490 operand_subword_force (x, i, mode));
1493 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1494 The number of registers to be filled is NREGS. */
1496 void
1497 move_block_from_reg (int regno, rtx x, int nregs)
1499 int i;
1501 if (nregs == 0)
1502 return;
1504 /* See if the machine can do this with a store multiple insn. */
1505 #ifdef HAVE_store_multiple
1506 if (HAVE_store_multiple)
1508 rtx last = get_last_insn ();
1509 rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1510 GEN_INT (nregs));
1511 if (pat)
1513 emit_insn (pat);
1514 return;
1516 else
1517 delete_insns_since (last);
1519 #endif
1521 for (i = 0; i < nregs; i++)
1523 rtx tem = operand_subword (x, i, 1, BLKmode);
1525 gcc_assert (tem);
1527 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1531 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1532 ORIG, where ORIG is a non-consecutive group of registers represented by
1533 a PARALLEL. The clone is identical to the original except in that the
1534 original set of registers is replaced by a new set of pseudo registers.
1535 The new set has the same modes as the original set. */
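/* Shape of such a group (illustrative): a value returned partly in two
   registers might be represented as

     (parallel [(expr_list (reg:DI 3) (const_int 0))
                (expr_list (reg:DI 4) (const_int 8))])

   where each EXPR_LIST pairs a register with its byte offset into the whole
   value; a NULL first entry means part of the value also lives on the
   stack.  */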
1538 gen_group_rtx (rtx orig)
1540 int i, length;
1541 rtx *tmps;
1543 gcc_assert (GET_CODE (orig) == PARALLEL);
1545 length = XVECLEN (orig, 0);
1546 tmps = alloca (sizeof (rtx) * length);
1548 /* Skip a NULL entry in first slot. */
1549 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1551 if (i)
1552 tmps[0] = 0;
1554 for (; i < length; i++)
1556 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1557 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1559 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1562 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
1565 /* A subroutine of emit_group_load. Arguments as for emit_group_load,
1566 except that values are placed in TMPS[i], and must later be moved
1567 into corresponding XEXP (XVECEXP (DST, 0, i), 0) element. */
1569 static void
1570 emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
1572 rtx src;
1573 int start, i;
1574 enum machine_mode m = GET_MODE (orig_src);
1576 gcc_assert (GET_CODE (dst) == PARALLEL);
1578 if (m != VOIDmode
1579 && !SCALAR_INT_MODE_P (m)
1580 && !MEM_P (orig_src)
1581 && GET_CODE (orig_src) != CONCAT)
1583 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
1584 if (imode == BLKmode)
1585 src = assign_stack_temp (GET_MODE (orig_src), ssize, 0);
1586 else
1587 src = gen_reg_rtx (imode);
1588 if (imode != BLKmode)
1589 src = gen_lowpart (GET_MODE (orig_src), src);
1590 emit_move_insn (src, orig_src);
1591 /* ...and back again. */
1592 if (imode != BLKmode)
1593 src = gen_lowpart (imode, src);
1594 emit_group_load_1 (tmps, dst, src, type, ssize);
1595 return;
1598 /* Check for a NULL entry, used to indicate that the parameter goes
1599 both on the stack and in registers. */
1600 if (XEXP (XVECEXP (dst, 0, 0), 0))
1601 start = 0;
1602 else
1603 start = 1;
1605 /* Process the pieces. */
1606 for (i = start; i < XVECLEN (dst, 0); i++)
1608 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1609 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1610 unsigned int bytelen = GET_MODE_SIZE (mode);
1611 int shift = 0;
1613 /* Handle trailing fragments that run over the size of the struct. */
1614 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1616 /* Arrange to shift the fragment to where it belongs.
1617 extract_bit_field loads to the lsb of the reg. */
1618 if (
1619 #ifdef BLOCK_REG_PADDING
1620 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1621 == (BYTES_BIG_ENDIAN ? upward : downward)
1622 #else
1623 BYTES_BIG_ENDIAN
1624 #endif
1626 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1627 bytelen = ssize - bytepos;
1628 gcc_assert (bytelen > 0);
1631 /* If we won't be loading directly from memory, protect the real source
1632 from strange tricks we might play; but make sure that the source can
1633 be loaded directly into the destination. */
1634 src = orig_src;
1635 if (!MEM_P (orig_src)
1636 && (!CONSTANT_P (orig_src)
1637 || (GET_MODE (orig_src) != mode
1638 && GET_MODE (orig_src) != VOIDmode)))
1640 if (GET_MODE (orig_src) == VOIDmode)
1641 src = gen_reg_rtx (mode);
1642 else
1643 src = gen_reg_rtx (GET_MODE (orig_src));
1645 emit_move_insn (src, orig_src);
1648 /* Optimize the access just a bit. */
1649 if (MEM_P (src)
1650 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1651 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
1652 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1653 && bytelen == GET_MODE_SIZE (mode))
1655 tmps[i] = gen_reg_rtx (mode);
1656 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
1658 else if (COMPLEX_MODE_P (mode)
1659 && GET_MODE (src) == mode
1660 && bytelen == GET_MODE_SIZE (mode))
1661 /* Let emit_move_complex do the bulk of the work. */
1662 tmps[i] = src;
1663 else if (GET_CODE (src) == CONCAT)
1665 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1666 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1668 if ((bytepos == 0 && bytelen == slen0)
1669 || (bytepos != 0 && bytepos + bytelen <= slen))
1671 /* The following assumes that the concatenated objects all
1672 have the same size. In this case, a simple calculation
1673 can be used to determine the object and the bit field
1674 to be extracted. */
1675 tmps[i] = XEXP (src, bytepos / slen0);
1676 if (! CONSTANT_P (tmps[i])
1677 && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
1678 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
1679 (bytepos % slen0) * BITS_PER_UNIT,
1680 1, NULL_RTX, mode, mode);
1682 else
1684 rtx mem;
1686 gcc_assert (!bytepos);
1687 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1688 emit_move_insn (mem, src);
1689 tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
1690 0, 1, NULL_RTX, mode, mode);
1693 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1694 SIMD register, which is currently broken. While we get GCC
1695 to emit proper RTL for these cases, let's dump to memory. */
1696 else if (VECTOR_MODE_P (GET_MODE (dst))
1697 && REG_P (src))
1699 int slen = GET_MODE_SIZE (GET_MODE (src));
1700 rtx mem;
1702 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1703 emit_move_insn (mem, src);
1704 tmps[i] = adjust_address (mem, mode, (int) bytepos);
1706 else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
1707 && XVECLEN (dst, 0) > 1)
1708 tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
1709 else if (CONSTANT_P (src)
1710 || (REG_P (src) && GET_MODE (src) == mode))
1711 tmps[i] = src;
1712 else
1713 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1714 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
1715 mode, mode);
1717 if (shift)
1718 tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
1719 build_int_cst (NULL_TREE, shift), tmps[i], 0);
1723 /* Emit code to move a block SRC of type TYPE to a block DST,
1724 where DST is non-consecutive registers represented by a PARALLEL.
1725 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
1726 if not known. */
1728 void
1729 emit_group_load (rtx dst, rtx src, tree type, int ssize)
1731 rtx *tmps;
1732 int i;
1734 tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));
1735 emit_group_load_1 (tmps, dst, src, type, ssize);
1737 /* Copy the extracted pieces into the proper (probable) hard regs. */
1738 for (i = 0; i < XVECLEN (dst, 0); i++)
1740 rtx d = XEXP (XVECEXP (dst, 0, i), 0);
1741 if (d == NULL)
1742 continue;
1743 emit_move_insn (d, tmps[i]);
1747 /* Similar, but load SRC into new pseudos in a format that looks like
1748 PARALLEL. This can later be fed to emit_group_move to get things
1749 in the right place. */
1752 emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
1754 rtvec vec;
1755 int i;
1757 vec = rtvec_alloc (XVECLEN (parallel, 0));
1758 emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);
1760 /* Convert the vector to look just like the original PARALLEL, except
1761 with the computed values. */
1762 for (i = 0; i < XVECLEN (parallel, 0); i++)
1764 rtx e = XVECEXP (parallel, 0, i);
1765 rtx d = XEXP (e, 0);
1767 if (d)
1769 d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
1770 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
1772 RTVEC_ELT (vec, i) = e;
1775 return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
1778 /* Emit code to move a block SRC to block DST, where SRC and DST are
1779 non-consecutive groups of registers, each represented by a PARALLEL. */
1781 void
1782 emit_group_move (rtx dst, rtx src)
1784 int i;
1786 gcc_assert (GET_CODE (src) == PARALLEL
1787 && GET_CODE (dst) == PARALLEL
1788 && XVECLEN (src, 0) == XVECLEN (dst, 0));
1790 /* Skip first entry if NULL. */
1791 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1792 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1793 XEXP (XVECEXP (src, 0, i), 0));
1796 /* Move a group of registers represented by a PARALLEL into pseudos. */
1799 emit_group_move_into_temps (rtx src)
1801 rtvec vec = rtvec_alloc (XVECLEN (src, 0));
1802 int i;
1804 for (i = 0; i < XVECLEN (src, 0); i++)
1806 rtx e = XVECEXP (src, 0, i);
1807 rtx d = XEXP (e, 0);
1809 if (d)
1810 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
1811 RTVEC_ELT (vec, i) = e;
1814 return gen_rtx_PARALLEL (GET_MODE (src), vec);
1817 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1818 where SRC is non-consecutive registers represented by a PARALLEL.
1819 SSIZE represents the total size of block ORIG_DST, or -1 if not
1820 known. */
1822 void
1823 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
1825 rtx *tmps, dst;
1826 int start, i;
1827 enum machine_mode m = GET_MODE (orig_dst);
1829 gcc_assert (GET_CODE (src) == PARALLEL);
1831 if (!SCALAR_INT_MODE_P (m)
1832 && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
1834 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
1835 if (imode == BLKmode)
1836 dst = assign_stack_temp (GET_MODE (orig_dst), ssize, 0);
1837 else
1838 dst = gen_reg_rtx (imode);
1839 emit_group_store (dst, src, type, ssize);
1840 if (imode != BLKmode)
1841 dst = gen_lowpart (GET_MODE (orig_dst), dst);
1842 emit_move_insn (orig_dst, dst);
1843 return;
1846 /* Check for a NULL entry, used to indicate that the parameter goes
1847 both on the stack and in registers. */
1848 if (XEXP (XVECEXP (src, 0, 0), 0))
1849 start = 0;
1850 else
1851 start = 1;
1853 tmps = alloca (sizeof (rtx) * XVECLEN (src, 0));
1855 /* Copy the (probable) hard regs into pseudos. */
1856 for (i = start; i < XVECLEN (src, 0); i++)
1858 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1859 tmps[i] = gen_reg_rtx (GET_MODE (reg));
1860 emit_move_insn (tmps[i], reg);
1863 /* If we won't be storing directly into memory, protect the real destination
1864 from strange tricks we might play. */
1865 dst = orig_dst;
1866 if (GET_CODE (dst) == PARALLEL)
1868 rtx temp;
1870 /* We can get a PARALLEL dst if there is a conditional expression in
1871 a return statement. In that case, the dst and src are the same,
1872 so no action is necessary. */
1873 if (rtx_equal_p (dst, src))
1874 return;
1876 /* It is unclear if we can ever reach here, but we may as well handle
1877 it. Allocate a temporary, and split this into a store/load to/from
1878 the temporary. */
1880 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
1881 emit_group_store (temp, src, type, ssize);
1882 emit_group_load (dst, temp, type, ssize);
1883 return;
1885 else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
1887 dst = gen_reg_rtx (GET_MODE (orig_dst));
1888 /* Make life a bit easier for combine. */
1889 emit_move_insn (dst, CONST0_RTX (GET_MODE (orig_dst)));
1892 /* Process the pieces. */
1893 for (i = start; i < XVECLEN (src, 0); i++)
1895 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
1896 enum machine_mode mode = GET_MODE (tmps[i]);
1897 unsigned int bytelen = GET_MODE_SIZE (mode);
1898 rtx dest = dst;
1900 /* Handle trailing fragments that run over the size of the struct. */
1901 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1903 /* store_bit_field always takes its value from the lsb.
1904 Move the fragment to the lsb if it's not already there. */
1905 if (
1906 #ifdef BLOCK_REG_PADDING
1907 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
1908 == (BYTES_BIG_ENDIAN ? upward : downward)
1909 #else
1910 BYTES_BIG_ENDIAN
1911 #endif
1914 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1915 tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
1916 build_int_cst (NULL_TREE, shift),
1917 tmps[i], 0);
1919 bytelen = ssize - bytepos;
1922 if (GET_CODE (dst) == CONCAT)
1924 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
1925 dest = XEXP (dst, 0);
1926 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
1928 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
1929 dest = XEXP (dst, 1);
1931 else
1933 gcc_assert (bytepos == 0 && XVECLEN (src, 0));
1934 dest = assign_stack_temp (GET_MODE (dest),
1935 GET_MODE_SIZE (GET_MODE (dest)), 0);
1936 emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
1937 tmps[i]);
1938 dst = dest;
1939 break;
1943 /* Optimize the access just a bit. */
1944 if (MEM_P (dest)
1945 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
1946 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
1947 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1948 && bytelen == GET_MODE_SIZE (mode))
1949 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
1950 else
1951 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
1952 mode, tmps[i]);
1955 /* Copy from the pseudo into the (probable) hard reg. */
1956 if (orig_dst != dst)
1957 emit_move_insn (orig_dst, dst);
1960 /* Generate code to copy a BLKmode object of TYPE out of a
1961 set of registers starting with SRCREG into TGTBLK. If TGTBLK
1962 is null, a stack temporary is created. TGTBLK is returned.
1964 The purpose of this routine is to handle functions that return
1965 BLKmode structures in registers. Some machines (the PA for example)
1966 want to return all small structures in registers regardless of the
1967 structure's alignment. */
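/* A worked example of the padding correction computed below (numbers
   are illustrative, assuming a 32-bit big-endian target that returns
   structures at the least significant end of the register): for a
   3-byte struct, the data occupies the low 24 bits of the register,
   so

       padding_correction = BITS_PER_WORD - 3 * BITS_PER_UNIT = 8

   bits of left padding are skipped when extracting from SRCREG.  */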
1970 copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
1972 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
1973 rtx src = NULL, dst = NULL;
1974 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
1975 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
1977 if (tgtblk == 0)
1979 tgtblk = assign_temp (build_qualified_type (type,
1980 (TYPE_QUALS (type)
1981 | TYPE_QUAL_CONST)),
1982 0, 1, 1);
1983 preserve_temp_slots (tgtblk);
1986 /* This code assumes srcreg is at least a full word. If it isn't, copy it
1987 into a new pseudo which is a full word. */
1989 if (GET_MODE (srcreg) != BLKmode
1990 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
1991 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
1993 /* If the structure doesn't take up a whole number of words, see whether
1994 SRCREG is padded on the left or on the right. If it's on the left,
1995 set PADDING_CORRECTION to the number of bits to skip.
1997 In most ABIs, the structure will be returned at the least significant end of
1998 the register, which translates to right padding on little-endian
1999 targets and left padding on big-endian targets. The opposite
2000 holds if the structure is returned at the most significant
2001 end of the register. */
2002 if (bytes % UNITS_PER_WORD != 0
2003 && (targetm.calls.return_in_msb (type)
2004 ? !BYTES_BIG_ENDIAN
2005 : BYTES_BIG_ENDIAN))
2006 padding_correction
2007 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2009 /* Copy the structure BITSIZE bits at a time.
2011 We could probably emit more efficient code for machines which do not use
2012 strict alignment, but it doesn't seem worth the effort at the current
2013 time. */
2014 for (bitpos = 0, xbitpos = padding_correction;
2015 bitpos < bytes * BITS_PER_UNIT;
2016 bitpos += bitsize, xbitpos += bitsize)
2018 /* We need a new source operand each time xbitpos is on a
2019 word boundary and when xbitpos == padding_correction
2020 (the first time through). */
2021 if (xbitpos % BITS_PER_WORD == 0
2022 || xbitpos == padding_correction)
2023 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2024 GET_MODE (srcreg));
2026 /* We need a new destination operand each time bitpos is on
2027 a word boundary. */
2028 if (bitpos % BITS_PER_WORD == 0)
2029 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2031 /* Use xbitpos for the source extraction (right justified) and
2032 bitpos for the destination store (left justified). */
2033 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2034 extract_bit_field (src, bitsize,
2035 xbitpos % BITS_PER_WORD, 1,
2036 NULL_RTX, word_mode, word_mode));
2039 return tgtblk;
2042 /* Add a USE expression for REG to the (possibly empty) list pointed
2043 to by CALL_FUSAGE. REG must denote a hard register. */
2045 void
2046 use_reg (rtx *call_fusage, rtx reg)
2048 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2050 *call_fusage
2051 = gen_rtx_EXPR_LIST (VOIDmode,
2052 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2055 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2056 starting at REGNO. All of these registers must be hard registers. */
2058 void
2059 use_regs (rtx *call_fusage, int regno, int nregs)
2061 int i;
2063 gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2065 for (i = 0; i < nregs; i++)
2066 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2069 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2070 PARALLEL REGS. This is for calls that pass values in multiple
2071 non-contiguous locations. The Irix 6 ABI has examples of this. */
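/* For example (hypothetical registers, purely illustrative), if an
   argument is passed as

       (parallel [(expr_list (reg:DF 32) (const_int 0))
                  (expr_list (reg:DI 4) (const_int 8))])

   then use_group_regs adds a USE of (reg:DF 32) and a USE of (reg:DI 4)
   to *CALL_FUSAGE, while NULL and MEM entries are simply skipped.  */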
2073 void
2074 use_group_regs (rtx *call_fusage, rtx regs)
2076 int i;
2078 for (i = 0; i < XVECLEN (regs, 0); i++)
2080 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2082 /* A NULL entry means the parameter goes both on the stack and in
2083 registers. This can also be a MEM for targets that pass values
2084 partially on the stack and partially in registers. */
2085 if (reg != 0 && REG_P (reg))
2086 use_reg (call_fusage, reg);
2091 /* Determine whether the LEN bytes generated by CONSTFUN can be
2092 stored to memory using several move instructions. CONSTFUNDATA is
2093 a pointer which will be passed as argument in every CONSTFUN call.
2094 ALIGN is maximum alignment we can assume. Return nonzero if a
2095 call to store_by_pieces should succeed. */
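/* A typical caller provides a callback that materializes the constant
   bytes on demand; callers such as the memset expander in builtins.c
   pass callbacks of roughly this shape (a sketch, names abbreviated):

       static rtx read_str (void *data, HOST_WIDE_INT offset,
                            enum machine_mode mode);

       if (can_store_by_pieces (len, read_str, (void *) str, align))
         store_by_pieces (dest_mem, len, read_str, (void *) str, align, 0);

   where READ_STR returns a CONST_INT (or CONST_DOUBLE) holding the
   MODE-sized chunk of the source data at OFFSET, so the by-pieces
   expansion is only attempted when this predicate says it will work.  */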
2098 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2099 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2100 void *constfundata, unsigned int align)
2102 unsigned HOST_WIDE_INT l;
2103 unsigned int max_size;
2104 HOST_WIDE_INT offset = 0;
2105 enum machine_mode mode, tmode;
2106 enum insn_code icode;
2107 int reverse;
2108 rtx cst;
2110 if (len == 0)
2111 return 1;
2113 if (! STORE_BY_PIECES_P (len, align))
2114 return 0;
2116 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2117 if (align >= GET_MODE_ALIGNMENT (tmode))
2118 align = GET_MODE_ALIGNMENT (tmode);
2119 else
2121 enum machine_mode xmode;
2123 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2124 tmode != VOIDmode;
2125 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2126 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2127 || SLOW_UNALIGNED_ACCESS (tmode, align))
2128 break;
2130 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2133 /* We would first store what we can in the largest integer mode, then go to
2134 successively smaller modes. */
2136 for (reverse = 0;
2137 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2138 reverse++)
2140 l = len;
2141 mode = VOIDmode;
2142 max_size = STORE_MAX_PIECES + 1;
2143 while (max_size > 1)
2145 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2146 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2147 if (GET_MODE_SIZE (tmode) < max_size)
2148 mode = tmode;
2150 if (mode == VOIDmode)
2151 break;
2153 icode = mov_optab->handlers[(int) mode].insn_code;
2154 if (icode != CODE_FOR_nothing
2155 && align >= GET_MODE_ALIGNMENT (mode))
2157 unsigned int size = GET_MODE_SIZE (mode);
2159 while (l >= size)
2161 if (reverse)
2162 offset -= size;
2164 cst = (*constfun) (constfundata, offset, mode);
2165 if (!LEGITIMATE_CONSTANT_P (cst))
2166 return 0;
2168 if (!reverse)
2169 offset += size;
2171 l -= size;
2175 max_size = GET_MODE_SIZE (mode);
2178 /* The code above should have handled everything. */
2179 gcc_assert (!l);
2182 return 1;
2185 /* Generate several move instructions to store LEN bytes generated by
2186 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2187 pointer which will be passed as argument in every CONSTFUN call.
2188 ALIGN is maximum alignment we can assume.
2189 If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
2190 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
2191 stpcpy. */
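/* In other words (illustrative use, not quoted from any caller):

       store_by_pieces (dest, len, constfun, data, align, 0)

   returns DEST itself, while ENDP == 1 returns the address just past
   the last byte written (mempcpy style) and ENDP == 2 returns the
   address of the last byte written (stpcpy style), which a caller
   expanding stpcpy can use directly as the call's return value.  */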
2194 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2195 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2196 void *constfundata, unsigned int align, int endp)
2198 struct store_by_pieces data;
2200 if (len == 0)
2202 gcc_assert (endp != 2);
2203 return to;
2206 gcc_assert (STORE_BY_PIECES_P (len, align));
2207 data.constfun = constfun;
2208 data.constfundata = constfundata;
2209 data.len = len;
2210 data.to = to;
2211 store_by_pieces_1 (&data, align);
2212 if (endp)
2214 rtx to1;
2216 gcc_assert (!data.reverse);
2217 if (data.autinc_to)
2219 if (endp == 2)
2221 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2222 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2223 else
2224 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2225 -1));
2227 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2228 data.offset);
2230 else
2232 if (endp == 2)
2233 --data.offset;
2234 to1 = adjust_address (data.to, QImode, data.offset);
2236 return to1;
2238 else
2239 return data.to;
2242 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2243 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2245 static void
2246 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2248 struct store_by_pieces data;
2250 if (len == 0)
2251 return;
2253 data.constfun = clear_by_pieces_1;
2254 data.constfundata = NULL;
2255 data.len = len;
2256 data.to = to;
2257 store_by_pieces_1 (&data, align);
2260 /* Callback routine for clear_by_pieces.
2261 Return const0_rtx unconditionally. */
2263 static rtx
2264 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2265 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2266 enum machine_mode mode ATTRIBUTE_UNUSED)
2268 return const0_rtx;
2271 /* Subroutine of clear_by_pieces and store_by_pieces.
2272 Generate several move instructions to store LEN bytes of block TO. (A MEM
2273 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2275 static void
2276 store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
2277 unsigned int align ATTRIBUTE_UNUSED)
2279 rtx to_addr = XEXP (data->to, 0);
2280 unsigned int max_size = STORE_MAX_PIECES + 1;
2281 enum machine_mode mode = VOIDmode, tmode;
2282 enum insn_code icode;
2284 data->offset = 0;
2285 data->to_addr = to_addr;
2286 data->autinc_to
2287 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2288 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2290 data->explicit_inc_to = 0;
2291 data->reverse
2292 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2293 if (data->reverse)
2294 data->offset = data->len;
2296 /* If storing requires more than two move insns,
2297 copy addresses to registers (to make displacements shorter)
2298 and use post-increment if available. */
2299 if (!data->autinc_to
2300 && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2302 /* Determine the main mode we'll be using. */
2303 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2304 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2305 if (GET_MODE_SIZE (tmode) < max_size)
2306 mode = tmode;
2308 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2310 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2311 data->autinc_to = 1;
2312 data->explicit_inc_to = -1;
2315 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2316 && ! data->autinc_to)
2318 data->to_addr = copy_addr_to_reg (to_addr);
2319 data->autinc_to = 1;
2320 data->explicit_inc_to = 1;
2323 if ( !data->autinc_to && CONSTANT_P (to_addr))
2324 data->to_addr = copy_addr_to_reg (to_addr);
2327 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2328 if (align >= GET_MODE_ALIGNMENT (tmode))
2329 align = GET_MODE_ALIGNMENT (tmode);
2330 else
2332 enum machine_mode xmode;
2334 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2335 tmode != VOIDmode;
2336 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2337 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2338 || SLOW_UNALIGNED_ACCESS (tmode, align))
2339 break;
2341 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2344 /* First store what we can in the largest integer mode, then go to
2345 successively smaller modes. */
2347 while (max_size > 1)
2349 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2350 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2351 if (GET_MODE_SIZE (tmode) < max_size)
2352 mode = tmode;
2354 if (mode == VOIDmode)
2355 break;
2357 icode = mov_optab->handlers[(int) mode].insn_code;
2358 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2359 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2361 max_size = GET_MODE_SIZE (mode);
2364 /* The code above should have handled everything. */
2365 gcc_assert (!data->len);
2368 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2369 with move instructions for mode MODE. GENFUN is the gen_... function
2370 to make a move insn for that mode. DATA has all the other info. */
2372 static void
2373 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2374 struct store_by_pieces *data)
2376 unsigned int size = GET_MODE_SIZE (mode);
2377 rtx to1, cst;
2379 while (data->len >= size)
2381 if (data->reverse)
2382 data->offset -= size;
2384 if (data->autinc_to)
2385 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2386 data->offset);
2387 else
2388 to1 = adjust_address (data->to, mode, data->offset);
2390 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2391 emit_insn (gen_add2_insn (data->to_addr,
2392 GEN_INT (-(HOST_WIDE_INT) size)));
2394 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2395 emit_insn ((*genfun) (to1, cst));
2397 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2398 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2400 if (! data->reverse)
2401 data->offset += size;
2403 data->len -= size;
2407 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2408 its length in bytes. */
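/* Typical use (a sketch; OBJECT is any BLKmode MEM the caller already
   has):

       clear_storage (object, GEN_INT (32), BLOCK_OP_NORMAL);

   which zeroes 32 bytes using whichever of clear_by_pieces, a clrmem
   pattern, or a memset libcall the code below decides is appropriate
   for the target.  */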
2411 clear_storage (rtx object, rtx size, enum block_op_methods method)
2413 enum machine_mode mode = GET_MODE (object);
2414 unsigned int align;
2416 gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2418 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2419 just move a zero. Otherwise, do this a piece at a time. */
2420 if (mode != BLKmode
2421 && GET_CODE (size) == CONST_INT
2422 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
2424 rtx zero = CONST0_RTX (mode);
2425 if (zero != NULL)
2427 emit_move_insn (object, zero);
2428 return NULL;
2431 if (COMPLEX_MODE_P (mode))
2433 zero = CONST0_RTX (GET_MODE_INNER (mode));
2434 if (zero != NULL)
2436 write_complex_part (object, zero, 0);
2437 write_complex_part (object, zero, 1);
2438 return NULL;
2443 if (size == const0_rtx)
2444 return NULL;
2446 align = MEM_ALIGN (object);
2448 if (GET_CODE (size) == CONST_INT
2449 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2450 clear_by_pieces (object, INTVAL (size), align);
2451 else if (clear_storage_via_clrmem (object, size, align))
2453 else
2454 return clear_storage_via_libcall (object, size,
2455 method == BLOCK_OP_TAILCALL);
2457 return NULL;
2460 /* A subroutine of clear_storage. Expand a clrmem pattern;
2461 return true if successful. */
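/* The patterns tried here are the "clrmemM" expanders a target may
   provide in its machine description, e.g. a clrmemsi expander whose
   operands, as used below, are the destination MEM, the length, and
   the alignment (the exact patterns available are the target's
   business; this is only a sketch of the interface assumed here).  */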
2463 static bool
2464 clear_storage_via_clrmem (rtx object, rtx size, unsigned int align)
2466 /* Try the most limited insn first, because there's no point
2467 including more than one in the machine description unless
2468 the more limited one has some advantage. */
2470 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2471 enum machine_mode mode;
2473 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2474 mode = GET_MODE_WIDER_MODE (mode))
2476 enum insn_code code = clrmem_optab[(int) mode];
2477 insn_operand_predicate_fn pred;
2479 if (code != CODE_FOR_nothing
2480 /* We don't need MODE to be narrower than
2481 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2482 the mode mask, as it is returned by the macro, it will
2483 definitely be less than the actual mode mask. */
2484 && ((GET_CODE (size) == CONST_INT
2485 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2486 <= (GET_MODE_MASK (mode) >> 1)))
2487 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2488 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2489 || (*pred) (object, BLKmode))
2490 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2491 || (*pred) (opalign, VOIDmode)))
2493 rtx op1;
2494 rtx last = get_last_insn ();
2495 rtx pat;
2497 op1 = convert_to_mode (mode, size, 1);
2498 pred = insn_data[(int) code].operand[1].predicate;
2499 if (pred != 0 && ! (*pred) (op1, mode))
2500 op1 = copy_to_mode_reg (mode, op1);
2502 pat = GEN_FCN ((int) code) (object, op1, opalign);
2503 if (pat)
2505 emit_insn (pat);
2506 return true;
2508 else
2509 delete_insns_since (last);
2513 return false;
2516 /* A subroutine of clear_storage. Expand a call to memset.
2517 Return the return value of memset, 0 otherwise. */
2519 static rtx
2520 clear_storage_via_libcall (rtx object, rtx size, bool tailcall)
2522 tree call_expr, arg_list, fn, object_tree, size_tree;
2523 enum machine_mode size_mode;
2524 rtx retval;
2526 /* Emit code to copy OBJECT and SIZE into new pseudos. We can then
2527 wrap those pseudos in trees and use them later. */
2529 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2531 size_mode = TYPE_MODE (sizetype);
2532 size = convert_to_mode (size_mode, size, 1);
2533 size = copy_to_mode_reg (size_mode, size);
2535 /* It is incorrect to use the libcall calling conventions to call
2536 memset in this context. This could be a user call to memset and
2537 the user may wish to examine the return value from memset. For
2538 targets where libcalls and normal calls have different conventions
2539 for returning pointers, we could end up generating incorrect code. */
2541 object_tree = make_tree (ptr_type_node, object);
2542 size_tree = make_tree (sizetype, size);
2544 fn = clear_storage_libcall_fn (true);
2545 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
2546 arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
2547 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
2549 /* Now we have to build up the CALL_EXPR itself. */
2550 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2551 call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2552 call_expr, arg_list, NULL_TREE);
2553 CALL_EXPR_TAILCALL (call_expr) = tailcall;
2555 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2557 return retval;
2560 /* A subroutine of clear_storage_via_libcall. Create the tree node
2561 for the function we use for block clears. The first time FOR_CALL
2562 is true, we call assemble_external. */
2564 static GTY(()) tree block_clear_fn;
2566 void
2567 init_block_clear_fn (const char *asmspec)
2569 if (!block_clear_fn)
2571 tree fn, args;
2573 fn = get_identifier ("memset");
2574 args = build_function_type_list (ptr_type_node, ptr_type_node,
2575 integer_type_node, sizetype,
2576 NULL_TREE);
2578 fn = build_decl (FUNCTION_DECL, fn, args);
2579 DECL_EXTERNAL (fn) = 1;
2580 TREE_PUBLIC (fn) = 1;
2581 DECL_ARTIFICIAL (fn) = 1;
2582 TREE_NOTHROW (fn) = 1;
2583 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
2584 DECL_VISIBILITY_SPECIFIED (fn) = 1;
2586 block_clear_fn = fn;
2589 if (asmspec)
2590 set_user_assembler_name (block_clear_fn, asmspec);
2593 static tree
2594 clear_storage_libcall_fn (int for_call)
2596 static bool emitted_extern;
2598 if (!block_clear_fn)
2599 init_block_clear_fn (NULL);
2601 if (for_call && !emitted_extern)
2603 emitted_extern = true;
2604 make_decl_rtl (block_clear_fn);
2605 assemble_external (block_clear_fn);
2608 return block_clear_fn;
2611 /* Write to one of the components of the complex value CPLX. Write VAL to
2612 the real part if IMAG_P is false, and the imaginary part if it's true. */
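/* For example, clear_storage above zeroes a complex object by calling
   this twice, once with IMAG_P false for the real part and once with
   IMAG_P true for the imaginary part:

       write_complex_part (object, zero, 0);
       write_complex_part (object, zero, 1);

   If CPLX is a CONCAT the part is just one of its two halves;
   otherwise the code below chooses between a subreg-style move and
   store_bit_field.  */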
2614 static void
2615 write_complex_part (rtx cplx, rtx val, bool imag_p)
2617 enum machine_mode cmode;
2618 enum machine_mode imode;
2619 unsigned ibitsize;
2621 if (GET_CODE (cplx) == CONCAT)
2623 emit_move_insn (XEXP (cplx, imag_p), val);
2624 return;
2627 cmode = GET_MODE (cplx);
2628 imode = GET_MODE_INNER (cmode);
2629 ibitsize = GET_MODE_BITSIZE (imode);
2631 /* If the sub-object is at least word sized, then we know that subregging
2632 will work. This special case is important, since store_bit_field
2633 wants to operate on integer modes, and there's rarely an OImode to
2634 correspond to TCmode. */
2635 if (ibitsize >= BITS_PER_WORD
2636 /* For hard regs we have exact predicates. Assume we can split
2637 the original object if it spans an even number of hard regs.
2638 This special case is important for SCmode on 64-bit platforms
2639 where the natural size of floating-point regs is 32-bit. */
2640 || (GET_CODE (cplx) == REG
2641 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2642 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0)
2643 /* For MEMs we always try to make a "subreg", that is to adjust
2644 the MEM, because store_bit_field may generate overly
2645 convoluted RTL for sub-word fields. */
2646 || MEM_P (cplx))
2648 rtx part = simplify_gen_subreg (imode, cplx, cmode,
2649 imag_p ? GET_MODE_SIZE (imode) : 0);
2650 if (part)
2652 emit_move_insn (part, val);
2653 return;
2655 else
2656 /* simplify_gen_subreg may fail for sub-word MEMs. */
2657 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2660 store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, imode, val);
2663 /* Extract one of the components of the complex value CPLX. Extract the
2664 real part if IMAG_P is false, and the imaginary part if it's true. */
2666 static rtx
2667 read_complex_part (rtx cplx, bool imag_p)
2669 enum machine_mode cmode, imode;
2670 unsigned ibitsize;
2672 if (GET_CODE (cplx) == CONCAT)
2673 return XEXP (cplx, imag_p);
2675 cmode = GET_MODE (cplx);
2676 imode = GET_MODE_INNER (cmode);
2677 ibitsize = GET_MODE_BITSIZE (imode);
2679 /* Special case reads from complex constants that got spilled to memory. */
2680 if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
2682 tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
2683 if (decl && TREE_CODE (decl) == COMPLEX_CST)
2685 tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
2686 if (CONSTANT_CLASS_P (part))
2687 return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
2691 /* If the sub-object is at least word sized, then we know that subregging
2692 will work. This special case is important, since extract_bit_field
2693 wants to operate on integer modes, and there's rarely an OImode to
2694 correspond to TCmode. */
2695 if (ibitsize >= BITS_PER_WORD
2696 /* For hard regs we have exact predicates. Assume we can split
2697 the original object if it spans an even number of hard regs.
2698 This special case is important for SCmode on 64-bit platforms
2699 where the natural size of floating-point regs is 32-bit. */
2700 || (GET_CODE (cplx) == REG
2701 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2702 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0)
2703 /* For MEMs we always try to make a "subreg", that is to adjust
2704 the MEM, because extract_bit_field may generate overly
2705 convoluted RTL for sub-word fields. */
2706 || MEM_P (cplx))
2708 rtx ret = simplify_gen_subreg (imode, cplx, cmode,
2709 imag_p ? GET_MODE_SIZE (imode) : 0);
2710 if (ret)
2711 return ret;
2712 else
2713 /* simplify_gen_subreg may fail for sub-word MEMs. */
2714 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2717 return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
2718 true, NULL_RTX, imode, imode);
2721 /* A subroutine of emit_move_insn_1. Yet another lowpart generator.
2722 NEW_MODE and OLD_MODE are the same size. Return NULL if X cannot be
2723 represented in NEW_MODE. If FORCE is true, this will never happen, as
2724 we'll force-create a SUBREG if needed. */
2726 static rtx
2727 emit_move_change_mode (enum machine_mode new_mode,
2728 enum machine_mode old_mode, rtx x, bool force)
2730 rtx ret;
2732 if (reload_in_progress && MEM_P (x))
2734 /* We can't use gen_lowpart here because it may call change_address
2735 which is not appropriate if we were called when a reload was in
2736 progress. We don't have to worry about changing the address since
2737 the size in bytes is supposed to be the same. Copy the MEM to
2738 change the mode and move any substitutions from the old MEM to
2739 the new one. */
2741 ret = adjust_address_nv (x, new_mode, 0);
2742 copy_replacements (x, ret);
2744 else
2746 /* Note that we do want simplify_subreg's behavior of validating
2747 that the new mode is ok for a hard register. If we were to use
2748 simplify_gen_subreg, we would create the subreg, but would
2749 probably run into the target not being able to implement it. */
2750 /* Except, of course, when FORCE is true, when this is exactly what
2751 we want. Which is needed for CCmodes on some targets. */
2752 if (force)
2753 ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
2754 else
2755 ret = simplify_subreg (new_mode, x, old_mode, 0);
2758 return ret;
2761 /* A subroutine of emit_move_insn_1. Generate a move from Y into X using
2762 an integer mode of the same size as MODE. Returns the instruction
2763 emitted, or NULL if such a move could not be generated. */
2765 static rtx
2766 emit_move_via_integer (enum machine_mode mode, rtx x, rtx y)
2768 enum machine_mode imode;
2769 enum insn_code code;
2771 /* There must exist a mode of the exact size we require. */
2772 imode = int_mode_for_mode (mode);
2773 if (imode == BLKmode)
2774 return NULL_RTX;
2776 /* The target must support moves in this mode. */
2777 code = mov_optab->handlers[imode].insn_code;
2778 if (code == CODE_FOR_nothing)
2779 return NULL_RTX;
2781 x = emit_move_change_mode (imode, mode, x, false);
2782 if (x == NULL_RTX)
2783 return NULL_RTX;
2784 y = emit_move_change_mode (imode, mode, y, false);
2785 if (y == NULL_RTX)
2786 return NULL_RTX;
2787 return emit_insn (GEN_FCN (code) (x, y));
2790 /* A subroutine of emit_move_insn_1. X is a push_operand in MODE.
2791 Return an equivalent MEM that does not use an auto-increment. */
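/* A worked example (assuming a target where the stack grows downward
   and PUSH_ROUNDING is the identity): for an 8-byte mode and X of the
   form (mem (pre_dec (reg sp))), ADJUST is 8, negated to -8 for the
   PRE_DEC case, so the code below emits an explicit sp = sp - 8 and
   returns (mem (reg sp)), i.e. the same slot without the side
   effect.  */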
2793 static rtx
2794 emit_move_resolve_push (enum machine_mode mode, rtx x)
2796 enum rtx_code code = GET_CODE (XEXP (x, 0));
2797 HOST_WIDE_INT adjust;
2798 rtx temp;
2800 adjust = GET_MODE_SIZE (mode);
2801 #ifdef PUSH_ROUNDING
2802 adjust = PUSH_ROUNDING (adjust);
2803 #endif
2804 if (code == PRE_DEC || code == POST_DEC)
2805 adjust = -adjust;
2807 /* Do not use anti_adjust_stack, since we don't want to update
2808 stack_pointer_delta. */
2809 temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
2810 GEN_INT (adjust), stack_pointer_rtx,
2811 0, OPTAB_LIB_WIDEN);
2812 if (temp != stack_pointer_rtx)
2813 emit_move_insn (stack_pointer_rtx, temp);
2815 switch (code)
2817 case PRE_INC:
2818 case PRE_DEC:
2819 temp = stack_pointer_rtx;
2820 break;
2821 case POST_INC:
2822 temp = plus_constant (stack_pointer_rtx, -GET_MODE_SIZE (mode));
2823 break;
2824 case POST_DEC:
2825 temp = plus_constant (stack_pointer_rtx, GET_MODE_SIZE (mode));
2826 break;
2827 default:
2828 gcc_unreachable ();
2831 return replace_equiv_address (x, temp);
2834 /* A subroutine of emit_move_complex. Generate a move from Y into X.
2835 X is known to satisfy push_operand, and MODE is known to be complex.
2836 Returns the last instruction emitted. */
2838 static rtx
2839 emit_move_complex_push (enum machine_mode mode, rtx x, rtx y)
2841 enum machine_mode submode = GET_MODE_INNER (mode);
2842 bool imag_first;
2844 #ifdef PUSH_ROUNDING
2845 unsigned int submodesize = GET_MODE_SIZE (submode);
2847 /* If we are pushing onto the stack, but the piece size is not one the
2848 machine can push exactly, we need to use move instructions. */
2849 if (PUSH_ROUNDING (submodesize) != submodesize)
2851 x = emit_move_resolve_push (mode, x);
2852 return emit_move_insn (x, y);
2854 #endif
2856 /* Note that the real part always precedes the imag part in memory
2857 regardless of machine's endianness. */
2858 switch (GET_CODE (XEXP (x, 0)))
2860 case PRE_DEC:
2861 case POST_DEC:
2862 imag_first = true;
2863 break;
2864 case PRE_INC:
2865 case POST_INC:
2866 imag_first = false;
2867 break;
2868 default:
2869 gcc_unreachable ();
2872 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2873 read_complex_part (y, imag_first));
2874 return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2875 read_complex_part (y, !imag_first));
2878 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
2879 MODE is known to be complex. Returns the last instruction emitted. */
2881 static rtx
2882 emit_move_complex (enum machine_mode mode, rtx x, rtx y)
2884 bool try_int;
2886 /* Need to take special care for pushes, to maintain proper ordering
2887 of the data, and possibly extra padding. */
2888 if (push_operand (x, mode))
2889 return emit_move_complex_push (mode, x, y);
2891 /* See if we can coerce the target into moving both values at once. */
2893 /* Move floating point as parts. */
2894 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
2895 && mov_optab->handlers[GET_MODE_INNER (mode)].insn_code != CODE_FOR_nothing)
2896 try_int = false;
2897 /* Not possible if the values are inherently not adjacent. */
2898 else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
2899 try_int = false;
2900 /* Is possible if both are registers (or subregs of registers). */
2901 else if (register_operand (x, mode) && register_operand (y, mode))
2902 try_int = true;
2903 /* If one of the operands is a memory, and alignment constraints
2904 are friendly enough, we may be able to do combined memory operations.
2905 We do not attempt this if Y is a constant because that combination is
2906 usually better with the by-parts thing below. */
2907 else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
2908 && (!STRICT_ALIGNMENT
2909 || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
2910 try_int = true;
2911 else
2912 try_int = false;
2914 if (try_int)
2916 rtx ret;
2918 /* For memory to memory moves, optimal behavior can be had with the
2919 existing block move logic. */
2920 if (MEM_P (x) && MEM_P (y))
2922 emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
2923 BLOCK_OP_NO_LIBCALL);
2924 return get_last_insn ();
2927 ret = emit_move_via_integer (mode, x, y);
2928 if (ret)
2929 return ret;
2932 /* Show the output dies here. This is necessary for SUBREGs
2933 of pseudos since we cannot track their lifetimes correctly;
2934 hard regs shouldn't appear here except as return values. */
2935 if (!reload_completed && !reload_in_progress
2936 && REG_P (x) && !reg_overlap_mentioned_p (x, y))
2937 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2939 write_complex_part (x, read_complex_part (y, false), false);
2940 write_complex_part (x, read_complex_part (y, true), true);
2941 return get_last_insn ();
2944 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
2945 MODE is known to be MODE_CC. Returns the last instruction emitted. */
2947 static rtx
2948 emit_move_ccmode (enum machine_mode mode, rtx x, rtx y)
2950 rtx ret;
2952 /* Assume all MODE_CC modes are equivalent; if we have movcc, use it. */
2953 if (mode != CCmode)
2955 enum insn_code code = mov_optab->handlers[CCmode].insn_code;
2956 if (code != CODE_FOR_nothing)
2958 x = emit_move_change_mode (CCmode, mode, x, true);
2959 y = emit_move_change_mode (CCmode, mode, y, true);
2960 return emit_insn (GEN_FCN (code) (x, y));
2964 /* Otherwise, find the MODE_INT mode of the same width. */
2965 ret = emit_move_via_integer (mode, x, y);
2966 gcc_assert (ret != NULL);
2967 return ret;
2970 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
2971 MODE is any multi-word or full-word mode that lacks a move_insn
2972 pattern. Note that you will get better code if you define such
2973 patterns, even if they must turn into multiple assembler instructions. */
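/* For instance (a sketch, assuming a 32-bit target with no movdi
   pattern), a DImode register-to-memory move is broken below into two
   SImode word moves via operand_subword, with a CLOBBER of the
   destination emitted first when some word of the destination had to
   be accessed as a SUBREG of a pseudo, so that liveness information
   stays correct.  */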
2975 static rtx
2976 emit_move_multi_word (enum machine_mode mode, rtx x, rtx y)
2978 rtx last_insn = 0;
2979 rtx seq, inner;
2980 bool need_clobber;
2981 int i;
2983 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
2985 /* If X is a push on the stack, do the push now and replace
2986 X with a reference to the stack pointer. */
2987 if (push_operand (x, mode))
2988 x = emit_move_resolve_push (mode, x);
2990 /* If we are in reload, see if either operand is a MEM whose address
2991 is scheduled for replacement. */
2992 if (reload_in_progress && MEM_P (x)
2993 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
2994 x = replace_equiv_address_nv (x, inner);
2995 if (reload_in_progress && MEM_P (y)
2996 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
2997 y = replace_equiv_address_nv (y, inner);
2999 start_sequence ();
3001 need_clobber = false;
3002 for (i = 0;
3003 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3004 i++)
3006 rtx xpart = operand_subword (x, i, 1, mode);
3007 rtx ypart = operand_subword (y, i, 1, mode);
3009 /* If we can't get a part of Y, put Y into memory if it is a
3010 constant. Otherwise, force it into a register. If we still
3011 can't get a part of Y, abort. */
3012 if (ypart == 0 && CONSTANT_P (y))
3014 y = force_const_mem (mode, y);
3015 ypart = operand_subword (y, i, 1, mode);
3017 else if (ypart == 0)
3018 ypart = operand_subword_force (y, i, mode);
3020 gcc_assert (xpart && ypart);
3022 need_clobber |= (GET_CODE (xpart) == SUBREG);
3024 last_insn = emit_move_insn (xpart, ypart);
3027 seq = get_insns ();
3028 end_sequence ();
3030 /* Show the output dies here. This is necessary for SUBREGs
3031 of pseudos since we cannot track their lifetimes correctly;
3032 hard regs shouldn't appear here except as return values.
3033 We never want to emit such a clobber after reload. */
3034 if (x != y
3035 && ! (reload_in_progress || reload_completed)
3036 && need_clobber != 0)
3037 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3039 emit_insn (seq);
3041 return last_insn;
3044 /* Low level part of emit_move_insn.
3045 Called just like emit_move_insn, but assumes X and Y
3046 are basically valid. */
3049 emit_move_insn_1 (rtx x, rtx y)
3051 enum machine_mode mode = GET_MODE (x);
3052 enum insn_code code;
3054 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3056 code = mov_optab->handlers[mode].insn_code;
3057 if (code != CODE_FOR_nothing)
3058 return emit_insn (GEN_FCN (code) (x, y));
3060 /* Expand complex moves by moving real part and imag part. */
3061 if (COMPLEX_MODE_P (mode))
3062 return emit_move_complex (mode, x, y);
3064 if (GET_MODE_CLASS (mode) == MODE_CC)
3065 return emit_move_ccmode (mode, x, y);
3067 /* Try using a move pattern for the corresponding integer mode. This is
3068 only safe when simplify_subreg can convert MODE constants into integer
3069 constants. At present, it can only do this reliably if the value
3070 fits within a HOST_WIDE_INT. */
3071 if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3073 rtx ret = emit_move_via_integer (mode, x, y);
3074 if (ret)
3075 return ret;
3078 return emit_move_multi_word (mode, x, y);
3081 /* Generate code to copy Y into X.
3082 Both Y and X must have the same mode, except that
3083 Y can be a constant with VOIDmode.
3084 This mode cannot be BLKmode; use emit_block_move for that.
3086 Return the last instruction emitted. */
3089 emit_move_insn (rtx x, rtx y)
3091 enum machine_mode mode = GET_MODE (x);
3092 rtx y_cst = NULL_RTX;
3093 rtx last_insn, set;
3095 gcc_assert (mode != BLKmode
3096 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3098 if (CONSTANT_P (y))
3100 if (optimize
3101 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3102 && (last_insn = compress_float_constant (x, y)))
3103 return last_insn;
3105 y_cst = y;
3107 if (!LEGITIMATE_CONSTANT_P (y))
3109 y = force_const_mem (mode, y);
3111 /* If the target's cannot_force_const_mem prevented the spill,
3112 assume that the target's move expanders will also take care
3113 of the non-legitimate constant. */
3114 if (!y)
3115 y = y_cst;
3119 /* If X or Y are memory references, verify that their addresses are valid
3120 for the machine. */
3121 if (MEM_P (x)
3122 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
3123 && ! push_operand (x, GET_MODE (x)))
3124 || (flag_force_addr
3125 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
3126 x = validize_mem (x);
3128 if (MEM_P (y)
3129 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
3130 || (flag_force_addr
3131 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
3132 y = validize_mem (y);
3134 gcc_assert (mode != BLKmode);
3136 last_insn = emit_move_insn_1 (x, y);
3138 if (y_cst && REG_P (x)
3139 && (set = single_set (last_insn)) != NULL_RTX
3140 && SET_DEST (set) == x
3141 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3142 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
3144 return last_insn;
3147 /* If Y is representable exactly in a narrower mode, and the target can
3148 perform the extension directly from constant or memory, then emit the
3149 move as an extension. */
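/* A worked example (illustrative): if X is a DFmode register and Y is
   the DFmode constant 1.0, then 1.0 is exactly representable in
   SFmode, so when the target has an extendsfdf2 pattern the loop below
   emits a single SFmode-to-DFmode extension from the SFmode constant
   (or from its memory copy) instead of loading the wider constant
   directly.  */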
3151 static rtx
3152 compress_float_constant (rtx x, rtx y)
3154 enum machine_mode dstmode = GET_MODE (x);
3155 enum machine_mode orig_srcmode = GET_MODE (y);
3156 enum machine_mode srcmode;
3157 REAL_VALUE_TYPE r;
3159 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3161 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3162 srcmode != orig_srcmode;
3163 srcmode = GET_MODE_WIDER_MODE (srcmode))
3165 enum insn_code ic;
3166 rtx trunc_y, last_insn;
3168 /* Skip if the target can't extend this way. */
3169 ic = can_extend_p (dstmode, srcmode, 0);
3170 if (ic == CODE_FOR_nothing)
3171 continue;
3173 /* Skip if the narrowed value isn't exact. */
3174 if (! exact_real_truncate (srcmode, &r))
3175 continue;
3177 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3179 if (LEGITIMATE_CONSTANT_P (trunc_y))
3181 /* Skip if the target needs extra instructions to perform
3182 the extension. */
3183 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3184 continue;
3186 else if (float_extend_from_mem[dstmode][srcmode])
3187 trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
3188 else
3189 continue;
3191 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3192 last_insn = get_last_insn ();
3194 if (REG_P (x))
3195 set_unique_reg_note (last_insn, REG_EQUAL, y);
3197 return last_insn;
3200 return NULL_RTX;
3203 /* Pushing data onto the stack. */
3205 /* Push a block of length SIZE (perhaps variable)
3206 and return an rtx to address the beginning of the block.
3207 The value may be virtual_outgoing_args_rtx.
3209 EXTRA is the number of bytes of padding to push in addition to SIZE.
3210 BELOW nonzero means this padding comes at low addresses;
3211 otherwise, the padding comes at high addresses. */
3214 push_block (rtx size, int extra, int below)
3216 rtx temp;
3218 size = convert_modes (Pmode, ptr_mode, size, 1);
3219 if (CONSTANT_P (size))
3220 anti_adjust_stack (plus_constant (size, extra));
3221 else if (REG_P (size) && extra == 0)
3222 anti_adjust_stack (size);
3223 else
3225 temp = copy_to_mode_reg (Pmode, size);
3226 if (extra != 0)
3227 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3228 temp, 0, OPTAB_LIB_WIDEN);
3229 anti_adjust_stack (temp);
3232 #ifndef STACK_GROWS_DOWNWARD
3233 if (0)
3234 #else
3235 if (1)
3236 #endif
3238 temp = virtual_outgoing_args_rtx;
3239 if (extra != 0 && below)
3240 temp = plus_constant (temp, extra);
3242 else
3244 if (GET_CODE (size) == CONST_INT)
3245 temp = plus_constant (virtual_outgoing_args_rtx,
3246 -INTVAL (size) - (below ? 0 : extra));
3247 else if (extra != 0 && !below)
3248 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3249 negate_rtx (Pmode, plus_constant (size, extra)));
3250 else
3251 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3252 negate_rtx (Pmode, size));
3255 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3258 #ifdef PUSH_ROUNDING
3260 /* Emit single push insn. */
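/* A worked example of the padding case below (numbers are illustrative,
   assuming a downward-growing stack where PUSH_ROUNDING rounds to 4
   bytes): pushing an HImode value gives rounded_size == 4 and
   padding_size == 2, so when the argument is padded downward the stack
   pointer is adjusted by 4 explicitly and the value is then stored at
   a small offset from the new stack pointer (sp + 2 here when
   STACK_PUSH_CODE is PRE_DEC) rather than through a plain push.  */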
3262 static void
3263 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3265 rtx dest_addr;
3266 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3267 rtx dest;
3268 enum insn_code icode;
3269 insn_operand_predicate_fn pred;
3271 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3272 /* If there is a push pattern, use it. Otherwise try the old way of
3273 throwing a MEM representing the push operation at the move expander. */
3274 icode = push_optab->handlers[(int) mode].insn_code;
3275 if (icode != CODE_FOR_nothing)
3277 if (((pred = insn_data[(int) icode].operand[0].predicate)
3278 && !((*pred) (x, mode))))
3279 x = force_reg (mode, x);
3280 emit_insn (GEN_FCN (icode) (x));
3281 return;
3283 if (GET_MODE_SIZE (mode) == rounded_size)
3284 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3285 /* If we are to pad downward, adjust the stack pointer first and
3286 then store X into the stack location using an offset. This is
3287 because emit_move_insn does not know how to pad; it does not have
3288 access to type. */
3289 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3291 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3292 HOST_WIDE_INT offset;
3294 emit_move_insn (stack_pointer_rtx,
3295 expand_binop (Pmode,
3296 #ifdef STACK_GROWS_DOWNWARD
3297 sub_optab,
3298 #else
3299 add_optab,
3300 #endif
3301 stack_pointer_rtx,
3302 GEN_INT (rounded_size),
3303 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3305 offset = (HOST_WIDE_INT) padding_size;
3306 #ifdef STACK_GROWS_DOWNWARD
3307 if (STACK_PUSH_CODE == POST_DEC)
3308 /* We have already decremented the stack pointer, so get the
3309 previous value. */
3310 offset += (HOST_WIDE_INT) rounded_size;
3311 #else
3312 if (STACK_PUSH_CODE == POST_INC)
3313 /* We have already incremented the stack pointer, so get the
3314 previous value. */
3315 offset -= (HOST_WIDE_INT) rounded_size;
3316 #endif
3317 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3319 else
3321 #ifdef STACK_GROWS_DOWNWARD
3322 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3323 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3324 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3325 #else
3326 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3327 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3328 GEN_INT (rounded_size));
3329 #endif
3330 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3333 dest = gen_rtx_MEM (mode, dest_addr);
3335 if (type != 0)
3337 set_mem_attributes (dest, type, 1);
3339 if (flag_optimize_sibling_calls)
3340 /* Function incoming arguments may overlap with sibling call
3341 outgoing arguments and we cannot allow reordering of reads
3342 from function arguments with stores to outgoing arguments
3343 of sibling calls. */
3344 set_mem_alias_set (dest, 0);
3346 emit_move_insn (dest, x);
3348 #endif
3350 /* Generate code to push X onto the stack, assuming it has mode MODE and
3351 type TYPE.
3352 MODE is redundant except when X is a CONST_INT (since they don't
3353 carry mode info).
3354 SIZE is an rtx for the size of data to be copied (in bytes),
3355 needed only if X is BLKmode.
3357 ALIGN (in bits) is maximum alignment we can assume.
3359 If PARTIAL and REG are both nonzero, then copy that many of the first
3360 bytes of X into registers starting with REG, and push the rest of X.
3361 The amount of space pushed is decreased by PARTIAL bytes.
3362 REG must be a hard register in this case.
3363 If REG is zero but PARTIAL is not, take all other actions for an
3364 argument partially in registers, but do not actually load any
3365 registers.
3367 EXTRA is the amount in bytes of extra space to leave next to this arg.
3368 This is ignored if an argument block has already been allocated.
3370 On a machine that lacks real push insns, ARGS_ADDR is the address of
3371 the bottom of the argument block for this call. We use indexing off there
3372 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3373 argument block has not been preallocated.
3375 ARGS_SO_FAR is the size of args previously pushed for this call.
3377 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3378 for arguments passed in registers. If nonzero, it will be the number
3379 of bytes required. */
3381 void
3382 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3383 unsigned int align, int partial, rtx reg, int extra,
3384 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3385 rtx alignment_pad)
3387 rtx xinner;
3388 enum direction stack_direction
3389 #ifdef STACK_GROWS_DOWNWARD
3390 = downward;
3391 #else
3392 = upward;
3393 #endif
3395 /* Decide where to pad the argument: `downward' for below,
3396 `upward' for above, or `none' for don't pad it.
3397 Default is below for small data on big-endian machines; else above. */
3398 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3400 /* Invert direction if stack is post-decrement.
3401 FIXME: why? */
3402 if (STACK_PUSH_CODE == POST_DEC)
3403 if (where_pad != none)
3404 where_pad = (where_pad == downward ? upward : downward);
3406 xinner = x;
3408 if (mode == BLKmode)
3410 /* Copy a block into the stack, entirely or partially. */
3412 rtx temp;
3413 int used;
3414 int offset;
3415 int skip;
3417 offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3418 used = partial - offset;
3420 gcc_assert (size);
3422 /* USED is now the # of bytes we need not copy to the stack
3423 because registers will take care of them. */
3425 if (partial != 0)
3426 xinner = adjust_address (xinner, BLKmode, used);
3428 /* If the partial register-part of the arg counts in its stack size,
3429 skip the part of stack space corresponding to the registers.
3430 Otherwise, start copying to the beginning of the stack space,
3431 by setting SKIP to 0. */
3432 skip = (reg_parm_stack_space == 0) ? 0 : used;
3434 #ifdef PUSH_ROUNDING
3435 /* Do it with several push insns if that doesn't take lots of insns
3436 and if there is no difficulty with push insns that skip bytes
3437 on the stack for alignment purposes. */
3438 if (args_addr == 0
3439 && PUSH_ARGS
3440 && GET_CODE (size) == CONST_INT
3441 && skip == 0
3442 && MEM_ALIGN (xinner) >= align
3443 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3444 /* Here we avoid the case of a structure whose weak alignment
3445 forces many pushes of a small amount of data,
3446 and such small pushes do rounding that causes trouble. */
3447 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3448 || align >= BIGGEST_ALIGNMENT
3449 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3450 == (align / BITS_PER_UNIT)))
3451 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3453 /* Push padding now if padding above and stack grows down,
3454 or if padding below and stack grows up.
3455 But if space already allocated, this has already been done. */
3456 if (extra && args_addr == 0
3457 && where_pad != none && where_pad != stack_direction)
3458 anti_adjust_stack (GEN_INT (extra));
3460 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3462 else
3463 #endif /* PUSH_ROUNDING */
3465 rtx target;
3467 /* Otherwise make space on the stack and copy the data
3468 to the address of that space. */
3470 /* Deduct words put into registers from the size we must copy. */
3471 if (partial != 0)
3473 if (GET_CODE (size) == CONST_INT)
3474 size = GEN_INT (INTVAL (size) - used);
3475 else
3476 size = expand_binop (GET_MODE (size), sub_optab, size,
3477 GEN_INT (used), NULL_RTX, 0,
3478 OPTAB_LIB_WIDEN);
3481 /* Get the address of the stack space.
3482 In this case, we do not deal with EXTRA separately.
3483 A single stack adjust will do. */
3484 if (! args_addr)
3486 temp = push_block (size, extra, where_pad == downward);
3487 extra = 0;
3489 else if (GET_CODE (args_so_far) == CONST_INT)
3490 temp = memory_address (BLKmode,
3491 plus_constant (args_addr,
3492 skip + INTVAL (args_so_far)));
3493 else
3494 temp = memory_address (BLKmode,
3495 plus_constant (gen_rtx_PLUS (Pmode,
3496 args_addr,
3497 args_so_far),
3498 skip));
3500 if (!ACCUMULATE_OUTGOING_ARGS)
3502 /* If the source is referenced relative to the stack pointer,
3503 copy it to another register to stabilize it. We do not need
3504 to do this if we know that we won't be changing sp. */
3506 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3507 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3508 temp = copy_to_reg (temp);
3511 target = gen_rtx_MEM (BLKmode, temp);
3513 /* We do *not* set_mem_attributes here, because incoming arguments
3514 may overlap with sibling call outgoing arguments and we cannot
3515 allow reordering of reads from function arguments with stores
3516 to outgoing arguments of sibling calls. We do, however, want
3517 to record the alignment of the stack slot. */
3518 /* ALIGN may well be better aligned than TYPE, e.g. due to
3519 PARM_BOUNDARY. Assume the caller isn't lying. */
3520 set_mem_align (target, align);
3522 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3525 else if (partial > 0)
3527 /* Scalar partly in registers. */
3529 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3530 int i;
3531 int not_stack;
3532 /* # bytes of start of argument
3533 that we must make space for but need not store. */
3534 int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3535 int args_offset = INTVAL (args_so_far);
3536 int skip;
3538 /* Push padding now if padding above and stack grows down,
3539 or if padding below and stack grows up.
3540 But if space already allocated, this has already been done. */
3541 if (extra && args_addr == 0
3542 && where_pad != none && where_pad != stack_direction)
3543 anti_adjust_stack (GEN_INT (extra));
3545 /* If we make space by pushing it, we might as well push
3546 the real data. Otherwise, we can leave OFFSET nonzero
3547 and leave the space uninitialized. */
3548 if (args_addr == 0)
3549 offset = 0;
3551 /* Now NOT_STACK gets the number of words that we don't need to
3552 allocate on the stack. */
3553 not_stack = (partial - offset) / UNITS_PER_WORD;
3555 /* If the partial register-part of the arg counts in its stack size,
3556 skip the part of stack space corresponding to the registers.
3557 Otherwise, start copying to the beginning of the stack space,
3558 by setting SKIP to 0. */
3559 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3561 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3562 x = validize_mem (force_const_mem (mode, x));
3564 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3565 SUBREGs of such registers are not allowed. */
3566 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3567 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3568 x = copy_to_reg (x);
3570 /* Loop over all the words allocated on the stack for this arg. */
3571 /* We can do it by words, because any scalar bigger than a word
3572 has a size that is a multiple of a word. */
3573 #ifndef PUSH_ARGS_REVERSED
3574 for (i = not_stack; i < size; i++)
3575 #else
3576 for (i = size - 1; i >= not_stack; i--)
3577 #endif
3578 if (i >= not_stack + offset)
3579 emit_push_insn (operand_subword_force (x, i, mode),
3580 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3581 0, args_addr,
3582 GEN_INT (args_offset + ((i - not_stack + skip)
3583 * UNITS_PER_WORD)),
3584 reg_parm_stack_space, alignment_pad);
3586 else
3588 rtx addr;
3589 rtx dest;
3591 /* Push padding now if padding above and stack grows down,
3592 or if padding below and stack grows up.
3593 But if space already allocated, this has already been done. */
3594 if (extra && args_addr == 0
3595 && where_pad != none && where_pad != stack_direction)
3596 anti_adjust_stack (GEN_INT (extra));
3598 #ifdef PUSH_ROUNDING
3599 if (args_addr == 0 && PUSH_ARGS)
3600 emit_single_push_insn (mode, x, type);
3601 else
3602 #endif
3604 if (GET_CODE (args_so_far) == CONST_INT)
3605 addr
3606 = memory_address (mode,
3607 plus_constant (args_addr,
3608 INTVAL (args_so_far)));
3609 else
3610 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3611 args_so_far));
3612 dest = gen_rtx_MEM (mode, addr);
3614 /* We do *not* set_mem_attributes here, because incoming arguments
3615 may overlap with sibling call outgoing arguments and we cannot
3616 allow reordering of reads from function arguments with stores
3617 to outgoing arguments of sibling calls. We do, however, want
3618 to record the alignment of the stack slot. */
3619 /* ALIGN may well be better aligned than TYPE, e.g. due to
3620 PARM_BOUNDARY. Assume the caller isn't lying. */
3621 set_mem_align (dest, align);
3623 emit_move_insn (dest, x);
3627 /* If part should go in registers, copy that part
3628 into the appropriate registers. Do this now, at the end,
3629 since mem-to-mem copies above may do function calls. */
3630 if (partial > 0 && reg != 0)
3632 /* Handle calls that pass values in multiple non-contiguous locations.
3633 The Irix 6 ABI has examples of this. */
3634 if (GET_CODE (reg) == PARALLEL)
3635 emit_group_load (reg, x, type, -1);
3636 else
3638 gcc_assert (partial % UNITS_PER_WORD == 0);
3639 move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode);
3643 if (extra && args_addr == 0 && where_pad == stack_direction)
3644 anti_adjust_stack (GEN_INT (extra));
3646 if (alignment_pad && args_addr == 0)
3647 anti_adjust_stack (alignment_pad);
3650 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3651 operations. */
3653 static rtx
3654 get_subtarget (rtx x)
3656 return (optimize
3657 || x == 0
3658 /* Only registers can be subtargets. */
3659 || !REG_P (x)
3660 /* Don't use hard regs to avoid extending their life. */
3661 || REGNO (x) < FIRST_PSEUDO_REGISTER
3662 ? 0 : x);
3665 /* A subroutine of expand_assignment. Optimize FIELD op= VAL, where
3666 FIELD is a bitfield. Returns true if the optimization was successful,
3667 and there's nothing else to do. */
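/* The only cases handled below are additions and subtractions into
   either the topmost bitfield of the word, where no masking is needed,
   or a 1-bit field, where the addition degenerates to an xor of the
   shifted constant.  As an illustrative C example (field layout
   assumed little-endian):

       struct { unsigned lo : 1; unsigned hi : 31; } s;
       s.hi += 1;     a single add on the containing word
       s.lo += 1;     becomes an xor with a shifted 1

   Anything else falls through and the function returns false.  */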
3669 static bool
3670 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
3671 unsigned HOST_WIDE_INT bitpos,
3672 enum machine_mode mode1, rtx str_rtx,
3673 tree to, tree src)
3675 enum machine_mode str_mode = GET_MODE (str_rtx);
3676 unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
3677 tree op0, op1;
3678 rtx value, result;
3679 optab binop;
3681 if (mode1 != VOIDmode
3682 || bitsize >= BITS_PER_WORD
3683 || str_bitsize > BITS_PER_WORD
3684 || TREE_SIDE_EFFECTS (to)
3685 || TREE_THIS_VOLATILE (to))
3686 return false;
3688 STRIP_NOPS (src);
3689 if (!BINARY_CLASS_P (src)
3690 || TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
3691 return false;
3693 op0 = TREE_OPERAND (src, 0);
3694 op1 = TREE_OPERAND (src, 1);
3695 STRIP_NOPS (op0);
3697 if (!operand_equal_p (to, op0, 0))
3698 return false;
3700 if (MEM_P (str_rtx))
3702 unsigned HOST_WIDE_INT offset1;
3704 if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
3705 str_mode = word_mode;
3706 str_mode = get_best_mode (bitsize, bitpos,
3707 MEM_ALIGN (str_rtx), str_mode, 0);
3708 if (str_mode == VOIDmode)
3709 return false;
3710 str_bitsize = GET_MODE_BITSIZE (str_mode);
3712 offset1 = bitpos;
3713 bitpos %= str_bitsize;
3714 offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
3715 str_rtx = adjust_address (str_rtx, str_mode, offset1);
3717 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
3718 return false;
3720 /* If the bit field covers the whole REG/MEM, store_field
3721 will likely generate better code. */
3722 if (bitsize >= str_bitsize)
3723 return false;
3725 /* We can't handle fields split across multiple entities. */
3726 if (bitpos + bitsize > str_bitsize)
3727 return false;
3729 if (BYTES_BIG_ENDIAN)
3730 bitpos = str_bitsize - bitpos - bitsize;
3732 switch (TREE_CODE (src))
3734 case PLUS_EXPR:
3735 case MINUS_EXPR:
3736 /* For now, just optimize the case of the topmost bitfield
3737 where we don't need to do any masking and also
3738 1-bit bitfields where xor can be used.
3739 We might win by one instruction for the other bitfields
3740 too if insv/extv instructions aren't used, so that
3741 can be added later. */
3742 if (bitpos + bitsize != str_bitsize
3743 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
3744 break;
3746 value = expand_expr (op1, NULL_RTX, str_mode, 0);
3747 value = convert_modes (str_mode,
3748 TYPE_MODE (TREE_TYPE (op1)), value,
3749 TYPE_UNSIGNED (TREE_TYPE (op1)));
3751 /* We may be accessing data outside the field, which means
3752 we can alias adjacent data. */
3753 if (MEM_P (str_rtx))
3755 str_rtx = shallow_copy_rtx (str_rtx);
3756 set_mem_alias_set (str_rtx, 0);
3757 set_mem_expr (str_rtx, 0);
3760 binop = TREE_CODE (src) == PLUS_EXPR ? add_optab : sub_optab;
3761 if (bitsize == 1 && bitpos + bitsize != str_bitsize)
3763 value = expand_and (str_mode, value, const1_rtx, NULL);
3764 binop = xor_optab;
3766 value = expand_shift (LSHIFT_EXPR, str_mode, value,
3767 build_int_cst (NULL_TREE, bitpos),
3768 NULL_RTX, 1);
3769 result = expand_binop (str_mode, binop, str_rtx,
3770 value, str_rtx, 1, OPTAB_WIDEN);
3771 if (result != str_rtx)
3772 emit_move_insn (str_rtx, result);
3773 return true;
3775 default:
3776 break;
3779 return false;
3783 /* Expand an assignment that stores the value of FROM into TO. */
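/* For instance, "x.f = y" where F is a bit-field is handled by the
   handled_component_p branch below via get_inner_reference and
   store_field, while a scalar call such as "x = foo ()" takes the
   CALL_EXPR branch, which expands the call before touching the
   left-hand side.  */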
3785 void
3786 expand_assignment (tree to, tree from)
3788 rtx to_rtx = 0;
3789 rtx result;
3791 /* Don't crash if the lhs of the assignment was erroneous. */
3793 if (TREE_CODE (to) == ERROR_MARK)
3795 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3796 return;
3799 /* Assignment of a structure component needs special treatment
3800 if the structure component's rtx is not simply a MEM.
3801 Assignment of an array element at a constant index, and assignment of
3802 an array element in an unaligned packed structure field, has the same
3803 problem. */
3804 if (handled_component_p (to)
3805 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
3807 enum machine_mode mode1;
3808 HOST_WIDE_INT bitsize, bitpos;
3809 rtx orig_to_rtx;
3810 tree offset;
3811 int unsignedp;
3812 int volatilep = 0;
3813 tree tem;
3815 push_temp_slots ();
3816 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3817 &unsignedp, &volatilep, true);
3819 /* If we are going to use store_bit_field and extract_bit_field,
3820 make sure to_rtx will be safe for multiple use. */
3822 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
3824 if (offset != 0)
3826 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
3828 gcc_assert (MEM_P (to_rtx));
3830 #ifdef POINTERS_EXTEND_UNSIGNED
3831 if (GET_MODE (offset_rtx) != Pmode)
3832 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
3833 #else
3834 if (GET_MODE (offset_rtx) != ptr_mode)
3835 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3836 #endif
3838 /* A constant address in TO_RTX can have VOIDmode, we must not try
3839 to call force_reg for that case. Avoid that case. */
3840 if (MEM_P (to_rtx)
3841 && GET_MODE (to_rtx) == BLKmode
3842 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3843 && bitsize > 0
3844 && (bitpos % bitsize) == 0
3845 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3846 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
3848 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
3849 bitpos = 0;
3852 to_rtx = offset_address (to_rtx, offset_rtx,
3853 highest_pow2_factor_for_target (to,
3854 offset));
3857 /* Handle expand_expr of a complex value returning a CONCAT. */
3858 if (GET_CODE (to_rtx) == CONCAT)
3860 if (TREE_CODE (TREE_TYPE (from)) == COMPLEX_TYPE)
3862 gcc_assert (bitpos == 0);
3863 result = store_expr (from, to_rtx, false);
3865 else
3867 gcc_assert (bitpos == 0 || bitpos == GET_MODE_BITSIZE (mode1));
3868 result = store_expr (from, XEXP (to_rtx, bitpos != 0), false);
3871 else
3873 if (MEM_P (to_rtx))
3875 /* If the field is at offset zero, we could have been given the
3876 DECL_RTX of the parent struct. Don't munge it. */
3877 to_rtx = shallow_copy_rtx (to_rtx);
3879 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
3881 /* Deal with volatile and readonly fields. The former is only
3882 done for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
3883 if (volatilep)
3884 MEM_VOLATILE_P (to_rtx) = 1;
3885 if (component_uses_parent_alias_set (to))
3886 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
3889 if (optimize_bitfield_assignment_op (bitsize, bitpos, mode1,
3890 to_rtx, to, from))
3891 result = NULL;
3892 else
3893 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3894 TREE_TYPE (tem), get_alias_set (to));
3897 if (result)
3898 preserve_temp_slots (result);
3899 free_temp_slots ();
3900 pop_temp_slots ();
3901 return;
3904 /* If the rhs is a function call and its value is not an aggregate,
3905 call the function before we start to compute the lhs.
3906 This is needed for correct code for cases such as
3907 val = setjmp (buf) on machines where reference to val
3908 requires loading up part of an address in a separate insn.
3910 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3911 since it might be a promoted variable where the zero- or sign- extension
3912 needs to be done. Handling this in the normal way is safe because no
3913 computation is done before the call. */
3914 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
3915 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3916 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3917 && REG_P (DECL_RTL (to))))
3919 rtx value;
3921 push_temp_slots ();
3922 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3923 if (to_rtx == 0)
3924 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3926 /* Handle calls that return values in multiple non-contiguous locations.
3927 The Irix 6 ABI has examples of this. */
3928 if (GET_CODE (to_rtx) == PARALLEL)
3929 emit_group_load (to_rtx, value, TREE_TYPE (from),
3930 int_size_in_bytes (TREE_TYPE (from)));
3931 else if (GET_MODE (to_rtx) == BLKmode)
3932 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
3933 else
3935 if (POINTER_TYPE_P (TREE_TYPE (to)))
3936 value = convert_memory_address (GET_MODE (to_rtx), value);
3937 emit_move_insn (to_rtx, value);
3939 preserve_temp_slots (to_rtx);
3940 free_temp_slots ();
3941 pop_temp_slots ();
3942 return;
3945 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3946 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3948 if (to_rtx == 0)
3949 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3951 /* Don't move directly into a return register. */
3952 if (TREE_CODE (to) == RESULT_DECL
3953 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
3955 rtx temp;
3957 push_temp_slots ();
3958 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3960 if (GET_CODE (to_rtx) == PARALLEL)
3961 emit_group_load (to_rtx, temp, TREE_TYPE (from),
3962 int_size_in_bytes (TREE_TYPE (from)));
3963 else
3964 emit_move_insn (to_rtx, temp);
3966 preserve_temp_slots (to_rtx);
3967 free_temp_slots ();
3968 pop_temp_slots ();
3969 return;
3972 /* In case we are returning the contents of an object which overlaps
3973 the place the value is being stored, use a safe function when copying
3974 a value through a pointer into a structure value return block. */
3975 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3976 && current_function_returns_struct
3977 && !current_function_returns_pcc_struct)
3979 rtx from_rtx, size;
3981 push_temp_slots ();
3982 size = expr_size (from);
3983 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
3985 emit_library_call (memmove_libfunc, LCT_NORMAL,
3986 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3987 XEXP (from_rtx, 0), Pmode,
3988 convert_to_mode (TYPE_MODE (sizetype),
3989 size, TYPE_UNSIGNED (sizetype)),
3990 TYPE_MODE (sizetype));
3992 preserve_temp_slots (to_rtx);
3993 free_temp_slots ();
3994 pop_temp_slots ();
3995 return;
3998 /* Compute FROM and store the value in the rtx we got. */
4000 push_temp_slots ();
4001 result = store_expr (from, to_rtx, 0);
4002 preserve_temp_slots (result);
4003 free_temp_slots ();
4004 pop_temp_slots ();
4005 return;
4008 /* Generate code for computing expression EXP,
4009 and storing the value into TARGET.
4011 If the mode is BLKmode then we may return TARGET itself.
4012 It turns out that in BLKmode it doesn't cause a problem,
4013 because C has no operators that could combine two different
4014 assignments into the same BLKmode object with different values
4015 with no sequence point. Will other languages need this to
4016 be more thorough?
4018 If CALL_PARAM_P is nonzero, this is a store into a call param on the
4019 stack, and block moves may need to be treated specially. */
4021 rtx
4022 store_expr (tree exp, rtx target, int call_param_p)
4024 rtx temp;
4025 rtx alt_rtl = NULL_RTX;
4026 int dont_return_target = 0;
4028 if (VOID_TYPE_P (TREE_TYPE (exp)))
4030 /* C++ can generate ?: expressions with a throw expression in one
4031 branch and an rvalue in the other. Here, we resolve attempts to
4032 store the throw expression's nonexistent result. */
4033 gcc_assert (!call_param_p);
4034 expand_expr (exp, const0_rtx, VOIDmode, 0);
4035 return NULL_RTX;
4037 if (TREE_CODE (exp) == COMPOUND_EXPR)
4039 /* Perform first part of compound expression, then assign from second
4040 part. */
4041 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4042 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4043 return store_expr (TREE_OPERAND (exp, 1), target, call_param_p);
4045 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4047 /* For conditional expression, get safe form of the target. Then
4048 test the condition, doing the appropriate assignment on either
4049 side. This avoids the creation of unnecessary temporaries.
4050 For non-BLKmode, it is more efficient not to do this. */
4052 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4054 do_pending_stack_adjust ();
4055 NO_DEFER_POP;
4056 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4057 store_expr (TREE_OPERAND (exp, 1), target, call_param_p);
4058 emit_jump_insn (gen_jump (lab2));
4059 emit_barrier ();
4060 emit_label (lab1);
4061 store_expr (TREE_OPERAND (exp, 2), target, call_param_p);
4062 emit_label (lab2);
4063 OK_DEFER_POP;
4065 return NULL_RTX;
4067 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4068 /* If this is a scalar in a register that is stored in a wider mode
4069 than the declared mode, compute the result into its declared mode
4070 and then convert to the wider mode. Our value is the computed
4071 expression. */
4073 rtx inner_target = 0;
4075 /* We can do the conversion inside EXP, which will often result
4076 in some optimizations. Do the conversion in two steps: first
4077 change the signedness, if needed, then the extend. But don't
4078 do this if the type of EXP is a subtype of something else
4079 since then the conversion might involve more than just
4080 converting modes. */
4081 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
4082 && TREE_TYPE (TREE_TYPE (exp)) == 0
4083 && (!lang_hooks.reduce_bit_field_operations
4084 || (GET_MODE_PRECISION (GET_MODE (target))
4085 == TYPE_PRECISION (TREE_TYPE (exp)))))
4087 if (TYPE_UNSIGNED (TREE_TYPE (exp))
4088 != SUBREG_PROMOTED_UNSIGNED_P (target))
4089 exp = convert
4090 (lang_hooks.types.signed_or_unsigned_type
4091 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
4093 exp = convert (lang_hooks.types.type_for_mode
4094 (GET_MODE (SUBREG_REG (target)),
4095 SUBREG_PROMOTED_UNSIGNED_P (target)),
4096 exp);
4098 inner_target = SUBREG_REG (target);
4101 temp = expand_expr (exp, inner_target, VOIDmode,
4102 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4104 /* If TEMP is a VOIDmode constant, use convert_modes to make
4105 sure that we properly convert it. */
4106 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4108 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4109 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4110 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4111 GET_MODE (target), temp,
4112 SUBREG_PROMOTED_UNSIGNED_P (target));
4115 convert_move (SUBREG_REG (target), temp,
4116 SUBREG_PROMOTED_UNSIGNED_P (target));
4118 return NULL_RTX;
4120 else
4122 temp = expand_expr_real (exp, target, GET_MODE (target),
4123 (call_param_p
4124 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
4125 &alt_rtl);
4126 /* Return TARGET if it's a specified hardware register.
4127 If TARGET is a volatile mem ref, either return TARGET
4128 or return a reg copied *from* TARGET; ANSI requires this.
4130 Otherwise, if TEMP is not TARGET, return TEMP
4131 if it is constant (for efficiency),
4132 or if we really want the correct value. */
4133 if (!(target && REG_P (target)
4134 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4135 && !(MEM_P (target) && MEM_VOLATILE_P (target))
4136 && ! rtx_equal_p (temp, target)
4137 && CONSTANT_P (temp))
4138 dont_return_target = 1;
4141 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4142 the same as that of TARGET, adjust the constant. This is needed, for
4143 example, in case it is a CONST_DOUBLE and we want only a word-sized
4144 value. */
4145 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4146 && TREE_CODE (exp) != ERROR_MARK
4147 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4148 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4149 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
4151 /* If value was not generated in the target, store it there.
4152 Convert the value to TARGET's type first if necessary and emit the
4153 pending incrementations that have been queued when expanding EXP.
4154 Note that we cannot emit the whole queue blindly because this will
4155 effectively disable the POST_INC optimization later.
4157 If TEMP and TARGET compare equal according to rtx_equal_p, but
4158 one or both of them are volatile memory refs, we have to distinguish
4159 two cases:
4160 - expand_expr has used TARGET. In this case, we must not generate
4161 another copy. This can be detected by TARGET being equal according
4162 to == .
4163 - expand_expr has not used TARGET - that means that the source just
4164 happens to have the same RTX form. Since temp will have been created
4165 by expand_expr, it will compare unequal according to == .
4166 We must generate a copy in this case, to reach the correct number
4167 of volatile memory references. */
4169 if ((! rtx_equal_p (temp, target)
4170 || (temp != target && (side_effects_p (temp)
4171 || side_effects_p (target))))
4172 && TREE_CODE (exp) != ERROR_MARK
4173 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4174 but TARGET is not valid memory reference, TEMP will differ
4175 from TARGET although it is really the same location. */
4176 && !(alt_rtl && rtx_equal_p (alt_rtl, target))
4177 /* If there's nothing to copy, don't bother. Don't call expr_size
4178 unless necessary, because some front ends' (e.g. C++'s) expr_size hook
4179 aborts on objects that are not supposed to be bit-copied or
4180 bit-initialized. */
4181 && expr_size (exp) != const0_rtx)
4183 if (GET_MODE (temp) != GET_MODE (target)
4184 && GET_MODE (temp) != VOIDmode)
4186 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
4187 if (dont_return_target)
4189 /* In this case, we will return TEMP,
4190 so make sure it has the proper mode.
4191 But don't forget to store the value into TARGET. */
4192 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4193 emit_move_insn (target, temp);
4195 else
4196 convert_move (target, temp, unsignedp);
4199 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4201 /* Handle copying a string constant into an array. The string
4202 constant may be shorter than the array. So copy just the string's
4203 actual length, and clear the rest. First get the size of the data
4204 type of the string, which is actually the size of the target. */
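/* E.g. for "char buf[8]" initialized from the string constant "ab",
   the three bytes of the constant (including its terminating NUL) are
   block-copied and the remaining five bytes of BUF are cleared.  */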
4205 rtx size = expr_size (exp);
4207 if (GET_CODE (size) == CONST_INT
4208 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4209 emit_block_move (target, temp, size,
4210 (call_param_p
4211 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4212 else
4214 /* Compute the size of the data to copy from the string. */
4215 tree copy_size
4216 = size_binop (MIN_EXPR,
4217 make_tree (sizetype, size),
4218 size_int (TREE_STRING_LENGTH (exp)));
4219 rtx copy_size_rtx
4220 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4221 (call_param_p
4222 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4223 rtx label = 0;
4225 /* Copy that much. */
4226 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4227 TYPE_UNSIGNED (sizetype));
4228 emit_block_move (target, temp, copy_size_rtx,
4229 (call_param_p
4230 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4232 /* Figure out how much is left in TARGET that we have to clear.
4233 Do all calculations in ptr_mode. */
4234 if (GET_CODE (copy_size_rtx) == CONST_INT)
4236 size = plus_constant (size, -INTVAL (copy_size_rtx));
4237 target = adjust_address (target, BLKmode,
4238 INTVAL (copy_size_rtx));
4240 else
4242 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4243 copy_size_rtx, NULL_RTX, 0,
4244 OPTAB_LIB_WIDEN);
4246 #ifdef POINTERS_EXTEND_UNSIGNED
4247 if (GET_MODE (copy_size_rtx) != Pmode)
4248 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4249 TYPE_UNSIGNED (sizetype));
4250 #endif
4252 target = offset_address (target, copy_size_rtx,
4253 highest_pow2_factor (copy_size));
4254 label = gen_label_rtx ();
4255 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4256 GET_MODE (size), 0, label);
4259 if (size != const0_rtx)
4260 clear_storage (target, size, BLOCK_OP_NORMAL);
4262 if (label)
4263 emit_label (label);
4266 /* Handle calls that return values in multiple non-contiguous locations.
4267 The Irix 6 ABI has examples of this. */
4268 else if (GET_CODE (target) == PARALLEL)
4269 emit_group_load (target, temp, TREE_TYPE (exp),
4270 int_size_in_bytes (TREE_TYPE (exp)));
4271 else if (GET_MODE (temp) == BLKmode)
4272 emit_block_move (target, temp, expr_size (exp),
4273 (call_param_p
4274 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4275 else
4277 temp = force_operand (temp, target);
4278 if (temp != target)
4279 emit_move_insn (target, temp);
4283 return NULL_RTX;
4286 /* Examine CTOR to discover:
4287 * how many scalar fields are set to nonzero values,
4288 and place it in *P_NZ_ELTS;
4289 * how many scalar fields are set to non-constant values,
4290 and place it in *P_NC_ELTS; and
4291 * how many scalar fields in total are in CTOR,
4292 and place it in *P_ELT_COUNT.
4293 * if a type is a union, and the initializer from the constructor
4294 is not the largest element in the union, then set *p_must_clear. */
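/* For example, the array initializer { 0, 7, 0, 3 } yields
   *P_ELT_COUNT == 4, *P_NZ_ELTS == 2 and *P_NC_ELTS == 0; an element
   whose value is not a valid constant initializer bumps *P_NC_ELTS
   as well.  */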
4296 static void
4297 categorize_ctor_elements_1 (tree ctor, HOST_WIDE_INT *p_nz_elts,
4298 HOST_WIDE_INT *p_nc_elts,
4299 HOST_WIDE_INT *p_elt_count,
4300 bool *p_must_clear)
4302 HOST_WIDE_INT nz_elts, nc_elts, elt_count;
4303 tree list;
4305 nz_elts = 0;
4306 nc_elts = 0;
4307 elt_count = 0;
4309 for (list = CONSTRUCTOR_ELTS (ctor); list; list = TREE_CHAIN (list))
4311 tree value = TREE_VALUE (list);
4312 tree purpose = TREE_PURPOSE (list);
4313 HOST_WIDE_INT mult;
4315 mult = 1;
4316 if (TREE_CODE (purpose) == RANGE_EXPR)
4318 tree lo_index = TREE_OPERAND (purpose, 0);
4319 tree hi_index = TREE_OPERAND (purpose, 1);
4321 if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
4322 mult = (tree_low_cst (hi_index, 1)
4323 - tree_low_cst (lo_index, 1) + 1);
4326 switch (TREE_CODE (value))
4328 case CONSTRUCTOR:
4330 HOST_WIDE_INT nz = 0, nc = 0, ic = 0;
4331 categorize_ctor_elements_1 (value, &nz, &nc, &ic, p_must_clear);
4332 nz_elts += mult * nz;
4333 nc_elts += mult * nc;
4334 elt_count += mult * ic;
4336 break;
4338 case INTEGER_CST:
4339 case REAL_CST:
4340 if (!initializer_zerop (value))
4341 nz_elts += mult;
4342 elt_count += mult;
4343 break;
4345 case STRING_CST:
4346 nz_elts += mult * TREE_STRING_LENGTH (value);
4347 elt_count += mult * TREE_STRING_LENGTH (value);
4348 break;
4350 case COMPLEX_CST:
4351 if (!initializer_zerop (TREE_REALPART (value)))
4352 nz_elts += mult;
4353 if (!initializer_zerop (TREE_IMAGPART (value)))
4354 nz_elts += mult;
4355 elt_count += mult;
4356 break;
4358 case VECTOR_CST:
4360 tree v;
4361 for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
4363 if (!initializer_zerop (TREE_VALUE (v)))
4364 nz_elts += mult;
4365 elt_count += mult;
4368 break;
4370 default:
4371 nz_elts += mult;
4372 elt_count += mult;
4373 if (!initializer_constant_valid_p (value, TREE_TYPE (value)))
4374 nc_elts += mult;
4375 break;
4379 if (!*p_must_clear
4380 && (TREE_CODE (TREE_TYPE (ctor)) == UNION_TYPE
4381 || TREE_CODE (TREE_TYPE (ctor)) == QUAL_UNION_TYPE))
4383 tree init_sub_type;
4384 bool clear_this = true;
4386 list = CONSTRUCTOR_ELTS (ctor);
4387 if (list)
4389 /* We don't expect more than one element of the union to be
4390 initialized. Not sure what we should do otherwise... */
4391 gcc_assert (TREE_CHAIN (list) == NULL);
4393 init_sub_type = TREE_TYPE (TREE_VALUE (list));
4395 /* ??? We could look at each element of the union, and find the
4396 largest element. Which would avoid comparing the size of the
4397 initialized element against any tail padding in the union.
4398 Doesn't seem worth the effort... */
4399 if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (ctor)),
4400 TYPE_SIZE (init_sub_type)) == 1)
4402 /* And now we have to find out if the element itself is fully
4403 constructed. E.g. for union { struct { int a, b; } s; } u
4404 = { .s = { .a = 1 } }. */
4405 if (elt_count == count_type_elements (init_sub_type))
4406 clear_this = false;
4410 *p_must_clear = clear_this;
4413 *p_nz_elts += nz_elts;
4414 *p_nc_elts += nc_elts;
4415 *p_elt_count += elt_count;
4418 void
4419 categorize_ctor_elements (tree ctor, HOST_WIDE_INT *p_nz_elts,
4420 HOST_WIDE_INT *p_nc_elts,
4421 HOST_WIDE_INT *p_elt_count,
4422 bool *p_must_clear)
4424 *p_nz_elts = 0;
4425 *p_nc_elts = 0;
4426 *p_elt_count = 0;
4427 *p_must_clear = false;
4428 categorize_ctor_elements_1 (ctor, p_nz_elts, p_nc_elts, p_elt_count,
4429 p_must_clear);
4432 /* Count the number of scalars in TYPE. Return -1 on overflow or
4433 if TYPE is variable-sized. */
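/* E.g. "struct { int a; int b[3]; }" counts as 4 scalars, a
   COMPLEX_TYPE counts as 2, and any pointer, integer or real type
   counts as 1; unions are only estimated, from their size in words.  */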
4435 HOST_WIDE_INT
4436 count_type_elements (tree type)
4438 const HOST_WIDE_INT max = ~((HOST_WIDE_INT)1 << (HOST_BITS_PER_WIDE_INT-1));
4439 switch (TREE_CODE (type))
4441 case ARRAY_TYPE:
4443 tree telts = array_type_nelts (type);
4444 if (telts && host_integerp (telts, 1))
4446 HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1;
4447 HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type));
4448 if (n == 0)
4449 return 0;
4450 else if (max / n > m)
4451 return n * m;
4453 return -1;
4456 case RECORD_TYPE:
4458 HOST_WIDE_INT n = 0, t;
4459 tree f;
4461 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
4462 if (TREE_CODE (f) == FIELD_DECL)
4464 t = count_type_elements (TREE_TYPE (f));
4465 if (t < 0)
4466 return -1;
4467 n += t;
4470 return n;
4473 case UNION_TYPE:
4474 case QUAL_UNION_TYPE:
4476 /* Ho hum. How in the world do we guess here? Clearly it isn't
4477 right to count the fields. Guess based on the number of words. */
4478 HOST_WIDE_INT n = int_size_in_bytes (type);
4479 if (n < 0)
4480 return -1;
4481 return n / UNITS_PER_WORD;
4484 case COMPLEX_TYPE:
4485 return 2;
4487 case VECTOR_TYPE:
4488 return TYPE_VECTOR_SUBPARTS (type);
4490 case INTEGER_TYPE:
4491 case REAL_TYPE:
4492 case ENUMERAL_TYPE:
4493 case BOOLEAN_TYPE:
4494 case CHAR_TYPE:
4495 case POINTER_TYPE:
4496 case OFFSET_TYPE:
4497 case REFERENCE_TYPE:
4498 return 1;
4500 case VOID_TYPE:
4501 case METHOD_TYPE:
4502 case FILE_TYPE:
4503 case FUNCTION_TYPE:
4504 case LANG_TYPE:
4505 default:
4506 gcc_unreachable ();
4510 /* Return 1 if EXP contains mostly (3/4) zeros. */
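/* For instance, a CONSTRUCTOR for an 8-element array with one nonzero
   element is mostly zero (1 < 8/4), whereas one with two nonzero
   elements is not (2 < 8/4 fails).  */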
4512 static int
4513 mostly_zeros_p (tree exp)
4515 if (TREE_CODE (exp) == CONSTRUCTOR)
4518 HOST_WIDE_INT nz_elts, nc_elts, count, elts;
4519 bool must_clear;
4521 categorize_ctor_elements (exp, &nz_elts, &nc_elts, &count, &must_clear);
4522 if (must_clear)
4523 return 1;
4525 elts = count_type_elements (TREE_TYPE (exp));
4527 return nz_elts < elts / 4;
4530 return initializer_zerop (exp);
4533 /* Helper function for store_constructor.
4534 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4535 TYPE is the type of the CONSTRUCTOR, not the element type.
4536 CLEARED is as for store_constructor.
4537 ALIAS_SET is the alias set to use for any stores.
4539 This provides a recursive shortcut back to store_constructor when it isn't
4540 necessary to go through store_field. This is so that we can pass through
4541 the cleared field to let store_constructor know that we may not have to
4542 clear a substructure if the outer structure has already been cleared. */
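/* E.g. when a nested CONSTRUCTOR initializes a byte-aligned sub-structure
   of an outer structure that has already been cleared, the recursive call
   can skip storing the zero elements of the inner constructor.  */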
4544 static void
4545 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
4546 HOST_WIDE_INT bitpos, enum machine_mode mode,
4547 tree exp, tree type, int cleared, int alias_set)
4549 if (TREE_CODE (exp) == CONSTRUCTOR
4550 /* We can only call store_constructor recursively if the size and
4551 bit position are on a byte boundary. */
4552 && bitpos % BITS_PER_UNIT == 0
4553 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
4554 /* If we have a nonzero bitpos for a register target, then we just
4555 let store_field do the bitfield handling. This is unlikely to
4556 generate unnecessary clear instructions anyways. */
4557 && (bitpos == 0 || MEM_P (target)))
4559 if (MEM_P (target))
4560 target
4561 = adjust_address (target,
4562 GET_MODE (target) == BLKmode
4563 || 0 != (bitpos
4564 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4565 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4568 /* Update the alias set, if required. */
4569 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
4570 && MEM_ALIAS_SET (target) != 0)
4572 target = copy_rtx (target);
4573 set_mem_alias_set (target, alias_set);
4576 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4578 else
4579 store_field (target, bitsize, bitpos, mode, exp, type, alias_set);
4582 /* Store the value of constructor EXP into the rtx TARGET.
4583 TARGET is either a REG or a MEM; we know it cannot conflict, since
4584 safe_from_p has been called.
4585 CLEARED is true if TARGET is known to have been zero'd.
4586 SIZE is the number of bytes of TARGET we are allowed to modify: this
4587 may not be the same as the size of EXP if we are assigning to a field
4588 which has been packed to exclude padding bits. */
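/* For example, for "struct { int a, b, c; } x = { 1 };" the constructor
   mentions fewer fields than the structure has, so the whole object is
   cleared first and only A is then stored explicitly.  */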
4590 static void
4591 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
4593 tree type = TREE_TYPE (exp);
4594 #ifdef WORD_REGISTER_OPERATIONS
4595 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4596 #endif
4598 switch (TREE_CODE (type))
4600 case RECORD_TYPE:
4601 case UNION_TYPE:
4602 case QUAL_UNION_TYPE:
4604 tree elt;
4606 /* If size is zero or the target is already cleared, do nothing. */
4607 if (size == 0 || cleared)
4608 cleared = 1;
4609 /* We either clear the aggregate or indicate the value is dead. */
4610 else if ((TREE_CODE (type) == UNION_TYPE
4611 || TREE_CODE (type) == QUAL_UNION_TYPE)
4612 && ! CONSTRUCTOR_ELTS (exp))
4613 /* If the constructor is empty, clear the union. */
4615 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
4616 cleared = 1;
4619 /* If we are building a static constructor into a register,
4620 set the initial value as zero so we can fold the value into
4621 a constant. But if more than one register is involved,
4622 this probably loses. */
4623 else if (REG_P (target) && TREE_STATIC (exp)
4624 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4626 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4627 cleared = 1;
4630 /* If the constructor has fewer fields than the structure or
4631 if we are initializing the structure to mostly zeros, clear
4632 the whole structure first. Don't do this if TARGET is a
4633 register whose mode size isn't equal to SIZE since
4634 clear_storage can't handle this case. */
4635 else if (size > 0
4636 && ((list_length (CONSTRUCTOR_ELTS (exp))
4637 != fields_length (type))
4638 || mostly_zeros_p (exp))
4639 && (!REG_P (target)
4640 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4641 == size)))
4643 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
4644 cleared = 1;
4647 if (! cleared)
4648 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4650 /* Store each element of the constructor into the
4651 corresponding field of TARGET. */
4653 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4655 tree field = TREE_PURPOSE (elt);
4656 tree value = TREE_VALUE (elt);
4657 enum machine_mode mode;
4658 HOST_WIDE_INT bitsize;
4659 HOST_WIDE_INT bitpos = 0;
4660 tree offset;
4661 rtx to_rtx = target;
4663 /* Just ignore missing fields. We cleared the whole
4664 structure, above, if any fields are missing. */
4665 if (field == 0)
4666 continue;
4668 if (cleared && initializer_zerop (value))
4669 continue;
4671 if (host_integerp (DECL_SIZE (field), 1))
4672 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4673 else
4674 bitsize = -1;
4676 mode = DECL_MODE (field);
4677 if (DECL_BIT_FIELD (field))
4678 mode = VOIDmode;
4680 offset = DECL_FIELD_OFFSET (field);
4681 if (host_integerp (offset, 0)
4682 && host_integerp (bit_position (field), 0))
4684 bitpos = int_bit_position (field);
4685 offset = 0;
4687 else
4688 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4690 if (offset)
4692 rtx offset_rtx;
4694 offset
4695 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
4696 make_tree (TREE_TYPE (exp),
4697 target));
4699 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4700 gcc_assert (MEM_P (to_rtx));
4702 #ifdef POINTERS_EXTEND_UNSIGNED
4703 if (GET_MODE (offset_rtx) != Pmode)
4704 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4705 #else
4706 if (GET_MODE (offset_rtx) != ptr_mode)
4707 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4708 #endif
4710 to_rtx = offset_address (to_rtx, offset_rtx,
4711 highest_pow2_factor (offset));
4714 #ifdef WORD_REGISTER_OPERATIONS
4715 /* If this initializes a field that is smaller than a
4716 word, at the start of a word, try to widen it to a full
4717 word. This special case allows us to output C++ member
4718 function initializations in a form that the optimizers
4719 can understand. */
4720 if (REG_P (target)
4721 && bitsize < BITS_PER_WORD
4722 && bitpos % BITS_PER_WORD == 0
4723 && GET_MODE_CLASS (mode) == MODE_INT
4724 && TREE_CODE (value) == INTEGER_CST
4725 && exp_size >= 0
4726 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4728 tree type = TREE_TYPE (value);
4730 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4732 type = lang_hooks.types.type_for_size
4733 (BITS_PER_WORD, TYPE_UNSIGNED (type));
4734 value = convert (type, value);
4737 if (BYTES_BIG_ENDIAN)
4738 value
4739 = fold (build2 (LSHIFT_EXPR, type, value,
4740 build_int_cst (NULL_TREE,
4741 BITS_PER_WORD - bitsize)));
4742 bitsize = BITS_PER_WORD;
4743 mode = word_mode;
4745 #endif
4747 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4748 && DECL_NONADDRESSABLE_P (field))
4750 to_rtx = copy_rtx (to_rtx);
4751 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4754 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4755 value, type, cleared,
4756 get_alias_set (TREE_TYPE (field)));
4758 break;
4760 case ARRAY_TYPE:
4762 tree elt;
4763 int i;
4764 int need_to_clear;
4765 tree domain;
4766 tree elttype = TREE_TYPE (type);
4767 int const_bounds_p;
4768 HOST_WIDE_INT minelt = 0;
4769 HOST_WIDE_INT maxelt = 0;
4771 domain = TYPE_DOMAIN (type);
4772 const_bounds_p = (TYPE_MIN_VALUE (domain)
4773 && TYPE_MAX_VALUE (domain)
4774 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4775 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4777 /* If we have constant bounds for the range of the type, get them. */
4778 if (const_bounds_p)
4780 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4781 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4784 /* If the constructor has fewer elements than the array, clear
4785 the whole array first. Similarly if this is a static
4786 constructor of a non-BLKmode object. */
4787 if (cleared)
4788 need_to_clear = 0;
4789 else if (REG_P (target) && TREE_STATIC (exp))
4790 need_to_clear = 1;
4791 else
4793 HOST_WIDE_INT count = 0, zero_count = 0;
4794 need_to_clear = ! const_bounds_p;
4796 /* This loop is a more accurate version of the loop in
4797 mostly_zeros_p (it handles RANGE_EXPR in an index). It
4798 is also needed to check for missing elements. */
4799 for (elt = CONSTRUCTOR_ELTS (exp);
4800 elt != NULL_TREE && ! need_to_clear;
4801 elt = TREE_CHAIN (elt))
4803 tree index = TREE_PURPOSE (elt);
4804 HOST_WIDE_INT this_node_count;
4806 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4808 tree lo_index = TREE_OPERAND (index, 0);
4809 tree hi_index = TREE_OPERAND (index, 1);
4811 if (! host_integerp (lo_index, 1)
4812 || ! host_integerp (hi_index, 1))
4814 need_to_clear = 1;
4815 break;
4818 this_node_count = (tree_low_cst (hi_index, 1)
4819 - tree_low_cst (lo_index, 1) + 1);
4821 else
4822 this_node_count = 1;
4824 count += this_node_count;
4825 if (mostly_zeros_p (TREE_VALUE (elt)))
4826 zero_count += this_node_count;
4829 /* Clear the entire array first if there are any missing
4830 elements, or if the incidence of zero elements is >=
4831 75%. */
4832 if (! need_to_clear
4833 && (count < maxelt - minelt + 1
4834 || 4 * zero_count >= 3 * count))
4835 need_to_clear = 1;
4838 if (need_to_clear && size > 0)
4840 if (REG_P (target))
4841 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4842 else
4843 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
4844 cleared = 1;
4847 if (!cleared && REG_P (target))
4848 /* Inform later passes that the old value is dead. */
4849 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4851 /* Store each element of the constructor into the
4852 corresponding element of TARGET, determined by counting the
4853 elements. */
4854 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4855 elt;
4856 elt = TREE_CHAIN (elt), i++)
4858 enum machine_mode mode;
4859 HOST_WIDE_INT bitsize;
4860 HOST_WIDE_INT bitpos;
4861 int unsignedp;
4862 tree value = TREE_VALUE (elt);
4863 tree index = TREE_PURPOSE (elt);
4864 rtx xtarget = target;
4866 if (cleared && initializer_zerop (value))
4867 continue;
4869 unsignedp = TYPE_UNSIGNED (elttype);
4870 mode = TYPE_MODE (elttype);
4871 if (mode == BLKmode)
4872 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4873 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4874 : -1);
4875 else
4876 bitsize = GET_MODE_BITSIZE (mode);
4878 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4880 tree lo_index = TREE_OPERAND (index, 0);
4881 tree hi_index = TREE_OPERAND (index, 1);
4882 rtx index_r, pos_rtx;
4883 HOST_WIDE_INT lo, hi, count;
4884 tree position;
4886 /* If the range is constant and "small", unroll the loop. */
4887 if (const_bounds_p
4888 && host_integerp (lo_index, 0)
4889 && host_integerp (hi_index, 0)
4890 && (lo = tree_low_cst (lo_index, 0),
4891 hi = tree_low_cst (hi_index, 0),
4892 count = hi - lo + 1,
4893 (!MEM_P (target)
4894 || count <= 2
4895 || (host_integerp (TYPE_SIZE (elttype), 1)
4896 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4897 <= 40 * 8)))))
4899 lo -= minelt; hi -= minelt;
4900 for (; lo <= hi; lo++)
4902 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4904 if (MEM_P (target)
4905 && !MEM_KEEP_ALIAS_SET_P (target)
4906 && TREE_CODE (type) == ARRAY_TYPE
4907 && TYPE_NONALIASED_COMPONENT (type))
4909 target = copy_rtx (target);
4910 MEM_KEEP_ALIAS_SET_P (target) = 1;
4913 store_constructor_field
4914 (target, bitsize, bitpos, mode, value, type, cleared,
4915 get_alias_set (elttype));
4918 else
4920 rtx loop_start = gen_label_rtx ();
4921 rtx loop_end = gen_label_rtx ();
4922 tree exit_cond;
4924 expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4925 unsignedp = TYPE_UNSIGNED (domain);
4927 index = build_decl (VAR_DECL, NULL_TREE, domain);
4929 index_r
4930 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4931 &unsignedp, 0));
4932 SET_DECL_RTL (index, index_r);
4933 store_expr (lo_index, index_r, 0);
4935 /* Build the head of the loop. */
4936 do_pending_stack_adjust ();
4937 emit_label (loop_start);
4939 /* Assign value to element index. */
4940 position
4941 = convert (ssizetype,
4942 fold (build2 (MINUS_EXPR, TREE_TYPE (index),
4943 index, TYPE_MIN_VALUE (domain))));
4944 position = size_binop (MULT_EXPR, position,
4945 convert (ssizetype,
4946 TYPE_SIZE_UNIT (elttype)));
4948 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4949 xtarget = offset_address (target, pos_rtx,
4950 highest_pow2_factor (position));
4951 xtarget = adjust_address (xtarget, mode, 0);
4952 if (TREE_CODE (value) == CONSTRUCTOR)
4953 store_constructor (value, xtarget, cleared,
4954 bitsize / BITS_PER_UNIT);
4955 else
4956 store_expr (value, xtarget, 0);
4958 /* Generate a conditional jump to exit the loop. */
4959 exit_cond = build2 (LT_EXPR, integer_type_node,
4960 index, hi_index);
4961 jumpif (exit_cond, loop_end);
4963 /* Update the loop counter, and jump to the head of
4964 the loop. */
4965 expand_assignment (index,
4966 build2 (PLUS_EXPR, TREE_TYPE (index),
4967 index, integer_one_node));
4969 emit_jump (loop_start);
4971 /* Build the end of the loop. */
4972 emit_label (loop_end);
4975 else if ((index != 0 && ! host_integerp (index, 0))
4976 || ! host_integerp (TYPE_SIZE (elttype), 1))
4978 tree position;
4980 if (index == 0)
4981 index = ssize_int (1);
4983 if (minelt)
4984 index = fold_convert (ssizetype,
4985 fold (build2 (MINUS_EXPR,
4986 TREE_TYPE (index),
4987 index,
4988 TYPE_MIN_VALUE (domain))));
4990 position = size_binop (MULT_EXPR, index,
4991 convert (ssizetype,
4992 TYPE_SIZE_UNIT (elttype)));
4993 xtarget = offset_address (target,
4994 expand_expr (position, 0, VOIDmode, 0),
4995 highest_pow2_factor (position));
4996 xtarget = adjust_address (xtarget, mode, 0);
4997 store_expr (value, xtarget, 0);
4999 else
5001 if (index != 0)
5002 bitpos = ((tree_low_cst (index, 0) - minelt)
5003 * tree_low_cst (TYPE_SIZE (elttype), 1));
5004 else
5005 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5007 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
5008 && TREE_CODE (type) == ARRAY_TYPE
5009 && TYPE_NONALIASED_COMPONENT (type))
5011 target = copy_rtx (target);
5012 MEM_KEEP_ALIAS_SET_P (target) = 1;
5014 store_constructor_field (target, bitsize, bitpos, mode, value,
5015 type, cleared, get_alias_set (elttype));
5018 break;
5021 case VECTOR_TYPE:
5023 tree elt;
5024 int i;
5025 int need_to_clear;
5026 int icode = 0;
5027 tree elttype = TREE_TYPE (type);
5028 int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
5029 enum machine_mode eltmode = TYPE_MODE (elttype);
5030 HOST_WIDE_INT bitsize;
5031 HOST_WIDE_INT bitpos;
5032 rtvec vector = NULL;
5033 unsigned n_elts;
5035 gcc_assert (eltmode != BLKmode);
5037 n_elts = TYPE_VECTOR_SUBPARTS (type);
5038 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
5040 enum machine_mode mode = GET_MODE (target);
5042 icode = (int) vec_init_optab->handlers[mode].insn_code;
5043 if (icode != CODE_FOR_nothing)
5045 unsigned int i;
5047 vector = rtvec_alloc (n_elts);
5048 for (i = 0; i < n_elts; i++)
5049 RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
5053 /* If the constructor has fewer elements than the vector,
5054 clear the whole vector first. Similarly if this is a static
5055 constructor of a non-BLKmode object. */
5056 if (cleared)
5057 need_to_clear = 0;
5058 else if (REG_P (target) && TREE_STATIC (exp))
5059 need_to_clear = 1;
5060 else
5062 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
5064 for (elt = CONSTRUCTOR_ELTS (exp);
5065 elt != NULL_TREE;
5066 elt = TREE_CHAIN (elt))
5068 int n_elts_here = tree_low_cst
5069 (int_const_binop (TRUNC_DIV_EXPR,
5070 TYPE_SIZE (TREE_TYPE (TREE_VALUE (elt))),
5071 TYPE_SIZE (elttype), 0), 1);
5073 count += n_elts_here;
5074 if (mostly_zeros_p (TREE_VALUE (elt)))
5075 zero_count += n_elts_here;
5078 /* Clear the entire vector first if there are any missing elements,
5079 or if the incidence of zero elements is >= 75%. */
5080 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
5083 if (need_to_clear && size > 0 && !vector)
5085 if (REG_P (target))
5086 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5087 else
5088 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5089 cleared = 1;
5092 if (!cleared && REG_P (target))
5093 /* Inform later passes that the old value is dead. */
5094 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
5096 /* Store each element of the constructor into the corresponding
5097 element of TARGET, determined by counting the elements. */
5098 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
5099 elt;
5100 elt = TREE_CHAIN (elt), i += bitsize / elt_size)
5102 tree value = TREE_VALUE (elt);
5103 tree index = TREE_PURPOSE (elt);
5104 HOST_WIDE_INT eltpos;
5106 bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
5107 if (cleared && initializer_zerop (value))
5108 continue;
5110 if (index != 0)
5111 eltpos = tree_low_cst (index, 1);
5112 else
5113 eltpos = i;
5115 if (vector)
5117 /* Vector CONSTRUCTORs should only be built from smaller
5118 vectors in the case of BLKmode vectors. */
5119 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
5120 RTVEC_ELT (vector, eltpos)
5121 = expand_expr (value, NULL_RTX, VOIDmode, 0);
5123 else
5125 enum machine_mode value_mode =
5126 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
5127 ? TYPE_MODE (TREE_TYPE (value))
5128 : eltmode;
5129 bitpos = eltpos * elt_size;
5130 store_constructor_field (target, bitsize, bitpos,
5131 value_mode, value, type,
5132 cleared, get_alias_set (elttype));
5136 if (vector)
5137 emit_insn (GEN_FCN (icode)
5138 (target,
5139 gen_rtx_PARALLEL (GET_MODE (target), vector)));
5140 break;
5143 default:
5144 gcc_unreachable ();
5148 /* Store the value of EXP (an expression tree)
5149 into a subfield of TARGET which has mode MODE and occupies
5150 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5151 If MODE is VOIDmode, it means that we are storing into a bit-field.
5153 Always return const0_rtx unless we have something particular to
5154 return.
5156 TYPE is the type of the underlying object,
5158 ALIAS_SET is the alias set for the destination. This value will
5159 (in general) be different from that for TARGET, since TARGET is a
5160 reference to the containing structure. */
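/* For instance, a store to a bit-field member, or to a field that is not
   aligned well enough for an ordinary memory reference, goes through
   store_bit_field below, while a store to a naturally aligned field of
   matching size is done with store_expr on the byte-adjusted address.  */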
5162 static rtx
5163 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5164 enum machine_mode mode, tree exp, tree type, int alias_set)
5166 HOST_WIDE_INT width_mask = 0;
5168 if (TREE_CODE (exp) == ERROR_MARK)
5169 return const0_rtx;
5171 /* If we have nothing to store, do nothing unless the expression has
5172 side-effects. */
5173 if (bitsize == 0)
5174 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5175 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5176 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5178 /* If we are storing into an unaligned field of an aligned union that is
5179 in a register, we may have the mode of TARGET being an integer mode but
5180 MODE == BLKmode. In that case, get an aligned object whose size and
5181 alignment are the same as TARGET and store TARGET into it (we can avoid
5182 the store if the field being stored is the entire width of TARGET). Then
5183 call ourselves recursively to store the field into a BLKmode version of
5184 that object. Finally, load from the object into TARGET. This is not
5185 very efficient in general, but should only be slightly more expensive
5186 than the otherwise-required unaligned accesses. Perhaps this can be
5187 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5188 twice, once with emit_move_insn and once via store_field. */
5190 if (mode == BLKmode
5191 && (REG_P (target) || GET_CODE (target) == SUBREG))
5193 rtx object = assign_temp (type, 0, 1, 1);
5194 rtx blk_object = adjust_address (object, BLKmode, 0);
5196 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5197 emit_move_insn (object, target);
5199 store_field (blk_object, bitsize, bitpos, mode, exp, type, alias_set);
5201 emit_move_insn (target, object);
5203 /* We want to return the BLKmode version of the data. */
5204 return blk_object;
5207 if (GET_CODE (target) == CONCAT)
5209 /* We're storing into a struct containing a single __complex. */
5211 gcc_assert (!bitpos);
5212 return store_expr (exp, target, 0);
5215 /* If the structure is in a register or if the component
5216 is a bit field, we cannot use addressing to access it.
5217 Use bit-field techniques or SUBREG to store in it. */
5219 if (mode == VOIDmode
5220 || (mode != BLKmode && ! direct_store[(int) mode]
5221 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5222 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5223 || REG_P (target)
5224 || GET_CODE (target) == SUBREG
5225 /* If the field isn't aligned enough to store as an ordinary memref,
5226 store it as a bit field. */
5227 || (mode != BLKmode
5228 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5229 || bitpos % GET_MODE_ALIGNMENT (mode))
5230 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5231 || (bitpos % BITS_PER_UNIT != 0)))
5232 /* If the RHS and field are a constant size and the size of the
5233 RHS isn't the same size as the bitfield, we must use bitfield
5234 operations. */
5235 || (bitsize >= 0
5236 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5237 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5239 rtx temp;
5241 /* If EXP is a NOP_EXPR of precision less than its mode, then that
5242 implies a mask operation. If the precision is the same size as
5243 the field we're storing into, that mask is redundant. This is
5244 particularly common with bit field assignments generated by the
5245 C front end. */
5246 if (TREE_CODE (exp) == NOP_EXPR)
5248 tree type = TREE_TYPE (exp);
5249 if (INTEGRAL_TYPE_P (type)
5250 && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
5251 && bitsize == TYPE_PRECISION (type))
5253 type = TREE_TYPE (TREE_OPERAND (exp, 0));
5254 if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
5255 exp = TREE_OPERAND (exp, 0);
5259 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5261 /* If BITSIZE is narrower than the size of the type of EXP
5262 we will be narrowing TEMP. Normally, what's wanted are the
5263 low-order bits. However, if EXP's type is a record and this is
5264 a big-endian machine, we want the upper BITSIZE bits.
5265 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5266 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5267 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5268 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5269 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5270 - bitsize),
5271 NULL_RTX, 1);
5273 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5274 MODE. */
5275 if (mode != VOIDmode && mode != BLKmode
5276 && mode != TYPE_MODE (TREE_TYPE (exp)))
5277 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5279 /* If the modes of TARGET and TEMP are both BLKmode, both
5280 must be in memory and BITPOS must be aligned on a byte
5281 boundary. If so, we simply do a block copy. */
5282 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5284 gcc_assert (MEM_P (target) && MEM_P (temp)
5285 && !(bitpos % BITS_PER_UNIT));
5287 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5288 emit_block_move (target, temp,
5289 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5290 / BITS_PER_UNIT),
5291 BLOCK_OP_NORMAL);
5293 return const0_rtx;
5296 /* Store the value in the bitfield. */
5297 store_bit_field (target, bitsize, bitpos, mode, temp);
5299 return const0_rtx;
5301 else
5303 /* Now build a reference to just the desired component. */
5304 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5306 if (to_rtx == target)
5307 to_rtx = copy_rtx (to_rtx);
5309 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5310 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5311 set_mem_alias_set (to_rtx, alias_set);
5313 return store_expr (exp, to_rtx, 0);
5317 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5318 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5319 codes and find the ultimate containing object, which we return.
5321 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5322 bit position, and *PUNSIGNEDP to the signedness of the field.
5323 If the position of the field is variable, we store a tree
5324 giving the variable offset (in units) in *POFFSET.
5325 This offset is in addition to the bit position.
5326 If the position is not variable, we store 0 in *POFFSET.
5328 If any of the extraction expressions is volatile,
5329 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5331 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5332 is a mode that can be used to access the field. In that case, *PBITSIZE
5333 is redundant.
5335 If the field describes a variable-sized object, *PMODE is set to
5336 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5337 this case, but the address of the object can be found.
5339 If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
5340 look through nodes that serve as markers of a greater alignment than
5341 the one that can be deduced from the expression. These nodes make it
5342 possible for front-ends to prevent temporaries from being created by
5343 the middle-end on alignment considerations. For that purpose, the
5344 normal operating mode at high-level is to always pass FALSE so that
5345 the ultimate containing object is really returned; moreover, the
5346 associated predicate handled_component_p will always return TRUE
5347 on these nodes, thus indicating that they are essentially handled
5348 by get_inner_reference. TRUE should only be passed when the caller
5349 is scanning the expression in order to build another representation
5350 and specifically knows how to handle these nodes; as such, this is
5351 the normal operating mode in the RTL expanders. */
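/* For example, for "s.f[i].g" the returned object is S; when the index I
   is constant the whole displacement of G is folded into *PBITPOS and
   *POFFSET is 0, while a variable index leaves the byte-offset
   computation as a tree in *POFFSET.  */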
5353 tree
5354 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5355 HOST_WIDE_INT *pbitpos, tree *poffset,
5356 enum machine_mode *pmode, int *punsignedp,
5357 int *pvolatilep, bool keep_aligning)
5359 tree size_tree = 0;
5360 enum machine_mode mode = VOIDmode;
5361 tree offset = size_zero_node;
5362 tree bit_offset = bitsize_zero_node;
5363 tree tem;
5365 /* First get the mode, signedness, and size. We do this from just the
5366 outermost expression. */
5367 if (TREE_CODE (exp) == COMPONENT_REF)
5369 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5370 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5371 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5373 *punsignedp = DECL_UNSIGNED (TREE_OPERAND (exp, 1));
5375 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5377 size_tree = TREE_OPERAND (exp, 1);
5378 *punsignedp = BIT_FIELD_REF_UNSIGNED (exp);
5380 else
5382 mode = TYPE_MODE (TREE_TYPE (exp));
5383 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
5385 if (mode == BLKmode)
5386 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5387 else
5388 *pbitsize = GET_MODE_BITSIZE (mode);
5391 if (size_tree != 0)
5393 if (! host_integerp (size_tree, 1))
5394 mode = BLKmode, *pbitsize = -1;
5395 else
5396 *pbitsize = tree_low_cst (size_tree, 1);
5399 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5400 and find the ultimate containing object. */
5401 while (1)
5403 switch (TREE_CODE (exp))
5405 case BIT_FIELD_REF:
5406 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5407 TREE_OPERAND (exp, 2));
5408 break;
5410 case COMPONENT_REF:
5412 tree field = TREE_OPERAND (exp, 1);
5413 tree this_offset = component_ref_field_offset (exp);
5415 /* If this field hasn't been filled in yet, don't go past it.
5416 This should only happen when folding expressions made during
5417 type construction. */
5418 if (this_offset == 0)
5419 break;
5421 offset = size_binop (PLUS_EXPR, offset, this_offset);
5422 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5423 DECL_FIELD_BIT_OFFSET (field));
5425 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5427 break;
5429 case ARRAY_REF:
5430 case ARRAY_RANGE_REF:
5432 tree index = TREE_OPERAND (exp, 1);
5433 tree low_bound = array_ref_low_bound (exp);
5434 tree unit_size = array_ref_element_size (exp);
5436 /* We assume all arrays have sizes that are a multiple of a byte.
5437 First subtract the lower bound, if any, in the type of the
5438 index, then convert to sizetype and multiply by the size of
5439 the array element. */
5440 if (! integer_zerop (low_bound))
5441 index = fold (build2 (MINUS_EXPR, TREE_TYPE (index),
5442 index, low_bound));
5444 offset = size_binop (PLUS_EXPR, offset,
5445 size_binop (MULT_EXPR,
5446 convert (sizetype, index),
5447 unit_size));
5449 break;
5451 case REALPART_EXPR:
5452 break;
5454 case IMAGPART_EXPR:
5455 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5456 bitsize_int (*pbitsize));
5457 break;
5459 case VIEW_CONVERT_EXPR:
5460 if (keep_aligning && STRICT_ALIGNMENT
5461 && (TYPE_ALIGN (TREE_TYPE (exp))
5462 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
5463 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5464 < BIGGEST_ALIGNMENT)
5465 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5466 || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
5467 goto done;
5468 break;
5470 default:
5471 goto done;
5474 /* If any reference in the chain is volatile, the effect is volatile. */
5475 if (TREE_THIS_VOLATILE (exp))
5476 *pvolatilep = 1;
5478 exp = TREE_OPERAND (exp, 0);
5480 done:
5482 /* If OFFSET is constant, see if we can return the whole thing as a
5483 constant bit position. Otherwise, split it up. */
5484 if (host_integerp (offset, 0)
5485 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5486 bitsize_unit_node))
5487 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5488 && host_integerp (tem, 0))
5489 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5490 else
5491 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5493 *pmode = mode;
5494 return exp;
5497 /* Return a tree of sizetype representing the size, in bytes, of the element
5498 of EXP, an ARRAY_REF. */
5500 tree
5501 array_ref_element_size (tree exp)
5503 tree aligned_size = TREE_OPERAND (exp, 3);
5504 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5506 /* If a size was specified in the ARRAY_REF, it's the size measured
5507 in alignment units of the element type. So multiply by that value. */
5508 if (aligned_size)
5510 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
5511 sizetype from another type of the same width and signedness. */
5512 if (TREE_TYPE (aligned_size) != sizetype)
5513 aligned_size = fold_convert (sizetype, aligned_size);
5514 return size_binop (MULT_EXPR, aligned_size,
5515 size_int (TYPE_ALIGN_UNIT (elmt_type)));
5518 /* Otherwise, take the size from that of the element type. Substitute
5519 any PLACEHOLDER_EXPR that we have. */
5520 else
5521 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
5524 /* Return a tree representing the lower bound of the array mentioned in
5525 EXP, an ARRAY_REF. */
5527 tree
5528 array_ref_low_bound (tree exp)
5530 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5532 /* If a lower bound is specified in EXP, use it. */
5533 if (TREE_OPERAND (exp, 2))
5534 return TREE_OPERAND (exp, 2);
5536 /* Otherwise, if there is a domain type and it has a lower bound, use it,
5537 substituting for a PLACEHOLDER_EXPR as needed. */
5538 if (domain_type && TYPE_MIN_VALUE (domain_type))
5539 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
5541 /* Otherwise, return a zero of the appropriate type. */
5542 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
5545 /* Return a tree representing the upper bound of the array mentioned in
5546 EXP, an ARRAY_REF. */
5548 tree
5549 array_ref_up_bound (tree exp)
5551 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5553 /* If there is a domain type and it has an upper bound, use it, substituting
5554 for a PLACEHOLDER_EXPR as needed. */
5555 if (domain_type && TYPE_MAX_VALUE (domain_type))
5556 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
5558 /* Otherwise fail. */
5559 return NULL_TREE;
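/* Illustrative sketch (not part of the original expr.c): the two bound
   accessors above combine to give the element count of the accessed
   array when the upper bound is known.  The helper name is
   hypothetical.  */
#if 0
static tree
example_array_ref_length (tree array_ref)
{
  tree lo = array_ref_low_bound (array_ref);
  tree hi = array_ref_up_bound (array_ref);

  if (hi == NULL_TREE)
    return NULL_TREE;

  /* Number of elements is HI - LO + 1, computed in sizetype.  */
  return size_binop (PLUS_EXPR,
                     size_binop (MINUS_EXPR,
                                 fold_convert (sizetype, hi),
                                 fold_convert (sizetype, lo)),
                     size_one_node);
}
#endif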
5562 /* Return a tree representing the offset, in bytes, of the field referenced
5563 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
5565 tree
5566 component_ref_field_offset (tree exp)
5568 tree aligned_offset = TREE_OPERAND (exp, 2);
5569 tree field = TREE_OPERAND (exp, 1);
5571 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
5572 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
5573 value. */
5574 if (aligned_offset)
5576 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
5577 sizetype from another type of the same width and signedness. */
5578 if (TREE_TYPE (aligned_offset) != sizetype)
5579 aligned_offset = fold_convert (sizetype, aligned_offset);
5580 return size_binop (MULT_EXPR, aligned_offset,
5581 size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT));
5584 /* Otherwise, take the offset from that of the field. Substitute
5585 any PLACEHOLDER_EXPR that we have. */
5586 else
5587 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
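/* Illustrative sketch (not part of the original expr.c): the full bit
   position of the field referenced by a COMPONENT_REF is the byte offset
   returned above, scaled to bits, plus DECL_FIELD_BIT_OFFSET.  The
   helper name is hypothetical.  */
#if 0
static tree
example_component_ref_bit_position (tree comp_ref)
{
  tree field = TREE_OPERAND (comp_ref, 1);
  tree byte_offset = component_ref_field_offset (comp_ref);

  return size_binop (PLUS_EXPR,
                     size_binop (MULT_EXPR,
                                 fold_convert (bitsizetype, byte_offset),
                                 bitsize_unit_node),
                     DECL_FIELD_BIT_OFFSET (field));
}
#endif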
5590 /* Return 1 if T is an expression that get_inner_reference handles. */
5593 handled_component_p (tree t)
5595 switch (TREE_CODE (t))
5597 case BIT_FIELD_REF:
5598 case COMPONENT_REF:
5599 case ARRAY_REF:
5600 case ARRAY_RANGE_REF:
5601 case VIEW_CONVERT_EXPR:
5602 case REALPART_EXPR:
5603 case IMAGPART_EXPR:
5604 return 1;
5606 default:
5607 return 0;
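/* Illustrative sketch (not part of the original expr.c): strip every
   reference level that get_inner_reference knows how to handle and
   return the base object.  The helper name is hypothetical.  */
#if 0
static tree
example_get_base_object (tree ref)
{
  while (handled_component_p (ref))
    ref = TREE_OPERAND (ref, 0);
  return ref;
}
#endif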
5611 /* Given an rtx VALUE that may contain additions and multiplications, return
5612 an equivalent value that just refers to a register, memory, or constant.
5613 This is done by generating instructions to perform the arithmetic and
5614 returning a pseudo-register containing the value.
5616 The returned value may be a REG, SUBREG, MEM or constant. */
5619 force_operand (rtx value, rtx target)
5621 rtx op1, op2;
5622 /* Use subtarget as the target for operand 0 of a binary operation. */
5623 rtx subtarget = get_subtarget (target);
5624 enum rtx_code code = GET_CODE (value);
5626 /* Check for subreg applied to an expression produced by loop optimizer. */
5627 if (code == SUBREG
5628 && !REG_P (SUBREG_REG (value))
5629 && !MEM_P (SUBREG_REG (value)))
5631 value = simplify_gen_subreg (GET_MODE (value),
5632 force_reg (GET_MODE (SUBREG_REG (value)),
5633 force_operand (SUBREG_REG (value),
5634 NULL_RTX)),
5635 GET_MODE (SUBREG_REG (value)),
5636 SUBREG_BYTE (value));
5637 code = GET_CODE (value);
5640 /* Check for a PIC address load. */
5641 if ((code == PLUS || code == MINUS)
5642 && XEXP (value, 0) == pic_offset_table_rtx
5643 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5644 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5645 || GET_CODE (XEXP (value, 1)) == CONST))
5647 if (!subtarget)
5648 subtarget = gen_reg_rtx (GET_MODE (value));
5649 emit_move_insn (subtarget, value);
5650 return subtarget;
5653 if (code == ZERO_EXTEND || code == SIGN_EXTEND)
5655 if (!target)
5656 target = gen_reg_rtx (GET_MODE (value));
5657 convert_move (target, force_operand (XEXP (value, 0), NULL),
5658 code == ZERO_EXTEND);
5659 return target;
5662 if (ARITHMETIC_P (value))
5664 op2 = XEXP (value, 1);
5665 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
5666 subtarget = 0;
5667 if (code == MINUS && GET_CODE (op2) == CONST_INT)
5669 code = PLUS;
5670 op2 = negate_rtx (GET_MODE (value), op2);
5673 /* Check for an addition with OP2 a constant integer and our first
5674 operand a PLUS of a virtual register and something else. In that
5675 case, we want to emit the sum of the virtual register and the
5676 constant first and then add the other value. This allows virtual
5677 register instantiation to simply modify the constant rather than
5678 creating another one around this addition. */
5679 if (code == PLUS && GET_CODE (op2) == CONST_INT
5680 && GET_CODE (XEXP (value, 0)) == PLUS
5681 && REG_P (XEXP (XEXP (value, 0), 0))
5682 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5683 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5685 rtx temp = expand_simple_binop (GET_MODE (value), code,
5686 XEXP (XEXP (value, 0), 0), op2,
5687 subtarget, 0, OPTAB_LIB_WIDEN);
5688 return expand_simple_binop (GET_MODE (value), code, temp,
5689 force_operand (XEXP (XEXP (value,
5690 0), 1), 0),
5691 target, 0, OPTAB_LIB_WIDEN);
5694 op1 = force_operand (XEXP (value, 0), subtarget);
5695 op2 = force_operand (op2, NULL_RTX);
5696 switch (code)
5698 case MULT:
5699 return expand_mult (GET_MODE (value), op1, op2, target, 1);
5700 case DIV:
5701 if (!INTEGRAL_MODE_P (GET_MODE (value)))
5702 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5703 target, 1, OPTAB_LIB_WIDEN);
5704 else
5705 return expand_divmod (0,
5706 FLOAT_MODE_P (GET_MODE (value))
5707 ? RDIV_EXPR : TRUNC_DIV_EXPR,
5708 GET_MODE (value), op1, op2, target, 0);
5709 break;
5710 case MOD:
5711 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5712 target, 0);
5713 break;
5714 case UDIV:
5715 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
5716 target, 1);
5717 break;
5718 case UMOD:
5719 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5720 target, 1);
5721 break;
5722 case ASHIFTRT:
5723 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5724 target, 0, OPTAB_LIB_WIDEN);
5725 break;
5726 default:
5727 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5728 target, 1, OPTAB_LIB_WIDEN);
5731 if (UNARY_P (value))
5733 op1 = force_operand (XEXP (value, 0), NULL_RTX);
5734 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
5737 #ifdef INSN_SCHEDULING
5738 /* On machines that have insn scheduling, we want all memory references to be
5739 explicit, so we need to deal with such paradoxical SUBREGs. */
5740 if (GET_CODE (value) == SUBREG && MEM_P (SUBREG_REG (value))
5741 && (GET_MODE_SIZE (GET_MODE (value))
5742 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5743 value
5744 = simplify_gen_subreg (GET_MODE (value),
5745 force_reg (GET_MODE (SUBREG_REG (value)),
5746 force_operand (SUBREG_REG (value),
5747 NULL_RTX)),
5748 GET_MODE (SUBREG_REG (value)),
5749 SUBREG_BYTE (value));
5750 #endif
5752 return value;
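/* Illustrative sketch (not part of the original expr.c): a caller that
   has built an address as a PLUS of a register and a constant can use
   force_operand to turn it into something an insn predicate will accept.
   The variable names are hypothetical.  */
#if 0
static rtx
example_force_address (rtx base_reg, HOST_WIDE_INT displacement)
{
  rtx addr = gen_rtx_PLUS (Pmode, base_reg, GEN_INT (displacement));

  /* force_operand emits any arithmetic that is needed and returns a
     REG, SUBREG, MEM or constant.  */
  return force_operand (addr, NULL_RTX);
}
#endif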
5755 /* Subroutine of expand_expr: return nonzero iff there is no way that
5756 EXP can reference X, which is being modified. TOP_P is nonzero if this
5757 call is going to be used to determine whether we need a temporary
5758 for EXP, as opposed to a recursive call to this function.
5760 It is always safe for this routine to return zero since it merely
5761 searches for optimization opportunities. */
5764 safe_from_p (rtx x, tree exp, int top_p)
5766 rtx exp_rtl = 0;
5767 int i, nops;
5769 if (x == 0
5770 /* If EXP has varying size, we MUST use a target since we currently
5771 have no way of allocating temporaries of variable size
5772 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5773 So we assume here that something at a higher level has prevented a
5774 clash. This is somewhat bogus, but the best we can do. Only
5775 do this when X is BLKmode and when we are at the top level. */
5776 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5777 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5778 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5779 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5780 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5781 != INTEGER_CST)
5782 && GET_MODE (x) == BLKmode)
5783 /* If X is in the outgoing argument area, it is always safe. */
5784 || (MEM_P (x)
5785 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5786 || (GET_CODE (XEXP (x, 0)) == PLUS
5787 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5788 return 1;
5790 /* If this is a subreg of a hard register, declare it unsafe; otherwise,
5791 find the underlying pseudo. */
5792 if (GET_CODE (x) == SUBREG)
5794 x = SUBREG_REG (x);
5795 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
5796 return 0;
5799 /* Now look at our tree code and possibly recurse. */
5800 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5802 case tcc_declaration:
5803 exp_rtl = DECL_RTL_IF_SET (exp);
5804 break;
5806 case tcc_constant:
5807 return 1;
5809 case tcc_exceptional:
5810 if (TREE_CODE (exp) == TREE_LIST)
5812 while (1)
5814 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
5815 return 0;
5816 exp = TREE_CHAIN (exp);
5817 if (!exp)
5818 return 1;
5819 if (TREE_CODE (exp) != TREE_LIST)
5820 return safe_from_p (x, exp, 0);
5823 else if (TREE_CODE (exp) == ERROR_MARK)
5824 return 1; /* An already-visited SAVE_EXPR? */
5825 else
5826 return 0;
5828 case tcc_statement:
5829 /* The only case we look at here is the DECL_INITIAL inside a
5830 DECL_EXPR. */
5831 return (TREE_CODE (exp) != DECL_EXPR
5832 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
5833 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
5834 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
5836 case tcc_binary:
5837 case tcc_comparison:
5838 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
5839 return 0;
5840 /* Fall through. */
5842 case tcc_unary:
5843 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5845 case tcc_expression:
5846 case tcc_reference:
5847 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5848 the expression. If it is set, we conflict iff we are that rtx or
5849 both are in memory. Otherwise, we check all operands of the
5850 expression recursively. */
5852 switch (TREE_CODE (exp))
5854 case ADDR_EXPR:
5855 /* If the operand is static or we are static, we can't conflict.
5856 Likewise if we don't conflict with the operand at all. */
5857 if (staticp (TREE_OPERAND (exp, 0))
5858 || TREE_STATIC (exp)
5859 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5860 return 1;
5862 /* Otherwise, the only way this can conflict is if we are taking
5863 the address of a DECL whose address is part of X, which is
5864 very rare. */
5865 exp = TREE_OPERAND (exp, 0);
5866 if (DECL_P (exp))
5868 if (!DECL_RTL_SET_P (exp)
5869 || !MEM_P (DECL_RTL (exp)))
5870 return 0;
5871 else
5872 exp_rtl = XEXP (DECL_RTL (exp), 0);
5874 break;
5876 case MISALIGNED_INDIRECT_REF:
5877 case ALIGN_INDIRECT_REF:
5878 case INDIRECT_REF:
5879 if (MEM_P (x)
5880 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5881 get_alias_set (exp)))
5882 return 0;
5883 break;
5885 case CALL_EXPR:
5886 /* Assume that the call will clobber all hard registers and
5887 all of memory. */
5888 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
5889 || MEM_P (x))
5890 return 0;
5891 break;
5893 case WITH_CLEANUP_EXPR:
5894 case CLEANUP_POINT_EXPR:
5895 /* Lowered by gimplify.c. */
5896 gcc_unreachable ();
5898 case SAVE_EXPR:
5899 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5901 default:
5902 break;
5905 /* If we have an rtx, we do not need to scan our operands. */
5906 if (exp_rtl)
5907 break;
5909 nops = TREE_CODE_LENGTH (TREE_CODE (exp));
5910 for (i = 0; i < nops; i++)
5911 if (TREE_OPERAND (exp, i) != 0
5912 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5913 return 0;
5915 /* If this is a language-specific tree code, it may require
5916 special handling. */
5917 if ((unsigned int) TREE_CODE (exp)
5918 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5919 && !lang_hooks.safe_from_p (x, exp))
5920 return 0;
5921 break;
5923 case tcc_type:
5924 /* Should never get a type here. */
5925 gcc_unreachable ();
5928 /* If we have an rtl, find any enclosed object. Then see if we conflict
5929 with it. */
5930 if (exp_rtl)
5932 if (GET_CODE (exp_rtl) == SUBREG)
5934 exp_rtl = SUBREG_REG (exp_rtl);
5935 if (REG_P (exp_rtl)
5936 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5937 return 0;
5940 /* If the rtl is X, then it is not safe. Otherwise, it is safe unless both
5941 are memory and they conflict. */
5942 return ! (rtx_equal_p (x, exp_rtl)
5943 || (MEM_P (x) && MEM_P (exp_rtl)
5944 && true_dependence (exp_rtl, VOIDmode, x,
5945 rtx_addr_varies_p)));
5948 /* If we reach here, it is safe. */
5949 return 1;
5953 /* Return the highest power of two that EXP is known to be a multiple of.
5954 This is used in updating alignment of MEMs in array references. */
5956 static unsigned HOST_WIDE_INT
5957 highest_pow2_factor (tree exp)
5959 unsigned HOST_WIDE_INT c0, c1;
5961 switch (TREE_CODE (exp))
5963 case INTEGER_CST:
5964 /* We can find the lowest bit that's a one. If the low
5965 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
5966 We need to handle this case since we can find it in a COND_EXPR,
5967 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
5968 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
5969 later ICE. */
5970 if (TREE_CONSTANT_OVERFLOW (exp))
5971 return BIGGEST_ALIGNMENT;
5972 else
5974 /* Note: tree_low_cst is intentionally not used here;
5975 we don't care about the upper bits. */
5976 c0 = TREE_INT_CST_LOW (exp);
5977 c0 &= -c0;
5978 return c0 ? c0 : BIGGEST_ALIGNMENT;
5980 break;
5982 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
5983 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5984 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5985 return MIN (c0, c1);
5987 case MULT_EXPR:
5988 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5989 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5990 return c0 * c1;
5992 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
5993 case CEIL_DIV_EXPR:
5994 if (integer_pow2p (TREE_OPERAND (exp, 1))
5995 && host_integerp (TREE_OPERAND (exp, 1), 1))
5997 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5998 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
5999 return MAX (1, c0 / c1);
6001 break;
6003 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6004 case SAVE_EXPR:
6005 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6007 case COMPOUND_EXPR:
6008 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6010 case COND_EXPR:
6011 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6012 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6013 return MIN (c0, c1);
6015 default:
6016 break;
6019 return 1;
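/* Illustrative worked example (not part of the original expr.c): for an
   offset expression of the form (i * 12) + 8, highest_pow2_factor
   returns MIN (highest_pow2_factor (i * 12), highest_pow2_factor (8))
   = MIN (1 * 4, 8) = 4, since 12 contributes a factor of 4 and the
   constant 8 contributes 8; the sum is therefore known to be a multiple
   of 4, whatever i is.  */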
6022 /* Similar, except that the alignment requirements of TARGET are
6023 taken into account. Assume it is at least as aligned as its
6024 type, unless it is a COMPONENT_REF in which case the layout of
6025 the structure gives the alignment. */
6027 static unsigned HOST_WIDE_INT
6028 highest_pow2_factor_for_target (tree target, tree exp)
6030 unsigned HOST_WIDE_INT target_align, factor;
6032 factor = highest_pow2_factor (exp);
6033 if (TREE_CODE (target) == COMPONENT_REF)
6034 target_align = DECL_ALIGN_UNIT (TREE_OPERAND (target, 1));
6035 else
6036 target_align = TYPE_ALIGN_UNIT (TREE_TYPE (target));
6037 return MAX (factor, target_align);
6040 /* Expands variable VAR. */
6042 void
6043 expand_var (tree var)
6045 if (DECL_EXTERNAL (var))
6046 return;
6048 if (TREE_STATIC (var))
6049 /* If this is an inlined copy of a static local variable,
6050 look up the original decl. */
6051 var = DECL_ORIGIN (var);
6053 if (TREE_STATIC (var)
6054 ? !TREE_ASM_WRITTEN (var)
6055 : !DECL_RTL_SET_P (var))
6057 if (TREE_CODE (var) == VAR_DECL && DECL_VALUE_EXPR (var))
6058 /* Should be ignored. */;
6059 else if (lang_hooks.expand_decl (var))
6060 /* OK. */;
6061 else if (TREE_CODE (var) == VAR_DECL && !TREE_STATIC (var))
6062 expand_decl (var);
6063 else if (TREE_CODE (var) == VAR_DECL && TREE_STATIC (var))
6064 rest_of_decl_compilation (var, 0, 0);
6065 else
6066 /* No expansion needed. */
6067 gcc_assert (TREE_CODE (var) == TYPE_DECL
6068 || TREE_CODE (var) == CONST_DECL
6069 || TREE_CODE (var) == FUNCTION_DECL
6070 || TREE_CODE (var) == LABEL_DECL);
6074 /* Subroutine of expand_expr. Expand the two operands of a binary
6075 expression EXP0 and EXP1, placing the results in OP0 and OP1.
6076 The value may be stored in TARGET if TARGET is nonzero. The
6077 MODIFIER argument is as documented by expand_expr. */
6079 static void
6080 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
6081 enum expand_modifier modifier)
6083 if (! safe_from_p (target, exp1, 1))
6084 target = 0;
6085 if (operand_equal_p (exp0, exp1, 0))
6087 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6088 *op1 = copy_rtx (*op0);
6090 else
6092 /* If we need to preserve evaluation order, copy exp0 into its own
6093 temporary variable so that it can't be clobbered by exp1. */
6094 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
6095 exp0 = save_expr (exp0);
6096 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6097 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
6102 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
6103 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6105 static rtx
6106 expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
6107 enum expand_modifier modifier)
6109 rtx result, subtarget;
6110 tree inner, offset;
6111 HOST_WIDE_INT bitsize, bitpos;
6112 int volatilep, unsignedp;
6113 enum machine_mode mode1;
6115 /* If we are taking the address of a constant and are at the top level,
6116 we have to use output_constant_def since we can't call force_const_mem
6117 at top level. */
6118 /* ??? This should be considered a front-end bug. We should not be
6119 generating ADDR_EXPR of something that isn't an LVALUE. The only
6120 exception here is STRING_CST. */
6121 if (TREE_CODE (exp) == CONSTRUCTOR
6122 || CONSTANT_CLASS_P (exp))
6123 return XEXP (output_constant_def (exp, 0), 0);
6125 /* Everything must be something allowed by is_gimple_addressable. */
6126 switch (TREE_CODE (exp))
6128 case INDIRECT_REF:
6129 /* This case will happen via recursion for &a->b. */
6130 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, EXPAND_NORMAL);
6132 case CONST_DECL:
6133 /* Recurse and make the output_constant_def clause above handle this. */
6134 return expand_expr_addr_expr_1 (DECL_INITIAL (exp), target,
6135 tmode, modifier);
6137 case REALPART_EXPR:
6138 /* The real part of the complex number is always first; therefore
6139 the address is the same as the address of the parent object. */
6140 offset = 0;
6141 bitpos = 0;
6142 inner = TREE_OPERAND (exp, 0);
6143 break;
6145 case IMAGPART_EXPR:
6146 /* The imaginary part of the complex number is always second.
6147 The expression is therefore always offset by the size of the
6148 scalar type. */
6149 offset = 0;
6150 bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
6151 inner = TREE_OPERAND (exp, 0);
6152 break;
6154 default:
6155 /* If the object is a DECL, then expand it for its rtl. Don't bypass
6156 expand_expr, as that can have various side effects; LABEL_DECLs for
6157 example, may not have their DECL_RTL set yet. Assume language
6158 specific tree nodes can be expanded in some interesting way. */
6159 if (DECL_P (exp)
6160 || TREE_CODE (exp) >= LAST_AND_UNUSED_TREE_CODE)
6162 result = expand_expr (exp, target, tmode,
6163 modifier == EXPAND_INITIALIZER
6164 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
6166 /* If the DECL isn't in memory, then the DECL wasn't properly
6167 marked TREE_ADDRESSABLE, which will be either a front-end
6168 or a tree optimizer bug. */
6169 gcc_assert (GET_CODE (result) == MEM);
6170 result = XEXP (result, 0);
6172 /* ??? Is this needed anymore? */
6173 if (DECL_P (exp) && ! TREE_USED (exp))
6175 assemble_external (exp);
6176 TREE_USED (exp) = 1;
6179 if (modifier != EXPAND_INITIALIZER
6180 && modifier != EXPAND_CONST_ADDRESS)
6181 result = force_operand (result, target);
6182 return result;
6185 /* Pass FALSE as the last argument to get_inner_reference although
6186 we are expanding to RTL. The rationale is that we know how to
6187 handle "aligning nodes" here: we can just bypass them because
6188 they won't change the final object whose address will be returned
6189 (they actually exist only for that purpose). */
6190 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6191 &mode1, &unsignedp, &volatilep, false);
6192 break;
6195 /* We must have made progress. */
6196 gcc_assert (inner != exp);
6198 subtarget = offset || bitpos ? NULL_RTX : target;
6199 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier);
6201 if (offset)
6203 rtx tmp;
6205 if (modifier != EXPAND_NORMAL)
6206 result = force_operand (result, NULL);
6207 tmp = expand_expr (offset, NULL, tmode, EXPAND_NORMAL);
6209 result = convert_memory_address (tmode, result);
6210 tmp = convert_memory_address (tmode, tmp);
6212 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6213 result = gen_rtx_PLUS (tmode, result, tmp);
6214 else
6216 subtarget = bitpos ? NULL_RTX : target;
6217 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
6218 1, OPTAB_LIB_WIDEN);
6222 if (bitpos)
6224 /* Someone beforehand should have rejected taking the address
6225 of such an object. */
6226 gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
6228 result = plus_constant (result, bitpos / BITS_PER_UNIT);
6229 if (modifier < EXPAND_SUM)
6230 result = force_operand (result, target);
6233 return result;
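/* Illustrative summary (not part of the original expr.c): for the
   default case above, the address computed is essentially

       base + offset + bitpos / BITS_PER_UNIT

   where BASE comes from recursing on the object returned by
   get_inner_reference, OFFSET is the variable byte offset (if any), and
   BITPOS is the constant bit position, which is asserted to be a whole
   number of bytes.  */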
6236 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
6237 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6239 static rtx
6240 expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
6241 enum expand_modifier modifier)
6243 enum machine_mode rmode;
6244 rtx result;
6246 /* Target mode of VOIDmode says "whatever's natural". */
6247 if (tmode == VOIDmode)
6248 tmode = TYPE_MODE (TREE_TYPE (exp));
6250 /* We can get called with some Weird Things if the user does silliness
6251 like "(short) &a". In that case, convert_memory_address won't do
6252 the right thing, so ignore the given target mode. */
6253 if (tmode != Pmode && tmode != ptr_mode)
6254 tmode = Pmode;
6256 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
6257 tmode, modifier);
6259 /* Despite expand_expr's claims about ignoring TMODE when not
6260 strictly convenient, stuff breaks if we don't honor it. Note
6261 that combined with the above, we only do this for pointer modes. */
6262 rmode = GET_MODE (result);
6263 if (rmode == VOIDmode)
6264 rmode = tmode;
6265 if (rmode != tmode)
6266 result = convert_memory_address (tmode, result);
6268 return result;
6272 /* expand_expr: generate code for computing expression EXP.
6273 An rtx for the computed value is returned. The value is never null.
6274 In the case of a void EXP, const0_rtx is returned.
6276 The value may be stored in TARGET if TARGET is nonzero.
6277 TARGET is just a suggestion; callers must assume that
6278 the rtx returned may not be the same as TARGET.
6280 If TARGET is CONST0_RTX, it means that the value will be ignored.
6282 If TMODE is not VOIDmode, it suggests generating the
6283 result in mode TMODE. But this is done only when convenient.
6284 Otherwise, TMODE is ignored and the value generated in its natural mode.
6285 TMODE is just a suggestion; callers must assume that
6286 the rtx returned may not have mode TMODE.
6288 Note that TARGET may have neither TMODE nor MODE. In that case, it
6289 probably will not be used.
6291 If MODIFIER is EXPAND_SUM then when EXP is an addition
6292 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6293 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6294 products as above, or REG or MEM, or constant.
6295 Ordinarily in such cases we would output mul or add instructions
6296 and then return a pseudo reg containing the sum.
6298 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6299 it also marks a label as absolutely required (it can't be dead).
6300 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6301 This is used for outputting expressions used in initializers.
6303 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6304 with a constant address even if that address is not normally legitimate.
6305 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6307 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6308 a call parameter. Such targets require special care as we haven't yet
6309 marked TARGET so that it's safe from being trashed by libcalls. We
6310 don't want to use TARGET for anything but the final result;
6311 intermediate values must go elsewhere. Additionally, calls to
6312 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
6314 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
6315 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
6316 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
6317 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
6318 recursively. */
6320 static rtx expand_expr_real_1 (tree, rtx, enum machine_mode,
6321 enum expand_modifier, rtx *);
6324 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
6325 enum expand_modifier modifier, rtx *alt_rtl)
6327 int rn = -1;
6328 rtx ret, last = NULL;
6330 /* Handle ERROR_MARK before anybody tries to access its type. */
6331 if (TREE_CODE (exp) == ERROR_MARK
6332 || TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK)
6334 ret = CONST0_RTX (tmode);
6335 return ret ? ret : const0_rtx;
6338 if (flag_non_call_exceptions)
6340 rn = lookup_stmt_eh_region (exp);
6341 /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't throw. */
6342 if (rn >= 0)
6343 last = get_last_insn ();
6346 /* If this is an expression of some kind and it has an associated line
6347 number, then emit the line number before expanding the expression.
6349 We need to save and restore the file and line information so that
6350 errors discovered during expansion are emitted with the right
6351 information. It would be better if the diagnostic routines
6352 used the file/line information embedded in the tree nodes rather
6353 than globals. */
6354 if (cfun && EXPR_HAS_LOCATION (exp))
6356 location_t saved_location = input_location;
6357 input_location = EXPR_LOCATION (exp);
6358 emit_line_note (input_location);
6360 /* Record where the insns produced belong. */
6361 record_block_change (TREE_BLOCK (exp));
6363 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6365 input_location = saved_location;
6367 else
6369 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6372 /* If using non-call exceptions, mark all insns that may trap.
6373 expand_call() will mark CALL_INSNs before we get to this code,
6374 but it doesn't handle libcalls, and these may trap. */
6375 if (rn >= 0)
6377 rtx insn;
6378 for (insn = next_real_insn (last); insn;
6379 insn = next_real_insn (insn))
6381 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
6382 /* If we want exceptions for non-call insns, any
6383 may_trap_p instruction may throw. */
6384 && GET_CODE (PATTERN (insn)) != CLOBBER
6385 && GET_CODE (PATTERN (insn)) != USE
6386 && (CALL_P (insn) || may_trap_p (PATTERN (insn))))
6388 REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (rn),
6389 REG_NOTES (insn));
6394 return ret;
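/* Illustrative sketch (not part of the original expr.c): most code
   reaches the expander through the expand_expr wrapper, asking for the
   value of an expression in whatever register and mode are convenient.
   The helper name is hypothetical.  */
#if 0
static rtx
example_expand_rhs (tree rhs)
{
  /* A NULL_RTX target and VOIDmode leave the choice of result register
     and mode entirely to the expander.  */
  return expand_expr (rhs, NULL_RTX, VOIDmode, EXPAND_NORMAL);
}
#endif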
6397 static rtx
6398 expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
6399 enum expand_modifier modifier, rtx *alt_rtl)
6401 rtx op0, op1, temp;
6402 tree type = TREE_TYPE (exp);
6403 int unsignedp;
6404 enum machine_mode mode;
6405 enum tree_code code = TREE_CODE (exp);
6406 optab this_optab;
6407 rtx subtarget, original_target;
6408 int ignore;
6409 tree context;
6410 bool reduce_bit_field = false;
6411 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field && !ignore \
6412 ? reduce_to_bit_field_precision ((expr), \
6413 target, \
6414 type) \
6415 : (expr))
6417 mode = TYPE_MODE (type);
6418 unsignedp = TYPE_UNSIGNED (type);
6419 if (lang_hooks.reduce_bit_field_operations
6420 && TREE_CODE (type) == INTEGER_TYPE
6421 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type))
6423 /* An operation in what may be a bit-field type needs the
6424 result to be reduced to the precision of the bit-field type,
6425 which is narrower than that of the type's mode. */
6426 reduce_bit_field = true;
6427 if (modifier == EXPAND_STACK_PARM)
6428 target = 0;
6431 /* Use subtarget as the target for operand 0 of a binary operation. */
6432 subtarget = get_subtarget (target);
6433 original_target = target;
6434 ignore = (target == const0_rtx
6435 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6436 || code == CONVERT_EXPR || code == COND_EXPR
6437 || code == VIEW_CONVERT_EXPR)
6438 && TREE_CODE (type) == VOID_TYPE));
6440 /* If we are going to ignore this result, we need only do something
6441 if there is a side-effect somewhere in the expression. If there
6442 is, short-circuit the most common cases here. Note that we must
6443 not call expand_expr with anything but const0_rtx in case this
6444 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6446 if (ignore)
6448 if (! TREE_SIDE_EFFECTS (exp))
6449 return const0_rtx;
6451 /* Ensure we reference a volatile object even if value is ignored, but
6452 don't do this if all we are doing is taking its address. */
6453 if (TREE_THIS_VOLATILE (exp)
6454 && TREE_CODE (exp) != FUNCTION_DECL
6455 && mode != VOIDmode && mode != BLKmode
6456 && modifier != EXPAND_CONST_ADDRESS)
6458 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6459 if (MEM_P (temp))
6460 temp = copy_to_reg (temp);
6461 return const0_rtx;
6464 if (TREE_CODE_CLASS (code) == tcc_unary
6465 || code == COMPONENT_REF || code == INDIRECT_REF)
6466 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6467 modifier);
6469 else if (TREE_CODE_CLASS (code) == tcc_binary
6470 || TREE_CODE_CLASS (code) == tcc_comparison
6471 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6473 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6474 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6475 return const0_rtx;
6477 else if (code == BIT_FIELD_REF)
6479 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6480 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6481 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6482 return const0_rtx;
6485 target = 0;
6488 /* If we will do cse, generate all results into pseudo registers
6489 since 1) that allows cse to find more things
6490 and 2) otherwise cse could produce an insn the machine
6491 cannot support. An exception is a CONSTRUCTOR into a multi-word
6492 MEM: that's much more likely to be most efficient into the MEM.
6493 Another is a CALL_EXPR which must return in memory. */
6495 if (! cse_not_expected && mode != BLKmode && target
6496 && (!REG_P (target) || REGNO (target) < FIRST_PSEUDO_REGISTER)
6497 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
6498 && ! (code == CALL_EXPR && aggregate_value_p (exp, exp)))
6499 target = 0;
6501 switch (code)
6503 case LABEL_DECL:
6505 tree function = decl_function_context (exp);
6507 temp = label_rtx (exp);
6508 temp = gen_rtx_LABEL_REF (Pmode, temp);
6510 if (function != current_function_decl
6511 && function != 0)
6512 LABEL_REF_NONLOCAL_P (temp) = 1;
6514 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
6515 return temp;
6518 case SSA_NAME:
6519 return expand_expr_real_1 (SSA_NAME_VAR (exp), target, tmode, modifier,
6520 NULL);
6522 case PARM_DECL:
6523 case VAR_DECL:
6524 /* If a static var's type was incomplete when the decl was written,
6525 but the type is complete now, lay out the decl now. */
6526 if (DECL_SIZE (exp) == 0
6527 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
6528 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6529 layout_decl (exp, 0);
6531 /* ... fall through ... */
6533 case FUNCTION_DECL:
6534 case RESULT_DECL:
6535 gcc_assert (DECL_RTL (exp));
6537 /* Ensure the variable is marked as used even if it doesn't go through
6538 a parser. If it hasn't been used yet, write out an external
6539 definition. */
6540 if (! TREE_USED (exp))
6542 assemble_external (exp);
6543 TREE_USED (exp) = 1;
6546 /* Show we haven't gotten RTL for this yet. */
6547 temp = 0;
6549 /* Variables inherited from containing functions should have
6550 been lowered by this point. */
6551 context = decl_function_context (exp);
6552 gcc_assert (!context
6553 || context == current_function_decl
6554 || TREE_STATIC (exp)
6555 /* ??? C++ creates functions that are not TREE_STATIC. */
6556 || TREE_CODE (exp) == FUNCTION_DECL);
6558 /* This is the case of an array whose size is to be determined
6559 from its initializer, while the initializer is still being parsed.
6560 See expand_decl. */
6562 if (MEM_P (DECL_RTL (exp))
6563 && REG_P (XEXP (DECL_RTL (exp), 0)))
6564 temp = validize_mem (DECL_RTL (exp));
6566 /* If DECL_RTL is memory, we are in the normal case; if either
6567 the address is not valid, or it is not a register and -fforce-addr
6568 is specified, get the address into a register. */
6570 else if (MEM_P (DECL_RTL (exp))
6571 && modifier != EXPAND_CONST_ADDRESS
6572 && modifier != EXPAND_SUM
6573 && modifier != EXPAND_INITIALIZER
6574 && (! memory_address_p (DECL_MODE (exp),
6575 XEXP (DECL_RTL (exp), 0))
6576 || (flag_force_addr
6577 && !REG_P (XEXP (DECL_RTL (exp), 0)))))
6579 if (alt_rtl)
6580 *alt_rtl = DECL_RTL (exp);
6581 temp = replace_equiv_address (DECL_RTL (exp),
6582 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6585 /* If we got something, return it. But first, set the alignment
6586 if the address is a register. */
6587 if (temp != 0)
6589 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
6590 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6592 return temp;
6595 /* If the mode of DECL_RTL does not match that of the decl, it
6596 must be a promoted value. We return a SUBREG of the wanted mode,
6597 but mark it so that we know that it was already extended. */
6599 if (REG_P (DECL_RTL (exp))
6600 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6602 enum machine_mode pmode;
6604 /* Get the signedness used for this variable. Ensure we get the
6605 same mode we got when the variable was declared. */
6606 pmode = promote_mode (type, DECL_MODE (exp), &unsignedp,
6607 (TREE_CODE (exp) == RESULT_DECL ? 1 : 0));
6608 gcc_assert (GET_MODE (DECL_RTL (exp)) == pmode);
6610 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6611 SUBREG_PROMOTED_VAR_P (temp) = 1;
6612 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6613 return temp;
6616 return DECL_RTL (exp);
6618 case INTEGER_CST:
6619 temp = immed_double_const (TREE_INT_CST_LOW (exp),
6620 TREE_INT_CST_HIGH (exp), mode);
6622 /* ??? If overflow is set, fold will have done an incomplete job,
6623 which can result in (plus xx (const_int 0)), which can get
6624 simplified by validate_replace_rtx during virtual register
6625 instantiation, which can result in unrecognizable insns.
6626 Avoid this by forcing all overflows into registers. */
6627 if (TREE_CONSTANT_OVERFLOW (exp)
6628 && modifier != EXPAND_INITIALIZER)
6629 temp = force_reg (mode, temp);
6631 return temp;
6633 case VECTOR_CST:
6634 if (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (exp))) == MODE_VECTOR_INT
6635 || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (exp))) == MODE_VECTOR_FLOAT)
6636 return const_vector_from_tree (exp);
6637 else
6638 return expand_expr (build1 (CONSTRUCTOR, TREE_TYPE (exp),
6639 TREE_VECTOR_CST_ELTS (exp)),
6640 ignore ? const0_rtx : target, tmode, modifier);
6642 case CONST_DECL:
6643 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
6645 case REAL_CST:
6646 /* If optimized, generate immediate CONST_DOUBLE
6647 which will be turned into memory by reload if necessary.
6649 We used to force a register so that loop.c could see it. But
6650 this does not allow gen_* patterns to perform optimizations with
6651 the constants. It also produces two insns in cases like "x = 1.0;".
6652 On most machines, floating-point constants are not permitted in
6653 many insns, so we'd end up copying it to a register in any case.
6655 Now, we do the copying in expand_binop, if appropriate. */
6656 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6657 TYPE_MODE (TREE_TYPE (exp)));
6659 case COMPLEX_CST:
6660 /* Handle evaluating a complex constant in a CONCAT target. */
6661 if (original_target && GET_CODE (original_target) == CONCAT)
6663 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6664 rtx rtarg, itarg;
6666 rtarg = XEXP (original_target, 0);
6667 itarg = XEXP (original_target, 1);
6669 /* Move the real and imaginary parts separately. */
6670 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, 0);
6671 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, 0);
6673 if (op0 != rtarg)
6674 emit_move_insn (rtarg, op0);
6675 if (op1 != itarg)
6676 emit_move_insn (itarg, op1);
6678 return original_target;
6681 /* ... fall through ... */
6683 case STRING_CST:
6684 temp = output_constant_def (exp, 1);
6686 /* temp contains a constant address.
6687 On RISC machines where a constant address isn't valid,
6688 make some insns to get that address into a register. */
6689 if (modifier != EXPAND_CONST_ADDRESS
6690 && modifier != EXPAND_INITIALIZER
6691 && modifier != EXPAND_SUM
6692 && (! memory_address_p (mode, XEXP (temp, 0))
6693 || flag_force_addr))
6694 return replace_equiv_address (temp,
6695 copy_rtx (XEXP (temp, 0)));
6696 return temp;
6698 case SAVE_EXPR:
6700 tree val = TREE_OPERAND (exp, 0);
6701 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);
6703 if (!SAVE_EXPR_RESOLVED_P (exp))
6705 /* We can indeed still hit this case, typically via builtin
6706 expanders calling save_expr immediately before expanding
6707 something. Assume this means that we only have to deal
6708 with non-BLKmode values. */
6709 gcc_assert (GET_MODE (ret) != BLKmode);
6711 val = build_decl (VAR_DECL, NULL, TREE_TYPE (exp));
6712 DECL_ARTIFICIAL (val) = 1;
6713 DECL_IGNORED_P (val) = 1;
6714 TREE_OPERAND (exp, 0) = val;
6715 SAVE_EXPR_RESOLVED_P (exp) = 1;
6717 if (!CONSTANT_P (ret))
6718 ret = copy_to_reg (ret);
6719 SET_DECL_RTL (val, ret);
6722 return ret;
6725 case GOTO_EXPR:
6726 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6727 expand_goto (TREE_OPERAND (exp, 0));
6728 else
6729 expand_computed_goto (TREE_OPERAND (exp, 0));
6730 return const0_rtx;
6732 case CONSTRUCTOR:
6733 /* If we don't need the result, just ensure we evaluate any
6734 subexpressions. */
6735 if (ignore)
6737 tree elt;
6739 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6740 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
6742 return const0_rtx;
6745 /* All elts simple constants => refer to a constant in memory. But
6746 if this is a non-BLKmode mode, let it store a field at a time
6747 since that should make a CONST_INT or CONST_DOUBLE when we
6748 fold. Likewise, if we have a target we can use, it is best to
6749 store directly into the target unless the type is large enough
6750 that memcpy will be used. If we are making an initializer and
6751 all operands are constant, put it in memory as well.
6753 FIXME: Avoid trying to fill vector constructors piece-meal.
6754 Output them with output_constant_def below unless we're sure
6755 they're zeros. This should go away when vector initializers
6756 are treated like VECTOR_CST instead of arrays. */
6758 else if ((TREE_STATIC (exp)
6759 && ((mode == BLKmode
6760 && ! (target != 0 && safe_from_p (target, exp, 1)))
6761 || TREE_ADDRESSABLE (exp)
6762 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6763 && (! MOVE_BY_PIECES_P
6764 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6765 TYPE_ALIGN (type)))
6766 && ! mostly_zeros_p (exp))))
6767 || ((modifier == EXPAND_INITIALIZER
6768 || modifier == EXPAND_CONST_ADDRESS)
6769 && TREE_CONSTANT (exp)))
6771 rtx constructor = output_constant_def (exp, 1);
6773 if (modifier != EXPAND_CONST_ADDRESS
6774 && modifier != EXPAND_INITIALIZER
6775 && modifier != EXPAND_SUM)
6776 constructor = validize_mem (constructor);
6778 return constructor;
6780 else
6782 /* Handle calls that pass values in multiple non-contiguous
6783 locations. The Irix 6 ABI has examples of this. */
6784 if (target == 0 || ! safe_from_p (target, exp, 1)
6785 || GET_CODE (target) == PARALLEL
6786 || modifier == EXPAND_STACK_PARM)
6787 target
6788 = assign_temp (build_qualified_type (type,
6789 (TYPE_QUALS (type)
6790 | (TREE_READONLY (exp)
6791 * TYPE_QUAL_CONST))),
6792 0, TREE_ADDRESSABLE (exp), 1);
6794 store_constructor (exp, target, 0, int_expr_size (exp));
6795 return target;
6798 case MISALIGNED_INDIRECT_REF:
6799 case ALIGN_INDIRECT_REF:
6800 case INDIRECT_REF:
6802 tree exp1 = TREE_OPERAND (exp, 0);
6803 tree orig;
6805 if (modifier != EXPAND_WRITE)
6807 tree t;
6809 t = fold_read_from_constant_string (exp);
6810 if (t)
6811 return expand_expr (t, target, tmode, modifier);
6814 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6815 op0 = memory_address (mode, op0);
6817 if (code == ALIGN_INDIRECT_REF)
6819 int align = TYPE_ALIGN_UNIT (type);
6820 op0 = gen_rtx_AND (Pmode, op0, GEN_INT (-align));
6821 op0 = memory_address (mode, op0);
6824 temp = gen_rtx_MEM (mode, op0);
6826 orig = REF_ORIGINAL (exp);
6827 if (!orig)
6828 orig = exp;
6829 set_mem_attributes (temp, orig, 0);
6831 /* Resolve the misalignment now, so that we don't have to remember
6832 to resolve it later. Of course, this only works for reads. */
6833 /* ??? When we get around to supporting writes, we'll have to handle
6834 this in store_expr directly. The vectorizer isn't generating
6835 those yet, however. */
6836 if (code == MISALIGNED_INDIRECT_REF)
6838 int icode;
6839 rtx reg, insn;
6841 gcc_assert (modifier == EXPAND_NORMAL);
6843 /* The vectorizer should have already checked the mode. */
6844 icode = movmisalign_optab->handlers[mode].insn_code;
6845 gcc_assert (icode != CODE_FOR_nothing);
6847 /* We've already validated the memory, and we're creating a
6848 new pseudo destination. The predicates really can't fail. */
6849 reg = gen_reg_rtx (mode);
6851 /* Nor can the insn generator. */
6852 insn = GEN_FCN (icode) (reg, temp);
6853 emit_insn (insn);
6855 return reg;
6858 return temp;
6861 case ARRAY_REF:
6864 tree array = TREE_OPERAND (exp, 0);
6865 tree index = TREE_OPERAND (exp, 1);
6867 /* Fold an expression like: "foo"[2].
6868 This is not done in fold so it won't happen inside &.
6869 Don't fold if this is for wide characters since it's too
6870 difficult to do correctly and this is a very rare case. */
6872 if (modifier != EXPAND_CONST_ADDRESS
6873 && modifier != EXPAND_INITIALIZER
6874 && modifier != EXPAND_MEMORY)
6876 tree t = fold_read_from_constant_string (exp);
6878 if (t)
6879 return expand_expr (t, target, tmode, modifier);
6882 /* If this is a constant index into a constant array,
6883 just get the value from the array. Handle both the cases when
6884 we have an explicit constructor and when our operand is a variable
6885 that was declared const. */
6887 if (modifier != EXPAND_CONST_ADDRESS
6888 && modifier != EXPAND_INITIALIZER
6889 && modifier != EXPAND_MEMORY
6890 && TREE_CODE (array) == CONSTRUCTOR
6891 && ! TREE_SIDE_EFFECTS (array)
6892 && TREE_CODE (index) == INTEGER_CST)
6894 tree elem;
6896 for (elem = CONSTRUCTOR_ELTS (array);
6897 (elem && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6898 elem = TREE_CHAIN (elem))
6901 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
6902 return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
6903 modifier);
6906 else if (optimize >= 1
6907 && modifier != EXPAND_CONST_ADDRESS
6908 && modifier != EXPAND_INITIALIZER
6909 && modifier != EXPAND_MEMORY
6910 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6911 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6912 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
6913 && targetm.binds_local_p (array))
6915 if (TREE_CODE (index) == INTEGER_CST)
6917 tree init = DECL_INITIAL (array);
6919 if (TREE_CODE (init) == CONSTRUCTOR)
6921 tree elem;
6923 for (elem = CONSTRUCTOR_ELTS (init);
6924 (elem
6925 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6926 elem = TREE_CHAIN (elem))
6929 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
6930 return expand_expr (fold (TREE_VALUE (elem)), target,
6931 tmode, modifier);
6933 else if (TREE_CODE (init) == STRING_CST
6934 && 0 > compare_tree_int (index,
6935 TREE_STRING_LENGTH (init)))
6937 tree type = TREE_TYPE (TREE_TYPE (init));
6938 enum machine_mode mode = TYPE_MODE (type);
6940 if (GET_MODE_CLASS (mode) == MODE_INT
6941 && GET_MODE_SIZE (mode) == 1)
6942 return gen_int_mode (TREE_STRING_POINTER (init)
6943 [TREE_INT_CST_LOW (index)], mode);
6948 goto normal_inner_ref;
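/* Illustrative worked example (not part of the original expr.c): for a
   constant index into a constant array, such as "foo"[2] or a read from
   a const char array whose DECL_INITIAL is a STRING_CST, the code above
   returns the element directly, e.g. (const_int 111) for 'o' in QImode,
   and never reaches normal_inner_ref.  */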
6950 case COMPONENT_REF:
6951 /* If the operand is a CONSTRUCTOR, we can just extract the
6952 appropriate field if it is present. */
6953 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
6955 tree elt;
6957 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6958 elt = TREE_CHAIN (elt))
6959 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6960 /* We can normally use the value of the field in the
6961 CONSTRUCTOR. However, if this is a bitfield in
6962 an integral mode that we can fit in a HOST_WIDE_INT,
6963 we must mask only the number of bits in the bitfield,
6964 since this is done implicitly by the constructor. If
6965 the bitfield does not meet either of those conditions,
6966 we can't do this optimization. */
6967 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6968 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6969 == MODE_INT)
6970 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6971 <= HOST_BITS_PER_WIDE_INT))))
6973 if (DECL_BIT_FIELD (TREE_PURPOSE (elt))
6974 && modifier == EXPAND_STACK_PARM)
6975 target = 0;
6976 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6977 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6979 HOST_WIDE_INT bitsize
6980 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6981 enum machine_mode imode
6982 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6984 if (TYPE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6986 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6987 op0 = expand_and (imode, op0, op1, target);
6989 else
6991 tree count
6992 = build_int_cst (NULL_TREE,
6993 GET_MODE_BITSIZE (imode) - bitsize);
6995 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6996 target, 0);
6997 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6998 target, 0);
7002 return op0;
7005 goto normal_inner_ref;
7007 case BIT_FIELD_REF:
7008 case ARRAY_RANGE_REF:
7009 normal_inner_ref:
7011 enum machine_mode mode1;
7012 HOST_WIDE_INT bitsize, bitpos;
7013 tree offset;
7014 int volatilep = 0;
7015 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7016 &mode1, &unsignedp, &volatilep, true);
7017 rtx orig_op0;
7019 /* If we got back the original object, something is wrong. Perhaps
7020 we are evaluating an expression too early. In any event, don't
7021 infinitely recurse. */
7022 gcc_assert (tem != exp);
7024 /* If TEM's type is a union of variable size, pass TARGET to the inner
7025 computation, since it will need a temporary and TARGET is known
7026 to suffice. This occurs in unchecked conversion in Ada. */
7028 orig_op0 = op0
7029 = expand_expr (tem,
7030 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7031 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7032 != INTEGER_CST)
7033 && modifier != EXPAND_STACK_PARM
7034 ? target : NULL_RTX),
7035 VOIDmode,
7036 (modifier == EXPAND_INITIALIZER
7037 || modifier == EXPAND_CONST_ADDRESS
7038 || modifier == EXPAND_STACK_PARM)
7039 ? modifier : EXPAND_NORMAL);
7041 /* If this is a constant, put it into a register if it is a
7042 legitimate constant and OFFSET is 0, and into memory if it isn't. */
7043 if (CONSTANT_P (op0))
7045 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7046 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7047 && offset == 0)
7048 op0 = force_reg (mode, op0);
7049 else
7050 op0 = validize_mem (force_const_mem (mode, op0));
7053 /* Otherwise, if this object is not in memory and we either have an
7054 offset or a BLKmode result, put it there. This case can't occur in
7055 C, but can in Ada if we have unchecked conversion of an expression
7056 from a scalar type to an array or record type or for an
7057 ARRAY_RANGE_REF whose type is BLKmode. */
7058 else if (!MEM_P (op0)
7059 && (offset != 0
7060 || (code == ARRAY_RANGE_REF && mode == BLKmode)))
7062 tree nt = build_qualified_type (TREE_TYPE (tem),
7063 (TYPE_QUALS (TREE_TYPE (tem))
7064 | TYPE_QUAL_CONST));
7065 rtx memloc = assign_temp (nt, 1, 1, 1);
7067 emit_move_insn (memloc, op0);
7068 op0 = memloc;
7071 if (offset != 0)
7073 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
7074 EXPAND_SUM);
7076 gcc_assert (MEM_P (op0));
7078 #ifdef POINTERS_EXTEND_UNSIGNED
7079 if (GET_MODE (offset_rtx) != Pmode)
7080 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
7081 #else
7082 if (GET_MODE (offset_rtx) != ptr_mode)
7083 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7084 #endif
7086 if (GET_MODE (op0) == BLKmode
7087 /* A constant address in OP0 can have VOIDmode; we must
7088 not try to call force_reg in that case. */
7089 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7090 && bitsize != 0
7091 && (bitpos % bitsize) == 0
7092 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7093 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7095 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7096 bitpos = 0;
7099 op0 = offset_address (op0, offset_rtx,
7100 highest_pow2_factor (offset));
7103 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7104 record its alignment as BIGGEST_ALIGNMENT. */
7105 if (MEM_P (op0) && bitpos == 0 && offset != 0
7106 && is_aligning_offset (offset, tem))
7107 set_mem_align (op0, BIGGEST_ALIGNMENT);
7109 /* Don't forget about volatility even if this is a bitfield. */
7110 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
7112 if (op0 == orig_op0)
7113 op0 = copy_rtx (op0);
7115 MEM_VOLATILE_P (op0) = 1;
7118 /* The following code doesn't handle CONCAT.
7119 Assume only bitpos == 0 can be used for CONCAT, due to
7120 one-element arrays having the same mode as their element. */
7121 if (GET_CODE (op0) == CONCAT)
7123 gcc_assert (bitpos == 0
7124 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)));
7125 return op0;
7128 /* In cases where an aligned union has an unaligned object
7129 as a field, we might be extracting a BLKmode value from
7130 an integer-mode (e.g., SImode) object. Handle this case
7131 by doing the extract into an object as wide as the field
7132 (which we know to be the width of a basic mode), then
7133 storing into memory, and changing the mode to BLKmode. */
7134 if (mode1 == VOIDmode
7135 || REG_P (op0) || GET_CODE (op0) == SUBREG
7136 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7137 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7138 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7139 && modifier != EXPAND_CONST_ADDRESS
7140 && modifier != EXPAND_INITIALIZER)
7141 /* If the field isn't aligned enough to fetch as a memref,
7142 fetch it as a bit field. */
7143 || (mode1 != BLKmode
7144 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
7145 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
7146 || (MEM_P (op0)
7147 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
7148 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
7149 && ((modifier == EXPAND_CONST_ADDRESS
7150 || modifier == EXPAND_INITIALIZER)
7151 ? STRICT_ALIGNMENT
7152 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
7153 || (bitpos % BITS_PER_UNIT != 0)))
7154 /* If the type and the field are a constant size and the
7155 size of the type isn't the same size as the bitfield,
7156 we must use bitfield operations. */
7157 || (bitsize >= 0
7158 && TYPE_SIZE (TREE_TYPE (exp))
7159 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
7160 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7161 bitsize)))
7163 enum machine_mode ext_mode = mode;
7165 if (ext_mode == BLKmode
7166 && ! (target != 0 && MEM_P (op0)
7167 && MEM_P (target)
7168 && bitpos % BITS_PER_UNIT == 0))
7169 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7171 if (ext_mode == BLKmode)
7173 if (target == 0)
7174 target = assign_temp (type, 0, 1, 1);
7176 if (bitsize == 0)
7177 return target;
7179 /* In this case, BITPOS must start at a byte boundary and
7180 TARGET, if specified, must be a MEM. */
7181 gcc_assert (MEM_P (op0)
7182 && (!target || MEM_P (target))
7183 && !(bitpos % BITS_PER_UNIT));
7185 emit_block_move (target,
7186 adjust_address (op0, VOIDmode,
7187 bitpos / BITS_PER_UNIT),
7188 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7189 / BITS_PER_UNIT),
7190 (modifier == EXPAND_STACK_PARM
7191 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7193 return target;
7196 op0 = validize_mem (op0);
7198 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
7199 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7201 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7202 (modifier == EXPAND_STACK_PARM
7203 ? NULL_RTX : target),
7204 ext_mode, ext_mode);
7206 /* If the result is a record type and BITSIZE is narrower than
7207 the mode of OP0, an integral mode, and this is a big endian
7208 machine, we must put the field into the high-order bits. */
7209 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7210 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7211 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7212 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7213 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7214 - bitsize),
7215 op0, 1);
7217 /* If the result type is BLKmode, store the data into a temporary
7218 of the appropriate type, but with the mode corresponding to the
7219 mode for the data we have (op0's mode). It's tempting to make
7220 this a constant type, since we know it's only being stored once,
7221 but that can cause problems if we are taking the address of this
7222 COMPONENT_REF because the MEM of any reference via that address
7223 will have flags corresponding to the type, which will not
7224 necessarily be constant. */
7225 if (mode == BLKmode)
7227 rtx new
7228 = assign_stack_temp_for_type
7229 (ext_mode, GET_MODE_BITSIZE (ext_mode), 0, type);
7231 emit_move_insn (new, op0);
7232 op0 = copy_rtx (new);
7233 PUT_MODE (op0, BLKmode);
7234 set_mem_attributes (op0, exp, 1);
7237 return op0;
7240 /* If the result is BLKmode, use that to access the object
7241 now as well. */
7242 if (mode == BLKmode)
7243 mode1 = BLKmode;
7245 /* Get a reference to just this component. */
7246 if (modifier == EXPAND_CONST_ADDRESS
7247 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7248 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7249 else
7250 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7252 if (op0 == orig_op0)
7253 op0 = copy_rtx (op0);
7255 set_mem_attributes (op0, exp, 0);
7256 if (REG_P (XEXP (op0, 0)))
7257 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7259 MEM_VOLATILE_P (op0) |= volatilep;
7260 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7261 || modifier == EXPAND_CONST_ADDRESS
7262 || modifier == EXPAND_INITIALIZER)
7263 return op0;
7264 else if (target == 0)
7265 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7267 convert_move (target, op0, unsignedp);
7268 return target;
7271 case OBJ_TYPE_REF:
7272 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
7274 case CALL_EXPR:
7275 /* Check for a built-in function. */
7276 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7277 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7278 == FUNCTION_DECL)
7279 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7281 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7282 == BUILT_IN_FRONTEND)
7283 return lang_hooks.expand_expr (exp, original_target,
7284 tmode, modifier,
7285 alt_rtl);
7286 else
7287 return expand_builtin (exp, target, subtarget, tmode, ignore);
7290 return expand_call (exp, target, ignore);
7292 case NON_LVALUE_EXPR:
7293 case NOP_EXPR:
7294 case CONVERT_EXPR:
7295 if (TREE_OPERAND (exp, 0) == error_mark_node)
7296 return const0_rtx;
7298 if (TREE_CODE (type) == UNION_TYPE)
7300 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7302 /* If both input and output are BLKmode, this conversion isn't doing
7303 anything except possibly changing memory attributes. */
7304 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7306 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7307 modifier);
7309 result = copy_rtx (result);
7310 set_mem_attributes (result, exp, 0);
7311 return result;
7314 if (target == 0)
7316 if (TYPE_MODE (type) != BLKmode)
7317 target = gen_reg_rtx (TYPE_MODE (type));
7318 else
7319 target = assign_temp (type, 0, 1, 1);
7322 if (MEM_P (target))
7323 /* Store data into beginning of memory target. */
7324 store_expr (TREE_OPERAND (exp, 0),
7325 adjust_address (target, TYPE_MODE (valtype), 0),
7326 modifier == EXPAND_STACK_PARM);
7328 else
7330 gcc_assert (REG_P (target));
7332 /* Store this field into a union of the proper type. */
7333 store_field (target,
7334 MIN ((int_size_in_bytes (TREE_TYPE
7335 (TREE_OPERAND (exp, 0)))
7336 * BITS_PER_UNIT),
7337 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7338 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7339 type, 0);
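/* For instance, when an int operand is converted to a union type wide enough
   to hold it and TARGET is a register, the int has just been stored by
   store_field into the first bits of TARGET, and TARGET as a whole becomes
   the value of the conversion.  */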
7342 /* Return the entire union. */
7343 return target;
7346 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7348 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7349 modifier);
7351 /* If the signedness of the conversion differs and OP0 is
7352 a promoted SUBREG, clear that indication since we now
7353 have to do the proper extension. */
7354 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7355 && GET_CODE (op0) == SUBREG)
7356 SUBREG_PROMOTED_VAR_P (op0) = 0;
7358 return REDUCE_BIT_FIELD (op0);
7361 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7362 if (GET_MODE (op0) == mode)
7363 ;
7365 /* If OP0 is a constant, just convert it into the proper mode. */
7366 else if (CONSTANT_P (op0))
7368 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7369 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7371 if (modifier == EXPAND_INITIALIZER)
7372 op0 = simplify_gen_subreg (mode, op0, inner_mode,
7373 subreg_lowpart_offset (mode,
7374 inner_mode));
7375 else
7376 op0 = convert_modes (mode, inner_mode, op0,
7377 TYPE_UNSIGNED (inner_type));
7380 else if (modifier == EXPAND_INITIALIZER)
7381 op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7383 else if (target == 0)
7384 op0 = convert_to_mode (mode, op0,
7385 TYPE_UNSIGNED (TREE_TYPE
7386 (TREE_OPERAND (exp, 0))));
7387 else
7389 convert_move (target, op0,
7390 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7391 op0 = target;
7394 return REDUCE_BIT_FIELD (op0);
7396 case VIEW_CONVERT_EXPR:
7397 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7399 /* If the input and output modes are both the same, we are done. */
7400 if (TYPE_MODE (type) == GET_MODE (op0))
7401 ;
7402 /* If neither mode is BLKmode, and both modes are the same size
7403 then we can use gen_lowpart. */
7404 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7405 && GET_MODE_SIZE (TYPE_MODE (type))
7406 == GET_MODE_SIZE (GET_MODE (op0)))
7408 if (GET_CODE (op0) == SUBREG)
7409 op0 = force_reg (GET_MODE (op0), op0);
7410 op0 = gen_lowpart (TYPE_MODE (type), op0);
7412 /* If both modes are integral, then we can convert from one to the
7413 other. */
7414 else if (SCALAR_INT_MODE_P (GET_MODE (op0))
7415 && SCALAR_INT_MODE_P (TYPE_MODE (type)))
7416 op0 = convert_modes (TYPE_MODE (type), GET_MODE (op0), op0,
7417 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7418 /* As a last resort, spill op0 to memory, and reload it in a
7419 different mode. */
7420 else if (!MEM_P (op0))
7422 /* If the operand is not a MEM, force it into memory. Since we
7423 are going to be changing the mode of the MEM, don't call
7424 force_const_mem for constants because we don't allow pool
7425 constants to change mode. */
7426 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7428 gcc_assert (!TREE_ADDRESSABLE (exp));
7430 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7431 target
7432 = assign_stack_temp_for_type
7433 (TYPE_MODE (inner_type),
7434 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7436 emit_move_insn (target, op0);
7437 op0 = target;
7440 /* At this point, OP0 is in the correct mode. If the output type is such
7441 that the operand is known to be aligned, indicate that it is.
7442 Otherwise, we need only be concerned about alignment for non-BLKmode
7443 results. */
7444 if (MEM_P (op0))
7446 op0 = copy_rtx (op0);
7448 if (TYPE_ALIGN_OK (type))
7449 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7450 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7451 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7453 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7454 HOST_WIDE_INT temp_size
7455 = MAX (int_size_in_bytes (inner_type),
7456 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7457 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7458 temp_size, 0, type);
7459 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7461 gcc_assert (!TREE_ADDRESSABLE (exp));
7463 if (GET_MODE (op0) == BLKmode)
7464 emit_block_move (new_with_op0_mode, op0,
7465 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
7466 (modifier == EXPAND_STACK_PARM
7467 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7468 else
7469 emit_move_insn (new_with_op0_mode, op0);
7471 op0 = new;
7474 op0 = adjust_address (op0, TYPE_MODE (type), 0);
7477 return op0;
7479 case PLUS_EXPR:
7480 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
7481 something else, make sure we add the register to the constant and
7482 then to the other thing. This case can occur during strength
7483 reduction and doing it this way will produce better code if the
7484 frame pointer or argument pointer is eliminated.
7486 fold-const.c will ensure that the constant is always in the inner
7487 PLUS_EXPR, so the only case we need to do anything about is if
7488 sp, ap, or fp is our second argument, in which case we must swap
7489 the innermost first argument and our second argument. */
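/* For example, (X + 8) + FP is rewritten here as (FP + 8) + X, so that the
   frame (or stack/arg) pointer and the constant sit in the inner PLUS_EXPR
   and can fold into a single address.  */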
7491 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7492 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7493 && TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL
7494 && (DECL_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7495 || DECL_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7496 || DECL_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7498 tree t = TREE_OPERAND (exp, 1);
7500 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7501 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7504 /* If the result is to be ptr_mode and we are adding an integer to
7505 something, we might be forming a constant. So try to use
7506 plus_constant. If it produces a sum and we can't accept it,
7507 use force_operand. This allows P = &ARR[const] to generate
7508 efficient code on machines where a SYMBOL_REF is not a valid
7509 address.
7511 If this is an EXPAND_SUM call, always return the sum. */
7512 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7513 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7515 if (modifier == EXPAND_STACK_PARM)
7516 target = 0;
7517 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7518 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7519 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7521 rtx constant_part;
7523 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7524 EXPAND_SUM);
7525 /* Use immed_double_const to ensure that the constant is
7526 truncated according to the mode of OP1, then sign extended
7527 to a HOST_WIDE_INT. Using the constant directly can result
7528 in non-canonical RTL in a 64x32 cross compile. */
7529 constant_part
7530 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7531 (HOST_WIDE_INT) 0,
7532 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7533 op1 = plus_constant (op1, INTVAL (constant_part));
7534 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7535 op1 = force_operand (op1, target);
7536 return REDUCE_BIT_FIELD (op1);
7539 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7540 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7541 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7543 rtx constant_part;
7545 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7546 (modifier == EXPAND_INITIALIZER
7547 ? EXPAND_INITIALIZER : EXPAND_SUM));
7548 if (! CONSTANT_P (op0))
7550 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7551 VOIDmode, modifier);
7552 /* Return a PLUS if modifier says it's OK. */
7553 if (modifier == EXPAND_SUM
7554 || modifier == EXPAND_INITIALIZER)
7555 return simplify_gen_binary (PLUS, mode, op0, op1);
7556 goto binop2;
7558 /* Use immed_double_const to ensure that the constant is
7559 truncated according to the mode of OP1, then sign extended
7560 to a HOST_WIDE_INT. Using the constant directly can result
7561 in non-canonical RTL in a 64x32 cross compile. */
7562 constant_part
7563 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7564 (HOST_WIDE_INT) 0,
7565 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7566 op0 = plus_constant (op0, INTVAL (constant_part));
7567 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7568 op0 = force_operand (op0, target);
7569 return REDUCE_BIT_FIELD (op0);
7573 /* No sense saving up arithmetic to be done
7574 if it's all in the wrong mode to form part of an address.
7575 And force_operand won't know whether to sign-extend or
7576 zero-extend. */
7577 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7578 || mode != ptr_mode)
7580 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7581 subtarget, &op0, &op1, 0);
7582 if (op0 == const0_rtx)
7583 return op1;
7584 if (op1 == const0_rtx)
7585 return op0;
7586 goto binop2;
7589 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7590 subtarget, &op0, &op1, modifier);
7591 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
7593 case MINUS_EXPR:
7594 /* For initializers, we are allowed to return a MINUS of two
7595 symbolic constants. Here we handle all cases when both operands
7596 are constant. */
7597 /* Handle difference of two symbolic constants,
7598 for the sake of an initializer. */
7599 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7600 && really_constant_p (TREE_OPERAND (exp, 0))
7601 && really_constant_p (TREE_OPERAND (exp, 1)))
7603 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7604 NULL_RTX, &op0, &op1, modifier);
7606 /* If the last operand is a CONST_INT, use plus_constant of
7607 the negated constant. Else make the MINUS. */
7608 if (GET_CODE (op1) == CONST_INT)
7609 return REDUCE_BIT_FIELD (plus_constant (op0, - INTVAL (op1)));
7610 else
7611 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
7614 /* No sense saving up arithmetic to be done
7615 if it's all in the wrong mode to form part of an address.
7616 And force_operand won't know whether to sign-extend or
7617 zero-extend. */
7618 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7619 || mode != ptr_mode)
7620 goto binop;
7622 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7623 subtarget, &op0, &op1, modifier);
7625 /* Convert A - const to A + (-const). */
7626 if (GET_CODE (op1) == CONST_INT)
7628 op1 = negate_rtx (mode, op1);
7629 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
7632 goto binop2;
7634 case MULT_EXPR:
7635 /* If first operand is constant, swap them.
7636 Thus the following special case checks need only
7637 check the second operand. */
7638 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7640 tree t1 = TREE_OPERAND (exp, 0);
7641 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7642 TREE_OPERAND (exp, 1) = t1;
7645 /* Attempt to return something suitable for generating an
7646 indexed address, for machines that support that. */
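/* For example, when the index computation I * 4 for a 4-byte element array
   is expanded with EXPAND_SUM, the code below returns (mult (reg I)
   (const_int 4)), which the caller can combine with the base address into
   an indexed address on targets that support one.  */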
7648 if (modifier == EXPAND_SUM && mode == ptr_mode
7649 && host_integerp (TREE_OPERAND (exp, 1), 0))
7651 tree exp1 = TREE_OPERAND (exp, 1);
7653 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7654 EXPAND_SUM);
7656 if (!REG_P (op0))
7657 op0 = force_operand (op0, NULL_RTX);
7658 if (!REG_P (op0))
7659 op0 = copy_to_mode_reg (mode, op0);
7661 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
7662 gen_int_mode (tree_low_cst (exp1, 0),
7663 TYPE_MODE (TREE_TYPE (exp1)))));
7666 if (modifier == EXPAND_STACK_PARM)
7667 target = 0;
7669 /* Check for multiplying things that have been extended
7670 from a narrower type. If this machine supports multiplying
7671 in that narrower type with a result in the desired type,
7672 do it that way, and avoid the explicit type-conversion. */
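/* For example, if both operands were sign-extended from 16-bit values and
   the target provides a signed widening 16-bit to 32-bit multiply
   (smul_widen_optab), the product is computed directly from the narrow
   operands instead of extending both to 32 bits first.  */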
7673 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7674 && TREE_CODE (type) == INTEGER_TYPE
7675 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7676 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7677 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7678 && int_fits_type_p (TREE_OPERAND (exp, 1),
7679 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7680 /* Don't use a widening multiply if a shift will do. */
7681 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7682 > HOST_BITS_PER_WIDE_INT)
7683 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7684 ||
7685 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7686 && (TYPE_PRECISION (TREE_TYPE
7687 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7688 == TYPE_PRECISION (TREE_TYPE
7689 (TREE_OPERAND
7690 (TREE_OPERAND (exp, 0), 0))))
7691 /* If both operands are extended, they must either both
7692 be zero-extended or both be sign-extended. */
7693 && (TYPE_UNSIGNED (TREE_TYPE
7694 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7695 == TYPE_UNSIGNED (TREE_TYPE
7696 (TREE_OPERAND
7697 (TREE_OPERAND (exp, 0), 0)))))))
7699 tree op0type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
7700 enum machine_mode innermode = TYPE_MODE (op0type);
7701 bool zextend_p = TYPE_UNSIGNED (op0type);
7702 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
7703 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
7705 if (mode == GET_MODE_WIDER_MODE (innermode))
7707 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7709 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7710 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7711 TREE_OPERAND (exp, 1),
7712 NULL_RTX, &op0, &op1, 0);
7713 else
7714 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7715 TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7716 NULL_RTX, &op0, &op1, 0);
7717 goto binop3;
7719 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7720 && innermode == word_mode)
7722 rtx htem, hipart;
7723 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7724 NULL_RTX, VOIDmode, 0);
7725 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7726 op1 = convert_modes (innermode, mode,
7727 expand_expr (TREE_OPERAND (exp, 1),
7728 NULL_RTX, VOIDmode, 0),
7729 unsignedp);
7730 else
7731 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7732 NULL_RTX, VOIDmode, 0);
7733 temp = expand_binop (mode, other_optab, op0, op1, target,
7734 unsignedp, OPTAB_LIB_WIDEN);
7735 hipart = gen_highpart (innermode, temp);
7736 htem = expand_mult_highpart_adjust (innermode, hipart,
7737 op0, op1, hipart,
7738 zextend_p);
7739 if (htem != hipart)
7740 emit_move_insn (hipart, htem);
7741 return REDUCE_BIT_FIELD (temp);
7745 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7746 subtarget, &op0, &op1, 0);
7747 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
7749 case TRUNC_DIV_EXPR:
7750 case FLOOR_DIV_EXPR:
7751 case CEIL_DIV_EXPR:
7752 case ROUND_DIV_EXPR:
7753 case EXACT_DIV_EXPR:
7754 if (modifier == EXPAND_STACK_PARM)
7755 target = 0;
7756 /* Possible optimization: compute the dividend with EXPAND_SUM
7757 then if the divisor is constant can optimize the case
7758 where some terms of the dividend have coeffs divisible by it. */
7759 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7760 subtarget, &op0, &op1, 0);
7761 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7763 case RDIV_EXPR:
7764 /* Emit a/b as a*(1/b). Later we may be able to CSE the reciprocal,
7765 saving an expensive divide. If not, combine will rebuild the original
7766 computation. */
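/* For example, under -funsafe-math-optimizations X/Y and Z/Y both become
   multiplications by the single expression (1.0/Y), which CSE can then
   share, replacing two divisions with one division and two
   multiplications.  */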
7767 if (flag_unsafe_math_optimizations && optimize && !optimize_size
7768 && TREE_CODE (type) == REAL_TYPE
7769 && !real_onep (TREE_OPERAND (exp, 0)))
7770 return expand_expr (build2 (MULT_EXPR, type, TREE_OPERAND (exp, 0),
7771 build2 (RDIV_EXPR, type,
7772 build_real (type, dconst1),
7773 TREE_OPERAND (exp, 1))),
7774 target, tmode, modifier);
7776 goto binop;
7778 case TRUNC_MOD_EXPR:
7779 case FLOOR_MOD_EXPR:
7780 case CEIL_MOD_EXPR:
7781 case ROUND_MOD_EXPR:
7782 if (modifier == EXPAND_STACK_PARM)
7783 target = 0;
7784 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7785 subtarget, &op0, &op1, 0);
7786 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7788 case FIX_ROUND_EXPR:
7789 case FIX_FLOOR_EXPR:
7790 case FIX_CEIL_EXPR:
7791 gcc_unreachable (); /* Not used for C. */
7793 case FIX_TRUNC_EXPR:
7794 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7795 if (target == 0 || modifier == EXPAND_STACK_PARM)
7796 target = gen_reg_rtx (mode);
7797 expand_fix (target, op0, unsignedp);
7798 return target;
7800 case FLOAT_EXPR:
7801 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7802 if (target == 0 || modifier == EXPAND_STACK_PARM)
7803 target = gen_reg_rtx (mode);
7804 /* expand_float can't figure out what to do if FROM has VOIDmode.
7805 So give it the correct mode. With -O, cse will optimize this. */
7806 if (GET_MODE (op0) == VOIDmode)
7807 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7808 op0);
7809 expand_float (target, op0,
7810 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7811 return target;
7813 case NEGATE_EXPR:
7814 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7815 if (modifier == EXPAND_STACK_PARM)
7816 target = 0;
7817 temp = expand_unop (mode,
7818 optab_for_tree_code (NEGATE_EXPR, type),
7819 op0, target, 0);
7820 gcc_assert (temp);
7821 return REDUCE_BIT_FIELD (temp);
7823 case ABS_EXPR:
7824 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7825 if (modifier == EXPAND_STACK_PARM)
7826 target = 0;
7828 /* ABS_EXPR is not valid for complex arguments. */
7829 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7830 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
7832 /* Unsigned abs is simply the operand. Testing here means we don't
7833 risk generating incorrect code below. */
7834 if (TYPE_UNSIGNED (type))
7835 return op0;
7837 return expand_abs (mode, op0, target, unsignedp,
7838 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7840 case MAX_EXPR:
7841 case MIN_EXPR:
7842 target = original_target;
7843 if (target == 0
7844 || modifier == EXPAND_STACK_PARM
7845 || (MEM_P (target) && MEM_VOLATILE_P (target))
7846 || GET_MODE (target) != mode
7847 || (REG_P (target)
7848 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7849 target = gen_reg_rtx (mode);
7850 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7851 target, &op0, &op1, 0);
7853 /* First try to do it with a special MIN or MAX instruction.
7854 If that does not win, use a conditional jump to select the proper
7855 value. */
7856 this_optab = optab_for_tree_code (code, type);
7857 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7858 OPTAB_WIDEN);
7859 if (temp != 0)
7860 return temp;
7862 /* At this point, a MEM target is no longer useful; we will get better
7863 code without it. */
7865 if (! REG_P (target))
7866 target = gen_reg_rtx (mode);
7868 /* If op1 was placed in target, swap op0 and op1. */
7869 if (target != op0 && target == op1)
7871 rtx tem = op0;
7872 op0 = op1;
7873 op1 = tem;
7876 /* We generate better code and avoid problems with op1 mentioning
7877 target by forcing op1 into a pseudo if it isn't a constant. */
7878 if (! CONSTANT_P (op1))
7879 op1 = force_reg (mode, op1);
7881 if (target != op0)
7882 emit_move_insn (target, op0);
7884 op0 = gen_label_rtx ();
7886 /* If this mode is an integer too wide to compare properly,
7887 compare word by word. Rely on cse to optimize constant cases. */
7888 if (GET_MODE_CLASS (mode) == MODE_INT
7889 && ! can_compare_p (GE, mode, ccp_jump))
7891 if (code == MAX_EXPR)
7892 do_jump_by_parts_greater_rtx (mode, unsignedp, target, op1,
7893 NULL_RTX, op0);
7894 else
7895 do_jump_by_parts_greater_rtx (mode, unsignedp, op1, target,
7896 NULL_RTX, op0);
7898 else
7900 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
7901 unsignedp, mode, NULL_RTX, NULL_RTX, op0);
7903 emit_move_insn (target, op1);
7904 emit_label (op0);
7905 return target;
7907 case BIT_NOT_EXPR:
7908 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7909 if (modifier == EXPAND_STACK_PARM)
7910 target = 0;
7911 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7912 gcc_assert (temp);
7913 return temp;
7915 /* ??? Can optimize bitwise operations with one arg constant.
7916 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7917 and (a bitwise1 b) bitwise2 b (etc)
7918 but that is probably not worth while. */
7920 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7921 boolean values when we want in all cases to compute both of them. In
7922 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7923 as actual zero-or-1 values and then bitwise anding. In cases where
7924 there cannot be any side effects, better code would be made by
7925 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7926 how to recognize those cases. */
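/* For example, for (a != 0) & (b != 0) both comparisons are evaluated to
   0-or-1 values and combined with a bitwise AND, whereas a && b would
   branch around the evaluation of b.  */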
7928 case TRUTH_AND_EXPR:
7929 code = BIT_AND_EXPR;
7930 case BIT_AND_EXPR:
7931 goto binop;
7933 case TRUTH_OR_EXPR:
7934 code = BIT_IOR_EXPR;
7935 case BIT_IOR_EXPR:
7936 goto binop;
7938 case TRUTH_XOR_EXPR:
7939 code = BIT_XOR_EXPR;
7940 case BIT_XOR_EXPR:
7941 goto binop;
7943 case LSHIFT_EXPR:
7944 case RSHIFT_EXPR:
7945 case LROTATE_EXPR:
7946 case RROTATE_EXPR:
7947 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7948 subtarget = 0;
7949 if (modifier == EXPAND_STACK_PARM)
7950 target = 0;
7951 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7952 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
7953 unsignedp);
7955 /* Could determine the answer when only additive constants differ. Also,
7956 the addition of one can be handled by changing the condition. */
7957 case LT_EXPR:
7958 case LE_EXPR:
7959 case GT_EXPR:
7960 case GE_EXPR:
7961 case EQ_EXPR:
7962 case NE_EXPR:
7963 case UNORDERED_EXPR:
7964 case ORDERED_EXPR:
7965 case UNLT_EXPR:
7966 case UNLE_EXPR:
7967 case UNGT_EXPR:
7968 case UNGE_EXPR:
7969 case UNEQ_EXPR:
7970 case LTGT_EXPR:
7971 temp = do_store_flag (exp,
7972 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
7973 tmode != VOIDmode ? tmode : mode, 0);
7974 if (temp != 0)
7975 return temp;
7977 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
7978 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
7979 && original_target
7980 && REG_P (original_target)
7981 && (GET_MODE (original_target)
7982 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7984 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
7985 VOIDmode, 0);
7987 /* If temp is constant, we can just compute the result. */
7988 if (GET_CODE (temp) == CONST_INT)
7990 if (INTVAL (temp) != 0)
7991 emit_move_insn (target, const1_rtx);
7992 else
7993 emit_move_insn (target, const0_rtx);
7995 return target;
7998 if (temp != original_target)
8000 enum machine_mode mode1 = GET_MODE (temp);
8001 if (mode1 == VOIDmode)
8002 mode1 = tmode != VOIDmode ? tmode : mode;
8004 temp = copy_to_mode_reg (mode1, temp);
8007 op1 = gen_label_rtx ();
8008 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8009 GET_MODE (temp), unsignedp, op1);
8010 emit_move_insn (temp, const1_rtx);
8011 emit_label (op1);
8012 return temp;
8015 /* If no set-flag instruction, must generate a conditional store
8016 into a temporary variable. Drop through and handle this
8017 like && and ||. */
8019 if (! ignore
8020 && (target == 0
8021 || modifier == EXPAND_STACK_PARM
8022 || ! safe_from_p (target, exp, 1)
8023 /* Make sure we don't have a hard reg (such as function's return
8024 value) live across basic blocks, if not optimizing. */
8025 || (!optimize && REG_P (target)
8026 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8027 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8029 if (target)
8030 emit_move_insn (target, const0_rtx);
8032 op1 = gen_label_rtx ();
8033 jumpifnot (exp, op1);
8035 if (target)
8036 emit_move_insn (target, const1_rtx);
8038 emit_label (op1);
8039 return ignore ? const0_rtx : target;
8041 case TRUTH_NOT_EXPR:
8042 if (modifier == EXPAND_STACK_PARM)
8043 target = 0;
8044 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8045 /* The parser is careful to generate TRUTH_NOT_EXPR
8046 only with operands that are always zero or one. */
8047 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8048 target, 1, OPTAB_LIB_WIDEN);
8049 gcc_assert (temp);
8050 return temp;
8052 case STATEMENT_LIST:
8054 tree_stmt_iterator iter;
8056 gcc_assert (ignore);
8058 for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
8059 expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
8061 return const0_rtx;
8063 case COND_EXPR:
8064 /* A COND_EXPR with its type being VOID_TYPE represents a
8065 conditional jump and is handled in
8066 expand_gimple_cond_expr. */
8067 gcc_assert (!VOID_TYPE_P (TREE_TYPE (exp)));
8069 /* Note that COND_EXPRs whose type is a structure or union
8070 are required to be constructed to contain assignments of
8071 a temporary variable, so that we can evaluate them here
8072 for side effect only. If type is void, we must do likewise. */
8074 gcc_assert (!TREE_ADDRESSABLE (type)
8075 && !ignore
8076 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node
8077 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node);
8079 /* If we are not to produce a result, we have no target. Otherwise,
8080 if a target was specified use it; it will not be used as an
8081 intermediate target unless it is safe. If no target, use a
8082 temporary. */
8084 if (modifier != EXPAND_STACK_PARM
8085 && original_target
8086 && safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8087 && GET_MODE (original_target) == mode
8088 #ifdef HAVE_conditional_move
8089 && (! can_conditionally_move_p (mode)
8090 || REG_P (original_target))
8091 #endif
8092 && !MEM_P (original_target))
8093 temp = original_target;
8094 else
8095 temp = assign_temp (type, 0, 0, 1);
8097 do_pending_stack_adjust ();
8098 NO_DEFER_POP;
8099 op0 = gen_label_rtx ();
8100 op1 = gen_label_rtx ();
8101 jumpifnot (TREE_OPERAND (exp, 0), op0);
8102 store_expr (TREE_OPERAND (exp, 1), temp,
8103 modifier == EXPAND_STACK_PARM);
8105 emit_jump_insn (gen_jump (op1));
8106 emit_barrier ();
8107 emit_label (op0);
8108 store_expr (TREE_OPERAND (exp, 2), temp,
8109 modifier == EXPAND_STACK_PARM);
8111 emit_label (op1);
8112 OK_DEFER_POP;
8113 return temp;
8115 case VEC_COND_EXPR:
8116 target = expand_vec_cond_expr (exp, target);
8117 return target;
8119 case MODIFY_EXPR:
8121 tree lhs = TREE_OPERAND (exp, 0);
8122 tree rhs = TREE_OPERAND (exp, 1);
8124 gcc_assert (ignore);
8126 /* Check for |= or &= of a bitfield of size one into another bitfield
8127 of size 1. In this case, (unless we need the result of the
8128 assignment) we can do this more efficiently with a
8129 test followed by an assignment, if necessary.
8131 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8132 things change so we do, this code should be enhanced to
8133 support it. */
8134 if (TREE_CODE (lhs) == COMPONENT_REF
8135 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8136 || TREE_CODE (rhs) == BIT_AND_EXPR)
8137 && TREE_OPERAND (rhs, 0) == lhs
8138 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8139 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8140 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8142 rtx label = gen_label_rtx ();
8144 do_jump (TREE_OPERAND (rhs, 1),
8145 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8146 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8147 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8148 (TREE_CODE (rhs) == BIT_IOR_EXPR
8149 ? integer_one_node
8150 : integer_zero_node)));
8151 do_pending_stack_adjust ();
8152 emit_label (label);
8153 return const0_rtx;
8156 expand_assignment (lhs, rhs);
8158 return const0_rtx;
8161 case RETURN_EXPR:
8162 if (!TREE_OPERAND (exp, 0))
8163 expand_null_return ();
8164 else
8165 expand_return (TREE_OPERAND (exp, 0));
8166 return const0_rtx;
8168 case ADDR_EXPR:
8169 return expand_expr_addr_expr (exp, target, tmode, modifier);
8171 case COMPLEX_EXPR:
8172 /* Get the rtx code of the operands. */
8173 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8174 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8176 if (!target)
8177 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8179 /* Move the real (op0) and imaginary (op1) parts to their location. */
8180 write_complex_part (target, op0, false);
8181 write_complex_part (target, op1, true);
8183 return target;
8185 case REALPART_EXPR:
8186 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8187 return read_complex_part (op0, false);
8189 case IMAGPART_EXPR:
8190 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8191 return read_complex_part (op0, true);
8193 case RESX_EXPR:
8194 expand_resx_expr (exp);
8195 return const0_rtx;
8197 case TRY_CATCH_EXPR:
8198 case CATCH_EXPR:
8199 case EH_FILTER_EXPR:
8200 case TRY_FINALLY_EXPR:
8201 /* Lowered by tree-eh.c. */
8202 gcc_unreachable ();
8204 case WITH_CLEANUP_EXPR:
8205 case CLEANUP_POINT_EXPR:
8206 case TARGET_EXPR:
8207 case CASE_LABEL_EXPR:
8208 case VA_ARG_EXPR:
8209 case BIND_EXPR:
8210 case INIT_EXPR:
8211 case CONJ_EXPR:
8212 case COMPOUND_EXPR:
8213 case PREINCREMENT_EXPR:
8214 case PREDECREMENT_EXPR:
8215 case POSTINCREMENT_EXPR:
8216 case POSTDECREMENT_EXPR:
8217 case LOOP_EXPR:
8218 case EXIT_EXPR:
8219 case TRUTH_ANDIF_EXPR:
8220 case TRUTH_ORIF_EXPR:
8221 /* Lowered by gimplify.c. */
8222 gcc_unreachable ();
8224 case EXC_PTR_EXPR:
8225 return get_exception_pointer (cfun);
8227 case FILTER_EXPR:
8228 return get_exception_filter (cfun);
8230 case FDESC_EXPR:
8231 /* Function descriptors are not valid except for as
8232 initialization constants, and should not be expanded. */
8233 gcc_unreachable ();
8235 case SWITCH_EXPR:
8236 expand_case (exp);
8237 return const0_rtx;
8239 case LABEL_EXPR:
8240 expand_label (TREE_OPERAND (exp, 0));
8241 return const0_rtx;
8243 case ASM_EXPR:
8244 expand_asm_expr (exp);
8245 return const0_rtx;
8247 case WITH_SIZE_EXPR:
8248 /* WITH_SIZE_EXPR expands to its first argument. The caller should
8249 have pulled out the size to use in whatever context it needed. */
8250 return expand_expr_real (TREE_OPERAND (exp, 0), original_target, tmode,
8251 modifier, alt_rtl);
8253 case REALIGN_LOAD_EXPR:
8255 tree oprnd0 = TREE_OPERAND (exp, 0);
8256 tree oprnd1 = TREE_OPERAND (exp, 1);
8257 tree oprnd2 = TREE_OPERAND (exp, 2);
8258 rtx op2;
8260 this_optab = optab_for_tree_code (code, type);
8261 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, 0);
8262 op2 = expand_expr (oprnd2, NULL_RTX, VOIDmode, 0);
8263 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
8264 target, unsignedp);
8265 gcc_assert (temp);
8267 return temp;
8271 default:
8272 return lang_hooks.expand_expr (exp, original_target, tmode,
8273 modifier, alt_rtl);
8276 /* Here to do an ordinary binary operator. */
8277 binop:
8278 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8279 subtarget, &op0, &op1, 0);
8280 binop2:
8281 this_optab = optab_for_tree_code (code, type);
8282 binop3:
8283 if (modifier == EXPAND_STACK_PARM)
8284 target = 0;
8285 temp = expand_binop (mode, this_optab, op0, op1, target,
8286 unsignedp, OPTAB_LIB_WIDEN);
8287 gcc_assert (temp);
8288 return REDUCE_BIT_FIELD (temp);
8290 #undef REDUCE_BIT_FIELD
8292 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
8293 signedness of TYPE), possibly returning the result in TARGET. */
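/* For example, reducing an SImode value to a 3-bit unsigned type masks it
   with 7, while reducing it to a 3-bit signed type shifts it left by 29 and
   then arithmetically right by 29 so that bit 2 is sign-extended.  */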
8294 static rtx
8295 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
8297 HOST_WIDE_INT prec = TYPE_PRECISION (type);
8298 if (target && GET_MODE (target) != GET_MODE (exp))
8299 target = 0;
8300 if (TYPE_UNSIGNED (type))
8302 rtx mask;
8303 if (prec < HOST_BITS_PER_WIDE_INT)
8304 mask = immed_double_const (((unsigned HOST_WIDE_INT) 1 << prec) - 1, 0,
8305 GET_MODE (exp));
8306 else
8307 mask = immed_double_const ((unsigned HOST_WIDE_INT) -1,
8308 ((unsigned HOST_WIDE_INT) 1
8309 << (prec - HOST_BITS_PER_WIDE_INT)) - 1,
8310 GET_MODE (exp));
8311 return expand_and (GET_MODE (exp), exp, mask, target);
8313 else
8315 tree count = build_int_cst (NULL_TREE,
8316 GET_MODE_BITSIZE (GET_MODE (exp)) - prec);
8317 exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
8318 return expand_shift (RSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
8322 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
8323 when applied to the address of EXP produces an address known to be
8324 aligned more than BIGGEST_ALIGNMENT. */
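/* Such an offset typically has the form (- (ADDR_EXPR of EXP)) & (ALIGN - 1),
   possibly wrapped in conversions; adding it to the address of EXP rounds
   that address up to the next ALIGN boundary.  */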
8326 static int
8327 is_aligning_offset (tree offset, tree exp)
8329 /* Strip off any conversions. */
8330 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8331 || TREE_CODE (offset) == NOP_EXPR
8332 || TREE_CODE (offset) == CONVERT_EXPR)
8333 offset = TREE_OPERAND (offset, 0);
8335 /* We must now have a BIT_AND_EXPR with a constant that is one less than
8336 a power of 2 and which is larger than BIGGEST_ALIGNMENT. */
8337 if (TREE_CODE (offset) != BIT_AND_EXPR
8338 || !host_integerp (TREE_OPERAND (offset, 1), 1)
8339 || compare_tree_int (TREE_OPERAND (offset, 1),
8340 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
8341 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
8342 return 0;
8344 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
8345 It must be NEGATE_EXPR. Then strip any more conversions. */
8346 offset = TREE_OPERAND (offset, 0);
8347 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8348 || TREE_CODE (offset) == NOP_EXPR
8349 || TREE_CODE (offset) == CONVERT_EXPR)
8350 offset = TREE_OPERAND (offset, 0);
8352 if (TREE_CODE (offset) != NEGATE_EXPR)
8353 return 0;
8355 offset = TREE_OPERAND (offset, 0);
8356 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8357 || TREE_CODE (offset) == NOP_EXPR
8358 || TREE_CODE (offset) == CONVERT_EXPR)
8359 offset = TREE_OPERAND (offset, 0);
8361 /* This must now be the address of EXP. */
8362 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
8365 /* Return the tree node if an ARG corresponds to a string constant or zero
8366 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
8367 in bytes within the string that ARG is accessing. The type of the
8368 offset will be `sizetype'. */
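/* For example, given the argument "hello" + 2, or equivalently the address
   of "hello"[2], this returns the STRING_CST "hello" and sets *PTR_OFFSET
   to 2.  */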
8370 tree
8371 string_constant (tree arg, tree *ptr_offset)
8373 tree array, offset;
8374 STRIP_NOPS (arg);
8376 if (TREE_CODE (arg) == ADDR_EXPR)
8378 if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8380 *ptr_offset = size_zero_node;
8381 return TREE_OPERAND (arg, 0);
8383 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
8385 array = TREE_OPERAND (arg, 0);
8386 offset = size_zero_node;
8388 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
8390 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
8391 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
8392 if (TREE_CODE (array) != STRING_CST
8393 && TREE_CODE (array) != VAR_DECL)
8394 return 0;
8396 else
8397 return 0;
8399 else if (TREE_CODE (arg) == PLUS_EXPR)
8401 tree arg0 = TREE_OPERAND (arg, 0);
8402 tree arg1 = TREE_OPERAND (arg, 1);
8404 STRIP_NOPS (arg0);
8405 STRIP_NOPS (arg1);
8407 if (TREE_CODE (arg0) == ADDR_EXPR
8408 && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
8409 || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
8411 array = TREE_OPERAND (arg0, 0);
8412 offset = arg1;
8414 else if (TREE_CODE (arg1) == ADDR_EXPR
8415 && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
8416 || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
8418 array = TREE_OPERAND (arg1, 0);
8419 offset = arg0;
8421 else
8422 return 0;
8424 else
8425 return 0;
8427 if (TREE_CODE (array) == STRING_CST)
8429 *ptr_offset = convert (sizetype, offset);
8430 return array;
8432 else if (TREE_CODE (array) == VAR_DECL)
8434 int length;
8436 /* Variables initialized to string literals can be handled too. */
8437 if (DECL_INITIAL (array) == NULL_TREE
8438 || TREE_CODE (DECL_INITIAL (array)) != STRING_CST)
8439 return 0;
8442 /* The array must be read-only, non-volatile and must bind locally. */
8442 if (! TREE_READONLY (array)
8443 || TREE_SIDE_EFFECTS (array)
8444 || ! targetm.binds_local_p (array))
8445 return 0;
8447 /* Avoid const char foo[4] = "abcde"; */
8448 if (DECL_SIZE_UNIT (array) == NULL_TREE
8449 || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
8450 || (length = TREE_STRING_LENGTH (DECL_INITIAL (array))) <= 0
8451 || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
8452 return 0;
8454 /* If variable is bigger than the string literal, OFFSET must be constant
8455 and inside of the bounds of the string literal. */
8456 offset = convert (sizetype, offset);
8457 if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
8458 && (! host_integerp (offset, 1)
8459 || compare_tree_int (offset, length) >= 0))
8460 return 0;
8462 *ptr_offset = offset;
8463 return DECL_INITIAL (array);
8466 return 0;
8469 /* Generate code to calculate EXP using a store-flag instruction
8470 and return an rtx for the result. EXP is either a comparison
8471 or a TRUTH_NOT_EXPR whose operand is a comparison.
8473 If TARGET is nonzero, store the result there if convenient.
8475 If ONLY_CHEAP is nonzero, only do this if it is likely to be very
8476 cheap.
8478 Return zero if there is no suitable set-flag instruction
8479 available on this machine.
8481 Once expand_expr has been called on the arguments of the comparison,
8482 we are committed to doing the store flag, since it is not safe to
8483 re-evaluate the expression. We emit the store-flag insn by calling
8484 emit_store_flag, but only expand the arguments if we have a reason
8485 to believe that emit_store_flag will be successful. If we think that
8486 it will, but it isn't, we have to simulate the store-flag with a
8487 set/jump/set sequence. */
8489 static rtx
8490 do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
8492 enum rtx_code code;
8493 tree arg0, arg1, type;
8494 tree tem;
8495 enum machine_mode operand_mode;
8496 int invert = 0;
8497 int unsignedp;
8498 rtx op0, op1;
8499 enum insn_code icode;
8500 rtx subtarget = target;
8501 rtx result, label;
8503 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
8504 result at the end. We can't simply invert the test since it would
8505 have already been inverted if it were valid. This case occurs for
8506 some floating-point comparisons. */
8508 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
8509 invert = 1, exp = TREE_OPERAND (exp, 0);
8511 arg0 = TREE_OPERAND (exp, 0);
8512 arg1 = TREE_OPERAND (exp, 1);
8514 /* Don't crash if the comparison was erroneous. */
8515 if (arg0 == error_mark_node || arg1 == error_mark_node)
8516 return const0_rtx;
8518 type = TREE_TYPE (arg0);
8519 operand_mode = TYPE_MODE (type);
8520 unsignedp = TYPE_UNSIGNED (type);
8522 /* We won't bother with BLKmode store-flag operations because it would mean
8523 passing a lot of information to emit_store_flag. */
8524 if (operand_mode == BLKmode)
8525 return 0;
8527 /* We won't bother with store-flag operations involving function pointers
8528 when function pointers must be canonicalized before comparisons. */
8529 #ifdef HAVE_canonicalize_funcptr_for_compare
8530 if (HAVE_canonicalize_funcptr_for_compare
8531 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
8532 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8533 == FUNCTION_TYPE))
8534 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
8535 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
8536 == FUNCTION_TYPE))))
8537 return 0;
8538 #endif
8540 STRIP_NOPS (arg0);
8541 STRIP_NOPS (arg1);
8543 /* Get the rtx comparison code to use. We know that EXP is a comparison
8544 operation of some type. Some comparisons against 1 and -1 can be
8545 converted to comparisons with zero. Do so here so that the tests
8546 below will be aware that we have a comparison with zero. These
8547 tests will not catch constants in the first operand, but constants
8548 are rarely passed as the first operand. */
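/* For example, X < 1 is rewritten below as X <= 0, and for signed operands
   X > -1 becomes X >= 0, so the single-bit and scc tests that follow only
   need to recognize comparisons against zero.  */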
8550 switch (TREE_CODE (exp))
8552 case EQ_EXPR:
8553 code = EQ;
8554 break;
8555 case NE_EXPR:
8556 code = NE;
8557 break;
8558 case LT_EXPR:
8559 if (integer_onep (arg1))
8560 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
8561 else
8562 code = unsignedp ? LTU : LT;
8563 break;
8564 case LE_EXPR:
8565 if (! unsignedp && integer_all_onesp (arg1))
8566 arg1 = integer_zero_node, code = LT;
8567 else
8568 code = unsignedp ? LEU : LE;
8569 break;
8570 case GT_EXPR:
8571 if (! unsignedp && integer_all_onesp (arg1))
8572 arg1 = integer_zero_node, code = GE;
8573 else
8574 code = unsignedp ? GTU : GT;
8575 break;
8576 case GE_EXPR:
8577 if (integer_onep (arg1))
8578 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
8579 else
8580 code = unsignedp ? GEU : GE;
8581 break;
8583 case UNORDERED_EXPR:
8584 code = UNORDERED;
8585 break;
8586 case ORDERED_EXPR:
8587 code = ORDERED;
8588 break;
8589 case UNLT_EXPR:
8590 code = UNLT;
8591 break;
8592 case UNLE_EXPR:
8593 code = UNLE;
8594 break;
8595 case UNGT_EXPR:
8596 code = UNGT;
8597 break;
8598 case UNGE_EXPR:
8599 code = UNGE;
8600 break;
8601 case UNEQ_EXPR:
8602 code = UNEQ;
8603 break;
8604 case LTGT_EXPR:
8605 code = LTGT;
8606 break;
8608 default:
8609 gcc_unreachable ();
8612 /* Put a constant second. */
8613 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
8615 tem = arg0; arg0 = arg1; arg1 = tem;
8616 code = swap_condition (code);
8619 /* If this is an equality or inequality test of a single bit, we can
8620 do this by shifting the bit being tested to the low-order bit and
8621 masking the result with the constant 1. If the condition was EQ,
8622 we xor it with 1. This does not require an scc insn and is faster
8623 than an scc insn even if we have it.
8625 The code to make this transformation was moved into fold_single_bit_test,
8626 so we just call into the folder and expand its result. */
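/* Roughly, (x & 8) != 0 is folded to (x >> 3) & 1, and (x & 8) == 0 to
   ((x >> 3) & 1) ^ 1.  */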
8628 if ((code == NE || code == EQ)
8629 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
8630 && integer_pow2p (TREE_OPERAND (arg0, 1)))
8632 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
8633 return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
8634 arg0, arg1, type),
8635 target, VOIDmode, EXPAND_NORMAL);
8638 /* Now see if we are likely to be able to do this. Return if not. */
8639 if (! can_compare_p (code, operand_mode, ccp_store_flag))
8640 return 0;
8642 icode = setcc_gen_code[(int) code];
8643 if (icode == CODE_FOR_nothing
8644 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
8646 /* We can only do this if it is one of the special cases that
8647 can be handled without an scc insn. */
8648 if ((code == LT && integer_zerop (arg1))
8649 || (! only_cheap && code == GE && integer_zerop (arg1)))
8650 ;
8651 else if (BRANCH_COST >= 0
8652 && ! only_cheap && (code == NE || code == EQ)
8653 && TREE_CODE (type) != REAL_TYPE
8654 && ((abs_optab->handlers[(int) operand_mode].insn_code
8655 != CODE_FOR_nothing)
8656 || (ffs_optab->handlers[(int) operand_mode].insn_code
8657 != CODE_FOR_nothing)))
8659 else
8660 return 0;
8663 if (! get_subtarget (target)
8664 || GET_MODE (subtarget) != operand_mode)
8665 subtarget = 0;
8667 expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);
8669 if (target == 0)
8670 target = gen_reg_rtx (mode);
8672 result = emit_store_flag (target, code, op0, op1,
8673 operand_mode, unsignedp, 1);
8675 if (result)
8677 if (invert)
8678 result = expand_binop (mode, xor_optab, result, const1_rtx,
8679 result, 0, OPTAB_LIB_WIDEN);
8680 return result;
8683 /* If this failed, we have to do this with set/compare/jump/set code. */
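/* That is: TARGET is first set to 1 (or 0 when inverting), the comparison is
   emitted with a conditional branch that jumps past the next store when the
   condition holds, TARGET is then set to the opposite value, and the branch
   target label follows.  */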
8684 if (!REG_P (target)
8685 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
8686 target = gen_reg_rtx (GET_MODE (target));
8688 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
8689 result = compare_from_rtx (op0, op1, code, unsignedp,
8690 operand_mode, NULL_RTX);
8691 if (GET_CODE (result) == CONST_INT)
8692 return (((result == const0_rtx && ! invert)
8693 || (result != const0_rtx && invert))
8694 ? const0_rtx : const1_rtx);
8696 /* The code of RESULT may not match CODE if compare_from_rtx
8697 decided to swap its operands and reverse the original code.
8699 We know that compare_from_rtx returns either a CONST_INT or
8700 a new comparison code, so it is safe to just extract the
8701 code from RESULT. */
8702 code = GET_CODE (result);
8704 label = gen_label_rtx ();
8705 gcc_assert (bcc_gen_fctn[(int) code]);
8707 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
8708 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
8709 emit_label (label);
8711 return target;
8715 /* Stubs in case we haven't got a casesi insn. */
8716 #ifndef HAVE_casesi
8717 # define HAVE_casesi 0
8718 # define gen_casesi(a, b, c, d, e) (0)
8719 # define CODE_FOR_casesi CODE_FOR_nothing
8720 #endif
8722 /* If the machine does not have a case insn that compares the bounds,
8723 this means extra overhead for dispatch tables, which raises the
8724 threshold for using them. */
8725 #ifndef CASE_VALUES_THRESHOLD
8726 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
8727 #endif /* CASE_VALUES_THRESHOLD */
8729 unsigned int
8730 case_values_threshold (void)
8732 return CASE_VALUES_THRESHOLD;
8735 /* Attempt to generate a casesi instruction. Returns 1 if successful,
8736 0 otherwise (i.e. if there is no casesi instruction). */
8737 int
8738 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
8739 rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
8741 enum machine_mode index_mode = SImode;
8742 int index_bits = GET_MODE_BITSIZE (index_mode);
8743 rtx op1, op2, index;
8744 enum machine_mode op_mode;
8746 if (! HAVE_casesi)
8747 return 0;
8749 /* Convert the index to SImode. */
8750 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
8752 enum machine_mode omode = TYPE_MODE (index_type);
8753 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
8755 /* We must handle the endpoints in the original mode. */
8756 index_expr = build2 (MINUS_EXPR, index_type,
8757 index_expr, minval);
8758 minval = integer_zero_node;
8759 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
8760 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
8761 omode, 1, default_label);
8762 /* Now we can safely truncate. */
8763 index = convert_to_mode (index_mode, index, 0);
8765 else
8767 if (TYPE_MODE (index_type) != index_mode)
8769 index_expr = convert (lang_hooks.types.type_for_size
8770 (index_bits, 0), index_expr);
8771 index_type = TREE_TYPE (index_expr);
8774 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
8777 do_pending_stack_adjust ();
8779 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
8780 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
8781 (index, op_mode))
8782 index = copy_to_mode_reg (op_mode, index);
8784 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
8786 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
8787 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
8788 op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
8789 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
8790 (op1, op_mode))
8791 op1 = copy_to_mode_reg (op_mode, op1);
8793 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
8795 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
8796 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
8797 op2, TYPE_UNSIGNED (TREE_TYPE (range)));
8798 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
8799 (op2, op_mode))
8800 op2 = copy_to_mode_reg (op_mode, op2);
8802 emit_jump_insn (gen_casesi (index, op1, op2,
8803 table_label, default_label));
8804 return 1;
8807 /* Attempt to generate a tablejump instruction; same concept. */
8808 #ifndef HAVE_tablejump
8809 #define HAVE_tablejump 0
8810 #define gen_tablejump(x, y) (0)
8811 #endif
8813 /* Subroutine of the next function.
8815 INDEX is the value being switched on, with the lowest value
8816 in the table already subtracted.
8817 MODE is its expected mode (needed if INDEX is constant).
8818 RANGE is the length of the jump table.
8819 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
8821 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
8822 index value is out of range. */
8824 static void
8825 do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
8826 rtx default_label)
8828 rtx temp, vector;
8830 if (INTVAL (range) > cfun->max_jumptable_ents)
8831 cfun->max_jumptable_ents = INTVAL (range);
8833 /* Do an unsigned comparison (in the proper mode) between the index
8834 expression and the value which represents the length of the range.
8835 Since we just finished subtracting the lower bound of the range
8836 from the index expression, this comparison allows us to simultaneously
8837 check that the original index expression value is both greater than
8838 or equal to the minimum value of the range and less than or equal to
8839 the maximum value of the range. */
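/* For example, with case values 5 through 12 the lower bound 5 has already
   been subtracted, so RANGE is 7; a single unsigned INDEX > 7 comparison
   rejects both original values below 5 (which wrapped around to large
   unsigned numbers) and values above 12.  */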
8841 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
8842 default_label);
8844 /* If index is in range, it must fit in Pmode.
8845 Convert to Pmode so we can index with it. */
8846 if (mode != Pmode)
8847 index = convert_to_mode (Pmode, index, 1);
8849 /* Don't let a MEM slip through, because then INDEX that comes
8850 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
8851 and break_out_memory_refs will go to work on it and mess it up. */
8852 #ifdef PIC_CASE_VECTOR_ADDRESS
8853 if (flag_pic && !REG_P (index))
8854 index = copy_to_mode_reg (Pmode, index);
8855 #endif
8857 /* If flag_force_addr were to affect this address
8858 it could interfere with the tricky assumptions made
8859 about addresses that contain label-refs,
8860 which may be valid only very near the tablejump itself. */
8861 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
8862 GET_MODE_SIZE, because this indicates how large insns are. The other
8863 uses should all be Pmode, because they are addresses. This code
8864 could fail if addresses and insns are not the same size. */
8865 index = gen_rtx_PLUS (Pmode,
8866 gen_rtx_MULT (Pmode, index,
8867 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
8868 gen_rtx_LABEL_REF (Pmode, table_label));
8869 #ifdef PIC_CASE_VECTOR_ADDRESS
8870 if (flag_pic)
8871 index = PIC_CASE_VECTOR_ADDRESS (index);
8872 else
8873 #endif
8874 index = memory_address_noforce (CASE_VECTOR_MODE, index);
8875 temp = gen_reg_rtx (CASE_VECTOR_MODE);
8876 vector = gen_const_mem (CASE_VECTOR_MODE, index);
8877 convert_move (temp, vector, 0);
8879 emit_jump_insn (gen_tablejump (temp, table_label));
8881 /* If we are generating PIC code or if the table is PC-relative, the
8882 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
8883 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
8884 emit_barrier ();
8887 int
8888 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
8889 rtx table_label, rtx default_label)
8891 rtx index;
8893 if (! HAVE_tablejump)
8894 return 0;
8896 index_expr = fold (build2 (MINUS_EXPR, index_type,
8897 convert (index_type, index_expr),
8898 convert (index_type, minval)));
8899 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
8900 do_pending_stack_adjust ();
8902 do_tablejump (index, TYPE_MODE (index_type),
8903 convert_modes (TYPE_MODE (index_type),
8904 TYPE_MODE (TREE_TYPE (range)),
8905 expand_expr (range, NULL_RTX,
8906 VOIDmode, 0),
8907 TYPE_UNSIGNED (TREE_TYPE (range))),
8908 table_label, default_label);
8909 return 1;
8912 /* Nonzero if the mode is a valid vector mode for this architecture.
8913 This returns nonzero even if there is no hardware support for the
8914 vector mode, but we can emulate with narrower modes. */
8916 int
8917 vector_mode_valid_p (enum machine_mode mode)
8919 enum mode_class class = GET_MODE_CLASS (mode);
8920 enum machine_mode innermode;
8922 /* Doh! What's going on? */
8923 if (class != MODE_VECTOR_INT
8924 && class != MODE_VECTOR_FLOAT)
8925 return 0;
8927 /* Hardware support. Woo hoo! */
8928 if (targetm.vector_mode_supported_p (mode))
8929 return 1;
8931 innermode = GET_MODE_INNER (mode);
8933 /* We should probably return 1 if requesting V4DI and we have no DI,
8934 but do have V2DI; however, this case is probably very unlikely. */
8936 /* If we have support for the inner mode, we can safely emulate it.
8937 We may not have V2DI, but we can emulate with a pair of DIs. */
8938 return targetm.scalar_mode_supported_p (innermode);
8941 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
8942 static rtx
8943 const_vector_from_tree (tree exp)
8945 rtvec v;
8946 int units, i;
8947 tree link, elt;
8948 enum machine_mode inner, mode;
8950 mode = TYPE_MODE (TREE_TYPE (exp));
8952 if (initializer_zerop (exp))
8953 return CONST0_RTX (mode);
8955 units = GET_MODE_NUNITS (mode);
8956 inner = GET_MODE_INNER (mode);
8958 v = rtvec_alloc (units);
8960 link = TREE_VECTOR_CST_ELTS (exp);
8961 for (i = 0; link; link = TREE_CHAIN (link), ++i)
8963 elt = TREE_VALUE (link);
8965 if (TREE_CODE (elt) == REAL_CST)
8966 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
8967 inner);
8968 else
8969 RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
8970 TREE_INT_CST_HIGH (elt),
8971 inner);
8974 /* Initialize remaining elements to 0. */
8975 for (; i < units; ++i)
8976 RTVEC_ELT (v, i) = CONST0_RTX (inner);
8978 return gen_rtx_CONST_VECTOR (mode, v);
8980 #include "gt-expr.h"