gcc/expr.c
1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "real.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "flags.h"
31 #include "regs.h"
32 #include "hard-reg-set.h"
33 #include "except.h"
34 #include "function.h"
35 #include "insn-config.h"
36 #include "insn-attr.h"
37 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
38 #include "expr.h"
39 #include "optabs.h"
40 #include "libfuncs.h"
41 #include "recog.h"
42 #include "reload.h"
43 #include "output.h"
44 #include "typeclass.h"
45 #include "toplev.h"
46 #include "ggc.h"
47 #include "langhooks.h"
48 #include "intl.h"
49 #include "tm_p.h"
50 #include "tree-iterator.h"
51 #include "tree-pass.h"
52 #include "tree-flow.h"
53 #include "target.h"
54 #include "timevar.h"
56 /* Decide whether a function's arguments should be processed
57 from first to last or from last to first.
59 They should if the stack and args grow in opposite directions, but
60 only if we have push insns. */
62 #ifdef PUSH_ROUNDING
64 #ifndef PUSH_ARGS_REVERSED
65 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
66 #define PUSH_ARGS_REVERSED /* If it's last to first. */
67 #endif
68 #endif
70 #endif
72 #ifndef STACK_PUSH_CODE
73 #ifdef STACK_GROWS_DOWNWARD
74 #define STACK_PUSH_CODE PRE_DEC
75 #else
76 #define STACK_PUSH_CODE PRE_INC
77 #endif
78 #endif
81 /* If this is nonzero, we do not bother generating VOLATILE
82 around volatile memory references, and we are willing to
83 output indirect addresses. If cse is to follow, we reject
84 indirect addresses so a useful potential cse is generated;
85 if it is used only once, instruction combination will produce
86 the same indirect address eventually. */
87 int cse_not_expected;
89 /* This structure is used by move_by_pieces to describe the move to
90 be performed. */
91 struct move_by_pieces
93 rtx to;
94 rtx to_addr;
95 int autinc_to;
96 int explicit_inc_to;
97 rtx from;
98 rtx from_addr;
99 int autinc_from;
100 int explicit_inc_from;
101 unsigned HOST_WIDE_INT len;
102 HOST_WIDE_INT offset;
103 int reverse;
106 /* This structure is used by store_by_pieces to describe the clear to
107 be performed. */
109 struct store_by_pieces
111 rtx to;
112 rtx to_addr;
113 int autinc_to;
114 int explicit_inc_to;
115 unsigned HOST_WIDE_INT len;
116 HOST_WIDE_INT offset;
117 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
118 void *constfundata;
119 int reverse;
122 static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
123 unsigned int,
124 unsigned int);
125 static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
126 struct move_by_pieces *);
127 static bool block_move_libcall_safe_for_call_parm (void);
128 static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned);
129 static rtx emit_block_move_via_libcall (rtx, rtx, rtx);
130 static tree emit_block_move_libcall_fn (int);
131 static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
132 static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
133 static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
134 static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
135 static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
136 struct store_by_pieces *);
137 static bool clear_storage_via_clrmem (rtx, rtx, unsigned);
138 static rtx clear_storage_via_libcall (rtx, rtx);
139 static tree clear_storage_libcall_fn (int);
140 static rtx compress_float_constant (rtx, rtx);
141 static rtx get_subtarget (rtx);
142 static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
143 HOST_WIDE_INT, enum machine_mode,
144 tree, tree, int, int);
145 static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
146 static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
147 tree, tree, int);
149 static unsigned HOST_WIDE_INT highest_pow2_factor (tree);
150 static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (tree, tree);
152 static int is_aligning_offset (tree, tree);
153 static void expand_operands (tree, tree, rtx, rtx*, rtx*,
154 enum expand_modifier);
155 static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
156 static rtx do_store_flag (tree, rtx, enum machine_mode, int);
157 #ifdef PUSH_ROUNDING
158 static void emit_single_push_insn (enum machine_mode, rtx, tree);
159 #endif
160 static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
161 static rtx const_vector_from_tree (tree);
163 /* Record for each mode whether we can move a register directly to or
164 from an object of that mode in memory. If we can't, we won't try
165 to use that mode directly when accessing a field of that mode. */
167 static char direct_load[NUM_MACHINE_MODES];
168 static char direct_store[NUM_MACHINE_MODES];
170 /* Record for each mode whether we can float-extend from memory. */
172 static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
174 /* This macro is used to determine whether move_by_pieces should be called
175 to perform a structure copy. */
176 #ifndef MOVE_BY_PIECES_P
177 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
178 (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
179 < (unsigned int) MOVE_RATIO)
180 #endif
182 /* This macro is used to determine whether clear_by_pieces should be
183 called to clear storage. */
184 #ifndef CLEAR_BY_PIECES_P
185 #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
186 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
187 < (unsigned int) CLEAR_RATIO)
188 #endif
190 /* This macro is used to determine whether store_by_pieces should be
191 called to "memset" storage with byte values other than zero, or
192 to "memcpy" storage when the source is a constant string. */
193 #ifndef STORE_BY_PIECES_P
194 #define STORE_BY_PIECES_P(SIZE, ALIGN) \
195 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
196 < (unsigned int) MOVE_RATIO)
197 #endif
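/* Illustrative sketch (not part of the original file): how a caller might
   consult the *_BY_PIECES_P predicates above to decide whether an inline
   by-pieces expansion is worthwhile.  The helper name is hypothetical.  */

static int ATTRIBUTE_UNUSED
example_use_by_pieces_p (unsigned HOST_WIDE_INT size, unsigned int align,
			 int clearing)
{
  /* For a clear (memset to zero) consult CLEAR_BY_PIECES_P; for a copy
     consult MOVE_BY_PIECES_P.  Both compare the estimated number of
     piecewise move insns against the target's ratio macros.  */
  if (clearing)
    return CLEAR_BY_PIECES_P (size, align);
  return MOVE_BY_PIECES_P (size, align);
}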
199 /* This array records the insn_code of insns to perform block moves. */
200 enum insn_code movmem_optab[NUM_MACHINE_MODES];
202 /* This array records the insn_code of insns to perform block clears. */
203 enum insn_code clrmem_optab[NUM_MACHINE_MODES];
205 /* These arrays record the insn_code of two different kinds of insns
206 to perform block compares. */
207 enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
208 enum insn_code cmpmem_optab[NUM_MACHINE_MODES];
210 /* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow. */
212 #ifndef SLOW_UNALIGNED_ACCESS
213 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
214 #endif
216 /* This is run once per compilation to set up which modes can be used
217 directly in memory and to initialize the block move optab. */
219 void
220 init_expr_once (void)
222 rtx insn, pat;
223 enum machine_mode mode;
224 int num_clobbers;
225 rtx mem, mem1;
226 rtx reg;
228 /* Try indexing by frame ptr and try by stack ptr.
229 It is known that on the Convex the stack ptr isn't a valid index.
230 With luck, one or the other is valid on any machine. */
231 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
232 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
234 /* A scratch register we can modify in-place below to avoid
235 useless RTL allocations. */
236 reg = gen_rtx_REG (VOIDmode, -1);
238 insn = rtx_alloc (INSN);
239 pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
240 PATTERN (insn) = pat;
242 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
243 mode = (enum machine_mode) ((int) mode + 1))
245 int regno;
247 direct_load[(int) mode] = direct_store[(int) mode] = 0;
248 PUT_MODE (mem, mode);
249 PUT_MODE (mem1, mode);
250 PUT_MODE (reg, mode);
252 /* See if there is some register that can be used in this mode and
253 directly loaded or stored from memory. */
255 if (mode != VOIDmode && mode != BLKmode)
256 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
257 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
258 regno++)
260 if (! HARD_REGNO_MODE_OK (regno, mode))
261 continue;
263 REGNO (reg) = regno;
265 SET_SRC (pat) = mem;
266 SET_DEST (pat) = reg;
267 if (recog (pat, insn, &num_clobbers) >= 0)
268 direct_load[(int) mode] = 1;
270 SET_SRC (pat) = mem1;
271 SET_DEST (pat) = reg;
272 if (recog (pat, insn, &num_clobbers) >= 0)
273 direct_load[(int) mode] = 1;
275 SET_SRC (pat) = reg;
276 SET_DEST (pat) = mem;
277 if (recog (pat, insn, &num_clobbers) >= 0)
278 direct_store[(int) mode] = 1;
280 SET_SRC (pat) = reg;
281 SET_DEST (pat) = mem1;
282 if (recog (pat, insn, &num_clobbers) >= 0)
283 direct_store[(int) mode] = 1;
287 mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
289 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
290 mode = GET_MODE_WIDER_MODE (mode))
292 enum machine_mode srcmode;
293 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
294 srcmode = GET_MODE_WIDER_MODE (srcmode))
296 enum insn_code ic;
298 ic = can_extend_p (mode, srcmode, 0);
299 if (ic == CODE_FOR_nothing)
300 continue;
302 PUT_MODE (mem, srcmode);
304 if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
305 float_extend_from_mem[mode][srcmode] = true;
310 /* This is run at the start of compiling a function. */
312 void
313 init_expr (void)
315 cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
318 /* Copy data from FROM to TO, where the machine modes are not the same.
319 Both modes may be integer, or both may be floating.
320 UNSIGNEDP should be nonzero if FROM is an unsigned type.
321 This causes zero-extension instead of sign-extension. */
323 void
324 convert_move (rtx to, rtx from, int unsignedp)
326 enum machine_mode to_mode = GET_MODE (to);
327 enum machine_mode from_mode = GET_MODE (from);
328 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
329 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
330 enum insn_code code;
331 rtx libcall;
333 /* rtx code for making an equivalent value. */
334 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
335 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
338 gcc_assert (to_real == from_real);
340 /* If the source and destination are already the same, then there's
341 nothing to do. */
342 if (to == from)
343 return;
345 /* If FROM is a SUBREG that indicates that we have already done at least
346 the required extension, strip it. We don't handle such SUBREGs as
347 TO here. */
349 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
350 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
351 >= GET_MODE_SIZE (to_mode))
352 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
353 from = gen_lowpart (to_mode, from), from_mode = to_mode;
355 gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));
357 if (to_mode == from_mode
358 || (from_mode == VOIDmode && CONSTANT_P (from)))
360 emit_move_insn (to, from);
361 return;
364 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
366 gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));
368 if (VECTOR_MODE_P (to_mode))
369 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
370 else
371 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
373 emit_move_insn (to, from);
374 return;
377 if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
379 convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
380 convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
381 return;
384 if (to_real)
386 rtx value, insns;
387 convert_optab tab;
389 gcc_assert (GET_MODE_PRECISION (from_mode)
390 != GET_MODE_PRECISION (to_mode));
392 if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
393 tab = sext_optab;
394 else
395 tab = trunc_optab;
397 /* Try converting directly if the insn is supported. */
399 code = tab->handlers[to_mode][from_mode].insn_code;
400 if (code != CODE_FOR_nothing)
402 emit_unop_insn (code, to, from,
403 tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
404 return;
407 /* Otherwise use a libcall. */
408 libcall = tab->handlers[to_mode][from_mode].libfunc;
410 /* Is this conversion implemented yet? */
411 gcc_assert (libcall);
413 start_sequence ();
414 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
415 1, from, from_mode);
416 insns = get_insns ();
417 end_sequence ();
418 emit_libcall_block (insns, to, value,
419 tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
420 from)
421 : gen_rtx_FLOAT_EXTEND (to_mode, from));
422 return;
425 /* Handle pointer conversion. */ /* SPEE 900220. */
426 /* Targets are expected to provide conversion insns between PxImode and
427 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
428 if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
430 enum machine_mode full_mode
431 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
433 gcc_assert (trunc_optab->handlers[to_mode][full_mode].insn_code
434 != CODE_FOR_nothing);
436 if (full_mode != from_mode)
437 from = convert_to_mode (full_mode, from, unsignedp);
438 emit_unop_insn (trunc_optab->handlers[to_mode][full_mode].insn_code,
439 to, from, UNKNOWN);
440 return;
442 if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
444 enum machine_mode full_mode
445 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
447 gcc_assert (sext_optab->handlers[full_mode][from_mode].insn_code
448 != CODE_FOR_nothing);
450 emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
451 to, from, UNKNOWN);
452 if (to_mode == full_mode)
453 return;
455 /* else proceed to integer conversions below. */
456 from_mode = full_mode;
459 /* Now both modes are integers. */
461 /* Handle expanding beyond a word. */
462 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
463 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
465 rtx insns;
466 rtx lowpart;
467 rtx fill_value;
468 rtx lowfrom;
469 int i;
470 enum machine_mode lowpart_mode;
471 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
473 /* Try converting directly if the insn is supported. */
474 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
475 != CODE_FOR_nothing)
477 /* If FROM is a SUBREG, put it into a register. Do this
478 so that we always generate the same set of insns for
479 better cse'ing; if an intermediate assignment occurred,
480 we won't be doing the operation directly on the SUBREG. */
481 if (optimize > 0 && GET_CODE (from) == SUBREG)
482 from = force_reg (from_mode, from);
483 emit_unop_insn (code, to, from, equiv_code);
484 return;
486 /* Next, try converting via full word. */
487 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
488 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
489 != CODE_FOR_nothing))
491 if (REG_P (to))
493 if (reg_overlap_mentioned_p (to, from))
494 from = force_reg (from_mode, from);
495 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
497 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
498 emit_unop_insn (code, to,
499 gen_lowpart (word_mode, to), equiv_code);
500 return;
503 /* No special multiword conversion insn; do it by hand. */
504 start_sequence ();
506 /* Since we will turn this into a no conflict block, we must ensure
507 that the source does not overlap the target. */
509 if (reg_overlap_mentioned_p (to, from))
510 from = force_reg (from_mode, from);
512 /* Get a copy of FROM widened to a word, if necessary. */
513 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
514 lowpart_mode = word_mode;
515 else
516 lowpart_mode = from_mode;
518 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
520 lowpart = gen_lowpart (lowpart_mode, to);
521 emit_move_insn (lowpart, lowfrom);
523 /* Compute the value to put in each remaining word. */
524 if (unsignedp)
525 fill_value = const0_rtx;
526 else
528 #ifdef HAVE_slt
529 if (HAVE_slt
530 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
531 && STORE_FLAG_VALUE == -1)
533 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
534 lowpart_mode, 0);
535 fill_value = gen_reg_rtx (word_mode);
536 emit_insn (gen_slt (fill_value));
538 else
539 #endif
541 fill_value
542 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
543 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
544 NULL_RTX, 0);
545 fill_value = convert_to_mode (word_mode, fill_value, 1);
549 /* Fill the remaining words. */
550 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
552 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
553 rtx subword = operand_subword (to, index, 1, to_mode);
555 gcc_assert (subword);
557 if (fill_value != subword)
558 emit_move_insn (subword, fill_value);
561 insns = get_insns ();
562 end_sequence ();
564 emit_no_conflict_block (insns, to, from, NULL_RTX,
565 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
566 return;
569 /* Truncating multi-word to a word or less. */
570 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
571 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
573 if (!((MEM_P (from)
574 && ! MEM_VOLATILE_P (from)
575 && direct_load[(int) to_mode]
576 && ! mode_dependent_address_p (XEXP (from, 0)))
577 || REG_P (from)
578 || GET_CODE (from) == SUBREG))
579 from = force_reg (from_mode, from);
580 convert_move (to, gen_lowpart (word_mode, from), 0);
581 return;
584 /* Now follow all the conversions between integers
585 no more than a word long. */
587 /* For truncation, usually we can just refer to FROM in a narrower mode. */
588 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
589 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
590 GET_MODE_BITSIZE (from_mode)))
592 if (!((MEM_P (from)
593 && ! MEM_VOLATILE_P (from)
594 && direct_load[(int) to_mode]
595 && ! mode_dependent_address_p (XEXP (from, 0)))
596 || REG_P (from)
597 || GET_CODE (from) == SUBREG))
598 from = force_reg (from_mode, from);
599 if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
600 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
601 from = copy_to_reg (from);
602 emit_move_insn (to, gen_lowpart (to_mode, from));
603 return;
606 /* Handle extension. */
607 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
609 /* Convert directly if that works. */
610 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
611 != CODE_FOR_nothing)
613 if (flag_force_mem)
614 from = force_not_mem (from);
616 emit_unop_insn (code, to, from, equiv_code);
617 return;
619 else
621 enum machine_mode intermediate;
622 rtx tmp;
623 tree shift_amount;
625 /* Search for a mode to convert via. */
626 for (intermediate = from_mode; intermediate != VOIDmode;
627 intermediate = GET_MODE_WIDER_MODE (intermediate))
628 if (((can_extend_p (to_mode, intermediate, unsignedp)
629 != CODE_FOR_nothing)
630 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
631 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
632 GET_MODE_BITSIZE (intermediate))))
633 && (can_extend_p (intermediate, from_mode, unsignedp)
634 != CODE_FOR_nothing))
636 convert_move (to, convert_to_mode (intermediate, from,
637 unsignedp), unsignedp);
638 return;
641 /* No suitable intermediate mode.
642 Generate what we need with shifts. */
643 shift_amount = build_int_cst (NULL_TREE,
644 GET_MODE_BITSIZE (to_mode)
645 - GET_MODE_BITSIZE (from_mode));
646 from = gen_lowpart (to_mode, force_reg (from_mode, from));
647 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
648 to, unsignedp);
649 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
650 to, unsignedp);
651 if (tmp != to)
652 emit_move_insn (to, tmp);
653 return;
657 /* Support special truncate insns for certain modes. */
658 if (trunc_optab->handlers[to_mode][from_mode].insn_code != CODE_FOR_nothing)
660 emit_unop_insn (trunc_optab->handlers[to_mode][from_mode].insn_code,
661 to, from, UNKNOWN);
662 return;
665 /* Handle truncation of volatile memrefs, and so on;
666 the things that couldn't be truncated directly,
667 and for which there was no special instruction.
669 ??? Code above formerly short-circuited this, for most integer
670 mode pairs, with a force_reg in from_mode followed by a recursive
671 call to this routine. Appears always to have been wrong. */
672 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
674 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
675 emit_move_insn (to, temp);
676 return;
679 /* Mode combination is not recognized. */
680 gcc_unreachable ();
683 /* Return an rtx for a value that would result
684 from converting X to mode MODE.
685 Both X and MODE may be floating, or both integer.
686 UNSIGNEDP is nonzero if X is an unsigned value.
687 This can be done by referring to a part of X in place
688 or by copying to a new temporary with conversion. */
691 convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
693 return convert_modes (mode, VOIDmode, x, unsignedp);
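/* Illustrative sketch (not part of the original file): a typical use of
   convert_to_mode, zero-extending a QImode value into SImode.  The wrapper
   name is hypothetical.  */

static rtx ATTRIBUTE_UNUSED
example_zero_extend_to_si (rtx byte_val)
{
  /* UNSIGNEDP == 1 requests zero-extension rather than sign-extension;
     convert_to_mode may reuse BYTE_VAL in place or emit a conversion.  */
  rtx wide = convert_to_mode (SImode, byte_val, 1);
  return force_reg (SImode, wide);
}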
696 /* Return an rtx for a value that would result
697 from converting X from mode OLDMODE to mode MODE.
698 Both modes may be floating, or both integer.
699 UNSIGNEDP is nonzero if X is an unsigned value.
701 This can be done by referring to a part of X in place
702 or by copying to a new temporary with conversion.
704 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode. */
707 convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
709 rtx temp;
711 /* If FROM is a SUBREG that indicates that we have already done at least
712 the required extension, strip it. */
714 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
715 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
716 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
717 x = gen_lowpart (mode, x);
719 if (GET_MODE (x) != VOIDmode)
720 oldmode = GET_MODE (x);
722 if (mode == oldmode)
723 return x;
725 /* There is one case that we must handle specially: If we are converting
726 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
727 we are to interpret the constant as unsigned, gen_lowpart will do
728 the wrong thing if the constant appears negative. What we want to do is
729 make the high-order word of the constant zero, not all ones. */
731 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
732 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
733 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
735 HOST_WIDE_INT val = INTVAL (x);
737 if (oldmode != VOIDmode
738 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
740 int width = GET_MODE_BITSIZE (oldmode);
742 /* We need to zero extend VAL. */
743 val &= ((HOST_WIDE_INT) 1 << width) - 1;
746 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
749 /* We can do this with a gen_lowpart if both desired and current modes
750 are integer, and this is either a constant integer, a register, or a
751 non-volatile MEM. Except for the constant case where MODE is no
752 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
754 if ((GET_CODE (x) == CONST_INT
755 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
756 || (GET_MODE_CLASS (mode) == MODE_INT
757 && GET_MODE_CLASS (oldmode) == MODE_INT
758 && (GET_CODE (x) == CONST_DOUBLE
759 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
760 && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
761 && direct_load[(int) mode])
762 || (REG_P (x)
763 && (! HARD_REGISTER_P (x)
764 || HARD_REGNO_MODE_OK (REGNO (x), mode))
765 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
766 GET_MODE_BITSIZE (GET_MODE (x)))))))))
768 /* ?? If we don't know OLDMODE, we have to assume here that
769 X does not need sign- or zero-extension. This may not be
770 the case, but it's the best we can do. */
771 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
772 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
774 HOST_WIDE_INT val = INTVAL (x);
775 int width = GET_MODE_BITSIZE (oldmode);
777 /* We must sign or zero-extend in this case. Start by
778 zero-extending, then sign extend if we need to. */
779 val &= ((HOST_WIDE_INT) 1 << width) - 1;
780 if (! unsignedp
781 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
782 val |= (HOST_WIDE_INT) (-1) << width;
784 return gen_int_mode (val, mode);
787 return gen_lowpart (mode, x);
790 /* Converting from an integer constant into a mode is always equivalent to a
791 subreg operation. */
792 if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
794 gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
795 return simplify_gen_subreg (mode, x, oldmode, 0);
798 temp = gen_reg_rtx (mode);
799 convert_move (temp, x, unsignedp);
800 return temp;
803 /* STORE_MAX_PIECES is the number of bytes at a time that we can
804 store efficiently. Due to internal GCC limitations, this is
805 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
806 for an immediate constant. */
808 #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
810 /* Determine whether the LEN bytes can be moved by using several move
811 instructions. Return nonzero if a call to move_by_pieces should
812 succeed. */
815 can_move_by_pieces (unsigned HOST_WIDE_INT len,
816 unsigned int align ATTRIBUTE_UNUSED)
818 return MOVE_BY_PIECES_P (len, align);
821 /* Generate several move instructions to copy LEN bytes from block FROM to
822 block TO. (These are MEM rtx's with BLKmode).
824 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
825 used to push FROM to the stack.
827 ALIGN is maximum stack alignment we can assume.
829 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
830 mempcpy, and if ENDP is 2 return memory the end minus one byte ala
831 stpcpy. */
834 move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
835 unsigned int align, int endp)
837 struct move_by_pieces data;
838 rtx to_addr, from_addr = XEXP (from, 0);
839 unsigned int max_size = MOVE_MAX_PIECES + 1;
840 enum machine_mode mode = VOIDmode, tmode;
841 enum insn_code icode;
843 align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
845 data.offset = 0;
846 data.from_addr = from_addr;
847 if (to)
849 to_addr = XEXP (to, 0);
850 data.to = to;
851 data.autinc_to
852 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
853 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
854 data.reverse
855 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
857 else
859 to_addr = NULL_RTX;
860 data.to = NULL_RTX;
861 data.autinc_to = 1;
862 #ifdef STACK_GROWS_DOWNWARD
863 data.reverse = 1;
864 #else
865 data.reverse = 0;
866 #endif
868 data.to_addr = to_addr;
869 data.from = from;
870 data.autinc_from
871 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
872 || GET_CODE (from_addr) == POST_INC
873 || GET_CODE (from_addr) == POST_DEC);
875 data.explicit_inc_from = 0;
876 data.explicit_inc_to = 0;
877 if (data.reverse) data.offset = len;
878 data.len = len;
880 /* If copying requires more than two move insns,
881 copy addresses to registers (to make displacements shorter)
882 and use post-increment if available. */
883 if (!(data.autinc_from && data.autinc_to)
884 && move_by_pieces_ninsns (len, align, max_size) > 2)
886 /* Find the mode of the largest move... */
887 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
888 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
889 if (GET_MODE_SIZE (tmode) < max_size)
890 mode = tmode;
892 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
894 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
895 data.autinc_from = 1;
896 data.explicit_inc_from = -1;
898 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
900 data.from_addr = copy_addr_to_reg (from_addr);
901 data.autinc_from = 1;
902 data.explicit_inc_from = 1;
904 if (!data.autinc_from && CONSTANT_P (from_addr))
905 data.from_addr = copy_addr_to_reg (from_addr);
906 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
908 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
909 data.autinc_to = 1;
910 data.explicit_inc_to = -1;
912 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
914 data.to_addr = copy_addr_to_reg (to_addr);
915 data.autinc_to = 1;
916 data.explicit_inc_to = 1;
918 if (!data.autinc_to && CONSTANT_P (to_addr))
919 data.to_addr = copy_addr_to_reg (to_addr);
922 tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
923 if (align >= GET_MODE_ALIGNMENT (tmode))
924 align = GET_MODE_ALIGNMENT (tmode);
925 else
927 enum machine_mode xmode;
929 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
930 tmode != VOIDmode;
931 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
932 if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
933 || SLOW_UNALIGNED_ACCESS (tmode, align))
934 break;
936 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
939 /* First move what we can in the largest integer mode, then go to
940 successively smaller modes. */
942 while (max_size > 1)
944 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
945 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
946 if (GET_MODE_SIZE (tmode) < max_size)
947 mode = tmode;
949 if (mode == VOIDmode)
950 break;
952 icode = mov_optab->handlers[(int) mode].insn_code;
953 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
954 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
956 max_size = GET_MODE_SIZE (mode);
959 /* The code above should have handled everything. */
960 gcc_assert (!data.len);
962 if (endp)
964 rtx to1;
966 gcc_assert (!data.reverse);
967 if (data.autinc_to)
969 if (endp == 2)
971 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
972 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
973 else
974 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
975 -1));
977 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
978 data.offset);
980 else
982 if (endp == 2)
983 --data.offset;
984 to1 = adjust_address (data.to, QImode, data.offset);
986 return to1;
988 else
989 return data.to;
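/* Illustrative sketch (not part of the original file): copying LEN bytes
   between two BLKmode MEMs with move_by_pieces, guarded by
   can_move_by_pieces, and using ENDP == 1 to obtain the mempcpy-style
   "one past the end" address.  The helper name is hypothetical.  */

static rtx ATTRIBUTE_UNUSED
example_move_by_pieces (rtx dst_mem, rtx src_mem, unsigned HOST_WIDE_INT len)
{
  unsigned int align = MIN (MEM_ALIGN (dst_mem), MEM_ALIGN (src_mem));

  if (!can_move_by_pieces (len, align))
    return NULL_RTX;

  /* ENDP == 1: return a MEM addressing the byte just past the copied block.  */
  return move_by_pieces (dst_mem, src_mem, len, align, 1);
}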
992 /* Return number of insns required to move L bytes by pieces.
993 ALIGN (in bits) is maximum alignment we can assume. */
995 static unsigned HOST_WIDE_INT
996 move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
997 unsigned int max_size)
999 unsigned HOST_WIDE_INT n_insns = 0;
1000 enum machine_mode tmode;
1002 tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
1003 if (align >= GET_MODE_ALIGNMENT (tmode))
1004 align = GET_MODE_ALIGNMENT (tmode);
1005 else
1007 enum machine_mode tmode, xmode;
1009 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
1010 tmode != VOIDmode;
1011 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
1012 if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
1013 || SLOW_UNALIGNED_ACCESS (tmode, align))
1014 break;
1016 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
1019 while (max_size > 1)
1021 enum machine_mode mode = VOIDmode;
1022 enum insn_code icode;
1024 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1025 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1026 if (GET_MODE_SIZE (tmode) < max_size)
1027 mode = tmode;
1029 if (mode == VOIDmode)
1030 break;
1032 icode = mov_optab->handlers[(int) mode].insn_code;
1033 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1034 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1036 max_size = GET_MODE_SIZE (mode);
1039 gcc_assert (!l);
1040 return n_insns;
1043 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1044 with move instructions for mode MODE. GENFUN is the gen_... function
1045 to make a move insn for that mode. DATA has all the other info. */
1047 static void
1048 move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
1049 struct move_by_pieces *data)
1051 unsigned int size = GET_MODE_SIZE (mode);
1052 rtx to1 = NULL_RTX, from1;
1054 while (data->len >= size)
1056 if (data->reverse)
1057 data->offset -= size;
1059 if (data->to)
1061 if (data->autinc_to)
1062 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1063 data->offset);
1064 else
1065 to1 = adjust_address (data->to, mode, data->offset);
1068 if (data->autinc_from)
1069 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1070 data->offset);
1071 else
1072 from1 = adjust_address (data->from, mode, data->offset);
1074 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1075 emit_insn (gen_add2_insn (data->to_addr,
1076 GEN_INT (-(HOST_WIDE_INT)size)));
1077 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1078 emit_insn (gen_add2_insn (data->from_addr,
1079 GEN_INT (-(HOST_WIDE_INT)size)));
1081 if (data->to)
1082 emit_insn ((*genfun) (to1, from1));
1083 else
1085 #ifdef PUSH_ROUNDING
1086 emit_single_push_insn (mode, from1, NULL);
1087 #else
1088 gcc_unreachable ();
1089 #endif
1092 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1093 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1094 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1095 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1097 if (! data->reverse)
1098 data->offset += size;
1100 data->len -= size;
1104 /* Emit code to move a block Y to a block X. This may be done with
1105 string-move instructions, with multiple scalar move instructions,
1106 or with a library call.
1108 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1109 SIZE is an rtx that says how long they are.
1110 ALIGN is the maximum alignment we can assume they have.
1111 METHOD describes what kind of copy this is, and what mechanisms may be used.
1113 Return the address of the new block, if memcpy is called and returns it,
1114 0 otherwise. */
1117 emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
1119 bool may_use_call;
1120 rtx retval = 0;
1121 unsigned int align;
1123 switch (method)
1125 case BLOCK_OP_NORMAL:
1126 may_use_call = true;
1127 break;
1129 case BLOCK_OP_CALL_PARM:
1130 may_use_call = block_move_libcall_safe_for_call_parm ();
1132 /* Make inhibit_defer_pop nonzero around the library call
1133 to force it to pop the arguments right away. */
1134 NO_DEFER_POP;
1135 break;
1137 case BLOCK_OP_NO_LIBCALL:
1138 may_use_call = false;
1139 break;
1141 default:
1142 gcc_unreachable ();
1145 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1147 gcc_assert (MEM_P (x));
1148 gcc_assert (MEM_P (y));
1149 gcc_assert (size);
1151 /* Make sure we've got BLKmode addresses; store_one_arg can decide that
1152 block copy is more efficient for other large modes, e.g. DCmode. */
1153 x = adjust_address (x, BLKmode, 0);
1154 y = adjust_address (y, BLKmode, 0);
1156 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1157 can be incorrect is coming from __builtin_memcpy. */
1158 if (GET_CODE (size) == CONST_INT)
1160 if (INTVAL (size) == 0)
1161 return 0;
1163 x = shallow_copy_rtx (x);
1164 y = shallow_copy_rtx (y);
1165 set_mem_size (x, size);
1166 set_mem_size (y, size);
1169 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1170 move_by_pieces (x, y, INTVAL (size), align, 0);
1171 else if (emit_block_move_via_movmem (x, y, size, align))
1173 else if (may_use_call)
1174 retval = emit_block_move_via_libcall (x, y, size);
1175 else
1176 emit_block_move_via_loop (x, y, size, align);
1178 if (method == BLOCK_OP_CALL_PARM)
1179 OK_DEFER_POP;
1181 return retval;
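/* Illustrative sketch (not part of the original file): the common way a
   caller copies one BLKmode object to another through emit_block_move.
   The helper name is hypothetical; BLOCK_OP_NORMAL permits a memcpy
   libcall when neither a movmem pattern nor a by-pieces copy applies.  */

static void ATTRIBUTE_UNUSED
example_block_copy (rtx dst_mem, rtx src_mem, HOST_WIDE_INT nbytes)
{
  emit_block_move (dst_mem, src_mem, GEN_INT (nbytes), BLOCK_OP_NORMAL);
}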
1184 /* A subroutine of emit_block_move. Returns true if calling the
1185 block move libcall will not clobber any parameters which may have
1186 already been placed on the stack. */
1188 static bool
1189 block_move_libcall_safe_for_call_parm (void)
1191 /* If arguments are pushed on the stack, then they're safe. */
1192 if (PUSH_ARGS)
1193 return true;
1195 /* If registers go on the stack anyway, any argument is sure to clobber
1196 an outgoing argument. */
1197 #if defined (REG_PARM_STACK_SPACE) && defined (OUTGOING_REG_PARM_STACK_SPACE)
1199 tree fn = emit_block_move_libcall_fn (false);
1200 (void) fn;
1201 if (REG_PARM_STACK_SPACE (fn) != 0)
1202 return false;
1204 #endif
1206 /* If any argument goes in memory, then it might clobber an outgoing
1207 argument. */
1209 CUMULATIVE_ARGS args_so_far;
1210 tree fn, arg;
1212 fn = emit_block_move_libcall_fn (false);
1213 INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);
1215 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1216 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1218 enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1219 rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
1220 if (!tmp || !REG_P (tmp))
1221 return false;
1222 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode,
1223 NULL_TREE, 1))
1224 return false;
1225 FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
1228 return true;
1231 /* A subroutine of emit_block_move. Expand a movmem pattern;
1232 return true if successful. */
1234 static bool
1235 emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align)
1237 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1238 int save_volatile_ok = volatile_ok;
1239 enum machine_mode mode;
1241 /* Since this is a move insn, we don't care about volatility. */
1242 volatile_ok = 1;
1244 /* Try the most limited insn first, because there's no point
1245 including more than one in the machine description unless
1246 the more limited one has some advantage. */
1248 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1249 mode = GET_MODE_WIDER_MODE (mode))
1251 enum insn_code code = movmem_optab[(int) mode];
1252 insn_operand_predicate_fn pred;
1254 if (code != CODE_FOR_nothing
1255 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1256 here because if SIZE is less than the mode mask, as it is
1257 returned by the macro, it will definitely be less than the
1258 actual mode mask. */
1259 && ((GET_CODE (size) == CONST_INT
1260 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1261 <= (GET_MODE_MASK (mode) >> 1)))
1262 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1263 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1264 || (*pred) (x, BLKmode))
1265 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1266 || (*pred) (y, BLKmode))
1267 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1268 || (*pred) (opalign, VOIDmode)))
1270 rtx op2;
1271 rtx last = get_last_insn ();
1272 rtx pat;
1274 op2 = convert_to_mode (mode, size, 1);
1275 pred = insn_data[(int) code].operand[2].predicate;
1276 if (pred != 0 && ! (*pred) (op2, mode))
1277 op2 = copy_to_mode_reg (mode, op2);
1279 /* ??? When called via emit_block_move_for_call, it'd be
1280 nice if there were some way to inform the backend, so
1281 that it doesn't fail the expansion because it thinks
1282 emitting the libcall would be more efficient. */
1284 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1285 if (pat)
1287 emit_insn (pat);
1288 volatile_ok = save_volatile_ok;
1289 return true;
1291 else
1292 delete_insns_since (last);
1296 volatile_ok = save_volatile_ok;
1297 return false;
1300 /* A subroutine of emit_block_move. Expand a call to memcpy.
1301 Return the return value from memcpy, 0 otherwise. */
1303 static rtx
1304 emit_block_move_via_libcall (rtx dst, rtx src, rtx size)
1306 rtx dst_addr, src_addr;
1307 tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
1308 enum machine_mode size_mode;
1309 rtx retval;
1311 /* Emit code to copy the addresses of DST and SRC and SIZE into new
1312 pseudos. We can then place those new pseudos into a VAR_DECL and
1313 use them later. */
1315 dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
1316 src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));
1318 dst_addr = convert_memory_address (ptr_mode, dst_addr);
1319 src_addr = convert_memory_address (ptr_mode, src_addr);
1321 dst_tree = make_tree (ptr_type_node, dst_addr);
1322 src_tree = make_tree (ptr_type_node, src_addr);
1324 size_mode = TYPE_MODE (sizetype);
1326 size = convert_to_mode (size_mode, size, 1);
1327 size = copy_to_mode_reg (size_mode, size);
1329 /* It is incorrect to use the libcall calling conventions to call
1330 memcpy in this context. This could be a user call to memcpy and
1331 the user may wish to examine the return value from memcpy. For
1332 targets where libcalls and normal calls have different conventions
1333 for returning pointers, we could end up generating incorrect code. */
1335 size_tree = make_tree (sizetype, size);
1337 fn = emit_block_move_libcall_fn (true);
1338 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
1339 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1340 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
1342 /* Now we have to build up the CALL_EXPR itself. */
1343 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1344 call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1345 call_expr, arg_list, NULL_TREE);
1347 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1349 return retval;
1352 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1353 for the function we use for block copies. The first time FOR_CALL
1354 is true, we call assemble_external. */
1356 static GTY(()) tree block_move_fn;
1358 void
1359 init_block_move_fn (const char *asmspec)
1361 if (!block_move_fn)
1363 tree args, fn;
1365 fn = get_identifier ("memcpy");
1366 args = build_function_type_list (ptr_type_node, ptr_type_node,
1367 const_ptr_type_node, sizetype,
1368 NULL_TREE);
1370 fn = build_decl (FUNCTION_DECL, fn, args);
1371 DECL_EXTERNAL (fn) = 1;
1372 TREE_PUBLIC (fn) = 1;
1373 DECL_ARTIFICIAL (fn) = 1;
1374 TREE_NOTHROW (fn) = 1;
1376 block_move_fn = fn;
1379 if (asmspec)
1380 set_user_assembler_name (block_move_fn, asmspec);
1383 static tree
1384 emit_block_move_libcall_fn (int for_call)
1386 static bool emitted_extern;
1388 if (!block_move_fn)
1389 init_block_move_fn (NULL);
1391 if (for_call && !emitted_extern)
1393 emitted_extern = true;
1394 make_decl_rtl (block_move_fn);
1395 assemble_external (block_move_fn);
1398 return block_move_fn;
1401 /* A subroutine of emit_block_move. Copy the data via an explicit
1402 loop. This is used only when libcalls are forbidden. */
1403 /* ??? It'd be nice to copy in hunks larger than QImode. */
1405 static void
1406 emit_block_move_via_loop (rtx x, rtx y, rtx size,
1407 unsigned int align ATTRIBUTE_UNUSED)
1409 rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
1410 enum machine_mode iter_mode;
1412 iter_mode = GET_MODE (size);
1413 if (iter_mode == VOIDmode)
1414 iter_mode = word_mode;
1416 top_label = gen_label_rtx ();
1417 cmp_label = gen_label_rtx ();
1418 iter = gen_reg_rtx (iter_mode);
1420 emit_move_insn (iter, const0_rtx);
1422 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1423 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1424 do_pending_stack_adjust ();
1426 emit_jump (cmp_label);
1427 emit_label (top_label);
1429 tmp = convert_modes (Pmode, iter_mode, iter, true);
1430 x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
1431 y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
1432 x = change_address (x, QImode, x_addr);
1433 y = change_address (y, QImode, y_addr);
1435 emit_move_insn (x, y);
1437 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1438 true, OPTAB_LIB_WIDEN);
1439 if (tmp != iter)
1440 emit_move_insn (iter, tmp);
1442 emit_label (cmp_label);
1444 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1445 true, top_label);
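/* Illustrative sketch (not part of the original file): the RTL loop emitted
   above corresponds to the following plain C, copying one byte per
   iteration with the comparison placed at the bottom.  */

static void ATTRIBUTE_UNUSED
example_byte_copy_loop (unsigned char *x, const unsigned char *y,
			unsigned HOST_WIDE_INT n)
{
  unsigned HOST_WIDE_INT iter = 0;

  goto cmp;
 top:
  x[iter] = y[iter];
  iter++;
 cmp:
  if (iter < n)
    goto top;
}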
1448 /* Copy all or part of a value X into registers starting at REGNO.
1449 The number of registers to be filled is NREGS. */
1451 void
1452 move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
1454 int i;
1455 #ifdef HAVE_load_multiple
1456 rtx pat;
1457 rtx last;
1458 #endif
1460 if (nregs == 0)
1461 return;
1463 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1464 x = validize_mem (force_const_mem (mode, x));
1466 /* See if the machine can do this with a load multiple insn. */
1467 #ifdef HAVE_load_multiple
1468 if (HAVE_load_multiple)
1470 last = get_last_insn ();
1471 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1472 GEN_INT (nregs));
1473 if (pat)
1475 emit_insn (pat);
1476 return;
1478 else
1479 delete_insns_since (last);
1481 #endif
1483 for (i = 0; i < nregs; i++)
1484 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1485 operand_subword_force (x, i, mode));
1488 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1489 The number of registers to be filled is NREGS. */
1491 void
1492 move_block_from_reg (int regno, rtx x, int nregs)
1494 int i;
1496 if (nregs == 0)
1497 return;
1499 /* See if the machine can do this with a store multiple insn. */
1500 #ifdef HAVE_store_multiple
1501 if (HAVE_store_multiple)
1503 rtx last = get_last_insn ();
1504 rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1505 GEN_INT (nregs));
1506 if (pat)
1508 emit_insn (pat);
1509 return;
1511 else
1512 delete_insns_since (last);
1514 #endif
1516 for (i = 0; i < nregs; i++)
1518 rtx tem = operand_subword (x, i, 1, BLKmode);
1520 gcc_assert (tem);
1522 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1526 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1527 ORIG, where ORIG is a non-consecutive group of registers represented by
1528 a PARALLEL. The clone is identical to the original except in that the
1529 original set of registers is replaced by a new set of pseudo registers.
1530 The new set has the same modes as the original set. */
1533 gen_group_rtx (rtx orig)
1535 int i, length;
1536 rtx *tmps;
1538 gcc_assert (GET_CODE (orig) == PARALLEL);
1540 length = XVECLEN (orig, 0);
1541 tmps = alloca (sizeof (rtx) * length);
1543 /* Skip a NULL entry in first slot. */
1544 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1546 if (i)
1547 tmps[0] = 0;
1549 for (; i < length; i++)
1551 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1552 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1554 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1557 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
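/* Illustrative sketch (not part of the original file): building by hand a
   two-register PARALLEL group of the kind gen_group_rtx clones.  Here a
   16-byte value lives in two DImode hard registers, REGNO_A at byte offset
   0 and REGNO_A + 1 at byte offset 8; the register numbers and the helper
   name are hypothetical.  */

static rtx ATTRIBUTE_UNUSED
example_two_reg_group (unsigned int regno_a)
{
  rtx elt0 = gen_rtx_EXPR_LIST (VOIDmode, gen_rtx_REG (DImode, regno_a),
				GEN_INT (0));
  rtx elt1 = gen_rtx_EXPR_LIST (VOIDmode, gen_rtx_REG (DImode, regno_a + 1),
				GEN_INT (8));

  return gen_rtx_PARALLEL (BLKmode, gen_rtvec (2, elt0, elt1));
}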
1560 /* Emit code to move a block ORIG_SRC of type TYPE to a block DST,
1561 where DST is non-consecutive registers represented by a PARALLEL.
1562 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
1563 if not known. */
1565 void
1566 emit_group_load (rtx dst, rtx orig_src, tree type ATTRIBUTE_UNUSED, int ssize)
1568 rtx *tmps, src;
1569 int start, i;
1571 gcc_assert (GET_CODE (dst) == PARALLEL);
1573 if (!SCALAR_INT_MODE_P (GET_MODE (orig_src)))
1575 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
1576 if (imode == BLKmode)
1577 src = assign_stack_temp (GET_MODE (orig_src), ssize, 0);
1578 else
1579 src = gen_reg_rtx (imode);
1580 if (imode != BLKmode)
1581 src = gen_lowpart (GET_MODE (orig_src), src);
1582 emit_move_insn (src, orig_src);
1583 /* ...and back again. */
1584 if (imode != BLKmode)
1585 src = gen_lowpart (imode, src);
1586 emit_group_load (dst, src, type, ssize);
1587 return;
1590 /* Check for a NULL entry, used to indicate that the parameter goes
1591 both on the stack and in registers. */
1592 if (XEXP (XVECEXP (dst, 0, 0), 0))
1593 start = 0;
1594 else
1595 start = 1;
1597 tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));
1599 /* Process the pieces. */
1600 for (i = start; i < XVECLEN (dst, 0); i++)
1602 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1603 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1604 unsigned int bytelen = GET_MODE_SIZE (mode);
1605 int shift = 0;
1607 /* Handle trailing fragments that run over the size of the struct. */
1608 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1610 /* Arrange to shift the fragment to where it belongs.
1611 extract_bit_field loads to the lsb of the reg. */
1612 if (
1613 #ifdef BLOCK_REG_PADDING
1614 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1615 == (BYTES_BIG_ENDIAN ? upward : downward)
1616 #else
1617 BYTES_BIG_ENDIAN
1618 #endif
1620 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1621 bytelen = ssize - bytepos;
1622 gcc_assert (bytelen > 0);
1625 /* If we won't be loading directly from memory, protect the real source
1626 from strange tricks we might play; but make sure that the source can
1627 be loaded directly into the destination. */
1628 src = orig_src;
1629 if (!MEM_P (orig_src)
1630 && (!CONSTANT_P (orig_src)
1631 || (GET_MODE (orig_src) != mode
1632 && GET_MODE (orig_src) != VOIDmode)))
1634 if (GET_MODE (orig_src) == VOIDmode)
1635 src = gen_reg_rtx (mode);
1636 else
1637 src = gen_reg_rtx (GET_MODE (orig_src));
1639 emit_move_insn (src, orig_src);
1642 /* Optimize the access just a bit. */
1643 if (MEM_P (src)
1644 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1645 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
1646 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1647 && bytelen == GET_MODE_SIZE (mode))
1649 tmps[i] = gen_reg_rtx (mode);
1650 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
1652 else if (GET_CODE (src) == CONCAT)
1654 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1655 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1657 if ((bytepos == 0 && bytelen == slen0)
1658 || (bytepos != 0 && bytepos + bytelen <= slen))
1660 /* The following assumes that the concatenated objects all
1661 have the same size. In this case, a simple calculation
1662 can be used to determine the object and the bit field
1663 to be extracted. */
1664 tmps[i] = XEXP (src, bytepos / slen0);
1665 if (! CONSTANT_P (tmps[i])
1666 && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
1667 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
1668 (bytepos % slen0) * BITS_PER_UNIT,
1669 1, NULL_RTX, mode, mode);
1671 else
1673 rtx mem;
1675 gcc_assert (!bytepos);
1676 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1677 emit_move_insn (mem, src);
1678 tmps[i] = adjust_address (mem, mode, 0);
1681 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1682 SIMD register, which is currently broken. While we get GCC
1683 to emit proper RTL for these cases, let's dump to memory. */
1684 else if (VECTOR_MODE_P (GET_MODE (dst))
1685 && REG_P (src))
1687 int slen = GET_MODE_SIZE (GET_MODE (src));
1688 rtx mem;
1690 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1691 emit_move_insn (mem, src);
1692 tmps[i] = adjust_address (mem, mode, (int) bytepos);
1694 else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
1695 && XVECLEN (dst, 0) > 1)
1696 tmps[i] = simplify_gen_subreg (mode, src, GET_MODE(dst), bytepos);
1697 else if (CONSTANT_P (src)
1698 || (REG_P (src) && GET_MODE (src) == mode))
1699 tmps[i] = src;
1700 else
1701 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1702 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
1703 mode, mode);
1705 if (shift)
1706 tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
1707 build_int_cst (NULL_TREE, shift), tmps[i], 0);
1710 /* Copy the extracted pieces into the proper (probable) hard regs. */
1711 for (i = start; i < XVECLEN (dst, 0); i++)
1712 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
1715 /* Emit code to move a block SRC to block DST, where SRC and DST are
1716 non-consecutive groups of registers, each represented by a PARALLEL. */
1718 void
1719 emit_group_move (rtx dst, rtx src)
1721 int i;
1723 gcc_assert (GET_CODE (src) == PARALLEL
1724 && GET_CODE (dst) == PARALLEL
1725 && XVECLEN (src, 0) == XVECLEN (dst, 0));
1727 /* Skip first entry if NULL. */
1728 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1729 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1730 XEXP (XVECEXP (src, 0, i), 0));
1733 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1734 where SRC is non-consecutive registers represented by a PARALLEL.
1735 SSIZE represents the total size of block ORIG_DST, or -1 if not
1736 known. */
1738 void
1739 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
1741 rtx *tmps, dst;
1742 int start, i;
1744 gcc_assert (GET_CODE (src) == PARALLEL);
1746 if (!SCALAR_INT_MODE_P (GET_MODE (orig_dst)))
1748 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
1749 if (imode == BLKmode)
1750 dst = assign_stack_temp (GET_MODE (orig_dst), ssize, 0);
1751 else
1752 dst = gen_reg_rtx (imode);
1753 emit_group_store (dst, src, type, ssize);
1754 if (imode != BLKmode)
1755 dst = gen_lowpart (GET_MODE (orig_dst), dst);
1756 emit_move_insn (orig_dst, dst);
1757 return;
1760 /* Check for a NULL entry, used to indicate that the parameter goes
1761 both on the stack and in registers. */
1762 if (XEXP (XVECEXP (src, 0, 0), 0))
1763 start = 0;
1764 else
1765 start = 1;
1767 tmps = alloca (sizeof (rtx) * XVECLEN (src, 0));
1769 /* Copy the (probable) hard regs into pseudos. */
1770 for (i = start; i < XVECLEN (src, 0); i++)
1772 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1773 tmps[i] = gen_reg_rtx (GET_MODE (reg));
1774 emit_move_insn (tmps[i], reg);
1777 /* If we won't be storing directly into memory, protect the real destination
1778 from strange tricks we might play. */
1779 dst = orig_dst;
1780 if (GET_CODE (dst) == PARALLEL)
1782 rtx temp;
1784 /* We can get a PARALLEL dst if there is a conditional expression in
1785 a return statement. In that case, the dst and src are the same,
1786 so no action is necessary. */
1787 if (rtx_equal_p (dst, src))
1788 return;
1790 /* It is unclear if we can ever reach here, but we may as well handle
1791 it. Allocate a temporary, and split this into a store/load to/from
1792 the temporary. */
1794 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
1795 emit_group_store (temp, src, type, ssize);
1796 emit_group_load (dst, temp, type, ssize);
1797 return;
1799 else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
1801 dst = gen_reg_rtx (GET_MODE (orig_dst));
1802 /* Make life a bit easier for combine. */
1803 emit_move_insn (dst, CONST0_RTX (GET_MODE (orig_dst)));
1806 /* Process the pieces. */
1807 for (i = start; i < XVECLEN (src, 0); i++)
1809 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
1810 enum machine_mode mode = GET_MODE (tmps[i]);
1811 unsigned int bytelen = GET_MODE_SIZE (mode);
1812 rtx dest = dst;
1814 /* Handle trailing fragments that run over the size of the struct. */
1815 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1817 /* store_bit_field always takes its value from the lsb.
1818 Move the fragment to the lsb if it's not already there. */
1819 if (
1820 #ifdef BLOCK_REG_PADDING
1821 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
1822 == (BYTES_BIG_ENDIAN ? upward : downward)
1823 #else
1824 BYTES_BIG_ENDIAN
1825 #endif
1828 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1829 tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
1830 build_int_cst (NULL_TREE, shift),
1831 tmps[i], 0);
1833 bytelen = ssize - bytepos;
1836 if (GET_CODE (dst) == CONCAT)
1838 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
1839 dest = XEXP (dst, 0);
1840 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
1842 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
1843 dest = XEXP (dst, 1);
1845 else
1847 gcc_assert (bytepos == 0 && XVECLEN (src, 0));
1848 dest = assign_stack_temp (GET_MODE (dest),
1849 GET_MODE_SIZE (GET_MODE (dest)), 0);
1850 emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
1851 tmps[i]);
1852 dst = dest;
1853 break;
1857 /* Optimize the access just a bit. */
1858 if (MEM_P (dest)
1859 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
1860 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
1861 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1862 && bytelen == GET_MODE_SIZE (mode))
1863 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
1864 else
1865 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
1866 mode, tmps[i]);
1869 /* Copy from the pseudo into the (probable) hard reg. */
1870 if (orig_dst != dst)
1871 emit_move_insn (orig_dst, dst);
1874 /* Generate code to copy a BLKmode object of TYPE out of a
1875 set of registers starting with SRCREG into TGTBLK. If TGTBLK
1876 is null, a stack temporary is created. TGTBLK is returned.
1878 The purpose of this routine is to handle functions that return
1879 BLKmode structures in registers. Some machines (the PA for example)
1880 want to return all small structures in registers regardless of the
1881 structure's alignment. */
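/* Illustrative sketch (not part of the compiler): a caller expanding a
   call whose small BLKmode struct result comes back in a hard register
   might do, roughly,

       target = copy_blkmode_from_reg (NULL_RTX, valreg, TREE_TYPE (exp));

   where VALREG and EXP are hypothetical; passing a null TGTBLK lets this
   routine allocate the stack temporary itself.  */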
1884 copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
1886 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
1887 rtx src = NULL, dst = NULL;
1888 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
1889 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
1891 if (tgtblk == 0)
1893 tgtblk = assign_temp (build_qualified_type (type,
1894 (TYPE_QUALS (type)
1895 | TYPE_QUAL_CONST)),
1896 0, 1, 1);
1897 preserve_temp_slots (tgtblk);
1900 /* This code assumes srcreg is at least a full word. If it isn't, copy it
1901 into a new pseudo which is a full word. */
1903 if (GET_MODE (srcreg) != BLKmode
1904 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
1905 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
1907 /* If the structure doesn't take up a whole number of words, see whether
1908 SRCREG is padded on the left or on the right. If it's on the left,
1909 set PADDING_CORRECTION to the number of bits to skip.
1911 In most ABIs, the structure will be returned at the least significant end of
1912 the register, which translates to right padding on little-endian
1913 targets and left padding on big-endian targets. The opposite
1914 holds if the structure is returned at the most significant
1915 end of the register. */
1916 if (bytes % UNITS_PER_WORD != 0
1917 && (targetm.calls.return_in_msb (type)
1918 ? !BYTES_BIG_ENDIAN
1919 : BYTES_BIG_ENDIAN))
1920 padding_correction
1921 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
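  /* Worked example (illustrative): with 32-bit words, a 6-byte structure
     occupies 48 of the 64 bits of a two-word return value; if it sits at
     the least significant end on a big-endian target, the 16 unused bits
     are on the left, so PADDING_CORRECTION = 32 - (6 % 4) * 8 = 16 bits
     to skip.  */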
1923 /* Copy the structure BITSIZE bits at a time.
1925 We could probably emit more efficient code for machines which do not use
1926 strict alignment, but it doesn't seem worth the effort at the current
1927 time. */
1928 for (bitpos = 0, xbitpos = padding_correction;
1929 bitpos < bytes * BITS_PER_UNIT;
1930 bitpos += bitsize, xbitpos += bitsize)
1932 /* We need a new source operand each time xbitpos is on a
1933 word boundary and when xbitpos == padding_correction
1934 (the first time through). */
1935 if (xbitpos % BITS_PER_WORD == 0
1936 || xbitpos == padding_correction)
1937 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
1938 GET_MODE (srcreg));
1940 /* We need a new destination operand each time bitpos is on
1941 a word boundary. */
1942 if (bitpos % BITS_PER_WORD == 0)
1943 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
1945 /* Use xbitpos for the source extraction (right justified) and
1946 bitpos for the destination store (left justified). */
1947 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
1948 extract_bit_field (src, bitsize,
1949 xbitpos % BITS_PER_WORD, 1,
1950 NULL_RTX, word_mode, word_mode));
1953 return tgtblk;
1956 /* Add a USE expression for REG to the (possibly empty) list pointed
1957 to by CALL_FUSAGE. REG must denote a hard register. */
1959 void
1960 use_reg (rtx *call_fusage, rtx reg)
1962 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
1964 *call_fusage
1965 = gen_rtx_EXPR_LIST (VOIDmode,
1966 gen_rtx_USE (VOIDmode, reg), *call_fusage);
1969 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
1970 starting at REGNO. All of these registers must be hard registers. */
1972 void
1973 use_regs (rtx *call_fusage, int regno, int nregs)
1975 int i;
1977 gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
1979 for (i = 0; i < nregs; i++)
1980 use_reg (call_fusage, regno_reg_rtx[regno + i]);
1983 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
1984 PARALLEL REGS. This is for calls that pass values in multiple
1985 non-contiguous locations. The Irix 6 ABI has examples of this. */
1987 void
1988 use_group_regs (rtx *call_fusage, rtx regs)
1990 int i;
1992 for (i = 0; i < XVECLEN (regs, 0); i++)
1994 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
1996 /* A NULL entry means the parameter goes both on the stack and in
1997 registers. This can also be a MEM for targets that pass values
1998 partially on the stack and partially in registers. */
1999 if (reg != 0 && REG_P (reg))
2000 use_reg (call_fusage, reg);
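/* Illustrative sketch (hypothetical hard register number): recording that
   a call reads an argument register, so flow analysis knows the value is
   live at the call:

       rtx call_fusage = NULL_RTX;
       use_reg (&call_fusage, gen_rtx_REG (word_mode, 3));

   CALL_FUSAGE would then be attached to the CALL_INSN by the caller.  */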
2005 /* Determine whether the LEN bytes generated by CONSTFUN can be
2006 stored to memory using several move instructions. CONSTFUNDATA is
2007 a pointer which will be passed as argument in every CONSTFUN call.
2008 ALIGN is maximum alignment we can assume. Return nonzero if a
2009 call to store_by_pieces should succeed. */
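/* Illustrative sketch, in the spirit of the string built-in expanders
   (the callback and its data are assumptions here, not fixed API):

       if (can_store_by_pieces (len, builtin_memset_read_str, &c, align))
         store_by_pieces (dest, len, builtin_memset_read_str, &c, align, 0);

   i.e. the same CONSTFUN/CONSTFUNDATA pair is later handed to
   store_by_pieces once this predicate says the expansion is worthwhile.  */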
2012 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2013 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2014 void *constfundata, unsigned int align)
2016 unsigned HOST_WIDE_INT l;
2017 unsigned int max_size;
2018 HOST_WIDE_INT offset = 0;
2019 enum machine_mode mode, tmode;
2020 enum insn_code icode;
2021 int reverse;
2022 rtx cst;
2024 if (len == 0)
2025 return 1;
2027 if (! STORE_BY_PIECES_P (len, align))
2028 return 0;
2030 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2031 if (align >= GET_MODE_ALIGNMENT (tmode))
2032 align = GET_MODE_ALIGNMENT (tmode);
2033 else
2035 enum machine_mode xmode;
2037 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2038 tmode != VOIDmode;
2039 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2040 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2041 || SLOW_UNALIGNED_ACCESS (tmode, align))
2042 break;
2044 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2047 /* We would first store what we can in the largest integer mode, then go to
2048 successively smaller modes. */
2050 for (reverse = 0;
2051 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2052 reverse++)
2054 l = len;
2055 mode = VOIDmode;
2056 max_size = STORE_MAX_PIECES + 1;
2057 while (max_size > 1)
2059 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2060 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2061 if (GET_MODE_SIZE (tmode) < max_size)
2062 mode = tmode;
2064 if (mode == VOIDmode)
2065 break;
2067 icode = mov_optab->handlers[(int) mode].insn_code;
2068 if (icode != CODE_FOR_nothing
2069 && align >= GET_MODE_ALIGNMENT (mode))
2071 unsigned int size = GET_MODE_SIZE (mode);
2073 while (l >= size)
2075 if (reverse)
2076 offset -= size;
2078 cst = (*constfun) (constfundata, offset, mode);
2079 if (!LEGITIMATE_CONSTANT_P (cst))
2080 return 0;
2082 if (!reverse)
2083 offset += size;
2085 l -= size;
2089 max_size = GET_MODE_SIZE (mode);
2092 /* The code above should have handled everything. */
2093 gcc_assert (!l);
2096 return 1;
2099 /* Generate several move instructions to store LEN bytes generated by
2100 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2101 pointer which will be passed as argument in every CONSTFUN call.
2102 ALIGN is maximum alignment we can assume.
2103 If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
2104 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
2105 stpcpy. */
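/* A small sketch of the ENDP convention (DEST, LEN, CST_FN and DATA are
   hypothetical):

       rtx end = store_by_pieces (dest, len, cst_fn, data, align, 1);

   returns a QImode MEM just past the last byte stored, as mempcpy would;
   passing 2 instead yields the byte before that, as stpcpy would.  */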
2108 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2109 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2110 void *constfundata, unsigned int align, int endp)
2112 struct store_by_pieces data;
2114 if (len == 0)
2116 gcc_assert (endp != 2);
2117 return to;
2120 gcc_assert (STORE_BY_PIECES_P (len, align));
2121 data.constfun = constfun;
2122 data.constfundata = constfundata;
2123 data.len = len;
2124 data.to = to;
2125 store_by_pieces_1 (&data, align);
2126 if (endp)
2128 rtx to1;
2130 gcc_assert (!data.reverse);
2131 if (data.autinc_to)
2133 if (endp == 2)
2135 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2136 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2137 else
2138 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2139 -1));
2141 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2142 data.offset);
2144 else
2146 if (endp == 2)
2147 --data.offset;
2148 to1 = adjust_address (data.to, QImode, data.offset);
2150 return to1;
2152 else
2153 return data.to;
2156 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2157 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2159 static void
2160 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2162 struct store_by_pieces data;
2164 if (len == 0)
2165 return;
2167 data.constfun = clear_by_pieces_1;
2168 data.constfundata = NULL;
2169 data.len = len;
2170 data.to = to;
2171 store_by_pieces_1 (&data, align);
2174 /* Callback routine for clear_by_pieces.
2175 Return const0_rtx unconditionally. */
2177 static rtx
2178 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2179 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2180 enum machine_mode mode ATTRIBUTE_UNUSED)
2182 return const0_rtx;
2185 /* Subroutine of clear_by_pieces and store_by_pieces.
2186 Generate several move instructions to store LEN bytes of block TO. (A MEM
2187 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2189 static void
2190 store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
2191 unsigned int align ATTRIBUTE_UNUSED)
2193 rtx to_addr = XEXP (data->to, 0);
2194 unsigned int max_size = STORE_MAX_PIECES + 1;
2195 enum machine_mode mode = VOIDmode, tmode;
2196 enum insn_code icode;
2198 data->offset = 0;
2199 data->to_addr = to_addr;
2200 data->autinc_to
2201 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2202 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2204 data->explicit_inc_to = 0;
2205 data->reverse
2206 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2207 if (data->reverse)
2208 data->offset = data->len;
2210 /* If storing requires more than two move insns,
2211 copy addresses to registers (to make displacements shorter)
2212 and use post-increment if available. */
2213 if (!data->autinc_to
2214 && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2216 /* Determine the main mode we'll be using. */
2217 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2218 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2219 if (GET_MODE_SIZE (tmode) < max_size)
2220 mode = tmode;
2222 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2224 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2225 data->autinc_to = 1;
2226 data->explicit_inc_to = -1;
2229 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2230 && ! data->autinc_to)
2232 data->to_addr = copy_addr_to_reg (to_addr);
2233 data->autinc_to = 1;
2234 data->explicit_inc_to = 1;
2237 if ( !data->autinc_to && CONSTANT_P (to_addr))
2238 data->to_addr = copy_addr_to_reg (to_addr);
2241 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2242 if (align >= GET_MODE_ALIGNMENT (tmode))
2243 align = GET_MODE_ALIGNMENT (tmode);
2244 else
2246 enum machine_mode xmode;
2248 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2249 tmode != VOIDmode;
2250 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2251 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2252 || SLOW_UNALIGNED_ACCESS (tmode, align))
2253 break;
2255 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2258 /* First store what we can in the largest integer mode, then go to
2259 successively smaller modes. */
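  /* For instance, with STORE_MAX_PIECES == 8, suitable alignment, and the
     usual integer move patterns available, LEN == 7 would typically be
     handled as one 4-byte store (SImode), one 2-byte store (HImode) and
     one 1-byte store (QImode).  */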
2261 while (max_size > 1)
2263 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2264 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2265 if (GET_MODE_SIZE (tmode) < max_size)
2266 mode = tmode;
2268 if (mode == VOIDmode)
2269 break;
2271 icode = mov_optab->handlers[(int) mode].insn_code;
2272 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2273 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2275 max_size = GET_MODE_SIZE (mode);
2278 /* The code above should have handled everything. */
2279 gcc_assert (!data->len);
2282 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2283 with move instructions for mode MODE. GENFUN is the gen_... function
2284 to make a move insn for that mode. DATA has all the other info. */
2286 static void
2287 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2288 struct store_by_pieces *data)
2290 unsigned int size = GET_MODE_SIZE (mode);
2291 rtx to1, cst;
2293 while (data->len >= size)
2295 if (data->reverse)
2296 data->offset -= size;
2298 if (data->autinc_to)
2299 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2300 data->offset);
2301 else
2302 to1 = adjust_address (data->to, mode, data->offset);
2304 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2305 emit_insn (gen_add2_insn (data->to_addr,
2306 GEN_INT (-(HOST_WIDE_INT) size)));
2308 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2309 emit_insn ((*genfun) (to1, cst));
2311 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2312 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2314 if (! data->reverse)
2315 data->offset += size;
2317 data->len -= size;
2321 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2322 its length in bytes. */
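/* Illustrative sketch (OBJ is a hypothetical BLKmode MEM): zeroing a
   64-byte block would be requested as

       clear_storage (obj, GEN_INT (64));

   and, depending on CLEAR_BY_PIECES_P and the clrmem patterns, expands to
   piecewise stores, a clrmem insn, or a call to memset.  */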
2325 clear_storage (rtx object, rtx size)
2327 rtx retval = 0;
2328 unsigned int align = (MEM_P (object) ? MEM_ALIGN (object)
2329 : GET_MODE_ALIGNMENT (GET_MODE (object)));
2331 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2332 just move a zero. Otherwise, do this a piece at a time. */
2333 if (GET_MODE (object) != BLKmode
2334 && GET_CODE (size) == CONST_INT
2335 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (object)))
2336 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2337 else
2339 if (size == const0_rtx)
2341 else if (GET_CODE (size) == CONST_INT
2342 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2343 clear_by_pieces (object, INTVAL (size), align);
2344 else if (clear_storage_via_clrmem (object, size, align))
2346 else
2347 retval = clear_storage_via_libcall (object, size);
2350 return retval;
2353 /* A subroutine of clear_storage. Expand a clrmem pattern;
2354 return true if successful. */
2356 static bool
2357 clear_storage_via_clrmem (rtx object, rtx size, unsigned int align)
2359 /* Try the most limited insn first, because there's no point
2360 including more than one in the machine description unless
2361 the more limited one has some advantage. */
2363 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2364 enum machine_mode mode;
2366 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2367 mode = GET_MODE_WIDER_MODE (mode))
2369 enum insn_code code = clrmem_optab[(int) mode];
2370 insn_operand_predicate_fn pred;
2372 if (code != CODE_FOR_nothing
2373 /* We don't need MODE to be narrower than
2374 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2375 the mode mask, as it is returned by the macro, it will
2376 definitely be less than the actual mode mask. */
2377 && ((GET_CODE (size) == CONST_INT
2378 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2379 <= (GET_MODE_MASK (mode) >> 1)))
2380 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2381 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2382 || (*pred) (object, BLKmode))
2383 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2384 || (*pred) (opalign, VOIDmode)))
2386 rtx op1;
2387 rtx last = get_last_insn ();
2388 rtx pat;
2390 op1 = convert_to_mode (mode, size, 1);
2391 pred = insn_data[(int) code].operand[1].predicate;
2392 if (pred != 0 && ! (*pred) (op1, mode))
2393 op1 = copy_to_mode_reg (mode, op1);
2395 pat = GEN_FCN ((int) code) (object, op1, opalign);
2396 if (pat)
2398 emit_insn (pat);
2399 return true;
2401 else
2402 delete_insns_since (last);
2406 return false;
2409 /* A subroutine of clear_storage. Expand a call to memset.
2410 Return the return value of memset, 0 otherwise. */
2412 static rtx
2413 clear_storage_via_libcall (rtx object, rtx size)
2415 tree call_expr, arg_list, fn, object_tree, size_tree;
2416 enum machine_mode size_mode;
2417 rtx retval;
2419 /* Emit code to copy OBJECT and SIZE into new pseudos. We can then
2420 place those new pseudos into a VAR_DECL and use them later. */
2422 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2424 size_mode = TYPE_MODE (sizetype);
2425 size = convert_to_mode (size_mode, size, 1);
2426 size = copy_to_mode_reg (size_mode, size);
2428 /* It is incorrect to use the libcall calling conventions to call
2429 memset in this context. This could be a user call to memset and
2430 the user may wish to examine the return value from memset. For
2431 targets where libcalls and normal calls have different conventions
2432 for returning pointers, we could end up generating incorrect code. */
2434 object_tree = make_tree (ptr_type_node, object);
2435 size_tree = make_tree (sizetype, size);
2437 fn = clear_storage_libcall_fn (true);
2438 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
2439 arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
2440 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
2442 /* Now we have to build up the CALL_EXPR itself. */
2443 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2444 call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2445 call_expr, arg_list, NULL_TREE);
2447 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2449 return retval;
2452 /* A subroutine of clear_storage_via_libcall. Create the tree node
2453 for the function we use for block clears. The first time FOR_CALL
2454 is true, we call assemble_external. */
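/* Sketch (hypothetical assembler name): if the user's source provides its
   own memset under a different assembler name, the block-clear libcall can
   be redirected with

       init_block_clear_fn ("__my_memset");

   otherwise the lazily built "memset" declaration below is used as is.  */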
2456 static GTY(()) tree block_clear_fn;
2458 void
2459 init_block_clear_fn (const char *asmspec)
2461 if (!block_clear_fn)
2463 tree fn, args;
2465 fn = get_identifier ("memset");
2466 args = build_function_type_list (ptr_type_node, ptr_type_node,
2467 integer_type_node, sizetype,
2468 NULL_TREE);
2470 fn = build_decl (FUNCTION_DECL, fn, args);
2471 DECL_EXTERNAL (fn) = 1;
2472 TREE_PUBLIC (fn) = 1;
2473 DECL_ARTIFICIAL (fn) = 1;
2474 TREE_NOTHROW (fn) = 1;
2476 block_clear_fn = fn;
2479 if (asmspec)
2480 set_user_assembler_name (block_clear_fn, asmspec);
2483 static tree
2484 clear_storage_libcall_fn (int for_call)
2486 static bool emitted_extern;
2488 if (!block_clear_fn)
2489 init_block_clear_fn (NULL);
2491 if (for_call && !emitted_extern)
2493 emitted_extern = true;
2494 make_decl_rtl (block_clear_fn);
2495 assemble_external (block_clear_fn);
2498 return block_clear_fn;
2501 /* Generate code to copy Y into X.
2502 Both Y and X must have the same mode, except that
2503 Y can be a constant with VOIDmode.
2504 This mode cannot be BLKmode; use emit_block_move for that.
2506 Return the last instruction emitted. */
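/* Minimal sketch: loading a constant into a fresh pseudo.

       rtx reg = gen_reg_rtx (SImode);
       emit_move_insn (reg, GEN_INT (42));

   The constant is forced into memory first if it is not
   LEGITIMATE_CONSTANT_P for the target.  */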
2509 emit_move_insn (rtx x, rtx y)
2511 enum machine_mode mode = GET_MODE (x);
2512 rtx y_cst = NULL_RTX;
2513 rtx last_insn, set;
2515 gcc_assert (mode != BLKmode
2516 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
2518 if (CONSTANT_P (y))
2520 if (optimize
2521 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
2522 && (last_insn = compress_float_constant (x, y)))
2523 return last_insn;
2525 y_cst = y;
2527 if (!LEGITIMATE_CONSTANT_P (y))
2529 y = force_const_mem (mode, y);
2531 /* If the target's cannot_force_const_mem prevented the spill,
2532 assume that the target's move expanders will also take care
2533 of the non-legitimate constant. */
2534 if (!y)
2535 y = y_cst;
2539 /* If X or Y are memory references, verify that their addresses are valid
2540 for the machine. */
2541 if (MEM_P (x)
2542 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2543 && ! push_operand (x, GET_MODE (x)))
2544 || (flag_force_addr
2545 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2546 x = validize_mem (x);
2548 if (MEM_P (y)
2549 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2550 || (flag_force_addr
2551 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2552 y = validize_mem (y);
2554 gcc_assert (mode != BLKmode);
2556 last_insn = emit_move_insn_1 (x, y);
2558 if (y_cst && REG_P (x)
2559 && (set = single_set (last_insn)) != NULL_RTX
2560 && SET_DEST (set) == x
2561 && ! rtx_equal_p (y_cst, SET_SRC (set)))
2562 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
2564 return last_insn;
2567 /* Low level part of emit_move_insn.
2568 Called just like emit_move_insn, but assumes X and Y
2569 are basically valid. */
2572 emit_move_insn_1 (rtx x, rtx y)
2574 enum machine_mode mode = GET_MODE (x);
2575 enum machine_mode submode;
2576 enum mode_class class = GET_MODE_CLASS (mode);
2578 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
2580 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2581 return
2582 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2584 /* Expand complex moves by moving real part and imag part, if possible. */
2585 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2586 && BLKmode != (submode = GET_MODE_INNER (mode))
2587 && (mov_optab->handlers[(int) submode].insn_code
2588 != CODE_FOR_nothing))
2590 /* Don't split destination if it is a stack push. */
2591 int stack = push_operand (x, GET_MODE (x));
2593 #ifdef PUSH_ROUNDING
2594 /* In case we output to the stack, but the size is smaller than the
2595 machine can push exactly, we need to use move instructions. */
2596 if (stack
2597 && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
2598 != GET_MODE_SIZE (submode)))
2600 rtx temp;
2601 HOST_WIDE_INT offset1, offset2;
2603 /* Do not use anti_adjust_stack, since we don't want to update
2604 stack_pointer_delta. */
2605 temp = expand_binop (Pmode,
2606 #ifdef STACK_GROWS_DOWNWARD
2607 sub_optab,
2608 #else
2609 add_optab,
2610 #endif
2611 stack_pointer_rtx,
2612 GEN_INT
2613 (PUSH_ROUNDING
2614 (GET_MODE_SIZE (GET_MODE (x)))),
2615 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
2617 if (temp != stack_pointer_rtx)
2618 emit_move_insn (stack_pointer_rtx, temp);
2620 #ifdef STACK_GROWS_DOWNWARD
2621 offset1 = 0;
2622 offset2 = GET_MODE_SIZE (submode);
2623 #else
2624 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
2625 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
2626 + GET_MODE_SIZE (submode));
2627 #endif
2629 emit_move_insn (change_address (x, submode,
2630 gen_rtx_PLUS (Pmode,
2631 stack_pointer_rtx,
2632 GEN_INT (offset1))),
2633 gen_realpart (submode, y));
2634 emit_move_insn (change_address (x, submode,
2635 gen_rtx_PLUS (Pmode,
2636 stack_pointer_rtx,
2637 GEN_INT (offset2))),
2638 gen_imagpart (submode, y));
2640 else
2641 #endif
2642 /* If this is a stack, push the highpart first, so it
2643 will be in the argument order.
2645 In that case, change_address is used only to convert
2646 the mode, not to change the address. */
2647 if (stack)
2649 /* Note that the real part always precedes the imag part in memory
2650 regardless of machine's endianness. */
2651 #ifdef STACK_GROWS_DOWNWARD
2652 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2653 gen_imagpart (submode, y));
2654 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2655 gen_realpart (submode, y));
2656 #else
2657 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2658 gen_realpart (submode, y));
2659 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2660 gen_imagpart (submode, y));
2661 #endif
2663 else
2665 rtx realpart_x, realpart_y;
2666 rtx imagpart_x, imagpart_y;
2668 /* If this is a complex value with each part being smaller than a
2669 word, the usual calling sequence will likely pack the pieces into
2670 a single register. Unfortunately, SUBREG of hard registers only
2671 deals in terms of words, so we have a problem converting input
2672 arguments to the CONCAT of two registers that is used elsewhere
2673 for complex values. If this is before reload, we can copy it into
2674 memory and reload. FIXME, we should see about using extract and
2675 insert on integer registers, but complex short and complex char
2676 variables should rarely be used. */
2677 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2678 && (reload_in_progress | reload_completed) == 0)
2680 int packed_dest_p
2681 = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2682 int packed_src_p
2683 = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2685 if (packed_dest_p || packed_src_p)
2687 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2688 ? MODE_FLOAT : MODE_INT);
2690 enum machine_mode reg_mode
2691 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2693 if (reg_mode != BLKmode)
2695 rtx mem = assign_stack_temp (reg_mode,
2696 GET_MODE_SIZE (mode), 0);
2697 rtx cmem = adjust_address (mem, mode, 0);
2699 if (packed_dest_p)
2701 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2703 emit_move_insn_1 (cmem, y);
2704 return emit_move_insn_1 (sreg, mem);
2706 else
2708 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2710 emit_move_insn_1 (mem, sreg);
2711 return emit_move_insn_1 (x, cmem);
2717 realpart_x = gen_realpart (submode, x);
2718 realpart_y = gen_realpart (submode, y);
2719 imagpart_x = gen_imagpart (submode, x);
2720 imagpart_y = gen_imagpart (submode, y);
2722 /* Show the output dies here. This is necessary for SUBREGs
2723 of pseudos since we cannot track their lifetimes correctly;
2724 hard regs shouldn't appear here except as return values.
2725 We never want to emit such a clobber after reload. */
2726 if (x != y
2727 && ! (reload_in_progress || reload_completed)
2728 && (GET_CODE (realpart_x) == SUBREG
2729 || GET_CODE (imagpart_x) == SUBREG))
2730 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2732 emit_move_insn (realpart_x, realpart_y);
2733 emit_move_insn (imagpart_x, imagpart_y);
2736 return get_last_insn ();
2739 /* Handle MODE_CC modes: If we don't have a special move insn for this mode,
2740 find a mode to do it in. If we have a movcc, use it. Otherwise,
2741 find the MODE_INT mode of the same width. */
2742 else if (GET_MODE_CLASS (mode) == MODE_CC
2743 && mov_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
2745 enum insn_code insn_code;
2746 enum machine_mode tmode = VOIDmode;
2747 rtx x1 = x, y1 = y;
2749 if (mode != CCmode
2750 && mov_optab->handlers[(int) CCmode].insn_code != CODE_FOR_nothing)
2751 tmode = CCmode;
2752 else
2753 for (tmode = QImode; tmode != VOIDmode;
2754 tmode = GET_MODE_WIDER_MODE (tmode))
2755 if (GET_MODE_SIZE (tmode) == GET_MODE_SIZE (mode))
2756 break;
2758 gcc_assert (tmode != VOIDmode);
2760 /* Get X and Y in TMODE. We can't use gen_lowpart here because it
2761 may call change_address which is not appropriate if we were
2762 called when a reload was in progress. We don't have to worry
2763 about changing the address since the size in bytes is supposed to
2764 be the same. Copy the MEM to change the mode and move any
2765 substitutions from the old MEM to the new one. */
2767 if (reload_in_progress)
2769 x = gen_lowpart_common (tmode, x1);
2770 if (x == 0 && MEM_P (x1))
2772 x = adjust_address_nv (x1, tmode, 0);
2773 copy_replacements (x1, x);
2776 y = gen_lowpart_common (tmode, y1);
2777 if (y == 0 && MEM_P (y1))
2779 y = adjust_address_nv (y1, tmode, 0);
2780 copy_replacements (y1, y);
2783 else
2785 x = gen_lowpart (tmode, x);
2786 y = gen_lowpart (tmode, y);
2789 insn_code = mov_optab->handlers[(int) tmode].insn_code;
2790 return emit_insn (GEN_FCN (insn_code) (x, y));
2793 /* Try using a move pattern for the corresponding integer mode. This is
2794 only safe when simplify_subreg can convert MODE constants into integer
2795 constants. At present, it can only do this reliably if the value
2796 fits within a HOST_WIDE_INT. */
2797 else if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
2798 && (submode = int_mode_for_mode (mode)) != BLKmode
2799 && mov_optab->handlers[submode].insn_code != CODE_FOR_nothing)
2800 return emit_insn (GEN_FCN (mov_optab->handlers[submode].insn_code)
2801 (simplify_gen_subreg (submode, x, mode, 0),
2802 simplify_gen_subreg (submode, y, mode, 0)));
2804 /* This will handle any multi-word or full-word mode that lacks a move_insn
2805 pattern. However, you will get better code if you define such patterns,
2806 even if they must turn into multiple assembler instructions. */
2807 else
2809 rtx last_insn = 0;
2810 rtx seq, inner;
2811 int need_clobber;
2812 int i;
2814 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
2816 #ifdef PUSH_ROUNDING
2818 /* If X is a push on the stack, do the push now and replace
2819 X with a reference to the stack pointer. */
2820 if (push_operand (x, GET_MODE (x)))
2822 rtx temp;
2823 enum rtx_code code;
2825 /* Do not use anti_adjust_stack, since we don't want to update
2826 stack_pointer_delta. */
2827 temp = expand_binop (Pmode,
2828 #ifdef STACK_GROWS_DOWNWARD
2829 sub_optab,
2830 #else
2831 add_optab,
2832 #endif
2833 stack_pointer_rtx,
2834 GEN_INT
2835 (PUSH_ROUNDING
2836 (GET_MODE_SIZE (GET_MODE (x)))),
2837 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
2839 if (temp != stack_pointer_rtx)
2840 emit_move_insn (stack_pointer_rtx, temp);
2842 code = GET_CODE (XEXP (x, 0));
2844 /* Just hope that small offsets off SP are OK. */
2845 if (code == POST_INC)
2846 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
2847 GEN_INT (-((HOST_WIDE_INT)
2848 GET_MODE_SIZE (GET_MODE (x)))));
2849 else if (code == POST_DEC)
2850 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
2851 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2852 else
2853 temp = stack_pointer_rtx;
2855 x = change_address (x, VOIDmode, temp);
2857 #endif
2859 /* If we are in reload, see if either operand is a MEM whose address
2860 is scheduled for replacement. */
2861 if (reload_in_progress && MEM_P (x)
2862 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
2863 x = replace_equiv_address_nv (x, inner);
2864 if (reload_in_progress && MEM_P (y)
2865 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
2866 y = replace_equiv_address_nv (y, inner);
2868 start_sequence ();
2870 need_clobber = 0;
2871 for (i = 0;
2872 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2873 i++)
2875 rtx xpart = operand_subword (x, i, 1, mode);
2876 rtx ypart = operand_subword (y, i, 1, mode);
2878 /* If we can't get a part of Y, put Y into memory if it is a
2879 constant. Otherwise, force it into a register. If we still
2880 can't get a part of Y, abort. */
2881 if (ypart == 0 && CONSTANT_P (y))
2883 y = force_const_mem (mode, y);
2884 ypart = operand_subword (y, i, 1, mode);
2886 else if (ypart == 0)
2887 ypart = operand_subword_force (y, i, mode);
2889 gcc_assert (xpart && ypart);
2891 need_clobber |= (GET_CODE (xpart) == SUBREG);
2893 last_insn = emit_move_insn (xpart, ypart);
2896 seq = get_insns ();
2897 end_sequence ();
2899 /* Show the output dies here. This is necessary for SUBREGs
2900 of pseudos since we cannot track their lifetimes correctly;
2901 hard regs shouldn't appear here except as return values.
2902 We never want to emit such a clobber after reload. */
2903 if (x != y
2904 && ! (reload_in_progress || reload_completed)
2905 && need_clobber != 0)
2906 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2908 emit_insn (seq);
2910 return last_insn;
2914 /* If Y is representable exactly in a narrower mode, and the target can
2915 perform the extension directly from constant or memory, then emit the
2916 move as an extension. */
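/* For example, a DFmode move of the constant 1.0 can usually be emitted as
   a float_extend from an SFmode constant pool entry, roughly

       (set (reg:DF d) (float_extend:DF (mem:SF (symbol_ref ...))))

   provided the truncation to SFmode is exact and the target's extend
   pattern accepts the operand.  */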
2918 static rtx
2919 compress_float_constant (rtx x, rtx y)
2921 enum machine_mode dstmode = GET_MODE (x);
2922 enum machine_mode orig_srcmode = GET_MODE (y);
2923 enum machine_mode srcmode;
2924 REAL_VALUE_TYPE r;
2926 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
2928 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
2929 srcmode != orig_srcmode;
2930 srcmode = GET_MODE_WIDER_MODE (srcmode))
2932 enum insn_code ic;
2933 rtx trunc_y, last_insn;
2935 /* Skip if the target can't extend this way. */
2936 ic = can_extend_p (dstmode, srcmode, 0);
2937 if (ic == CODE_FOR_nothing)
2938 continue;
2940 /* Skip if the narrowed value isn't exact. */
2941 if (! exact_real_truncate (srcmode, &r))
2942 continue;
2944 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
2946 if (LEGITIMATE_CONSTANT_P (trunc_y))
2948 /* Skip if the target needs extra instructions to perform
2949 the extension. */
2950 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
2951 continue;
2953 else if (float_extend_from_mem[dstmode][srcmode])
2954 trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
2955 else
2956 continue;
2958 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
2959 last_insn = get_last_insn ();
2961 if (REG_P (x))
2962 set_unique_reg_note (last_insn, REG_EQUAL, y);
2964 return last_insn;
2967 return NULL_RTX;
2970 /* Pushing data onto the stack. */
2972 /* Push a block of length SIZE (perhaps variable)
2973 and return an rtx to address the beginning of the block.
2974 The value may be virtual_outgoing_args_rtx.
2976 EXTRA is the number of bytes of padding to push in addition to SIZE.
2977 BELOW nonzero means this padding comes at low addresses;
2978 otherwise, the padding comes at high addresses. */
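/* A rough usage sketch (SRC is a hypothetical BLKmode MEM): making room for
   32 bytes of outgoing arguments and copying into the new block:

       rtx addr = push_block (GEN_INT (32), 0, 0);
       emit_block_move (gen_rtx_MEM (BLKmode, addr), src,
                        GEN_INT (32), BLOCK_OP_CALL_PARM);
*/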
2981 push_block (rtx size, int extra, int below)
2983 rtx temp;
2985 size = convert_modes (Pmode, ptr_mode, size, 1);
2986 if (CONSTANT_P (size))
2987 anti_adjust_stack (plus_constant (size, extra));
2988 else if (REG_P (size) && extra == 0)
2989 anti_adjust_stack (size);
2990 else
2992 temp = copy_to_mode_reg (Pmode, size);
2993 if (extra != 0)
2994 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
2995 temp, 0, OPTAB_LIB_WIDEN);
2996 anti_adjust_stack (temp);
2999 #ifndef STACK_GROWS_DOWNWARD
3000 if (0)
3001 #else
3002 if (1)
3003 #endif
3005 temp = virtual_outgoing_args_rtx;
3006 if (extra != 0 && below)
3007 temp = plus_constant (temp, extra);
3009 else
3011 if (GET_CODE (size) == CONST_INT)
3012 temp = plus_constant (virtual_outgoing_args_rtx,
3013 -INTVAL (size) - (below ? 0 : extra));
3014 else if (extra != 0 && !below)
3015 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3016 negate_rtx (Pmode, plus_constant (size, extra)));
3017 else
3018 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3019 negate_rtx (Pmode, size));
3022 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3025 #ifdef PUSH_ROUNDING
3027 /* Emit single push insn. */
3029 static void
3030 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3032 rtx dest_addr;
3033 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3034 rtx dest;
3035 enum insn_code icode;
3036 insn_operand_predicate_fn pred;
3038 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3039 /* If there is a push pattern, use it. Otherwise try the old way of
3040 throwing a MEM representing the push operation to the move expander. */
3041 icode = push_optab->handlers[(int) mode].insn_code;
3042 if (icode != CODE_FOR_nothing)
3044 if (((pred = insn_data[(int) icode].operand[0].predicate)
3045 && !((*pred) (x, mode))))
3046 x = force_reg (mode, x);
3047 emit_insn (GEN_FCN (icode) (x));
3048 return;
3050 if (GET_MODE_SIZE (mode) == rounded_size)
3051 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3052 /* If we are to pad downward, adjust the stack pointer first and
3053 then store X into the stack location using an offset. This is
3054 because emit_move_insn does not know how to pad; it does not have
3055 access to type. */
3056 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3058 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3059 HOST_WIDE_INT offset;
3061 emit_move_insn (stack_pointer_rtx,
3062 expand_binop (Pmode,
3063 #ifdef STACK_GROWS_DOWNWARD
3064 sub_optab,
3065 #else
3066 add_optab,
3067 #endif
3068 stack_pointer_rtx,
3069 GEN_INT (rounded_size),
3070 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3072 offset = (HOST_WIDE_INT) padding_size;
3073 #ifdef STACK_GROWS_DOWNWARD
3074 if (STACK_PUSH_CODE == POST_DEC)
3075 /* We have already decremented the stack pointer, so get the
3076 previous value. */
3077 offset += (HOST_WIDE_INT) rounded_size;
3078 #else
3079 if (STACK_PUSH_CODE == POST_INC)
3080 /* We have already incremented the stack pointer, so get the
3081 previous value. */
3082 offset -= (HOST_WIDE_INT) rounded_size;
3083 #endif
3084 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3086 else
3088 #ifdef STACK_GROWS_DOWNWARD
3089 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3090 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3091 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3092 #else
3093 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3094 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3095 GEN_INT (rounded_size));
3096 #endif
3097 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3100 dest = gen_rtx_MEM (mode, dest_addr);
3102 if (type != 0)
3104 set_mem_attributes (dest, type, 1);
3106 if (flag_optimize_sibling_calls)
3107 /* Function incoming arguments may overlap with sibling call
3108 outgoing arguments and we cannot allow reordering of reads
3109 from function arguments with stores to outgoing arguments
3110 of sibling calls. */
3111 set_mem_alias_set (dest, 0);
3113 emit_move_insn (dest, x);
3115 #endif
3117 /* Generate code to push X onto the stack, assuming it has mode MODE and
3118 type TYPE.
3119 MODE is redundant except when X is a CONST_INT (since they don't
3120 carry mode info).
3121 SIZE is an rtx for the size of data to be copied (in bytes),
3122 needed only if X is BLKmode.
3124 ALIGN (in bits) is maximum alignment we can assume.
3126 If PARTIAL and REG are both nonzero, then copy that many of the first
3127 words of X into registers starting with REG, and push the rest of X.
3128 The amount of space pushed is decreased by PARTIAL words,
3129 rounded *down* to a multiple of PARM_BOUNDARY.
3130 REG must be a hard register in this case.
3131 If REG is zero but PARTIAL is not, take all other actions for an
3132 argument partially in registers, but do not actually load any
3133 registers.
3135 EXTRA is the amount in bytes of extra space to leave next to this arg.
3136 This is ignored if an argument block has already been allocated.
3138 On a machine that lacks real push insns, ARGS_ADDR is the address of
3139 the bottom of the argument block for this call. We use indexing off there
3140 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3141 argument block has not been preallocated.
3143 ARGS_SO_FAR is the size of args previously pushed for this call.
3145 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3146 for arguments passed in registers. If nonzero, it will be the number
3147 of bytes required. */
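/* Hedged sketch of a typical call (all operands hypothetical): pushing one
   word-mode value with no partial registers and no preallocated argument
   block:

       emit_push_insn (val, word_mode, NULL_TREE, NULL_RTX, align,
                       0, NULL_RTX, 0, NULL_RTX, const0_rtx, 0, NULL_RTX);

   This mirrors the recursive call used below for scalars passed partly in
   registers.  */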
3149 void
3150 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3151 unsigned int align, int partial, rtx reg, int extra,
3152 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3153 rtx alignment_pad)
3155 rtx xinner;
3156 enum direction stack_direction
3157 #ifdef STACK_GROWS_DOWNWARD
3158 = downward;
3159 #else
3160 = upward;
3161 #endif
3163 /* Decide where to pad the argument: `downward' for below,
3164 `upward' for above, or `none' for don't pad it.
3165 Default is below for small data on big-endian machines; else above. */
3166 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3168 /* Invert direction if stack is post-decrement.
3169 FIXME: why? */
3170 if (STACK_PUSH_CODE == POST_DEC)
3171 if (where_pad != none)
3172 where_pad = (where_pad == downward ? upward : downward);
3174 xinner = x;
3176 if (mode == BLKmode)
3178 /* Copy a block into the stack, entirely or partially. */
3180 rtx temp;
3181 int used = partial * UNITS_PER_WORD;
3182 int offset;
3183 int skip;
3185 if (reg && GET_CODE (reg) == PARALLEL)
3187 /* Use the size of the elt to compute offset. */
3188 rtx elt = XEXP (XVECEXP (reg, 0, 0), 0);
3189 used = partial * GET_MODE_SIZE (GET_MODE (elt));
3190 offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3192 else
3193 offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3195 gcc_assert (size);
3197 used -= offset;
3199 /* USED is now the # of bytes we need not copy to the stack
3200 because registers will take care of them. */
3202 if (partial != 0)
3203 xinner = adjust_address (xinner, BLKmode, used);
3205 /* If the partial register-part of the arg counts in its stack size,
3206 skip the part of stack space corresponding to the registers.
3207 Otherwise, start copying to the beginning of the stack space,
3208 by setting SKIP to 0. */
3209 skip = (reg_parm_stack_space == 0) ? 0 : used;
3211 #ifdef PUSH_ROUNDING
3212 /* Do it with several push insns if that doesn't take lots of insns
3213 and if there is no difficulty with push insns that skip bytes
3214 on the stack for alignment purposes. */
3215 if (args_addr == 0
3216 && PUSH_ARGS
3217 && GET_CODE (size) == CONST_INT
3218 && skip == 0
3219 && MEM_ALIGN (xinner) >= align
3220 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3221 /* Here we avoid the case of a structure whose weak alignment
3222 forces many pushes of a small amount of data,
3223 and such small pushes do rounding that causes trouble. */
3224 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3225 || align >= BIGGEST_ALIGNMENT
3226 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3227 == (align / BITS_PER_UNIT)))
3228 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3230 /* Push padding now if padding above and stack grows down,
3231 or if padding below and stack grows up.
3232 But if space already allocated, this has already been done. */
3233 if (extra && args_addr == 0
3234 && where_pad != none && where_pad != stack_direction)
3235 anti_adjust_stack (GEN_INT (extra));
3237 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3239 else
3240 #endif /* PUSH_ROUNDING */
3242 rtx target;
3244 /* Otherwise make space on the stack and copy the data
3245 to the address of that space. */
3247 /* Deduct words put into registers from the size we must copy. */
3248 if (partial != 0)
3250 if (GET_CODE (size) == CONST_INT)
3251 size = GEN_INT (INTVAL (size) - used);
3252 else
3253 size = expand_binop (GET_MODE (size), sub_optab, size,
3254 GEN_INT (used), NULL_RTX, 0,
3255 OPTAB_LIB_WIDEN);
3258 /* Get the address of the stack space.
3259 In this case, we do not deal with EXTRA separately.
3260 A single stack adjust will do. */
3261 if (! args_addr)
3263 temp = push_block (size, extra, where_pad == downward);
3264 extra = 0;
3266 else if (GET_CODE (args_so_far) == CONST_INT)
3267 temp = memory_address (BLKmode,
3268 plus_constant (args_addr,
3269 skip + INTVAL (args_so_far)));
3270 else
3271 temp = memory_address (BLKmode,
3272 plus_constant (gen_rtx_PLUS (Pmode,
3273 args_addr,
3274 args_so_far),
3275 skip));
3277 if (!ACCUMULATE_OUTGOING_ARGS)
3279 /* If the source is referenced relative to the stack pointer,
3280 copy it to another register to stabilize it. We do not need
3281 to do this if we know that we won't be changing sp. */
3283 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3284 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3285 temp = copy_to_reg (temp);
3288 target = gen_rtx_MEM (BLKmode, temp);
3290 /* We do *not* set_mem_attributes here, because incoming arguments
3291 may overlap with sibling call outgoing arguments and we cannot
3292 allow reordering of reads from function arguments with stores
3293 to outgoing arguments of sibling calls. We do, however, want
3294 to record the alignment of the stack slot. */
3295 /* ALIGN may well be better aligned than TYPE, e.g. due to
3296 PARM_BOUNDARY. Assume the caller isn't lying. */
3297 set_mem_align (target, align);
3299 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3302 else if (partial > 0)
3304 /* Scalar partly in registers. */
3306 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3307 int i;
3308 int not_stack;
3309 /* # words of start of argument
3310 that we must make space for but need not store. */
3311 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3312 int args_offset = INTVAL (args_so_far);
3313 int skip;
3315 /* Push padding now if padding above and stack grows down,
3316 or if padding below and stack grows up.
3317 But if space already allocated, this has already been done. */
3318 if (extra && args_addr == 0
3319 && where_pad != none && where_pad != stack_direction)
3320 anti_adjust_stack (GEN_INT (extra));
3322 /* If we make space by pushing it, we might as well push
3323 the real data. Otherwise, we can leave OFFSET nonzero
3324 and leave the space uninitialized. */
3325 if (args_addr == 0)
3326 offset = 0;
3328 /* Now NOT_STACK gets the number of words that we don't need to
3329 allocate on the stack. */
3330 not_stack = partial - offset;
3332 /* If the partial register-part of the arg counts in its stack size,
3333 skip the part of stack space corresponding to the registers.
3334 Otherwise, start copying to the beginning of the stack space,
3335 by setting SKIP to 0. */
3336 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3338 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3339 x = validize_mem (force_const_mem (mode, x));
3341 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3342 SUBREGs of such registers are not allowed. */
3343 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3344 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3345 x = copy_to_reg (x);
3347 /* Loop over all the words allocated on the stack for this arg. */
3348 /* We can do it by words, because any scalar bigger than a word
3349 has a size a multiple of a word. */
3350 #ifndef PUSH_ARGS_REVERSED
3351 for (i = not_stack; i < size; i++)
3352 #else
3353 for (i = size - 1; i >= not_stack; i--)
3354 #endif
3355 if (i >= not_stack + offset)
3356 emit_push_insn (operand_subword_force (x, i, mode),
3357 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3358 0, args_addr,
3359 GEN_INT (args_offset + ((i - not_stack + skip)
3360 * UNITS_PER_WORD)),
3361 reg_parm_stack_space, alignment_pad);
3363 else
3365 rtx addr;
3366 rtx dest;
3368 /* Push padding now if padding above and stack grows down,
3369 or if padding below and stack grows up.
3370 But if space already allocated, this has already been done. */
3371 if (extra && args_addr == 0
3372 && where_pad != none && where_pad != stack_direction)
3373 anti_adjust_stack (GEN_INT (extra));
3375 #ifdef PUSH_ROUNDING
3376 if (args_addr == 0 && PUSH_ARGS)
3377 emit_single_push_insn (mode, x, type);
3378 else
3379 #endif
3381 if (GET_CODE (args_so_far) == CONST_INT)
3382 addr
3383 = memory_address (mode,
3384 plus_constant (args_addr,
3385 INTVAL (args_so_far)));
3386 else
3387 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3388 args_so_far));
3389 dest = gen_rtx_MEM (mode, addr);
3391 /* We do *not* set_mem_attributes here, because incoming arguments
3392 may overlap with sibling call outgoing arguments and we cannot
3393 allow reordering of reads from function arguments with stores
3394 to outgoing arguments of sibling calls. We do, however, want
3395 to record the alignment of the stack slot. */
3396 /* ALIGN may well be better aligned than TYPE, e.g. due to
3397 PARM_BOUNDARY. Assume the caller isn't lying. */
3398 set_mem_align (dest, align);
3400 emit_move_insn (dest, x);
3404 /* If part should go in registers, copy that part
3405 into the appropriate registers. Do this now, at the end,
3406 since mem-to-mem copies above may do function calls. */
3407 if (partial > 0 && reg != 0)
3409 /* Handle calls that pass values in multiple non-contiguous locations.
3410 The Irix 6 ABI has examples of this. */
3411 if (GET_CODE (reg) == PARALLEL)
3412 emit_group_load (reg, x, type, -1);
3413 else
3414 move_block_to_reg (REGNO (reg), x, partial, mode);
3417 if (extra && args_addr == 0 && where_pad == stack_direction)
3418 anti_adjust_stack (GEN_INT (extra));
3420 if (alignment_pad && args_addr == 0)
3421 anti_adjust_stack (alignment_pad);
3424 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3425 operations. */
3427 static rtx
3428 get_subtarget (rtx x)
3430 return (optimize
3431 || x == 0
3432 /* Only registers can be subtargets. */
3433 || !REG_P (x)
3434 /* Don't use hard regs to avoid extending their life. */
3435 || REGNO (x) < FIRST_PSEUDO_REGISTER
3436 ? 0 : x);
3439 /* Expand an assignment that stores the value of FROM into TO. */
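/* Minimal sketch: the expander for MODIFY_EXPR ultimately funnels into this
   routine, conceptually

       expand_assignment (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1));

   with EXP being the MODIFY_EXPR tree (hypothetical here).  */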
3441 void
3442 expand_assignment (tree to, tree from)
3444 rtx to_rtx = 0;
3445 rtx result;
3447 /* Don't crash if the lhs of the assignment was erroneous. */
3449 if (TREE_CODE (to) == ERROR_MARK)
3451 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3452 return;
3455 /* Assignment of a structure component needs special treatment
3456 if the structure component's rtx is not simply a MEM.
3457 Assignment of an array element at a constant index, and assignment of
3458 an array element in an unaligned packed structure field, has the same
3459 problem. */
3461 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3462 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF
3463 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
3465 enum machine_mode mode1;
3466 HOST_WIDE_INT bitsize, bitpos;
3467 rtx orig_to_rtx;
3468 tree offset;
3469 int unsignedp;
3470 int volatilep = 0;
3471 tree tem;
3473 push_temp_slots ();
3474 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3475 &unsignedp, &volatilep);
3477 /* If we are going to use store_bit_field and extract_bit_field,
3478 make sure to_rtx will be safe for multiple use. */
3480 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
3482 if (offset != 0)
3484 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
3486 gcc_assert (MEM_P (to_rtx));
3488 #ifdef POINTERS_EXTEND_UNSIGNED
3489 if (GET_MODE (offset_rtx) != Pmode)
3490 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
3491 #else
3492 if (GET_MODE (offset_rtx) != ptr_mode)
3493 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3494 #endif
3496 /* A constant address in TO_RTX can have VOIDmode; we must not try
3497 to call force_reg in that case, so avoid it here. */
3498 if (MEM_P (to_rtx)
3499 && GET_MODE (to_rtx) == BLKmode
3500 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3501 && bitsize > 0
3502 && (bitpos % bitsize) == 0
3503 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3504 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
3506 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
3507 bitpos = 0;
3510 to_rtx = offset_address (to_rtx, offset_rtx,
3511 highest_pow2_factor_for_target (to,
3512 offset));
3515 if (MEM_P (to_rtx))
3517 /* If the field is at offset zero, we could have been given the
3518 DECL_RTX of the parent struct. Don't munge it. */
3519 to_rtx = shallow_copy_rtx (to_rtx);
3521 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
3524 /* Deal with volatile and readonly fields. The former is only done
3525 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
3526 if (volatilep && MEM_P (to_rtx))
3528 if (to_rtx == orig_to_rtx)
3529 to_rtx = copy_rtx (to_rtx);
3530 MEM_VOLATILE_P (to_rtx) = 1;
3533 if (MEM_P (to_rtx) && ! can_address_p (to))
3535 if (to_rtx == orig_to_rtx)
3536 to_rtx = copy_rtx (to_rtx);
3537 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
3540 /* Optimize bitfld op= val in certain cases. */
3541 while (mode1 == VOIDmode
3542 && bitsize > 0 && bitsize < BITS_PER_WORD
3543 && GET_MODE_BITSIZE (GET_MODE (to_rtx)) <= BITS_PER_WORD
3544 && !TREE_SIDE_EFFECTS (to)
3545 && !TREE_THIS_VOLATILE (to))
3547 tree src, op0, op1;
3548 rtx value, str_rtx = to_rtx;
3549 HOST_WIDE_INT bitpos1 = bitpos;
3550 optab binop;
3552 src = from;
3553 STRIP_NOPS (src);
3554 if (TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE
3555 || !BINARY_CLASS_P (src))
3556 break;
3558 op0 = TREE_OPERAND (src, 0);
3559 op1 = TREE_OPERAND (src, 1);
3560 STRIP_NOPS (op0);
3562 if (! operand_equal_p (to, op0, 0))
3563 break;
3565 if (MEM_P (str_rtx))
3567 enum machine_mode mode = GET_MODE (str_rtx);
3568 HOST_WIDE_INT offset1;
3570 if (GET_MODE_BITSIZE (mode) == 0
3571 || GET_MODE_BITSIZE (mode) > BITS_PER_WORD)
3572 mode = word_mode;
3573 mode = get_best_mode (bitsize, bitpos1, MEM_ALIGN (str_rtx),
3574 mode, 0);
3575 if (mode == VOIDmode)
3576 break;
3578 offset1 = bitpos1;
3579 bitpos1 %= GET_MODE_BITSIZE (mode);
3580 offset1 = (offset1 - bitpos1) / BITS_PER_UNIT;
3581 str_rtx = adjust_address (str_rtx, mode, offset1);
3583 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
3584 break;
3586 /* If the bit field covers the whole REG/MEM, store_field
3587 will likely generate better code. */
3588 if (bitsize >= GET_MODE_BITSIZE (GET_MODE (str_rtx)))
3589 break;
3591 /* We can't handle fields split across multiple entities. */
3592 if (bitpos1 + bitsize > GET_MODE_BITSIZE (GET_MODE (str_rtx)))
3593 break;
3595 if (BYTES_BIG_ENDIAN)
3596 bitpos1 = GET_MODE_BITSIZE (GET_MODE (str_rtx)) - bitpos1
3597 - bitsize;
3599 /* Special case some bitfield op= exp. */
3600 switch (TREE_CODE (src))
3602 case PLUS_EXPR:
3603 case MINUS_EXPR:
3604 /* For now, just optimize the case of the topmost bitfield
3605 where we don't need to do any masking and also
3606 1 bit bitfields where xor can be used.
3607 We might win by one instruction for the other bitfields
3608 too if insv/extv instructions aren't used, so that
3609 can be added later. */
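	    /* Concretely (illustrative): for "s.hi += 1" where HI is the
	       topmost bitfield of a 32-bit word, the addend can simply be
	       shifted up to BITPOS1 and added to the whole word, since any
	       carry falls off the top; a 1-bit field elsewhere is handled
	       with XOR instead.  */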
3610 if (bitpos1 + bitsize != GET_MODE_BITSIZE (GET_MODE (str_rtx))
3611 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
3612 break;
3613 value = expand_expr (op1, NULL_RTX, GET_MODE (str_rtx), 0);
3614 value = convert_modes (GET_MODE (str_rtx),
3615 TYPE_MODE (TREE_TYPE (op1)), value,
3616 TYPE_UNSIGNED (TREE_TYPE (op1)));
3618 /* We may be accessing data outside the field, which means
3619 we can alias adjacent data. */
3620 if (MEM_P (str_rtx))
3622 str_rtx = shallow_copy_rtx (str_rtx);
3623 set_mem_alias_set (str_rtx, 0);
3624 set_mem_expr (str_rtx, 0);
3627 binop = TREE_CODE (src) == PLUS_EXPR ? add_optab : sub_optab;
3628 if (bitsize == 1
3629 && bitpos1 + bitsize != GET_MODE_BITSIZE (GET_MODE (str_rtx)))
3631 value = expand_and (GET_MODE (str_rtx), value, const1_rtx,
3632 NULL_RTX);
3633 binop = xor_optab;
3635 value = expand_shift (LSHIFT_EXPR, GET_MODE (str_rtx), value,
3636 build_int_cst (NULL_TREE, bitpos1),
3637 NULL_RTX, 1);
3638 result = expand_binop (GET_MODE (str_rtx), binop, str_rtx,
3639 value, str_rtx, 1, OPTAB_WIDEN);
3640 if (result != str_rtx)
3641 emit_move_insn (str_rtx, result);
3642 free_temp_slots ();
3643 pop_temp_slots ();
3644 return;
3646 default:
3647 break;
3650 break;
3653 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3654 TREE_TYPE (tem), get_alias_set (to));
3656 preserve_temp_slots (result);
3657 free_temp_slots ();
3658 pop_temp_slots ();
3660 /* If the value is meaningful, convert RESULT to the proper mode.
3661 Otherwise, return nothing. */
3662 return;
3665 /* If the rhs is a function call and its value is not an aggregate,
3666 call the function before we start to compute the lhs.
3667 This is needed for correct code for cases such as
3668 val = setjmp (buf) on machines where reference to val
3669 requires loading up part of an address in a separate insn.
3671 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3672 since it might be a promoted variable where the zero- or sign- extension
3673 needs to be done. Handling this in the normal way is safe because no
3674 computation is done before the call. */
3675 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
3676 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3677 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3678 && REG_P (DECL_RTL (to))))
3680 rtx value;
3682 push_temp_slots ();
3683 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3684 if (to_rtx == 0)
3685 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3687 /* Handle calls that return values in multiple non-contiguous locations.
3688 The Irix 6 ABI has examples of this. */
3689 if (GET_CODE (to_rtx) == PARALLEL)
3690 emit_group_load (to_rtx, value, TREE_TYPE (from),
3691 int_size_in_bytes (TREE_TYPE (from)));
3692 else if (GET_MODE (to_rtx) == BLKmode)
3693 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
3694 else
3696 if (POINTER_TYPE_P (TREE_TYPE (to)))
3697 value = convert_memory_address (GET_MODE (to_rtx), value);
3698 emit_move_insn (to_rtx, value);
3700 preserve_temp_slots (to_rtx);
3701 free_temp_slots ();
3702 pop_temp_slots ();
3703 return;
3706 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3707 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3709 if (to_rtx == 0)
3710 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3712 /* Don't move directly into a return register. */
3713 if (TREE_CODE (to) == RESULT_DECL
3714 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
3716 rtx temp;
3718 push_temp_slots ();
3719 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3721 if (GET_CODE (to_rtx) == PARALLEL)
3722 emit_group_load (to_rtx, temp, TREE_TYPE (from),
3723 int_size_in_bytes (TREE_TYPE (from)));
3724 else
3725 emit_move_insn (to_rtx, temp);
3727 preserve_temp_slots (to_rtx);
3728 free_temp_slots ();
3729 pop_temp_slots ();
3730 return;
3733 /* In case we are returning the contents of an object which overlaps
3734 the place the value is being stored, use a safe function when copying
3735 a value through a pointer into a structure value return block. */
3736 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3737 && current_function_returns_struct
3738 && !current_function_returns_pcc_struct)
3740 rtx from_rtx, size;
3742 push_temp_slots ();
3743 size = expr_size (from);
3744 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
3746 emit_library_call (memmove_libfunc, LCT_NORMAL,
3747 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3748 XEXP (from_rtx, 0), Pmode,
3749 convert_to_mode (TYPE_MODE (sizetype),
3750 size, TYPE_UNSIGNED (sizetype)),
3751 TYPE_MODE (sizetype));
3753 preserve_temp_slots (to_rtx);
3754 free_temp_slots ();
3755 pop_temp_slots ();
3756 return;
3759 /* Compute FROM and store the value in the rtx we got. */
3761 push_temp_slots ();
3762 result = store_expr (from, to_rtx, 0);
3763 preserve_temp_slots (result);
3764 free_temp_slots ();
3765 pop_temp_slots ();
3766 return;
3769 /* Generate code for computing expression EXP,
3770 and storing the value into TARGET.
3772 If the mode is BLKmode then we may return TARGET itself.
3773 It turns out that in BLKmode it doesn't cause a problem,
3774 because C has no operators that could combine two different
3775 assignments into the same BLKmode object with different values
3776 with no sequence point. Will other languages need this to
3777 be more thorough?
3779 If CALL_PARAM_P is nonzero, this is a store into a call param on the
3780 stack, and block moves may need to be treated specially. */
3783 store_expr (tree exp, rtx target, int call_param_p)
3785 rtx temp;
3786 rtx alt_rtl = NULL_RTX;
3787 int dont_return_target = 0;
3789 if (VOID_TYPE_P (TREE_TYPE (exp)))
3791 /* C++ can generate ?: expressions with a throw expression in one
3792 branch and an rvalue in the other. Here, we resolve attempts to
3793 store the throw expression's nonexistent result. */
3794 gcc_assert (!call_param_p);
3795 expand_expr (exp, const0_rtx, VOIDmode, 0);
3796 return NULL_RTX;
3798 if (TREE_CODE (exp) == COMPOUND_EXPR)
3800 /* Perform first part of compound expression, then assign from second
3801 part. */
3802 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
3803 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
3804 return store_expr (TREE_OPERAND (exp, 1), target, call_param_p);
3806 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3808 /* For conditional expression, get safe form of the target. Then
3809 test the condition, doing the appropriate assignment on either
3810 side. This avoids the creation of unnecessary temporaries.
3811 For non-BLKmode, it is more efficient not to do this. */
3813 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3815 do_pending_stack_adjust ();
3816 NO_DEFER_POP;
3817 jumpifnot (TREE_OPERAND (exp, 0), lab1);
3818 store_expr (TREE_OPERAND (exp, 1), target, call_param_p);
3819 emit_jump_insn (gen_jump (lab2));
3820 emit_barrier ();
3821 emit_label (lab1);
3822 store_expr (TREE_OPERAND (exp, 2), target, call_param_p);
3823 emit_label (lab2);
3824 OK_DEFER_POP;
3826 return NULL_RTX;
3828 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3829 /* If this is a scalar in a register that is stored in a wider mode
3830 than the declared mode, compute the result into its declared mode
3831 and then convert to the wider mode. Our value is the computed
3832 expression. */
3834 rtx inner_target = 0;
3836 /* We can do the conversion inside EXP, which will often result
3837 in some optimizations. Do the conversion in two steps: first
3838 change the signedness, if needed, then the extend. But don't
3839 do this if the type of EXP is a subtype of something else
3840 since then the conversion might involve more than just
3841 converting modes. */
3842 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
3843 && TREE_TYPE (TREE_TYPE (exp)) == 0
3844 && (!lang_hooks.reduce_bit_field_operations
3845 || (GET_MODE_PRECISION (GET_MODE (target))
3846 == TYPE_PRECISION (TREE_TYPE (exp)))))
3848 if (TYPE_UNSIGNED (TREE_TYPE (exp))
3849 != SUBREG_PROMOTED_UNSIGNED_P (target))
3850 exp = convert
3851 (lang_hooks.types.signed_or_unsigned_type
3852 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
3854 exp = convert (lang_hooks.types.type_for_mode
3855 (GET_MODE (SUBREG_REG (target)),
3856 SUBREG_PROMOTED_UNSIGNED_P (target)),
3857 exp);
3859 inner_target = SUBREG_REG (target);
3862 temp = expand_expr (exp, inner_target, VOIDmode,
3863 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
3865 /* If TEMP is a VOIDmode constant, use convert_modes to make
3866 sure that we properly convert it. */
3867 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3869 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3870 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
3871 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3872 GET_MODE (target), temp,
3873 SUBREG_PROMOTED_UNSIGNED_P (target));
3876 convert_move (SUBREG_REG (target), temp,
3877 SUBREG_PROMOTED_UNSIGNED_P (target));
3879 return NULL_RTX;
3881 else
3883 temp = expand_expr_real (exp, target, GET_MODE (target),
3884 (call_param_p
3885 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
3886 &alt_rtl);
3887 /* Return TARGET if it's a specified hardware register.
3888 If TARGET is a volatile mem ref, either return TARGET
3889 or return a reg copied *from* TARGET; ANSI requires this.
3891 Otherwise, if TEMP is not TARGET, return TEMP
3892 if it is constant (for efficiency),
3893 or if we really want the correct value. */
3894 if (!(target && REG_P (target)
3895 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3896 && !(MEM_P (target) && MEM_VOLATILE_P (target))
3897 && ! rtx_equal_p (temp, target)
3898 && CONSTANT_P (temp))
3899 dont_return_target = 1;
3902 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3903 the same as that of TARGET, adjust the constant. This is needed, for
3904 example, in case it is a CONST_DOUBLE and we want only a word-sized
3905 value. */
3906 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
3907 && TREE_CODE (exp) != ERROR_MARK
3908 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3909 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3910 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
3912 /* If value was not generated in the target, store it there.
3913 Convert the value to TARGET's type first if necessary and emit the
3914 pending incrementations that have been queued when expanding EXP.
3915 Note that we cannot emit the whole queue blindly because this will
3916 effectively disable the POST_INC optimization later.
3918 If TEMP and TARGET compare equal according to rtx_equal_p, but
3919 one or both of them are volatile memory refs, we have to distinguish
3920 two cases:
3921 - expand_expr has used TARGET. In this case, we must not generate
3922 another copy. This can be detected by TARGET being equal according
3923 to == .
3924 - expand_expr has not used TARGET - that means that the source just
3925 happens to have the same RTX form. Since temp will have been created
3926 by expand_expr, it will compare unequal according to == .
3927 We must generate a copy in this case, to reach the correct number
3928 of volatile memory references. */
3930 if ((! rtx_equal_p (temp, target)
3931 || (temp != target && (side_effects_p (temp)
3932 || side_effects_p (target))))
3933 && TREE_CODE (exp) != ERROR_MARK
3934 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
3935 but TARGET is not valid memory reference, TEMP will differ
3936 from TARGET although it is really the same location. */
3937 && !(alt_rtl && rtx_equal_p (alt_rtl, target))
3938 /* If there's nothing to copy, don't bother. Don't call expr_size
3939 unless necessary, because the expr_size hook of some front ends (C++)
3940 aborts on objects that are not supposed to be bit-copied or
3941 bit-initialized. */
3942 && expr_size (exp) != const0_rtx)
3944 if (GET_MODE (temp) != GET_MODE (target)
3945 && GET_MODE (temp) != VOIDmode)
3947 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
3948 if (dont_return_target)
3950 /* In this case, we will return TEMP,
3951 so make sure it has the proper mode.
3952 But don't forget to store the value into TARGET. */
3953 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
3954 emit_move_insn (target, temp);
3956 else
3957 convert_move (target, temp, unsignedp);
3960 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
3962 /* Handle copying a string constant into an array. The string
3963 constant may be shorter than the array. So copy just the string's
3964 actual length, and clear the rest. First get the size of the data
3965 type of the string, which is actually the size of the target. */
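/* For example (assuming the C front end's convention that the string's
length includes its terminating nul), for char buf[16] = "abc" the string
occupies 4 bytes, so 4 bytes are copied and the remaining 12 are cleared;
for char buf[3] = "abc" only the 3 bytes that fit are copied and nothing
is cleared.  */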
3966 rtx size = expr_size (exp);
3968 if (GET_CODE (size) == CONST_INT
3969 && INTVAL (size) < TREE_STRING_LENGTH (exp))
3970 emit_block_move (target, temp, size,
3971 (call_param_p
3972 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
3973 else
3975 /* Compute the size of the data to copy from the string. */
3976 tree copy_size
3977 = size_binop (MIN_EXPR,
3978 make_tree (sizetype, size),
3979 size_int (TREE_STRING_LENGTH (exp)));
3980 rtx copy_size_rtx
3981 = expand_expr (copy_size, NULL_RTX, VOIDmode,
3982 (call_param_p
3983 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
3984 rtx label = 0;
3986 /* Copy that much. */
3987 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
3988 TYPE_UNSIGNED (sizetype));
3989 emit_block_move (target, temp, copy_size_rtx,
3990 (call_param_p
3991 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
3993 /* Figure out how much is left in TARGET that we have to clear.
3994 Do all calculations in ptr_mode. */
3995 if (GET_CODE (copy_size_rtx) == CONST_INT)
3997 size = plus_constant (size, -INTVAL (copy_size_rtx));
3998 target = adjust_address (target, BLKmode,
3999 INTVAL (copy_size_rtx));
4001 else
4003 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4004 copy_size_rtx, NULL_RTX, 0,
4005 OPTAB_LIB_WIDEN);
4007 #ifdef POINTERS_EXTEND_UNSIGNED
4008 if (GET_MODE (copy_size_rtx) != Pmode)
4009 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4010 TYPE_UNSIGNED (sizetype));
4011 #endif
4013 target = offset_address (target, copy_size_rtx,
4014 highest_pow2_factor (copy_size));
4015 label = gen_label_rtx ();
4016 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4017 GET_MODE (size), 0, label);
4020 if (size != const0_rtx)
4021 clear_storage (target, size);
4023 if (label)
4024 emit_label (label);
4027 /* Handle calls that return values in multiple non-contiguous locations.
4028 The Irix 6 ABI has examples of this. */
4029 else if (GET_CODE (target) == PARALLEL)
4030 emit_group_load (target, temp, TREE_TYPE (exp),
4031 int_size_in_bytes (TREE_TYPE (exp)));
4032 else if (GET_MODE (temp) == BLKmode)
4033 emit_block_move (target, temp, expr_size (exp),
4034 (call_param_p
4035 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4036 else
4038 temp = force_operand (temp, target);
4039 if (temp != target)
4040 emit_move_insn (target, temp);
4044 return NULL_RTX;
4047 /* Examine CTOR. Count the scalar fields that are set to nonzero values
4048 and place that count in *P_NZ_ELTS. Count the scalar fields that are
4049 set to non-constant values and place that count in *P_NC_ELTS. */
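/* For example, a CONSTRUCTOR equivalent to { 0, 3, x } where X is not
constant adds 2 to *P_NZ_ELTS (for 3 and for X, which is assumed nonzero
by the default case below) and 1 to *P_NC_ELTS (for X).  */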
4051 static void
4052 categorize_ctor_elements_1 (tree ctor, HOST_WIDE_INT *p_nz_elts,
4053 HOST_WIDE_INT *p_nc_elts)
4055 HOST_WIDE_INT nz_elts, nc_elts;
4056 tree list;
4058 nz_elts = 0;
4059 nc_elts = 0;
4061 for (list = CONSTRUCTOR_ELTS (ctor); list; list = TREE_CHAIN (list))
4063 tree value = TREE_VALUE (list);
4064 tree purpose = TREE_PURPOSE (list);
4065 HOST_WIDE_INT mult;
4067 mult = 1;
4068 if (TREE_CODE (purpose) == RANGE_EXPR)
4070 tree lo_index = TREE_OPERAND (purpose, 0);
4071 tree hi_index = TREE_OPERAND (purpose, 1);
4073 if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
4074 mult = (tree_low_cst (hi_index, 1)
4075 - tree_low_cst (lo_index, 1) + 1);
4078 switch (TREE_CODE (value))
4080 case CONSTRUCTOR:
4082 HOST_WIDE_INT nz = 0, nc = 0;
4083 categorize_ctor_elements_1 (value, &nz, &nc);
4084 nz_elts += mult * nz;
4085 nc_elts += mult * nc;
4087 break;
4089 case INTEGER_CST:
4090 case REAL_CST:
4091 if (!initializer_zerop (value))
4092 nz_elts += mult;
4093 break;
4094 case COMPLEX_CST:
4095 if (!initializer_zerop (TREE_REALPART (value)))
4096 nz_elts += mult;
4097 if (!initializer_zerop (TREE_IMAGPART (value)))
4098 nz_elts += mult;
4099 break;
4100 case VECTOR_CST:
4102 tree v;
4103 for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
4104 if (!initializer_zerop (TREE_VALUE (v)))
4105 nz_elts += mult;
4107 break;
4109 default:
4110 nz_elts += mult;
4111 if (!initializer_constant_valid_p (value, TREE_TYPE (value)))
4112 nc_elts += mult;
4113 break;
4117 *p_nz_elts += nz_elts;
4118 *p_nc_elts += nc_elts;
4121 void
4122 categorize_ctor_elements (tree ctor, HOST_WIDE_INT *p_nz_elts,
4123 HOST_WIDE_INT *p_nc_elts)
4125 *p_nz_elts = 0;
4126 *p_nc_elts = 0;
4127 categorize_ctor_elements_1 (ctor, p_nz_elts, p_nc_elts);
4130 /* Count the number of scalars in TYPE. Return -1 on overflow or
4131 if TYPE is variable-sized. */
4133 HOST_WIDE_INT
4134 count_type_elements (tree type)
4136 const HOST_WIDE_INT max = ~((HOST_WIDE_INT)1 << (HOST_BITS_PER_WIDE_INT-1));
4137 switch (TREE_CODE (type))
4139 case ARRAY_TYPE:
4141 tree telts = array_type_nelts (type);
4142 if (telts && host_integerp (telts, 1))
4144 HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1;
4145 HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type));
4146 if (n == 0)
4147 return 0;
4148 else if (max / n > m)
4149 return n * m;
4151 return -1;
4154 case RECORD_TYPE:
4156 HOST_WIDE_INT n = 0, t;
4157 tree f;
4159 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
4160 if (TREE_CODE (f) == FIELD_DECL)
4162 t = count_type_elements (TREE_TYPE (f));
4163 if (t < 0)
4164 return -1;
4165 n += t;
4168 return n;
4171 case UNION_TYPE:
4172 case QUAL_UNION_TYPE:
4174 /* Ho hum. How in the world do we guess here? Clearly it isn't
4175 right to count the fields. Guess based on the number of words. */
4176 HOST_WIDE_INT n = int_size_in_bytes (type);
4177 if (n < 0)
4178 return -1;
4179 return n / UNITS_PER_WORD;
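/* For example, a 12-byte union on a target with 4-byte words is guessed
to contain 12 / 4 = 3 scalars.  */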
4182 case COMPLEX_TYPE:
4183 return 2;
4185 case VECTOR_TYPE:
4186 return TYPE_VECTOR_SUBPARTS (type);
4188 case INTEGER_TYPE:
4189 case REAL_TYPE:
4190 case ENUMERAL_TYPE:
4191 case BOOLEAN_TYPE:
4192 case CHAR_TYPE:
4193 case POINTER_TYPE:
4194 case OFFSET_TYPE:
4195 case REFERENCE_TYPE:
4196 return 1;
4198 case VOID_TYPE:
4199 case METHOD_TYPE:
4200 case FILE_TYPE:
4201 case SET_TYPE:
4202 case FUNCTION_TYPE:
4203 case LANG_TYPE:
4204 default:
4205 gcc_unreachable ();
4209 /* Return 1 if EXP contains mostly (3/4) zeros. */
4212 mostly_zeros_p (tree exp)
4214 if (TREE_CODE (exp) == CONSTRUCTOR)
4217 HOST_WIDE_INT nz_elts, nc_elts, elts;
4219 /* If there are no ranges of true bits, it is all zero. */
4220 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4221 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4223 categorize_ctor_elements (exp, &nz_elts, &nc_elts);
4224 elts = count_type_elements (TREE_TYPE (exp));
4226 return nz_elts < elts / 4;
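/* For example, a constructor for an 8-element integer array with a single
nonzero element has nz_elts == 1 and elts == 8; since 1 < 8 / 4, the
initializer counts as mostly zero.  */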
4229 return initializer_zerop (exp);
4232 /* Helper function for store_constructor.
4233 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4234 TYPE is the type of the CONSTRUCTOR, not the element type.
4235 CLEARED is as for store_constructor.
4236 ALIAS_SET is the alias set to use for any stores.
4238 This provides a recursive shortcut back to store_constructor when it isn't
4239 necessary to go through store_field. This is so that we can pass through
4240 the cleared field to let store_constructor know that we may not have to
4241 clear a substructure if the outer structure has already been cleared. */
4243 static void
4244 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
4245 HOST_WIDE_INT bitpos, enum machine_mode mode,
4246 tree exp, tree type, int cleared, int alias_set)
4248 if (TREE_CODE (exp) == CONSTRUCTOR
4249 /* We can only call store_constructor recursively if the size and
4250 bit position are on a byte boundary. */
4251 && bitpos % BITS_PER_UNIT == 0
4252 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
4253 /* If we have a nonzero bitpos for a register target, then we just
4254 let store_field do the bitfield handling. This is unlikely to
4255 generate unnecessary clear instructions anyways. */
4256 && (bitpos == 0 || MEM_P (target)))
4258 if (MEM_P (target))
4259 target
4260 = adjust_address (target,
4261 GET_MODE (target) == BLKmode
4262 || 0 != (bitpos
4263 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4264 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4267 /* Update the alias set, if required. */
4268 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
4269 && MEM_ALIAS_SET (target) != 0)
4271 target = copy_rtx (target);
4272 set_mem_alias_set (target, alias_set);
4275 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4277 else
4278 store_field (target, bitsize, bitpos, mode, exp, type, alias_set);
4281 /* Store the value of constructor EXP into the rtx TARGET.
4282 TARGET is either a REG or a MEM; we know it cannot conflict, since
4283 safe_from_p has been called.
4284 CLEARED is true if TARGET is known to have been zero'd.
4285 SIZE is the number of bytes of TARGET we are allowed to modify: this
4286 may not be the same as the size of EXP if we are assigning to a field
4287 which has been packed to exclude padding bits. */
4289 static void
4290 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
4292 tree type = TREE_TYPE (exp);
4293 #ifdef WORD_REGISTER_OPERATIONS
4294 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4295 #endif
4297 switch (TREE_CODE (type))
4299 case RECORD_TYPE:
4300 case UNION_TYPE:
4301 case QUAL_UNION_TYPE:
4303 tree elt;
4305 /* If size is zero or the target is already cleared, do nothing. */
4306 if (size == 0 || cleared)
4307 cleared = 1;
4308 /* We either clear the aggregate or indicate the value is dead. */
4309 else if ((TREE_CODE (type) == UNION_TYPE
4310 || TREE_CODE (type) == QUAL_UNION_TYPE)
4311 && ! CONSTRUCTOR_ELTS (exp))
4312 /* If the constructor is empty, clear the union. */
4314 clear_storage (target, expr_size (exp));
4315 cleared = 1;
4318 /* If we are building a static constructor into a register,
4319 set the initial value as zero so we can fold the value into
4320 a constant. But if more than one register is involved,
4321 this probably loses. */
4322 else if (REG_P (target) && TREE_STATIC (exp)
4323 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4325 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4326 cleared = 1;
4329 /* If the constructor has fewer fields than the structure or
4330 if we are initializing the structure to mostly zeros, clear
4331 the whole structure first. Don't do this if TARGET is a
4332 register whose mode size isn't equal to SIZE since
4333 clear_storage can't handle this case. */
4334 else if (size > 0
4335 && ((list_length (CONSTRUCTOR_ELTS (exp))
4336 != fields_length (type))
4337 || mostly_zeros_p (exp))
4338 && (!REG_P (target)
4339 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4340 == size)))
4342 clear_storage (target, GEN_INT (size));
4343 cleared = 1;
4346 if (! cleared)
4347 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4349 /* Store each element of the constructor into the
4350 corresponding field of TARGET. */
4352 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4354 tree field = TREE_PURPOSE (elt);
4355 tree value = TREE_VALUE (elt);
4356 enum machine_mode mode;
4357 HOST_WIDE_INT bitsize;
4358 HOST_WIDE_INT bitpos = 0;
4359 tree offset;
4360 rtx to_rtx = target;
4362 /* Just ignore missing fields. We cleared the whole
4363 structure, above, if any fields are missing. */
4364 if (field == 0)
4365 continue;
4367 if (cleared && initializer_zerop (value))
4368 continue;
4370 if (host_integerp (DECL_SIZE (field), 1))
4371 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4372 else
4373 bitsize = -1;
4375 mode = DECL_MODE (field);
4376 if (DECL_BIT_FIELD (field))
4377 mode = VOIDmode;
4379 offset = DECL_FIELD_OFFSET (field);
4380 if (host_integerp (offset, 0)
4381 && host_integerp (bit_position (field), 0))
4383 bitpos = int_bit_position (field);
4384 offset = 0;
4386 else
4387 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4389 if (offset)
4391 rtx offset_rtx;
4393 offset
4394 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
4395 make_tree (TREE_TYPE (exp),
4396 target));
4398 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4399 gcc_assert (MEM_P (to_rtx));
4401 #ifdef POINTERS_EXTEND_UNSIGNED
4402 if (GET_MODE (offset_rtx) != Pmode)
4403 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4404 #else
4405 if (GET_MODE (offset_rtx) != ptr_mode)
4406 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4407 #endif
4409 to_rtx = offset_address (to_rtx, offset_rtx,
4410 highest_pow2_factor (offset));
4413 #ifdef WORD_REGISTER_OPERATIONS
4414 /* If this initializes a field that is smaller than a
4415 word, at the start of a word, try to widen it to a full
4416 word. This special case allows us to output C++ member
4417 function initializations in a form that the optimizers
4418 can understand. */
4419 if (REG_P (target)
4420 && bitsize < BITS_PER_WORD
4421 && bitpos % BITS_PER_WORD == 0
4422 && GET_MODE_CLASS (mode) == MODE_INT
4423 && TREE_CODE (value) == INTEGER_CST
4424 && exp_size >= 0
4425 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4427 tree type = TREE_TYPE (value);
4429 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4431 type = lang_hooks.types.type_for_size
4432 (BITS_PER_WORD, TYPE_UNSIGNED (type));
4433 value = convert (type, value);
4436 if (BYTES_BIG_ENDIAN)
4437 value
4438 = fold (build2 (LSHIFT_EXPR, type, value,
4439 build_int_cst (NULL_TREE,
4440 BITS_PER_WORD - bitsize)));
4441 bitsize = BITS_PER_WORD;
4442 mode = word_mode;
4444 #endif
4446 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4447 && DECL_NONADDRESSABLE_P (field))
4449 to_rtx = copy_rtx (to_rtx);
4450 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4453 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4454 value, type, cleared,
4455 get_alias_set (TREE_TYPE (field)));
4457 break;
4459 case ARRAY_TYPE:
4461 tree elt;
4462 int i;
4463 int need_to_clear;
4464 tree domain;
4465 tree elttype = TREE_TYPE (type);
4466 int const_bounds_p;
4467 HOST_WIDE_INT minelt = 0;
4468 HOST_WIDE_INT maxelt = 0;
4470 domain = TYPE_DOMAIN (type);
4471 const_bounds_p = (TYPE_MIN_VALUE (domain)
4472 && TYPE_MAX_VALUE (domain)
4473 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4474 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4476 /* If we have constant bounds for the range of the type, get them. */
4477 if (const_bounds_p)
4479 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4480 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4483 /* If the constructor has fewer elements than the array, clear
4484 the whole array first. Similarly if this is a static
4485 constructor of a non-BLKmode object. */
4486 if (cleared)
4487 need_to_clear = 0;
4488 else if (REG_P (target) && TREE_STATIC (exp))
4489 need_to_clear = 1;
4490 else
4492 HOST_WIDE_INT count = 0, zero_count = 0;
4493 need_to_clear = ! const_bounds_p;
4495 /* This loop is a more accurate version of the loop in
4496 mostly_zeros_p (it handles RANGE_EXPR in an index). It
4497 is also needed to check for missing elements. */
4498 for (elt = CONSTRUCTOR_ELTS (exp);
4499 elt != NULL_TREE && ! need_to_clear;
4500 elt = TREE_CHAIN (elt))
4502 tree index = TREE_PURPOSE (elt);
4503 HOST_WIDE_INT this_node_count;
4505 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4507 tree lo_index = TREE_OPERAND (index, 0);
4508 tree hi_index = TREE_OPERAND (index, 1);
4510 if (! host_integerp (lo_index, 1)
4511 || ! host_integerp (hi_index, 1))
4513 need_to_clear = 1;
4514 break;
4517 this_node_count = (tree_low_cst (hi_index, 1)
4518 - tree_low_cst (lo_index, 1) + 1);
4520 else
4521 this_node_count = 1;
4523 count += this_node_count;
4524 if (mostly_zeros_p (TREE_VALUE (elt)))
4525 zero_count += this_node_count;
4528 /* Clear the entire array first if there are any missing
4529 elements, or if the incidence of zero elements is >=
4530 75%. */
4531 if (! need_to_clear
4532 && (count < maxelt - minelt + 1
4533 || 4 * zero_count >= 3 * count))
4534 need_to_clear = 1;
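/* For example, with 8 initialized elements of which 6 are zero,
4 * 6 >= 3 * 8 holds, so the array is cleared up front and the zero
elements can then simply be skipped.  */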
4537 if (need_to_clear && size > 0)
4539 if (REG_P (target))
4540 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4541 else
4542 clear_storage (target, GEN_INT (size));
4543 cleared = 1;
4546 if (!cleared && REG_P (target))
4547 /* Inform later passes that the old value is dead. */
4548 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4550 /* Store each element of the constructor into the
4551 corresponding element of TARGET, determined by counting the
4552 elements. */
4553 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4554 elt;
4555 elt = TREE_CHAIN (elt), i++)
4557 enum machine_mode mode;
4558 HOST_WIDE_INT bitsize;
4559 HOST_WIDE_INT bitpos;
4560 int unsignedp;
4561 tree value = TREE_VALUE (elt);
4562 tree index = TREE_PURPOSE (elt);
4563 rtx xtarget = target;
4565 if (cleared && initializer_zerop (value))
4566 continue;
4568 unsignedp = TYPE_UNSIGNED (elttype);
4569 mode = TYPE_MODE (elttype);
4570 if (mode == BLKmode)
4571 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4572 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4573 : -1);
4574 else
4575 bitsize = GET_MODE_BITSIZE (mode);
4577 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4579 tree lo_index = TREE_OPERAND (index, 0);
4580 tree hi_index = TREE_OPERAND (index, 1);
4581 rtx index_r, pos_rtx;
4582 HOST_WIDE_INT lo, hi, count;
4583 tree position;
4585 /* If the range is constant and "small", unroll the loop. */
4586 if (const_bounds_p
4587 && host_integerp (lo_index, 0)
4588 && host_integerp (hi_index, 0)
4589 && (lo = tree_low_cst (lo_index, 0),
4590 hi = tree_low_cst (hi_index, 0),
4591 count = hi - lo + 1,
4592 (!MEM_P (target)
4593 || count <= 2
4594 || (host_integerp (TYPE_SIZE (elttype), 1)
4595 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4596 <= 40 * 8)))))
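/* That is, unroll when the target is not in memory, when the range covers
at most two elements, or when the unrolled stores would initialize no
more than 40 bytes in total.  */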
4598 lo -= minelt; hi -= minelt;
4599 for (; lo <= hi; lo++)
4601 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4603 if (MEM_P (target)
4604 && !MEM_KEEP_ALIAS_SET_P (target)
4605 && TREE_CODE (type) == ARRAY_TYPE
4606 && TYPE_NONALIASED_COMPONENT (type))
4608 target = copy_rtx (target);
4609 MEM_KEEP_ALIAS_SET_P (target) = 1;
4612 store_constructor_field
4613 (target, bitsize, bitpos, mode, value, type, cleared,
4614 get_alias_set (elttype));
4617 else
4619 rtx loop_start = gen_label_rtx ();
4620 rtx loop_end = gen_label_rtx ();
4621 tree exit_cond;
4623 expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4624 unsignedp = TYPE_UNSIGNED (domain);
4626 index = build_decl (VAR_DECL, NULL_TREE, domain);
4628 index_r
4629 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4630 &unsignedp, 0));
4631 SET_DECL_RTL (index, index_r);
4632 store_expr (lo_index, index_r, 0);
4634 /* Build the head of the loop. */
4635 do_pending_stack_adjust ();
4636 emit_label (loop_start);
4638 /* Assign value to element index. */
4639 position
4640 = convert (ssizetype,
4641 fold (build2 (MINUS_EXPR, TREE_TYPE (index),
4642 index, TYPE_MIN_VALUE (domain))));
4643 position = size_binop (MULT_EXPR, position,
4644 convert (ssizetype,
4645 TYPE_SIZE_UNIT (elttype)));
4647 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4648 xtarget = offset_address (target, pos_rtx,
4649 highest_pow2_factor (position));
4650 xtarget = adjust_address (xtarget, mode, 0);
4651 if (TREE_CODE (value) == CONSTRUCTOR)
4652 store_constructor (value, xtarget, cleared,
4653 bitsize / BITS_PER_UNIT);
4654 else
4655 store_expr (value, xtarget, 0);
4657 /* Generate a conditional jump to exit the loop. */
4658 exit_cond = build2 (LT_EXPR, integer_type_node,
4659 index, hi_index);
4660 jumpif (exit_cond, loop_end);
4662 /* Update the loop counter, and jump to the head of
4663 the loop. */
4664 expand_assignment (index,
4665 build2 (PLUS_EXPR, TREE_TYPE (index),
4666 index, integer_one_node));
4668 emit_jump (loop_start);
4670 /* Build the end of the loop. */
4671 emit_label (loop_end);
4674 else if ((index != 0 && ! host_integerp (index, 0))
4675 || ! host_integerp (TYPE_SIZE (elttype), 1))
4677 tree position;
4679 if (index == 0)
4680 index = ssize_int (1);
4682 if (minelt)
4683 index = fold_convert (ssizetype,
4684 fold (build2 (MINUS_EXPR,
4685 TREE_TYPE (index),
4686 index,
4687 TYPE_MIN_VALUE (domain))));
4689 position = size_binop (MULT_EXPR, index,
4690 convert (ssizetype,
4691 TYPE_SIZE_UNIT (elttype)));
4692 xtarget = offset_address (target,
4693 expand_expr (position, 0, VOIDmode, 0),
4694 highest_pow2_factor (position));
4695 xtarget = adjust_address (xtarget, mode, 0);
4696 store_expr (value, xtarget, 0);
4698 else
4700 if (index != 0)
4701 bitpos = ((tree_low_cst (index, 0) - minelt)
4702 * tree_low_cst (TYPE_SIZE (elttype), 1));
4703 else
4704 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4706 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
4707 && TREE_CODE (type) == ARRAY_TYPE
4708 && TYPE_NONALIASED_COMPONENT (type))
4710 target = copy_rtx (target);
4711 MEM_KEEP_ALIAS_SET_P (target) = 1;
4713 store_constructor_field (target, bitsize, bitpos, mode, value,
4714 type, cleared, get_alias_set (elttype));
4717 break;
4720 case VECTOR_TYPE:
4722 tree elt;
4723 int i;
4724 int need_to_clear;
4725 int icode = 0;
4726 tree elttype = TREE_TYPE (type);
4727 int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
4728 enum machine_mode eltmode = TYPE_MODE (elttype);
4729 HOST_WIDE_INT bitsize;
4730 HOST_WIDE_INT bitpos;
4731 rtx *vector = NULL;
4732 unsigned n_elts;
4734 gcc_assert (eltmode != BLKmode);
4736 n_elts = TYPE_VECTOR_SUBPARTS (type);
4737 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
4739 enum machine_mode mode = GET_MODE (target);
4741 icode = (int) vec_init_optab->handlers[mode].insn_code;
4742 if (icode != CODE_FOR_nothing)
4744 unsigned int i;
4746 vector = alloca (n_elts);
4747 for (i = 0; i < n_elts; i++)
4748 vector [i] = CONST0_RTX (GET_MODE_INNER (mode));
4752 /* If the constructor has fewer elements than the vector,
4753 clear the whole vector first. Similarly if this is a static
4754 constructor of a non-BLKmode object. */
4755 if (cleared)
4756 need_to_clear = 0;
4757 else if (REG_P (target) && TREE_STATIC (exp))
4758 need_to_clear = 1;
4759 else
4761 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
4763 for (elt = CONSTRUCTOR_ELTS (exp);
4764 elt != NULL_TREE;
4765 elt = TREE_CHAIN (elt))
4767 int n_elts_here = tree_low_cst
4768 (int_const_binop (TRUNC_DIV_EXPR,
4769 TYPE_SIZE (TREE_TYPE (TREE_VALUE (elt))),
4770 TYPE_SIZE (elttype), 0), 1);
4772 count += n_elts_here;
4773 if (mostly_zeros_p (TREE_VALUE (elt)))
4774 zero_count += n_elts_here;
4777 /* Clear the entire vector first if there are any missing elements,
4778 or if the incidence of zero elements is >= 75%. */
4779 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
4782 if (need_to_clear && size > 0 && !vector)
4784 if (REG_P (target))
4785 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4786 else
4787 clear_storage (target, GEN_INT (size));
4788 cleared = 1;
4791 if (!cleared && REG_P (target))
4792 /* Inform later passes that the old value is dead. */
4793 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4795 /* Store each element of the constructor into the corresponding
4796 element of TARGET, determined by counting the elements. */
4797 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4798 elt;
4799 elt = TREE_CHAIN (elt), i += bitsize / elt_size)
4801 tree value = TREE_VALUE (elt);
4802 tree index = TREE_PURPOSE (elt);
4803 HOST_WIDE_INT eltpos;
4805 bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
4806 if (cleared && initializer_zerop (value))
4807 continue;
4809 if (index != 0)
4810 eltpos = tree_low_cst (index, 1);
4811 else
4812 eltpos = i;
4814 if (vector)
4816 /* Vector CONSTRUCTORs should only be built from smaller
4817 vectors in the case of BLKmode vectors. */
4818 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
4819 vector[eltpos] = expand_expr (value, NULL_RTX, VOIDmode, 0);
4821 else
4823 enum machine_mode value_mode =
4824 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
4825 ? TYPE_MODE (TREE_TYPE (value))
4826 : eltmode;
4827 bitpos = eltpos * elt_size;
4828 store_constructor_field (target, bitsize, bitpos,
4829 value_mode, value, type,
4830 cleared, get_alias_set (elttype));
4834 if (vector)
4835 emit_insn (GEN_FCN (icode)
4836 (target,
4837 gen_rtx_PARALLEL (GET_MODE (target),
4838 gen_rtvec_v (n_elts, vector))));
4839 break;
4842 /* Set constructor assignments. */
4843 case SET_TYPE:
4845 tree elt = CONSTRUCTOR_ELTS (exp);
4846 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
4847 tree domain = TYPE_DOMAIN (type);
4848 tree domain_min, domain_max, bitlength;
4850 /* The default implementation strategy is to extract the
4851 constant parts of the constructor, use that to initialize
4852 the target, and then "or" in whatever non-constant ranges
4853 we need in addition.
4855 If a large set is all zero or all ones, it is probably
4856 better to set it using memset. Also, if a large set has
4857 just a single range, it may also be better to first clear the
4858 whole set (using memset), and then set the
4859 bits we want. */
4861 /* Check for all zeros. */
4862 if (elt == NULL_TREE && size > 0)
4864 if (!cleared)
4865 clear_storage (target, GEN_INT (size));
4866 return;
4869 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4870 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4871 bitlength = size_binop (PLUS_EXPR,
4872 size_diffop (domain_max, domain_min),
4873 ssize_int (1));
4875 nbits = tree_low_cst (bitlength, 1);
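/* For example, a set whose domain is [0, 31] has
bitlength = (31 - 0) + 1 = 32, so nbits is 32.  */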
4877 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets
4878 that are "complicated" (more than one range), initialize
4879 (the constant parts) by copying from a constant. */
4880 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4881 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4883 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4884 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4885 char *bit_buffer = alloca (nbits);
4886 HOST_WIDE_INT word = 0;
4887 unsigned int bit_pos = 0;
4888 unsigned int ibit = 0;
4889 unsigned int offset = 0; /* In bytes from beginning of set. */
4891 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4892 for (;;)
4894 if (bit_buffer[ibit])
4896 if (BYTES_BIG_ENDIAN)
4897 word |= (1 << (set_word_size - 1 - bit_pos));
4898 else
4899 word |= 1 << bit_pos;
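/* For example, with a 32-bit set word, member bit 3 lands in bit
32 - 1 - 3 = 28 of WORD on a big-endian target and in bit 3 on a
little-endian one.  */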
4902 bit_pos++; ibit++;
4903 if (bit_pos >= set_word_size || ibit == nbits)
4905 if (word != 0 || ! cleared)
4907 rtx datum = gen_int_mode (word, mode);
4908 rtx to_rtx;
4910 /* The assumption here is that it is safe to
4911 use XEXP if the set is multi-word, but not
4912 if it's single-word. */
4913 if (MEM_P (target))
4914 to_rtx = adjust_address (target, mode, offset);
4915 else
4917 gcc_assert (!offset);
4918 to_rtx = target;
4920 emit_move_insn (to_rtx, datum);
4923 if (ibit == nbits)
4924 break;
4925 word = 0;
4926 bit_pos = 0;
4927 offset += set_word_size / BITS_PER_UNIT;
4931 else if (!cleared)
4932 /* Don't bother clearing storage if the set is all ones. */
4933 if (TREE_CHAIN (elt) != NULL_TREE
4934 || (TREE_PURPOSE (elt) == NULL_TREE
4935 ? nbits != 1
4936 : ( ! host_integerp (TREE_VALUE (elt), 0)
4937 || ! host_integerp (TREE_PURPOSE (elt), 0)
4938 || (tree_low_cst (TREE_VALUE (elt), 0)
4939 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
4940 != (HOST_WIDE_INT) nbits))))
4941 clear_storage (target, expr_size (exp));
4943 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
4945 /* Start of range of element or NULL. */
4946 tree startbit = TREE_PURPOSE (elt);
4947 /* End of range of element, or element value. */
4948 tree endbit = TREE_VALUE (elt);
4949 HOST_WIDE_INT startb, endb;
4950 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
4952 bitlength_rtx = expand_expr (bitlength,
4953 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
4955 /* Handle non-range tuple element like [ expr ]. */
4956 if (startbit == NULL_TREE)
4958 startbit = save_expr (endbit);
4959 endbit = startbit;
4962 startbit = convert (sizetype, startbit);
4963 endbit = convert (sizetype, endbit);
4964 if (! integer_zerop (domain_min))
4966 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
4967 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
4969 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
4970 EXPAND_CONST_ADDRESS);
4971 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
4972 EXPAND_CONST_ADDRESS);
4974 if (REG_P (target))
4976 targetx
4977 = assign_temp
4978 ((build_qualified_type (lang_hooks.types.type_for_mode
4979 (GET_MODE (target), 0),
4980 TYPE_QUAL_CONST)),
4981 0, 1, 1);
4982 emit_move_insn (targetx, target);
4985 else
4987 gcc_assert (MEM_P (target));
4988 targetx = target;
4991 /* Optimization: If startbit and endbit are constants divisible
4992 by BITS_PER_UNIT, call memset instead. */
4993 if (TREE_CODE (startbit) == INTEGER_CST
4994 && TREE_CODE (endbit) == INTEGER_CST
4995 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
4996 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
4998 emit_library_call (memset_libfunc, LCT_NORMAL,
4999 VOIDmode, 3,
5000 plus_constant (XEXP (targetx, 0),
5001 startb / BITS_PER_UNIT),
5002 Pmode,
5003 constm1_rtx, TYPE_MODE (integer_type_node),
5004 GEN_INT ((endb - startb) / BITS_PER_UNIT),
5005 TYPE_MODE (sizetype));
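/* For example, a constant range covering bits 8 through 23 gives
startb == 8 and endb == 24, so memset fills (24 - 8) / 8 == 2 bytes
starting one byte into the set.  */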
5007 else
5008 emit_library_call (setbits_libfunc, LCT_NORMAL,
5009 VOIDmode, 4, XEXP (targetx, 0),
5010 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5011 startbit_rtx, TYPE_MODE (sizetype),
5012 endbit_rtx, TYPE_MODE (sizetype));
5014 if (REG_P (target))
5015 emit_move_insn (target, targetx);
5017 break;
5019 default:
5020 gcc_unreachable ();
5024 /* Store the value of EXP (an expression tree)
5025 into a subfield of TARGET which has mode MODE and occupies
5026 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5027 If MODE is VOIDmode, it means that we are storing into a bit-field.
5029 Always return const0_rtx unless we have something particular to
5030 return.
5032 TYPE is the type of the underlying object,
5034 ALIAS_SET is the alias set for the destination. This value will
5035 (in general) be different from that for TARGET, since TARGET is a
5036 reference to the containing structure. */
5038 static rtx
5039 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5040 enum machine_mode mode, tree exp, tree type, int alias_set)
5042 HOST_WIDE_INT width_mask = 0;
5044 if (TREE_CODE (exp) == ERROR_MARK)
5045 return const0_rtx;
5047 /* If we have nothing to store, do nothing unless the expression has
5048 side-effects. */
5049 if (bitsize == 0)
5050 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5051 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5052 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5054 /* If we are storing into an unaligned field of an aligned union that is
5055 in a register, we may have the mode of TARGET being an integer mode but
5056 MODE == BLKmode. In that case, get an aligned object whose size and
5057 alignment are the same as TARGET and store TARGET into it (we can avoid
5058 the store if the field being stored is the entire width of TARGET). Then
5059 call ourselves recursively to store the field into a BLKmode version of
5060 that object. Finally, load from the object into TARGET. This is not
5061 very efficient in general, but should only be slightly more expensive
5062 than the otherwise-required unaligned accesses. Perhaps this can be
5063 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5064 twice, once with emit_move_insn and once via store_field. */
5066 if (mode == BLKmode
5067 && (REG_P (target) || GET_CODE (target) == SUBREG))
5069 rtx object = assign_temp (type, 0, 1, 1);
5070 rtx blk_object = adjust_address (object, BLKmode, 0);
5072 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5073 emit_move_insn (object, target);
5075 store_field (blk_object, bitsize, bitpos, mode, exp, type, alias_set);
5077 emit_move_insn (target, object);
5079 /* We want to return the BLKmode version of the data. */
5080 return blk_object;
5083 if (GET_CODE (target) == CONCAT)
5085 /* We're storing into a struct containing a single __complex. */
5087 gcc_assert (!bitpos);
5088 return store_expr (exp, target, 0);
5091 /* If the structure is in a register or if the component
5092 is a bit field, we cannot use addressing to access it.
5093 Use bit-field techniques or SUBREG to store in it. */
5095 if (mode == VOIDmode
5096 || (mode != BLKmode && ! direct_store[(int) mode]
5097 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5098 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5099 || REG_P (target)
5100 || GET_CODE (target) == SUBREG
5101 /* If the field isn't aligned enough to store as an ordinary memref,
5102 store it as a bit field. */
5103 || (mode != BLKmode
5104 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5105 || bitpos % GET_MODE_ALIGNMENT (mode))
5106 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5107 || (bitpos % BITS_PER_UNIT != 0)))
5108 /* If the RHS and field are a constant size and the size of the
5109 RHS isn't the same size as the bitfield, we must use bitfield
5110 operations. */
5111 || (bitsize >= 0
5112 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5113 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5115 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5117 /* If BITSIZE is narrower than the size of the type of EXP
5118 we will be narrowing TEMP. Normally, what's wanted are the
5119 low-order bits. However, if EXP's type is a record and this is
5120 big-endian machine, we want the upper BITSIZE bits. */
5121 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5122 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5123 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5124 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5125 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5126 - bitsize),
5127 NULL_RTX, 1);
5129 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5130 MODE. */
5131 if (mode != VOIDmode && mode != BLKmode
5132 && mode != TYPE_MODE (TREE_TYPE (exp)))
5133 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5135 /* If the modes of TARGET and TEMP are both BLKmode, both
5136 must be in memory and BITPOS must be aligned on a byte
5137 boundary. If so, we simply do a block copy. */
5138 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5140 gcc_assert (MEM_P (target) && MEM_P (temp)
5141 && !(bitpos % BITS_PER_UNIT));
5143 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5144 emit_block_move (target, temp,
5145 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5146 / BITS_PER_UNIT),
5147 BLOCK_OP_NORMAL);
5149 return const0_rtx;
5152 /* Store the value in the bitfield. */
5153 store_bit_field (target, bitsize, bitpos, mode, temp);
5155 return const0_rtx;
5157 else
5159 /* Now build a reference to just the desired component. */
5160 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5162 if (to_rtx == target)
5163 to_rtx = copy_rtx (to_rtx);
5165 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5166 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5167 set_mem_alias_set (to_rtx, alias_set);
5169 return store_expr (exp, to_rtx, 0);
5173 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5174 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5175 codes and find the ultimate containing object, which we return.
5177 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5178 bit position, and *PUNSIGNEDP to the signedness of the field.
5179 If the position of the field is variable, we store a tree
5180 giving the variable offset (in units) in *POFFSET.
5181 This offset is in addition to the bit position.
5182 If the position is not variable, we store 0 in *POFFSET.
5184 If any of the extraction expressions is volatile,
5185 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5187 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5188 is a mode that can be used to access the field. In that case, *PBITSIZE
5189 is redundant.
5191 If the field describes a variable-sized object, *PMODE is set to
5192 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5193 this case, but the address of the object can be found. */
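/* For example, for a COMPONENT_REF of a 3-bit bit-field that starts
17 bits into its containing record, *PBITSIZE is set to 3, *PBITPOS to
17, *POFFSET to 0 (the position is constant) and *PMODE to VOIDmode.  */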
5195 tree
5196 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5197 HOST_WIDE_INT *pbitpos, tree *poffset,
5198 enum machine_mode *pmode, int *punsignedp,
5199 int *pvolatilep)
5201 tree size_tree = 0;
5202 enum machine_mode mode = VOIDmode;
5203 tree offset = size_zero_node;
5204 tree bit_offset = bitsize_zero_node;
5205 tree tem;
5207 /* First get the mode, signedness, and size. We do this from just the
5208 outermost expression. */
5209 if (TREE_CODE (exp) == COMPONENT_REF)
5211 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5212 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5213 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5215 *punsignedp = DECL_UNSIGNED (TREE_OPERAND (exp, 1));
5217 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5219 size_tree = TREE_OPERAND (exp, 1);
5220 *punsignedp = BIT_FIELD_REF_UNSIGNED (exp);
5222 else
5224 mode = TYPE_MODE (TREE_TYPE (exp));
5225 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
5227 if (mode == BLKmode)
5228 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5229 else
5230 *pbitsize = GET_MODE_BITSIZE (mode);
5233 if (size_tree != 0)
5235 if (! host_integerp (size_tree, 1))
5236 mode = BLKmode, *pbitsize = -1;
5237 else
5238 *pbitsize = tree_low_cst (size_tree, 1);
5241 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5242 and find the ultimate containing object. */
5243 while (1)
5245 if (TREE_CODE (exp) == BIT_FIELD_REF)
5246 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5247 else if (TREE_CODE (exp) == COMPONENT_REF)
5249 tree field = TREE_OPERAND (exp, 1);
5250 tree this_offset = component_ref_field_offset (exp);
5252 /* If this field hasn't been filled in yet, don't go
5253 past it. This should only happen when folding expressions
5254 made during type construction. */
5255 if (this_offset == 0)
5256 break;
5258 offset = size_binop (PLUS_EXPR, offset, this_offset);
5259 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5260 DECL_FIELD_BIT_OFFSET (field));
5262 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5265 else if (TREE_CODE (exp) == ARRAY_REF
5266 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5268 tree index = TREE_OPERAND (exp, 1);
5269 tree low_bound = array_ref_low_bound (exp);
5270 tree unit_size = array_ref_element_size (exp);
5272 /* We assume all arrays have sizes that are a multiple of a byte.
5273 First subtract the lower bound, if any, in the type of the
5274 index, then convert to sizetype and multiply by the size of the
5275 array element. */
5276 if (! integer_zerop (low_bound))
5277 index = fold (build2 (MINUS_EXPR, TREE_TYPE (index),
5278 index, low_bound));
5280 offset = size_binop (PLUS_EXPR, offset,
5281 size_binop (MULT_EXPR,
5282 convert (sizetype, index),
5283 unit_size));
5286 /* We can go inside most conversions: all NON_LVALUE_EXPRs, all normal
5287 conversions that don't change the mode, and all view conversions
5288 except those that need to "step up" the alignment. */
5289 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5290 && ! (TREE_CODE (exp) == VIEW_CONVERT_EXPR
5291 && ! ((TYPE_ALIGN (TREE_TYPE (exp))
5292 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
5293 && STRICT_ALIGNMENT
5294 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5295 < BIGGEST_ALIGNMENT)
5296 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5297 || TYPE_ALIGN_OK (TREE_TYPE
5298 (TREE_OPERAND (exp, 0))))))
5299 && ! ((TREE_CODE (exp) == NOP_EXPR
5300 || TREE_CODE (exp) == CONVERT_EXPR)
5301 && (TYPE_MODE (TREE_TYPE (exp))
5302 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5303 break;
5305 /* If any reference in the chain is volatile, the effect is volatile. */
5306 if (TREE_THIS_VOLATILE (exp))
5307 *pvolatilep = 1;
5309 exp = TREE_OPERAND (exp, 0);
5312 /* If OFFSET is constant, see if we can return the whole thing as a
5313 constant bit position. Otherwise, split it up. */
5314 if (host_integerp (offset, 0)
5315 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5316 bitsize_unit_node))
5317 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5318 && host_integerp (tem, 0))
5319 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5320 else
5321 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5323 *pmode = mode;
5324 return exp;
5327 /* Return a tree of sizetype representing the size, in bytes, of the element
5328 of EXP, an ARRAY_REF. */
5330 tree
5331 array_ref_element_size (tree exp)
5333 tree aligned_size = TREE_OPERAND (exp, 3);
5334 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5336 /* If a size was specified in the ARRAY_REF, it's the size measured
5337 in alignment units of the element type. So multiply by that value. */
5338 if (aligned_size)
5340 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
5341 sizetype from another type of the same width and signedness. */
5342 if (TREE_TYPE (aligned_size) != sizetype)
5343 aligned_size = fold_convert (sizetype, aligned_size);
5344 return size_binop (MULT_EXPR, aligned_size,
5345 size_int (TYPE_ALIGN_UNIT (elmt_type)));
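/* For example, an aligned size of 3 combined with a 4-byte element
alignment yields an element size of 3 * 4 = 12 bytes.  */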
5348 /* Otherwise, take the size from that of the element type. Substitute
5349 any PLACEHOLDER_EXPR that we have. */
5350 else
5351 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
5354 /* Return a tree representing the lower bound of the array mentioned in
5355 EXP, an ARRAY_REF. */
5357 tree
5358 array_ref_low_bound (tree exp)
5360 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5362 /* If a lower bound is specified in EXP, use it. */
5363 if (TREE_OPERAND (exp, 2))
5364 return TREE_OPERAND (exp, 2);
5366 /* Otherwise, if there is a domain type and it has a lower bound, use it,
5367 substituting for a PLACEHOLDER_EXPR as needed. */
5368 if (domain_type && TYPE_MIN_VALUE (domain_type))
5369 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
5371 /* Otherwise, return a zero of the appropriate type. */
5372 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
5375 /* Return a tree representing the upper bound of the array mentioned in
5376 EXP, an ARRAY_REF. */
5378 tree
5379 array_ref_up_bound (tree exp)
5381 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5383 /* If there is a domain type and it has an upper bound, use it, substituting
5384 for a PLACEHOLDER_EXPR as needed. */
5385 if (domain_type && TYPE_MAX_VALUE (domain_type))
5386 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
5388 /* Otherwise fail. */
5389 return NULL_TREE;
5392 /* Return a tree representing the offset, in bytes, of the field referenced
5393 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
5395 tree
5396 component_ref_field_offset (tree exp)
5398 tree aligned_offset = TREE_OPERAND (exp, 2);
5399 tree field = TREE_OPERAND (exp, 1);
5401 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
5402 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
5403 value. */
5404 if (aligned_offset)
5406 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
5407 sizetype from another type of the same width and signedness. */
5408 if (TREE_TYPE (aligned_offset) != sizetype)
5409 aligned_offset = fold_convert (sizetype, aligned_offset);
5410 return size_binop (MULT_EXPR, aligned_offset,
5411 size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT));
5414 /* Otherwise, take the offset from that of the field. Substitute
5415 any PLACEHOLDER_EXPR that we have. */
5416 else
5417 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
5420 /* Return 1 if T is an expression that get_inner_reference handles. */
5423 handled_component_p (tree t)
5425 switch (TREE_CODE (t))
5427 case BIT_FIELD_REF:
5428 case COMPONENT_REF:
5429 case ARRAY_REF:
5430 case ARRAY_RANGE_REF:
5431 case NON_LVALUE_EXPR:
5432 case VIEW_CONVERT_EXPR:
5433 return 1;
5435 /* ??? Sure they are handled, but get_inner_reference may return
5436 a different PBITSIZE, depending upon whether the expression is
5437 wrapped up in a NOP_EXPR or not, e.g. for bitfields. */
5438 case NOP_EXPR:
5439 case CONVERT_EXPR:
5440 return (TYPE_MODE (TREE_TYPE (t))
5441 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5443 default:
5444 return 0;
5448 /* Given an rtx VALUE that may contain additions and multiplications, return
5449 an equivalent value that just refers to a register, memory, or constant.
5450 This is done by generating instructions to perform the arithmetic and
5451 returning a pseudo-register containing the value.
5453 The returned value may be a REG, SUBREG, MEM or constant. */
5456 force_operand (rtx value, rtx target)
5458 rtx op1, op2;
5459 /* Use subtarget as the target for operand 0 of a binary operation. */
5460 rtx subtarget = get_subtarget (target);
5461 enum rtx_code code = GET_CODE (value);
5463 /* Check for subreg applied to an expression produced by loop optimizer. */
5464 if (code == SUBREG
5465 && !REG_P (SUBREG_REG (value))
5466 && !MEM_P (SUBREG_REG (value)))
5468 value = simplify_gen_subreg (GET_MODE (value),
5469 force_reg (GET_MODE (SUBREG_REG (value)),
5470 force_operand (SUBREG_REG (value),
5471 NULL_RTX)),
5472 GET_MODE (SUBREG_REG (value)),
5473 SUBREG_BYTE (value));
5474 code = GET_CODE (value);
5477 /* Check for a PIC address load. */
5478 if ((code == PLUS || code == MINUS)
5479 && XEXP (value, 0) == pic_offset_table_rtx
5480 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5481 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5482 || GET_CODE (XEXP (value, 1)) == CONST))
5484 if (!subtarget)
5485 subtarget = gen_reg_rtx (GET_MODE (value));
5486 emit_move_insn (subtarget, value);
5487 return subtarget;
5490 if (code == ZERO_EXTEND || code == SIGN_EXTEND)
5492 if (!target)
5493 target = gen_reg_rtx (GET_MODE (value));
5494 convert_move (target, force_operand (XEXP (value, 0), NULL),
5495 code == ZERO_EXTEND);
5496 return target;
5499 if (ARITHMETIC_P (value))
5501 op2 = XEXP (value, 1);
5502 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
5503 subtarget = 0;
5504 if (code == MINUS && GET_CODE (op2) == CONST_INT)
5506 code = PLUS;
5507 op2 = negate_rtx (GET_MODE (value), op2);
5510 /* Check for an addition with OP2 a constant integer and our first
5511 operand a PLUS of a virtual register and something else. In that
5512 case, we want to emit the sum of the virtual register and the
5513 constant first and then add the other value. This allows virtual
5514 register instantiation to simply modify the constant rather than
5515 creating another one around this addition. */
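/* For example, for (plus (plus virtual-stack-vars (reg 60)) (const_int 4))
the sum virtual-stack-vars + 4 is emitted first, which instantiation can
fold into a single displacement, and (reg 60) is added afterwards.  */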
5516 if (code == PLUS && GET_CODE (op2) == CONST_INT
5517 && GET_CODE (XEXP (value, 0)) == PLUS
5518 && REG_P (XEXP (XEXP (value, 0), 0))
5519 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5520 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5522 rtx temp = expand_simple_binop (GET_MODE (value), code,
5523 XEXP (XEXP (value, 0), 0), op2,
5524 subtarget, 0, OPTAB_LIB_WIDEN);
5525 return expand_simple_binop (GET_MODE (value), code, temp,
5526 force_operand (XEXP (XEXP (value,
5527 0), 1), 0),
5528 target, 0, OPTAB_LIB_WIDEN);
5531 op1 = force_operand (XEXP (value, 0), subtarget);
5532 op2 = force_operand (op2, NULL_RTX);
5533 switch (code)
5535 case MULT:
5536 return expand_mult (GET_MODE (value), op1, op2, target, 1);
5537 case DIV:
5538 if (!INTEGRAL_MODE_P (GET_MODE (value)))
5539 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5540 target, 1, OPTAB_LIB_WIDEN);
5541 else
5542 return expand_divmod (0,
5543 FLOAT_MODE_P (GET_MODE (value))
5544 ? RDIV_EXPR : TRUNC_DIV_EXPR,
5545 GET_MODE (value), op1, op2, target, 0);
5546 break;
5547 case MOD:
5548 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5549 target, 0);
5550 break;
5551 case UDIV:
5552 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
5553 target, 1);
5554 break;
5555 case UMOD:
5556 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5557 target, 1);
5558 break;
5559 case ASHIFTRT:
5560 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5561 target, 0, OPTAB_LIB_WIDEN);
5562 break;
5563 default:
5564 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5565 target, 1, OPTAB_LIB_WIDEN);
5568 if (UNARY_P (value))
5570 op1 = force_operand (XEXP (value, 0), NULL_RTX);
5571 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
5574 #ifdef INSN_SCHEDULING
5575 /* On machines that have insn scheduling, we want all memory references to be
5576 explicit, so we need to deal with such paradoxical SUBREGs. */
5577 if (GET_CODE (value) == SUBREG && MEM_P (SUBREG_REG (value))
5578 && (GET_MODE_SIZE (GET_MODE (value))
5579 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5580 value
5581 = simplify_gen_subreg (GET_MODE (value),
5582 force_reg (GET_MODE (SUBREG_REG (value)),
5583 force_operand (SUBREG_REG (value),
5584 NULL_RTX)),
5585 GET_MODE (SUBREG_REG (value)),
5586 SUBREG_BYTE (value));
5587 #endif
5589 return value;
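/* Illustrative sketch (assumed caller, not part of this file): the usual
   use of force_operand is to legitimize an address computation before it
   is fed to an insn.  BASE and INDEX stand for pseudo registers obtained
   elsewhere; the usual internal headers (rtl.h, expr.h) are presumed.  */
#if 0
static rtx
example_legitimize_address (rtx base, rtx index)
{
  /* Build (plus BASE (mult INDEX 4)); on most machines this is not yet
     a valid operand because the arithmetic has not been emitted.  */
  rtx addr = gen_rtx_PLUS (Pmode, base,
			   gen_rtx_MULT (Pmode, index, GEN_INT (4)));

  /* force_operand emits the multiply and add as insns and returns a
     REG (or constant or MEM) holding the computed value.  */
  return force_operand (addr, NULL_RTX);
}
#endif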
5592 /* Subroutine of expand_expr: return nonzero iff there is no way that
5593 EXP can reference X, which is being modified. TOP_P is nonzero if this
5594 call is going to be used to determine whether we need a temporary
5595 for EXP, as opposed to a recursive call to this function.
5597 It is always safe for this routine to return zero since it merely
5598 searches for optimization opportunities. */
5601 safe_from_p (rtx x, tree exp, int top_p)
5603 rtx exp_rtl = 0;
5604 int i, nops;
5606 if (x == 0
5607 /* If EXP has varying size, we MUST use a target since we currently
5608 have no way of allocating temporaries of variable size
5609 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5610 So we assume here that something at a higher level has prevented a
5611 clash. This is somewhat bogus, but the best we can do. Only
5612 do this when X is BLKmode and when we are at the top level. */
5613 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5614 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5615 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5616 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5617 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5618 != INTEGER_CST)
5619 && GET_MODE (x) == BLKmode)
5620 /* If X is in the outgoing argument area, it is always safe. */
5621 || (MEM_P (x)
5622 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5623 || (GET_CODE (XEXP (x, 0)) == PLUS
5624 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5625 return 1;
5627 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5628 find the underlying pseudo. */
5629 if (GET_CODE (x) == SUBREG)
5631 x = SUBREG_REG (x);
5632 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
5633 return 0;
5636 /* Now look at our tree code and possibly recurse. */
5637 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5639 case tcc_declaration:
5640 exp_rtl = DECL_RTL_IF_SET (exp);
5641 break;
5643 case tcc_constant:
5644 return 1;
5646 case tcc_exceptional:
5647 if (TREE_CODE (exp) == TREE_LIST)
5649 while (1)
5651 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
5652 return 0;
5653 exp = TREE_CHAIN (exp);
5654 if (!exp)
5655 return 1;
5656 if (TREE_CODE (exp) != TREE_LIST)
5657 return safe_from_p (x, exp, 0);
5660 else if (TREE_CODE (exp) == ERROR_MARK)
5661 return 1; /* An already-visited SAVE_EXPR? */
5662 else
5663 return 0;
5665 case tcc_statement:
5666 /* The only case we look at here is the DECL_INITIAL inside a
5667 DECL_EXPR. */
5668 return (TREE_CODE (exp) != DECL_EXPR
5669 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
5670 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
5671 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
5673 case tcc_binary:
5674 case tcc_comparison:
5675 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
5676 return 0;
5677 /* Fall through. */
5679 case tcc_unary:
5680 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5682 case tcc_expression:
5683 case tcc_reference:
5684 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5685 the expression. If it is set, we conflict iff we are that rtx or
5686 both are in memory. Otherwise, we check all operands of the
5687 expression recursively. */
5689 switch (TREE_CODE (exp))
5691 case ADDR_EXPR:
5692 /* If the operand is static or we are static, we can't conflict.
5693 Likewise if we don't conflict with the operand at all. */
5694 if (staticp (TREE_OPERAND (exp, 0))
5695 || TREE_STATIC (exp)
5696 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5697 return 1;
5699 /* Otherwise, the only way this can conflict is if we are taking
5700 the address of a DECL whose address is part of X, which is
5701 very rare. */
5702 exp = TREE_OPERAND (exp, 0);
5703 if (DECL_P (exp))
5705 if (!DECL_RTL_SET_P (exp)
5706 || !MEM_P (DECL_RTL (exp)))
5707 return 0;
5708 else
5709 exp_rtl = XEXP (DECL_RTL (exp), 0);
5711 break;
5713 case MISALIGNED_INDIRECT_REF:
5714 case ALIGN_INDIRECT_REF:
5715 case INDIRECT_REF:
5716 if (MEM_P (x)
5717 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5718 get_alias_set (exp)))
5719 return 0;
5720 break;
5722 case CALL_EXPR:
5723 /* Assume that the call will clobber all hard registers and
5724 all of memory. */
5725 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
5726 || MEM_P (x))
5727 return 0;
5728 break;
5730 case WITH_CLEANUP_EXPR:
5731 case CLEANUP_POINT_EXPR:
5732 /* Lowered by gimplify.c. */
5733 gcc_unreachable ();
5735 case SAVE_EXPR:
5736 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5738 default:
5739 break;
5742 /* If we have an rtx, we do not need to scan our operands. */
5743 if (exp_rtl)
5744 break;
5746 nops = first_rtl_op (TREE_CODE (exp));
5747 for (i = 0; i < nops; i++)
5748 if (TREE_OPERAND (exp, i) != 0
5749 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5750 return 0;
5752 /* If this is a language-specific tree code, it may require
5753 special handling. */
5754 if ((unsigned int) TREE_CODE (exp)
5755 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5756 && !lang_hooks.safe_from_p (x, exp))
5757 return 0;
5758 break;
5760 case tcc_type:
5761 /* Should never get a type here. */
5762 gcc_unreachable ();
5765 /* If we have an rtl, find any enclosed object. Then see if we conflict
5766 with it. */
5767 if (exp_rtl)
5769 if (GET_CODE (exp_rtl) == SUBREG)
5771 exp_rtl = SUBREG_REG (exp_rtl);
5772 if (REG_P (exp_rtl)
5773 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5774 return 0;
5777 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5778 are memory and they conflict. */
5779 return ! (rtx_equal_p (x, exp_rtl)
5780 || (MEM_P (x) && MEM_P (exp_rtl)
5781 && true_dependence (exp_rtl, VOIDmode, x,
5782 rtx_addr_varies_p)));
5785 /* If we reach here, it is safe. */
5786 return 1;
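/* Worked example (illustrative): when expanding something like *p = x + f (),
   a caller may wish to compute the right-hand side directly into the MEM
   for *p.  safe_from_p (that MEM, the CALL_EXPR for f, 1) returns 0,
   because the CALL_EXPR case above assumes a call clobbers all of memory,
   so the caller must fall back to a temporary instead.  */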
5790 /* Return the highest power of two that EXP is known to be a multiple of.
5791 This is used in updating alignment of MEMs in array references. */
5793 static unsigned HOST_WIDE_INT
5794 highest_pow2_factor (tree exp)
5796 unsigned HOST_WIDE_INT c0, c1;
5798 switch (TREE_CODE (exp))
5800 case INTEGER_CST:
5801 /* We can find the lowest bit that's a one. If the low
5802 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
5803 We need to handle this case since we can find it in a COND_EXPR,
5804 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
5805 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
5806 later ICE. */
5807 if (TREE_CONSTANT_OVERFLOW (exp))
5808 return BIGGEST_ALIGNMENT;
5809 else
5811 /* Note: tree_low_cst is intentionally not used here,
5812 we don't care about the upper bits. */
5813 c0 = TREE_INT_CST_LOW (exp);
5814 c0 &= -c0;
5815 return c0 ? c0 : BIGGEST_ALIGNMENT;
5817 break;
5819 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
5820 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5821 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5822 return MIN (c0, c1);
5824 case MULT_EXPR:
5825 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5826 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5827 return c0 * c1;
5829 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
5830 case CEIL_DIV_EXPR:
5831 if (integer_pow2p (TREE_OPERAND (exp, 1))
5832 && host_integerp (TREE_OPERAND (exp, 1), 1))
5834 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5835 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
5836 return MAX (1, c0 / c1);
5838 break;
5840 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
5841 case SAVE_EXPR:
5842 return highest_pow2_factor (TREE_OPERAND (exp, 0));
5844 case COMPOUND_EXPR:
5845 return highest_pow2_factor (TREE_OPERAND (exp, 1));
5847 case COND_EXPR:
5848 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5849 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
5850 return MIN (c0, c1);
5852 default:
5853 break;
5856 return 1;
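/* Worked example (illustrative): for an offset tree of the form i * 12 + 8
   the recursion above gives
     MULT_EXPR:   factor (i) * factor (12) = 1 * 4 = 4
     INTEGER_CST: factor (8) = 8
     PLUS_EXPR:   MIN (4, 8) = 4
   so a memory reference at that offset may be assumed 4-byte aligned
   relative to the alignment of its base.  */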
5859 /* Similar, except that the alignment requirements of TARGET are
5860 taken into account. Assume it is at least as aligned as its
5861 type, unless it is a COMPONENT_REF in which case the layout of
5862 the structure gives the alignment. */
5864 static unsigned HOST_WIDE_INT
5865 highest_pow2_factor_for_target (tree target, tree exp)
5867 unsigned HOST_WIDE_INT target_align, factor;
5869 factor = highest_pow2_factor (exp);
5870 if (TREE_CODE (target) == COMPONENT_REF)
5871 target_align = DECL_ALIGN_UNIT (TREE_OPERAND (target, 1));
5872 else
5873 target_align = TYPE_ALIGN_UNIT (TREE_TYPE (target));
5874 return MAX (factor, target_align);
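/* Worked example (illustrative): if EXP is i * 4 (factor 4) and TARGET is
   a COMPONENT_REF whose FIELD_DECL is laid out with 8-byte alignment
   (DECL_ALIGN_UNIT == 8), the result is MAX (4, 8) == 8.  */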
5877 /* Expands variable VAR. */
5879 void
5880 expand_var (tree var)
5882 if (DECL_EXTERNAL (var))
5883 return;
5885 if (TREE_STATIC (var))
5886 /* If this is an inlined copy of a static local variable,
5887 look up the original decl. */
5888 var = DECL_ORIGIN (var);
5890 if (TREE_STATIC (var)
5891 ? !TREE_ASM_WRITTEN (var)
5892 : !DECL_RTL_SET_P (var))
5894 if (TREE_CODE (var) == VAR_DECL && DECL_VALUE_EXPR (var))
5895 /* Should be ignored. */;
5896 else if (lang_hooks.expand_decl (var))
5897 /* OK. */;
5898 else if (TREE_CODE (var) == VAR_DECL && !TREE_STATIC (var))
5899 expand_decl (var);
5900 else if (TREE_CODE (var) == VAR_DECL && TREE_STATIC (var))
5901 rest_of_decl_compilation (var, 0, 0);
5902 else
5903 /* No expansion needed. */
5904 gcc_assert (TREE_CODE (var) == TYPE_DECL
5905 || TREE_CODE (var) == CONST_DECL
5906 || TREE_CODE (var) == FUNCTION_DECL
5907 || TREE_CODE (var) == LABEL_DECL);
5911 /* Subroutine of expand_expr. Expand the two operands of a binary
5912 expression EXP0 and EXP1 placing the results in OP0 and OP1.
5913 The value may be stored in TARGET if TARGET is nonzero. The
5914 MODIFIER argument is as documented by expand_expr. */
5916 static void
5917 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
5918 enum expand_modifier modifier)
5920 if (! safe_from_p (target, exp1, 1))
5921 target = 0;
5922 if (operand_equal_p (exp0, exp1, 0))
5924 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
5925 *op1 = copy_rtx (*op0);
5927 else
5929 /* If we need to preserve evaluation order, copy exp0 into its own
5930 temporary variable so that it can't be clobbered by exp1. */
5931 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
5932 exp0 = save_expr (exp0);
5933 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
5934 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
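/* Illustrative sketch (assumed code, not from this file): this is roughly
   how a binary-operator case in expand_expr_real_1 obtains its operand
   rtxes and then emits the operation; the exact modifier handling in the
   real cases differs.  */
#if 0
  expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
		   subtarget, &op0, &op1, EXPAND_NORMAL);
  temp = expand_simple_binop (mode, PLUS, op0, op1, target,
			      unsignedp, OPTAB_LIB_WIDEN);
#endif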
5939 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
5940 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
5942 static rtx
5943 expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
5944 enum expand_modifier modifier)
5946 rtx result, subtarget;
5947 tree inner, offset;
5948 HOST_WIDE_INT bitsize, bitpos;
5949 int volatilep, unsignedp;
5950 enum machine_mode mode1;
5952 /* If we are taking the address of a constant and are at the top level,
5953 we have to use output_constant_def since we can't call force_const_mem
5954 at top level. */
5955 /* ??? This should be considered a front-end bug. We should not be
5956 generating ADDR_EXPR of something that isn't an LVALUE. The only
5957 exception here is STRING_CST. */
5958 if (TREE_CODE (exp) == CONSTRUCTOR
5959 || CONSTANT_CLASS_P (exp))
5960 return XEXP (output_constant_def (exp, 0), 0);
5962 /* Everything must be something allowed by is_gimple_addressable. */
5963 switch (TREE_CODE (exp))
5965 case INDIRECT_REF:
5966 /* This case will happen via recursion for &a->b. */
5967 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, EXPAND_NORMAL);
5969 case CONST_DECL:
5970 /* Recurse and make the output_constant_def clause above handle this. */
5971 return expand_expr_addr_expr_1 (DECL_INITIAL (exp), target,
5972 tmode, modifier);
5974 case REALPART_EXPR:
5975 /* The real part of the complex number is always first, therefore
5976 the address is the same as the address of the parent object. */
5977 offset = 0;
5978 bitpos = 0;
5979 inner = TREE_OPERAND (exp, 0);
5980 break;
5982 case IMAGPART_EXPR:
5983 /* The imaginary part of the complex number is always second.
5984 The expression is therefore always offset by the size of the
5985 scalar type. */
5986 offset = 0;
5987 bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
5988 inner = TREE_OPERAND (exp, 0);
5989 break;
5991 default:
5992 /* If the object is a DECL, then expand it for its rtl. Don't bypass
5993 expand_expr, as that can have various side effects; LABEL_DECLs for
5994 example, may not have their DECL_RTL set yet. Assume language
5995 specific tree nodes can be expanded in some interesting way. */
5996 if (DECL_P (exp)
5997 || TREE_CODE (exp) >= LAST_AND_UNUSED_TREE_CODE)
5999 result = expand_expr (exp, target, tmode,
6000 modifier == EXPAND_INITIALIZER
6001 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
6003 /* If the DECL isn't in memory, then the DECL wasn't properly
6004 marked TREE_ADDRESSABLE, which will be either a front-end
6005 or a tree optimizer bug. */
6006 gcc_assert (GET_CODE (result) == MEM);
6007 result = XEXP (result, 0);
6009 /* ??? Is this needed anymore? */
6010 if (DECL_P (exp) && !TREE_USED (exp) == 0)
6012 assemble_external (exp);
6013 TREE_USED (exp) = 1;
6016 if (modifier != EXPAND_INITIALIZER
6017 && modifier != EXPAND_CONST_ADDRESS)
6018 result = force_operand (result, target);
6019 return result;
6022 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6023 &mode1, &unsignedp, &volatilep);
6024 break;
6027 /* We must have made progress. */
6028 gcc_assert (inner != exp);
6030 subtarget = offset || bitpos ? NULL_RTX : target;
6031 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier);
6033 if (offset)
6035 rtx tmp;
6037 if (modifier != EXPAND_NORMAL)
6038 result = force_operand (result, NULL);
6039 tmp = expand_expr (offset, NULL, tmode, EXPAND_NORMAL);
6041 result = convert_memory_address (tmode, result);
6042 tmp = convert_memory_address (tmode, tmp);
6044 if (modifier == EXPAND_SUM)
6045 result = gen_rtx_PLUS (tmode, result, tmp);
6046 else
6048 subtarget = bitpos ? NULL_RTX : target;
6049 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
6050 1, OPTAB_LIB_WIDEN);
6054 if (bitpos)
6056 /* Someone beforehand should have rejected taking the address
6057 of such an object. */
6058 gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
6060 result = plus_constant (result, bitpos / BITS_PER_UNIT);
6061 if (modifier < EXPAND_SUM)
6062 result = force_operand (result, target);
6065 return result;
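/* Worked example (illustrative): when taking the address of s.b, where
   the field b sits 4 bytes into the struct s, the default case above calls
   get_inner_reference and obtains INNER = s, OFFSET = 0, BITPOS = 32.
   The recursive call returns the address of s, OFFSET contributes nothing,
   and the BITPOS handling adds plus_constant (result, 4).  */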
6068 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
6069 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6071 static rtx
6072 expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
6073 enum expand_modifier modifier)
6075 enum machine_mode rmode;
6076 rtx result;
6078 /* Target mode of VOIDmode says "whatever's natural". */
6079 if (tmode == VOIDmode)
6080 tmode = TYPE_MODE (TREE_TYPE (exp));
6082 /* We can get called with some Weird Things if the user does silliness
6083 like "(short) &a". In that case, convert_memory_address won't do
6084 the right thing, so ignore the given target mode. */
6085 if (tmode != Pmode && tmode != ptr_mode)
6086 tmode = Pmode;
6088 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
6089 tmode, modifier);
6091 /* Despite expand_expr's claims about ignoring TMODE when not
6092 strictly convenient, stuff breaks if we don't honor it. Note
6093 that combined with the above, we only do this for pointer modes. */
6094 rmode = GET_MODE (result);
6095 if (rmode == VOIDmode)
6096 rmode = tmode;
6097 if (rmode != tmode)
6098 result = convert_memory_address (tmode, result);
6100 return result;
6104 /* expand_expr: generate code for computing expression EXP.
6105 An rtx for the computed value is returned. The value is never null.
6106 In the case of a void EXP, const0_rtx is returned.
6108 The value may be stored in TARGET if TARGET is nonzero.
6109 TARGET is just a suggestion; callers must assume that
6110 the rtx returned may not be the same as TARGET.
6112 If TARGET is CONST0_RTX, it means that the value will be ignored.
6114 If TMODE is not VOIDmode, it suggests generating the
6115 result in mode TMODE. But this is done only when convenient.
6116 Otherwise, TMODE is ignored and the value is generated in its natural mode.
6117 TMODE is just a suggestion; callers must assume that
6118 the rtx returned may not have mode TMODE.
6120 Note that TARGET may have neither TMODE nor MODE. In that case, it
6121 probably will not be used.
6123 If MODIFIER is EXPAND_SUM then when EXP is an addition
6124 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6125 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6126 products as above, or REG or MEM, or constant.
6127 Ordinarily in such cases we would output mul or add instructions
6128 and then return a pseudo reg containing the sum.
6130 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6131 it also marks a label as absolutely required (it can't be dead).
6132 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6133 This is used for outputting expressions used in initializers.
6135 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6136 with a constant address even if that address is not normally legitimate.
6137 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6139 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6140 a call parameter. Such targets require special care as we haven't yet
6141 marked TARGET so that it's safe from being trashed by libcalls. We
6142 don't want to use TARGET for anything but the final result;
6143 Intermediate values must go elsewhere. Additionally, calls to
6144 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
6146 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
6147 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
6148 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
6149 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
6150 recursively. */
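/* Illustrative sketch (assumed callers, not part of this file): typical
   uses of the expand_expr wrapper with the modifiers described above.
   VAR_EXP, SUM_EXP and SIDE_EFFECT_EXP are hypothetical trees.  */
#if 0
  /* Ordinary evaluation; TARGET is only a hint.  */
  rtx v = expand_expr (var_exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);

  /* Address arithmetic where a (PLUS ...) form is acceptable.  */
  rtx a = expand_expr (sum_exp, NULL_RTX, Pmode, EXPAND_SUM);

  /* Evaluation purely for side effects; the value is ignored.  */
  expand_expr (side_effect_exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
#endif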
6152 static rtx expand_expr_real_1 (tree, rtx, enum machine_mode,
6153 enum expand_modifier, rtx *);
6156 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
6157 enum expand_modifier modifier, rtx *alt_rtl)
6159 int rn = -1;
6160 rtx ret, last = NULL;
6162 /* Handle ERROR_MARK before anybody tries to access its type. */
6163 if (TREE_CODE (exp) == ERROR_MARK
6164 || TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK)
6166 ret = CONST0_RTX (tmode);
6167 return ret ? ret : const0_rtx;
6170 if (flag_non_call_exceptions)
6172 rn = lookup_stmt_eh_region (exp);
6173 /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't throw. */
6174 if (rn >= 0)
6175 last = get_last_insn ();
6178 /* If this is an expression of some kind and it has an associated line
6179 number, then emit the line number before expanding the expression.
6181 We need to save and restore the file and line information so that
6182 errors discovered during expansion are emitted with the right
6183 information. It would be better if the diagnostic routines
6184 used the file/line information embedded in the tree nodes rather
6185 than globals. */
6186 if (cfun && EXPR_HAS_LOCATION (exp))
6188 location_t saved_location = input_location;
6189 input_location = EXPR_LOCATION (exp);
6190 emit_line_note (input_location);
6192 /* Record where the insns produced belong. */
6193 record_block_change (TREE_BLOCK (exp));
6195 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6197 input_location = saved_location;
6199 else
6201 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6204 /* If using non-call exceptions, mark all insns that may trap.
6205 expand_call() will mark CALL_INSNs before we get to this code,
6206 but it doesn't handle libcalls, and these may trap. */
6207 if (rn >= 0)
6209 rtx insn;
6210 for (insn = next_real_insn (last); insn;
6211 insn = next_real_insn (insn))
6213 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
6214 /* If we want exceptions for non-call insns, any
6215 may_trap_p instruction may throw. */
6216 && GET_CODE (PATTERN (insn)) != CLOBBER
6217 && GET_CODE (PATTERN (insn)) != USE
6218 && (CALL_P (insn) || may_trap_p (PATTERN (insn))))
6220 REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (rn),
6221 REG_NOTES (insn));
6226 return ret;
6229 static rtx
6230 expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
6231 enum expand_modifier modifier, rtx *alt_rtl)
6233 rtx op0, op1, temp;
6234 tree type = TREE_TYPE (exp);
6235 int unsignedp;
6236 enum machine_mode mode;
6237 enum tree_code code = TREE_CODE (exp);
6238 optab this_optab;
6239 rtx subtarget, original_target;
6240 int ignore;
6241 tree context;
6242 bool reduce_bit_field = false;
6243 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field && !ignore \
6244 ? reduce_to_bit_field_precision ((expr), \
6245 target, \
6246 type) \
6247 : (expr))
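/* Worked example (illustrative): for a 3-bit bit-field type whose mode is
   QImode, GET_MODE_PRECISION (QImode) == 8 exceeds TYPE_PRECISION == 3, so
   (when the reduce_bit_field_operations hook is set, see just below)
   reduce_bit_field becomes true and every arithmetic result is passed
   through REDUCE_BIT_FIELD, i.e. masked or sign-extended back to 3 bits.  */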
6249 mode = TYPE_MODE (type);
6250 unsignedp = TYPE_UNSIGNED (type);
6251 if (lang_hooks.reduce_bit_field_operations
6252 && TREE_CODE (type) == INTEGER_TYPE
6253 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type))
6255 /* An operation in what may be a bit-field type needs the
6256 result to be reduced to the precision of the bit-field type,
6257 which is narrower than that of the type's mode. */
6258 reduce_bit_field = true;
6259 if (modifier == EXPAND_STACK_PARM)
6260 target = 0;
6263 /* Use subtarget as the target for operand 0 of a binary operation. */
6264 subtarget = get_subtarget (target);
6265 original_target = target;
6266 ignore = (target == const0_rtx
6267 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6268 || code == CONVERT_EXPR || code == COND_EXPR
6269 || code == VIEW_CONVERT_EXPR)
6270 && TREE_CODE (type) == VOID_TYPE));
6272 /* If we are going to ignore this result, we need only do something
6273 if there is a side-effect somewhere in the expression. If there
6274 is, short-circuit the most common cases here. Note that we must
6275 not call expand_expr with anything but const0_rtx in case this
6276 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6278 if (ignore)
6280 if (! TREE_SIDE_EFFECTS (exp))
6281 return const0_rtx;
6283 /* Ensure we reference a volatile object even if value is ignored, but
6284 don't do this if all we are doing is taking its address. */
6285 if (TREE_THIS_VOLATILE (exp)
6286 && TREE_CODE (exp) != FUNCTION_DECL
6287 && mode != VOIDmode && mode != BLKmode
6288 && modifier != EXPAND_CONST_ADDRESS)
6290 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6291 if (MEM_P (temp))
6292 temp = copy_to_reg (temp);
6293 return const0_rtx;
6296 if (TREE_CODE_CLASS (code) == tcc_unary
6297 || code == COMPONENT_REF || code == INDIRECT_REF)
6298 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6299 modifier);
6301 else if (TREE_CODE_CLASS (code) == tcc_binary
6302 || TREE_CODE_CLASS (code) == tcc_comparison
6303 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6305 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6306 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6307 return const0_rtx;
6309 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6310 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6311 /* If the second operand has no side effects, just evaluate
6312 the first. */
6313 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6314 modifier);
6315 else if (code == BIT_FIELD_REF)
6317 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6318 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6319 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6320 return const0_rtx;
6323 target = 0;
6326 /* If we will do cse, generate all results into pseudo registers
6327 since 1) that allows cse to find more things
6328 and 2) otherwise cse could produce an insn the machine
6329 cannot support. An exception is a CONSTRUCTOR into a multi-word
6330 MEM: that's much more likely to be most efficient into the MEM.
6331 Another is a CALL_EXPR which must return in memory. */
6333 if (! cse_not_expected && mode != BLKmode && target
6334 && (!REG_P (target) || REGNO (target) < FIRST_PSEUDO_REGISTER)
6335 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
6336 && ! (code == CALL_EXPR && aggregate_value_p (exp, exp)))
6337 target = 0;
6339 switch (code)
6341 case LABEL_DECL:
6343 tree function = decl_function_context (exp);
6345 temp = label_rtx (exp);
6346 temp = gen_rtx_LABEL_REF (Pmode, temp);
6348 if (function != current_function_decl
6349 && function != 0)
6350 LABEL_REF_NONLOCAL_P (temp) = 1;
6352 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
6353 return temp;
6356 case SSA_NAME:
6357 return expand_expr_real_1 (SSA_NAME_VAR (exp), target, tmode, modifier,
6358 NULL);
6360 case PARM_DECL:
6361 case VAR_DECL:
6362 /* If a static var's type was incomplete when the decl was written,
6363 but the type is complete now, lay out the decl now. */
6364 if (DECL_SIZE (exp) == 0
6365 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
6366 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6367 layout_decl (exp, 0);
6369 /* ... fall through ... */
6371 case FUNCTION_DECL:
6372 case RESULT_DECL:
6373 gcc_assert (DECL_RTL (exp));
6375 /* Ensure the variable is marked as used even if it doesn't go through
6376 a parser. If it hasn't been used yet, write out an external
6377 definition. */
6378 if (! TREE_USED (exp))
6380 assemble_external (exp);
6381 TREE_USED (exp) = 1;
6384 /* Show we haven't gotten RTL for this yet. */
6385 temp = 0;
6387 /* Variables inherited from containing functions should have
6388 been lowered by this point. */
6389 context = decl_function_context (exp);
6390 gcc_assert (!context
6391 || context == current_function_decl
6392 || TREE_STATIC (exp)
6393 /* ??? C++ creates functions that are not TREE_STATIC. */
6394 || TREE_CODE (exp) == FUNCTION_DECL);
6396 /* This is the case of an array whose size is to be determined
6397 from its initializer, while the initializer is still being parsed.
6398 See expand_decl. */
6400 if (MEM_P (DECL_RTL (exp))
6401 && REG_P (XEXP (DECL_RTL (exp), 0)))
6402 temp = validize_mem (DECL_RTL (exp));
6404 /* If DECL_RTL is memory, we are in the normal case; if the address
6405 is not valid, or it is not a register and -fforce-addr is
6406 specified, get the address into a register. */
6408 else if (MEM_P (DECL_RTL (exp))
6409 && modifier != EXPAND_CONST_ADDRESS
6410 && modifier != EXPAND_SUM
6411 && modifier != EXPAND_INITIALIZER
6412 && (! memory_address_p (DECL_MODE (exp),
6413 XEXP (DECL_RTL (exp), 0))
6414 || (flag_force_addr
6415 && !REG_P (XEXP (DECL_RTL (exp), 0)))))
6417 if (alt_rtl)
6418 *alt_rtl = DECL_RTL (exp);
6419 temp = replace_equiv_address (DECL_RTL (exp),
6420 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6423 /* If we got something, return it. But first, set the alignment
6424 if the address is a register. */
6425 if (temp != 0)
6427 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
6428 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6430 return temp;
6433 /* If the mode of DECL_RTL does not match that of the decl, it
6434 must be a promoted value. We return a SUBREG of the wanted mode,
6435 but mark it so that we know that it was already extended. */
6437 if (REG_P (DECL_RTL (exp))
6438 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6440 enum machine_mode pmode;
6442 /* Get the signedness used for this variable. Ensure we get the
6443 same mode we got when the variable was declared. */
6444 pmode = promote_mode (type, DECL_MODE (exp), &unsignedp,
6445 (TREE_CODE (exp) == RESULT_DECL ? 1 : 0));
6446 gcc_assert (GET_MODE (DECL_RTL (exp)) == pmode);
6448 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6449 SUBREG_PROMOTED_VAR_P (temp) = 1;
6450 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6451 return temp;
6454 return DECL_RTL (exp);
6456 case INTEGER_CST:
6457 temp = immed_double_const (TREE_INT_CST_LOW (exp),
6458 TREE_INT_CST_HIGH (exp), mode);
6460 /* ??? If overflow is set, fold will have done an incomplete job,
6461 which can result in (plus xx (const_int 0)), which can get
6462 simplified by validate_replace_rtx during virtual register
6463 instantiation, which can result in unrecognizable insns.
6464 Avoid this by forcing all overflows into registers. */
6465 if (TREE_CONSTANT_OVERFLOW (exp)
6466 && modifier != EXPAND_INITIALIZER)
6467 temp = force_reg (mode, temp);
6469 return temp;
6471 case VECTOR_CST:
6472 if (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (exp))) == MODE_VECTOR_INT
6473 || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (exp))) == MODE_VECTOR_FLOAT)
6474 return const_vector_from_tree (exp);
6475 else
6476 return expand_expr (build1 (CONSTRUCTOR, TREE_TYPE (exp),
6477 TREE_VECTOR_CST_ELTS (exp)),
6478 ignore ? const0_rtx : target, tmode, modifier);
6480 case CONST_DECL:
6481 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
6483 case REAL_CST:
6484 /* If optimized, generate immediate CONST_DOUBLE
6485 which will be turned into memory by reload if necessary.
6487 We used to force a register so that loop.c could see it. But
6488 this does not allow gen_* patterns to perform optimizations with
6489 the constants. It also produces two insns in cases like "x = 1.0;".
6490 On most machines, floating-point constants are not permitted in
6491 many insns, so we'd end up copying it to a register in any case.
6493 Now, we do the copying in expand_binop, if appropriate. */
6494 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6495 TYPE_MODE (TREE_TYPE (exp)));
6497 case COMPLEX_CST:
6498 /* Handle evaluating a complex constant in a CONCAT target. */
6499 if (original_target && GET_CODE (original_target) == CONCAT)
6501 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6502 rtx rtarg, itarg;
6504 rtarg = XEXP (original_target, 0);
6505 itarg = XEXP (original_target, 1);
6507 /* Move the real and imaginary parts separately. */
6508 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, 0);
6509 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, 0);
6511 if (op0 != rtarg)
6512 emit_move_insn (rtarg, op0);
6513 if (op1 != itarg)
6514 emit_move_insn (itarg, op1);
6516 return original_target;
6519 /* ... fall through ... */
6521 case STRING_CST:
6522 temp = output_constant_def (exp, 1);
6524 /* temp contains a constant address.
6525 On RISC machines where a constant address isn't valid,
6526 make some insns to get that address into a register. */
6527 if (modifier != EXPAND_CONST_ADDRESS
6528 && modifier != EXPAND_INITIALIZER
6529 && modifier != EXPAND_SUM
6530 && (! memory_address_p (mode, XEXP (temp, 0))
6531 || flag_force_addr))
6532 return replace_equiv_address (temp,
6533 copy_rtx (XEXP (temp, 0)));
6534 return temp;
6536 case SAVE_EXPR:
6538 tree val = TREE_OPERAND (exp, 0);
6539 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);
6541 if (!SAVE_EXPR_RESOLVED_P (exp))
6543 /* We can indeed still hit this case, typically via builtin
6544 expanders calling save_expr immediately before expanding
6545 something. Assume this means that we only have to deal
6546 with non-BLKmode values. */
6547 gcc_assert (GET_MODE (ret) != BLKmode);
6549 val = build_decl (VAR_DECL, NULL, TREE_TYPE (exp));
6550 DECL_ARTIFICIAL (val) = 1;
6551 DECL_IGNORED_P (val) = 1;
6552 TREE_OPERAND (exp, 0) = val;
6553 SAVE_EXPR_RESOLVED_P (exp) = 1;
6555 if (!CONSTANT_P (ret))
6556 ret = copy_to_reg (ret);
6557 SET_DECL_RTL (val, ret);
6560 return ret;
6563 case GOTO_EXPR:
6564 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6565 expand_goto (TREE_OPERAND (exp, 0));
6566 else
6567 expand_computed_goto (TREE_OPERAND (exp, 0));
6568 return const0_rtx;
6570 case CONSTRUCTOR:
6571 /* If we don't need the result, just ensure we evaluate any
6572 subexpressions. */
6573 if (ignore)
6575 tree elt;
6577 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6578 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
6580 return const0_rtx;
6583 /* All elts simple constants => refer to a constant in memory. But
6584 if this is a non-BLKmode mode, let it store a field at a time
6585 since that should make a CONST_INT or CONST_DOUBLE when we
6586 fold. Likewise, if we have a target we can use, it is best to
6587 store directly into the target unless the type is large enough
6588 that memcpy will be used. If we are making an initializer and
6589 all operands are constant, put it in memory as well.
6591 FIXME: Avoid trying to fill vector constructors piece-meal.
6592 Output them with output_constant_def below unless we're sure
6593 they're zeros. This should go away when vector initializers
6594 are treated like VECTOR_CST instead of arrays. */
6596 else if ((TREE_STATIC (exp)
6597 && ((mode == BLKmode
6598 && ! (target != 0 && safe_from_p (target, exp, 1)))
6599 || TREE_ADDRESSABLE (exp)
6600 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6601 && (! MOVE_BY_PIECES_P
6602 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6603 TYPE_ALIGN (type)))
6604 && ! mostly_zeros_p (exp))))
6605 || ((modifier == EXPAND_INITIALIZER
6606 || modifier == EXPAND_CONST_ADDRESS)
6607 && TREE_CONSTANT (exp)))
6609 rtx constructor = output_constant_def (exp, 1);
6611 if (modifier != EXPAND_CONST_ADDRESS
6612 && modifier != EXPAND_INITIALIZER
6613 && modifier != EXPAND_SUM)
6614 constructor = validize_mem (constructor);
6616 return constructor;
6618 else
6620 /* Handle calls that pass values in multiple non-contiguous
6621 locations. The Irix 6 ABI has examples of this. */
6622 if (target == 0 || ! safe_from_p (target, exp, 1)
6623 || GET_CODE (target) == PARALLEL
6624 || modifier == EXPAND_STACK_PARM)
6625 target
6626 = assign_temp (build_qualified_type (type,
6627 (TYPE_QUALS (type)
6628 | (TREE_READONLY (exp)
6629 * TYPE_QUAL_CONST))),
6630 0, TREE_ADDRESSABLE (exp), 1);
6632 store_constructor (exp, target, 0, int_expr_size (exp));
6633 return target;
6636 case MISALIGNED_INDIRECT_REF:
6637 case ALIGN_INDIRECT_REF:
6638 case INDIRECT_REF:
6640 tree exp1 = TREE_OPERAND (exp, 0);
6641 tree orig;
6643 if (code == MISALIGNED_INDIRECT_REF
6644 && !targetm.vectorize.misaligned_mem_ok (mode))
6645 abort ();
6647 if (modifier != EXPAND_WRITE)
6649 tree t;
6651 t = fold_read_from_constant_string (exp);
6652 if (t)
6653 return expand_expr (t, target, tmode, modifier);
6656 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6657 op0 = memory_address (mode, op0);
6659 if (code == ALIGN_INDIRECT_REF)
6661 int align = TYPE_ALIGN_UNIT (type);
6662 op0 = gen_rtx_AND (Pmode, op0, GEN_INT (-align));
6663 op0 = memory_address (mode, op0);
6666 temp = gen_rtx_MEM (mode, op0);
6668 orig = REF_ORIGINAL (exp);
6669 if (!orig)
6670 orig = exp;
6671 set_mem_attributes (temp, orig, 0);
6673 return temp;
6676 case ARRAY_REF:
6679 tree array = TREE_OPERAND (exp, 0);
6680 tree low_bound = array_ref_low_bound (exp);
6681 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6682 HOST_WIDE_INT i;
6684 gcc_assert (TREE_CODE (TREE_TYPE (array)) == ARRAY_TYPE);
6686 /* Optimize the special-case of a zero lower bound.
6688 We convert the low_bound to sizetype to avoid some problems
6689 with constant folding. (E.g. suppose the lower bound is 1,
6690 and its mode is QI. Without the conversion, (ARRAY
6691 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6692 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6694 if (! integer_zerop (low_bound))
6695 index = size_diffop (index, convert (sizetype, low_bound));
6697 /* Fold an expression like: "foo"[2].
6698 This is not done in fold so it won't happen inside &.
6699 Don't fold if this is for wide characters since it's too
6700 difficult to do correctly and this is a very rare case. */
6702 if (modifier != EXPAND_CONST_ADDRESS
6703 && modifier != EXPAND_INITIALIZER
6704 && modifier != EXPAND_MEMORY)
6706 tree t = fold_read_from_constant_string (exp);
6708 if (t)
6709 return expand_expr (t, target, tmode, modifier);
6712 /* If this is a constant index into a constant array,
6713 just get the value from the array. Handle both the cases when
6714 we have an explicit constructor and when our operand is a variable
6715 that was declared const. */
6717 if (modifier != EXPAND_CONST_ADDRESS
6718 && modifier != EXPAND_INITIALIZER
6719 && modifier != EXPAND_MEMORY
6720 && TREE_CODE (array) == CONSTRUCTOR
6721 && ! TREE_SIDE_EFFECTS (array)
6722 && TREE_CODE (index) == INTEGER_CST
6723 && 0 > compare_tree_int (index,
6724 list_length (CONSTRUCTOR_ELTS
6725 (TREE_OPERAND (exp, 0)))))
6727 tree elem;
6729 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6730 i = TREE_INT_CST_LOW (index);
6731 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6734 if (elem)
6735 return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
6736 modifier);
6739 else if (optimize >= 1
6740 && modifier != EXPAND_CONST_ADDRESS
6741 && modifier != EXPAND_INITIALIZER
6742 && modifier != EXPAND_MEMORY
6743 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6744 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6745 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
6746 && targetm.binds_local_p (array))
6748 if (TREE_CODE (index) == INTEGER_CST)
6750 tree init = DECL_INITIAL (array);
6752 if (TREE_CODE (init) == CONSTRUCTOR)
6754 tree elem;
6756 for (elem = CONSTRUCTOR_ELTS (init);
6757 (elem
6758 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6759 elem = TREE_CHAIN (elem))
6762 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
6763 return expand_expr (fold (TREE_VALUE (elem)), target,
6764 tmode, modifier);
6766 else if (TREE_CODE (init) == STRING_CST
6767 && 0 > compare_tree_int (index,
6768 TREE_STRING_LENGTH (init)))
6770 tree type = TREE_TYPE (TREE_TYPE (init));
6771 enum machine_mode mode = TYPE_MODE (type);
6773 if (GET_MODE_CLASS (mode) == MODE_INT
6774 && GET_MODE_SIZE (mode) == 1)
6775 return gen_int_mode (TREE_STRING_POINTER (init)
6776 [TREE_INT_CST_LOW (index)], mode);
6781 goto normal_inner_ref;
6783 case COMPONENT_REF:
6784 /* If the operand is a CONSTRUCTOR, we can just extract the
6785 appropriate field if it is present. */
6786 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
6788 tree elt;
6790 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6791 elt = TREE_CHAIN (elt))
6792 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6793 /* We can normally use the value of the field in the
6794 CONSTRUCTOR. However, if this is a bitfield in
6795 an integral mode that we can fit in a HOST_WIDE_INT,
6796 we must mask only the number of bits in the bitfield,
6797 since this is done implicitly by the constructor. If
6798 the bitfield does not meet either of those conditions,
6799 we can't do this optimization. */
6800 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6801 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6802 == MODE_INT)
6803 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6804 <= HOST_BITS_PER_WIDE_INT))))
6806 if (DECL_BIT_FIELD (TREE_PURPOSE (elt))
6807 && modifier == EXPAND_STACK_PARM)
6808 target = 0;
6809 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6810 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6812 HOST_WIDE_INT bitsize
6813 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6814 enum machine_mode imode
6815 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6817 if (TYPE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6819 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6820 op0 = expand_and (imode, op0, op1, target);
6822 else
6824 tree count
6825 = build_int_cst (NULL_TREE,
6826 GET_MODE_BITSIZE (imode) - bitsize);
6828 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6829 target, 0);
6830 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6831 target, 0);
6835 return op0;
6838 goto normal_inner_ref;
6840 case BIT_FIELD_REF:
6841 case ARRAY_RANGE_REF:
6842 normal_inner_ref:
6844 enum machine_mode mode1;
6845 HOST_WIDE_INT bitsize, bitpos;
6846 tree offset;
6847 int volatilep = 0;
6848 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6849 &mode1, &unsignedp, &volatilep);
6850 rtx orig_op0;
6852 /* If we got back the original object, something is wrong. Perhaps
6853 we are evaluating an expression too early. In any event, don't
6854 infinitely recurse. */
6855 gcc_assert (tem != exp);
6857 /* If TEM's type is a union of variable size, pass TARGET to the inner
6858 computation, since it will need a temporary and TARGET is known
6859 to have to do. This occurs in unchecked conversion in Ada. */
6861 orig_op0 = op0
6862 = expand_expr (tem,
6863 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6864 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6865 != INTEGER_CST)
6866 && modifier != EXPAND_STACK_PARM
6867 ? target : NULL_RTX),
6868 VOIDmode,
6869 (modifier == EXPAND_INITIALIZER
6870 || modifier == EXPAND_CONST_ADDRESS
6871 || modifier == EXPAND_STACK_PARM)
6872 ? modifier : EXPAND_NORMAL);
6874 /* If this is a constant, put it into a register if it is a
6875 legitimate constant and OFFSET is 0, and into memory if it isn't. */
6876 if (CONSTANT_P (op0))
6878 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
6879 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
6880 && offset == 0)
6881 op0 = force_reg (mode, op0);
6882 else
6883 op0 = validize_mem (force_const_mem (mode, op0));
6886 /* Otherwise, if this object is not in memory and we either have an
6887 offset or a BLKmode result, put it there. This case can't occur in
6888 C, but can in Ada if we have unchecked conversion of an expression
6889 from a scalar type to an array or record type or for an
6890 ARRAY_RANGE_REF whose type is BLKmode. */
6891 else if (!MEM_P (op0)
6892 && (offset != 0
6893 || (code == ARRAY_RANGE_REF && mode == BLKmode)))
6895 tree nt = build_qualified_type (TREE_TYPE (tem),
6896 (TYPE_QUALS (TREE_TYPE (tem))
6897 | TYPE_QUAL_CONST));
6898 rtx memloc = assign_temp (nt, 1, 1, 1);
6900 emit_move_insn (memloc, op0);
6901 op0 = memloc;
6904 if (offset != 0)
6906 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
6907 EXPAND_SUM);
6909 gcc_assert (MEM_P (op0));
6911 #ifdef POINTERS_EXTEND_UNSIGNED
6912 if (GET_MODE (offset_rtx) != Pmode)
6913 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
6914 #else
6915 if (GET_MODE (offset_rtx) != ptr_mode)
6916 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
6917 #endif
6919 if (GET_MODE (op0) == BLKmode
6920 /* A constant address in OP0 can have VOIDmode; we must
6921 not try to call force_reg in that case. */
6922 && GET_MODE (XEXP (op0, 0)) != VOIDmode
6923 && bitsize != 0
6924 && (bitpos % bitsize) == 0
6925 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
6926 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
6928 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
6929 bitpos = 0;
6932 op0 = offset_address (op0, offset_rtx,
6933 highest_pow2_factor (offset));
6936 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
6937 record its alignment as BIGGEST_ALIGNMENT. */
6938 if (MEM_P (op0) && bitpos == 0 && offset != 0
6939 && is_aligning_offset (offset, tem))
6940 set_mem_align (op0, BIGGEST_ALIGNMENT);
6942 /* Don't forget about volatility even if this is a bitfield. */
6943 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
6945 if (op0 == orig_op0)
6946 op0 = copy_rtx (op0);
6948 MEM_VOLATILE_P (op0) = 1;
6951 /* The following code doesn't handle CONCAT.
6952 Assume only bitpos == 0 can be used for CONCAT, due to
6953 one-element arrays having the same mode as their element. */
6954 if (GET_CODE (op0) == CONCAT)
6956 gcc_assert (bitpos == 0
6957 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)));
6958 return op0;
6961 /* In cases where an aligned union has an unaligned object
6962 as a field, we might be extracting a BLKmode value from
6963 an integer-mode (e.g., SImode) object. Handle this case
6964 by doing the extract into an object as wide as the field
6965 (which we know to be the width of a basic mode), then
6966 storing into memory, and changing the mode to BLKmode. */
6967 if (mode1 == VOIDmode
6968 || REG_P (op0) || GET_CODE (op0) == SUBREG
6969 || (mode1 != BLKmode && ! direct_load[(int) mode1]
6970 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6971 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
6972 && modifier != EXPAND_CONST_ADDRESS
6973 && modifier != EXPAND_INITIALIZER)
6974 /* If the field isn't aligned enough to fetch as a memref,
6975 fetch it as a bit field. */
6976 || (mode1 != BLKmode
6977 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
6978 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
6979 || (MEM_P (op0)
6980 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
6981 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
6982 && ((modifier == EXPAND_CONST_ADDRESS
6983 || modifier == EXPAND_INITIALIZER)
6984 ? STRICT_ALIGNMENT
6985 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
6986 || (bitpos % BITS_PER_UNIT != 0)))
6987 /* If the type and the field are a constant size and the
6988 size of the type isn't the same size as the bitfield,
6989 we must use bitfield operations. */
6990 || (bitsize >= 0
6991 && TYPE_SIZE (TREE_TYPE (exp))
6992 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
6993 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
6994 bitsize)))
6996 enum machine_mode ext_mode = mode;
6998 if (ext_mode == BLKmode
6999 && ! (target != 0 && MEM_P (op0)
7000 && MEM_P (target)
7001 && bitpos % BITS_PER_UNIT == 0))
7002 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7004 if (ext_mode == BLKmode)
7006 if (target == 0)
7007 target = assign_temp (type, 0, 1, 1);
7009 if (bitsize == 0)
7010 return target;
7012 /* In this case, BITPOS must start at a byte boundary and
7013 TARGET, if specified, must be a MEM. */
7014 gcc_assert (MEM_P (op0)
7015 && (!target || MEM_P (target))
7016 && !(bitpos % BITS_PER_UNIT));
7018 emit_block_move (target,
7019 adjust_address (op0, VOIDmode,
7020 bitpos / BITS_PER_UNIT),
7021 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7022 / BITS_PER_UNIT),
7023 (modifier == EXPAND_STACK_PARM
7024 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7026 return target;
7029 op0 = validize_mem (op0);
7031 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
7032 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7034 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7035 (modifier == EXPAND_STACK_PARM
7036 ? NULL_RTX : target),
7037 ext_mode, ext_mode);
7039 /* If the result is a record type and BITSIZE is narrower than
7040 the mode of OP0, an integral mode, and this is a big endian
7041 machine, we must put the field into the high-order bits. */
7042 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7043 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7044 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7045 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7046 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7047 - bitsize),
7048 op0, 1);
7050 /* If the result type is BLKmode, store the data into a temporary
7051 of the appropriate type, but with the mode corresponding to the
7052 mode for the data we have (op0's mode). It's tempting to make
7053 this a constant type, since we know it's only being stored once,
7054 but that can cause problems if we are taking the address of this
7055 COMPONENT_REF because the MEM of any reference via that address
7056 will have flags corresponding to the type, which will not
7057 necessarily be constant. */
7058 if (mode == BLKmode)
7060 rtx new
7061 = assign_stack_temp_for_type
7062 (ext_mode, GET_MODE_BITSIZE (ext_mode), 0, type);
7064 emit_move_insn (new, op0);
7065 op0 = copy_rtx (new);
7066 PUT_MODE (op0, BLKmode);
7067 set_mem_attributes (op0, exp, 1);
7070 return op0;
7073 /* If the result is BLKmode, use that to access the object
7074 now as well. */
7075 if (mode == BLKmode)
7076 mode1 = BLKmode;
7078 /* Get a reference to just this component. */
7079 if (modifier == EXPAND_CONST_ADDRESS
7080 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7081 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7082 else
7083 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7085 if (op0 == orig_op0)
7086 op0 = copy_rtx (op0);
7088 set_mem_attributes (op0, exp, 0);
7089 if (REG_P (XEXP (op0, 0)))
7090 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7092 MEM_VOLATILE_P (op0) |= volatilep;
7093 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7094 || modifier == EXPAND_CONST_ADDRESS
7095 || modifier == EXPAND_INITIALIZER)
7096 return op0;
7097 else if (target == 0)
7098 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7100 convert_move (target, op0, unsignedp);
7101 return target;
7104 case OBJ_TYPE_REF:
7105 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
7107 case CALL_EXPR:
7108 /* Check for a built-in function. */
7109 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7110 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7111 == FUNCTION_DECL)
7112 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7114 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7115 == BUILT_IN_FRONTEND)
7116 return lang_hooks.expand_expr (exp, original_target,
7117 tmode, modifier,
7118 alt_rtl);
7119 else
7120 return expand_builtin (exp, target, subtarget, tmode, ignore);
7123 return expand_call (exp, target, ignore);
7125 case NON_LVALUE_EXPR:
7126 case NOP_EXPR:
7127 case CONVERT_EXPR:
7128 if (TREE_OPERAND (exp, 0) == error_mark_node)
7129 return const0_rtx;
7131 if (TREE_CODE (type) == UNION_TYPE)
7133 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7135 /* If both input and output are BLKmode, this conversion isn't doing
7136 anything except possibly changing memory attributes. */
7137 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7139 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7140 modifier);
7142 result = copy_rtx (result);
7143 set_mem_attributes (result, exp, 0);
7144 return result;
7147 if (target == 0)
7149 if (TYPE_MODE (type) != BLKmode)
7150 target = gen_reg_rtx (TYPE_MODE (type));
7151 else
7152 target = assign_temp (type, 0, 1, 1);
7155 if (MEM_P (target))
7156 /* Store data into beginning of memory target. */
7157 store_expr (TREE_OPERAND (exp, 0),
7158 adjust_address (target, TYPE_MODE (valtype), 0),
7159 modifier == EXPAND_STACK_PARM);
7161 else
7163 gcc_assert (REG_P (target));
7165 /* Store this field into a union of the proper type. */
7166 store_field (target,
7167 MIN ((int_size_in_bytes (TREE_TYPE
7168 (TREE_OPERAND (exp, 0)))
7169 * BITS_PER_UNIT),
7170 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7171 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7172 type, 0);
7175 /* Return the entire union. */
7176 return target;
7179 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7181 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7182 modifier);
7184 /* If the signedness of the conversion differs and OP0 is
7185 a promoted SUBREG, clear that indication since we now
7186 have to do the proper extension. */
7187 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7188 && GET_CODE (op0) == SUBREG)
7189 SUBREG_PROMOTED_VAR_P (op0) = 0;
7191 return REDUCE_BIT_FIELD (op0);
7194 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7195 op0 = REDUCE_BIT_FIELD (op0);
7196 if (GET_MODE (op0) == mode)
7197 return op0;
7199 /* If OP0 is a constant, just convert it into the proper mode. */
7200 if (CONSTANT_P (op0))
7202 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7203 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7205 if (modifier == EXPAND_INITIALIZER)
7206 return simplify_gen_subreg (mode, op0, inner_mode,
7207 subreg_lowpart_offset (mode,
7208 inner_mode));
7209 else
7210 return convert_modes (mode, inner_mode, op0,
7211 TYPE_UNSIGNED (inner_type));
7214 if (modifier == EXPAND_INITIALIZER)
7215 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7217 if (target == 0)
7218 return
7219 convert_to_mode (mode, op0,
7220 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7221 else
7222 convert_move (target, op0,
7223 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7224 return target;
7226 case VIEW_CONVERT_EXPR:
7227 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7229 /* If the input and output modes are both the same, we are done.
7230 Otherwise, if neither mode is BLKmode and both are integral and within
7231 a word, we can use gen_lowpart. If neither is true, make sure the
7232 operand is in memory and convert the MEM to the new mode. */
7233 if (TYPE_MODE (type) == GET_MODE (op0))
7235 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7236 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7237 && GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT
7238 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7239 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7240 op0 = gen_lowpart (TYPE_MODE (type), op0);
7241 else if (!MEM_P (op0))
7243 /* If the operand is not a MEM, force it into memory. Since we
7244 are going to be changing the mode of the MEM, don't call
7245 force_const_mem for constants because we don't allow pool
7246 constants to change mode. */
7247 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7249 gcc_assert (!TREE_ADDRESSABLE (exp));
7251 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7252 target
7253 = assign_stack_temp_for_type
7254 (TYPE_MODE (inner_type),
7255 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7257 emit_move_insn (target, op0);
7258 op0 = target;
7261 /* At this point, OP0 is in the correct mode. If the output type is such
7262 that the operand is known to be aligned, indicate that it is.
7263 Otherwise, we need only be concerned about alignment for non-BLKmode
7264 results. */
7265 if (MEM_P (op0))
7267 op0 = copy_rtx (op0);
7269 if (TYPE_ALIGN_OK (type))
7270 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7271 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7272 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7274 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7275 HOST_WIDE_INT temp_size
7276 = MAX (int_size_in_bytes (inner_type),
7277 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7278 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7279 temp_size, 0, type);
7280 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7282 gcc_assert (!TREE_ADDRESSABLE (exp));
7284 if (GET_MODE (op0) == BLKmode)
7285 emit_block_move (new_with_op0_mode, op0,
7286 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
7287 (modifier == EXPAND_STACK_PARM
7288 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7289 else
7290 emit_move_insn (new_with_op0_mode, op0);
7292 op0 = new;
7295 op0 = adjust_address (op0, TYPE_MODE (type), 0);
7298 return op0;
7300 case PLUS_EXPR:
7301 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
7302 something else, make sure we add the register to the constant and
7303 then to the other thing. This case can occur during strength
7304 reduction and doing it this way will produce better code if the
7305 frame pointer or argument pointer is eliminated.
7307 fold-const.c will ensure that the constant is always in the inner
7308 PLUS_EXPR, so the only case we need to do anything about is if
7309 sp, ap, or fp is our second argument, in which case we must swap
7310 the innermost first argument and our second argument. */
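/* Illustrative sketch (hypothetical operands): for (a + 4) + fp the swap
   below yields (fp + 4) + a, so the register and the constant are combined
   first and can fold into a single offset once the frame pointer is
   eliminated.  */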
7312 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7313 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7314 && TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL
7315 && (DECL_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7316 || DECL_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7317 || DECL_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7319 tree t = TREE_OPERAND (exp, 1);
7321 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7322 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7325 /* If the result is to be ptr_mode and we are adding an integer to
7326 something, we might be forming a constant. So try to use
7327 plus_constant. If it produces a sum and we can't accept it,
7328 use force_operand. This allows P = &ARR[const] to generate
7329 efficient code on machines where a SYMBOL_REF is not a valid
7330 address.
7332 If this is an EXPAND_SUM call, always return the sum. */
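/* Illustrative RTL sketch (assuming a 4-byte element type): for
   P = &ARR[3], plus_constant can fold the sum into
   (const (plus (symbol_ref "ARR") (const_int 12))); only if that form is
   not acceptable here does force_operand emit an explicit add.  */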
7333 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7334 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7336 if (modifier == EXPAND_STACK_PARM)
7337 target = 0;
7338 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7339 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7340 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7342 rtx constant_part;
7344 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7345 EXPAND_SUM);
7346 /* Use immed_double_const to ensure that the constant is
7347 truncated according to the mode of OP1, then sign extended
7348 to a HOST_WIDE_INT. Using the constant directly can result
7349 in non-canonical RTL in a 64x32 cross compile. */
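/* Illustrative case (hypothetical values): on a 64-bit host compiling for
   a 32-bit target, the SImode constant 0x80000000 must be represented as
   (const_int -2147483648), i.e. sign-extended to the host wide int;
   immed_double_const performs exactly that canonicalization.  */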
7350 constant_part
7351 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7352 (HOST_WIDE_INT) 0,
7353 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7354 op1 = plus_constant (op1, INTVAL (constant_part));
7355 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7356 op1 = force_operand (op1, target);
7357 return REDUCE_BIT_FIELD (op1);
7360 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7361 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7362 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7364 rtx constant_part;
7366 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7367 (modifier == EXPAND_INITIALIZER
7368 ? EXPAND_INITIALIZER : EXPAND_SUM));
7369 if (! CONSTANT_P (op0))
7371 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7372 VOIDmode, modifier);
7373 /* Return a PLUS if modifier says it's OK. */
7374 if (modifier == EXPAND_SUM
7375 || modifier == EXPAND_INITIALIZER)
7376 return simplify_gen_binary (PLUS, mode, op0, op1);
7377 goto binop2;
7379 /* Use immed_double_const to ensure that the constant is
7380 truncated according to the mode of OP0, then sign extended
7381 to a HOST_WIDE_INT. Using the constant directly can result
7382 in non-canonical RTL in a 64x32 cross compile. */
7383 constant_part
7384 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7385 (HOST_WIDE_INT) 0,
7386 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7387 op0 = plus_constant (op0, INTVAL (constant_part));
7388 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7389 op0 = force_operand (op0, target);
7390 return REDUCE_BIT_FIELD (op0);
7394 /* No sense saving up arithmetic to be done
7395 if it's all in the wrong mode to form part of an address.
7396 And force_operand won't know whether to sign-extend or
7397 zero-extend. */
7398 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7399 || mode != ptr_mode)
7401 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7402 subtarget, &op0, &op1, 0);
7403 if (op0 == const0_rtx)
7404 return op1;
7405 if (op1 == const0_rtx)
7406 return op0;
7407 goto binop2;
7410 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7411 subtarget, &op0, &op1, modifier);
7412 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
7414 case MINUS_EXPR:
7415 /* For initializers, we are allowed to return a MINUS of two
7416 symbolic constants. Here we handle all cases when both operands
7417 are constant. */
7418 /* Handle difference of two symbolic constants,
7419 for the sake of an initializer. */
7420 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7421 && really_constant_p (TREE_OPERAND (exp, 0))
7422 && really_constant_p (TREE_OPERAND (exp, 1)))
7424 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7425 NULL_RTX, &op0, &op1, modifier);
7427 /* If the last operand is a CONST_INT, use plus_constant of
7428 the negated constant. Else make the MINUS. */
7429 if (GET_CODE (op1) == CONST_INT)
7430 return REDUCE_BIT_FIELD (plus_constant (op0, - INTVAL (op1)));
7431 else
7432 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
7435 /* No sense saving up arithmetic to be done
7436 if it's all in the wrong mode to form part of an address.
7437 And force_operand won't know whether to sign-extend or
7438 zero-extend. */
7439 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7440 || mode != ptr_mode)
7441 goto binop;
7443 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7444 subtarget, &op0, &op1, modifier);
7446 /* Convert A - const to A + (-const). */
7447 if (GET_CODE (op1) == CONST_INT)
7449 op1 = negate_rtx (mode, op1);
7450 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
7453 goto binop2;
7455 case MULT_EXPR:
7456 /* If first operand is constant, swap them.
7457 Thus the following special case checks need only
7458 check the second operand. */
7459 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7461 tree t1 = TREE_OPERAND (exp, 0);
7462 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7463 TREE_OPERAND (exp, 1) = t1;
7466 /* Attempt to return something suitable for generating an
7467 indexed address, for machines that support that. */
7469 if (modifier == EXPAND_SUM && mode == ptr_mode
7470 && host_integerp (TREE_OPERAND (exp, 1), 0))
7472 tree exp1 = TREE_OPERAND (exp, 1);
7474 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7475 EXPAND_SUM);
7477 if (!REG_P (op0))
7478 op0 = force_operand (op0, NULL_RTX);
7479 if (!REG_P (op0))
7480 op0 = copy_to_mode_reg (mode, op0);
7482 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
7483 gen_int_mode (tree_low_cst (exp1, 0),
7484 TYPE_MODE (TREE_TYPE (exp1)))));
7487 if (modifier == EXPAND_STACK_PARM)
7488 target = 0;
7490 /* Check for multiplying things that have been extended
7491 from a narrower type. If this machine supports multiplying
7492 in that narrower type with a result in the desired type,
7493 do it that way, and avoid the explicit type-conversion. */
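/* Illustrative case (hypothetical types): with 32-bit int and 64-bit long
   long, (long long) a * (long long) b where A and B are ints can be done
   with a single 32x32->64 widening multiply (e.g. a mulsidi3 pattern)
   instead of extending both operands and multiplying in DImode.  */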
7494 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7495 && TREE_CODE (type) == INTEGER_TYPE
7496 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7497 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7498 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7499 && int_fits_type_p (TREE_OPERAND (exp, 1),
7500 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7501 /* Don't use a widening multiply if a shift will do. */
7502 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7503 > HOST_BITS_PER_WIDE_INT)
7504 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7506 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7507 && (TYPE_PRECISION (TREE_TYPE
7508 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7509 == TYPE_PRECISION (TREE_TYPE
7510 (TREE_OPERAND
7511 (TREE_OPERAND (exp, 0), 0))))
7512 /* If both operands are extended, they must either both
7513 be zero-extended or both be sign-extended. */
7514 && (TYPE_UNSIGNED (TREE_TYPE
7515 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7516 == TYPE_UNSIGNED (TREE_TYPE
7517 (TREE_OPERAND
7518 (TREE_OPERAND (exp, 0), 0)))))))
7520 tree op0type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
7521 enum machine_mode innermode = TYPE_MODE (op0type);
7522 bool zextend_p = TYPE_UNSIGNED (op0type);
7523 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
7524 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
7526 if (mode == GET_MODE_WIDER_MODE (innermode))
7528 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7530 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7531 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7532 TREE_OPERAND (exp, 1),
7533 NULL_RTX, &op0, &op1, 0);
7534 else
7535 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7536 TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7537 NULL_RTX, &op0, &op1, 0);
7538 goto binop3;
7540 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7541 && innermode == word_mode)
7543 rtx htem, hipart;
7544 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7545 NULL_RTX, VOIDmode, 0);
7546 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7547 op1 = convert_modes (innermode, mode,
7548 expand_expr (TREE_OPERAND (exp, 1),
7549 NULL_RTX, VOIDmode, 0),
7550 unsignedp);
7551 else
7552 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7553 NULL_RTX, VOIDmode, 0);
7554 temp = expand_binop (mode, other_optab, op0, op1, target,
7555 unsignedp, OPTAB_LIB_WIDEN);
7556 hipart = gen_highpart (innermode, temp);
7557 htem = expand_mult_highpart_adjust (innermode, hipart,
7558 op0, op1, hipart,
7559 zextend_p);
7560 if (htem != hipart)
7561 emit_move_insn (hipart, htem);
7562 return REDUCE_BIT_FIELD (temp);
7566 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7567 subtarget, &op0, &op1, 0);
7568 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
7570 case TRUNC_DIV_EXPR:
7571 case FLOOR_DIV_EXPR:
7572 case CEIL_DIV_EXPR:
7573 case ROUND_DIV_EXPR:
7574 case EXACT_DIV_EXPR:
7575 if (modifier == EXPAND_STACK_PARM)
7576 target = 0;
7577 /* Possible optimization: compute the dividend with EXPAND_SUM;
7578 then, if the divisor is constant, we can optimize the case
7579 where some terms of the dividend have coefficients divisible by it. */
7580 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7581 subtarget, &op0, &op1, 0);
7582 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7584 case RDIV_EXPR:
7585 /* Emit a/b as a*(1/b). Later we may manage to CSE the reciprocal,
7586 saving an expensive divide. If not, combine will rebuild the
7587 original computation. */
7588 if (flag_unsafe_math_optimizations && optimize && !optimize_size
7589 && TREE_CODE (type) == REAL_TYPE
7590 && !real_onep (TREE_OPERAND (exp, 0)))
7591 return expand_expr (build2 (MULT_EXPR, type, TREE_OPERAND (exp, 0),
7592 build2 (RDIV_EXPR, type,
7593 build_real (type, dconst1),
7594 TREE_OPERAND (exp, 1))),
7595 target, tmode, modifier);
7597 goto binop;
7599 case TRUNC_MOD_EXPR:
7600 case FLOOR_MOD_EXPR:
7601 case CEIL_MOD_EXPR:
7602 case ROUND_MOD_EXPR:
7603 if (modifier == EXPAND_STACK_PARM)
7604 target = 0;
7605 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7606 subtarget, &op0, &op1, 0);
7607 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7609 case FIX_ROUND_EXPR:
7610 case FIX_FLOOR_EXPR:
7611 case FIX_CEIL_EXPR:
7612 gcc_unreachable (); /* Not used for C. */
7614 case FIX_TRUNC_EXPR:
7615 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7616 if (target == 0 || modifier == EXPAND_STACK_PARM)
7617 target = gen_reg_rtx (mode);
7618 expand_fix (target, op0, unsignedp);
7619 return target;
7621 case FLOAT_EXPR:
7622 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7623 if (target == 0 || modifier == EXPAND_STACK_PARM)
7624 target = gen_reg_rtx (mode);
7625 /* expand_float can't figure out what to do if FROM has VOIDmode.
7626 So give it the correct mode. With -O, cse will optimize this. */
7627 if (GET_MODE (op0) == VOIDmode)
7628 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7629 op0);
7630 expand_float (target, op0,
7631 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7632 return target;
7634 case NEGATE_EXPR:
7635 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7636 if (modifier == EXPAND_STACK_PARM)
7637 target = 0;
7638 temp = expand_unop (mode,
7639 optab_for_tree_code (NEGATE_EXPR, type),
7640 op0, target, 0);
7641 gcc_assert (temp);
7642 return REDUCE_BIT_FIELD (temp);
7644 case ABS_EXPR:
7645 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7646 if (modifier == EXPAND_STACK_PARM)
7647 target = 0;
7649 /* ABS_EXPR is not valid for complex arguments. */
7650 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7651 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
7653 /* Unsigned abs is simply the operand. Testing here means we don't
7654 risk generating incorrect code below. */
7655 if (TYPE_UNSIGNED (type))
7656 return op0;
7658 return expand_abs (mode, op0, target, unsignedp,
7659 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7661 case MAX_EXPR:
7662 case MIN_EXPR:
7663 target = original_target;
7664 if (target == 0
7665 || modifier == EXPAND_STACK_PARM
7666 || (MEM_P (target) && MEM_VOLATILE_P (target))
7667 || GET_MODE (target) != mode
7668 || (REG_P (target)
7669 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7670 target = gen_reg_rtx (mode);
7671 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7672 target, &op0, &op1, 0);
7674 /* First try to do it with a special MIN or MAX instruction.
7675 If that does not win, use a conditional jump to select the proper
7676 value. */
7677 this_optab = optab_for_tree_code (code, type);
7678 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7679 OPTAB_WIDEN);
7680 if (temp != 0)
7681 return temp;
7683 /* At this point, a MEM target is no longer useful; we will get better
7684 code without it. */
7686 if (MEM_P (target))
7687 target = gen_reg_rtx (mode);
7689 /* If op1 was placed in target, swap op0 and op1. */
7690 if (target != op0 && target == op1)
7692 rtx tem = op0;
7693 op0 = op1;
7694 op1 = tem;
7697 if (target != op0)
7698 emit_move_insn (target, op0);
7700 op0 = gen_label_rtx ();
7702 /* If this mode is an integer too wide to compare properly,
7703 compare word by word. Rely on cse to optimize constant cases. */
7704 if (GET_MODE_CLASS (mode) == MODE_INT
7705 && ! can_compare_p (GE, mode, ccp_jump))
7707 if (code == MAX_EXPR)
7708 do_jump_by_parts_greater_rtx (mode, unsignedp, target, op1,
7709 NULL_RTX, op0);
7710 else
7711 do_jump_by_parts_greater_rtx (mode, unsignedp, op1, target,
7712 NULL_RTX, op0);
7714 else
7716 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
7717 unsignedp, mode, NULL_RTX, NULL_RTX, op0);
7719 emit_move_insn (target, op1);
7720 emit_label (op0);
7721 return target;
7723 case BIT_NOT_EXPR:
7724 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7725 if (modifier == EXPAND_STACK_PARM)
7726 target = 0;
7727 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7728 gcc_assert (temp);
7729 return temp;
7731 /* ??? Can optimize bitwise operations with one arg constant.
7732 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7733 and (a bitwise1 b) bitwise2 b (etc)
7734 but that is probably not worth while. */
7736 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7737 boolean values when we want in all cases to compute both of them. In
7738 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7739 as actual zero-or-1 values and then bitwise anding. In cases where
7740 there cannot be any side effects, better code would be made by
7741 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7742 how to recognize those cases. */
7744 case TRUTH_AND_EXPR:
7745 code = BIT_AND_EXPR;
7746 case BIT_AND_EXPR:
7747 goto binop;
7749 case TRUTH_OR_EXPR:
7750 code = BIT_IOR_EXPR;
7751 case BIT_IOR_EXPR:
7752 goto binop;
7754 case TRUTH_XOR_EXPR:
7755 code = BIT_XOR_EXPR;
7756 case BIT_XOR_EXPR:
7757 goto binop;
7759 case LSHIFT_EXPR:
7760 case RSHIFT_EXPR:
7761 case LROTATE_EXPR:
7762 case RROTATE_EXPR:
7763 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7764 subtarget = 0;
7765 if (modifier == EXPAND_STACK_PARM)
7766 target = 0;
7767 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7768 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
7769 unsignedp);
7771 /* Could determine the answer when only additive constants differ. Also,
7772 the addition of one can be handled by changing the condition. */
7773 case LT_EXPR:
7774 case LE_EXPR:
7775 case GT_EXPR:
7776 case GE_EXPR:
7777 case EQ_EXPR:
7778 case NE_EXPR:
7779 case UNORDERED_EXPR:
7780 case ORDERED_EXPR:
7781 case UNLT_EXPR:
7782 case UNLE_EXPR:
7783 case UNGT_EXPR:
7784 case UNGE_EXPR:
7785 case UNEQ_EXPR:
7786 case LTGT_EXPR:
7787 temp = do_store_flag (exp,
7788 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
7789 tmode != VOIDmode ? tmode : mode, 0);
7790 if (temp != 0)
7791 return temp;
7793 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
7794 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
7795 && original_target
7796 && REG_P (original_target)
7797 && (GET_MODE (original_target)
7798 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7800 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
7801 VOIDmode, 0);
7803 /* If temp is constant, we can just compute the result. */
7804 if (GET_CODE (temp) == CONST_INT)
7806 if (INTVAL (temp) != 0)
7807 emit_move_insn (target, const1_rtx);
7808 else
7809 emit_move_insn (target, const0_rtx);
7811 return target;
7814 if (temp != original_target)
7816 enum machine_mode mode1 = GET_MODE (temp);
7817 if (mode1 == VOIDmode)
7818 mode1 = tmode != VOIDmode ? tmode : mode;
7820 temp = copy_to_mode_reg (mode1, temp);
7823 op1 = gen_label_rtx ();
7824 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
7825 GET_MODE (temp), unsignedp, op1);
7826 emit_move_insn (temp, const1_rtx);
7827 emit_label (op1);
7828 return temp;
7831 /* If no set-flag instruction, must generate a conditional store
7832 into a temporary variable. Drop through and handle this
7833 like && and ||. */
7835 if (! ignore
7836 && (target == 0
7837 || modifier == EXPAND_STACK_PARM
7838 || ! safe_from_p (target, exp, 1)
7839 /* Make sure we don't have a hard reg (such as function's return
7840 value) live across basic blocks, if not optimizing. */
7841 || (!optimize && REG_P (target)
7842 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
7843 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7845 if (target)
7846 emit_move_insn (target, const0_rtx);
7848 op1 = gen_label_rtx ();
7849 jumpifnot (exp, op1);
7851 if (target)
7852 emit_move_insn (target, const1_rtx);
7854 emit_label (op1);
7855 return ignore ? const0_rtx : target;
7857 case TRUTH_NOT_EXPR:
7858 if (modifier == EXPAND_STACK_PARM)
7859 target = 0;
7860 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7861 /* The parser is careful to generate TRUTH_NOT_EXPR
7862 only with operands that are always zero or one. */
7863 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
7864 target, 1, OPTAB_LIB_WIDEN);
7865 gcc_assert (temp);
7866 return temp;
7868 case STATEMENT_LIST:
7870 tree_stmt_iterator iter;
7872 gcc_assert (ignore);
7874 for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
7875 expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
7877 return const0_rtx;
7879 case COND_EXPR:
7880 /* If it's void, we don't need to worry about computing a value. */
7881 if (VOID_TYPE_P (TREE_TYPE (exp)))
7883 tree pred = TREE_OPERAND (exp, 0);
7884 tree then_ = TREE_OPERAND (exp, 1);
7885 tree else_ = TREE_OPERAND (exp, 2);
7887 gcc_assert (TREE_CODE (then_) == GOTO_EXPR
7888 && TREE_CODE (GOTO_DESTINATION (then_)) == LABEL_DECL
7889 && TREE_CODE (else_) == GOTO_EXPR
7890 && TREE_CODE (GOTO_DESTINATION (else_)) == LABEL_DECL);
7892 jumpif (pred, label_rtx (GOTO_DESTINATION (then_)));
7893 return expand_expr (else_, const0_rtx, VOIDmode, 0);
7896 /* Note that COND_EXPRs whose type is a structure or union
7897 are required to be constructed to contain assignments of
7898 a temporary variable, so that we can evaluate them here
7899 for side effect only. If type is void, we must do likewise. */
7901 gcc_assert (!TREE_ADDRESSABLE (type)
7902 && !ignore
7903 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node
7904 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node);
7906 /* If we are not to produce a result, we have no target. Otherwise,
7907 if a target was specified use it; it will not be used as an
7908 intermediate target unless it is safe. If no target, use a
7909 temporary. */
7911 if (modifier != EXPAND_STACK_PARM
7912 && original_target
7913 && safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
7914 && GET_MODE (original_target) == mode
7915 #ifdef HAVE_conditional_move
7916 && (! can_conditionally_move_p (mode)
7917 || REG_P (original_target))
7918 #endif
7919 && !MEM_P (original_target))
7920 temp = original_target;
7921 else
7922 temp = assign_temp (type, 0, 0, 1);
7924 do_pending_stack_adjust ();
7925 NO_DEFER_POP;
7926 op0 = gen_label_rtx ();
7927 op1 = gen_label_rtx ();
7928 jumpifnot (TREE_OPERAND (exp, 0), op0);
7929 store_expr (TREE_OPERAND (exp, 1), temp,
7930 modifier == EXPAND_STACK_PARM);
7932 emit_jump_insn (gen_jump (op1));
7933 emit_barrier ();
7934 emit_label (op0);
7935 store_expr (TREE_OPERAND (exp, 2), temp,
7936 modifier == EXPAND_STACK_PARM);
7938 emit_label (op1);
7939 OK_DEFER_POP;
7940 return temp;
7942 case VEC_COND_EXPR:
7943 target = expand_vec_cond_expr (exp, target);
7944 return target;
7946 case MODIFY_EXPR:
7948 tree lhs = TREE_OPERAND (exp, 0);
7949 tree rhs = TREE_OPERAND (exp, 1);
7951 gcc_assert (ignore);
7953 /* Check for |= or &= of a bitfield of size one into another bitfield
7954 of size 1. In this case, (unless we need the result of the
7955 assignment) we can do this more efficiently with a
7956 test followed by an assignment, if necessary.
7958 ??? At this point we cannot get a BIT_FIELD_REF. But if that
7959 ever changes, this code should be enhanced to
7960 support it. */
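/* Illustrative source-level sketch (hypothetical struct): given
   struct s { unsigned a : 1, b : 1; } x;  the statement  x.a |= x.b;
   is emitted below as  if (x.b) x.a = 1;  and  x.a &= x.b;  as
   if (!x.b) x.a = 0;  avoiding a read-modify-write of the destination.  */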
7961 if (TREE_CODE (lhs) == COMPONENT_REF
7962 && (TREE_CODE (rhs) == BIT_IOR_EXPR
7963 || TREE_CODE (rhs) == BIT_AND_EXPR)
7964 && TREE_OPERAND (rhs, 0) == lhs
7965 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
7966 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
7967 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
7969 rtx label = gen_label_rtx ();
7971 do_jump (TREE_OPERAND (rhs, 1),
7972 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
7973 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
7974 expand_assignment (lhs, convert (TREE_TYPE (rhs),
7975 (TREE_CODE (rhs) == BIT_IOR_EXPR
7976 ? integer_one_node
7977 : integer_zero_node)));
7978 do_pending_stack_adjust ();
7979 emit_label (label);
7980 return const0_rtx;
7983 expand_assignment (lhs, rhs);
7985 return const0_rtx;
7988 case RETURN_EXPR:
7989 if (!TREE_OPERAND (exp, 0))
7990 expand_null_return ();
7991 else
7992 expand_return (TREE_OPERAND (exp, 0));
7993 return const0_rtx;
7995 case ADDR_EXPR:
7996 return expand_expr_addr_expr (exp, target, tmode, modifier);
7998 /* COMPLEX type for Extended Pascal & Fortran */
7999 case COMPLEX_EXPR:
8001 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8002 rtx insns;
8004 /* Get the rtx code of the operands. */
8005 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8006 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8008 if (! target)
8009 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8011 start_sequence ();
8013 /* Move the real (op0) and imaginary (op1) parts to their location. */
8014 emit_move_insn (gen_realpart (mode, target), op0);
8015 emit_move_insn (gen_imagpart (mode, target), op1);
8017 insns = get_insns ();
8018 end_sequence ();
8020 /* Complex construction should appear as a single unit. */
8021 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8022 each with a separate pseudo as destination.
8023 It's not correct for flow to treat them as a unit. */
8024 if (GET_CODE (target) != CONCAT)
8025 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8026 else
8027 emit_insn (insns);
8029 return target;
8032 case REALPART_EXPR:
8033 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8034 return gen_realpart (mode, op0);
8036 case IMAGPART_EXPR:
8037 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8038 return gen_imagpart (mode, op0);
8040 case RESX_EXPR:
8041 expand_resx_expr (exp);
8042 return const0_rtx;
8044 case TRY_CATCH_EXPR:
8045 case CATCH_EXPR:
8046 case EH_FILTER_EXPR:
8047 case TRY_FINALLY_EXPR:
8048 /* Lowered by tree-eh.c. */
8049 gcc_unreachable ();
8051 case WITH_CLEANUP_EXPR:
8052 case CLEANUP_POINT_EXPR:
8053 case TARGET_EXPR:
8054 case CASE_LABEL_EXPR:
8055 case VA_ARG_EXPR:
8056 case BIND_EXPR:
8057 case INIT_EXPR:
8058 case CONJ_EXPR:
8059 case COMPOUND_EXPR:
8060 case PREINCREMENT_EXPR:
8061 case PREDECREMENT_EXPR:
8062 case POSTINCREMENT_EXPR:
8063 case POSTDECREMENT_EXPR:
8064 case LOOP_EXPR:
8065 case EXIT_EXPR:
8066 case LABELED_BLOCK_EXPR:
8067 case EXIT_BLOCK_EXPR:
8068 case TRUTH_ANDIF_EXPR:
8069 case TRUTH_ORIF_EXPR:
8070 /* Lowered by gimplify.c. */
8071 gcc_unreachable ();
8073 case EXC_PTR_EXPR:
8074 return get_exception_pointer (cfun);
8076 case FILTER_EXPR:
8077 return get_exception_filter (cfun);
8079 case FDESC_EXPR:
8080 /* Function descriptors are not valid except as
8081 initialization constants, and should not be expanded. */
8082 gcc_unreachable ();
8084 case SWITCH_EXPR:
8085 expand_case (exp);
8086 return const0_rtx;
8088 case LABEL_EXPR:
8089 expand_label (TREE_OPERAND (exp, 0));
8090 return const0_rtx;
8092 case ASM_EXPR:
8093 expand_asm_expr (exp);
8094 return const0_rtx;
8096 case WITH_SIZE_EXPR:
8097 /* WITH_SIZE_EXPR expands to its first argument. The caller should
8098 have pulled out the size to use in whatever context it needed. */
8099 return expand_expr_real (TREE_OPERAND (exp, 0), original_target, tmode,
8100 modifier, alt_rtl);
8102 case REALIGN_LOAD_EXPR:
8104 tree oprnd0 = TREE_OPERAND (exp, 0);
8105 tree oprnd1 = TREE_OPERAND (exp, 1);
8106 tree oprnd2 = TREE_OPERAND (exp, 2);
8107 rtx op2;
8109 this_optab = optab_for_tree_code (code, type);
8110 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, 0);
8111 op2 = expand_expr (oprnd2, NULL_RTX, VOIDmode, 0);
8112 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
8113 target, unsignedp);
8114 gcc_assert (temp);
8116 return temp;
8120 default:
8121 return lang_hooks.expand_expr (exp, original_target, tmode,
8122 modifier, alt_rtl);
8125 /* Here to do an ordinary binary operator. */
8126 binop:
8127 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8128 subtarget, &op0, &op1, 0);
8129 binop2:
8130 this_optab = optab_for_tree_code (code, type);
8131 binop3:
8132 if (modifier == EXPAND_STACK_PARM)
8133 target = 0;
8134 temp = expand_binop (mode, this_optab, op0, op1, target,
8135 unsignedp, OPTAB_LIB_WIDEN);
8136 gcc_assert (temp);
8137 return REDUCE_BIT_FIELD (temp);
8139 #undef REDUCE_BIT_FIELD
8141 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
8142 signedness of TYPE), possibly returning the result in TARGET. */
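/* For example (informal): reducing to an unsigned 3-bit type masks with
   (1 << 3) - 1, while reducing to a signed 3-bit type shifts left and then
   arithmetically right by GET_MODE_BITSIZE (GET_MODE (exp)) - 3 so the
   value is properly sign-extended.  */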
8143 static rtx
8144 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
8146 HOST_WIDE_INT prec = TYPE_PRECISION (type);
8147 if (target && GET_MODE (target) != GET_MODE (exp))
8148 target = 0;
8149 if (TYPE_UNSIGNED (type))
8151 rtx mask;
8152 if (prec < HOST_BITS_PER_WIDE_INT)
8153 mask = immed_double_const (((unsigned HOST_WIDE_INT) 1 << prec) - 1, 0,
8154 GET_MODE (exp));
8155 else
8156 mask = immed_double_const ((unsigned HOST_WIDE_INT) -1,
8157 ((unsigned HOST_WIDE_INT) 1
8158 << (prec - HOST_BITS_PER_WIDE_INT)) - 1,
8159 GET_MODE (exp));
8160 return expand_and (GET_MODE (exp), exp, mask, target);
8162 else
8164 tree count = build_int_cst (NULL_TREE,
8165 GET_MODE_BITSIZE (GET_MODE (exp)) - prec);
8166 exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
8167 return expand_shift (RSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
8171 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
8172 when applied to the address of EXP produces an address known to be
8173 aligned more than BIGGEST_ALIGNMENT. */
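/* Informally, this matches offsets of the form (-&EXP) & (ALIGN - 1),
   i.e. the amount needed to round the address of EXP up to an ALIGN-byte
   boundary, where ALIGN is a power of 2 whose byte value exceeds
   BIGGEST_ALIGNMENT / BITS_PER_UNIT.  */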
8175 static int
8176 is_aligning_offset (tree offset, tree exp)
8178 /* Strip off any conversions. */
8179 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8180 || TREE_CODE (offset) == NOP_EXPR
8181 || TREE_CODE (offset) == CONVERT_EXPR)
8182 offset = TREE_OPERAND (offset, 0);
8184 /* We must now have a BIT_AND_EXPR with a constant that is one less than
8185 a power of 2 and which is larger than BIGGEST_ALIGNMENT. */
8186 if (TREE_CODE (offset) != BIT_AND_EXPR
8187 || !host_integerp (TREE_OPERAND (offset, 1), 1)
8188 || compare_tree_int (TREE_OPERAND (offset, 1),
8189 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
8190 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
8191 return 0;
8193 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
8194 It must be NEGATE_EXPR. Then strip any more conversions. */
8195 offset = TREE_OPERAND (offset, 0);
8196 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8197 || TREE_CODE (offset) == NOP_EXPR
8198 || TREE_CODE (offset) == CONVERT_EXPR)
8199 offset = TREE_OPERAND (offset, 0);
8201 if (TREE_CODE (offset) != NEGATE_EXPR)
8202 return 0;
8204 offset = TREE_OPERAND (offset, 0);
8205 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8206 || TREE_CODE (offset) == NOP_EXPR
8207 || TREE_CODE (offset) == CONVERT_EXPR)
8208 offset = TREE_OPERAND (offset, 0);
8210 /* This must now be the address of EXP. */
8211 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
8214 /* Return the tree node if ARG corresponds to a string constant, or zero
8215 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
8216 in bytes within the string that ARG is accessing. The type of the
8217 offset will be `sizetype'. */
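/* For example (informal): for "hello" + 2, or for an address such as
   &buf[2] where buf is a read-only, locally bound array initialized from
   "hello", this returns the STRING_CST "hello" and sets *PTR_OFFSET to 2.  */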
8219 tree
8220 string_constant (tree arg, tree *ptr_offset)
8222 tree array, offset;
8223 STRIP_NOPS (arg);
8225 if (TREE_CODE (arg) == ADDR_EXPR)
8227 if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8229 *ptr_offset = size_zero_node;
8230 return TREE_OPERAND (arg, 0);
8232 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
8234 array = TREE_OPERAND (arg, 0);
8235 offset = size_zero_node;
8237 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
8239 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
8240 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
8241 if (TREE_CODE (array) != STRING_CST
8242 && TREE_CODE (array) != VAR_DECL)
8243 return 0;
8245 else
8246 return 0;
8248 else if (TREE_CODE (arg) == PLUS_EXPR)
8250 tree arg0 = TREE_OPERAND (arg, 0);
8251 tree arg1 = TREE_OPERAND (arg, 1);
8253 STRIP_NOPS (arg0);
8254 STRIP_NOPS (arg1);
8256 if (TREE_CODE (arg0) == ADDR_EXPR
8257 && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
8258 || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
8260 array = TREE_OPERAND (arg0, 0);
8261 offset = arg1;
8263 else if (TREE_CODE (arg1) == ADDR_EXPR
8264 && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
8265 || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
8267 array = TREE_OPERAND (arg1, 0);
8268 offset = arg0;
8270 else
8271 return 0;
8273 else
8274 return 0;
8276 if (TREE_CODE (array) == STRING_CST)
8278 *ptr_offset = convert (sizetype, offset);
8279 return array;
8281 else if (TREE_CODE (array) == VAR_DECL)
8283 int length;
8285 /* Variables initialized to string literals can be handled too. */
8286 if (DECL_INITIAL (array) == NULL_TREE
8287 || TREE_CODE (DECL_INITIAL (array)) != STRING_CST)
8288 return 0;
8290 /* Only accept arrays that are read-only, have no side effects, and bind locally. */
8291 if (! TREE_READONLY (array)
8292 || TREE_SIDE_EFFECTS (array)
8293 || ! targetm.binds_local_p (array))
8294 return 0;
8296 /* Avoid const char foo[4] = "abcde"; */
8297 if (DECL_SIZE_UNIT (array) == NULL_TREE
8298 || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
8299 || (length = TREE_STRING_LENGTH (DECL_INITIAL (array))) <= 0
8300 || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
8301 return 0;
8303 /* If the variable is bigger than the string literal, OFFSET must be
8304 constant and within the bounds of the string literal. */
8305 offset = convert (sizetype, offset);
8306 if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
8307 && (! host_integerp (offset, 1)
8308 || compare_tree_int (offset, length) >= 0))
8309 return 0;
8311 *ptr_offset = offset;
8312 return DECL_INITIAL (array);
8315 return 0;
8318 /* Generate code to calculate EXP using a store-flag instruction
8319 and return an rtx for the result. EXP is either a comparison
8320 or a TRUTH_NOT_EXPR whose operand is a comparison.
8322 If TARGET is nonzero, store the result there if convenient.
8324 If ONLY_CHEAP is nonzero, only do this if it is likely to be very
8325 cheap.
8327 Return zero if there is no suitable set-flag instruction
8328 available on this machine.
8330 Once expand_expr has been called on the arguments of the comparison,
8331 we are committed to doing the store flag, since it is not safe to
8332 re-evaluate the expression. We emit the store-flag insn by calling
8333 emit_store_flag, but only expand the arguments if we have a reason
8334 to believe that emit_store_flag will be successful. If we think that
8335 it will, but it isn't, we have to simulate the store-flag with a
8336 set/jump/set sequence. */
8338 static rtx
8339 do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
8341 enum rtx_code code;
8342 tree arg0, arg1, type;
8343 tree tem;
8344 enum machine_mode operand_mode;
8345 int invert = 0;
8346 int unsignedp;
8347 rtx op0, op1;
8348 enum insn_code icode;
8349 rtx subtarget = target;
8350 rtx result, label;
8352 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
8353 result at the end. We can't simply invert the test since it would
8354 have already been inverted if it were valid. This case occurs for
8355 some floating-point comparisons. */
8357 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
8358 invert = 1, exp = TREE_OPERAND (exp, 0);
8360 arg0 = TREE_OPERAND (exp, 0);
8361 arg1 = TREE_OPERAND (exp, 1);
8363 /* Don't crash if the comparison was erroneous. */
8364 if (arg0 == error_mark_node || arg1 == error_mark_node)
8365 return const0_rtx;
8367 type = TREE_TYPE (arg0);
8368 operand_mode = TYPE_MODE (type);
8369 unsignedp = TYPE_UNSIGNED (type);
8371 /* We won't bother with BLKmode store-flag operations because it would mean
8372 passing a lot of information to emit_store_flag. */
8373 if (operand_mode == BLKmode)
8374 return 0;
8376 /* We won't bother with store-flag operations involving function pointers
8377 when function pointers must be canonicalized before comparisons. */
8378 #ifdef HAVE_canonicalize_funcptr_for_compare
8379 if (HAVE_canonicalize_funcptr_for_compare
8380 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
8381 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8382 == FUNCTION_TYPE))
8383 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
8384 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
8385 == FUNCTION_TYPE))))
8386 return 0;
8387 #endif
8389 STRIP_NOPS (arg0);
8390 STRIP_NOPS (arg1);
8392 /* Get the rtx comparison code to use. We know that EXP is a comparison
8393 operation of some type. Some comparisons against 1 and -1 can be
8394 converted to comparisons with zero. Do so here so that the tests
8395 below will be aware that we have a comparison with zero. These
8396 tests will not catch constants in the first operand, but constants
8397 are rarely passed as the first operand. */
8399 switch (TREE_CODE (exp))
8401 case EQ_EXPR:
8402 code = EQ;
8403 break;
8404 case NE_EXPR:
8405 code = NE;
8406 break;
8407 case LT_EXPR:
8408 if (integer_onep (arg1))
8409 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
8410 else
8411 code = unsignedp ? LTU : LT;
8412 break;
8413 case LE_EXPR:
8414 if (! unsignedp && integer_all_onesp (arg1))
8415 arg1 = integer_zero_node, code = LT;
8416 else
8417 code = unsignedp ? LEU : LE;
8418 break;
8419 case GT_EXPR:
8420 if (! unsignedp && integer_all_onesp (arg1))
8421 arg1 = integer_zero_node, code = GE;
8422 else
8423 code = unsignedp ? GTU : GT;
8424 break;
8425 case GE_EXPR:
8426 if (integer_onep (arg1))
8427 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
8428 else
8429 code = unsignedp ? GEU : GE;
8430 break;
8432 case UNORDERED_EXPR:
8433 code = UNORDERED;
8434 break;
8435 case ORDERED_EXPR:
8436 code = ORDERED;
8437 break;
8438 case UNLT_EXPR:
8439 code = UNLT;
8440 break;
8441 case UNLE_EXPR:
8442 code = UNLE;
8443 break;
8444 case UNGT_EXPR:
8445 code = UNGT;
8446 break;
8447 case UNGE_EXPR:
8448 code = UNGE;
8449 break;
8450 case UNEQ_EXPR:
8451 code = UNEQ;
8452 break;
8453 case LTGT_EXPR:
8454 code = LTGT;
8455 break;
8457 default:
8458 gcc_unreachable ();
8461 /* Put a constant second. */
8462 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
8464 tem = arg0; arg0 = arg1; arg1 = tem;
8465 code = swap_condition (code);
8468 /* If this is an equality or inequality test of a single bit, we can
8469 do this by shifting the bit being tested to the low-order bit and
8470 masking the result with the constant 1. If the condition was EQ,
8471 we xor it with 1. This does not require an scc insn and is faster
8472 than an scc insn even if we have it.
8474 The code to make this transformation was moved into fold_single_bit_test,
8475 so we just call into the folder and expand its result. */
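/* For example (informal): (x & 8) != 0 is folded to (x >> 3) & 1, and
   (x & 8) == 0 to ((x >> 3) & 1) ^ 1, which we then simply expand.  */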
8477 if ((code == NE || code == EQ)
8478 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
8479 && integer_pow2p (TREE_OPERAND (arg0, 1)))
8481 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
8482 return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
8483 arg0, arg1, type),
8484 target, VOIDmode, EXPAND_NORMAL);
8487 /* Now see if we are likely to be able to do this. Return if not. */
8488 if (! can_compare_p (code, operand_mode, ccp_store_flag))
8489 return 0;
8491 icode = setcc_gen_code[(int) code];
8492 if (icode == CODE_FOR_nothing
8493 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
8495 /* We can only do this if it is one of the special cases that
8496 can be handled without an scc insn. */
8497 if ((code == LT && integer_zerop (arg1))
8498 || (! only_cheap && code == GE && integer_zerop (arg1)))
8500 else if (BRANCH_COST >= 0
8501 && ! only_cheap && (code == NE || code == EQ)
8502 && TREE_CODE (type) != REAL_TYPE
8503 && ((abs_optab->handlers[(int) operand_mode].insn_code
8504 != CODE_FOR_nothing)
8505 || (ffs_optab->handlers[(int) operand_mode].insn_code
8506 != CODE_FOR_nothing)))
8508 else
8509 return 0;
8512 if (! get_subtarget (target)
8513 || GET_MODE (subtarget) != operand_mode)
8514 subtarget = 0;
8516 expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);
8518 if (target == 0)
8519 target = gen_reg_rtx (mode);
8521 result = emit_store_flag (target, code, op0, op1,
8522 operand_mode, unsignedp, 1);
8524 if (result)
8526 if (invert)
8527 result = expand_binop (mode, xor_optab, result, const1_rtx,
8528 result, 0, OPTAB_LIB_WIDEN);
8529 return result;
8532 /* If this failed, we have to do this with set/compare/jump/set code. */
8533 if (!REG_P (target)
8534 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
8535 target = gen_reg_rtx (GET_MODE (target));
8537 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
8538 result = compare_from_rtx (op0, op1, code, unsignedp,
8539 operand_mode, NULL_RTX);
8540 if (GET_CODE (result) == CONST_INT)
8541 return (((result == const0_rtx && ! invert)
8542 || (result != const0_rtx && invert))
8543 ? const0_rtx : const1_rtx);
8545 /* The code of RESULT may not match CODE if compare_from_rtx
8546 decided to swap its operands and reverse the original code.
8548 We know that compare_from_rtx returns either a CONST_INT or
8549 a new comparison code, so it is safe to just extract the
8550 code from RESULT. */
8551 code = GET_CODE (result);
8553 label = gen_label_rtx ();
8554 gcc_assert (bcc_gen_fctn[(int) code]);
8556 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
8557 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
8558 emit_label (label);
8560 return target;
8564 /* Stubs in case we haven't got a casesi insn. */
8565 #ifndef HAVE_casesi
8566 # define HAVE_casesi 0
8567 # define gen_casesi(a, b, c, d, e) (0)
8568 # define CODE_FOR_casesi CODE_FOR_nothing
8569 #endif
8571 /* If the machine does not have a case insn that compares the bounds,
8572 this means extra overhead for dispatch tables, which raises the
8573 threshold for using them. */
8574 #ifndef CASE_VALUES_THRESHOLD
8575 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
8576 #endif /* CASE_VALUES_THRESHOLD */
8578 unsigned int
8579 case_values_threshold (void)
8581 return CASE_VALUES_THRESHOLD;
8584 /* Attempt to generate a casesi instruction. Returns 1 if successful,
8585 0 otherwise (i.e. if there is no casesi instruction). */
8587 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
8588 rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
8590 enum machine_mode index_mode = SImode;
8591 int index_bits = GET_MODE_BITSIZE (index_mode);
8592 rtx op1, op2, index;
8593 enum machine_mode op_mode;
8595 if (! HAVE_casesi)
8596 return 0;
8598 /* Convert the index to SImode. */
8599 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
8601 enum machine_mode omode = TYPE_MODE (index_type);
8602 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
8604 /* We must handle the endpoints in the original mode. */
8605 index_expr = build2 (MINUS_EXPR, index_type,
8606 index_expr, minval);
8607 minval = integer_zero_node;
8608 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
8609 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
8610 omode, 1, default_label);
8611 /* Now we can safely truncate. */
8612 index = convert_to_mode (index_mode, index, 0);
8614 else
8616 if (TYPE_MODE (index_type) != index_mode)
8618 index_expr = convert (lang_hooks.types.type_for_size
8619 (index_bits, 0), index_expr);
8620 index_type = TREE_TYPE (index_expr);
8623 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
8626 do_pending_stack_adjust ();
8628 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
8629 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
8630 (index, op_mode))
8631 index = copy_to_mode_reg (op_mode, index);
8633 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
8635 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
8636 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
8637 op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
8638 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
8639 (op1, op_mode))
8640 op1 = copy_to_mode_reg (op_mode, op1);
8642 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
8644 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
8645 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
8646 op2, TYPE_UNSIGNED (TREE_TYPE (range)));
8647 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
8648 (op2, op_mode))
8649 op2 = copy_to_mode_reg (op_mode, op2);
8651 emit_jump_insn (gen_casesi (index, op1, op2,
8652 table_label, default_label));
8653 return 1;
8656 /* Attempt to generate a tablejump instruction; same concept. */
8657 #ifndef HAVE_tablejump
8658 #define HAVE_tablejump 0
8659 #define gen_tablejump(x, y) (0)
8660 #endif
8662 /* Subroutine of the next function.
8664 INDEX is the value being switched on, with the lowest value
8665 in the table already subtracted.
8666 MODE is its expected mode (needed if INDEX is constant).
8667 RANGE is the length of the jump table.
8668 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
8670 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
8671 index value is out of range. */
8673 static void
8674 do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
8675 rtx default_label)
8677 rtx temp, vector;
8679 if (INTVAL (range) > cfun->max_jumptable_ents)
8680 cfun->max_jumptable_ents = INTVAL (range);
8682 /* Do an unsigned comparison (in the proper mode) between the index
8683 expression and the value which represents the length of the range.
8684 Since we just finished subtracting the lower bound of the range
8685 from the index expression, this comparison allows us to simultaneously
8686 check that the original index expression value is both greater than
8687 or equal to the minimum value of the range and less than or equal to
8688 the maximum value of the range. */
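/* For example (informal): for case values 5..10, INDEX already has 5
   subtracted and RANGE is 5; the single unsigned test INDEX > 5 then
   rejects an original value of 4 (which wrapped to a huge unsigned
   number) as well as values above 10.  */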
8690 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
8691 default_label);
8693 /* If index is in range, it must fit in Pmode.
8694 Convert to Pmode so we can index with it. */
8695 if (mode != Pmode)
8696 index = convert_to_mode (Pmode, index, 1);
8698 /* Don't let a MEM slip through, because then INDEX that comes
8699 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
8700 and break_out_memory_refs will go to work on it and mess it up. */
8701 #ifdef PIC_CASE_VECTOR_ADDRESS
8702 if (flag_pic && !REG_P (index))
8703 index = copy_to_mode_reg (Pmode, index);
8704 #endif
8706 /* If flag_force_addr were to affect this address
8707 it could interfere with the tricky assumptions made
8708 about addresses that contain label-refs,
8709 which may be valid only very near the tablejump itself. */
8710 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
8711 GET_MODE_SIZE, because this indicates how large insns are. The other
8712 uses should all be Pmode, because they are addresses. This code
8713 could fail if addresses and insns are not the same size. */
8714 index = gen_rtx_PLUS (Pmode,
8715 gen_rtx_MULT (Pmode, index,
8716 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
8717 gen_rtx_LABEL_REF (Pmode, table_label));
8718 #ifdef PIC_CASE_VECTOR_ADDRESS
8719 if (flag_pic)
8720 index = PIC_CASE_VECTOR_ADDRESS (index);
8721 else
8722 #endif
8723 index = memory_address_noforce (CASE_VECTOR_MODE, index);
8724 temp = gen_reg_rtx (CASE_VECTOR_MODE);
8725 vector = gen_const_mem (CASE_VECTOR_MODE, index);
8726 convert_move (temp, vector, 0);
8728 emit_jump_insn (gen_tablejump (temp, table_label));
8730 /* If we are generating PIC code or if the table is PC-relative, the
8731 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
8732 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
8733 emit_barrier ();
8737 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
8738 rtx table_label, rtx default_label)
8740 rtx index;
8742 if (! HAVE_tablejump)
8743 return 0;
8745 index_expr = fold (build2 (MINUS_EXPR, index_type,
8746 convert (index_type, index_expr),
8747 convert (index_type, minval)));
8748 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
8749 do_pending_stack_adjust ();
8751 do_tablejump (index, TYPE_MODE (index_type),
8752 convert_modes (TYPE_MODE (index_type),
8753 TYPE_MODE (TREE_TYPE (range)),
8754 expand_expr (range, NULL_RTX,
8755 VOIDmode, 0),
8756 TYPE_UNSIGNED (TREE_TYPE (range))),
8757 table_label, default_label);
8758 return 1;
8761 /* Nonzero if the mode is a valid vector mode for this architecture.
8762 This returns nonzero even if there is no hardware support for the
8763 vector mode, but we can emulate with narrower modes. */
8766 vector_mode_valid_p (enum machine_mode mode)
8768 enum mode_class class = GET_MODE_CLASS (mode);
8769 enum machine_mode innermode;
8771 /* Doh! What's going on? */
8772 if (class != MODE_VECTOR_INT
8773 && class != MODE_VECTOR_FLOAT)
8774 return 0;
8776 /* Hardware support. Woo hoo! */
8777 if (targetm.vector_mode_supported_p (mode))
8778 return 1;
8780 innermode = GET_MODE_INNER (mode);
8782 /* We should probably return 1 if requesting V4DI and we have no DI,
8783 but do have V2DI, though that case is probably very unlikely. */
8785 /* If we have support for the inner mode, we can safely emulate it.
8786 We may not have V2DI, but we can emulate it with a pair of DIs. */
8787 return targetm.scalar_mode_supported_p (innermode);
8790 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
8791 static rtx
8792 const_vector_from_tree (tree exp)
8794 rtvec v;
8795 int units, i;
8796 tree link, elt;
8797 enum machine_mode inner, mode;
8799 mode = TYPE_MODE (TREE_TYPE (exp));
8801 if (initializer_zerop (exp))
8802 return CONST0_RTX (mode);
8804 units = GET_MODE_NUNITS (mode);
8805 inner = GET_MODE_INNER (mode);
8807 v = rtvec_alloc (units);
8809 link = TREE_VECTOR_CST_ELTS (exp);
8810 for (i = 0; link; link = TREE_CHAIN (link), ++i)
8812 elt = TREE_VALUE (link);
8814 if (TREE_CODE (elt) == REAL_CST)
8815 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
8816 inner);
8817 else
8818 RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
8819 TREE_INT_CST_HIGH (elt),
8820 inner);
8823 /* Initialize remaining elements to 0. */
8824 for (; i < units; ++i)
8825 RTVEC_ELT (v, i) = CONST0_RTX (inner);
8827 return gen_rtx_CONST_VECTOR (mode, v);
8829 #include "gt-expr.h"