1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 2, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the Free
20 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
21 02110-1301, USA. */
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "machmode.h"
28 #include "real.h"
29 #include "rtl.h"
30 #include "tree.h"
31 #include "flags.h"
32 #include "regs.h"
33 #include "hard-reg-set.h"
34 #include "except.h"
35 #include "function.h"
36 #include "insn-config.h"
37 #include "insn-attr.h"
38 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
39 #include "expr.h"
40 #include "optabs.h"
41 #include "libfuncs.h"
42 #include "recog.h"
43 #include "reload.h"
44 #include "output.h"
45 #include "typeclass.h"
46 #include "toplev.h"
47 #include "ggc.h"
48 #include "langhooks.h"
49 #include "intl.h"
50 #include "tm_p.h"
51 #include "tree-iterator.h"
52 #include "tree-pass.h"
53 #include "tree-flow.h"
54 #include "target.h"
55 #include "timevar.h"
57 /* Decide whether a function's arguments should be processed
58 from first to last or from last to first.
60 They should if the stack and args grow in opposite directions, but
61 only if we have push insns. */
63 #ifdef PUSH_ROUNDING
65 #ifndef PUSH_ARGS_REVERSED
66 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
67 #define PUSH_ARGS_REVERSED /* If it's last to first. */
68 #endif
69 #endif
71 #endif
73 #ifndef STACK_PUSH_CODE
74 #ifdef STACK_GROWS_DOWNWARD
75 #define STACK_PUSH_CODE PRE_DEC
76 #else
77 #define STACK_PUSH_CODE PRE_INC
78 #endif
79 #endif
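/* Illustrative note (not part of expr.c): the default above mirrors how a
   push looks in C-like pseudo-code.  On a downward-growing stack a push of X
   behaves like "*--sp = x", i.e. the address is formed by pre-decrement
   (PRE_DEC); on an upward-growing stack it behaves like "*++sp = x"
   (PRE_INC).  Targets whose push addressing differs define STACK_PUSH_CODE
   themselves, which is why the definition above is guarded by #ifndef.  */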
82 /* If this is nonzero, we do not bother generating VOLATILE
83 around volatile memory references, and we are willing to
84 output indirect addresses. If cse is to follow, we reject
85 indirect addresses so a useful potential cse is generated;
86 if it is used only once, instruction combination will produce
87 the same indirect address eventually. */
88 int cse_not_expected;
90 /* This structure is used by move_by_pieces to describe the move to
91 be performed. */
92 struct move_by_pieces
94 rtx to;
95 rtx to_addr;
96 int autinc_to;
97 int explicit_inc_to;
98 rtx from;
99 rtx from_addr;
100 int autinc_from;
101 int explicit_inc_from;
102 unsigned HOST_WIDE_INT len;
103 HOST_WIDE_INT offset;
104 int reverse;
107 /* This structure is used by store_by_pieces to describe the clear to
108 be performed. */
110 struct store_by_pieces
112 rtx to;
113 rtx to_addr;
114 int autinc_to;
115 int explicit_inc_to;
116 unsigned HOST_WIDE_INT len;
117 HOST_WIDE_INT offset;
118 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
119 void *constfundata;
120 int reverse;
123 static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
124 unsigned int,
125 unsigned int);
126 static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
127 struct move_by_pieces *);
128 static bool block_move_libcall_safe_for_call_parm (void);
129 static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned, unsigned, HOST_WIDE_INT);
130 static tree emit_block_move_libcall_fn (int);
131 static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
132 static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
133 static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
134 static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
135 static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
136 struct store_by_pieces *);
137 static tree clear_storage_libcall_fn (int);
138 static rtx compress_float_constant (rtx, rtx);
139 static rtx get_subtarget (rtx);
140 static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
141 HOST_WIDE_INT, enum machine_mode,
142 tree, tree, int, int);
143 static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
144 static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
145 tree, tree, int);
147 static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (tree, tree);
149 static int is_aligning_offset (tree, tree);
150 static void expand_operands (tree, tree, rtx, rtx*, rtx*,
151 enum expand_modifier);
152 static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
153 static rtx do_store_flag (tree, rtx, enum machine_mode, int);
154 #ifdef PUSH_ROUNDING
155 static void emit_single_push_insn (enum machine_mode, rtx, tree);
156 #endif
157 static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
158 static rtx const_vector_from_tree (tree);
159 static void write_complex_part (rtx, rtx, bool);
161 /* Record for each mode whether we can move a register directly to or
162 from an object of that mode in memory. If we can't, we won't try
163 to use that mode directly when accessing a field of that mode. */
165 static char direct_load[NUM_MACHINE_MODES];
166 static char direct_store[NUM_MACHINE_MODES];
168 /* Record for each mode whether we can float-extend from memory. */
170 static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
172 /* This macro is used to determine whether move_by_pieces should be called
173 to perform a structure copy. */
174 #ifndef MOVE_BY_PIECES_P
175 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
176 (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
177 < (unsigned int) MOVE_RATIO)
178 #endif
180 /* This macro is used to determine whether clear_by_pieces should be
181 called to clear storage. */
182 #ifndef CLEAR_BY_PIECES_P
183 #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
184 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
185 < (unsigned int) CLEAR_RATIO)
186 #endif
188 /* This macro is used to determine whether store_by_pieces should be
189 called to "memset" storage with byte values other than zero, or
190 to "memcpy" storage when the source is a constant string. */
191 #ifndef STORE_BY_PIECES_P
192 #define STORE_BY_PIECES_P(SIZE, ALIGN) \
193 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
194 < (unsigned int) MOVE_RATIO)
195 #endif
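/* Illustrative worked example (not part of expr.c): with a hypothetical
   MOVE_RATIO of 4, a 16-byte word-aligned copy on a 64-bit target needs two
   DImode moves, so move_by_pieces_ninsns returns 2 < 4 and MOVE_BY_PIECES_P
   is true -- the copy is open-coded.  A 64-byte copy would need 8 moves,
   fail the test, and go through a movmem pattern or a memcpy libcall
   instead.  The MOVE_RATIO value and the sizes here are assumptions chosen
   only for illustration.  */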
197 /* This array records the insn_code of insns to perform block moves. */
198 enum insn_code movmem_optab[NUM_MACHINE_MODES];
200 /* This array records the insn_code of insns to perform block sets. */
201 enum insn_code setmem_optab[NUM_MACHINE_MODES];
203 /* These arrays record the insn_code of three different kinds of insns
204 to perform block compares. */
205 enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
206 enum insn_code cmpstrn_optab[NUM_MACHINE_MODES];
207 enum insn_code cmpmem_optab[NUM_MACHINE_MODES];
209 /* Synchronization primitives. */
210 enum insn_code sync_add_optab[NUM_MACHINE_MODES];
211 enum insn_code sync_sub_optab[NUM_MACHINE_MODES];
212 enum insn_code sync_ior_optab[NUM_MACHINE_MODES];
213 enum insn_code sync_and_optab[NUM_MACHINE_MODES];
214 enum insn_code sync_xor_optab[NUM_MACHINE_MODES];
215 enum insn_code sync_nand_optab[NUM_MACHINE_MODES];
216 enum insn_code sync_old_add_optab[NUM_MACHINE_MODES];
217 enum insn_code sync_old_sub_optab[NUM_MACHINE_MODES];
218 enum insn_code sync_old_ior_optab[NUM_MACHINE_MODES];
219 enum insn_code sync_old_and_optab[NUM_MACHINE_MODES];
220 enum insn_code sync_old_xor_optab[NUM_MACHINE_MODES];
221 enum insn_code sync_old_nand_optab[NUM_MACHINE_MODES];
222 enum insn_code sync_new_add_optab[NUM_MACHINE_MODES];
223 enum insn_code sync_new_sub_optab[NUM_MACHINE_MODES];
224 enum insn_code sync_new_ior_optab[NUM_MACHINE_MODES];
225 enum insn_code sync_new_and_optab[NUM_MACHINE_MODES];
226 enum insn_code sync_new_xor_optab[NUM_MACHINE_MODES];
227 enum insn_code sync_new_nand_optab[NUM_MACHINE_MODES];
228 enum insn_code sync_compare_and_swap[NUM_MACHINE_MODES];
229 enum insn_code sync_compare_and_swap_cc[NUM_MACHINE_MODES];
230 enum insn_code sync_lock_test_and_set[NUM_MACHINE_MODES];
231 enum insn_code sync_lock_release[NUM_MACHINE_MODES];
233 /* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow. */
235 #ifndef SLOW_UNALIGNED_ACCESS
236 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
237 #endif
239 /* This is run once per compilation to set up which modes can be used
240 directly in memory and to initialize the block move optab. */
242 void
243 init_expr_once (void)
245 rtx insn, pat;
246 enum machine_mode mode;
247 int num_clobbers;
248 rtx mem, mem1;
249 rtx reg;
251 /* Try indexing by frame ptr and try by stack ptr.
252 It is known that on the Convex the stack ptr isn't a valid index.
253 With luck, one or the other is valid on any machine. */
254 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
255 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
257 /* A scratch register we can modify in-place below to avoid
258 useless RTL allocations. */
259 reg = gen_rtx_REG (VOIDmode, -1);
261 insn = rtx_alloc (INSN);
262 pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
263 PATTERN (insn) = pat;
265 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
266 mode = (enum machine_mode) ((int) mode + 1))
268 int regno;
270 direct_load[(int) mode] = direct_store[(int) mode] = 0;
271 PUT_MODE (mem, mode);
272 PUT_MODE (mem1, mode);
273 PUT_MODE (reg, mode);
275 /* See if there is some register that can be used in this mode and
276 directly loaded or stored from memory. */
278 if (mode != VOIDmode && mode != BLKmode)
279 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
280 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
281 regno++)
283 if (! HARD_REGNO_MODE_OK (regno, mode))
284 continue;
286 REGNO (reg) = regno;
288 SET_SRC (pat) = mem;
289 SET_DEST (pat) = reg;
290 if (recog (pat, insn, &num_clobbers) >= 0)
291 direct_load[(int) mode] = 1;
293 SET_SRC (pat) = mem1;
294 SET_DEST (pat) = reg;
295 if (recog (pat, insn, &num_clobbers) >= 0)
296 direct_load[(int) mode] = 1;
298 SET_SRC (pat) = reg;
299 SET_DEST (pat) = mem;
300 if (recog (pat, insn, &num_clobbers) >= 0)
301 direct_store[(int) mode] = 1;
303 SET_SRC (pat) = reg;
304 SET_DEST (pat) = mem1;
305 if (recog (pat, insn, &num_clobbers) >= 0)
306 direct_store[(int) mode] = 1;
310 mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
312 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
313 mode = GET_MODE_WIDER_MODE (mode))
315 enum machine_mode srcmode;
316 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
317 srcmode = GET_MODE_WIDER_MODE (srcmode))
319 enum insn_code ic;
321 ic = can_extend_p (mode, srcmode, 0);
322 if (ic == CODE_FOR_nothing)
323 continue;
325 PUT_MODE (mem, srcmode);
327 if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
328 float_extend_from_mem[mode][srcmode] = true;
333 /* This is run at the start of compiling a function. */
335 void
336 init_expr (void)
338 cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
341 /* Copy data from FROM to TO, where the machine modes are not the same.
342 Both modes may be integer, or both may be floating.
343 UNSIGNEDP should be nonzero if FROM is an unsigned type.
344 This causes zero-extension instead of sign-extension. */
346 void
347 convert_move (rtx to, rtx from, int unsignedp)
349 enum machine_mode to_mode = GET_MODE (to);
350 enum machine_mode from_mode = GET_MODE (from);
351 int to_real = SCALAR_FLOAT_MODE_P (to_mode);
352 int from_real = SCALAR_FLOAT_MODE_P (from_mode);
353 enum insn_code code;
354 rtx libcall;
356 /* rtx code for making an equivalent value. */
357 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
358 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
361 gcc_assert (to_real == from_real);
363 /* If the source and destination are already the same, then there's
364 nothing to do. */
365 if (to == from)
366 return;
368 /* If FROM is a SUBREG that indicates that we have already done at least
369 the required extension, strip it. We don't handle such SUBREGs as
370 TO here. */
372 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
373 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
374 >= GET_MODE_SIZE (to_mode))
375 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
376 from = gen_lowpart (to_mode, from), from_mode = to_mode;
378 gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));
380 if (to_mode == from_mode
381 || (from_mode == VOIDmode && CONSTANT_P (from)))
383 emit_move_insn (to, from);
384 return;
387 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
389 gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));
391 if (VECTOR_MODE_P (to_mode))
392 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
393 else
394 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
396 emit_move_insn (to, from);
397 return;
400 if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
402 convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
403 convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
404 return;
407 if (to_real)
409 rtx value, insns;
410 convert_optab tab;
412 gcc_assert ((GET_MODE_PRECISION (from_mode)
413 != GET_MODE_PRECISION (to_mode))
414 || (DECIMAL_FLOAT_MODE_P (from_mode)
415 != DECIMAL_FLOAT_MODE_P (to_mode)));
417 if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
418 /* Conversion between decimal float and binary float, same size. */
419 tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
420 else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
421 tab = sext_optab;
422 else
423 tab = trunc_optab;
425 /* Try converting directly if the insn is supported. */
427 code = tab->handlers[to_mode][from_mode].insn_code;
428 if (code != CODE_FOR_nothing)
430 emit_unop_insn (code, to, from,
431 tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
432 return;
435 /* Otherwise use a libcall. */
436 libcall = tab->handlers[to_mode][from_mode].libfunc;
438 /* Is this conversion implemented yet? */
439 gcc_assert (libcall);
441 start_sequence ();
442 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
443 1, from, from_mode);
444 insns = get_insns ();
445 end_sequence ();
446 emit_libcall_block (insns, to, value,
447 tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
448 from)
449 : gen_rtx_FLOAT_EXTEND (to_mode, from));
450 return;
453 /* Handle pointer conversion. */ /* SPEE 900220. */
454 /* Targets are expected to provide conversion insns between PxImode and
455 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
456 if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
458 enum machine_mode full_mode
459 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
461 gcc_assert (trunc_optab->handlers[to_mode][full_mode].insn_code
462 != CODE_FOR_nothing);
464 if (full_mode != from_mode)
465 from = convert_to_mode (full_mode, from, unsignedp);
466 emit_unop_insn (trunc_optab->handlers[to_mode][full_mode].insn_code,
467 to, from, UNKNOWN);
468 return;
470 if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
472 rtx new_from;
473 enum machine_mode full_mode
474 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
476 gcc_assert (sext_optab->handlers[full_mode][from_mode].insn_code
477 != CODE_FOR_nothing);
479 if (to_mode == full_mode)
481 emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
482 to, from, UNKNOWN);
483 return;
486 new_from = gen_reg_rtx (full_mode);
487 emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
488 new_from, from, UNKNOWN);
490 /* else proceed to integer conversions below. */
491 from_mode = full_mode;
492 from = new_from;
495 /* Now both modes are integers. */
497 /* Handle expanding beyond a word. */
498 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
499 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
501 rtx insns;
502 rtx lowpart;
503 rtx fill_value;
504 rtx lowfrom;
505 int i;
506 enum machine_mode lowpart_mode;
507 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
509 /* Try converting directly if the insn is supported. */
510 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
511 != CODE_FOR_nothing)
513 /* If FROM is a SUBREG, put it into a register. Do this
514 so that we always generate the same set of insns for
515 better cse'ing; if an intermediate assignment occurred,
516 we won't be doing the operation directly on the SUBREG. */
517 if (optimize > 0 && GET_CODE (from) == SUBREG)
518 from = force_reg (from_mode, from);
519 emit_unop_insn (code, to, from, equiv_code);
520 return;
522 /* Next, try converting via full word. */
523 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
524 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
525 != CODE_FOR_nothing))
527 if (REG_P (to))
529 if (reg_overlap_mentioned_p (to, from))
530 from = force_reg (from_mode, from);
531 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
533 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
534 emit_unop_insn (code, to,
535 gen_lowpart (word_mode, to), equiv_code);
536 return;
539 /* No special multiword conversion insn; do it by hand. */
540 start_sequence ();
542 /* Since we will turn this into a no conflict block, we must ensure
543 that the source does not overlap the target. */
545 if (reg_overlap_mentioned_p (to, from))
546 from = force_reg (from_mode, from);
548 /* Get a copy of FROM widened to a word, if necessary. */
549 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
550 lowpart_mode = word_mode;
551 else
552 lowpart_mode = from_mode;
554 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
556 lowpart = gen_lowpart (lowpart_mode, to);
557 emit_move_insn (lowpart, lowfrom);
559 /* Compute the value to put in each remaining word. */
560 if (unsignedp)
561 fill_value = const0_rtx;
562 else
564 #ifdef HAVE_slt
565 if (HAVE_slt
566 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
567 && STORE_FLAG_VALUE == -1)
569 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
570 lowpart_mode, 0);
571 fill_value = gen_reg_rtx (word_mode);
572 emit_insn (gen_slt (fill_value));
574 else
575 #endif
577 fill_value
578 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
579 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
580 NULL_RTX, 0);
581 fill_value = convert_to_mode (word_mode, fill_value, 1);
585 /* Fill the remaining words. */
586 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
588 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
589 rtx subword = operand_subword (to, index, 1, to_mode);
591 gcc_assert (subword);
593 if (fill_value != subword)
594 emit_move_insn (subword, fill_value);
597 insns = get_insns ();
598 end_sequence ();
600 emit_no_conflict_block (insns, to, from, NULL_RTX,
601 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
602 return;
605 /* Truncating multi-word to a word or less. */
606 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
607 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
609 if (!((MEM_P (from)
610 && ! MEM_VOLATILE_P (from)
611 && direct_load[(int) to_mode]
612 && ! mode_dependent_address_p (XEXP (from, 0)))
613 || REG_P (from)
614 || GET_CODE (from) == SUBREG))
615 from = force_reg (from_mode, from);
616 convert_move (to, gen_lowpart (word_mode, from), 0);
617 return;
620 /* Now follow all the conversions between integers
621 no more than a word long. */
623 /* For truncation, usually we can just refer to FROM in a narrower mode. */
624 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
625 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
626 GET_MODE_BITSIZE (from_mode)))
628 if (!((MEM_P (from)
629 && ! MEM_VOLATILE_P (from)
630 && direct_load[(int) to_mode]
631 && ! mode_dependent_address_p (XEXP (from, 0)))
632 || REG_P (from)
633 || GET_CODE (from) == SUBREG))
634 from = force_reg (from_mode, from);
635 if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
636 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
637 from = copy_to_reg (from);
638 emit_move_insn (to, gen_lowpart (to_mode, from));
639 return;
642 /* Handle extension. */
643 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
645 /* Convert directly if that works. */
646 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
647 != CODE_FOR_nothing)
649 emit_unop_insn (code, to, from, equiv_code);
650 return;
652 else
654 enum machine_mode intermediate;
655 rtx tmp;
656 tree shift_amount;
658 /* Search for a mode to convert via. */
659 for (intermediate = from_mode; intermediate != VOIDmode;
660 intermediate = GET_MODE_WIDER_MODE (intermediate))
661 if (((can_extend_p (to_mode, intermediate, unsignedp)
662 != CODE_FOR_nothing)
663 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
664 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
665 GET_MODE_BITSIZE (intermediate))))
666 && (can_extend_p (intermediate, from_mode, unsignedp)
667 != CODE_FOR_nothing))
669 convert_move (to, convert_to_mode (intermediate, from,
670 unsignedp), unsignedp);
671 return;
674 /* No suitable intermediate mode.
675 Generate what we need with shifts. */
676 shift_amount = build_int_cst (NULL_TREE,
677 GET_MODE_BITSIZE (to_mode)
678 - GET_MODE_BITSIZE (from_mode));
679 from = gen_lowpart (to_mode, force_reg (from_mode, from));
680 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
681 to, unsignedp);
682 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
683 to, unsignedp);
684 if (tmp != to)
685 emit_move_insn (to, tmp);
686 return;
690 /* Support special truncate insns for certain modes. */
691 if (trunc_optab->handlers[to_mode][from_mode].insn_code != CODE_FOR_nothing)
693 emit_unop_insn (trunc_optab->handlers[to_mode][from_mode].insn_code,
694 to, from, UNKNOWN);
695 return;
698 /* Handle truncation of volatile memrefs, and so on;
699 the things that couldn't be truncated directly,
700 and for which there was no special instruction.
702 ??? Code above formerly short-circuited this, for most integer
703 mode pairs, with a force_reg in from_mode followed by a recursive
704 call to this routine. Appears always to have been wrong. */
705 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
707 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
708 emit_move_insn (to, temp);
709 return;
712 /* Mode combination is not recognized. */
713 gcc_unreachable ();
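/* Illustrative sketch only, fenced off with #if 0 so it is not compiled:
   a minimal caller of convert_move, widening a SImode pseudo into a DImode
   pseudo.  Per the function comment above, a nonzero UNSIGNEDP requests
   zero extension; the pseudos here are made up for the example.  */
#if 0
static void
example_widen_si_to_di (rtx src_si)
{
  rtx dst_di = gen_reg_rtx (DImode);

  /* Zero-extend SRC_SI into DST_DI; pass 0 instead to sign-extend.  */
  convert_move (dst_di, src_si, 1);
}
#endif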
716 /* Return an rtx for a value that would result
717 from converting X to mode MODE.
718 Both X and MODE may be floating, or both integer.
719 UNSIGNEDP is nonzero if X is an unsigned value.
720 This can be done by referring to a part of X in place
721 or by copying to a new temporary with conversion. */
724 convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
726 return convert_modes (mode, VOIDmode, x, unsignedp);
729 /* Return an rtx for a value that would result
730 from converting X from mode OLDMODE to mode MODE.
731 Both modes may be floating, or both integer.
732 UNSIGNEDP is nonzero if X is an unsigned value.
734 This can be done by referring to a part of X in place
735 or by copying to a new temporary with conversion.
737 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode. */
740 convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
742 rtx temp;
744 /* If FROM is a SUBREG that indicates that we have already done at least
745 the required extension, strip it. */
747 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
748 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
749 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
750 x = gen_lowpart (mode, x);
752 if (GET_MODE (x) != VOIDmode)
753 oldmode = GET_MODE (x);
755 if (mode == oldmode)
756 return x;
758 /* There is one case that we must handle specially: If we are converting
759 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
760 we are to interpret the constant as unsigned, gen_lowpart will do
761 the wrong thing if the constant appears negative. What we want to do is
762 make the high-order word of the constant zero, not all ones. */
764 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
765 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
766 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
768 HOST_WIDE_INT val = INTVAL (x);
770 if (oldmode != VOIDmode
771 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
773 int width = GET_MODE_BITSIZE (oldmode);
775 /* We need to zero extend VAL. */
776 val &= ((HOST_WIDE_INT) 1 << width) - 1;
779 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
782 /* We can do this with a gen_lowpart if both desired and current modes
783 are integer, and this is either a constant integer, a register, or a
784 non-volatile MEM. Except for the constant case where MODE is no
785 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
787 if ((GET_CODE (x) == CONST_INT
788 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
789 || (GET_MODE_CLASS (mode) == MODE_INT
790 && GET_MODE_CLASS (oldmode) == MODE_INT
791 && (GET_CODE (x) == CONST_DOUBLE
792 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
793 && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
794 && direct_load[(int) mode])
795 || (REG_P (x)
796 && (! HARD_REGISTER_P (x)
797 || HARD_REGNO_MODE_OK (REGNO (x), mode))
798 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
799 GET_MODE_BITSIZE (GET_MODE (x)))))))))
801 /* ?? If we don't know OLDMODE, we have to assume here that
802 X does not need sign- or zero-extension. This may not be
803 the case, but it's the best we can do. */
804 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
805 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
807 HOST_WIDE_INT val = INTVAL (x);
808 int width = GET_MODE_BITSIZE (oldmode);
810 /* We must sign or zero-extend in this case. Start by
811 zero-extending, then sign extend if we need to. */
812 val &= ((HOST_WIDE_INT) 1 << width) - 1;
813 if (! unsignedp
814 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
815 val |= (HOST_WIDE_INT) (-1) << width;
817 return gen_int_mode (val, mode);
820 return gen_lowpart (mode, x);
823 /* Converting from an integer constant into MODE is always equivalent to a
824 subreg operation. */
825 if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
827 gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
828 return simplify_gen_subreg (mode, x, oldmode, 0);
831 temp = gen_reg_rtx (mode);
832 convert_move (temp, x, unsignedp);
833 return temp;
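/* Illustrative sketch only (#if 0, not compiled): convert_modes lets the
   caller spell out OLDMODE, which matters for VOIDmode constants.  Here a
   CONST_INT known to hold an unsigned QImode value is widened to SImode
   without acquiring a spurious sign.  The value 0xff is arbitrary.  */
#if 0
static rtx
example_widen_unsigned_const (void)
{
  rtx byte = gen_int_mode (0xff, QImode);

  /* OLDMODE = QImode, UNSIGNEDP = 1: treat the constant as unsigned.  */
  return convert_modes (SImode, QImode, byte, 1);
}
#endif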
836 /* STORE_MAX_PIECES is the number of bytes at a time that we can
837 store efficiently. Due to internal GCC limitations, this is
838 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
839 for an immediate constant. */
841 #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
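/* Worked example (an assumption for illustration): with a 64-bit
   HOST_WIDE_INT, 2 * sizeof (HOST_WIDE_INT) is 16 bytes, so a target that
   sets MOVE_MAX_PIECES to 8 gets STORE_MAX_PIECES == 8, while a target
   advertising 32-byte pieces would still be capped at 16.  */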
843 /* Determine whether the LEN bytes can be moved by using several move
844 instructions. Return nonzero if a call to move_by_pieces should
845 succeed. */
848 can_move_by_pieces (unsigned HOST_WIDE_INT len,
849 unsigned int align ATTRIBUTE_UNUSED)
851 return MOVE_BY_PIECES_P (len, align);
854 /* Generate several move instructions to copy LEN bytes from block FROM to
855 block TO. (These are MEM rtx's with BLKmode).
857 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
858 used to push FROM to the stack.
860 ALIGN is maximum stack alignment we can assume.
862 If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
863 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
864 stpcpy. */
867 move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
868 unsigned int align, int endp)
870 struct move_by_pieces data;
871 rtx to_addr, from_addr = XEXP (from, 0);
872 unsigned int max_size = MOVE_MAX_PIECES + 1;
873 enum machine_mode mode = VOIDmode, tmode;
874 enum insn_code icode;
876 align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
878 data.offset = 0;
879 data.from_addr = from_addr;
880 if (to)
882 to_addr = XEXP (to, 0);
883 data.to = to;
884 data.autinc_to
885 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
886 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
887 data.reverse
888 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
890 else
892 to_addr = NULL_RTX;
893 data.to = NULL_RTX;
894 data.autinc_to = 1;
895 #ifdef STACK_GROWS_DOWNWARD
896 data.reverse = 1;
897 #else
898 data.reverse = 0;
899 #endif
901 data.to_addr = to_addr;
902 data.from = from;
903 data.autinc_from
904 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
905 || GET_CODE (from_addr) == POST_INC
906 || GET_CODE (from_addr) == POST_DEC);
908 data.explicit_inc_from = 0;
909 data.explicit_inc_to = 0;
910 if (data.reverse) data.offset = len;
911 data.len = len;
913 /* If copying requires more than two move insns,
914 copy addresses to registers (to make displacements shorter)
915 and use post-increment if available. */
916 if (!(data.autinc_from && data.autinc_to)
917 && move_by_pieces_ninsns (len, align, max_size) > 2)
919 /* Find the mode of the largest move... */
920 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
921 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
922 if (GET_MODE_SIZE (tmode) < max_size)
923 mode = tmode;
925 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
927 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
928 data.autinc_from = 1;
929 data.explicit_inc_from = -1;
931 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
933 data.from_addr = copy_addr_to_reg (from_addr);
934 data.autinc_from = 1;
935 data.explicit_inc_from = 1;
937 if (!data.autinc_from && CONSTANT_P (from_addr))
938 data.from_addr = copy_addr_to_reg (from_addr);
939 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
941 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
942 data.autinc_to = 1;
943 data.explicit_inc_to = -1;
945 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
947 data.to_addr = copy_addr_to_reg (to_addr);
948 data.autinc_to = 1;
949 data.explicit_inc_to = 1;
951 if (!data.autinc_to && CONSTANT_P (to_addr))
952 data.to_addr = copy_addr_to_reg (to_addr);
955 tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
956 if (align >= GET_MODE_ALIGNMENT (tmode))
957 align = GET_MODE_ALIGNMENT (tmode);
958 else
960 enum machine_mode xmode;
962 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
963 tmode != VOIDmode;
964 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
965 if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
966 || SLOW_UNALIGNED_ACCESS (tmode, align))
967 break;
969 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
972 /* First move what we can in the largest integer mode, then go to
973 successively smaller modes. */
975 while (max_size > 1)
977 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
978 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
979 if (GET_MODE_SIZE (tmode) < max_size)
980 mode = tmode;
982 if (mode == VOIDmode)
983 break;
985 icode = mov_optab->handlers[(int) mode].insn_code;
986 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
987 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
989 max_size = GET_MODE_SIZE (mode);
992 /* The code above should have handled everything. */
993 gcc_assert (!data.len);
995 if (endp)
997 rtx to1;
999 gcc_assert (!data.reverse);
1000 if (data.autinc_to)
1002 if (endp == 2)
1004 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
1005 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
1006 else
1007 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
1008 -1));
1010 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
1011 data.offset);
1013 else
1015 if (endp == 2)
1016 --data.offset;
1017 to1 = adjust_address (data.to, QImode, data.offset);
1019 return to1;
1021 else
1022 return data.to;
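/* Illustrative sketch only (#if 0, not compiled): the usual pairing of
   can_move_by_pieces with move_by_pieces for a small fixed-size copy.
   The 16-byte length is made up; ENDP == 0 means we do not need the
   updated destination address back.  */
#if 0
static void
example_copy_16_bytes (rtx dst_mem, rtx src_mem)
{
  unsigned int align = MIN (MEM_ALIGN (dst_mem), MEM_ALIGN (src_mem));

  if (can_move_by_pieces (16, align))
    move_by_pieces (dst_mem, src_mem, 16, align, 0);
}
#endif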
1025 /* Return number of insns required to move L bytes by pieces.
1026 ALIGN (in bits) is maximum alignment we can assume. */
1028 static unsigned HOST_WIDE_INT
1029 move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
1030 unsigned int max_size)
1032 unsigned HOST_WIDE_INT n_insns = 0;
1033 enum machine_mode tmode;
1035 tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
1036 if (align >= GET_MODE_ALIGNMENT (tmode))
1037 align = GET_MODE_ALIGNMENT (tmode);
1038 else
1040 enum machine_mode tmode, xmode;
1042 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
1043 tmode != VOIDmode;
1044 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
1045 if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
1046 || SLOW_UNALIGNED_ACCESS (tmode, align))
1047 break;
1049 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
1052 while (max_size > 1)
1054 enum machine_mode mode = VOIDmode;
1055 enum insn_code icode;
1057 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1058 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1059 if (GET_MODE_SIZE (tmode) < max_size)
1060 mode = tmode;
1062 if (mode == VOIDmode)
1063 break;
1065 icode = mov_optab->handlers[(int) mode].insn_code;
1066 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1067 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1069 max_size = GET_MODE_SIZE (mode);
1072 gcc_assert (!l);
1073 return n_insns;
1076 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1077 with move instructions for mode MODE. GENFUN is the gen_... function
1078 to make a move insn for that mode. DATA has all the other info. */
1080 static void
1081 move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
1082 struct move_by_pieces *data)
1084 unsigned int size = GET_MODE_SIZE (mode);
1085 rtx to1 = NULL_RTX, from1;
1087 while (data->len >= size)
1089 if (data->reverse)
1090 data->offset -= size;
1092 if (data->to)
1094 if (data->autinc_to)
1095 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1096 data->offset);
1097 else
1098 to1 = adjust_address (data->to, mode, data->offset);
1101 if (data->autinc_from)
1102 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1103 data->offset);
1104 else
1105 from1 = adjust_address (data->from, mode, data->offset);
1107 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1108 emit_insn (gen_add2_insn (data->to_addr,
1109 GEN_INT (-(HOST_WIDE_INT)size)));
1110 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1111 emit_insn (gen_add2_insn (data->from_addr,
1112 GEN_INT (-(HOST_WIDE_INT)size)));
1114 if (data->to)
1115 emit_insn ((*genfun) (to1, from1));
1116 else
1118 #ifdef PUSH_ROUNDING
1119 emit_single_push_insn (mode, from1, NULL);
1120 #else
1121 gcc_unreachable ();
1122 #endif
1125 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1126 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1127 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1128 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1130 if (! data->reverse)
1131 data->offset += size;
1133 data->len -= size;
1137 /* Emit code to move a block Y to a block X. This may be done with
1138 string-move instructions, with multiple scalar move instructions,
1139 or with a library call.
1141 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1142 SIZE is an rtx that says how long they are.
1143 ALIGN is the maximum alignment we can assume they have.
1144 METHOD describes what kind of copy this is, and what mechanisms may be used.
1146 Return the address of the new block, if memcpy is called and returns it,
1147 0 otherwise. */
1150 emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method,
1151 unsigned int expected_align, HOST_WIDE_INT expected_size)
1153 bool may_use_call;
1154 rtx retval = 0;
1155 unsigned int align;
1157 switch (method)
1159 case BLOCK_OP_NORMAL:
1160 case BLOCK_OP_TAILCALL:
1161 may_use_call = true;
1162 break;
1164 case BLOCK_OP_CALL_PARM:
1165 may_use_call = block_move_libcall_safe_for_call_parm ();
1167 /* Make inhibit_defer_pop nonzero around the library call
1168 to force it to pop the arguments right away. */
1169 NO_DEFER_POP;
1170 break;
1172 case BLOCK_OP_NO_LIBCALL:
1173 may_use_call = false;
1174 break;
1176 default:
1177 gcc_unreachable ();
1180 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1182 gcc_assert (MEM_P (x));
1183 gcc_assert (MEM_P (y));
1184 gcc_assert (size);
1186 /* Make sure we've got BLKmode addresses; store_one_arg can decide that
1187 block copy is more efficient for other large modes, e.g. DCmode. */
1188 x = adjust_address (x, BLKmode, 0);
1189 y = adjust_address (y, BLKmode, 0);
1191 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1192 can be incorrect is coming from __builtin_memcpy. */
1193 if (GET_CODE (size) == CONST_INT)
1195 if (INTVAL (size) == 0)
1196 return 0;
1198 x = shallow_copy_rtx (x);
1199 y = shallow_copy_rtx (y);
1200 set_mem_size (x, size);
1201 set_mem_size (y, size);
1204 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1205 move_by_pieces (x, y, INTVAL (size), align, 0);
1206 else if (emit_block_move_via_movmem (x, y, size, align,
1207 expected_align, expected_size))
1209 else if (may_use_call)
1210 retval = emit_block_move_via_libcall (x, y, size,
1211 method == BLOCK_OP_TAILCALL);
1212 else
1213 emit_block_move_via_loop (x, y, size, align);
1215 if (method == BLOCK_OP_CALL_PARM)
1216 OK_DEFER_POP;
1218 return retval;
1222 emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
1224 return emit_block_move_hints (x, y, size, method, 0, -1);
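/* Illustrative sketch only (#if 0, not compiled): a generic 32-byte block
   copy.  BLOCK_OP_NORMAL lets emit_block_move_hints choose between
   move_by_pieces, a movmem pattern, a memcpy libcall, or a byte loop.
   The size is made up for the example.  */
#if 0
static void
example_block_copy (rtx dst_mem, rtx src_mem)
{
  emit_block_move (dst_mem, src_mem, GEN_INT (32), BLOCK_OP_NORMAL);
}
#endif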
1227 /* A subroutine of emit_block_move. Returns true if calling the
1228 block move libcall will not clobber any parameters which may have
1229 already been placed on the stack. */
1231 static bool
1232 block_move_libcall_safe_for_call_parm (void)
1234 /* If arguments are pushed on the stack, then they're safe. */
1235 if (PUSH_ARGS)
1236 return true;
1238 /* If registers go on the stack anyway, any argument is sure to clobber
1239 an outgoing argument. */
1240 #if defined (REG_PARM_STACK_SPACE) && defined (OUTGOING_REG_PARM_STACK_SPACE)
1242 tree fn = emit_block_move_libcall_fn (false);
1243 (void) fn;
1244 if (REG_PARM_STACK_SPACE (fn) != 0)
1245 return false;
1247 #endif
1249 /* If any argument goes in memory, then it might clobber an outgoing
1250 argument. */
1252 CUMULATIVE_ARGS args_so_far;
1253 tree fn, arg;
1255 fn = emit_block_move_libcall_fn (false);
1256 INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);
1258 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1259 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1261 enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1262 rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
1263 if (!tmp || !REG_P (tmp))
1264 return false;
1265 if (targetm.calls.arg_partial_bytes (&args_so_far, mode, NULL, 1))
1266 return false;
1267 FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
1270 return true;
1273 /* A subroutine of emit_block_move. Expand a movmem pattern;
1274 return true if successful. */
1276 static bool
1277 emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
1278 unsigned int expected_align, HOST_WIDE_INT expected_size)
1280 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1281 int save_volatile_ok = volatile_ok;
1282 enum machine_mode mode;
1284 if (expected_align < align)
1285 expected_align = align;
1287 /* Since this is a move insn, we don't care about volatility. */
1288 volatile_ok = 1;
1290 /* Try the most limited insn first, because there's no point
1291 including more than one in the machine description unless
1292 the more limited one has some advantage. */
1294 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1295 mode = GET_MODE_WIDER_MODE (mode))
1297 enum insn_code code = movmem_optab[(int) mode];
1298 insn_operand_predicate_fn pred;
1300 if (code != CODE_FOR_nothing
1301 /* We don't need MODE to be narrower than HOST_BITS_PER_WIDE_INT
1302 here because if SIZE is less than the mode mask, as it is
1303 returned by the macro, it will definitely be less than the
1304 actual mode mask. */
1305 && ((GET_CODE (size) == CONST_INT
1306 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1307 <= (GET_MODE_MASK (mode) >> 1)))
1308 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1309 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1310 || (*pred) (x, BLKmode))
1311 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1312 || (*pred) (y, BLKmode))
1313 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1314 || (*pred) (opalign, VOIDmode)))
1316 rtx op2;
1317 rtx last = get_last_insn ();
1318 rtx pat;
1320 op2 = convert_to_mode (mode, size, 1);
1321 pred = insn_data[(int) code].operand[2].predicate;
1322 if (pred != 0 && ! (*pred) (op2, mode))
1323 op2 = copy_to_mode_reg (mode, op2);
1325 /* ??? When called via emit_block_move_for_call, it'd be
1326 nice if there were some way to inform the backend, so
1327 that it doesn't fail the expansion because it thinks
1328 emitting the libcall would be more efficient. */
1330 if (insn_data[(int) code].n_operands == 4)
1331 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1332 else
1333 pat = GEN_FCN ((int) code) (x, y, op2, opalign,
1334 GEN_INT (expected_align),
1335 GEN_INT (expected_size));
1336 if (pat)
1338 emit_insn (pat);
1339 volatile_ok = save_volatile_ok;
1340 return true;
1342 else
1343 delete_insns_since (last);
1347 volatile_ok = save_volatile_ok;
1348 return false;
1351 /* A subroutine of emit_block_move. Expand a call to memcpy.
1352 Return the return value from memcpy, 0 otherwise. */
1355 emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
1357 rtx dst_addr, src_addr;
1358 tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
1359 enum machine_mode size_mode;
1360 rtx retval;
1362 /* Emit code to copy the addresses of DST and SRC and SIZE into new
1363 pseudos. We can then place those new pseudos into a VAR_DECL and
1364 use them later. */
1366 dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
1367 src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));
1369 dst_addr = convert_memory_address (ptr_mode, dst_addr);
1370 src_addr = convert_memory_address (ptr_mode, src_addr);
1372 dst_tree = make_tree (ptr_type_node, dst_addr);
1373 src_tree = make_tree (ptr_type_node, src_addr);
1375 size_mode = TYPE_MODE (sizetype);
1377 size = convert_to_mode (size_mode, size, 1);
1378 size = copy_to_mode_reg (size_mode, size);
1380 /* It is incorrect to use the libcall calling conventions to call
1381 memcpy in this context. This could be a user call to memcpy and
1382 the user may wish to examine the return value from memcpy. For
1383 targets where libcalls and normal calls have different conventions
1384 for returning pointers, we could end up generating incorrect code. */
1386 size_tree = make_tree (sizetype, size);
1388 fn = emit_block_move_libcall_fn (true);
1389 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
1390 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1391 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
1393 /* Now we have to build up the CALL_EXPR itself. */
1394 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1395 call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1396 call_expr, arg_list, NULL_TREE);
1397 CALL_EXPR_TAILCALL (call_expr) = tailcall;
1399 retval = expand_normal (call_expr);
1401 return retval;
1404 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1405 for the function we use for block copies. The first time FOR_CALL
1406 is true, we call assemble_external. */
1408 static GTY(()) tree block_move_fn;
1410 void
1411 init_block_move_fn (const char *asmspec)
1413 if (!block_move_fn)
1415 tree args, fn;
1417 fn = get_identifier ("memcpy");
1418 args = build_function_type_list (ptr_type_node, ptr_type_node,
1419 const_ptr_type_node, sizetype,
1420 NULL_TREE);
1422 fn = build_decl (FUNCTION_DECL, fn, args);
1423 DECL_EXTERNAL (fn) = 1;
1424 TREE_PUBLIC (fn) = 1;
1425 DECL_ARTIFICIAL (fn) = 1;
1426 TREE_NOTHROW (fn) = 1;
1427 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
1428 DECL_VISIBILITY_SPECIFIED (fn) = 1;
1430 block_move_fn = fn;
1433 if (asmspec)
1434 set_user_assembler_name (block_move_fn, asmspec);
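/* Illustrative sketch only (#if 0, not compiled): passing an ASMSPEC lets a
   caller point the block-move libcall at a differently named memcpy
   implementation; "__my_memcpy" is a made-up name for illustration.  */
#if 0
static void
example_rename_block_move_fn (void)
{
  init_block_move_fn ("__my_memcpy");
}
#endif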
1437 static tree
1438 emit_block_move_libcall_fn (int for_call)
1440 static bool emitted_extern;
1442 if (!block_move_fn)
1443 init_block_move_fn (NULL);
1445 if (for_call && !emitted_extern)
1447 emitted_extern = true;
1448 make_decl_rtl (block_move_fn);
1449 assemble_external (block_move_fn);
1452 return block_move_fn;
1455 /* A subroutine of emit_block_move. Copy the data via an explicit
1456 loop. This is used only when libcalls are forbidden. */
1457 /* ??? It'd be nice to copy in hunks larger than QImode. */
1459 static void
1460 emit_block_move_via_loop (rtx x, rtx y, rtx size,
1461 unsigned int align ATTRIBUTE_UNUSED)
1463 rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
1464 enum machine_mode iter_mode;
1466 iter_mode = GET_MODE (size);
1467 if (iter_mode == VOIDmode)
1468 iter_mode = word_mode;
1470 top_label = gen_label_rtx ();
1471 cmp_label = gen_label_rtx ();
1472 iter = gen_reg_rtx (iter_mode);
1474 emit_move_insn (iter, const0_rtx);
1476 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1477 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1478 do_pending_stack_adjust ();
1480 emit_jump (cmp_label);
1481 emit_label (top_label);
1483 tmp = convert_modes (Pmode, iter_mode, iter, true);
1484 x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
1485 y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
1486 x = change_address (x, QImode, x_addr);
1487 y = change_address (y, QImode, y_addr);
1489 emit_move_insn (x, y);
1491 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1492 true, OPTAB_LIB_WIDEN);
1493 if (tmp != iter)
1494 emit_move_insn (iter, tmp);
1496 emit_label (cmp_label);
1498 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1499 true, top_label);
1502 /* Copy all or part of a value X into registers starting at REGNO.
1503 The number of registers to be filled is NREGS. */
1505 void
1506 move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
1508 int i;
1509 #ifdef HAVE_load_multiple
1510 rtx pat;
1511 rtx last;
1512 #endif
1514 if (nregs == 0)
1515 return;
1517 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1518 x = validize_mem (force_const_mem (mode, x));
1520 /* See if the machine can do this with a load multiple insn. */
1521 #ifdef HAVE_load_multiple
1522 if (HAVE_load_multiple)
1524 last = get_last_insn ();
1525 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1526 GEN_INT (nregs));
1527 if (pat)
1529 emit_insn (pat);
1530 return;
1532 else
1533 delete_insns_since (last);
1535 #endif
1537 for (i = 0; i < nregs; i++)
1538 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1539 operand_subword_force (x, i, mode));
1542 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1543 The number of registers to be filled is NREGS. */
1545 void
1546 move_block_from_reg (int regno, rtx x, int nregs)
1548 int i;
1550 if (nregs == 0)
1551 return;
1553 /* See if the machine can do this with a store multiple insn. */
1554 #ifdef HAVE_store_multiple
1555 if (HAVE_store_multiple)
1557 rtx last = get_last_insn ();
1558 rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1559 GEN_INT (nregs));
1560 if (pat)
1562 emit_insn (pat);
1563 return;
1565 else
1566 delete_insns_since (last);
1568 #endif
1570 for (i = 0; i < nregs; i++)
1572 rtx tem = operand_subword (x, i, 1, BLKmode);
1574 gcc_assert (tem);
1576 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1580 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1581 ORIG, where ORIG is a non-consecutive group of registers represented by
1582 a PARALLEL. The clone is identical to the original except in that the
1583 original set of registers is replaced by a new set of pseudo registers.
1584 The new set has the same modes as the original set. */
1587 gen_group_rtx (rtx orig)
1589 int i, length;
1590 rtx *tmps;
1592 gcc_assert (GET_CODE (orig) == PARALLEL);
1594 length = XVECLEN (orig, 0);
1595 tmps = alloca (sizeof (rtx) * length);
1597 /* Skip a NULL entry in first slot. */
1598 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1600 if (i)
1601 tmps[0] = 0;
1603 for (; i < length; i++)
1605 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1606 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1608 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1611 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
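/* Illustrative note (not part of expr.c): the PARALLEL built above pairs
   each register with its byte offset in the value, e.g. a two-register
   DImode group might print roughly as

     (parallel [(expr_list (reg:DI 100) (const_int 0))
                (expr_list (reg:DI 101) (const_int 8))])

   where the register numbers and offsets are made up for the example.  */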
1614 /* A subroutine of emit_group_load. Arguments as for emit_group_load,
1615 except that values are placed in TMPS[i], and must later be moved
1616 into corresponding XEXP (XVECEXP (DST, 0, i), 0) element. */
1618 static void
1619 emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
1621 rtx src;
1622 int start, i;
1623 enum machine_mode m = GET_MODE (orig_src);
1625 gcc_assert (GET_CODE (dst) == PARALLEL);
1627 if (m != VOIDmode
1628 && !SCALAR_INT_MODE_P (m)
1629 && !MEM_P (orig_src)
1630 && GET_CODE (orig_src) != CONCAT)
1632 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
1633 if (imode == BLKmode)
1634 src = assign_stack_temp (GET_MODE (orig_src), ssize, 0);
1635 else
1636 src = gen_reg_rtx (imode);
1637 if (imode != BLKmode)
1638 src = gen_lowpart (GET_MODE (orig_src), src);
1639 emit_move_insn (src, orig_src);
1640 /* ...and back again. */
1641 if (imode != BLKmode)
1642 src = gen_lowpart (imode, src);
1643 emit_group_load_1 (tmps, dst, src, type, ssize);
1644 return;
1647 /* Check for a NULL entry, used to indicate that the parameter goes
1648 both on the stack and in registers. */
1649 if (XEXP (XVECEXP (dst, 0, 0), 0))
1650 start = 0;
1651 else
1652 start = 1;
1654 /* Process the pieces. */
1655 for (i = start; i < XVECLEN (dst, 0); i++)
1657 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1658 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1659 unsigned int bytelen = GET_MODE_SIZE (mode);
1660 int shift = 0;
1662 /* Handle trailing fragments that run over the size of the struct. */
1663 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1665 /* Arrange to shift the fragment to where it belongs.
1666 extract_bit_field loads to the lsb of the reg. */
1667 if (
1668 #ifdef BLOCK_REG_PADDING
1669 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1670 == (BYTES_BIG_ENDIAN ? upward : downward)
1671 #else
1672 BYTES_BIG_ENDIAN
1673 #endif
1675 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1676 bytelen = ssize - bytepos;
1677 gcc_assert (bytelen > 0);
1680 /* If we won't be loading directly from memory, protect the real source
1681 from strange tricks we might play; but make sure that the source can
1682 be loaded directly into the destination. */
1683 src = orig_src;
1684 if (!MEM_P (orig_src)
1685 && (!CONSTANT_P (orig_src)
1686 || (GET_MODE (orig_src) != mode
1687 && GET_MODE (orig_src) != VOIDmode)))
1689 if (GET_MODE (orig_src) == VOIDmode)
1690 src = gen_reg_rtx (mode);
1691 else
1692 src = gen_reg_rtx (GET_MODE (orig_src));
1694 emit_move_insn (src, orig_src);
1697 /* Optimize the access just a bit. */
1698 if (MEM_P (src)
1699 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1700 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
1701 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1702 && bytelen == GET_MODE_SIZE (mode))
1704 tmps[i] = gen_reg_rtx (mode);
1705 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
1707 else if (COMPLEX_MODE_P (mode)
1708 && GET_MODE (src) == mode
1709 && bytelen == GET_MODE_SIZE (mode))
1710 /* Let emit_move_complex do the bulk of the work. */
1711 tmps[i] = src;
1712 else if (GET_CODE (src) == CONCAT)
1714 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1715 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1717 if ((bytepos == 0 && bytelen == slen0)
1718 || (bytepos != 0 && bytepos + bytelen <= slen))
1720 /* The following assumes that the concatenated objects all
1721 have the same size. In this case, a simple calculation
1722 can be used to determine the object and the bit field
1723 to be extracted. */
1724 tmps[i] = XEXP (src, bytepos / slen0);
1725 if (! CONSTANT_P (tmps[i])
1726 && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
1727 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
1728 (bytepos % slen0) * BITS_PER_UNIT,
1729 1, NULL_RTX, mode, mode);
1731 else
1733 rtx mem;
1735 gcc_assert (!bytepos);
1736 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1737 emit_move_insn (mem, src);
1738 tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
1739 0, 1, NULL_RTX, mode, mode);
1742 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1743 SIMD register, which is currently broken. While we get GCC
1744 to emit proper RTL for these cases, let's dump to memory. */
1745 else if (VECTOR_MODE_P (GET_MODE (dst))
1746 && REG_P (src))
1748 int slen = GET_MODE_SIZE (GET_MODE (src));
1749 rtx mem;
1751 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1752 emit_move_insn (mem, src);
1753 tmps[i] = adjust_address (mem, mode, (int) bytepos);
1755 else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
1756 && XVECLEN (dst, 0) > 1)
1757 tmps[i] = simplify_gen_subreg (mode, src, GET_MODE(dst), bytepos);
1758 else if (CONSTANT_P (src)
1759 || (REG_P (src) && GET_MODE (src) == mode))
1760 tmps[i] = src;
1761 else
1762 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1763 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
1764 mode, mode);
1766 if (shift)
1767 tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
1768 build_int_cst (NULL_TREE, shift), tmps[i], 0);
1772 /* Emit code to move a block SRC of type TYPE to a block DST,
1773 where DST is non-consecutive registers represented by a PARALLEL.
1774 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
1775 if not known. */
1777 void
1778 emit_group_load (rtx dst, rtx src, tree type, int ssize)
1780 rtx *tmps;
1781 int i;
1783 tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));
1784 emit_group_load_1 (tmps, dst, src, type, ssize);
1786 /* Copy the extracted pieces into the proper (probable) hard regs. */
1787 for (i = 0; i < XVECLEN (dst, 0); i++)
1789 rtx d = XEXP (XVECEXP (dst, 0, i), 0);
1790 if (d == NULL)
1791 continue;
1792 emit_move_insn (d, tmps[i]);
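/* Illustrative sketch only (#if 0, not compiled): loading a 16-byte
   aggregate from memory into the registers described by a PARALLEL such as
   the one sketched earlier (typical for a multi-register return value).
   The names and the size are made up for the example.  */
#if 0
static void
example_group_load (rtx parallel_dst, rtx src_mem, tree type)
{
  emit_group_load (parallel_dst, src_mem, type, 16);
}
#endif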
1796 /* Similar, but load SRC into new pseudos in a format that looks like
1797 PARALLEL. This can later be fed to emit_group_move to get things
1798 in the right place. */
1801 emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
1803 rtvec vec;
1804 int i;
1806 vec = rtvec_alloc (XVECLEN (parallel, 0));
1807 emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);
1809 /* Convert the vector to look just like the original PARALLEL, except
1810 with the computed values. */
1811 for (i = 0; i < XVECLEN (parallel, 0); i++)
1813 rtx e = XVECEXP (parallel, 0, i);
1814 rtx d = XEXP (e, 0);
1816 if (d)
1818 d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
1819 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
1821 RTVEC_ELT (vec, i) = e;
1824 return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
1827 /* Emit code to move a block SRC to block DST, where SRC and DST are
1828 non-consecutive groups of registers, each represented by a PARALLEL. */
1830 void
1831 emit_group_move (rtx dst, rtx src)
1833 int i;
1835 gcc_assert (GET_CODE (src) == PARALLEL
1836 && GET_CODE (dst) == PARALLEL
1837 && XVECLEN (src, 0) == XVECLEN (dst, 0));
1839 /* Skip first entry if NULL. */
1840 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1841 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1842 XEXP (XVECEXP (src, 0, i), 0));
1845 /* Move a group of registers represented by a PARALLEL into pseudos. */
1848 emit_group_move_into_temps (rtx src)
1850 rtvec vec = rtvec_alloc (XVECLEN (src, 0));
1851 int i;
1853 for (i = 0; i < XVECLEN (src, 0); i++)
1855 rtx e = XVECEXP (src, 0, i);
1856 rtx d = XEXP (e, 0);
1858 if (d)
1859 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
1860 RTVEC_ELT (vec, i) = e;
1863 return gen_rtx_PARALLEL (GET_MODE (src), vec);
1866 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1867 where SRC is non-consecutive registers represented by a PARALLEL.
1868 SSIZE represents the total size of block ORIG_DST, or -1 if not
1869 known. */
1871 void
1872 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
1874 rtx *tmps, dst;
1875 int start, finish, i;
1876 enum machine_mode m = GET_MODE (orig_dst);
1878 gcc_assert (GET_CODE (src) == PARALLEL);
1880 if (!SCALAR_INT_MODE_P (m)
1881 && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
1883 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
1884 if (imode == BLKmode)
1885 dst = assign_stack_temp (GET_MODE (orig_dst), ssize, 0);
1886 else
1887 dst = gen_reg_rtx (imode);
1888 emit_group_store (dst, src, type, ssize);
1889 if (imode != BLKmode)
1890 dst = gen_lowpart (GET_MODE (orig_dst), dst);
1891 emit_move_insn (orig_dst, dst);
1892 return;
1895 /* Check for a NULL entry, used to indicate that the parameter goes
1896 both on the stack and in registers. */
1897 if (XEXP (XVECEXP (src, 0, 0), 0))
1898 start = 0;
1899 else
1900 start = 1;
1901 finish = XVECLEN (src, 0);
1903 tmps = alloca (sizeof (rtx) * finish);
1905 /* Copy the (probable) hard regs into pseudos. */
1906 for (i = start; i < finish; i++)
1908 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1909 if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
1911 tmps[i] = gen_reg_rtx (GET_MODE (reg));
1912 emit_move_insn (tmps[i], reg);
1914 else
1915 tmps[i] = reg;
1918 /* If we won't be storing directly into memory, protect the real destination
1919 from strange tricks we might play. */
1920 dst = orig_dst;
1921 if (GET_CODE (dst) == PARALLEL)
1923 rtx temp;
1925 /* We can get a PARALLEL dst if there is a conditional expression in
1926 a return statement. In that case, the dst and src are the same,
1927 so no action is necessary. */
1928 if (rtx_equal_p (dst, src))
1929 return;
1931 /* It is unclear if we can ever reach here, but we may as well handle
1932 it. Allocate a temporary, and split this into a store/load to/from
1933 the temporary. */
1935 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
1936 emit_group_store (temp, src, type, ssize);
1937 emit_group_load (dst, temp, type, ssize);
1938 return;
1940 else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
1942 enum machine_mode outer = GET_MODE (dst);
1943 enum machine_mode inner;
1944 HOST_WIDE_INT bytepos;
1945 bool done = false;
1946 rtx temp;
1948 if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
1949 dst = gen_reg_rtx (outer);
1951 /* Make life a bit easier for combine. */
1952 /* If the first element of the vector is the low part
1953 of the destination mode, use a paradoxical subreg to
1954 initialize the destination. */
1955 if (start < finish)
1957 inner = GET_MODE (tmps[start]);
1958 bytepos = subreg_lowpart_offset (inner, outer);
1959 if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
1961 temp = simplify_gen_subreg (outer, tmps[start],
1962 inner, 0);
1963 if (temp)
1965 emit_move_insn (dst, temp);
1966 done = true;
1967 start++;
1972 /* If the first element wasn't the low part, try the last. */
1973 if (!done
1974 && start < finish - 1)
1976 inner = GET_MODE (tmps[finish - 1]);
1977 bytepos = subreg_lowpart_offset (inner, outer);
1978 if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
1980 temp = simplify_gen_subreg (outer, tmps[finish - 1],
1981 inner, 0);
1982 if (temp)
1984 emit_move_insn (dst, temp);
1985 done = true;
1986 finish--;
1991 /* Otherwise, simply initialize the result to zero. */
1992 if (!done)
1993 emit_move_insn (dst, CONST0_RTX (outer));
1996 /* Process the pieces. */
1997 for (i = start; i < finish; i++)
1999 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2000 enum machine_mode mode = GET_MODE (tmps[i]);
2001 unsigned int bytelen = GET_MODE_SIZE (mode);
2002 rtx dest = dst;
2004 /* Handle trailing fragments that run over the size of the struct. */
2005 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2007 /* store_bit_field always takes its value from the lsb.
2008 Move the fragment to the lsb if it's not already there. */
2009 if (
2010 #ifdef BLOCK_REG_PADDING
2011 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2012 == (BYTES_BIG_ENDIAN ? upward : downward)
2013 #else
2014 BYTES_BIG_ENDIAN
2015 #endif
2018 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2019 tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
2020 build_int_cst (NULL_TREE, shift),
2021 tmps[i], 0);
2023 bytelen = ssize - bytepos;
2026 if (GET_CODE (dst) == CONCAT)
2028 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2029 dest = XEXP (dst, 0);
2030 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2032 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2033 dest = XEXP (dst, 1);
2035 else
2037 gcc_assert (bytepos == 0 && XVECLEN (src, 0));
2038 dest = assign_stack_temp (GET_MODE (dest),
2039 GET_MODE_SIZE (GET_MODE (dest)), 0);
2040 emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
2041 tmps[i]);
2042 dst = dest;
2043 break;
2047 /* Optimize the access just a bit. */
2048 if (MEM_P (dest)
2049 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2050 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2051 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2052 && bytelen == GET_MODE_SIZE (mode))
2053 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2054 else
2055 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2056 mode, tmps[i]);
2059 /* Copy from the pseudo into the (probable) hard reg. */
2060 if (orig_dst != dst)
2061 emit_move_insn (orig_dst, dst);
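/* Illustrative sketch, not part of the original source: spilling a value
   returned in a PARALLEL into a BLKmode stack slot, the typical use of
   emit_group_store.  It assumes TYPE has a fixed size; guarded by #if 0
   so it is never compiled.  */
#if 0
static rtx
example_group_store_to_stack (rtx retval_parallel, tree type)
{
  HOST_WIDE_INT size = int_size_in_bytes (type);
  rtx slot = assign_stack_temp (BLKmode, size, 0);

  /* Scatter the registers described by the PARALLEL into the slot.  */
  emit_group_store (slot, retval_parallel, type, size);
  return slot;
}
#endif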
2064 /* Generate code to copy a BLKmode object of TYPE out of a
2065 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2066 is null, a stack temporary is created. TGTBLK is returned.
2068 The purpose of this routine is to handle functions that return
2069 BLKmode structures in registers. Some machines (the PA for example)
2070 want to return all small structures in registers regardless of the
2071 structure's alignment. */
2074 copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
2076 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2077 rtx src = NULL, dst = NULL;
2078 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2079 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2081 if (tgtblk == 0)
2083 tgtblk = assign_temp (build_qualified_type (type,
2084 (TYPE_QUALS (type)
2085 | TYPE_QUAL_CONST)),
2086 0, 1, 1);
2087 preserve_temp_slots (tgtblk);
2090 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2091 into a new pseudo which is a full word. */
2093 if (GET_MODE (srcreg) != BLKmode
2094 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2095 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2097 /* If the structure doesn't take up a whole number of words, see whether
2098 SRCREG is padded on the left or on the right. If it's on the left,
2099 set PADDING_CORRECTION to the number of bits to skip.
2101 In most ABIs, the structure will be returned at the least significant
2102 end of the register, which translates to right padding on little-endian
2103 targets and left padding on big-endian targets. The opposite
2104 holds if the structure is returned at the most significant
2105 end of the register. */
2106 if (bytes % UNITS_PER_WORD != 0
2107 && (targetm.calls.return_in_msb (type)
2108 ? !BYTES_BIG_ENDIAN
2109 : BYTES_BIG_ENDIAN))
2110 padding_correction
2111 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2113 Copy the structure BITSIZE bits at a time.
2115 We could probably emit more efficient code for machines which do not use
2116 strict alignment, but it doesn't seem worth the effort at the current
2117 time. */
2118 for (bitpos = 0, xbitpos = padding_correction;
2119 bitpos < bytes * BITS_PER_UNIT;
2120 bitpos += bitsize, xbitpos += bitsize)
2122 /* We need a new source operand each time xbitpos is on a
2123 word boundary and when xbitpos == padding_correction
2124 (the first time through). */
2125 if (xbitpos % BITS_PER_WORD == 0
2126 || xbitpos == padding_correction)
2127 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2128 GET_MODE (srcreg));
2130 /* We need a new destination operand each time bitpos is on
2131 a word boundary. */
2132 if (bitpos % BITS_PER_WORD == 0)
2133 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2135 /* Use xbitpos for the source extraction (right justified) and
2136 bitpos for the destination store (left justified). */
2137 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2138 extract_bit_field (src, bitsize,
2139 xbitpos % BITS_PER_WORD, 1,
2140 NULL_RTX, word_mode, word_mode));
2143 return tgtblk;
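/* Illustrative sketch, not part of the original source: unpacking a small
   BLKmode structure returned in the (word-sized) return register.  The
   register number 0 stands in for the target's return register and is an
   assumption of the example; guarded by #if 0 so it is never compiled.  */
#if 0
static rtx
example_copy_blkmode_return (tree type)
{
  rtx retreg = gen_rtx_REG (word_mode, 0);

  /* Passing a null target makes copy_blkmode_from_reg allocate the stack
     temporary itself and return it.  */
  return copy_blkmode_from_reg (NULL_RTX, retreg, type);
}
#endif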
2146 /* Add a USE expression for REG to the (possibly empty) list pointed
2147 to by CALL_FUSAGE. REG must denote a hard register. */
2149 void
2150 use_reg (rtx *call_fusage, rtx reg)
2152 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2154 *call_fusage
2155 = gen_rtx_EXPR_LIST (VOIDmode,
2156 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2159 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2160 starting at REGNO. All of these registers must be hard registers. */
2162 void
2163 use_regs (rtx *call_fusage, int regno, int nregs)
2165 int i;
2167 gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2169 for (i = 0; i < nregs; i++)
2170 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2173 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2174 PARALLEL REGS. This is for calls that pass values in multiple
2175 non-contiguous locations. The Irix 6 ABI has examples of this. */
2177 void
2178 use_group_regs (rtx *call_fusage, rtx regs)
2180 int i;
2182 for (i = 0; i < XVECLEN (regs, 0); i++)
2184 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2186 /* A NULL entry means the parameter goes both on the stack and in
2187 registers. This can also be a MEM for targets that pass values
2188 partially on the stack and partially in registers. */
2189 if (reg != 0 && REG_P (reg))
2190 use_reg (call_fusage, reg);
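/* Illustrative sketch, not part of the original source: building the
   CALL_INSN_FUNCTION_USAGE list for a call whose argument lives in two
   consecutive hard registers starting at ARG_REGNO, which is assumed to
   be a hard register number; guarded by #if 0 so it is never compiled.  */
#if 0
static rtx
example_build_call_fusage (int arg_regno)
{
  rtx call_fusage = NULL_RTX;

  /* Mark both registers as used by the call so later passes keep the
     instructions that set them.  */
  use_regs (&call_fusage, arg_regno, 2);
  return call_fusage;
}
#endif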
2195 /* Determine whether the LEN bytes generated by CONSTFUN can be
2196 stored to memory using several move instructions. CONSTFUNDATA is
2197 a pointer which will be passed as argument in every CONSTFUN call.
2198 ALIGN is maximum alignment we can assume. Return nonzero if a
2199 call to store_by_pieces should succeed. */
2202 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2203 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2204 void *constfundata, unsigned int align)
2206 unsigned HOST_WIDE_INT l;
2207 unsigned int max_size;
2208 HOST_WIDE_INT offset = 0;
2209 enum machine_mode mode, tmode;
2210 enum insn_code icode;
2211 int reverse;
2212 rtx cst;
2214 if (len == 0)
2215 return 1;
2217 if (! STORE_BY_PIECES_P (len, align))
2218 return 0;
2220 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2221 if (align >= GET_MODE_ALIGNMENT (tmode))
2222 align = GET_MODE_ALIGNMENT (tmode);
2223 else
2225 enum machine_mode xmode;
2227 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2228 tmode != VOIDmode;
2229 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2230 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2231 || SLOW_UNALIGNED_ACCESS (tmode, align))
2232 break;
2234 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2237 /* We would first store what we can in the largest integer mode, then go to
2238 successively smaller modes. */
2240 for (reverse = 0;
2241 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2242 reverse++)
2244 l = len;
2245 mode = VOIDmode;
2246 max_size = STORE_MAX_PIECES + 1;
2247 while (max_size > 1)
2249 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2250 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2251 if (GET_MODE_SIZE (tmode) < max_size)
2252 mode = tmode;
2254 if (mode == VOIDmode)
2255 break;
2257 icode = mov_optab->handlers[(int) mode].insn_code;
2258 if (icode != CODE_FOR_nothing
2259 && align >= GET_MODE_ALIGNMENT (mode))
2261 unsigned int size = GET_MODE_SIZE (mode);
2263 while (l >= size)
2265 if (reverse)
2266 offset -= size;
2268 cst = (*constfun) (constfundata, offset, mode);
2269 if (!LEGITIMATE_CONSTANT_P (cst))
2270 return 0;
2272 if (!reverse)
2273 offset += size;
2275 l -= size;
2279 max_size = GET_MODE_SIZE (mode);
2282 /* The code above should have handled everything. */
2283 gcc_assert (!l);
2286 return 1;
2289 /* Generate several move instructions to store LEN bytes generated by
2290 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2291 pointer which will be passed as argument in every CONSTFUN call.
2292 ALIGN is maximum alignment we can assume.
2293 If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
2294 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
2295 stpcpy. */
2298 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2299 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2300 void *constfundata, unsigned int align, int endp)
2302 struct store_by_pieces data;
2304 if (len == 0)
2306 gcc_assert (endp != 2);
2307 return to;
2310 gcc_assert (STORE_BY_PIECES_P (len, align));
2311 data.constfun = constfun;
2312 data.constfundata = constfundata;
2313 data.len = len;
2314 data.to = to;
2315 store_by_pieces_1 (&data, align);
2316 if (endp)
2318 rtx to1;
2320 gcc_assert (!data.reverse);
2321 if (data.autinc_to)
2323 if (endp == 2)
2325 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2326 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2327 else
2328 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2329 -1));
2331 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2332 data.offset);
2334 else
2336 if (endp == 2)
2337 --data.offset;
2338 to1 = adjust_address (data.to, QImode, data.offset);
2340 return to1;
2342 else
2343 return data.to;
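/* Illustrative sketch, not part of the original source: a CONSTFUN
   callback that replicates a single byte, used to check and then perform
   a by-pieces store.  The example_* names and the byte value 0xab are
   assumptions of the example; guarded by #if 0 so it is never compiled.  */
#if 0
static rtx
example_constfun (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
                  enum machine_mode mode)
{
  /* Replicate the byte pointed to by DATA across MODE.  */
  unsigned char c = *(unsigned char *) data;
  unsigned HOST_WIDE_INT val = 0;
  unsigned int i;

  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    val = (val << 8) | c;
  return gen_int_mode (val, mode);
}

static void
example_store_by_pieces (rtx to, unsigned HOST_WIDE_INT len)
{
  static unsigned char byte = 0xab;

  if (can_store_by_pieces (len, example_constfun, &byte, MEM_ALIGN (to)))
    store_by_pieces (to, len, example_constfun, &byte, MEM_ALIGN (to), 0);
}
#endif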
2346 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2347 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2349 static void
2350 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2352 struct store_by_pieces data;
2354 if (len == 0)
2355 return;
2357 data.constfun = clear_by_pieces_1;
2358 data.constfundata = NULL;
2359 data.len = len;
2360 data.to = to;
2361 store_by_pieces_1 (&data, align);
2364 /* Callback routine for clear_by_pieces.
2365 Return const0_rtx unconditionally. */
2367 static rtx
2368 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2369 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2370 enum machine_mode mode ATTRIBUTE_UNUSED)
2372 return const0_rtx;
2375 /* Subroutine of clear_by_pieces and store_by_pieces.
2376 Generate several move instructions to store LEN bytes of block TO. (A MEM
2377 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2379 static void
2380 store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
2381 unsigned int align ATTRIBUTE_UNUSED)
2383 rtx to_addr = XEXP (data->to, 0);
2384 unsigned int max_size = STORE_MAX_PIECES + 1;
2385 enum machine_mode mode = VOIDmode, tmode;
2386 enum insn_code icode;
2388 data->offset = 0;
2389 data->to_addr = to_addr;
2390 data->autinc_to
2391 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2392 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2394 data->explicit_inc_to = 0;
2395 data->reverse
2396 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2397 if (data->reverse)
2398 data->offset = data->len;
2400 /* If storing requires more than two move insns,
2401 copy addresses to registers (to make displacements shorter)
2402 and use post-increment if available. */
2403 if (!data->autinc_to
2404 && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2406 /* Determine the main mode we'll be using. */
2407 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2408 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2409 if (GET_MODE_SIZE (tmode) < max_size)
2410 mode = tmode;
2412 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2414 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2415 data->autinc_to = 1;
2416 data->explicit_inc_to = -1;
2419 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2420 && ! data->autinc_to)
2422 data->to_addr = copy_addr_to_reg (to_addr);
2423 data->autinc_to = 1;
2424 data->explicit_inc_to = 1;
2427 if ( !data->autinc_to && CONSTANT_P (to_addr))
2428 data->to_addr = copy_addr_to_reg (to_addr);
2431 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2432 if (align >= GET_MODE_ALIGNMENT (tmode))
2433 align = GET_MODE_ALIGNMENT (tmode);
2434 else
2436 enum machine_mode xmode;
2438 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2439 tmode != VOIDmode;
2440 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2441 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2442 || SLOW_UNALIGNED_ACCESS (tmode, align))
2443 break;
2445 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2448 /* First store what we can in the largest integer mode, then go to
2449 successively smaller modes. */
2451 while (max_size > 1)
2453 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2454 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2455 if (GET_MODE_SIZE (tmode) < max_size)
2456 mode = tmode;
2458 if (mode == VOIDmode)
2459 break;
2461 icode = mov_optab->handlers[(int) mode].insn_code;
2462 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2463 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2465 max_size = GET_MODE_SIZE (mode);
2468 /* The code above should have handled everything. */
2469 gcc_assert (!data->len);
2472 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2473 with move instructions for mode MODE. GENFUN is the gen_... function
2474 to make a move insn for that mode. DATA has all the other info. */
2476 static void
2477 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2478 struct store_by_pieces *data)
2480 unsigned int size = GET_MODE_SIZE (mode);
2481 rtx to1, cst;
2483 while (data->len >= size)
2485 if (data->reverse)
2486 data->offset -= size;
2488 if (data->autinc_to)
2489 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2490 data->offset);
2491 else
2492 to1 = adjust_address (data->to, mode, data->offset);
2494 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2495 emit_insn (gen_add2_insn (data->to_addr,
2496 GEN_INT (-(HOST_WIDE_INT) size)));
2498 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2499 emit_insn ((*genfun) (to1, cst));
2501 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2502 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2504 if (! data->reverse)
2505 data->offset += size;
2507 data->len -= size;
2511 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2512 its length in bytes. */
2515 clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
2516 unsigned int expected_align, HOST_WIDE_INT expected_size)
2518 enum machine_mode mode = GET_MODE (object);
2519 unsigned int align;
2521 gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2523 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2524 just move a zero. Otherwise, do this a piece at a time. */
2525 if (mode != BLKmode
2526 && GET_CODE (size) == CONST_INT
2527 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
2529 rtx zero = CONST0_RTX (mode);
2530 if (zero != NULL)
2532 emit_move_insn (object, zero);
2533 return NULL;
2536 if (COMPLEX_MODE_P (mode))
2538 zero = CONST0_RTX (GET_MODE_INNER (mode));
2539 if (zero != NULL)
2541 write_complex_part (object, zero, 0);
2542 write_complex_part (object, zero, 1);
2543 return NULL;
2548 if (size == const0_rtx)
2549 return NULL;
2551 align = MEM_ALIGN (object);
2553 if (GET_CODE (size) == CONST_INT
2554 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2555 clear_by_pieces (object, INTVAL (size), align);
2556 else if (set_storage_via_setmem (object, size, const0_rtx, align,
2557 expected_align, expected_size))
2559 else
2560 return set_storage_via_libcall (object, size, const0_rtx,
2561 method == BLOCK_OP_TAILCALL);
2563 return NULL;
2567 clear_storage (rtx object, rtx size, enum block_op_methods method)
2569 return clear_storage_hints (object, size, method, 0, -1);
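/* Illustrative sketch, not part of the original source: zeroing a BLKmode
   stack temporary of SIZE bytes with clear_storage, which chooses between
   clear_by_pieces, a setmem pattern and a memset libcall.  Guarded by
   #if 0 so it is never compiled.  */
#if 0
static rtx
example_clear_temp (HOST_WIDE_INT size)
{
  rtx slot = assign_stack_temp (BLKmode, size, 0);

  clear_storage (slot, GEN_INT (size), BLOCK_OP_NORMAL);
  return slot;
}
#endif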
2573 /* A subroutine of clear_storage. Expand a call to memset.
2574 Return the return value of memset, 0 otherwise. */
2577 set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
2579 tree call_expr, arg_list, fn, object_tree, size_tree, val_tree;
2580 enum machine_mode size_mode;
2581 rtx retval;
2583 /* Emit code to copy OBJECT and SIZE into new pseudos. We can then
2584 place those new pseudos into a VAR_DECL and use them later. */
2586 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2588 size_mode = TYPE_MODE (sizetype);
2589 size = convert_to_mode (size_mode, size, 1);
2590 size = copy_to_mode_reg (size_mode, size);
2592 /* It is incorrect to use the libcall calling conventions to call
2593 memset in this context. This could be a user call to memset and
2594 the user may wish to examine the return value from memset. For
2595 targets where libcalls and normal calls have different conventions
2596 for returning pointers, we could end up generating incorrect code. */
2598 object_tree = make_tree (ptr_type_node, object);
2599 if (GET_CODE (val) != CONST_INT)
2600 val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
2601 size_tree = make_tree (sizetype, size);
2602 val_tree = make_tree (integer_type_node, val);
2604 fn = clear_storage_libcall_fn (true);
2605 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
2606 arg_list = tree_cons (NULL_TREE, val_tree, arg_list);
2607 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
2609 /* Now we have to build up the CALL_EXPR itself. */
2610 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2611 call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2612 call_expr, arg_list, NULL_TREE);
2613 CALL_EXPR_TAILCALL (call_expr) = tailcall;
2615 retval = expand_normal (call_expr);
2617 return retval;
2620 /* A subroutine of set_storage_via_libcall. Create the tree node
2621 for the function we use for block clears. The first time FOR_CALL
2622 is true, we call assemble_external. */
2624 static GTY(()) tree block_clear_fn;
2626 void
2627 init_block_clear_fn (const char *asmspec)
2629 if (!block_clear_fn)
2631 tree fn, args;
2633 fn = get_identifier ("memset");
2634 args = build_function_type_list (ptr_type_node, ptr_type_node,
2635 integer_type_node, sizetype,
2636 NULL_TREE);
2638 fn = build_decl (FUNCTION_DECL, fn, args);
2639 DECL_EXTERNAL (fn) = 1;
2640 TREE_PUBLIC (fn) = 1;
2641 DECL_ARTIFICIAL (fn) = 1;
2642 TREE_NOTHROW (fn) = 1;
2643 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
2644 DECL_VISIBILITY_SPECIFIED (fn) = 1;
2646 block_clear_fn = fn;
2649 if (asmspec)
2650 set_user_assembler_name (block_clear_fn, asmspec);
2653 static tree
2654 clear_storage_libcall_fn (int for_call)
2656 static bool emitted_extern;
2658 if (!block_clear_fn)
2659 init_block_clear_fn (NULL);
2661 if (for_call && !emitted_extern)
2663 emitted_extern = true;
2664 make_decl_rtl (block_clear_fn);
2665 assemble_external (block_clear_fn);
2668 return block_clear_fn;
2671 /* Expand a setmem pattern; return true if successful. */
2673 bool
2674 set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
2675 unsigned int expected_align, HOST_WIDE_INT expected_size)
2677 /* Try the most limited insn first, because there's no point
2678 including more than one in the machine description unless
2679 the more limited one has some advantage. */
2681 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2682 enum machine_mode mode;
2684 if (expected_align < align)
2685 expected_align = align;
2687 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2688 mode = GET_MODE_WIDER_MODE (mode))
2690 enum insn_code code = setmem_optab[(int) mode];
2691 insn_operand_predicate_fn pred;
2693 if (code != CODE_FOR_nothing
2694 /* We don't need MODE to be narrower than
2695 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2696 the mode mask, as it is returned by the macro, it will
2697 definitely be less than the actual mode mask. */
2698 && ((GET_CODE (size) == CONST_INT
2699 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2700 <= (GET_MODE_MASK (mode) >> 1)))
2701 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2702 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2703 || (*pred) (object, BLKmode))
2704 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
2705 || (*pred) (opalign, VOIDmode)))
2707 rtx opsize, opchar;
2708 enum machine_mode char_mode;
2709 rtx last = get_last_insn ();
2710 rtx pat;
2712 opsize = convert_to_mode (mode, size, 1);
2713 pred = insn_data[(int) code].operand[1].predicate;
2714 if (pred != 0 && ! (*pred) (opsize, mode))
2715 opsize = copy_to_mode_reg (mode, opsize);
2717 opchar = val;
2718 char_mode = insn_data[(int) code].operand[2].mode;
2719 if (char_mode != VOIDmode)
2721 opchar = convert_to_mode (char_mode, opchar, 1);
2722 pred = insn_data[(int) code].operand[2].predicate;
2723 if (pred != 0 && ! (*pred) (opchar, char_mode))
2724 opchar = copy_to_mode_reg (char_mode, opchar);
2727 if (insn_data[(int) code].n_operands == 4)
2728 pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign);
2729 else
2730 pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign,
2731 GEN_INT (expected_align),
2732 GEN_INT (expected_size));
2733 if (pat)
2735 emit_insn (pat);
2736 return true;
2738 else
2739 delete_insns_since (last);
2743 return false;
2747 /* Write to one of the components of the complex value CPLX. Write VAL to
2748 the real part if IMAG_P is false, and the imaginary part if it's true. */
2750 static void
2751 write_complex_part (rtx cplx, rtx val, bool imag_p)
2753 enum machine_mode cmode;
2754 enum machine_mode imode;
2755 unsigned ibitsize;
2757 if (GET_CODE (cplx) == CONCAT)
2759 emit_move_insn (XEXP (cplx, imag_p), val);
2760 return;
2763 cmode = GET_MODE (cplx);
2764 imode = GET_MODE_INNER (cmode);
2765 ibitsize = GET_MODE_BITSIZE (imode);
2767 /* For MEMs simplify_gen_subreg may generate an invalid new address
2768 because, e.g., the original address is considered mode-dependent
2769 by the target, which restricts simplify_subreg from invoking
2770 adjust_address_nv. Instead of preparing fallback support for an
2771 invalid address, we call adjust_address_nv directly. */
2772 if (MEM_P (cplx))
2774 emit_move_insn (adjust_address_nv (cplx, imode,
2775 imag_p ? GET_MODE_SIZE (imode) : 0),
2776 val);
2777 return;
2780 /* If the sub-object is at least word sized, then we know that subregging
2781 will work. This special case is important, since store_bit_field
2782 wants to operate on integer modes, and there's rarely an OImode to
2783 correspond to TCmode. */
2784 if (ibitsize >= BITS_PER_WORD
2785 /* For hard regs we have exact predicates. Assume we can split
2786 the original object if it spans an even number of hard regs.
2787 This special case is important for SCmode on 64-bit platforms
2788 where the natural size of floating-point regs is 32-bit. */
2789 || (REG_P (cplx)
2790 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2791 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2793 rtx part = simplify_gen_subreg (imode, cplx, cmode,
2794 imag_p ? GET_MODE_SIZE (imode) : 0);
2795 if (part)
2797 emit_move_insn (part, val);
2798 return;
2800 else
2801 /* simplify_gen_subreg may fail for sub-word MEMs. */
2802 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2805 store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, imode, val);
2808 /* Extract one of the components of the complex value CPLX. Extract the
2809 real part if IMAG_P is false, and the imaginary part if it's true. */
2811 static rtx
2812 read_complex_part (rtx cplx, bool imag_p)
2814 enum machine_mode cmode, imode;
2815 unsigned ibitsize;
2817 if (GET_CODE (cplx) == CONCAT)
2818 return XEXP (cplx, imag_p);
2820 cmode = GET_MODE (cplx);
2821 imode = GET_MODE_INNER (cmode);
2822 ibitsize = GET_MODE_BITSIZE (imode);
2824 /* Special case reads from complex constants that got spilled to memory. */
2825 if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
2827 tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
2828 if (decl && TREE_CODE (decl) == COMPLEX_CST)
2830 tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
2831 if (CONSTANT_CLASS_P (part))
2832 return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
2836 /* For MEMs simplify_gen_subreg may generate an invalid new address
2837 because, e.g., the original address is considered mode-dependent
2838 by the target, which restricts simplify_subreg from invoking
2839 adjust_address_nv. Instead of preparing fallback support for an
2840 invalid address, we call adjust_address_nv directly. */
2841 if (MEM_P (cplx))
2842 return adjust_address_nv (cplx, imode,
2843 imag_p ? GET_MODE_SIZE (imode) : 0);
2845 /* If the sub-object is at least word sized, then we know that subregging
2846 will work. This special case is important, since extract_bit_field
2847 wants to operate on integer modes, and there's rarely an OImode to
2848 correspond to TCmode. */
2849 if (ibitsize >= BITS_PER_WORD
2850 /* For hard regs we have exact predicates. Assume we can split
2851 the original object if it spans an even number of hard regs.
2852 This special case is important for SCmode on 64-bit platforms
2853 where the natural size of floating-point regs is 32-bit. */
2854 || (REG_P (cplx)
2855 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2856 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2858 rtx ret = simplify_gen_subreg (imode, cplx, cmode,
2859 imag_p ? GET_MODE_SIZE (imode) : 0);
2860 if (ret)
2861 return ret;
2862 else
2863 /* simplify_gen_subreg may fail for sub-word MEMs. */
2864 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2867 return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
2868 true, NULL_RTX, imode, imode);
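/* Illustrative sketch, not part of the original source: using the two
   helpers above to exchange the real and imaginary parts of a complex
   value, reading both parts before overwriting either.  Guarded by #if 0
   so it is never compiled.  */
#if 0
static void
example_swap_complex_parts (rtx cplx)
{
  enum machine_mode imode = GET_MODE_INNER (GET_MODE (cplx));
  rtx re = force_reg (imode, read_complex_part (cplx, false));
  rtx im = force_reg (imode, read_complex_part (cplx, true));

  write_complex_part (cplx, im, false);
  write_complex_part (cplx, re, true);
}
#endif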
2871 /* A subroutine of emit_move_insn_1. Yet another lowpart generator.
2872 NEW_MODE and OLD_MODE are the same size. Return NULL if X cannot be
2873 represented in NEW_MODE. If FORCE is true, this will never happen, as
2874 we'll force-create a SUBREG if needed. */
2876 static rtx
2877 emit_move_change_mode (enum machine_mode new_mode,
2878 enum machine_mode old_mode, rtx x, bool force)
2880 rtx ret;
2882 if (MEM_P (x))
2884 /* We don't have to worry about changing the address since the
2885 size in bytes is supposed to be the same. */
2886 if (reload_in_progress)
2888 /* Copy the MEM to change the mode and move any
2889 substitutions from the old MEM to the new one. */
2890 ret = adjust_address_nv (x, new_mode, 0);
2891 copy_replacements (x, ret);
2893 else
2894 ret = adjust_address (x, new_mode, 0);
2896 else
2898 /* Note that we do want simplify_subreg's behavior of validating
2899 that the new mode is ok for a hard register. If we were to use
2900 simplify_gen_subreg, we would create the subreg, but would
2901 probably run into the target not being able to implement it. */
2902 /* Except, of course, when FORCE is true, when this is exactly what
2903 we want. Which is needed for CCmodes on some targets. */
2904 if (force)
2905 ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
2906 else
2907 ret = simplify_subreg (new_mode, x, old_mode, 0);
2910 return ret;
2913 /* A subroutine of emit_move_insn_1. Generate a move from Y into X using
2914 an integer mode of the same size as MODE. Returns the instruction
2915 emitted, or NULL if such a move could not be generated. */
2917 static rtx
2918 emit_move_via_integer (enum machine_mode mode, rtx x, rtx y, bool force)
2920 enum machine_mode imode;
2921 enum insn_code code;
2923 /* There must exist a mode of the exact size we require. */
2924 imode = int_mode_for_mode (mode);
2925 if (imode == BLKmode)
2926 return NULL_RTX;
2928 /* The target must support moves in this mode. */
2929 code = mov_optab->handlers[imode].insn_code;
2930 if (code == CODE_FOR_nothing)
2931 return NULL_RTX;
2933 x = emit_move_change_mode (imode, mode, x, force);
2934 if (x == NULL_RTX)
2935 return NULL_RTX;
2936 y = emit_move_change_mode (imode, mode, y, force);
2937 if (y == NULL_RTX)
2938 return NULL_RTX;
2939 return emit_insn (GEN_FCN (code) (x, y));
2942 /* A subroutine of emit_move_insn_1. X is a push_operand in MODE.
2943 Return an equivalent MEM that does not use an auto-increment. */
2945 static rtx
2946 emit_move_resolve_push (enum machine_mode mode, rtx x)
2948 enum rtx_code code = GET_CODE (XEXP (x, 0));
2949 HOST_WIDE_INT adjust;
2950 rtx temp;
2952 adjust = GET_MODE_SIZE (mode);
2953 #ifdef PUSH_ROUNDING
2954 adjust = PUSH_ROUNDING (adjust);
2955 #endif
2956 if (code == PRE_DEC || code == POST_DEC)
2957 adjust = -adjust;
2958 else if (code == PRE_MODIFY || code == POST_MODIFY)
2960 rtx expr = XEXP (XEXP (x, 0), 1);
2961 HOST_WIDE_INT val;
2963 gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
2964 gcc_assert (GET_CODE (XEXP (expr, 1)) == CONST_INT);
2965 val = INTVAL (XEXP (expr, 1));
2966 if (GET_CODE (expr) == MINUS)
2967 val = -val;
2968 gcc_assert (adjust == val || adjust == -val);
2969 adjust = val;
2972 /* Do not use anti_adjust_stack, since we don't want to update
2973 stack_pointer_delta. */
2974 temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
2975 GEN_INT (adjust), stack_pointer_rtx,
2976 0, OPTAB_LIB_WIDEN);
2977 if (temp != stack_pointer_rtx)
2978 emit_move_insn (stack_pointer_rtx, temp);
2980 switch (code)
2982 case PRE_INC:
2983 case PRE_DEC:
2984 case PRE_MODIFY:
2985 temp = stack_pointer_rtx;
2986 break;
2987 case POST_INC:
2988 case POST_DEC:
2989 case POST_MODIFY:
2990 temp = plus_constant (stack_pointer_rtx, -adjust);
2991 break;
2992 default:
2993 gcc_unreachable ();
2996 return replace_equiv_address (x, temp);
2999 /* A subroutine of emit_move_complex. Generate a move from Y into X.
3000 X is known to satisfy push_operand, and MODE is known to be complex.
3001 Returns the last instruction emitted. */
3003 static rtx
3004 emit_move_complex_push (enum machine_mode mode, rtx x, rtx y)
3006 enum machine_mode submode = GET_MODE_INNER (mode);
3007 bool imag_first;
3009 #ifdef PUSH_ROUNDING
3010 unsigned int submodesize = GET_MODE_SIZE (submode);
3012 /* In case we output to the stack but the size is smaller than what
3013 the machine can push exactly, we need to use move instructions. */
3014 if (PUSH_ROUNDING (submodesize) != submodesize)
3016 x = emit_move_resolve_push (mode, x);
3017 return emit_move_insn (x, y);
3019 #endif
3021 /* Note that the real part always precedes the imag part in memory
3022 regardless of the machine's endianness. */
3023 switch (GET_CODE (XEXP (x, 0)))
3025 case PRE_DEC:
3026 case POST_DEC:
3027 imag_first = true;
3028 break;
3029 case PRE_INC:
3030 case POST_INC:
3031 imag_first = false;
3032 break;
3033 default:
3034 gcc_unreachable ();
3037 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3038 read_complex_part (y, imag_first));
3039 return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3040 read_complex_part (y, !imag_first));
3043 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3044 MODE is known to be complex. Returns the last instruction emitted. */
3046 static rtx
3047 emit_move_complex (enum machine_mode mode, rtx x, rtx y)
3049 bool try_int;
3051 /* Need to take special care for pushes, to maintain proper ordering
3052 of the data, and possibly extra padding. */
3053 if (push_operand (x, mode))
3054 return emit_move_complex_push (mode, x, y);
3056 /* See if we can coerce the target into moving both values at once. */
3058 /* Move floating point as parts. */
3059 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
3060 && mov_optab->handlers[GET_MODE_INNER (mode)].insn_code != CODE_FOR_nothing)
3061 try_int = false;
3062 /* Not possible if the values are inherently not adjacent. */
3063 else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
3064 try_int = false;
3065 /* Is possible if both are registers (or subregs of registers). */
3066 else if (register_operand (x, mode) && register_operand (y, mode))
3067 try_int = true;
3068 /* If one of the operands is a memory, and alignment constraints
3069 are friendly enough, we may be able to do combined memory operations.
3070 We do not attempt this if Y is a constant because that combination is
3071 usually better with the by-parts thing below. */
3072 else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
3073 && (!STRICT_ALIGNMENT
3074 || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
3075 try_int = true;
3076 else
3077 try_int = false;
3079 if (try_int)
3081 rtx ret;
3083 /* For memory to memory moves, optimal behavior can be had with the
3084 existing block move logic. */
3085 if (MEM_P (x) && MEM_P (y))
3087 emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
3088 BLOCK_OP_NO_LIBCALL);
3089 return get_last_insn ();
3092 ret = emit_move_via_integer (mode, x, y, true);
3093 if (ret)
3094 return ret;
3097 /* Show the output dies here. This is necessary for SUBREGs
3098 of pseudos since we cannot track their lifetimes correctly;
3099 hard regs shouldn't appear here except as return values. */
3100 if (!reload_completed && !reload_in_progress
3101 && REG_P (x) && !reg_overlap_mentioned_p (x, y))
3102 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3104 write_complex_part (x, read_complex_part (y, false), false);
3105 write_complex_part (x, read_complex_part (y, true), true);
3106 return get_last_insn ();
3109 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3110 MODE is known to be MODE_CC. Returns the last instruction emitted. */
3112 static rtx
3113 emit_move_ccmode (enum machine_mode mode, rtx x, rtx y)
3115 rtx ret;
3117 /* Assume all MODE_CC modes are equivalent; if we have movcc, use it. */
3118 if (mode != CCmode)
3120 enum insn_code code = mov_optab->handlers[CCmode].insn_code;
3121 if (code != CODE_FOR_nothing)
3123 x = emit_move_change_mode (CCmode, mode, x, true);
3124 y = emit_move_change_mode (CCmode, mode, y, true);
3125 return emit_insn (GEN_FCN (code) (x, y));
3129 /* Otherwise, find the MODE_INT mode of the same width. */
3130 ret = emit_move_via_integer (mode, x, y, false);
3131 gcc_assert (ret != NULL);
3132 return ret;
3135 /* Return true if word I of OP lies entirely in the
3136 undefined bits of a paradoxical subreg. */
3138 static bool
3139 undefined_operand_subword_p (rtx op, int i)
3141 enum machine_mode innermode, innermostmode;
3142 int offset;
3143 if (GET_CODE (op) != SUBREG)
3144 return false;
3145 innermode = GET_MODE (op);
3146 innermostmode = GET_MODE (SUBREG_REG (op));
3147 offset = i * UNITS_PER_WORD + SUBREG_BYTE (op);
3148 /* The SUBREG_BYTE represents offset, as if the value were stored in
3149 memory, except for a paradoxical subreg where we define
3150 SUBREG_BYTE to be 0; undo this exception as in
3151 simplify_subreg. */
3152 if (SUBREG_BYTE (op) == 0
3153 && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
3155 int difference = (GET_MODE_SIZE (innermostmode) - GET_MODE_SIZE (innermode));
3156 if (WORDS_BIG_ENDIAN)
3157 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
3158 if (BYTES_BIG_ENDIAN)
3159 offset += difference % UNITS_PER_WORD;
3161 if (offset >= GET_MODE_SIZE (innermostmode)
3162 || offset <= -GET_MODE_SIZE (word_mode))
3163 return true;
3164 return false;
3167 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3168 MODE is any multi-word or full-word mode that lacks a move_insn
3169 pattern. Note that you will get better code if you define such
3170 patterns, even if they must turn into multiple assembler instructions. */
3172 static rtx
3173 emit_move_multi_word (enum machine_mode mode, rtx x, rtx y)
3175 rtx last_insn = 0;
3176 rtx seq, inner;
3177 bool need_clobber;
3178 int i;
3180 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
3182 /* If X is a push on the stack, do the push now and replace
3183 X with a reference to the stack pointer. */
3184 if (push_operand (x, mode))
3185 x = emit_move_resolve_push (mode, x);
3187 /* If we are in reload, see if either operand is a MEM whose address
3188 is scheduled for replacement. */
3189 if (reload_in_progress && MEM_P (x)
3190 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3191 x = replace_equiv_address_nv (x, inner);
3192 if (reload_in_progress && MEM_P (y)
3193 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3194 y = replace_equiv_address_nv (y, inner);
3196 start_sequence ();
3198 need_clobber = false;
3199 for (i = 0;
3200 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3201 i++)
3203 rtx xpart = operand_subword (x, i, 1, mode);
3204 rtx ypart;
3206 /* Do not generate code for a move if it would come entirely
3207 from the undefined bits of a paradoxical subreg. */
3208 if (undefined_operand_subword_p (y, i))
3209 continue;
3211 ypart = operand_subword (y, i, 1, mode);
3213 /* If we can't get a part of Y, put Y into memory if it is a
3214 constant. Otherwise, force it into a register. Then we must
3215 be able to get a part of Y. */
3216 if (ypart == 0 && CONSTANT_P (y))
3218 y = use_anchored_address (force_const_mem (mode, y));
3219 ypart = operand_subword (y, i, 1, mode);
3221 else if (ypart == 0)
3222 ypart = operand_subword_force (y, i, mode);
3224 gcc_assert (xpart && ypart);
3226 need_clobber |= (GET_CODE (xpart) == SUBREG);
3228 last_insn = emit_move_insn (xpart, ypart);
3231 seq = get_insns ();
3232 end_sequence ();
3234 /* Show the output dies here. This is necessary for SUBREGs
3235 of pseudos since we cannot track their lifetimes correctly;
3236 hard regs shouldn't appear here except as return values.
3237 We never want to emit such a clobber after reload. */
3238 if (x != y
3239 && ! (reload_in_progress || reload_completed)
3240 && need_clobber != 0)
3241 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3243 emit_insn (seq);
3245 return last_insn;
3248 /* Low level part of emit_move_insn.
3249 Called just like emit_move_insn, but assumes X and Y
3250 are basically valid. */
3253 emit_move_insn_1 (rtx x, rtx y)
3255 enum machine_mode mode = GET_MODE (x);
3256 enum insn_code code;
3258 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3260 code = mov_optab->handlers[mode].insn_code;
3261 if (code != CODE_FOR_nothing)
3262 return emit_insn (GEN_FCN (code) (x, y));
3264 /* Expand complex moves by moving real part and imag part. */
3265 if (COMPLEX_MODE_P (mode))
3266 return emit_move_complex (mode, x, y);
3268 if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT)
3270 rtx result = emit_move_via_integer (mode, x, y, true);
3272 /* If we can't find an integer mode, use multi words. */
3273 if (result)
3274 return result;
3275 else
3276 return emit_move_multi_word (mode, x, y);
3279 if (GET_MODE_CLASS (mode) == MODE_CC)
3280 return emit_move_ccmode (mode, x, y);
3282 /* Try using a move pattern for the corresponding integer mode. This is
3283 only safe when simplify_subreg can convert MODE constants into integer
3284 constants. At present, it can only do this reliably if the value
3285 fits within a HOST_WIDE_INT. */
3286 if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3288 rtx ret = emit_move_via_integer (mode, x, y, false);
3289 if (ret)
3290 return ret;
3293 return emit_move_multi_word (mode, x, y);
3296 /* Generate code to copy Y into X.
3297 Both Y and X must have the same mode, except that
3298 Y can be a constant with VOIDmode.
3299 This mode cannot be BLKmode; use emit_block_move for that.
3301 Return the last instruction emitted. */
3304 emit_move_insn (rtx x, rtx y)
3306 enum machine_mode mode = GET_MODE (x);
3307 rtx y_cst = NULL_RTX;
3308 rtx last_insn, set;
3310 gcc_assert (mode != BLKmode
3311 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3313 if (CONSTANT_P (y))
3315 if (optimize
3316 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3317 && (last_insn = compress_float_constant (x, y)))
3318 return last_insn;
3320 y_cst = y;
3322 if (!LEGITIMATE_CONSTANT_P (y))
3324 y = force_const_mem (mode, y);
3326 /* If the target's cannot_force_const_mem prevented the spill,
3327 assume that the target's move expanders will also take care
3328 of the non-legitimate constant. */
3329 if (!y)
3330 y = y_cst;
3331 else
3332 y = use_anchored_address (y);
3336 /* If X or Y are memory references, verify that their addresses are valid
3337 for the machine. */
3338 if (MEM_P (x)
3339 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
3340 && ! push_operand (x, GET_MODE (x)))
3341 || (flag_force_addr
3342 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
3343 x = validize_mem (x);
3345 if (MEM_P (y)
3346 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
3347 || (flag_force_addr
3348 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
3349 y = validize_mem (y);
3351 gcc_assert (mode != BLKmode);
3353 last_insn = emit_move_insn_1 (x, y);
3355 if (y_cst && REG_P (x)
3356 && (set = single_set (last_insn)) != NULL_RTX
3357 && SET_DEST (set) == x
3358 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3359 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
3361 return last_insn;
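/* Illustrative sketch, not part of the original source: the common pattern
   of materializing a constant in a fresh pseudo with emit_move_insn,
   relying on the force_const_mem / REG_EQUAL handling above.  SImode and
   the value 42 are arbitrary choices; guarded by #if 0 so it is never
   compiled.  */
#if 0
static rtx
example_load_constant (void)
{
  rtx reg = gen_reg_rtx (SImode);

  emit_move_insn (reg, GEN_INT (42));
  return reg;
}
#endif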
3364 /* If Y is representable exactly in a narrower mode, and the target can
3365 perform the extension directly from constant or memory, then emit the
3366 move as an extension. */
3368 static rtx
3369 compress_float_constant (rtx x, rtx y)
3371 enum machine_mode dstmode = GET_MODE (x);
3372 enum machine_mode orig_srcmode = GET_MODE (y);
3373 enum machine_mode srcmode;
3374 REAL_VALUE_TYPE r;
3375 int oldcost, newcost;
3377 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3379 if (LEGITIMATE_CONSTANT_P (y))
3380 oldcost = rtx_cost (y, SET);
3381 else
3382 oldcost = rtx_cost (force_const_mem (dstmode, y), SET);
3384 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3385 srcmode != orig_srcmode;
3386 srcmode = GET_MODE_WIDER_MODE (srcmode))
3388 enum insn_code ic;
3389 rtx trunc_y, last_insn;
3391 /* Skip if the target can't extend this way. */
3392 ic = can_extend_p (dstmode, srcmode, 0);
3393 if (ic == CODE_FOR_nothing)
3394 continue;
3396 /* Skip if the narrowed value isn't exact. */
3397 if (! exact_real_truncate (srcmode, &r))
3398 continue;
3400 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3402 if (LEGITIMATE_CONSTANT_P (trunc_y))
3404 /* Skip if the target needs extra instructions to perform
3405 the extension. */
3406 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3407 continue;
3408 /* This is valid, but may not be cheaper than the original. */
3409 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET);
3410 if (oldcost < newcost)
3411 continue;
3413 else if (float_extend_from_mem[dstmode][srcmode])
3415 trunc_y = force_const_mem (srcmode, trunc_y);
3416 /* This is valid, but may not be cheaper than the original. */
3417 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET);
3418 if (oldcost < newcost)
3419 continue;
3420 trunc_y = validize_mem (trunc_y);
3422 else
3423 continue;
3425 /* For CSE's benefit, force the compressed constant pool entry
3426 into a new pseudo. This constant may be used in different modes,
3427 and if not, combine will put things back together for us. */
3428 trunc_y = force_reg (srcmode, trunc_y);
3429 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3430 last_insn = get_last_insn ();
3432 if (REG_P (x))
3433 set_unique_reg_note (last_insn, REG_EQUAL, y);
3435 return last_insn;
3438 return NULL_RTX;
3441 /* Pushing data onto the stack. */
3443 /* Push a block of length SIZE (perhaps variable)
3444 and return an rtx to address the beginning of the block.
3445 The value may be virtual_outgoing_args_rtx.
3447 EXTRA is the number of bytes of padding to push in addition to SIZE.
3448 BELOW nonzero means this padding comes at low addresses;
3449 otherwise, the padding comes at high addresses. */
3452 push_block (rtx size, int extra, int below)
3454 rtx temp;
3456 size = convert_modes (Pmode, ptr_mode, size, 1);
3457 if (CONSTANT_P (size))
3458 anti_adjust_stack (plus_constant (size, extra));
3459 else if (REG_P (size) && extra == 0)
3460 anti_adjust_stack (size);
3461 else
3463 temp = copy_to_mode_reg (Pmode, size);
3464 if (extra != 0)
3465 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3466 temp, 0, OPTAB_LIB_WIDEN);
3467 anti_adjust_stack (temp);
3470 #ifndef STACK_GROWS_DOWNWARD
3471 if (0)
3472 #else
3473 if (1)
3474 #endif
3476 temp = virtual_outgoing_args_rtx;
3477 if (extra != 0 && below)
3478 temp = plus_constant (temp, extra);
3480 else
3482 if (GET_CODE (size) == CONST_INT)
3483 temp = plus_constant (virtual_outgoing_args_rtx,
3484 -INTVAL (size) - (below ? 0 : extra));
3485 else if (extra != 0 && !below)
3486 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3487 negate_rtx (Pmode, plus_constant (size, extra)));
3488 else
3489 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3490 negate_rtx (Pmode, size));
3493 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3496 #ifdef PUSH_ROUNDING
3498 /* Emit single push insn. */
3500 static void
3501 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3503 rtx dest_addr;
3504 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3505 rtx dest;
3506 enum insn_code icode;
3507 insn_operand_predicate_fn pred;
3509 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3510 /* If there is a push pattern, use it. Otherwise try the old way of
3511 throwing a MEM representing the push operation to the move expander. */
3512 icode = push_optab->handlers[(int) mode].insn_code;
3513 if (icode != CODE_FOR_nothing)
3515 if (((pred = insn_data[(int) icode].operand[0].predicate)
3516 && !((*pred) (x, mode))))
3517 x = force_reg (mode, x);
3518 emit_insn (GEN_FCN (icode) (x));
3519 return;
3521 if (GET_MODE_SIZE (mode) == rounded_size)
3522 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3523 /* If we are to pad downward, adjust the stack pointer first and
3524 then store X into the stack location using an offset. This is
3525 because emit_move_insn does not know how to pad; it does not have
3526 access to type. */
3527 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3529 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3530 HOST_WIDE_INT offset;
3532 emit_move_insn (stack_pointer_rtx,
3533 expand_binop (Pmode,
3534 #ifdef STACK_GROWS_DOWNWARD
3535 sub_optab,
3536 #else
3537 add_optab,
3538 #endif
3539 stack_pointer_rtx,
3540 GEN_INT (rounded_size),
3541 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3543 offset = (HOST_WIDE_INT) padding_size;
3544 #ifdef STACK_GROWS_DOWNWARD
3545 if (STACK_PUSH_CODE == POST_DEC)
3546 /* We have already decremented the stack pointer, so get the
3547 previous value. */
3548 offset += (HOST_WIDE_INT) rounded_size;
3549 #else
3550 if (STACK_PUSH_CODE == POST_INC)
3551 /* We have already incremented the stack pointer, so get the
3552 previous value. */
3553 offset -= (HOST_WIDE_INT) rounded_size;
3554 #endif
3555 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3557 else
3559 #ifdef STACK_GROWS_DOWNWARD
3560 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3561 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3562 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3563 #else
3564 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3565 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3566 GEN_INT (rounded_size));
3567 #endif
3568 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3571 dest = gen_rtx_MEM (mode, dest_addr);
3573 if (type != 0)
3575 set_mem_attributes (dest, type, 1);
3577 if (flag_optimize_sibling_calls)
3578 /* Function incoming arguments may overlap with sibling call
3579 outgoing arguments and we cannot allow reordering of reads
3580 from function arguments with stores to outgoing arguments
3581 of sibling calls. */
3582 set_mem_alias_set (dest, 0);
3584 emit_move_insn (dest, x);
3586 #endif
3588 /* Generate code to push X onto the stack, assuming it has mode MODE and
3589 type TYPE.
3590 MODE is redundant except when X is a CONST_INT (since they don't
3591 carry mode info).
3592 SIZE is an rtx for the size of data to be copied (in bytes),
3593 needed only if X is BLKmode.
3595 ALIGN (in bits) is maximum alignment we can assume.
3597 If PARTIAL and REG are both nonzero, then copy that many of the first
3598 bytes of X into registers starting with REG, and push the rest of X.
3599 The amount of space pushed is decreased by PARTIAL bytes.
3600 REG must be a hard register in this case.
3601 If REG is zero but PARTIAL is not, take all other actions for an
3602 argument partially in registers, but do not actually load any
3603 registers.
3605 EXTRA is the amount in bytes of extra space to leave next to this arg.
3606 This is ignored if an argument block has already been allocated.
3608 On a machine that lacks real push insns, ARGS_ADDR is the address of
3609 the bottom of the argument block for this call. We use indexing off there
3610 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3611 argument block has not been preallocated.
3613 ARGS_SO_FAR is the size of args previously pushed for this call.
3615 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3616 for arguments passed in registers. If nonzero, it will be the number
3617 of bytes required. */
3619 void
3620 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3621 unsigned int align, int partial, rtx reg, int extra,
3622 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3623 rtx alignment_pad)
3625 rtx xinner;
3626 enum direction stack_direction
3627 #ifdef STACK_GROWS_DOWNWARD
3628 = downward;
3629 #else
3630 = upward;
3631 #endif
3633 /* Decide where to pad the argument: `downward' for below,
3634 `upward' for above, or `none' for don't pad it.
3635 Default is below for small data on big-endian machines; else above. */
3636 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3638 /* Invert direction if stack is post-decrement.
3639 FIXME: why? */
3640 if (STACK_PUSH_CODE == POST_DEC)
3641 if (where_pad != none)
3642 where_pad = (where_pad == downward ? upward : downward);
3644 xinner = x;
3646 if (mode == BLKmode)
3648 /* Copy a block into the stack, entirely or partially. */
3650 rtx temp;
3651 int used;
3652 int offset;
3653 int skip;
3655 offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3656 used = partial - offset;
3658 gcc_assert (size);
3660 /* USED is now the # of bytes we need not copy to the stack
3661 because registers will take care of them. */
3663 if (partial != 0)
3664 xinner = adjust_address (xinner, BLKmode, used);
3666 /* If the partial register-part of the arg counts in its stack size,
3667 skip the part of stack space corresponding to the registers.
3668 Otherwise, start copying to the beginning of the stack space,
3669 by setting SKIP to 0. */
3670 skip = (reg_parm_stack_space == 0) ? 0 : used;
3672 #ifdef PUSH_ROUNDING
3673 /* Do it with several push insns if that doesn't take lots of insns
3674 and if there is no difficulty with push insns that skip bytes
3675 on the stack for alignment purposes. */
3676 if (args_addr == 0
3677 && PUSH_ARGS
3678 && GET_CODE (size) == CONST_INT
3679 && skip == 0
3680 && MEM_ALIGN (xinner) >= align
3681 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3682 /* Here we avoid the case of a structure whose weak alignment
3683 forces many pushes of a small amount of data,
3684 and such small pushes do rounding that causes trouble. */
3685 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3686 || align >= BIGGEST_ALIGNMENT
3687 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3688 == (align / BITS_PER_UNIT)))
3689 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3691 /* Push padding now if padding above and stack grows down,
3692 or if padding below and stack grows up.
3693 But if space already allocated, this has already been done. */
3694 if (extra && args_addr == 0
3695 && where_pad != none && where_pad != stack_direction)
3696 anti_adjust_stack (GEN_INT (extra));
3698 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3700 else
3701 #endif /* PUSH_ROUNDING */
3703 rtx target;
3705 /* Otherwise make space on the stack and copy the data
3706 to the address of that space. */
3708 /* Deduct words put into registers from the size we must copy. */
3709 if (partial != 0)
3711 if (GET_CODE (size) == CONST_INT)
3712 size = GEN_INT (INTVAL (size) - used);
3713 else
3714 size = expand_binop (GET_MODE (size), sub_optab, size,
3715 GEN_INT (used), NULL_RTX, 0,
3716 OPTAB_LIB_WIDEN);
3719 /* Get the address of the stack space.
3720 In this case, we do not deal with EXTRA separately.
3721 A single stack adjust will do. */
3722 if (! args_addr)
3724 temp = push_block (size, extra, where_pad == downward);
3725 extra = 0;
3727 else if (GET_CODE (args_so_far) == CONST_INT)
3728 temp = memory_address (BLKmode,
3729 plus_constant (args_addr,
3730 skip + INTVAL (args_so_far)));
3731 else
3732 temp = memory_address (BLKmode,
3733 plus_constant (gen_rtx_PLUS (Pmode,
3734 args_addr,
3735 args_so_far),
3736 skip));
3738 if (!ACCUMULATE_OUTGOING_ARGS)
3740 /* If the source is referenced relative to the stack pointer,
3741 copy it to another register to stabilize it. We do not need
3742 to do this if we know that we won't be changing sp. */
3744 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3745 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3746 temp = copy_to_reg (temp);
3749 target = gen_rtx_MEM (BLKmode, temp);
3751 /* We do *not* set_mem_attributes here, because incoming arguments
3752 may overlap with sibling call outgoing arguments and we cannot
3753 allow reordering of reads from function arguments with stores
3754 to outgoing arguments of sibling calls. We do, however, want
3755 to record the alignment of the stack slot. */
3756 /* ALIGN may well be better aligned than TYPE, e.g. due to
3757 PARM_BOUNDARY. Assume the caller isn't lying. */
3758 set_mem_align (target, align);
3760 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3763 else if (partial > 0)
3765 /* Scalar partly in registers. */
3767 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3768 int i;
3769 int not_stack;
3770 /* # bytes of start of argument
3771 that we must make space for but need not store. */
3772 int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3773 int args_offset = INTVAL (args_so_far);
3774 int skip;
3776 /* Push padding now if padding above and stack grows down,
3777 or if padding below and stack grows up.
3778 But if space already allocated, this has already been done. */
3779 if (extra && args_addr == 0
3780 && where_pad != none && where_pad != stack_direction)
3781 anti_adjust_stack (GEN_INT (extra));
3783 /* If we make space by pushing it, we might as well push
3784 the real data. Otherwise, we can leave OFFSET nonzero
3785 and leave the space uninitialized. */
3786 if (args_addr == 0)
3787 offset = 0;
3789 /* Now NOT_STACK gets the number of words that we don't need to
3790 allocate on the stack. Convert OFFSET to words too. */
3791 not_stack = (partial - offset) / UNITS_PER_WORD;
3792 offset /= UNITS_PER_WORD;
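/* Purely illustrative example (not part of the original sources): with
   UNITS_PER_WORD == 4, partial == 8 and OFFSET == 0, NOT_STACK is
   (8 - 0) / 4 == 2, so the first two words of the scalar are already in
   registers and the loop below starts storing at word 2.  */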
3794 /* If the partial register-part of the arg counts in its stack size,
3795 skip the part of stack space corresponding to the registers.
3796 Otherwise, start copying to the beginning of the stack space,
3797 by setting SKIP to 0. */
3798 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3800 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3801 x = validize_mem (force_const_mem (mode, x));
3803 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3804 SUBREGs of such registers are not allowed. */
3805 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3806 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3807 x = copy_to_reg (x);
3809 /* Loop over all the words allocated on the stack for this arg. */
3810 /* We can do it by words, because any scalar bigger than a word
3811 has a size that is a multiple of a word. */
3812 #ifndef PUSH_ARGS_REVERSED
3813 for (i = not_stack; i < size; i++)
3814 #else
3815 for (i = size - 1; i >= not_stack; i--)
3816 #endif
3817 if (i >= not_stack + offset)
3818 emit_push_insn (operand_subword_force (x, i, mode),
3819 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3820 0, args_addr,
3821 GEN_INT (args_offset + ((i - not_stack + skip)
3822 * UNITS_PER_WORD)),
3823 reg_parm_stack_space, alignment_pad);
3825 else
3827 rtx addr;
3828 rtx dest;
3830 /* Push padding now if padding above and stack grows down,
3831 or if padding below and stack grows up.
3832 But if space already allocated, this has already been done. */
3833 if (extra && args_addr == 0
3834 && where_pad != none && where_pad != stack_direction)
3835 anti_adjust_stack (GEN_INT (extra));
3837 #ifdef PUSH_ROUNDING
3838 if (args_addr == 0 && PUSH_ARGS)
3839 emit_single_push_insn (mode, x, type);
3840 else
3841 #endif
3843 if (GET_CODE (args_so_far) == CONST_INT)
3844 addr
3845 = memory_address (mode,
3846 plus_constant (args_addr,
3847 INTVAL (args_so_far)));
3848 else
3849 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3850 args_so_far));
3851 dest = gen_rtx_MEM (mode, addr);
3853 /* We do *not* set_mem_attributes here, because incoming arguments
3854 may overlap with sibling call outgoing arguments and we cannot
3855 allow reordering of reads from function arguments with stores
3856 to outgoing arguments of sibling calls. We do, however, want
3857 to record the alignment of the stack slot. */
3858 /* ALIGN may well be better aligned than TYPE, e.g. due to
3859 PARM_BOUNDARY. Assume the caller isn't lying. */
3860 set_mem_align (dest, align);
3862 emit_move_insn (dest, x);
3866 /* If part should go in registers, copy that part
3867 into the appropriate registers. Do this now, at the end,
3868 since mem-to-mem copies above may do function calls. */
3869 if (partial > 0 && reg != 0)
3871 /* Handle calls that pass values in multiple non-contiguous locations.
3872 The Irix 6 ABI has examples of this. */
3873 if (GET_CODE (reg) == PARALLEL)
3874 emit_group_load (reg, x, type, -1);
3875 else
3877 gcc_assert (partial % UNITS_PER_WORD == 0);
3878 move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode);
3882 if (extra && args_addr == 0 && where_pad == stack_direction)
3883 anti_adjust_stack (GEN_INT (extra));
3885 if (alignment_pad && args_addr == 0)
3886 anti_adjust_stack (alignment_pad);
3889 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3890 operations. */
3892 static rtx
3893 get_subtarget (rtx x)
3895 return (optimize
3896 || x == 0
3897 /* Only registers can be subtargets. */
3898 || !REG_P (x)
3899 /* Don't use hard regs to avoid extending their life. */
3900 || REGNO (x) < FIRST_PSEUDO_REGISTER
3901 ? 0 : x);
3904 /* A subroutine of expand_assignment. Optimize FIELD op= VAL, where
3905 FIELD is a bitfield. Returns true if the optimization was successful,
3906 and there's nothing else to do. */
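/* Illustrative only (not part of the original sources): this path targets
   source-level read-modify-write assignments to bit-fields such as

     struct S { unsigned int flag : 1; unsigned int count : 7; } s;
     s.flag ^= 1;     (1-bit field: a single xor on the containing word)
     s.count += 3;    (field at the top of the word: add without masking)

   where updating the containing word directly avoids an extract/insert
   pair.  Whether a field really is topmost depends on the target's bit
   ordering, so the snippet is only a sketch.  */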
3908 static bool
3909 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
3910 unsigned HOST_WIDE_INT bitpos,
3911 enum machine_mode mode1, rtx str_rtx,
3912 tree to, tree src)
3914 enum machine_mode str_mode = GET_MODE (str_rtx);
3915 unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
3916 tree op0, op1;
3917 rtx value, result;
3918 optab binop;
3920 if (mode1 != VOIDmode
3921 || bitsize >= BITS_PER_WORD
3922 || str_bitsize > BITS_PER_WORD
3923 || TREE_SIDE_EFFECTS (to)
3924 || TREE_THIS_VOLATILE (to))
3925 return false;
3927 STRIP_NOPS (src);
3928 if (!BINARY_CLASS_P (src)
3929 || TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
3930 return false;
3932 op0 = TREE_OPERAND (src, 0);
3933 op1 = TREE_OPERAND (src, 1);
3934 STRIP_NOPS (op0);
3936 if (!operand_equal_p (to, op0, 0))
3937 return false;
3939 if (MEM_P (str_rtx))
3941 unsigned HOST_WIDE_INT offset1;
3943 if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
3944 str_mode = word_mode;
3945 str_mode = get_best_mode (bitsize, bitpos,
3946 MEM_ALIGN (str_rtx), str_mode, 0);
3947 if (str_mode == VOIDmode)
3948 return false;
3949 str_bitsize = GET_MODE_BITSIZE (str_mode);
3951 offset1 = bitpos;
3952 bitpos %= str_bitsize;
3953 offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
3954 str_rtx = adjust_address (str_rtx, str_mode, offset1);
3956 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
3957 return false;
3959 /* If the bit field covers the whole REG/MEM, store_field
3960 will likely generate better code. */
3961 if (bitsize >= str_bitsize)
3962 return false;
3964 /* We can't handle fields split across multiple entities. */
3965 if (bitpos + bitsize > str_bitsize)
3966 return false;
3968 if (BYTES_BIG_ENDIAN)
3969 bitpos = str_bitsize - bitpos - bitsize;
3971 switch (TREE_CODE (src))
3973 case PLUS_EXPR:
3974 case MINUS_EXPR:
3975 /* For now, just optimize the case of the topmost bitfield
3976 where we don't need to do any masking and also
3977 1-bit bitfields, where xor can be used.
3978 We might win by one instruction for the other bitfields
3979 too if insv/extv instructions aren't used, so that
3980 can be added later. */
3981 if (bitpos + bitsize != str_bitsize
3982 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
3983 break;
3985 value = expand_expr (op1, NULL_RTX, str_mode, 0);
3986 value = convert_modes (str_mode,
3987 TYPE_MODE (TREE_TYPE (op1)), value,
3988 TYPE_UNSIGNED (TREE_TYPE (op1)));
3990 /* We may be accessing data outside the field, which means
3991 we can alias adjacent data. */
3992 if (MEM_P (str_rtx))
3994 str_rtx = shallow_copy_rtx (str_rtx);
3995 set_mem_alias_set (str_rtx, 0);
3996 set_mem_expr (str_rtx, 0);
3999 binop = TREE_CODE (src) == PLUS_EXPR ? add_optab : sub_optab;
4000 if (bitsize == 1 && bitpos + bitsize != str_bitsize)
4002 value = expand_and (str_mode, value, const1_rtx, NULL);
4003 binop = xor_optab;
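/* Explanatory note (not in the original sources): for a single-bit field,
   addition and subtraction are both arithmetic modulo 2, so once the addend
   has been masked to its low bit the update reduces to an exclusive or.  */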
4005 value = expand_shift (LSHIFT_EXPR, str_mode, value,
4006 build_int_cst (NULL_TREE, bitpos),
4007 NULL_RTX, 1);
4008 result = expand_binop (str_mode, binop, str_rtx,
4009 value, str_rtx, 1, OPTAB_WIDEN);
4010 if (result != str_rtx)
4011 emit_move_insn (str_rtx, result);
4012 return true;
4014 case BIT_IOR_EXPR:
4015 case BIT_XOR_EXPR:
4016 if (TREE_CODE (op1) != INTEGER_CST)
4017 break;
4018 value = expand_expr (op1, NULL_RTX, GET_MODE (str_rtx), 0);
4019 value = convert_modes (GET_MODE (str_rtx),
4020 TYPE_MODE (TREE_TYPE (op1)), value,
4021 TYPE_UNSIGNED (TREE_TYPE (op1)));
4023 /* We may be accessing data outside the field, which means
4024 we can alias adjacent data. */
4025 if (MEM_P (str_rtx))
4027 str_rtx = shallow_copy_rtx (str_rtx);
4028 set_mem_alias_set (str_rtx, 0);
4029 set_mem_expr (str_rtx, 0);
4032 binop = TREE_CODE (src) == BIT_IOR_EXPR ? ior_optab : xor_optab;
4033 if (bitpos + bitsize != GET_MODE_BITSIZE (GET_MODE (str_rtx)))
4035 rtx mask = GEN_INT (((unsigned HOST_WIDE_INT) 1 << bitsize)
4036 - 1);
4037 value = expand_and (GET_MODE (str_rtx), value, mask,
4038 NULL_RTX);
4040 value = expand_shift (LSHIFT_EXPR, GET_MODE (str_rtx), value,
4041 build_int_cst (NULL_TREE, bitpos),
4042 NULL_RTX, 1);
4043 result = expand_binop (GET_MODE (str_rtx), binop, str_rtx,
4044 value, str_rtx, 1, OPTAB_WIDEN);
4045 if (result != str_rtx)
4046 emit_move_insn (str_rtx, result);
4047 return true;
4049 default:
4050 break;
4053 return false;
4057 /* Expand an assignment that stores the value of FROM into TO. */
4059 void
4060 expand_assignment (tree to, tree from)
4062 rtx to_rtx = 0;
4063 rtx result;
4065 /* Don't crash if the lhs of the assignment was erroneous. */
4066 if (TREE_CODE (to) == ERROR_MARK)
4068 result = expand_normal (from);
4069 return;
4072 /* Optimize away no-op moves without side-effects. */
4073 if (operand_equal_p (to, from, 0))
4074 return;
4076 /* Assignment of a structure component needs special treatment
4077 if the structure component's rtx is not simply a MEM.
4078 Assignment of an array element at a constant index, and assignment of
4079 an array element in an unaligned packed structure field, have the same
4080 problem. */
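/* Illustrative examples of such left-hand sides (not from the original
   sources): s.field = x; a[3] = y; p->bits.flag = 1; where the destination
   is a component, array element or bit-field rather than a whole object.  */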
4081 if (handled_component_p (to)
4082 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4084 enum machine_mode mode1;
4085 HOST_WIDE_INT bitsize, bitpos;
4086 tree offset;
4087 int unsignedp;
4088 int volatilep = 0;
4089 tree tem;
4091 push_temp_slots ();
4092 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4093 &unsignedp, &volatilep, true);
4095 /* If we are going to use store_bit_field and extract_bit_field,
4096 make sure to_rtx will be safe for multiple use. */
4098 to_rtx = expand_normal (tem);
4100 if (offset != 0)
4102 rtx offset_rtx;
4104 if (!MEM_P (to_rtx))
4106 /* We can get constant negative offsets into arrays with broken
4107 user code. Translate this to a trap instead of ICEing. */
4108 gcc_assert (TREE_CODE (offset) == INTEGER_CST);
4109 expand_builtin_trap ();
4110 to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
4113 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4114 #ifdef POINTERS_EXTEND_UNSIGNED
4115 if (GET_MODE (offset_rtx) != Pmode)
4116 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4117 #else
4118 if (GET_MODE (offset_rtx) != ptr_mode)
4119 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4120 #endif
4122 /* A constant address in TO_RTX can have VOIDmode; we must not try
4123 to call force_reg for that case. Avoid that case. */
4124 if (MEM_P (to_rtx)
4125 && GET_MODE (to_rtx) == BLKmode
4126 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
4127 && bitsize > 0
4128 && (bitpos % bitsize) == 0
4129 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4130 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
4132 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4133 bitpos = 0;
4136 to_rtx = offset_address (to_rtx, offset_rtx,
4137 highest_pow2_factor_for_target (to,
4138 offset));
4141 /* Handle expand_expr of a complex value returning a CONCAT. */
4142 if (GET_CODE (to_rtx) == CONCAT)
4144 if (TREE_CODE (TREE_TYPE (from)) == COMPLEX_TYPE)
4146 gcc_assert (bitpos == 0);
4147 result = store_expr (from, to_rtx, false);
4149 else
4151 gcc_assert (bitpos == 0 || bitpos == GET_MODE_BITSIZE (mode1));
4152 result = store_expr (from, XEXP (to_rtx, bitpos != 0), false);
4155 else
4157 if (MEM_P (to_rtx))
4159 /* If the field is at offset zero, we could have been given the
4160 DECL_RTX of the parent struct. Don't munge it. */
4161 to_rtx = shallow_copy_rtx (to_rtx);
4163 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4165 /* Deal with volatile and readonly fields. The former is only
4166 done for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
4167 if (volatilep)
4168 MEM_VOLATILE_P (to_rtx) = 1;
4169 if (component_uses_parent_alias_set (to))
4170 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4173 if (optimize_bitfield_assignment_op (bitsize, bitpos, mode1,
4174 to_rtx, to, from))
4175 result = NULL;
4176 else
4177 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
4178 TREE_TYPE (tem), get_alias_set (to));
4181 if (result)
4182 preserve_temp_slots (result);
4183 free_temp_slots ();
4184 pop_temp_slots ();
4185 return;
4188 /* If the rhs is a function call and its value is not an aggregate,
4189 call the function before we start to compute the lhs.
4190 This is needed for correct code for cases such as
4191 val = setjmp (buf) on machines where reference to val
4192 requires loading up part of an address in a separate insn.
4194 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4195 since it might be a promoted variable where the zero- or sign-extension
4196 needs to be done. Handling this in the normal way is safe because no
4197 computation is done before the call. */
4198 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
4199 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
4200 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
4201 && REG_P (DECL_RTL (to))))
4203 rtx value;
4205 push_temp_slots ();
4206 value = expand_normal (from);
4207 if (to_rtx == 0)
4208 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4210 /* Handle calls that return values in multiple non-contiguous locations.
4211 The Irix 6 ABI has examples of this. */
4212 if (GET_CODE (to_rtx) == PARALLEL)
4213 emit_group_load (to_rtx, value, TREE_TYPE (from),
4214 int_size_in_bytes (TREE_TYPE (from)));
4215 else if (GET_MODE (to_rtx) == BLKmode)
4216 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
4217 else
4219 if (POINTER_TYPE_P (TREE_TYPE (to)))
4220 value = convert_memory_address (GET_MODE (to_rtx), value);
4221 emit_move_insn (to_rtx, value);
4223 preserve_temp_slots (to_rtx);
4224 free_temp_slots ();
4225 pop_temp_slots ();
4226 return;
4229 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
4230 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
4232 if (to_rtx == 0)
4233 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4235 /* Don't move directly into a return register. */
4236 if (TREE_CODE (to) == RESULT_DECL
4237 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
4239 rtx temp;
4241 push_temp_slots ();
4242 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
4244 if (GET_CODE (to_rtx) == PARALLEL)
4245 emit_group_load (to_rtx, temp, TREE_TYPE (from),
4246 int_size_in_bytes (TREE_TYPE (from)));
4247 else
4248 emit_move_insn (to_rtx, temp);
4250 preserve_temp_slots (to_rtx);
4251 free_temp_slots ();
4252 pop_temp_slots ();
4253 return;
4256 /* In case we are returning the contents of an object which overlaps
4257 the place the value is being stored, use a safe function when copying
4258 a value through a pointer into a structure value return block. */
4259 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
4260 && current_function_returns_struct
4261 && !current_function_returns_pcc_struct)
4263 rtx from_rtx, size;
4265 push_temp_slots ();
4266 size = expr_size (from);
4267 from_rtx = expand_normal (from);
4269 emit_library_call (memmove_libfunc, LCT_NORMAL,
4270 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4271 XEXP (from_rtx, 0), Pmode,
4272 convert_to_mode (TYPE_MODE (sizetype),
4273 size, TYPE_UNSIGNED (sizetype)),
4274 TYPE_MODE (sizetype));
4276 preserve_temp_slots (to_rtx);
4277 free_temp_slots ();
4278 pop_temp_slots ();
4279 return;
4282 /* Compute FROM and store the value in the rtx we got. */
4284 push_temp_slots ();
4285 result = store_expr (from, to_rtx, 0);
4286 preserve_temp_slots (result);
4287 free_temp_slots ();
4288 pop_temp_slots ();
4289 return;
4292 /* Generate code for computing expression EXP,
4293 and storing the value into TARGET.
4295 If the mode is BLKmode then we may return TARGET itself.
4296 It turns out that in BLKmode it doesn't cause a problem,
4297 because C has no operators that could combine two different
4298 assignments into the same BLKmode object with different values
4299 with no sequence point. Will other languages need this to
4300 be more thorough?
4302 If CALL_PARAM_P is nonzero, this is a store into a call param on the
4303 stack, and block moves may need to be treated specially. */
4305 rtx
4306 store_expr (tree exp, rtx target, int call_param_p)
4308 rtx temp;
4309 rtx alt_rtl = NULL_RTX;
4310 int dont_return_target = 0;
4312 if (VOID_TYPE_P (TREE_TYPE (exp)))
4314 /* C++ can generate ?: expressions with a throw expression in one
4315 branch and an rvalue in the other. Here, we resolve attempts to
4316 store the throw expression's nonexistent result. */
4317 gcc_assert (!call_param_p);
4318 expand_expr (exp, const0_rtx, VOIDmode, 0);
4319 return NULL_RTX;
4321 if (TREE_CODE (exp) == COMPOUND_EXPR)
4323 /* Perform first part of compound expression, then assign from second
4324 part. */
4325 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4326 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4327 return store_expr (TREE_OPERAND (exp, 1), target, call_param_p);
4329 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4331 /* For conditional expression, get safe form of the target. Then
4332 test the condition, doing the appropriate assignment on either
4333 side. This avoids the creation of unnecessary temporaries.
4334 For non-BLKmode, it is more efficient not to do this. */
4336 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4338 do_pending_stack_adjust ();
4339 NO_DEFER_POP;
4340 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4341 store_expr (TREE_OPERAND (exp, 1), target, call_param_p);
4342 emit_jump_insn (gen_jump (lab2));
4343 emit_barrier ();
4344 emit_label (lab1);
4345 store_expr (TREE_OPERAND (exp, 2), target, call_param_p);
4346 emit_label (lab2);
4347 OK_DEFER_POP;
4349 return NULL_RTX;
4351 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4352 /* If this is a scalar in a register that is stored in a wider mode
4353 than the declared mode, compute the result into its declared mode
4354 and then convert to the wider mode. Our value is the computed
4355 expression. */
4357 rtx inner_target = 0;
4359 /* We can do the conversion inside EXP, which will often result
4360 in some optimizations. Do the conversion in two steps: first
4361 change the signedness, if needed, then the extend. But don't
4362 do this if the type of EXP is a subtype of something else
4363 since then the conversion might involve more than just
4364 converting modes. */
4365 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
4366 && TREE_TYPE (TREE_TYPE (exp)) == 0
4367 && (!lang_hooks.reduce_bit_field_operations
4368 || (GET_MODE_PRECISION (GET_MODE (target))
4369 == TYPE_PRECISION (TREE_TYPE (exp)))))
4371 if (TYPE_UNSIGNED (TREE_TYPE (exp))
4372 != SUBREG_PROMOTED_UNSIGNED_P (target))
4373 exp = fold_convert
4374 (lang_hooks.types.signed_or_unsigned_type
4375 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
4377 exp = fold_convert (lang_hooks.types.type_for_mode
4378 (GET_MODE (SUBREG_REG (target)),
4379 SUBREG_PROMOTED_UNSIGNED_P (target)),
4380 exp);
4382 inner_target = SUBREG_REG (target);
4385 temp = expand_expr (exp, inner_target, VOIDmode,
4386 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4388 /* If TEMP is a VOIDmode constant, use convert_modes to make
4389 sure that we properly convert it. */
4390 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4392 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4393 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4394 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4395 GET_MODE (target), temp,
4396 SUBREG_PROMOTED_UNSIGNED_P (target));
4399 convert_move (SUBREG_REG (target), temp,
4400 SUBREG_PROMOTED_UNSIGNED_P (target));
4402 return NULL_RTX;
4404 else
4406 temp = expand_expr_real (exp, target, GET_MODE (target),
4407 (call_param_p
4408 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
4409 &alt_rtl);
4410 /* Return TARGET if it's a specified hardware register.
4411 If TARGET is a volatile mem ref, either return TARGET
4412 or return a reg copied *from* TARGET; ANSI requires this.
4414 Otherwise, if TEMP is not TARGET, return TEMP
4415 if it is constant (for efficiency),
4416 or if we really want the correct value. */
4417 if (!(target && REG_P (target)
4418 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4419 && !(MEM_P (target) && MEM_VOLATILE_P (target))
4420 && ! rtx_equal_p (temp, target)
4421 && CONSTANT_P (temp))
4422 dont_return_target = 1;
4425 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4426 the same as that of TARGET, adjust the constant. This is needed, for
4427 example, in case it is a CONST_DOUBLE and we want only a word-sized
4428 value. */
4429 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4430 && TREE_CODE (exp) != ERROR_MARK
4431 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4432 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4433 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
4435 /* If value was not generated in the target, store it there.
4436 Convert the value to TARGET's type first if necessary and emit the
4437 pending incrementations that have been queued when expanding EXP.
4438 Note that we cannot emit the whole queue blindly because this will
4439 effectively disable the POST_INC optimization later.
4441 If TEMP and TARGET compare equal according to rtx_equal_p, but
4442 one or both of them are volatile memory refs, we have to distinguish
4443 two cases:
4444 - expand_expr has used TARGET. In this case, we must not generate
4445 another copy. This can be detected by TARGET being equal according
4446 to == .
4447 - expand_expr has not used TARGET - that means that the source just
4448 happens to have the same RTX form. Since temp will have been created
4449 by expand_expr, it will compare unequal according to == .
4450 We must generate a copy in this case, to reach the correct number
4451 of volatile memory references. */
4453 if ((! rtx_equal_p (temp, target)
4454 || (temp != target && (side_effects_p (temp)
4455 || side_effects_p (target))))
4456 && TREE_CODE (exp) != ERROR_MARK
4457 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4458 but TARGET is not a valid memory reference, TEMP will differ
4459 from TARGET although it is really the same location. */
4460 && !(alt_rtl && rtx_equal_p (alt_rtl, target))
4461 /* If there's nothing to copy, don't bother. Don't call
4462 expr_size unless necessary, because the expr_size hook of some
4463 front ends (C++) must not be given objects that are not
4464 supposed to be bit-copied or bit-initialized.
4465 && expr_size (exp) != const0_rtx)
4467 if (GET_MODE (temp) != GET_MODE (target)
4468 && GET_MODE (temp) != VOIDmode)
4470 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
4471 if (dont_return_target)
4473 /* In this case, we will return TEMP,
4474 so make sure it has the proper mode.
4475 But don't forget to store the value into TARGET. */
4476 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4477 emit_move_insn (target, temp);
4479 else
4480 convert_move (target, temp, unsignedp);
4483 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4485 /* Handle copying a string constant into an array. The string
4486 constant may be shorter than the array. So copy just the string's
4487 actual length, and clear the rest. First get the size of the data
4488 type of the string, which is actually the size of the target. */
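/* Purely illustrative (not part of the original sources): for
   char buf[16] = "abc"; the string constant supplies 4 bytes (including
   the terminating NUL), so 4 bytes are block-copied and the remaining
   12 bytes are cleared below.  */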
4489 rtx size = expr_size (exp);
4491 if (GET_CODE (size) == CONST_INT
4492 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4493 emit_block_move (target, temp, size,
4494 (call_param_p
4495 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4496 else
4498 /* Compute the size of the data to copy from the string. */
4499 tree copy_size
4500 = size_binop (MIN_EXPR,
4501 make_tree (sizetype, size),
4502 size_int (TREE_STRING_LENGTH (exp)));
4503 rtx copy_size_rtx
4504 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4505 (call_param_p
4506 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4507 rtx label = 0;
4509 /* Copy that much. */
4510 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4511 TYPE_UNSIGNED (sizetype));
4512 emit_block_move (target, temp, copy_size_rtx,
4513 (call_param_p
4514 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4516 /* Figure out how much is left in TARGET that we have to clear.
4517 Do all calculations in ptr_mode. */
4518 if (GET_CODE (copy_size_rtx) == CONST_INT)
4520 size = plus_constant (size, -INTVAL (copy_size_rtx));
4521 target = adjust_address (target, BLKmode,
4522 INTVAL (copy_size_rtx));
4524 else
4526 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4527 copy_size_rtx, NULL_RTX, 0,
4528 OPTAB_LIB_WIDEN);
4530 #ifdef POINTERS_EXTEND_UNSIGNED
4531 if (GET_MODE (copy_size_rtx) != Pmode)
4532 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4533 TYPE_UNSIGNED (sizetype));
4534 #endif
4536 target = offset_address (target, copy_size_rtx,
4537 highest_pow2_factor (copy_size));
4538 label = gen_label_rtx ();
4539 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4540 GET_MODE (size), 0, label);
4543 if (size != const0_rtx)
4544 clear_storage (target, size, BLOCK_OP_NORMAL);
4546 if (label)
4547 emit_label (label);
4550 /* Handle calls that return values in multiple non-contiguous locations.
4551 The Irix 6 ABI has examples of this. */
4552 else if (GET_CODE (target) == PARALLEL)
4553 emit_group_load (target, temp, TREE_TYPE (exp),
4554 int_size_in_bytes (TREE_TYPE (exp)));
4555 else if (GET_MODE (temp) == BLKmode)
4556 emit_block_move (target, temp, expr_size (exp),
4557 (call_param_p
4558 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4559 else
4561 temp = force_operand (temp, target);
4562 if (temp != target)
4563 emit_move_insn (target, temp);
4567 return NULL_RTX;
4570 /* Helper for categorize_ctor_elements. Identical interface. */
4572 static bool
4573 categorize_ctor_elements_1 (tree ctor, HOST_WIDE_INT *p_nz_elts,
4574 HOST_WIDE_INT *p_elt_count,
4575 bool *p_must_clear)
4577 unsigned HOST_WIDE_INT idx;
4578 HOST_WIDE_INT nz_elts, elt_count;
4579 tree value, purpose;
4581 /* Whether CTOR is a valid constant initializer, in accordance with what
4582 initializer_constant_valid_p does. If inferred from the constructor
4583 elements, true until proven otherwise. */
4584 bool const_from_elts_p = constructor_static_from_elts_p (ctor);
4585 bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);
4587 nz_elts = 0;
4588 elt_count = 0;
4590 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
4592 HOST_WIDE_INT mult;
4594 mult = 1;
4595 if (TREE_CODE (purpose) == RANGE_EXPR)
4597 tree lo_index = TREE_OPERAND (purpose, 0);
4598 tree hi_index = TREE_OPERAND (purpose, 1);
4600 if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
4601 mult = (tree_low_cst (hi_index, 1)
4602 - tree_low_cst (lo_index, 1) + 1);
4605 switch (TREE_CODE (value))
4607 case CONSTRUCTOR:
4609 HOST_WIDE_INT nz = 0, ic = 0;
4611 bool const_elt_p
4612 = categorize_ctor_elements_1 (value, &nz, &ic, p_must_clear);
4614 nz_elts += mult * nz;
4615 elt_count += mult * ic;
4617 if (const_from_elts_p && const_p)
4618 const_p = const_elt_p;
4620 break;
4622 case INTEGER_CST:
4623 case REAL_CST:
4624 if (!initializer_zerop (value))
4625 nz_elts += mult;
4626 elt_count += mult;
4627 break;
4629 case STRING_CST:
4630 nz_elts += mult * TREE_STRING_LENGTH (value);
4631 elt_count += mult * TREE_STRING_LENGTH (value);
4632 break;
4634 case COMPLEX_CST:
4635 if (!initializer_zerop (TREE_REALPART (value)))
4636 nz_elts += mult;
4637 if (!initializer_zerop (TREE_IMAGPART (value)))
4638 nz_elts += mult;
4639 elt_count += mult;
4640 break;
4642 case VECTOR_CST:
4644 tree v;
4645 for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
4647 if (!initializer_zerop (TREE_VALUE (v)))
4648 nz_elts += mult;
4649 elt_count += mult;
4652 break;
4654 default:
4655 nz_elts += mult;
4656 elt_count += mult;
4658 if (const_from_elts_p && const_p)
4659 const_p = initializer_constant_valid_p (value, TREE_TYPE (value))
4660 != NULL_TREE;
4661 break;
4665 if (!*p_must_clear
4666 && (TREE_CODE (TREE_TYPE (ctor)) == UNION_TYPE
4667 || TREE_CODE (TREE_TYPE (ctor)) == QUAL_UNION_TYPE))
4669 tree init_sub_type;
4670 bool clear_this = true;
4672 if (!VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (ctor)))
4674 /* We don't expect more than one element of the union to be
4675 initialized. Not sure what we should do otherwise... */
4676 gcc_assert (VEC_length (constructor_elt, CONSTRUCTOR_ELTS (ctor))
4677 == 1);
4679 init_sub_type = TREE_TYPE (VEC_index (constructor_elt,
4680 CONSTRUCTOR_ELTS (ctor),
4681 0)->value);
4683 /* ??? We could look at each element of the union, and find the
4684 largest element. Which would avoid comparing the size of the
4685 initialized element against any tail padding in the union.
4686 Doesn't seem worth the effort... */
4687 if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (ctor)),
4688 TYPE_SIZE (init_sub_type)) == 1)
4690 /* And now we have to find out if the element itself is fully
4691 constructed. E.g. for union { struct { int a, b; } s; } u
4692 = { .s = { .a = 1 } }. */
4693 if (elt_count == count_type_elements (init_sub_type, false))
4694 clear_this = false;
4698 *p_must_clear = clear_this;
4701 *p_nz_elts += nz_elts;
4702 *p_elt_count += elt_count;
4704 return const_p;
4707 /* Examine CTOR to discover:
4708 * how many scalar fields are set to nonzero values,
4709 and place that count in *P_NZ_ELTS;
4710 * how many scalar fields in total are in CTOR,
4711 and place that count in *P_ELT_COUNT;
4712 * if the type is a union and the initializer from the constructor
4713 is not the largest element in the union, then set *P_MUST_CLEAR.
4715 Return whether or not CTOR is a valid static constant initializer, the same
4716 as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0". */
4718 bool
4719 categorize_ctor_elements (tree ctor, HOST_WIDE_INT *p_nz_elts,
4720 HOST_WIDE_INT *p_elt_count,
4721 bool *p_must_clear)
4723 *p_nz_elts = 0;
4724 *p_elt_count = 0;
4725 *p_must_clear = false;
4727 return
4728 categorize_ctor_elements_1 (ctor, p_nz_elts, p_elt_count, p_must_clear);
4731 /* Count the number of scalars in TYPE. Return -1 on overflow or if
4732 TYPE is variable-sized. If ALLOW_FLEXARR is true, don't count a
4733 flexible array member at the end of the structure. */
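/* Purely illustrative (not part of the original sources): for
   struct { int a; int b[3]; } this returns 4; for a union it guesses from
   the size in words, and for a variable-length array it returns -1.  */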
4735 HOST_WIDE_INT
4736 count_type_elements (tree type, bool allow_flexarr)
4738 const HOST_WIDE_INT max = ~((HOST_WIDE_INT)1 << (HOST_BITS_PER_WIDE_INT-1));
4739 switch (TREE_CODE (type))
4741 case ARRAY_TYPE:
4743 tree telts = array_type_nelts (type);
4744 if (telts && host_integerp (telts, 1))
4746 HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1;
4747 HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type), false);
4748 if (n == 0)
4749 return 0;
4750 else if (max / n > m)
4751 return n * m;
4753 return -1;
4756 case RECORD_TYPE:
4758 HOST_WIDE_INT n = 0, t;
4759 tree f;
4761 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
4762 if (TREE_CODE (f) == FIELD_DECL)
4764 t = count_type_elements (TREE_TYPE (f), false);
4765 if (t < 0)
4767 /* Check for structures with flexible array member. */
4768 tree tf = TREE_TYPE (f);
4769 if (allow_flexarr
4770 && TREE_CHAIN (f) == NULL
4771 && TREE_CODE (tf) == ARRAY_TYPE
4772 && TYPE_DOMAIN (tf)
4773 && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
4774 && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
4775 && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
4776 && int_size_in_bytes (type) >= 0)
4777 break;
4779 return -1;
4781 n += t;
4784 return n;
4787 case UNION_TYPE:
4788 case QUAL_UNION_TYPE:
4790 /* Ho hum. How in the world do we guess here? Clearly it isn't
4791 right to count the fields. Guess based on the number of words. */
4792 HOST_WIDE_INT n = int_size_in_bytes (type);
4793 if (n < 0)
4794 return -1;
4795 return n / UNITS_PER_WORD;
4798 case COMPLEX_TYPE:
4799 return 2;
4801 case VECTOR_TYPE:
4802 return TYPE_VECTOR_SUBPARTS (type);
4804 case INTEGER_TYPE:
4805 case REAL_TYPE:
4806 case ENUMERAL_TYPE:
4807 case BOOLEAN_TYPE:
4808 case POINTER_TYPE:
4809 case OFFSET_TYPE:
4810 case REFERENCE_TYPE:
4811 return 1;
4813 case VOID_TYPE:
4814 case METHOD_TYPE:
4815 case FUNCTION_TYPE:
4816 case LANG_TYPE:
4817 default:
4818 gcc_unreachable ();
4822 /* Return 1 if EXP contains mostly (3/4) zeros. */
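/* Purely illustrative (not part of the original sources): the constructor
   { [0] = 1, [5] = 2 } for an int[16] array has 2 nonzero elements out of
   16 scalars, and 2 < 16/4, so it counts as mostly zero.  */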
4824 static int
4825 mostly_zeros_p (tree exp)
4827 if (TREE_CODE (exp) == CONSTRUCTOR)
4830 HOST_WIDE_INT nz_elts, count, elts;
4831 bool must_clear;
4833 categorize_ctor_elements (exp, &nz_elts, &count, &must_clear);
4834 if (must_clear)
4835 return 1;
4837 elts = count_type_elements (TREE_TYPE (exp), false);
4839 return nz_elts < elts / 4;
4842 return initializer_zerop (exp);
4845 /* Return 1 if EXP contains all zeros. */
4847 static int
4848 all_zeros_p (tree exp)
4850 if (TREE_CODE (exp) == CONSTRUCTOR)
4853 HOST_WIDE_INT nz_elts, count;
4854 bool must_clear;
4856 categorize_ctor_elements (exp, &nz_elts, &count, &must_clear);
4857 return nz_elts == 0;
4860 return initializer_zerop (exp);
4863 /* Helper function for store_constructor.
4864 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4865 TYPE is the type of the CONSTRUCTOR, not the element type.
4866 CLEARED is as for store_constructor.
4867 ALIAS_SET is the alias set to use for any stores.
4869 This provides a recursive shortcut back to store_constructor when it isn't
4870 necessary to go through store_field. This is so that we can pass through
4871 the cleared field to let store_constructor know that we may not have to
4872 clear a substructure if the outer structure has already been cleared. */
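/* Purely illustrative (not part of the original sources): for
   struct outer { struct inner { int a, b; } in; int c; } o = { { 1 } };
   clearing O once at the outer level lets the recursive call for the "in"
   sub-constructor skip a second clear of that substructure.  */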
4874 static void
4875 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
4876 HOST_WIDE_INT bitpos, enum machine_mode mode,
4877 tree exp, tree type, int cleared, int alias_set)
4879 if (TREE_CODE (exp) == CONSTRUCTOR
4880 /* We can only call store_constructor recursively if the size and
4881 bit position are on a byte boundary. */
4882 && bitpos % BITS_PER_UNIT == 0
4883 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
4884 /* If we have a nonzero bitpos for a register target, then we just
4885 let store_field do the bitfield handling. This is unlikely to
4886 generate unnecessary clear instructions anyways. */
4887 && (bitpos == 0 || MEM_P (target)))
4889 if (MEM_P (target))
4890 target
4891 = adjust_address (target,
4892 GET_MODE (target) == BLKmode
4893 || 0 != (bitpos
4894 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4895 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4898 /* Update the alias set, if required. */
4899 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
4900 && MEM_ALIAS_SET (target) != 0)
4902 target = copy_rtx (target);
4903 set_mem_alias_set (target, alias_set);
4906 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4908 else
4909 store_field (target, bitsize, bitpos, mode, exp, type, alias_set);
4912 /* Store the value of constructor EXP into the rtx TARGET.
4913 TARGET is either a REG or a MEM; we know it cannot conflict, since
4914 safe_from_p has been called.
4915 CLEARED is true if TARGET is known to have been zero'd.
4916 SIZE is the number of bytes of TARGET we are allowed to modify: this
4917 may not be the same as the size of EXP if we are assigning to a field
4918 which has been packed to exclude padding bits. */
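/* Purely illustrative (not part of the original sources): for
   int v[100] = { [2] = 7 }; the whole array is cleared first (the
   constructor supplies fewer elements than the array and is mostly zero)
   and only the single nonzero element is then stored.  */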
4920 static void
4921 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
4923 tree type = TREE_TYPE (exp);
4924 #ifdef WORD_REGISTER_OPERATIONS
4925 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4926 #endif
4928 switch (TREE_CODE (type))
4930 case RECORD_TYPE:
4931 case UNION_TYPE:
4932 case QUAL_UNION_TYPE:
4934 unsigned HOST_WIDE_INT idx;
4935 tree field, value;
4937 /* If size is zero or the target is already cleared, do nothing. */
4938 if (size == 0 || cleared)
4939 cleared = 1;
4940 /* We either clear the aggregate or indicate the value is dead. */
4941 else if ((TREE_CODE (type) == UNION_TYPE
4942 || TREE_CODE (type) == QUAL_UNION_TYPE)
4943 && ! CONSTRUCTOR_ELTS (exp))
4944 /* If the constructor is empty, clear the union. */
4946 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
4947 cleared = 1;
4950 /* If we are building a static constructor into a register,
4951 set the initial value as zero so we can fold the value into
4952 a constant. But if more than one register is involved,
4953 this probably loses. */
4954 else if (REG_P (target) && TREE_STATIC (exp)
4955 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4957 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4958 cleared = 1;
4961 /* If the constructor has fewer fields than the structure or
4962 if we are initializing the structure to mostly zeros, clear
4963 the whole structure first. Don't do this if TARGET is a
4964 register whose mode size isn't equal to SIZE since
4965 clear_storage can't handle this case. */
4966 else if (size > 0
4967 && (((int)VEC_length (constructor_elt, CONSTRUCTOR_ELTS (exp))
4968 != fields_length (type))
4969 || mostly_zeros_p (exp))
4970 && (!REG_P (target)
4971 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4972 == size)))
4974 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
4975 cleared = 1;
4978 if (! cleared)
4979 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4981 /* Store each element of the constructor into the
4982 corresponding field of TARGET. */
4983 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
4985 enum machine_mode mode;
4986 HOST_WIDE_INT bitsize;
4987 HOST_WIDE_INT bitpos = 0;
4988 tree offset;
4989 rtx to_rtx = target;
4991 /* Just ignore missing fields. We cleared the whole
4992 structure, above, if any fields are missing. */
4993 if (field == 0)
4994 continue;
4996 if (cleared && initializer_zerop (value))
4997 continue;
4999 if (host_integerp (DECL_SIZE (field), 1))
5000 bitsize = tree_low_cst (DECL_SIZE (field), 1);
5001 else
5002 bitsize = -1;
5004 mode = DECL_MODE (field);
5005 if (DECL_BIT_FIELD (field))
5006 mode = VOIDmode;
5008 offset = DECL_FIELD_OFFSET (field);
5009 if (host_integerp (offset, 0)
5010 && host_integerp (bit_position (field), 0))
5012 bitpos = int_bit_position (field);
5013 offset = 0;
5015 else
5016 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
5018 if (offset)
5020 rtx offset_rtx;
5022 offset
5023 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
5024 make_tree (TREE_TYPE (exp),
5025 target));
5027 offset_rtx = expand_normal (offset);
5028 gcc_assert (MEM_P (to_rtx));
5030 #ifdef POINTERS_EXTEND_UNSIGNED
5031 if (GET_MODE (offset_rtx) != Pmode)
5032 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
5033 #else
5034 if (GET_MODE (offset_rtx) != ptr_mode)
5035 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
5036 #endif
5038 to_rtx = offset_address (to_rtx, offset_rtx,
5039 highest_pow2_factor (offset));
5042 #ifdef WORD_REGISTER_OPERATIONS
5043 /* If this initializes a field that is smaller than a
5044 word, at the start of a word, try to widen it to a full
5045 word. This special case allows us to output C++ member
5046 function initializations in a form that the optimizers
5047 can understand. */
5048 if (REG_P (target)
5049 && bitsize < BITS_PER_WORD
5050 && bitpos % BITS_PER_WORD == 0
5051 && GET_MODE_CLASS (mode) == MODE_INT
5052 && TREE_CODE (value) == INTEGER_CST
5053 && exp_size >= 0
5054 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
5056 tree type = TREE_TYPE (value);
5058 if (TYPE_PRECISION (type) < BITS_PER_WORD)
5060 type = lang_hooks.types.type_for_size
5061 (BITS_PER_WORD, TYPE_UNSIGNED (type));
5062 value = fold_convert (type, value);
5065 if (BYTES_BIG_ENDIAN)
5066 value
5067 = fold_build2 (LSHIFT_EXPR, type, value,
5068 build_int_cst (type,
5069 BITS_PER_WORD - bitsize));
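/* Explanatory note (not in the original sources): on a big-endian target
   the field occupies the most significant end of the word, so the value is
   shifted up by the unused bits; storing the widened word then leaves the
   field's bits in their original position.  */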
5070 bitsize = BITS_PER_WORD;
5071 mode = word_mode;
5073 #endif
5075 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
5076 && DECL_NONADDRESSABLE_P (field))
5078 to_rtx = copy_rtx (to_rtx);
5079 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
5082 store_constructor_field (to_rtx, bitsize, bitpos, mode,
5083 value, type, cleared,
5084 get_alias_set (TREE_TYPE (field)));
5086 break;
5088 case ARRAY_TYPE:
5090 tree value, index;
5091 unsigned HOST_WIDE_INT i;
5092 int need_to_clear;
5093 tree domain;
5094 tree elttype = TREE_TYPE (type);
5095 int const_bounds_p;
5096 HOST_WIDE_INT minelt = 0;
5097 HOST_WIDE_INT maxelt = 0;
5099 domain = TYPE_DOMAIN (type);
5100 const_bounds_p = (TYPE_MIN_VALUE (domain)
5101 && TYPE_MAX_VALUE (domain)
5102 && host_integerp (TYPE_MIN_VALUE (domain), 0)
5103 && host_integerp (TYPE_MAX_VALUE (domain), 0));
5105 /* If we have constant bounds for the range of the type, get them. */
5106 if (const_bounds_p)
5108 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
5109 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
5112 /* If the constructor has fewer elements than the array, clear
5113 the whole array first. Similarly if this is a static
5114 constructor of a non-BLKmode object. */
5115 if (cleared)
5116 need_to_clear = 0;
5117 else if (REG_P (target) && TREE_STATIC (exp))
5118 need_to_clear = 1;
5119 else
5121 unsigned HOST_WIDE_INT idx;
5122 tree index, value;
5123 HOST_WIDE_INT count = 0, zero_count = 0;
5124 need_to_clear = ! const_bounds_p;
5126 /* This loop is a more accurate version of the loop in
5127 mostly_zeros_p (it handles RANGE_EXPR in an index). It
5128 is also needed to check for missing elements. */
5129 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
5131 HOST_WIDE_INT this_node_count;
5133 if (need_to_clear)
5134 break;
5136 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5138 tree lo_index = TREE_OPERAND (index, 0);
5139 tree hi_index = TREE_OPERAND (index, 1);
5141 if (! host_integerp (lo_index, 1)
5142 || ! host_integerp (hi_index, 1))
5144 need_to_clear = 1;
5145 break;
5148 this_node_count = (tree_low_cst (hi_index, 1)
5149 - tree_low_cst (lo_index, 1) + 1);
5151 else
5152 this_node_count = 1;
5154 count += this_node_count;
5155 if (mostly_zeros_p (value))
5156 zero_count += this_node_count;
5159 /* Clear the entire array first if there are any missing
5160 elements, or if the incidence of zero elements is >=
5161 75%. */
5162 if (! need_to_clear
5163 && (count < maxelt - minelt + 1
5164 || 4 * zero_count >= 3 * count))
5165 need_to_clear = 1;
5168 if (need_to_clear && size > 0)
5170 if (REG_P (target))
5171 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5172 else
5173 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5174 cleared = 1;
5177 if (!cleared && REG_P (target))
5178 /* Inform later passes that the old value is dead. */
5179 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
5181 /* Store each element of the constructor into the
5182 corresponding element of TARGET, determined by counting the
5183 elements. */
5184 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
5186 enum machine_mode mode;
5187 HOST_WIDE_INT bitsize;
5188 HOST_WIDE_INT bitpos;
5189 int unsignedp;
5190 rtx xtarget = target;
5192 if (cleared && initializer_zerop (value))
5193 continue;
5195 unsignedp = TYPE_UNSIGNED (elttype);
5196 mode = TYPE_MODE (elttype);
5197 if (mode == BLKmode)
5198 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
5199 ? tree_low_cst (TYPE_SIZE (elttype), 1)
5200 : -1);
5201 else
5202 bitsize = GET_MODE_BITSIZE (mode);
5204 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5206 tree lo_index = TREE_OPERAND (index, 0);
5207 tree hi_index = TREE_OPERAND (index, 1);
5208 rtx index_r, pos_rtx;
5209 HOST_WIDE_INT lo, hi, count;
5210 tree position;
5212 /* If the range is constant and "small", unroll the loop. */
5213 if (const_bounds_p
5214 && host_integerp (lo_index, 0)
5215 && host_integerp (hi_index, 0)
5216 && (lo = tree_low_cst (lo_index, 0),
5217 hi = tree_low_cst (hi_index, 0),
5218 count = hi - lo + 1,
5219 (!MEM_P (target)
5220 || count <= 2
5221 || (host_integerp (TYPE_SIZE (elttype), 1)
5222 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
5223 <= 40 * 8)))))
5225 lo -= minelt; hi -= minelt;
5226 for (; lo <= hi; lo++)
5228 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
5230 if (MEM_P (target)
5231 && !MEM_KEEP_ALIAS_SET_P (target)
5232 && TREE_CODE (type) == ARRAY_TYPE
5233 && TYPE_NONALIASED_COMPONENT (type))
5235 target = copy_rtx (target);
5236 MEM_KEEP_ALIAS_SET_P (target) = 1;
5239 store_constructor_field
5240 (target, bitsize, bitpos, mode, value, type, cleared,
5241 get_alias_set (elttype));
5244 else
5246 rtx loop_start = gen_label_rtx ();
5247 rtx loop_end = gen_label_rtx ();
5248 tree exit_cond;
5250 expand_normal (hi_index);
5251 unsignedp = TYPE_UNSIGNED (domain);
5253 index = build_decl (VAR_DECL, NULL_TREE, domain);
5255 index_r
5256 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
5257 &unsignedp, 0));
5258 SET_DECL_RTL (index, index_r);
5259 store_expr (lo_index, index_r, 0);
5261 /* Build the head of the loop. */
5262 do_pending_stack_adjust ();
5263 emit_label (loop_start);
5265 /* Assign value to element index. */
5266 position =
5267 fold_convert (ssizetype,
5268 fold_build2 (MINUS_EXPR,
5269 TREE_TYPE (index),
5270 index,
5271 TYPE_MIN_VALUE (domain)));
5273 position =
5274 size_binop (MULT_EXPR, position,
5275 fold_convert (ssizetype,
5276 TYPE_SIZE_UNIT (elttype)));
5278 pos_rtx = expand_normal (position);
5279 xtarget = offset_address (target, pos_rtx,
5280 highest_pow2_factor (position));
5281 xtarget = adjust_address (xtarget, mode, 0);
5282 if (TREE_CODE (value) == CONSTRUCTOR)
5283 store_constructor (value, xtarget, cleared,
5284 bitsize / BITS_PER_UNIT);
5285 else
5286 store_expr (value, xtarget, 0);
5288 /* Generate a conditional jump to exit the loop. */
5289 exit_cond = build2 (LT_EXPR, integer_type_node,
5290 index, hi_index);
5291 jumpif (exit_cond, loop_end);
5293 /* Update the loop counter, and jump to the head of
5294 the loop. */
5295 expand_assignment (index,
5296 build2 (PLUS_EXPR, TREE_TYPE (index),
5297 index, integer_one_node));
5299 emit_jump (loop_start);
5301 /* Build the end of the loop. */
5302 emit_label (loop_end);
5305 else if ((index != 0 && ! host_integerp (index, 0))
5306 || ! host_integerp (TYPE_SIZE (elttype), 1))
5308 tree position;
5310 if (index == 0)
5311 index = ssize_int (1);
5313 if (minelt)
5314 index = fold_convert (ssizetype,
5315 fold_build2 (MINUS_EXPR,
5316 TREE_TYPE (index),
5317 index,
5318 TYPE_MIN_VALUE (domain)));
5320 position =
5321 size_binop (MULT_EXPR, index,
5322 fold_convert (ssizetype,
5323 TYPE_SIZE_UNIT (elttype)));
5324 xtarget = offset_address (target,
5325 expand_normal (position),
5326 highest_pow2_factor (position));
5327 xtarget = adjust_address (xtarget, mode, 0);
5328 store_expr (value, xtarget, 0);
5330 else
5332 if (index != 0)
5333 bitpos = ((tree_low_cst (index, 0) - minelt)
5334 * tree_low_cst (TYPE_SIZE (elttype), 1));
5335 else
5336 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5338 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
5339 && TREE_CODE (type) == ARRAY_TYPE
5340 && TYPE_NONALIASED_COMPONENT (type))
5342 target = copy_rtx (target);
5343 MEM_KEEP_ALIAS_SET_P (target) = 1;
5345 store_constructor_field (target, bitsize, bitpos, mode, value,
5346 type, cleared, get_alias_set (elttype));
5349 break;
5352 case VECTOR_TYPE:
5354 unsigned HOST_WIDE_INT idx;
5355 constructor_elt *ce;
5356 int i;
5357 int need_to_clear;
5358 int icode = 0;
5359 tree elttype = TREE_TYPE (type);
5360 int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
5361 enum machine_mode eltmode = TYPE_MODE (elttype);
5362 HOST_WIDE_INT bitsize;
5363 HOST_WIDE_INT bitpos;
5364 rtvec vector = NULL;
5365 unsigned n_elts;
5367 gcc_assert (eltmode != BLKmode);
5369 n_elts = TYPE_VECTOR_SUBPARTS (type);
5370 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
5372 enum machine_mode mode = GET_MODE (target);
5374 icode = (int) vec_init_optab->handlers[mode].insn_code;
5375 if (icode != CODE_FOR_nothing)
5377 unsigned int i;
5379 vector = rtvec_alloc (n_elts);
5380 for (i = 0; i < n_elts; i++)
5381 RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
5385 /* If the constructor has fewer elements than the vector,
5386 clear the whole array first. Similarly if this is a static
5387 constructor of a non-BLKmode object. */
5388 if (cleared)
5389 need_to_clear = 0;
5390 else if (REG_P (target) && TREE_STATIC (exp))
5391 need_to_clear = 1;
5392 else
5394 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
5395 tree value;
5397 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
5399 int n_elts_here = tree_low_cst
5400 (int_const_binop (TRUNC_DIV_EXPR,
5401 TYPE_SIZE (TREE_TYPE (value)),
5402 TYPE_SIZE (elttype), 0), 1);
5404 count += n_elts_here;
5405 if (mostly_zeros_p (value))
5406 zero_count += n_elts_here;
5409 /* Clear the entire vector first if there are any missing elements,
5410 or if the incidence of zero elements is >= 75%. */
5411 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
5414 if (need_to_clear && size > 0 && !vector)
5416 if (REG_P (target))
5417 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5418 else
5419 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5420 cleared = 1;
5423 /* Inform later passes that the old value is dead. */
5424 if (!cleared && !vector && REG_P (target))
5425 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5427 /* Store each element of the constructor into the corresponding
5428 element of TARGET, determined by counting the elements. */
5429 for (idx = 0, i = 0;
5430 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
5431 idx++, i += bitsize / elt_size)
5433 HOST_WIDE_INT eltpos;
5434 tree value = ce->value;
5436 bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
5437 if (cleared && initializer_zerop (value))
5438 continue;
5440 if (ce->index)
5441 eltpos = tree_low_cst (ce->index, 1);
5442 else
5443 eltpos = i;
5445 if (vector)
5447 /* Vector CONSTRUCTORs should only be built from smaller
5448 vectors in the case of BLKmode vectors. */
5449 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
5450 RTVEC_ELT (vector, eltpos)
5451 = expand_normal (value);
5453 else
5455 enum machine_mode value_mode =
5456 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
5457 ? TYPE_MODE (TREE_TYPE (value))
5458 : eltmode;
5459 bitpos = eltpos * elt_size;
5460 store_constructor_field (target, bitsize, bitpos,
5461 value_mode, value, type,
5462 cleared, get_alias_set (elttype));
5466 if (vector)
5467 emit_insn (GEN_FCN (icode)
5468 (target,
5469 gen_rtx_PARALLEL (GET_MODE (target), vector)));
5470 break;
5473 default:
5474 gcc_unreachable ();
5478 /* Store the value of EXP (an expression tree)
5479 into a subfield of TARGET which has mode MODE and occupies
5480 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5481 If MODE is VOIDmode, it means that we are storing into a bit-field.
5483 Always return const0_rtx unless we have something particular to
5484 return.
5486 TYPE is the type of the underlying object,
5488 ALIAS_SET is the alias set for the destination. This value will
5489 (in general) be different from that for TARGET, since TARGET is a
5490 reference to the containing structure. */
5492 static rtx
5493 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5494 enum machine_mode mode, tree exp, tree type, int alias_set)
5496 HOST_WIDE_INT width_mask = 0;
5498 if (TREE_CODE (exp) == ERROR_MARK)
5499 return const0_rtx;
5501 /* If we have nothing to store, do nothing unless the expression has
5502 side-effects. */
5503 if (bitsize == 0)
5504 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5505 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5506 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5508 /* If we are storing into an unaligned field of an aligned union that is
5509 in a register, we may have the mode of TARGET being an integer mode but
5510 MODE == BLKmode. In that case, get an aligned object whose size and
5511 alignment are the same as TARGET and store TARGET into it (we can avoid
5512 the store if the field being stored is the entire width of TARGET). Then
5513 call ourselves recursively to store the field into a BLKmode version of
5514 that object. Finally, load from the object into TARGET. This is not
5515 very efficient in general, but should only be slightly more expensive
5516 than the otherwise-required unaligned accesses. Perhaps this can be
5517 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5518 twice, once with emit_move_insn and once via store_field. */
5520 if (mode == BLKmode
5521 && (REG_P (target) || GET_CODE (target) == SUBREG))
5523 rtx object = assign_temp (type, 0, 1, 1);
5524 rtx blk_object = adjust_address (object, BLKmode, 0);
5526 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5527 emit_move_insn (object, target);
5529 store_field (blk_object, bitsize, bitpos, mode, exp, type, alias_set);
5531 emit_move_insn (target, object);
5533 /* We want to return the BLKmode version of the data. */
5534 return blk_object;
5537 if (GET_CODE (target) == CONCAT)
5539 /* We're storing into a struct containing a single __complex. */
5541 gcc_assert (!bitpos);
5542 return store_expr (exp, target, 0);
5545 /* If the structure is in a register or if the component
5546 is a bit field, we cannot use addressing to access it.
5547 Use bit-field techniques or SUBREG to store in it. */
5549 if (mode == VOIDmode
5550 || (mode != BLKmode && ! direct_store[(int) mode]
5551 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5552 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5553 || REG_P (target)
5554 || GET_CODE (target) == SUBREG
5555 /* If the field isn't aligned enough to store as an ordinary memref,
5556 store it as a bit field. */
5557 || (mode != BLKmode
5558 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5559 || bitpos % GET_MODE_ALIGNMENT (mode))
5560 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5561 || (bitpos % BITS_PER_UNIT != 0)))
5562 /* If the RHS and field are a constant size and the size of the
5563 RHS isn't the same size as the bitfield, we must use bitfield
5564 operations. */
5565 || (bitsize >= 0
5566 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5567 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5569 rtx temp;
5571 /* If EXP is a NOP_EXPR of precision less than its mode, then that
5572 implies a mask operation. If the precision is the same size as
5573 the field we're storing into, that mask is redundant. This is
5574 particularly common with bit field assignments generated by the
5575 C front end. */
5576 if (TREE_CODE (exp) == NOP_EXPR)
5578 tree type = TREE_TYPE (exp);
5579 if (INTEGRAL_TYPE_P (type)
5580 && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
5581 && bitsize == TYPE_PRECISION (type))
5583 type = TREE_TYPE (TREE_OPERAND (exp, 0));
5584 if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
5585 exp = TREE_OPERAND (exp, 0);
5589 temp = expand_normal (exp);
5591 /* If BITSIZE is narrower than the size of the type of EXP
5592 we will be narrowing TEMP. Normally, what's wanted are the
5593 low-order bits. However, if EXP's type is a record and this is
5594 a big-endian machine, we want the upper BITSIZE bits. */
5595 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5596 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5597 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5598 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5599 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5600 - bitsize),
5601 NULL_RTX, 1);
5603 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5604 MODE. */
5605 if (mode != VOIDmode && mode != BLKmode
5606 && mode != TYPE_MODE (TREE_TYPE (exp)))
5607 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5609 /* If the modes of TARGET and TEMP are both BLKmode, both
5610 must be in memory and BITPOS must be aligned on a byte
5611 boundary. If so, we simply do a block copy. */
5612 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5614 gcc_assert (MEM_P (target) && MEM_P (temp)
5615 && !(bitpos % BITS_PER_UNIT));
5617 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5618 emit_block_move (target, temp,
5619 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5620 / BITS_PER_UNIT),
5621 BLOCK_OP_NORMAL);
5623 return const0_rtx;
5626 /* Store the value in the bitfield. */
5627 store_bit_field (target, bitsize, bitpos, mode, temp);
5629 return const0_rtx;
5631 else
5633 /* Now build a reference to just the desired component. */
5634 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5636 if (to_rtx == target)
5637 to_rtx = copy_rtx (to_rtx);
5639 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5640 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5641 set_mem_alias_set (to_rtx, alias_set);
5643 return store_expr (exp, to_rtx, 0);
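/* Editorial sketch, not part of the original file: WIDTH_MASK above is the
   usual "low BITSIZE bits set" mask, ((HOST_WIDE_INT) 1 << bitsize) - 1.
   The generic helper below (hypothetical name, plain C types) shows how such
   a mask confines a value to a BITSIZE-bit field; as in the code above, the
   shift is only valid when BITSIZE is smaller than the width of the type.  */

static unsigned long
sketch_mask_to_width (unsigned long value, int bitsize)
{
  /* Set exactly the low BITSIZE bits, then discard everything above them.  */
  unsigned long width_mask = (1UL << bitsize) - 1;
  return value & width_mask;
}

/* For example, sketch_mask_to_width (0x1ff, 5) == 0x1f: only the low five
   bits of the right-hand side survive a store into a 5-bit field.  */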
5647 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5648 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5649 codes and find the ultimate containing object, which we return.
5651 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5652 bit position, and *PUNSIGNEDP to the signedness of the field.
5653 If the position of the field is variable, we store a tree
5654 giving the variable offset (in units) in *POFFSET.
5655 This offset is in addition to the bit position.
5656 If the position is not variable, we store 0 in *POFFSET.
5658 If any of the extraction expressions is volatile,
5659 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5661 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5662 is a mode that can be used to access the field. In that case, *PBITSIZE
5663 is redundant.
5665 If the field describes a variable-sized object, *PMODE is set to
5666 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5667 this case, but the address of the object can be found.
5669 If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
5670 look through nodes that serve as markers of a greater alignment than
5671 the one that can be deduced from the expression. These nodes make it
5672 possible for front-ends to prevent temporaries from being created by
5673 the middle-end on alignment considerations. For that purpose, the
5674 normal operating mode at high-level is to always pass FALSE so that
5675 the ultimate containing object is really returned; moreover, the
5676 associated predicate handled_component_p will always return TRUE
5677 on these nodes, thus indicating that they are essentially handled
5678 by get_inner_reference. TRUE should only be passed when the caller
5679 is scanning the expression in order to build another representation
5680 and specifically knows how to handle these nodes; as such, this is
5681 the normal operating mode in the RTL expanders. */
5683 tree
5684 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5685 HOST_WIDE_INT *pbitpos, tree *poffset,
5686 enum machine_mode *pmode, int *punsignedp,
5687 int *pvolatilep, bool keep_aligning)
5689 tree size_tree = 0;
5690 enum machine_mode mode = VOIDmode;
5691 tree offset = size_zero_node;
5692 tree bit_offset = bitsize_zero_node;
5693 tree tem;
5695 /* First get the mode, signedness, and size. We do this from just the
5696 outermost expression. */
5697 if (TREE_CODE (exp) == COMPONENT_REF)
5699 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5700 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5701 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5703 *punsignedp = DECL_UNSIGNED (TREE_OPERAND (exp, 1));
5705 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5707 size_tree = TREE_OPERAND (exp, 1);
5708 *punsignedp = BIT_FIELD_REF_UNSIGNED (exp);
5710 /* For vector types, with the correct size of access, use the mode of
5711 inner type. */
5712 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == VECTOR_TYPE
5713 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))
5714 && tree_int_cst_equal (size_tree, TYPE_SIZE (TREE_TYPE (exp))))
5715 mode = TYPE_MODE (TREE_TYPE (exp));
5717 else
5719 mode = TYPE_MODE (TREE_TYPE (exp));
5720 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
5722 if (mode == BLKmode)
5723 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5724 else
5725 *pbitsize = GET_MODE_BITSIZE (mode);
5728 if (size_tree != 0)
5730 if (! host_integerp (size_tree, 1))
5731 mode = BLKmode, *pbitsize = -1;
5732 else
5733 *pbitsize = tree_low_cst (size_tree, 1);
5736 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5737 and find the ultimate containing object. */
5738 while (1)
5740 switch (TREE_CODE (exp))
5742 case BIT_FIELD_REF:
5743 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5744 TREE_OPERAND (exp, 2));
5745 break;
5747 case COMPONENT_REF:
5749 tree field = TREE_OPERAND (exp, 1);
5750 tree this_offset = component_ref_field_offset (exp);
5752 /* If this field hasn't been filled in yet, don't go past it.
5753 This should only happen when folding expressions made during
5754 type construction. */
5755 if (this_offset == 0)
5756 break;
5758 offset = size_binop (PLUS_EXPR, offset, this_offset);
5759 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5760 DECL_FIELD_BIT_OFFSET (field));
5762 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5764 break;
5766 case ARRAY_REF:
5767 case ARRAY_RANGE_REF:
5769 tree index = TREE_OPERAND (exp, 1);
5770 tree low_bound = array_ref_low_bound (exp);
5771 tree unit_size = array_ref_element_size (exp);
5773 /* We assume all arrays have sizes that are a multiple of a byte.
5774 First subtract the lower bound, if any, in the type of the
5775 index, then convert to sizetype and multiply by the size of
5776 the array element. */
5777 if (! integer_zerop (low_bound))
5778 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
5779 index, low_bound);
5781 offset = size_binop (PLUS_EXPR, offset,
5782 size_binop (MULT_EXPR,
5783 fold_convert (sizetype, index),
5784 unit_size));
5786 break;
5788 case REALPART_EXPR:
5789 break;
5791 case IMAGPART_EXPR:
5792 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5793 bitsize_int (*pbitsize));
5794 break;
5796 case VIEW_CONVERT_EXPR:
5797 if (keep_aligning && STRICT_ALIGNMENT
5798 && (TYPE_ALIGN (TREE_TYPE (exp))
5799 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
5800 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5801 < BIGGEST_ALIGNMENT)
5802 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5803 || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
5804 goto done;
5805 break;
5807 default:
5808 goto done;
5811 /* If any reference in the chain is volatile, the effect is volatile. */
5812 if (TREE_THIS_VOLATILE (exp))
5813 *pvolatilep = 1;
5815 exp = TREE_OPERAND (exp, 0);
5817 done:
5819 /* If OFFSET is constant, see if we can return the whole thing as a
5820 constant bit position. Otherwise, split it up. */
5821 if (host_integerp (offset, 0)
5822 && 0 != (tem = size_binop (MULT_EXPR,
5823 fold_convert (bitsizetype, offset),
5824 bitsize_unit_node))
5825 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5826 && host_integerp (tem, 0))
5827 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5828 else
5829 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5831 *pmode = mode;
5832 return exp;
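/* Editorial sketch, not part of the original file: a minimal example of how
   an expander typically calls get_inner_reference, mirroring the calls made
   later in this file.  The wrapper name is hypothetical and the outputs are
   simply ignored here.  */

static tree
sketch_decompose_reference (tree exp)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  enum machine_mode mode1;
  int unsignedp, volatilep = 0;

  /* On return, BITSIZE/BITPOS give the constant part of the position,
     OFFSET the variable part in bytes (or 0), and the result is the
     ultimate containing object.  */
  return get_inner_reference (exp, &bitsize, &bitpos, &offset,
                              &mode1, &unsignedp, &volatilep, false);
}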
5835 /* Return a tree of sizetype representing the size, in bytes, of the element
5836 of EXP, an ARRAY_REF. */
5838 tree
5839 array_ref_element_size (tree exp)
5841 tree aligned_size = TREE_OPERAND (exp, 3);
5842 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5844 /* If a size was specified in the ARRAY_REF, it's the size measured
5845 in alignment units of the element type. So multiply by that value. */
5846 if (aligned_size)
5848 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
5849 sizetype from another type of the same width and signedness. */
5850 if (TREE_TYPE (aligned_size) != sizetype)
5851 aligned_size = fold_convert (sizetype, aligned_size);
5852 return size_binop (MULT_EXPR, aligned_size,
5853 size_int (TYPE_ALIGN_UNIT (elmt_type)));
5856 /* Otherwise, take the size from that of the element type. Substitute
5857 any PLACEHOLDER_EXPR that we have. */
5858 else
5859 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
5862 /* Return a tree representing the lower bound of the array mentioned in
5863 EXP, an ARRAY_REF. */
5865 tree
5866 array_ref_low_bound (tree exp)
5868 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5870 /* If a lower bound is specified in EXP, use it. */
5871 if (TREE_OPERAND (exp, 2))
5872 return TREE_OPERAND (exp, 2);
5874 /* Otherwise, if there is a domain type and it has a lower bound, use it,
5875 substituting for a PLACEHOLDER_EXPR as needed. */
5876 if (domain_type && TYPE_MIN_VALUE (domain_type))
5877 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
5879 /* Otherwise, return a zero of the appropriate type. */
5880 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
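/* Editorial sketch, not part of the original file: the ARRAY_REF case in
   get_inner_reference above boils down to (index - low_bound) * unit_size,
   all in bytes.  A generic restatement with host integers; the helper name
   is hypothetical.  */

static long
sketch_array_element_offset (long index, long low_bound, long unit_size)
{
  /* Subtract the lower bound first, then scale by the element size,
     just as the size_binop MINUS/MULT sequence above does on trees.  */
  return (index - low_bound) * unit_size;
}

/* E.g. an array with lower bound 1 and 4-byte elements places element 3
   at byte offset sketch_array_element_offset (3, 1, 4) == 8.  */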
5883 /* Return a tree representing the upper bound of the array mentioned in
5884 EXP, an ARRAY_REF. */
5886 tree
5887 array_ref_up_bound (tree exp)
5889 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5891 /* If there is a domain type and it has an upper bound, use it, substituting
5892 for a PLACEHOLDER_EXPR as needed. */
5893 if (domain_type && TYPE_MAX_VALUE (domain_type))
5894 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
5896 /* Otherwise fail. */
5897 return NULL_TREE;
5900 /* Return a tree representing the offset, in bytes, of the field referenced
5901 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
5903 tree
5904 component_ref_field_offset (tree exp)
5906 tree aligned_offset = TREE_OPERAND (exp, 2);
5907 tree field = TREE_OPERAND (exp, 1);
5909 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
5910 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
5911 value. */
5912 if (aligned_offset)
5914 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
5915 sizetype from another type of the same width and signedness. */
5916 if (TREE_TYPE (aligned_offset) != sizetype)
5917 aligned_offset = fold_convert (sizetype, aligned_offset);
5918 return size_binop (MULT_EXPR, aligned_offset,
5919 size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT));
5922 /* Otherwise, take the offset from that of the field. Substitute
5923 any PLACEHOLDER_EXPR that we have. */
5924 else
5925 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
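/* Editorial sketch, not part of the original file: the multiplication above
   only converts "alignment units" back into bytes.  A generic restatement
   with host integers; all names are hypothetical.  */

static unsigned long
sketch_field_offset_in_bytes (unsigned long aligned_offset,
                              unsigned int offset_align_in_bits,
                              unsigned int bits_per_unit)
{
  /* ALIGNED_OFFSET counts units of (OFFSET_ALIGN / BITS_PER_UNIT) bytes.  */
  return aligned_offset * (offset_align_in_bits / bits_per_unit);
}

/* With an offset alignment of 64 bits and 8-bit units, an aligned offset
   of 2 denotes a field that starts 16 bytes into the record.  */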
5928 /* Return 1 if T is an expression that get_inner_reference handles. */
5930 int
5931 handled_component_p (tree t)
5933 switch (TREE_CODE (t))
5935 case BIT_FIELD_REF:
5936 case COMPONENT_REF:
5937 case ARRAY_REF:
5938 case ARRAY_RANGE_REF:
5939 case VIEW_CONVERT_EXPR:
5940 case REALPART_EXPR:
5941 case IMAGPART_EXPR:
5942 return 1;
5944 default:
5945 return 0;
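/* Editorial sketch, not part of the original file: a common way to use
   handled_component_p is to peel component references until the base
   object is reached, much as the loop in get_inner_reference does.  The
   wrapper name is hypothetical.  */

static tree
sketch_get_base_object (tree t)
{
  while (handled_component_p (t))
    t = TREE_OPERAND (t, 0);
  return t;
}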
5949 /* Given an rtx VALUE that may contain additions and multiplications, return
5950 an equivalent value that just refers to a register, memory, or constant.
5951 This is done by generating instructions to perform the arithmetic and
5952 returning a pseudo-register containing the value.
5954 The returned value may be a REG, SUBREG, MEM or constant. */
5956 rtx
5957 force_operand (rtx value, rtx target)
5959 rtx op1, op2;
5960 /* Use subtarget as the target for operand 0 of a binary operation. */
5961 rtx subtarget = get_subtarget (target);
5962 enum rtx_code code = GET_CODE (value);
5964 /* Check for subreg applied to an expression produced by loop optimizer. */
5965 if (code == SUBREG
5966 && !REG_P (SUBREG_REG (value))
5967 && !MEM_P (SUBREG_REG (value)))
5969 value = simplify_gen_subreg (GET_MODE (value),
5970 force_reg (GET_MODE (SUBREG_REG (value)),
5971 force_operand (SUBREG_REG (value),
5972 NULL_RTX)),
5973 GET_MODE (SUBREG_REG (value)),
5974 SUBREG_BYTE (value));
5975 code = GET_CODE (value);
5978 /* Check for a PIC address load. */
5979 if ((code == PLUS || code == MINUS)
5980 && XEXP (value, 0) == pic_offset_table_rtx
5981 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5982 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5983 || GET_CODE (XEXP (value, 1)) == CONST))
5985 if (!subtarget)
5986 subtarget = gen_reg_rtx (GET_MODE (value));
5987 emit_move_insn (subtarget, value);
5988 return subtarget;
5991 if (ARITHMETIC_P (value))
5993 op2 = XEXP (value, 1);
5994 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
5995 subtarget = 0;
5996 if (code == MINUS && GET_CODE (op2) == CONST_INT)
5998 code = PLUS;
5999 op2 = negate_rtx (GET_MODE (value), op2);
6002 /* Check for an addition with OP2 a constant integer and our first
6003 operand a PLUS of a virtual register and something else. In that
6004 case, we want to emit the sum of the virtual register and the
6005 constant first and then add the other value. This allows virtual
6006 register instantiation to simply modify the constant rather than
6007 creating another one around this addition. */
6008 if (code == PLUS && GET_CODE (op2) == CONST_INT
6009 && GET_CODE (XEXP (value, 0)) == PLUS
6010 && REG_P (XEXP (XEXP (value, 0), 0))
6011 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
6012 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
6014 rtx temp = expand_simple_binop (GET_MODE (value), code,
6015 XEXP (XEXP (value, 0), 0), op2,
6016 subtarget, 0, OPTAB_LIB_WIDEN);
6017 return expand_simple_binop (GET_MODE (value), code, temp,
6018 force_operand (XEXP (XEXP (value,
6019 0), 1), 0),
6020 target, 0, OPTAB_LIB_WIDEN);
6023 op1 = force_operand (XEXP (value, 0), subtarget);
6024 op2 = force_operand (op2, NULL_RTX);
6025 switch (code)
6027 case MULT:
6028 return expand_mult (GET_MODE (value), op1, op2, target, 1);
6029 case DIV:
6030 if (!INTEGRAL_MODE_P (GET_MODE (value)))
6031 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6032 target, 1, OPTAB_LIB_WIDEN);
6033 else
6034 return expand_divmod (0,
6035 FLOAT_MODE_P (GET_MODE (value))
6036 ? RDIV_EXPR : TRUNC_DIV_EXPR,
6037 GET_MODE (value), op1, op2, target, 0);
6038 break;
6039 case MOD:
6040 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6041 target, 0);
6042 break;
6043 case UDIV:
6044 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
6045 target, 1);
6046 break;
6047 case UMOD:
6048 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6049 target, 1);
6050 break;
6051 case ASHIFTRT:
6052 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6053 target, 0, OPTAB_LIB_WIDEN);
6054 break;
6055 default:
6056 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6057 target, 1, OPTAB_LIB_WIDEN);
6060 if (UNARY_P (value))
6062 if (!target)
6063 target = gen_reg_rtx (GET_MODE (value));
6064 op1 = force_operand (XEXP (value, 0), NULL_RTX);
6065 switch (code)
6067 case ZERO_EXTEND:
6068 case SIGN_EXTEND:
6069 case TRUNCATE:
6070 case FLOAT_EXTEND:
6071 case FLOAT_TRUNCATE:
6072 convert_move (target, op1, code == ZERO_EXTEND);
6073 return target;
6075 case FIX:
6076 case UNSIGNED_FIX:
6077 expand_fix (target, op1, code == UNSIGNED_FIX);
6078 return target;
6080 case FLOAT:
6081 case UNSIGNED_FLOAT:
6082 expand_float (target, op1, code == UNSIGNED_FLOAT);
6083 return target;
6085 default:
6086 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
6090 #ifdef INSN_SCHEDULING
6091 /* On machines that have insn scheduling, we want all memory references to be
6092 explicit, so we need to deal with such paradoxical SUBREGs. */
6093 if (GET_CODE (value) == SUBREG && MEM_P (SUBREG_REG (value))
6094 && (GET_MODE_SIZE (GET_MODE (value))
6095 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
6096 value
6097 = simplify_gen_subreg (GET_MODE (value),
6098 force_reg (GET_MODE (SUBREG_REG (value)),
6099 force_operand (SUBREG_REG (value),
6100 NULL_RTX)),
6101 GET_MODE (SUBREG_REG (value)),
6102 SUBREG_BYTE (value));
6103 #endif
6105 return value;
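/* Editorial sketch, not part of the original file: the PLUS special case in
   force_operand only reassociates, since ((vreg + x) + c) == ((vreg + c) + x).
   A generic restatement with host integers; the helper name is hypothetical.  */

static long
sketch_reassociate_sum (long vreg, long x, long c)
{
  /* Emitting (vreg + c) first lets virtual register instantiation fold C
     into the offset that replaces VREG instead of creating a new constant.  */
  return (vreg + c) + x;
}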
6108 /* Subroutine of expand_expr: return nonzero iff there is no way that
6109 EXP can reference X, which is being modified. TOP_P is nonzero if this
6110 call is going to be used to determine whether we need a temporary
6111 for EXP, as opposed to a recursive call to this function.
6113 It is always safe for this routine to return zero since it merely
6114 searches for optimization opportunities. */
6116 static int
6117 safe_from_p (rtx x, tree exp, int top_p)
6119 rtx exp_rtl = 0;
6120 int i, nops;
6122 if (x == 0
6123 /* If EXP has varying size, we MUST use a target since we currently
6124 have no way of allocating temporaries of variable size
6125 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
6126 So we assume here that something at a higher level has prevented a
6127 clash. This is somewhat bogus, but the best we can do. Only
6128 do this when X is BLKmode and when we are at the top level. */
6129 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6130 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
6131 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
6132 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
6133 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
6134 != INTEGER_CST)
6135 && GET_MODE (x) == BLKmode)
6136 /* If X is in the outgoing argument area, it is always safe. */
6137 || (MEM_P (x)
6138 && (XEXP (x, 0) == virtual_outgoing_args_rtx
6139 || (GET_CODE (XEXP (x, 0)) == PLUS
6140 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
6141 return 1;
6143 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
6144 find the underlying pseudo. */
6145 if (GET_CODE (x) == SUBREG)
6147 x = SUBREG_REG (x);
6148 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6149 return 0;
6152 /* Now look at our tree code and possibly recurse. */
6153 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
6155 case tcc_declaration:
6156 exp_rtl = DECL_RTL_IF_SET (exp);
6157 break;
6159 case tcc_constant:
6160 return 1;
6162 case tcc_exceptional:
6163 if (TREE_CODE (exp) == TREE_LIST)
6165 while (1)
6167 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
6168 return 0;
6169 exp = TREE_CHAIN (exp);
6170 if (!exp)
6171 return 1;
6172 if (TREE_CODE (exp) != TREE_LIST)
6173 return safe_from_p (x, exp, 0);
6176 else if (TREE_CODE (exp) == CONSTRUCTOR)
6178 constructor_elt *ce;
6179 unsigned HOST_WIDE_INT idx;
6181 for (idx = 0;
6182 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
6183 idx++)
6184 if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
6185 || !safe_from_p (x, ce->value, 0))
6186 return 0;
6187 return 1;
6189 else if (TREE_CODE (exp) == ERROR_MARK)
6190 return 1; /* An already-visited SAVE_EXPR? */
6191 else
6192 return 0;
6194 case tcc_statement:
6195 /* The only case we look at here is the DECL_INITIAL inside a
6196 DECL_EXPR. */
6197 return (TREE_CODE (exp) != DECL_EXPR
6198 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
6199 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
6200 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
6202 case tcc_binary:
6203 case tcc_comparison:
6204 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
6205 return 0;
6206 /* Fall through. */
6208 case tcc_unary:
6209 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6211 case tcc_expression:
6212 case tcc_reference:
6213 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
6214 the expression. If it is set, we conflict iff we are that rtx or
6215 both are in memory. Otherwise, we check all operands of the
6216 expression recursively. */
6218 switch (TREE_CODE (exp))
6220 case ADDR_EXPR:
6221 /* If the operand is static or we are static, we can't conflict.
6222 Likewise if we don't conflict with the operand at all. */
6223 if (staticp (TREE_OPERAND (exp, 0))
6224 || TREE_STATIC (exp)
6225 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6226 return 1;
6228 /* Otherwise, the only way this can conflict is if we are taking
6229 the address of a DECL and that address is part of X, which is
6230 very rare. */
6231 exp = TREE_OPERAND (exp, 0);
6232 if (DECL_P (exp))
6234 if (!DECL_RTL_SET_P (exp)
6235 || !MEM_P (DECL_RTL (exp)))
6236 return 0;
6237 else
6238 exp_rtl = XEXP (DECL_RTL (exp), 0);
6240 break;
6242 case MISALIGNED_INDIRECT_REF:
6243 case ALIGN_INDIRECT_REF:
6244 case INDIRECT_REF:
6245 if (MEM_P (x)
6246 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
6247 get_alias_set (exp)))
6248 return 0;
6249 break;
6251 case CALL_EXPR:
6252 /* Assume that the call will clobber all hard registers and
6253 all of memory. */
6254 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6255 || MEM_P (x))
6256 return 0;
6257 break;
6259 case WITH_CLEANUP_EXPR:
6260 case CLEANUP_POINT_EXPR:
6261 /* Lowered by gimplify.c. */
6262 gcc_unreachable ();
6264 case SAVE_EXPR:
6265 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6267 default:
6268 break;
6271 /* If we have an rtx, we do not need to scan our operands. */
6272 if (exp_rtl)
6273 break;
6275 nops = TREE_CODE_LENGTH (TREE_CODE (exp));
6276 for (i = 0; i < nops; i++)
6277 if (TREE_OPERAND (exp, i) != 0
6278 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
6279 return 0;
6281 /* If this is a language-specific tree code, it may require
6282 special handling. */
6283 if ((unsigned int) TREE_CODE (exp)
6284 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
6285 && !lang_hooks.safe_from_p (x, exp))
6286 return 0;
6287 break;
6289 case tcc_type:
6290 /* Should never get a type here. */
6291 gcc_unreachable ();
6293 case tcc_gimple_stmt:
6294 gcc_unreachable ();
6297 /* If we have an rtl, find any enclosed object. Then see if we conflict
6298 with it. */
6299 if (exp_rtl)
6301 if (GET_CODE (exp_rtl) == SUBREG)
6303 exp_rtl = SUBREG_REG (exp_rtl);
6304 if (REG_P (exp_rtl)
6305 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
6306 return 0;
6309 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
6310 are memory and they conflict. */
6311 return ! (rtx_equal_p (x, exp_rtl)
6312 || (MEM_P (x) && MEM_P (exp_rtl)
6313 && true_dependence (exp_rtl, VOIDmode, x,
6314 rtx_addr_varies_p)));
6317 /* If we reach here, it is safe. */
6318 return 1;
6322 /* Return the highest power of two that EXP is known to be a multiple of.
6323 This is used in updating alignment of MEMs in array references. */
6325 unsigned HOST_WIDE_INT
6326 highest_pow2_factor (tree exp)
6328 unsigned HOST_WIDE_INT c0, c1;
6330 switch (TREE_CODE (exp))
6332 case INTEGER_CST:
6333 /* We can find the lowest bit that's a one. If the low
6334 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6335 We need to handle this case since we can find it in a COND_EXPR,
6336 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
6337 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
6338 later ICE. */
6339 if (TREE_OVERFLOW (exp))
6340 return BIGGEST_ALIGNMENT;
6341 else
6343 /* Note: tree_low_cst is intentionally not used here,
6344 we don't care about the upper bits. */
6345 c0 = TREE_INT_CST_LOW (exp);
6346 c0 &= -c0;
6347 return c0 ? c0 : BIGGEST_ALIGNMENT;
6349 break;
6351 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6352 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6353 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6354 return MIN (c0, c1);
6356 case MULT_EXPR:
6357 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6358 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6359 return c0 * c1;
6361 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6362 case CEIL_DIV_EXPR:
6363 if (integer_pow2p (TREE_OPERAND (exp, 1))
6364 && host_integerp (TREE_OPERAND (exp, 1), 1))
6366 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6367 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6368 return MAX (1, c0 / c1);
6370 break;
6372 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6373 case SAVE_EXPR:
6374 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6376 case COMPOUND_EXPR:
6377 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6379 case COND_EXPR:
6380 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6381 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6382 return MIN (c0, c1);
6384 default:
6385 break;
6388 return 1;
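/* Editorial sketch, not part of the original file: the INTEGER_CST case
   above uses the two's-complement identity C & -C, which isolates the
   lowest set bit of C; that bit is also the largest power of two dividing
   C.  A generic restatement; the helper name is hypothetical.  */

static unsigned long
sketch_lowest_set_bit (unsigned long c)
{
  /* Returns 0 for c == 0; the caller above maps that case to
     BIGGEST_ALIGNMENT instead.  */
  return c & -c;
}

/* Examples: sketch_lowest_set_bit (24) == 8 and sketch_lowest_set_bit (40) == 8,
   and 8 is indeed the largest power of two dividing either constant.  */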
6391 /* Similar, except that the alignment requirements of TARGET are
6392 taken into account. Assume it is at least as aligned as its
6393 type, unless it is a COMPONENT_REF in which case the layout of
6394 the structure gives the alignment. */
6396 static unsigned HOST_WIDE_INT
6397 highest_pow2_factor_for_target (tree target, tree exp)
6399 unsigned HOST_WIDE_INT target_align, factor;
6401 factor = highest_pow2_factor (exp);
6402 if (TREE_CODE (target) == COMPONENT_REF)
6403 target_align = DECL_ALIGN_UNIT (TREE_OPERAND (target, 1));
6404 else
6405 target_align = TYPE_ALIGN_UNIT (TREE_TYPE (target));
6406 return MAX (factor, target_align);
6409 /* Expands variable VAR. */
6411 void
6412 expand_var (tree var)
6414 if (DECL_EXTERNAL (var))
6415 return;
6417 if (TREE_STATIC (var))
6418 /* If this is an inlined copy of a static local variable,
6419 look up the original decl. */
6420 var = DECL_ORIGIN (var);
6422 if (TREE_STATIC (var)
6423 ? !TREE_ASM_WRITTEN (var)
6424 : !DECL_RTL_SET_P (var))
6426 if (TREE_CODE (var) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (var))
6427 /* Should be ignored. */;
6428 else if (lang_hooks.expand_decl (var))
6429 /* OK. */;
6430 else if (TREE_CODE (var) == VAR_DECL && !TREE_STATIC (var))
6431 expand_decl (var);
6432 else if (TREE_CODE (var) == VAR_DECL && TREE_STATIC (var))
6433 rest_of_decl_compilation (var, 0, 0);
6434 else
6435 /* No expansion needed. */
6436 gcc_assert (TREE_CODE (var) == TYPE_DECL
6437 || TREE_CODE (var) == CONST_DECL
6438 || TREE_CODE (var) == FUNCTION_DECL
6439 || TREE_CODE (var) == LABEL_DECL);
6443 /* Subroutine of expand_expr. Expand the two operands of a binary
6444 expression EXP0 and EXP1 placing the results in OP0 and OP1.
6445 The value may be stored in TARGET if TARGET is nonzero. The
6446 MODIFIER argument is as documented by expand_expr. */
6448 static void
6449 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
6450 enum expand_modifier modifier)
6452 if (! safe_from_p (target, exp1, 1))
6453 target = 0;
6454 if (operand_equal_p (exp0, exp1, 0))
6456 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6457 *op1 = copy_rtx (*op0);
6459 else
6461 /* If we need to preserve evaluation order, copy exp0 into its own
6462 temporary variable so that it can't be clobbered by exp1. */
6463 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
6464 exp0 = save_expr (exp0);
6465 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6466 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
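/* Editorial sketch, not part of the original file: a typical caller expands
   both operands of a binary tree node in one step, letting expand_operands
   deal with the safe_from_p check and the equal-operand case.  The wrapper
   name is hypothetical.  */

static void
sketch_expand_binary_operands (tree exp, rtx target, rtx *op0, rtx *op1)
{
  expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
                   target, op0, op1, EXPAND_NORMAL);
}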
6471 /* Return a MEM that contains constant EXP. DEFER is as for
6472 output_constant_def and MODIFIER is as for expand_expr. */
6474 static rtx
6475 expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
6477 rtx mem;
6479 mem = output_constant_def (exp, defer);
6480 if (modifier != EXPAND_INITIALIZER)
6481 mem = use_anchored_address (mem);
6482 return mem;
6485 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
6486 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6488 static rtx
6489 expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
6490 enum expand_modifier modifier)
6492 rtx result, subtarget;
6493 tree inner, offset;
6494 HOST_WIDE_INT bitsize, bitpos;
6495 int volatilep, unsignedp;
6496 enum machine_mode mode1;
6498 /* If we are taking the address of a constant and are at the top level,
6499 we have to use output_constant_def since we can't call force_const_mem
6500 at top level. */
6501 /* ??? This should be considered a front-end bug. We should not be
6502 generating ADDR_EXPR of something that isn't an LVALUE. The only
6503 exception here is STRING_CST. */
6504 if (TREE_CODE (exp) == CONSTRUCTOR
6505 || CONSTANT_CLASS_P (exp))
6506 return XEXP (expand_expr_constant (exp, 0, modifier), 0);
6508 /* Everything must be something allowed by is_gimple_addressable. */
6509 switch (TREE_CODE (exp))
6511 case INDIRECT_REF:
6512 /* This case will happen via recursion for &a->b. */
6513 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6515 case CONST_DECL:
6516 /* Recurse and make the output_constant_def clause above handle this. */
6517 return expand_expr_addr_expr_1 (DECL_INITIAL (exp), target,
6518 tmode, modifier);
6520 case REALPART_EXPR:
6521 /* The real part of the complex number is always first, therefore
6522 the address is the same as the address of the parent object. */
6523 offset = 0;
6524 bitpos = 0;
6525 inner = TREE_OPERAND (exp, 0);
6526 break;
6528 case IMAGPART_EXPR:
6529 /* The imaginary part of the complex number is always second.
6530 The expression is therefore always offset by the size of the
6531 scalar type. */
6532 offset = 0;
6533 bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
6534 inner = TREE_OPERAND (exp, 0);
6535 break;
6537 default:
6538 /* If the object is a DECL, then expand it for its rtl. Don't bypass
6539 expand_expr, as that can have various side effects; LABEL_DECLs, for
6540 example, may not have their DECL_RTL set yet. Assume language
6541 specific tree nodes can be expanded in some interesting way. */
6542 if (DECL_P (exp)
6543 || TREE_CODE (exp) >= LAST_AND_UNUSED_TREE_CODE)
6545 result = expand_expr (exp, target, tmode,
6546 modifier == EXPAND_INITIALIZER
6547 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
6549 /* If the DECL isn't in memory, then the DECL wasn't properly
6550 marked TREE_ADDRESSABLE, which will be either a front-end
6551 or a tree optimizer bug. */
6552 gcc_assert (MEM_P (result));
6553 result = XEXP (result, 0);
6555 /* ??? Is this needed anymore? */
6556 if (DECL_P (exp) && !TREE_USED (exp) == 0)
6558 assemble_external (exp);
6559 TREE_USED (exp) = 1;
6562 if (modifier != EXPAND_INITIALIZER
6563 && modifier != EXPAND_CONST_ADDRESS)
6564 result = force_operand (result, target);
6565 return result;
6568 /* Pass FALSE as the last argument to get_inner_reference although
6569 we are expanding to RTL. The rationale is that we know how to
6570 handle "aligning nodes" here: we can just bypass them because
6571 they won't change the final object whose address will be returned
6572 (they actually exist only for that purpose). */
6573 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6574 &mode1, &unsignedp, &volatilep, false);
6575 break;
6578 /* We must have made progress. */
6579 gcc_assert (inner != exp);
6581 subtarget = offset || bitpos ? NULL_RTX : target;
6582 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier);
6584 if (offset)
6586 rtx tmp;
6588 if (modifier != EXPAND_NORMAL)
6589 result = force_operand (result, NULL);
6590 tmp = expand_expr (offset, NULL, tmode, EXPAND_NORMAL);
6592 result = convert_memory_address (tmode, result);
6593 tmp = convert_memory_address (tmode, tmp);
6595 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6596 result = gen_rtx_PLUS (tmode, result, tmp);
6597 else
6599 subtarget = bitpos ? NULL_RTX : target;
6600 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
6601 1, OPTAB_LIB_WIDEN);
6605 if (bitpos)
6607 /* Someone beforehand should have rejected taking the address
6608 of such an object. */
6609 gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
6611 result = plus_constant (result, bitpos / BITS_PER_UNIT);
6612 if (modifier < EXPAND_SUM)
6613 result = force_operand (result, target);
6616 return result;
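/* Editorial sketch, not part of the original file: the tail of the function
   above converts a constant bit position into a byte displacement.  A
   generic restatement of that step; the helper name is hypothetical.  */

static long
sketch_bitpos_to_byte_offset (long bitpos, int bits_per_unit)
{
  /* Taking the address of something that does not start on a byte boundary
     is rejected earlier (see the gcc_assert above), so the division is exact.  */
  return bitpos / bits_per_unit;
}

/* With 8-bit units, a field at bit position 48 starts 6 bytes into the
   containing object.  */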
6619 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
6620 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6622 static rtx
6623 expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
6624 enum expand_modifier modifier)
6626 enum machine_mode rmode;
6627 rtx result;
6629 /* Target mode of VOIDmode says "whatever's natural". */
6630 if (tmode == VOIDmode)
6631 tmode = TYPE_MODE (TREE_TYPE (exp));
6633 /* We can get called with some Weird Things if the user does silliness
6634 like "(short) &a". In that case, convert_memory_address won't do
6635 the right thing, so ignore the given target mode. */
6636 if (tmode != Pmode && tmode != ptr_mode)
6637 tmode = Pmode;
6639 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
6640 tmode, modifier);
6642 /* Despite expand_expr claims concerning ignoring TMODE when not
6643 strictly convenient, stuff breaks if we don't honor it. Note
6644 that combined with the above, we only do this for pointer modes. */
6645 rmode = GET_MODE (result);
6646 if (rmode == VOIDmode)
6647 rmode = tmode;
6648 if (rmode != tmode)
6649 result = convert_memory_address (tmode, result);
6651 return result;
6655 /* expand_expr: generate code for computing expression EXP.
6656 An rtx for the computed value is returned. The value is never null.
6657 In the case of a void EXP, const0_rtx is returned.
6659 The value may be stored in TARGET if TARGET is nonzero.
6660 TARGET is just a suggestion; callers must assume that
6661 the rtx returned may not be the same as TARGET.
6663 If TARGET is CONST0_RTX, it means that the value will be ignored.
6665 If TMODE is not VOIDmode, it suggests generating the
6666 result in mode TMODE. But this is done only when convenient.
6667 Otherwise, TMODE is ignored and the value is generated in its natural mode.
6668 TMODE is just a suggestion; callers must assume that
6669 the rtx returned may not have mode TMODE.
6671 Note that TARGET may have neither TMODE nor MODE. In that case, it
6672 probably will not be used.
6674 If MODIFIER is EXPAND_SUM then when EXP is an addition
6675 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6676 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6677 products as above, or REG or MEM, or constant.
6678 Ordinarily in such cases we would output mul or add instructions
6679 and then return a pseudo reg containing the sum.
6681 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6682 it also marks a label as absolutely required (it can't be dead).
6683 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6684 This is used for outputting expressions used in initializers.
6686 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6687 with a constant address even if that address is not normally legitimate.
6688 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6690 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6691 a call parameter. Such targets require special care as we haven't yet
6692 marked TARGET so that it's safe from being trashed by libcalls. We
6693 don't want to use TARGET for anything but the final result.
6694 Intermediate values must go elsewhere. Additionally, calls to
6695 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
6697 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
6698 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
6699 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
6700 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
6701 recursively. */
6703 static rtx expand_expr_real_1 (tree, rtx, enum machine_mode,
6704 enum expand_modifier, rtx *);
6706 rtx
6707 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
6708 enum expand_modifier modifier, rtx *alt_rtl)
6710 int rn = -1;
6711 rtx ret, last = NULL;
6713 /* Handle ERROR_MARK before anybody tries to access its type. */
6714 if (TREE_CODE (exp) == ERROR_MARK
6715 || (!GIMPLE_TUPLE_P (exp) && TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK))
6717 ret = CONST0_RTX (tmode);
6718 return ret ? ret : const0_rtx;
6721 if (flag_non_call_exceptions)
6723 rn = lookup_stmt_eh_region (exp);
6724 /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't throw. */
6725 if (rn >= 0)
6726 last = get_last_insn ();
6729 /* If this is an expression of some kind and it has an associated line
6730 number, then emit the line number before expanding the expression.
6732 We need to save and restore the file and line information so that
6733 errors discovered during expansion are emitted with the right
6734 information. It would be better if the diagnostic routines
6735 used the file/line information embedded in the tree nodes rather
6736 than globals. */
6737 if (cfun && cfun->ib_boundaries_block && EXPR_HAS_LOCATION (exp))
6739 location_t saved_location = input_location;
6740 input_location = EXPR_LOCATION (exp);
6741 emit_line_note (input_location);
6743 /* Record where the insns produced belong. */
6744 record_block_change (TREE_BLOCK (exp));
6746 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6748 input_location = saved_location;
6750 else
6752 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6755 /* If using non-call exceptions, mark all insns that may trap.
6756 expand_call() will mark CALL_INSNs before we get to this code,
6757 but it doesn't handle libcalls, and these may trap. */
6758 if (rn >= 0)
6760 rtx insn;
6761 for (insn = next_real_insn (last); insn;
6762 insn = next_real_insn (insn))
6764 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
6765 /* If we want exceptions for non-call insns, any
6766 may_trap_p instruction may throw. */
6767 && GET_CODE (PATTERN (insn)) != CLOBBER
6768 && GET_CODE (PATTERN (insn)) != USE
6769 && (CALL_P (insn) || may_trap_p (PATTERN (insn))))
6771 REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (rn),
6772 REG_NOTES (insn));
6777 return ret;
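/* Editorial sketch, not part of the original file: a minimal example of the
   interface documented above.  With EXPAND_SUM the result may come back as
   a bare (plus ...) nest, which force_operand then reduces to something an
   ordinary insn can accept.  The wrapper name is hypothetical.  */

static rtx
sketch_expand_sum_then_force (tree exp)
{
  rtx sum = expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_SUM);
  return force_operand (sum, NULL_RTX);
}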
6780 static rtx
6781 expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
6782 enum expand_modifier modifier, rtx *alt_rtl)
6784 rtx op0, op1, temp, decl_rtl;
6785 tree type;
6786 int unsignedp;
6787 enum machine_mode mode;
6788 enum tree_code code = TREE_CODE (exp);
6789 optab this_optab;
6790 rtx subtarget, original_target;
6791 int ignore;
6792 tree context, subexp0, subexp1;
6793 bool reduce_bit_field = false;
6794 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field && !ignore \
6795 ? reduce_to_bit_field_precision ((expr), \
6796 target, \
6797 type) \
6798 : (expr))
6800 if (GIMPLE_STMT_P (exp))
6802 type = void_type_node;
6803 mode = VOIDmode;
6804 unsignedp = 0;
6806 else
6808 type = TREE_TYPE (exp);
6809 mode = TYPE_MODE (type);
6810 unsignedp = TYPE_UNSIGNED (type);
6812 if (lang_hooks.reduce_bit_field_operations
6813 && TREE_CODE (type) == INTEGER_TYPE
6814 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type))
6816 /* An operation in what may be a bit-field type needs the
6817 result to be reduced to the precision of the bit-field type,
6818 which is narrower than that of the type's mode. */
6819 reduce_bit_field = true;
6820 if (modifier == EXPAND_STACK_PARM)
6821 target = 0;
6824 /* Use subtarget as the target for operand 0 of a binary operation. */
6825 subtarget = get_subtarget (target);
6826 original_target = target;
6827 ignore = (target == const0_rtx
6828 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6829 || code == CONVERT_EXPR || code == COND_EXPR
6830 || code == VIEW_CONVERT_EXPR)
6831 && TREE_CODE (type) == VOID_TYPE));
6833 /* If we are going to ignore this result, we need only do something
6834 if there is a side-effect somewhere in the expression. If there
6835 is, short-circuit the most common cases here. Note that we must
6836 not call expand_expr with anything but const0_rtx in case this
6837 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6839 if (ignore)
6841 if (! TREE_SIDE_EFFECTS (exp))
6842 return const0_rtx;
6844 /* Ensure we reference a volatile object even if value is ignored, but
6845 don't do this if all we are doing is taking its address. */
6846 if (TREE_THIS_VOLATILE (exp)
6847 && TREE_CODE (exp) != FUNCTION_DECL
6848 && mode != VOIDmode && mode != BLKmode
6849 && modifier != EXPAND_CONST_ADDRESS)
6851 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6852 if (MEM_P (temp))
6853 temp = copy_to_reg (temp);
6854 return const0_rtx;
6857 if (TREE_CODE_CLASS (code) == tcc_unary
6858 || code == COMPONENT_REF || code == INDIRECT_REF)
6859 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6860 modifier);
6862 else if (TREE_CODE_CLASS (code) == tcc_binary
6863 || TREE_CODE_CLASS (code) == tcc_comparison
6864 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6866 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6867 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6868 return const0_rtx;
6870 else if (code == BIT_FIELD_REF)
6872 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6873 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6874 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6875 return const0_rtx;
6878 target = 0;
6882 switch (code)
6884 case LABEL_DECL:
6886 tree function = decl_function_context (exp);
6888 temp = label_rtx (exp);
6889 temp = gen_rtx_LABEL_REF (Pmode, temp);
6891 if (function != current_function_decl
6892 && function != 0)
6893 LABEL_REF_NONLOCAL_P (temp) = 1;
6895 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
6896 return temp;
6899 case SSA_NAME:
6900 return expand_expr_real_1 (SSA_NAME_VAR (exp), target, tmode, modifier,
6901 NULL);
6903 case PARM_DECL:
6904 case VAR_DECL:
6905 /* If a static var's type was incomplete when the decl was written,
6906 but the type is complete now, lay out the decl now. */
6907 if (DECL_SIZE (exp) == 0
6908 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
6909 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6910 layout_decl (exp, 0);
6912 /* ... fall through ... */
6914 case FUNCTION_DECL:
6915 case RESULT_DECL:
6916 decl_rtl = DECL_RTL (exp);
6917 gcc_assert (decl_rtl);
6919 /* Ensure the variable is marked as used even if it doesn't go through
6920 a parser. If it hasn't been used yet, write out an external
6921 definition. */
6922 if (! TREE_USED (exp))
6924 assemble_external (exp);
6925 TREE_USED (exp) = 1;
6928 /* Show we haven't gotten RTL for this yet. */
6929 temp = 0;
6931 /* Variables inherited from containing functions should have
6932 been lowered by this point. */
6933 context = decl_function_context (exp);
6934 gcc_assert (!context
6935 || context == current_function_decl
6936 || TREE_STATIC (exp)
6937 /* ??? C++ creates functions that are not TREE_STATIC. */
6938 || TREE_CODE (exp) == FUNCTION_DECL);
6940 /* This is the case of an array whose size is to be determined
6941 from its initializer, while the initializer is still being parsed.
6942 See expand_decl. */
6944 if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
6945 temp = validize_mem (decl_rtl);
6947 /* If DECL_RTL is memory, we are in the normal case; if either
6948 the address is not valid, or it is not a register and -fforce-addr
6949 is specified, get the address into a register. */
6951 else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
6953 if (alt_rtl)
6954 *alt_rtl = decl_rtl;
6955 decl_rtl = use_anchored_address (decl_rtl);
6956 if (modifier != EXPAND_CONST_ADDRESS
6957 && modifier != EXPAND_SUM
6958 && (!memory_address_p (DECL_MODE (exp), XEXP (decl_rtl, 0))
6959 || (flag_force_addr && !REG_P (XEXP (decl_rtl, 0)))))
6960 temp = replace_equiv_address (decl_rtl,
6961 copy_rtx (XEXP (decl_rtl, 0)));
6964 /* If we got something, return it. But first, set the alignment
6965 if the address is a register. */
6966 if (temp != 0)
6968 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
6969 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6971 return temp;
6974 /* If the mode of DECL_RTL does not match that of the decl, it
6975 must be a promoted value. We return a SUBREG of the wanted mode,
6976 but mark it so that we know that it was already extended. */
6978 if (REG_P (decl_rtl)
6979 && GET_MODE (decl_rtl) != DECL_MODE (exp))
6981 enum machine_mode pmode;
6983 /* Get the signedness used for this variable. Ensure we get the
6984 same mode we got when the variable was declared. */
6985 pmode = promote_mode (type, DECL_MODE (exp), &unsignedp,
6986 (TREE_CODE (exp) == RESULT_DECL
6987 || TREE_CODE (exp) == PARM_DECL) ? 1 : 0);
6988 gcc_assert (GET_MODE (decl_rtl) == pmode);
6990 temp = gen_lowpart_SUBREG (mode, decl_rtl);
6991 SUBREG_PROMOTED_VAR_P (temp) = 1;
6992 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6993 return temp;
6996 return decl_rtl;
6998 case INTEGER_CST:
6999 temp = immed_double_const (TREE_INT_CST_LOW (exp),
7000 TREE_INT_CST_HIGH (exp), mode);
7002 /* ??? If overflow is set, fold will have done an incomplete job,
7003 which can result in (plus xx (const_int 0)), which can get
7004 simplified by validate_replace_rtx during virtual register
7005 instantiation, which can result in unrecognizable insns.
7006 Avoid this by forcing all overflows into registers. */
7007 if (TREE_OVERFLOW (exp)
7008 && modifier != EXPAND_INITIALIZER)
7009 temp = force_reg (mode, temp);
7011 return temp;
7013 case VECTOR_CST:
7015 tree tmp = NULL_TREE;
7016 if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT
7017 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT)
7018 return const_vector_from_tree (exp);
7019 if (GET_MODE_CLASS (mode) == MODE_INT)
7021 tree type_for_mode = lang_hooks.types.type_for_mode (mode, 1);
7022 if (type_for_mode)
7023 tmp = fold_unary (VIEW_CONVERT_EXPR, type_for_mode, exp);
7025 if (!tmp)
7026 tmp = build_constructor_from_list (type,
7027 TREE_VECTOR_CST_ELTS (exp));
7028 return expand_expr (tmp, ignore ? const0_rtx : target,
7029 tmode, modifier);
7032 case CONST_DECL:
7033 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
7035 case REAL_CST:
7036 /* If optimized, generate immediate CONST_DOUBLE
7037 which will be turned into memory by reload if necessary.
7039 We used to force a register so that loop.c could see it. But
7040 this does not allow gen_* patterns to perform optimizations with
7041 the constants. It also produces two insns in cases like "x = 1.0;".
7042 On most machines, floating-point constants are not permitted in
7043 many insns, so we'd end up copying it to a register in any case.
7045 Now, we do the copying in expand_binop, if appropriate. */
7046 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
7047 TYPE_MODE (TREE_TYPE (exp)));
7049 case COMPLEX_CST:
7050 /* Handle evaluating a complex constant in a CONCAT target. */
7051 if (original_target && GET_CODE (original_target) == CONCAT)
7053 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7054 rtx rtarg, itarg;
7056 rtarg = XEXP (original_target, 0);
7057 itarg = XEXP (original_target, 1);
7059 /* Move the real and imaginary parts separately. */
7060 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, 0);
7061 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, 0);
7063 if (op0 != rtarg)
7064 emit_move_insn (rtarg, op0);
7065 if (op1 != itarg)
7066 emit_move_insn (itarg, op1);
7068 return original_target;
7071 /* ... fall through ... */
7073 case STRING_CST:
7074 temp = expand_expr_constant (exp, 1, modifier);
7076 /* temp contains a constant address.
7077 On RISC machines where a constant address isn't valid,
7078 make some insns to get that address into a register. */
7079 if (modifier != EXPAND_CONST_ADDRESS
7080 && modifier != EXPAND_INITIALIZER
7081 && modifier != EXPAND_SUM
7082 && (! memory_address_p (mode, XEXP (temp, 0))
7083 || flag_force_addr))
7084 return replace_equiv_address (temp,
7085 copy_rtx (XEXP (temp, 0)));
7086 return temp;
7088 case SAVE_EXPR:
7090 tree val = TREE_OPERAND (exp, 0);
7091 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);
7093 if (!SAVE_EXPR_RESOLVED_P (exp))
7095 /* We can indeed still hit this case, typically via builtin
7096 expanders calling save_expr immediately before expanding
7097 something. Assume this means that we only have to deal
7098 with non-BLKmode values. */
7099 gcc_assert (GET_MODE (ret) != BLKmode);
7101 val = build_decl (VAR_DECL, NULL, TREE_TYPE (exp));
7102 DECL_ARTIFICIAL (val) = 1;
7103 DECL_IGNORED_P (val) = 1;
7104 TREE_OPERAND (exp, 0) = val;
7105 SAVE_EXPR_RESOLVED_P (exp) = 1;
7107 if (!CONSTANT_P (ret))
7108 ret = copy_to_reg (ret);
7109 SET_DECL_RTL (val, ret);
7112 return ret;
7115 case GOTO_EXPR:
7116 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
7117 expand_goto (TREE_OPERAND (exp, 0));
7118 else
7119 expand_computed_goto (TREE_OPERAND (exp, 0));
7120 return const0_rtx;
7122 case CONSTRUCTOR:
7123 /* If we don't need the result, just ensure we evaluate any
7124 subexpressions. */
7125 if (ignore)
7127 unsigned HOST_WIDE_INT idx;
7128 tree value;
7130 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
7131 expand_expr (value, const0_rtx, VOIDmode, 0);
7133 return const0_rtx;
7136 /* Try to avoid creating a temporary at all. This is possible
7137 if all of the initializer is zero.
7138 FIXME: try to handle all [0..255] initializers we can handle
7139 with memset. */
7140 else if (TREE_STATIC (exp)
7141 && !TREE_ADDRESSABLE (exp)
7142 && target != 0 && mode == BLKmode
7143 && all_zeros_p (exp))
7145 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
7146 return target;
7149 /* All elts simple constants => refer to a constant in memory. But
7150 if this is a non-BLKmode mode, let it store a field at a time
7151 since that should make a CONST_INT or CONST_DOUBLE when we
7152 fold. Likewise, if we have a target we can use, it is best to
7153 store directly into the target unless the type is large enough
7154 that memcpy will be used. If we are making an initializer and
7155 all operands are constant, put it in memory as well.
7157 FIXME: Avoid trying to fill vector constructors piece-meal.
7158 Output them with output_constant_def below unless we're sure
7159 they're zeros. This should go away when vector initializers
7160 are treated like VECTOR_CST instead of arrays.
7161 */
7162 else if ((TREE_STATIC (exp)
7163 && ((mode == BLKmode
7164 && ! (target != 0 && safe_from_p (target, exp, 1)))
7165 || TREE_ADDRESSABLE (exp)
7166 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
7167 && (! MOVE_BY_PIECES_P
7168 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
7169 TYPE_ALIGN (type)))
7170 && ! mostly_zeros_p (exp))))
7171 || ((modifier == EXPAND_INITIALIZER
7172 || modifier == EXPAND_CONST_ADDRESS)
7173 && TREE_CONSTANT (exp)))
7175 rtx constructor = expand_expr_constant (exp, 1, modifier);
7177 if (modifier != EXPAND_CONST_ADDRESS
7178 && modifier != EXPAND_INITIALIZER
7179 && modifier != EXPAND_SUM)
7180 constructor = validize_mem (constructor);
7182 return constructor;
7184 else
7186 /* Handle calls that pass values in multiple non-contiguous
7187 locations. The Irix 6 ABI has examples of this. */
7188 if (target == 0 || ! safe_from_p (target, exp, 1)
7189 || GET_CODE (target) == PARALLEL
7190 || modifier == EXPAND_STACK_PARM)
7191 target
7192 = assign_temp (build_qualified_type (type,
7193 (TYPE_QUALS (type)
7194 | (TREE_READONLY (exp)
7195 * TYPE_QUAL_CONST))),
7196 0, TREE_ADDRESSABLE (exp), 1);
7198 store_constructor (exp, target, 0, int_expr_size (exp));
7199 return target;
7202 case MISALIGNED_INDIRECT_REF:
7203 case ALIGN_INDIRECT_REF:
7204 case INDIRECT_REF:
7206 tree exp1 = TREE_OPERAND (exp, 0);
7208 if (modifier != EXPAND_WRITE)
7210 tree t;
7212 t = fold_read_from_constant_string (exp);
7213 if (t)
7214 return expand_expr (t, target, tmode, modifier);
7217 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
7218 op0 = memory_address (mode, op0);
7220 if (code == ALIGN_INDIRECT_REF)
7222 int align = TYPE_ALIGN_UNIT (type);
7223 op0 = gen_rtx_AND (Pmode, op0, GEN_INT (-align));
7224 op0 = memory_address (mode, op0);
7227 temp = gen_rtx_MEM (mode, op0);
7229 set_mem_attributes (temp, exp, 0);
7231 /* Resolve the misalignment now, so that we don't have to remember
7232 to resolve it later. Of course, this only works for reads. */
7233 /* ??? When we get around to supporting writes, we'll have to handle
7234 this in store_expr directly. The vectorizer isn't generating
7235 those yet, however. */
7236 if (code == MISALIGNED_INDIRECT_REF)
7238 int icode;
7239 rtx reg, insn;
7241 gcc_assert (modifier == EXPAND_NORMAL
7242 || modifier == EXPAND_STACK_PARM);
7244 /* The vectorizer should have already checked the mode. */
7245 icode = movmisalign_optab->handlers[mode].insn_code;
7246 gcc_assert (icode != CODE_FOR_nothing);
7248 /* We've already validated the memory, and we're creating a
7249 new pseudo destination. The predicates really can't fail. */
7250 reg = gen_reg_rtx (mode);
7252 /* Nor can the insn generator. */
7253 insn = GEN_FCN (icode) (reg, temp);
7254 emit_insn (insn);
7256 return reg;
7259 return temp;
7262 case TARGET_MEM_REF:
7264 struct mem_address addr;
7266 get_address_description (exp, &addr);
7267 op0 = addr_for_mem_ref (&addr, true);
7268 op0 = memory_address (mode, op0);
7269 temp = gen_rtx_MEM (mode, op0);
7270 set_mem_attributes (temp, TMR_ORIGINAL (exp), 0);
7272 return temp;
7274 case ARRAY_REF:
7277 tree array = TREE_OPERAND (exp, 0);
7278 tree index = TREE_OPERAND (exp, 1);
7280 /* Fold an expression like: "foo"[2].
7281 This is not done in fold so it won't happen inside &.
7282 Don't fold if this is for wide characters since it's too
7283 difficult to do correctly and this is a very rare case. */
7285 if (modifier != EXPAND_CONST_ADDRESS
7286 && modifier != EXPAND_INITIALIZER
7287 && modifier != EXPAND_MEMORY)
7289 tree t = fold_read_from_constant_string (exp);
7291 if (t)
7292 return expand_expr (t, target, tmode, modifier);
7295 /* If this is a constant index into a constant array,
7296 just get the value from the array. Handle both the cases when
7297 we have an explicit constructor and when our operand is a variable
7298 that was declared const. */
7300 if (modifier != EXPAND_CONST_ADDRESS
7301 && modifier != EXPAND_INITIALIZER
7302 && modifier != EXPAND_MEMORY
7303 && TREE_CODE (array) == CONSTRUCTOR
7304 && ! TREE_SIDE_EFFECTS (array)
7305 && TREE_CODE (index) == INTEGER_CST)
7307 unsigned HOST_WIDE_INT ix;
7308 tree field, value;
7310 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
7311 field, value)
7312 if (tree_int_cst_equal (field, index))
7314 if (!TREE_SIDE_EFFECTS (value))
7315 return expand_expr (fold (value), target, tmode, modifier);
7316 break;
7320 else if (optimize >= 1
7321 && modifier != EXPAND_CONST_ADDRESS
7322 && modifier != EXPAND_INITIALIZER
7323 && modifier != EXPAND_MEMORY
7324 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
7325 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
7326 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
7327 && targetm.binds_local_p (array))
7329 if (TREE_CODE (index) == INTEGER_CST)
7331 tree init = DECL_INITIAL (array);
7333 if (TREE_CODE (init) == CONSTRUCTOR)
7335 unsigned HOST_WIDE_INT ix;
7336 tree field, value;
7338 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
7339 field, value)
7340 if (tree_int_cst_equal (field, index))
7342 if (!TREE_SIDE_EFFECTS (value))
7343 return expand_expr (fold (value), target, tmode,
7344 modifier);
7345 break;
7348 else if (TREE_CODE (init) == STRING_CST)
7350 tree index1 = index;
7351 tree low_bound = array_ref_low_bound (exp);
7352 index1 = fold_convert (sizetype, TREE_OPERAND (exp, 1));
7354 /* Optimize the special-case of a zero lower bound.
7356 We convert the low_bound to sizetype to avoid some problems
7357 with constant folding. (E.g. suppose the lower bound is 1,
7358 and its mode is QI. Without the conversion, (ARRAY
7359 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
7360 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
7362 if (! integer_zerop (low_bound))
7363 index1 = size_diffop (index1, fold_convert (sizetype,
7364 low_bound));
7366 if (0 > compare_tree_int (index1,
7367 TREE_STRING_LENGTH (init)))
7369 tree type = TREE_TYPE (TREE_TYPE (init));
7370 enum machine_mode mode = TYPE_MODE (type);
7372 if (GET_MODE_CLASS (mode) == MODE_INT
7373 && GET_MODE_SIZE (mode) == 1)
7374 return gen_int_mode (TREE_STRING_POINTER (init)
7375 [TREE_INT_CST_LOW (index1)],
7376 mode);
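/* If none of the constant-folding cases above applied, handle the
   reference the generic way below.  */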
7382 goto normal_inner_ref;
7384 case COMPONENT_REF:
7385 /* If the operand is a CONSTRUCTOR, we can just extract the
7386 appropriate field if it is present. */
7387 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
7389 unsigned HOST_WIDE_INT idx;
7390 tree field, value;
7392 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
7393 idx, field, value)
7394 if (field == TREE_OPERAND (exp, 1)
7395 /* We can normally use the value of the field in the
7396 CONSTRUCTOR. However, if this is a bitfield in
7397 an integral mode that we can fit in a HOST_WIDE_INT,
7398 we must mask only the number of bits in the bitfield,
7399 since this is done implicitly by the constructor. If
7400 the bitfield does not meet either of those conditions,
7401 we can't do this optimization. */
7402 && (! DECL_BIT_FIELD (field)
7403 || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
7404 && (GET_MODE_BITSIZE (DECL_MODE (field))
7405 <= HOST_BITS_PER_WIDE_INT))))
7407 if (DECL_BIT_FIELD (field)
7408 && modifier == EXPAND_STACK_PARM)
7409 target = 0;
7410 op0 = expand_expr (value, target, tmode, modifier);
7411 if (DECL_BIT_FIELD (field))
7413 HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
7414 enum machine_mode imode = TYPE_MODE (TREE_TYPE (field));
7416 if (TYPE_UNSIGNED (TREE_TYPE (field)))
7418 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
7419 op0 = expand_and (imode, op0, op1, target);
7421 else
7423 tree count
7424 = build_int_cst (NULL_TREE,
7425 GET_MODE_BITSIZE (imode) - bitsize);
7427 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
7428 target, 0);
7429 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
7430 target, 0);
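/* For example, a signed 3-bit field whose mode is SImode is shifted
   left by 29 bits and then arithmetically right by 29 bits, which
   sign-extends the low 3 bits into the full word.  */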
7434 return op0;
7437 goto normal_inner_ref;
7439 case BIT_FIELD_REF:
7440 case ARRAY_RANGE_REF:
7441 normal_inner_ref:
7443 enum machine_mode mode1;
7444 HOST_WIDE_INT bitsize, bitpos;
7445 tree offset;
7446 int volatilep = 0;
7447 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7448 &mode1, &unsignedp, &volatilep, true);
7449 rtx orig_op0;
7451 /* If we got back the original object, something is wrong. Perhaps
7452 we are evaluating an expression too early. In any event, don't
7453 infinitely recurse. */
7454 gcc_assert (tem != exp);
7456 /* If TEM's type is a union of variable size, pass TARGET to the inner
7457 computation, since it will need a temporary and TARGET is known
7458 to be suitable. This occurs in unchecked conversion in Ada. */
7460 orig_op0 = op0
7461 = expand_expr (tem,
7462 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7463 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7464 != INTEGER_CST)
7465 && modifier != EXPAND_STACK_PARM
7466 ? target : NULL_RTX),
7467 VOIDmode,
7468 (modifier == EXPAND_INITIALIZER
7469 || modifier == EXPAND_CONST_ADDRESS
7470 || modifier == EXPAND_STACK_PARM)
7471 ? modifier : EXPAND_NORMAL);
7473 /* If this is a constant, put it into a register if it is a legitimate
7474 constant, OFFSET is 0, and we won't try to extract outside the
7475 register (in case we were passed a partially uninitialized object
7476 or a view_conversion to a larger size). Force the constant to
7477 memory otherwise. */
7478 if (CONSTANT_P (op0))
7480 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7481 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7482 && offset == 0
7483 && bitpos + bitsize <= GET_MODE_BITSIZE (mode))
7484 op0 = force_reg (mode, op0);
7485 else
7486 op0 = validize_mem (force_const_mem (mode, op0));
7489 /* Otherwise, if this object is not in memory and we either have an
7490 offset, a BLKmode result, or a reference outside the object, put it
7491 there. Such cases can occur in Ada if we have unchecked conversion
7492 of an expression from a scalar type to an array or record type or
7493 for an ARRAY_RANGE_REF whose type is BLKmode. */
7494 else if (!MEM_P (op0)
7495 && (offset != 0
7496 || (bitpos + bitsize > GET_MODE_BITSIZE (GET_MODE (op0)))
7497 || (code == ARRAY_RANGE_REF && mode == BLKmode)))
7499 tree nt = build_qualified_type (TREE_TYPE (tem),
7500 (TYPE_QUALS (TREE_TYPE (tem))
7501 | TYPE_QUAL_CONST));
7502 rtx memloc = assign_temp (nt, 1, 1, 1);
7504 emit_move_insn (memloc, op0);
7505 op0 = memloc;
7508 if (offset != 0)
7510 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
7511 EXPAND_SUM);
7513 gcc_assert (MEM_P (op0));
7515 #ifdef POINTERS_EXTEND_UNSIGNED
7516 if (GET_MODE (offset_rtx) != Pmode)
7517 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
7518 #else
7519 if (GET_MODE (offset_rtx) != ptr_mode)
7520 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7521 #endif
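/* On targets that define POINTERS_EXTEND_UNSIGNED, addresses are Pmode
   while pointer values are ptr_mode, so the offset may need widening
   before the address arithmetic below.  */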
7523 if (GET_MODE (op0) == BLKmode
7524 /* A constant address in OP0 can have VOIDmode, we must
7525 not try to call force_reg in that case. */
7526 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7527 && bitsize != 0
7528 && (bitpos % bitsize) == 0
7529 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7530 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7532 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7533 bitpos = 0;
7536 op0 = offset_address (op0, offset_rtx,
7537 highest_pow2_factor (offset));
7540 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7541 record its alignment as BIGGEST_ALIGNMENT. */
7542 if (MEM_P (op0) && bitpos == 0 && offset != 0
7543 && is_aligning_offset (offset, tem))
7544 set_mem_align (op0, BIGGEST_ALIGNMENT);
7546 /* Don't forget about volatility even if this is a bitfield. */
7547 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
7549 if (op0 == orig_op0)
7550 op0 = copy_rtx (op0);
7552 MEM_VOLATILE_P (op0) = 1;
7555 /* The following code doesn't handle CONCAT.
7556 Assume only bitpos == 0 can be used for CONCAT, because
7557 one-element arrays have the same mode as their element. */
7558 if (GET_CODE (op0) == CONCAT)
7560 gcc_assert (bitpos == 0
7561 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)));
7562 return op0;
7565 /* In cases where an aligned union has an unaligned object
7566 as a field, we might be extracting a BLKmode value from
7567 an integer-mode (e.g., SImode) object. Handle this case
7568 by doing the extract into an object as wide as the field
7569 (which we know to be the width of a basic mode), then
7570 storing into memory, and changing the mode to BLKmode. */
7571 if (mode1 == VOIDmode
7572 || REG_P (op0) || GET_CODE (op0) == SUBREG
7573 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7574 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7575 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7576 && modifier != EXPAND_CONST_ADDRESS
7577 && modifier != EXPAND_INITIALIZER)
7578 /* If the field isn't aligned enough to fetch as a memref,
7579 fetch it as a bit field. */
7580 || (mode1 != BLKmode
7581 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
7582 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
7583 || (MEM_P (op0)
7584 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
7585 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
7586 && ((modifier == EXPAND_CONST_ADDRESS
7587 || modifier == EXPAND_INITIALIZER)
7588 ? STRICT_ALIGNMENT
7589 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
7590 || (bitpos % BITS_PER_UNIT != 0)))
7591 /* If the type and the field have a constant size and the
7592 size of the type isn't the same as the size of the bitfield,
7593 we must use bitfield operations. */
7594 || (bitsize >= 0
7595 && TYPE_SIZE (TREE_TYPE (exp))
7596 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
7597 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7598 bitsize)))
7600 enum machine_mode ext_mode = mode;
7602 if (ext_mode == BLKmode
7603 && ! (target != 0 && MEM_P (op0)
7604 && MEM_P (target)
7605 && bitpos % BITS_PER_UNIT == 0))
7606 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7608 if (ext_mode == BLKmode)
7610 if (target == 0)
7611 target = assign_temp (type, 0, 1, 1);
7613 if (bitsize == 0)
7614 return target;
7616 /* In this case, BITPOS must start at a byte boundary and
7617 TARGET, if specified, must be a MEM. */
7618 gcc_assert (MEM_P (op0)
7619 && (!target || MEM_P (target))
7620 && !(bitpos % BITS_PER_UNIT));
7622 emit_block_move (target,
7623 adjust_address (op0, VOIDmode,
7624 bitpos / BITS_PER_UNIT),
7625 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7626 / BITS_PER_UNIT),
7627 (modifier == EXPAND_STACK_PARM
7628 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7630 return target;
7633 op0 = validize_mem (op0);
7635 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
7636 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7638 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7639 (modifier == EXPAND_STACK_PARM
7640 ? NULL_RTX : target),
7641 ext_mode, ext_mode);
7643 /* If the result is a record type and BITSIZE is narrower than
7644 the mode of OP0, an integral mode, and this is a big endian
7645 machine, we must put the field into the high-order bits. */
7646 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7647 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7648 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7649 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7650 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7651 - bitsize),
7652 op0, 1);
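/* For instance, a 24-bit record fetched into an SImode OP0 on a
   big-endian machine is shifted left by 32 - 24 = 8 bits so that it
   occupies the high-order bits.  */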
7654 /* If the result type is BLKmode, store the data into a temporary
7655 of the appropriate type, but with the mode corresponding to the
7656 mode for the data we have (op0's mode). It's tempting to make
7657 this a constant type, since we know it's only being stored once,
7658 but that can cause problems if we are taking the address of this
7659 COMPONENT_REF because the MEM of any reference via that address
7660 will have flags corresponding to the type, which will not
7661 necessarily be constant. */
7662 if (mode == BLKmode)
7664 rtx new
7665 = assign_stack_temp_for_type
7666 (ext_mode, GET_MODE_BITSIZE (ext_mode), 0, type);
7668 emit_move_insn (new, op0);
7669 op0 = copy_rtx (new);
7670 PUT_MODE (op0, BLKmode);
7671 set_mem_attributes (op0, exp, 1);
7674 return op0;
7677 /* If the result is BLKmode, use that to access the object
7678 now as well. */
7679 if (mode == BLKmode)
7680 mode1 = BLKmode;
7682 /* Get a reference to just this component. */
7683 if (modifier == EXPAND_CONST_ADDRESS
7684 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7685 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7686 else
7687 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7689 if (op0 == orig_op0)
7690 op0 = copy_rtx (op0);
7692 set_mem_attributes (op0, exp, 0);
7693 if (REG_P (XEXP (op0, 0)))
7694 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7696 MEM_VOLATILE_P (op0) |= volatilep;
7697 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7698 || modifier == EXPAND_CONST_ADDRESS
7699 || modifier == EXPAND_INITIALIZER)
7700 return op0;
7701 else if (target == 0)
7702 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7704 convert_move (target, op0, unsignedp);
7705 return target;
7708 case OBJ_TYPE_REF:
7709 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
7711 case CALL_EXPR:
7712 /* Check for a built-in function. */
7713 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7714 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7715 == FUNCTION_DECL)
7716 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7718 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7719 == BUILT_IN_FRONTEND)
7720 return lang_hooks.expand_expr (exp, original_target,
7721 tmode, modifier,
7722 alt_rtl);
7723 else
7724 return expand_builtin (exp, target, subtarget, tmode, ignore);
7727 return expand_call (exp, target, ignore);
7729 case NON_LVALUE_EXPR:
7730 case NOP_EXPR:
7731 case CONVERT_EXPR:
7732 if (TREE_OPERAND (exp, 0) == error_mark_node)
7733 return const0_rtx;
7735 if (TREE_CODE (type) == UNION_TYPE)
7737 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7739 /* If both input and output are BLKmode, this conversion isn't doing
7740 anything except possibly changing memory attribute. */
7741 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7743 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7744 modifier);
7746 result = copy_rtx (result);
7747 set_mem_attributes (result, exp, 0);
7748 return result;
7751 if (target == 0)
7753 if (TYPE_MODE (type) != BLKmode)
7754 target = gen_reg_rtx (TYPE_MODE (type));
7755 else
7756 target = assign_temp (type, 0, 1, 1);
7759 if (MEM_P (target))
7760 /* Store data into beginning of memory target. */
7761 store_expr (TREE_OPERAND (exp, 0),
7762 adjust_address (target, TYPE_MODE (valtype), 0),
7763 modifier == EXPAND_STACK_PARM);
7765 else
7767 gcc_assert (REG_P (target));
7769 /* Store this field into a union of the proper type. */
7770 store_field (target,
7771 MIN ((int_size_in_bytes (TREE_TYPE
7772 (TREE_OPERAND (exp, 0)))
7773 * BITS_PER_UNIT),
7774 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7775 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7776 type, 0);
7779 /* Return the entire union. */
7780 return target;
7783 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7785 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7786 modifier);
7788 /* If the signedness of the conversion differs and OP0 is
7789 a promoted SUBREG, clear that indication since we now
7790 have to do the proper extension. */
7791 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7792 && GET_CODE (op0) == SUBREG)
7793 SUBREG_PROMOTED_VAR_P (op0) = 0;
7795 return REDUCE_BIT_FIELD (op0);
7798 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode,
7799 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
7800 if (GET_MODE (op0) == mode)
7803 /* If OP0 is a constant, just convert it into the proper mode. */
7804 else if (CONSTANT_P (op0))
7806 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7807 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7809 if (modifier == EXPAND_INITIALIZER)
7810 op0 = simplify_gen_subreg (mode, op0, inner_mode,
7811 subreg_lowpart_offset (mode,
7812 inner_mode));
7813 else
7814 op0 = convert_modes (mode, inner_mode, op0,
7815 TYPE_UNSIGNED (inner_type));
7818 else if (modifier == EXPAND_INITIALIZER)
7819 op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7821 else if (target == 0)
7822 op0 = convert_to_mode (mode, op0,
7823 TYPE_UNSIGNED (TREE_TYPE
7824 (TREE_OPERAND (exp, 0))));
7825 else
7827 convert_move (target, op0,
7828 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7829 op0 = target;
7832 return REDUCE_BIT_FIELD (op0);
7834 case VIEW_CONVERT_EXPR:
7835 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7837 /* If the input and output modes are both the same, we are done. */
7838 if (TYPE_MODE (type) == GET_MODE (op0))
7840 /* If neither mode is BLKmode, and both modes are the same size
7841 then we can use gen_lowpart. */
7842 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7843 && GET_MODE_SIZE (TYPE_MODE (type))
7844 == GET_MODE_SIZE (GET_MODE (op0)))
7846 if (GET_CODE (op0) == SUBREG)
7847 op0 = force_reg (GET_MODE (op0), op0);
7848 op0 = gen_lowpart (TYPE_MODE (type), op0);
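/* For instance, a VIEW_CONVERT_EXPR reinterpreting a float as a 32-bit
   integer (SFmode to SImode on typical targets, where both are 4 bytes)
   is handled by the gen_lowpart call above.  */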
7850 /* If both modes are integral, then we can convert from one to the
7851 other. */
7852 else if (SCALAR_INT_MODE_P (GET_MODE (op0))
7853 && SCALAR_INT_MODE_P (TYPE_MODE (type)))
7854 op0 = convert_modes (TYPE_MODE (type), GET_MODE (op0), op0,
7855 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7856 /* As a last resort, spill op0 to memory, and reload it in a
7857 different mode. */
7858 else if (!MEM_P (op0))
7860 /* If the operand is not a MEM, force it into memory. Since we
7861 are going to be changing the mode of the MEM, don't call
7862 force_const_mem for constants because we don't allow pool
7863 constants to change mode. */
7864 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7866 gcc_assert (!TREE_ADDRESSABLE (exp));
7868 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7869 target
7870 = assign_stack_temp_for_type
7871 (TYPE_MODE (inner_type),
7872 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7874 emit_move_insn (target, op0);
7875 op0 = target;
7878 /* At this point, OP0 is in the correct mode. If the output type is such
7879 that the operand is known to be aligned, indicate that it is.
7880 Otherwise, we need only be concerned about alignment for non-BLKmode
7881 results. */
7882 if (MEM_P (op0))
7884 op0 = copy_rtx (op0);
7886 if (TYPE_ALIGN_OK (type))
7887 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7888 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7889 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7891 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7892 HOST_WIDE_INT temp_size
7893 = MAX (int_size_in_bytes (inner_type),
7894 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7895 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7896 temp_size, 0, type);
7897 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7899 gcc_assert (!TREE_ADDRESSABLE (exp));
7901 if (GET_MODE (op0) == BLKmode)
7902 emit_block_move (new_with_op0_mode, op0,
7903 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
7904 (modifier == EXPAND_STACK_PARM
7905 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7906 else
7907 emit_move_insn (new_with_op0_mode, op0);
7909 op0 = new;
7912 op0 = adjust_address (op0, TYPE_MODE (type), 0);
7915 return op0;
7917 case PLUS_EXPR:
7918 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
7919 something else, make sure we add the register to the constant and
7920 then to the other thing. This case can occur during strength
7921 reduction and doing it this way will produce better code if the
7922 frame pointer or argument pointer is eliminated.
7924 fold-const.c will ensure that the constant is always in the inner
7925 PLUS_EXPR, so the only case we need to do anything about is if
7926 sp, ap, or fp is our second argument, in which case we must swap
7927 the innermost first argument and our second argument. */
7929 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7930 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7931 && TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL
7932 && (DECL_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7933 || DECL_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7934 || DECL_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7936 tree t = TREE_OPERAND (exp, 1);
7938 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7939 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7942 /* If the result is to be ptr_mode and we are adding an integer to
7943 something, we might be forming a constant. So try to use
7944 plus_constant. If it produces a sum and we can't accept it,
7945 use force_operand. This allows P = &ARR[const] to generate
7946 efficient code on machines where a SYMBOL_REF is not a valid
7947 address.
7949 If this is an EXPAND_SUM call, always return the sum. */
7950 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7951 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7953 if (modifier == EXPAND_STACK_PARM)
7954 target = 0;
7955 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7956 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7957 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7959 rtx constant_part;
7961 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7962 EXPAND_SUM);
7963 /* Use immed_double_const to ensure that the constant is
7964 truncated according to the mode of OP1, then sign extended
7965 to a HOST_WIDE_INT. Using the constant directly can result
7966 in non-canonical RTL in a 64x32 cross compile. */
7967 constant_part
7968 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7969 (HOST_WIDE_INT) 0,
7970 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7971 op1 = plus_constant (op1, INTVAL (constant_part));
7972 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7973 op1 = force_operand (op1, target);
7974 return REDUCE_BIT_FIELD (op1);
7977 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7978 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7979 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7981 rtx constant_part;
7983 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7984 (modifier == EXPAND_INITIALIZER
7985 ? EXPAND_INITIALIZER : EXPAND_SUM));
7986 if (! CONSTANT_P (op0))
7988 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7989 VOIDmode, modifier);
7990 /* Return a PLUS if modifier says it's OK. */
7991 if (modifier == EXPAND_SUM
7992 || modifier == EXPAND_INITIALIZER)
7993 return simplify_gen_binary (PLUS, mode, op0, op1);
7994 goto binop2;
7996 /* Use immed_double_const to ensure that the constant is
7997 truncated according to the mode of OP0, then sign extended
7998 to a HOST_WIDE_INT. Using the constant directly can result
7999 in non-canonical RTL in a 64x32 cross compile. */
8000 constant_part
8001 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
8002 (HOST_WIDE_INT) 0,
8003 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
8004 op0 = plus_constant (op0, INTVAL (constant_part));
8005 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8006 op0 = force_operand (op0, target);
8007 return REDUCE_BIT_FIELD (op0);
8011 /* No sense saving up arithmetic to be done
8012 if it's all in the wrong mode to form part of an address.
8013 And force_operand won't know whether to sign-extend or
8014 zero-extend. */
8015 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8016 || mode != ptr_mode)
8018 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8019 subtarget, &op0, &op1, 0);
8020 if (op0 == const0_rtx)
8021 return op1;
8022 if (op1 == const0_rtx)
8023 return op0;
8024 goto binop2;
8027 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8028 subtarget, &op0, &op1, modifier);
8029 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8031 case MINUS_EXPR:
8032 /* For initializers, we are allowed to return a MINUS of two
8033 symbolic constants. Here we handle all cases when both operands
8034 are constant. */
8035 /* Handle difference of two symbolic constants,
8036 for the sake of an initializer. */
8037 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8038 && really_constant_p (TREE_OPERAND (exp, 0))
8039 && really_constant_p (TREE_OPERAND (exp, 1)))
8041 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8042 NULL_RTX, &op0, &op1, modifier);
8044 /* If the last operand is a CONST_INT, use plus_constant of
8045 the negated constant. Else make the MINUS. */
8046 if (GET_CODE (op1) == CONST_INT)
8047 return REDUCE_BIT_FIELD (plus_constant (op0, - INTVAL (op1)));
8048 else
8049 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
8052 /* No sense saving up arithmetic to be done
8053 if it's all in the wrong mode to form part of an address.
8054 And force_operand won't know whether to sign-extend or
8055 zero-extend. */
8056 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8057 || mode != ptr_mode)
8058 goto binop;
8060 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8061 subtarget, &op0, &op1, modifier);
8063 /* Convert A - const to A + (-const). */
8064 if (GET_CODE (op1) == CONST_INT)
8066 op1 = negate_rtx (mode, op1);
8067 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8070 goto binop2;
8072 case MULT_EXPR:
8073 /* If first operand is constant, swap them.
8074 Thus the following special case checks need only
8075 check the second operand. */
8076 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
8078 tree t1 = TREE_OPERAND (exp, 0);
8079 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
8080 TREE_OPERAND (exp, 1) = t1;
8083 /* Attempt to return something suitable for generating an
8084 indexed address, for machines that support that. */
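/* For example, expanding I * 4 with EXPAND_SUM may return
   (mult (reg I) (const_int 4)), which the caller can then fold into an
   indexed address such as (plus (reg BASE) (mult (reg I) (const_int 4))).  */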
8086 if (modifier == EXPAND_SUM && mode == ptr_mode
8087 && host_integerp (TREE_OPERAND (exp, 1), 0))
8089 tree exp1 = TREE_OPERAND (exp, 1);
8091 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8092 EXPAND_SUM);
8094 if (!REG_P (op0))
8095 op0 = force_operand (op0, NULL_RTX);
8096 if (!REG_P (op0))
8097 op0 = copy_to_mode_reg (mode, op0);
8099 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
8100 gen_int_mode (tree_low_cst (exp1, 0),
8101 TYPE_MODE (TREE_TYPE (exp1)))));
8104 if (modifier == EXPAND_STACK_PARM)
8105 target = 0;
8107 /* Check for multiplying things that have been extended
8108 from a narrower type. If this machine supports multiplying
8109 in that narrower type with a result in the desired type,
8110 do it that way, and avoid the explicit type-conversion. */
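/* For example, (int) A * (int) B with short operands can use a
   HImode-to-SImode widening multiply directly when the target provides
   one, instead of extending both operands to SImode first.  */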
8112 subexp0 = TREE_OPERAND (exp, 0);
8113 subexp1 = TREE_OPERAND (exp, 1);
8114 /* First, check if we have a multiplication of one signed and one
8115 unsigned operand. */
8116 if (TREE_CODE (subexp0) == NOP_EXPR
8117 && TREE_CODE (subexp1) == NOP_EXPR
8118 && TREE_CODE (type) == INTEGER_TYPE
8119 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
8120 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
8121 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
8122 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp1, 0))))
8123 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
8124 != TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp1, 0)))))
8126 enum machine_mode innermode
8127 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (subexp0, 0)));
8128 this_optab = usmul_widen_optab;
8129 if (mode == GET_MODE_WIDER_MODE (innermode))
8131 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
8133 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp0, 0))))
8134 expand_operands (TREE_OPERAND (subexp0, 0),
8135 TREE_OPERAND (subexp1, 0),
8136 NULL_RTX, &op0, &op1, 0);
8137 else
8138 expand_operands (TREE_OPERAND (subexp0, 0),
8139 TREE_OPERAND (subexp1, 0),
8140 NULL_RTX, &op1, &op0, 0);
8142 goto binop3;
8146 /* Check for a multiplication with matching signedness. */
8147 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
8148 && TREE_CODE (type) == INTEGER_TYPE
8149 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8150 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
8151 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8152 && int_fits_type_p (TREE_OPERAND (exp, 1),
8153 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8154 /* Don't use a widening multiply if a shift will do. */
8155 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
8156 > HOST_BITS_PER_WIDE_INT)
8157 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
8158 ||
8159 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8160 && (TYPE_PRECISION (TREE_TYPE
8161 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8162 == TYPE_PRECISION (TREE_TYPE
8163 (TREE_OPERAND
8164 (TREE_OPERAND (exp, 0), 0))))
8165 /* If both operands are extended, they must either both
8166 be zero-extended or both be sign-extended. */
8167 && (TYPE_UNSIGNED (TREE_TYPE
8168 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8169 == TYPE_UNSIGNED (TREE_TYPE
8170 (TREE_OPERAND
8171 (TREE_OPERAND (exp, 0), 0)))))))
8173 tree op0type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
8174 enum machine_mode innermode = TYPE_MODE (op0type);
8175 bool zextend_p = TYPE_UNSIGNED (op0type);
8176 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
8177 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
8179 if (mode == GET_MODE_2XWIDER_MODE (innermode))
8181 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
8183 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8184 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8185 TREE_OPERAND (exp, 1),
8186 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8187 else
8188 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8189 TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
8190 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8191 goto binop3;
8193 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
8194 && innermode == word_mode)
8196 rtx htem, hipart;
8197 op0 = expand_normal (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
8198 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8199 op1 = convert_modes (innermode, mode,
8200 expand_normal (TREE_OPERAND (exp, 1)),
8201 unsignedp);
8202 else
8203 op1 = expand_normal (TREE_OPERAND (TREE_OPERAND (exp, 1), 0));
8204 temp = expand_binop (mode, other_optab, op0, op1, target,
8205 unsignedp, OPTAB_LIB_WIDEN);
8206 hipart = gen_highpart (innermode, temp);
8207 htem = expand_mult_highpart_adjust (innermode, hipart,
8208 op0, op1, hipart,
8209 zextend_p);
8210 if (htem != hipart)
8211 emit_move_insn (hipart, htem);
8212 return REDUCE_BIT_FIELD (temp);
8216 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8217 subtarget, &op0, &op1, 0);
8218 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8220 case TRUNC_DIV_EXPR:
8221 case FLOOR_DIV_EXPR:
8222 case CEIL_DIV_EXPR:
8223 case ROUND_DIV_EXPR:
8224 case EXACT_DIV_EXPR:
8225 if (modifier == EXPAND_STACK_PARM)
8226 target = 0;
8227 /* Possible optimization: compute the dividend with EXPAND_SUM
8228 then, if the divisor is constant, optimize the case
8229 where some terms of the dividend have coefficients divisible by it. */
8230 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8231 subtarget, &op0, &op1, 0);
8232 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8234 case RDIV_EXPR:
8235 goto binop;
8237 case TRUNC_MOD_EXPR:
8238 case FLOOR_MOD_EXPR:
8239 case CEIL_MOD_EXPR:
8240 case ROUND_MOD_EXPR:
8241 if (modifier == EXPAND_STACK_PARM)
8242 target = 0;
8243 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8244 subtarget, &op0, &op1, 0);
8245 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8247 case FIX_TRUNC_EXPR:
8248 op0 = expand_normal (TREE_OPERAND (exp, 0));
8249 if (target == 0 || modifier == EXPAND_STACK_PARM)
8250 target = gen_reg_rtx (mode);
8251 expand_fix (target, op0, unsignedp);
8252 return target;
8254 case FLOAT_EXPR:
8255 op0 = expand_normal (TREE_OPERAND (exp, 0));
8256 if (target == 0 || modifier == EXPAND_STACK_PARM)
8257 target = gen_reg_rtx (mode);
8258 /* expand_float can't figure out what to do if FROM has VOIDmode.
8259 So give it the correct mode. With -O, cse will optimize this. */
8260 if (GET_MODE (op0) == VOIDmode)
8261 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8262 op0);
8263 expand_float (target, op0,
8264 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8265 return target;
8267 case NEGATE_EXPR:
8268 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8269 if (modifier == EXPAND_STACK_PARM)
8270 target = 0;
8271 temp = expand_unop (mode,
8272 optab_for_tree_code (NEGATE_EXPR, type),
8273 op0, target, 0);
8274 gcc_assert (temp);
8275 return REDUCE_BIT_FIELD (temp);
8277 case ABS_EXPR:
8278 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8279 if (modifier == EXPAND_STACK_PARM)
8280 target = 0;
8282 /* ABS_EXPR is not valid for complex arguments. */
8283 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
8284 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
8286 /* Unsigned abs is simply the operand. Testing here means we don't
8287 risk generating incorrect code below. */
8288 if (TYPE_UNSIGNED (type))
8289 return op0;
8291 return expand_abs (mode, op0, target, unsignedp,
8292 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
8294 case MAX_EXPR:
8295 case MIN_EXPR:
8296 target = original_target;
8297 if (target == 0
8298 || modifier == EXPAND_STACK_PARM
8299 || (MEM_P (target) && MEM_VOLATILE_P (target))
8300 || GET_MODE (target) != mode
8301 || (REG_P (target)
8302 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8303 target = gen_reg_rtx (mode);
8304 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8305 target, &op0, &op1, 0);
8307 /* First try to do it with a special MIN or MAX instruction.
8308 If that does not win, use a conditional jump to select the proper
8309 value. */
8310 this_optab = optab_for_tree_code (code, type);
8311 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8312 OPTAB_WIDEN);
8313 if (temp != 0)
8314 return temp;
8316 /* At this point, a MEM target is no longer useful; we will get better
8317 code without it. */
8319 if (! REG_P (target))
8320 target = gen_reg_rtx (mode);
8322 /* If op1 was placed in target, swap op0 and op1. */
8323 if (target != op0 && target == op1)
8325 temp = op0;
8326 op0 = op1;
8327 op1 = temp;
8330 /* We generate better code and avoid problems with op1 mentioning
8331 target by forcing op1 into a pseudo if it isn't a constant. */
8332 if (! CONSTANT_P (op1))
8333 op1 = force_reg (mode, op1);
8336 enum rtx_code comparison_code;
8337 rtx cmpop1 = op1;
8339 if (code == MAX_EXPR)
8340 comparison_code = unsignedp ? GEU : GE;
8341 else
8342 comparison_code = unsignedp ? LEU : LE;
8344 /* Canonicalize to comparisons against 0. */
8345 if (op1 == const1_rtx)
8347 /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
8348 or (a != 0 ? a : 1) for unsigned.
8349 For MIN we are safe converting (a <= 1 ? a : 1)
8350 into (a <= 0 ? a : 1) */
8351 cmpop1 = const0_rtx;
8352 if (code == MAX_EXPR)
8353 comparison_code = unsignedp ? NE : GT;
8355 if (op1 == constm1_rtx && !unsignedp)
8357 /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
8358 and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
8359 cmpop1 = const0_rtx;
8360 if (code == MIN_EXPR)
8361 comparison_code = LT;
8363 #ifdef HAVE_conditional_move
8364 /* Use a conditional move if possible. */
8365 if (can_conditionally_move_p (mode))
8367 rtx insn;
8369 /* ??? Same problem as in expmed.c: emit_conditional_move
8370 forces a stack adjustment via compare_from_rtx, and we
8371 lose the stack adjustment if the sequence we are about
8372 to create is discarded. */
8373 do_pending_stack_adjust ();
8375 start_sequence ();
8377 /* Try to emit the conditional move. */
8378 insn = emit_conditional_move (target, comparison_code,
8379 op0, cmpop1, mode,
8380 op0, op1, mode,
8381 unsignedp);
8383 /* If we could do the conditional move, emit the sequence,
8384 and return. */
8385 if (insn)
8387 rtx seq = get_insns ();
8388 end_sequence ();
8389 emit_insn (seq);
8390 return target;
8393 /* Otherwise discard the sequence and fall back to code with
8394 branches. */
8395 end_sequence ();
8397 #endif
8398 if (target != op0)
8399 emit_move_insn (target, op0);
8401 temp = gen_label_rtx ();
8402 do_compare_rtx_and_jump (target, cmpop1, comparison_code,
8403 unsignedp, mode, NULL_RTX, NULL_RTX, temp);
8405 emit_move_insn (target, op1);
8406 emit_label (temp);
8407 return target;
8409 case BIT_NOT_EXPR:
8410 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8411 if (modifier == EXPAND_STACK_PARM)
8412 target = 0;
8413 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8414 gcc_assert (temp);
8415 return temp;
8417 /* ??? Can optimize bitwise operations with one arg constant.
8418 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8419 and (a bitwise1 b) bitwise2 b (etc)
8420 but that is probably not worthwhile. */
8422 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8423 boolean values when we want in all cases to compute both of them. In
8424 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8425 as actual zero-or-1 values and then bitwise anding. In cases where
8426 there cannot be any side effects, better code would be made by
8427 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8428 how to recognize those cases. */
8430 case TRUTH_AND_EXPR:
8431 code = BIT_AND_EXPR;
8432 case BIT_AND_EXPR:
8433 goto binop;
8435 case TRUTH_OR_EXPR:
8436 code = BIT_IOR_EXPR;
8437 case BIT_IOR_EXPR:
8438 goto binop;
8440 case TRUTH_XOR_EXPR:
8441 code = BIT_XOR_EXPR;
8442 case BIT_XOR_EXPR:
8443 goto binop;
8445 case LSHIFT_EXPR:
8446 case RSHIFT_EXPR:
8447 case LROTATE_EXPR:
8448 case RROTATE_EXPR:
8449 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8450 subtarget = 0;
8451 if (modifier == EXPAND_STACK_PARM)
8452 target = 0;
8453 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8454 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8455 unsignedp);
8457 /* Could determine the answer when only additive constants differ. Also,
8458 the addition of one can be handled by changing the condition. */
8459 case LT_EXPR:
8460 case LE_EXPR:
8461 case GT_EXPR:
8462 case GE_EXPR:
8463 case EQ_EXPR:
8464 case NE_EXPR:
8465 case UNORDERED_EXPR:
8466 case ORDERED_EXPR:
8467 case UNLT_EXPR:
8468 case UNLE_EXPR:
8469 case UNGT_EXPR:
8470 case UNGE_EXPR:
8471 case UNEQ_EXPR:
8472 case LTGT_EXPR:
8473 temp = do_store_flag (exp,
8474 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8475 tmode != VOIDmode ? tmode : mode, 0);
8476 if (temp != 0)
8477 return temp;
8479 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8480 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8481 && original_target
8482 && REG_P (original_target)
8483 && (GET_MODE (original_target)
8484 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8486 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8487 VOIDmode, 0);
8489 /* If temp is constant, we can just compute the result. */
8490 if (GET_CODE (temp) == CONST_INT)
8492 if (INTVAL (temp) != 0)
8493 emit_move_insn (target, const1_rtx);
8494 else
8495 emit_move_insn (target, const0_rtx);
8497 return target;
8500 if (temp != original_target)
8502 enum machine_mode mode1 = GET_MODE (temp);
8503 if (mode1 == VOIDmode)
8504 mode1 = tmode != VOIDmode ? tmode : mode;
8506 temp = copy_to_mode_reg (mode1, temp);
8509 op1 = gen_label_rtx ();
8510 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8511 GET_MODE (temp), unsignedp, op1);
8512 emit_move_insn (temp, const1_rtx);
8513 emit_label (op1);
8514 return temp;
8517 /* If no set-flag instruction, must generate a conditional store
8518 into a temporary variable. Drop through and handle this
8519 like && and ||. */
8521 if (! ignore
8522 && (target == 0
8523 || modifier == EXPAND_STACK_PARM
8524 || ! safe_from_p (target, exp, 1)
8525 /* Make sure we don't have a hard reg (such as function's return
8526 value) live across basic blocks, if not optimizing. */
8527 || (!optimize && REG_P (target)
8528 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8529 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8531 if (target)
8532 emit_move_insn (target, const0_rtx);
8534 op1 = gen_label_rtx ();
8535 jumpifnot (exp, op1);
8537 if (target)
8538 emit_move_insn (target, const1_rtx);
8540 emit_label (op1);
8541 return ignore ? const0_rtx : target;
8543 case TRUTH_NOT_EXPR:
8544 if (modifier == EXPAND_STACK_PARM)
8545 target = 0;
8546 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8547 /* The parser is careful to generate TRUTH_NOT_EXPR
8548 only with operands that are always zero or one. */
8549 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8550 target, 1, OPTAB_LIB_WIDEN);
8551 gcc_assert (temp);
8552 return temp;
8554 case STATEMENT_LIST:
8556 tree_stmt_iterator iter;
8558 gcc_assert (ignore);
8560 for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
8561 expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
8563 return const0_rtx;
8565 case COND_EXPR:
8566 /* A COND_EXPR with its type being VOID_TYPE represents a
8567 conditional jump and is handled in
8568 expand_gimple_cond_expr. */
8569 gcc_assert (!VOID_TYPE_P (TREE_TYPE (exp)));
8571 /* Note that COND_EXPRs whose type is a structure or union
8572 are required to be constructed to contain assignments of
8573 a temporary variable, so that we can evaluate them here
8574 for side effect only. If type is void, we must do likewise. */
8576 gcc_assert (!TREE_ADDRESSABLE (type)
8577 && !ignore
8578 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node
8579 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node);
8581 /* If we are not to produce a result, we have no target. Otherwise,
8582 if a target was specified use it; it will not be used as an
8583 intermediate target unless it is safe. If no target, use a
8584 temporary. */
8586 if (modifier != EXPAND_STACK_PARM
8587 && original_target
8588 && safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8589 && GET_MODE (original_target) == mode
8590 #ifdef HAVE_conditional_move
8591 && (! can_conditionally_move_p (mode)
8592 || REG_P (original_target))
8593 #endif
8594 && !MEM_P (original_target))
8595 temp = original_target;
8596 else
8597 temp = assign_temp (type, 0, 0, 1);
8599 do_pending_stack_adjust ();
8600 NO_DEFER_POP;
8601 op0 = gen_label_rtx ();
8602 op1 = gen_label_rtx ();
8603 jumpifnot (TREE_OPERAND (exp, 0), op0);
8604 store_expr (TREE_OPERAND (exp, 1), temp,
8605 modifier == EXPAND_STACK_PARM);
8607 emit_jump_insn (gen_jump (op1));
8608 emit_barrier ();
8609 emit_label (op0);
8610 store_expr (TREE_OPERAND (exp, 2), temp,
8611 modifier == EXPAND_STACK_PARM);
8613 emit_label (op1);
8614 OK_DEFER_POP;
8615 return temp;
8617 case VEC_COND_EXPR:
8618 target = expand_vec_cond_expr (exp, target);
8619 return target;
8621 case GIMPLE_MODIFY_STMT:
8623 tree lhs = GIMPLE_STMT_OPERAND (exp, 0);
8624 tree rhs = GIMPLE_STMT_OPERAND (exp, 1);
8626 gcc_assert (ignore);
8628 /* Check for |= or &= of a bitfield of size one into another bitfield
8629 of size 1. In this case, (unless we need the result of the
8630 assignment) we can do this more efficiently with a
8631 test followed by an assignment, if necessary.
8633 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8634 things change so we do, this code should be enhanced to
8635 support it. */
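/* E.g. "X.A |= X.B" with one-bit fields becomes "if (X.B) X.A = 1;",
   and "X.A &= X.B" becomes "if (!X.B) X.A = 0;".  */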
8636 if (TREE_CODE (lhs) == COMPONENT_REF
8637 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8638 || TREE_CODE (rhs) == BIT_AND_EXPR)
8639 && TREE_OPERAND (rhs, 0) == lhs
8640 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8641 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8642 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8644 rtx label = gen_label_rtx ();
8645 int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
8646 do_jump (TREE_OPERAND (rhs, 1),
8647 value ? label : 0,
8648 value ? 0 : label);
8649 expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value));
8650 do_pending_stack_adjust ();
8651 emit_label (label);
8652 return const0_rtx;
8655 expand_assignment (lhs, rhs);
8657 return const0_rtx;
8660 case RETURN_EXPR:
8661 if (!TREE_OPERAND (exp, 0))
8662 expand_null_return ();
8663 else
8664 expand_return (TREE_OPERAND (exp, 0));
8665 return const0_rtx;
8667 case ADDR_EXPR:
8668 return expand_expr_addr_expr (exp, target, tmode, modifier);
8670 case COMPLEX_EXPR:
8671 /* Get the rtx code of the operands. */
8672 op0 = expand_normal (TREE_OPERAND (exp, 0));
8673 op1 = expand_normal (TREE_OPERAND (exp, 1));
8675 if (!target)
8676 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8678 /* Move the real (op0) and imaginary (op1) parts to their location. */
8679 write_complex_part (target, op0, false);
8680 write_complex_part (target, op1, true);
8682 return target;
8684 case REALPART_EXPR:
8685 op0 = expand_normal (TREE_OPERAND (exp, 0));
8686 return read_complex_part (op0, false);
8688 case IMAGPART_EXPR:
8689 op0 = expand_normal (TREE_OPERAND (exp, 0));
8690 return read_complex_part (op0, true);
8692 case RESX_EXPR:
8693 expand_resx_expr (exp);
8694 return const0_rtx;
8696 case TRY_CATCH_EXPR:
8697 case CATCH_EXPR:
8698 case EH_FILTER_EXPR:
8699 case TRY_FINALLY_EXPR:
8700 /* Lowered by tree-eh.c. */
8701 gcc_unreachable ();
8703 case WITH_CLEANUP_EXPR:
8704 case CLEANUP_POINT_EXPR:
8705 case TARGET_EXPR:
8706 case CASE_LABEL_EXPR:
8707 case VA_ARG_EXPR:
8708 case BIND_EXPR:
8709 case INIT_EXPR:
8710 case CONJ_EXPR:
8711 case COMPOUND_EXPR:
8712 case PREINCREMENT_EXPR:
8713 case PREDECREMENT_EXPR:
8714 case POSTINCREMENT_EXPR:
8715 case POSTDECREMENT_EXPR:
8716 case LOOP_EXPR:
8717 case EXIT_EXPR:
8718 case TRUTH_ANDIF_EXPR:
8719 case TRUTH_ORIF_EXPR:
8720 /* Lowered by gimplify.c. */
8721 gcc_unreachable ();
8723 case EXC_PTR_EXPR:
8724 return get_exception_pointer (cfun);
8726 case FILTER_EXPR:
8727 return get_exception_filter (cfun);
8729 case FDESC_EXPR:
8730 /* Function descriptors are not valid except as
8731 initialization constants, and should not be expanded. */
8732 gcc_unreachable ();
8734 case SWITCH_EXPR:
8735 expand_case (exp);
8736 return const0_rtx;
8738 case LABEL_EXPR:
8739 expand_label (TREE_OPERAND (exp, 0));
8740 return const0_rtx;
8742 case ASM_EXPR:
8743 expand_asm_expr (exp);
8744 return const0_rtx;
8746 case WITH_SIZE_EXPR:
8747 /* WITH_SIZE_EXPR expands to its first argument. The caller should
8748 have pulled out the size to use in whatever context it needed. */
8749 return expand_expr_real (TREE_OPERAND (exp, 0), original_target, tmode,
8750 modifier, alt_rtl);
8752 case REALIGN_LOAD_EXPR:
8754 tree oprnd0 = TREE_OPERAND (exp, 0);
8755 tree oprnd1 = TREE_OPERAND (exp, 1);
8756 tree oprnd2 = TREE_OPERAND (exp, 2);
8757 rtx op2;
8759 this_optab = optab_for_tree_code (code, type);
8760 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8761 op2 = expand_normal (oprnd2);
8762 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
8763 target, unsignedp);
8764 gcc_assert (temp);
8765 return temp;
8768 case DOT_PROD_EXPR:
8770 tree oprnd0 = TREE_OPERAND (exp, 0);
8771 tree oprnd1 = TREE_OPERAND (exp, 1);
8772 tree oprnd2 = TREE_OPERAND (exp, 2);
8773 rtx op2;
8775 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8776 op2 = expand_normal (oprnd2);
8777 target = expand_widen_pattern_expr (exp, op0, op1, op2,
8778 target, unsignedp);
8779 return target;
8782 case WIDEN_SUM_EXPR:
8784 tree oprnd0 = TREE_OPERAND (exp, 0);
8785 tree oprnd1 = TREE_OPERAND (exp, 1);
8787 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, 0);
8788 target = expand_widen_pattern_expr (exp, op0, NULL_RTX, op1,
8789 target, unsignedp);
8790 return target;
8793 case REDUC_MAX_EXPR:
8794 case REDUC_MIN_EXPR:
8795 case REDUC_PLUS_EXPR:
8797 op0 = expand_normal (TREE_OPERAND (exp, 0));
8798 this_optab = optab_for_tree_code (code, type);
8799 temp = expand_unop (mode, this_optab, op0, target, unsignedp);
8800 gcc_assert (temp);
8801 return temp;
8804 case VEC_EXTRACT_EVEN_EXPR:
8805 case VEC_EXTRACT_ODD_EXPR:
8807 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8808 NULL_RTX, &op0, &op1, 0);
8809 this_optab = optab_for_tree_code (code, type);
8810 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8811 OPTAB_WIDEN);
8812 gcc_assert (temp);
8813 return temp;
8816 case VEC_INTERLEAVE_HIGH_EXPR:
8817 case VEC_INTERLEAVE_LOW_EXPR:
8819 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8820 NULL_RTX, &op0, &op1, 0);
8821 this_optab = optab_for_tree_code (code, type);
8822 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8823 OPTAB_WIDEN);
8824 gcc_assert (temp);
8825 return temp;
8828 case VEC_LSHIFT_EXPR:
8829 case VEC_RSHIFT_EXPR:
8831 target = expand_vec_shift_expr (exp, target);
8832 return target;
8835 case VEC_UNPACK_HI_EXPR:
8836 case VEC_UNPACK_LO_EXPR:
8838 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8839 this_optab = optab_for_tree_code (code, type);
8840 temp = expand_widen_pattern_expr (exp, op0, NULL_RTX, NULL_RTX,
8841 target, unsignedp);
8842 gcc_assert (temp);
8843 return temp;
8846 case VEC_WIDEN_MULT_HI_EXPR:
8847 case VEC_WIDEN_MULT_LO_EXPR:
8849 tree oprnd0 = TREE_OPERAND (exp, 0);
8850 tree oprnd1 = TREE_OPERAND (exp, 1);
8852 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, 0);
8853 target = expand_widen_pattern_expr (exp, op0, op1, NULL_RTX,
8854 target, unsignedp);
8855 gcc_assert (target);
8856 return target;
8859 case VEC_PACK_MOD_EXPR:
8860 case VEC_PACK_SAT_EXPR:
8862 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
8863 goto binop;
8866 default:
8867 return lang_hooks.expand_expr (exp, original_target, tmode,
8868 modifier, alt_rtl);
8871 /* Here to do an ordinary binary operator. */
8872 binop:
8873 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8874 subtarget, &op0, &op1, 0);
8875 binop2:
8876 this_optab = optab_for_tree_code (code, type);
8877 binop3:
8878 if (modifier == EXPAND_STACK_PARM)
8879 target = 0;
8880 temp = expand_binop (mode, this_optab, op0, op1, target,
8881 unsignedp, OPTAB_LIB_WIDEN);
8882 gcc_assert (temp);
8883 return REDUCE_BIT_FIELD (temp);
8885 #undef REDUCE_BIT_FIELD
8887 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
8888 signedness of TYPE), possibly returning the result in TARGET. */
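/* For an unsigned TYPE this is an AND with the mask (1 << prec) - 1;
   for a signed TYPE it is a left shift followed by an arithmetic right
   shift by (mode width - prec), which sign-extends from bit prec - 1.  */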
8889 static rtx
8890 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
8892 HOST_WIDE_INT prec = TYPE_PRECISION (type);
8893 if (target && GET_MODE (target) != GET_MODE (exp))
8894 target = 0;
8895 if (TYPE_UNSIGNED (type))
8897 rtx mask;
8898 if (prec < HOST_BITS_PER_WIDE_INT)
8899 mask = immed_double_const (((unsigned HOST_WIDE_INT) 1 << prec) - 1, 0,
8900 GET_MODE (exp));
8901 else
8902 mask = immed_double_const ((unsigned HOST_WIDE_INT) -1,
8903 ((unsigned HOST_WIDE_INT) 1
8904 << (prec - HOST_BITS_PER_WIDE_INT)) - 1,
8905 GET_MODE (exp));
8906 return expand_and (GET_MODE (exp), exp, mask, target);
8908 else
8910 tree count = build_int_cst (NULL_TREE,
8911 GET_MODE_BITSIZE (GET_MODE (exp)) - prec);
8912 exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
8913 return expand_shift (RSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
8917 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
8918 when applied to the address of EXP produces an address known to be
8919 aligned more than BIGGEST_ALIGNMENT. */
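/* The recognized pattern is (-ADDR) & (ALIGN - 1), where ADDR is the
   address of EXP and ALIGN is a power of two larger than
   BIGGEST_ALIGNMENT; adding such an offset rounds the address up to a
   multiple of ALIGN.  */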
8921 static int
8922 is_aligning_offset (tree offset, tree exp)
8924 /* Strip off any conversions. */
8925 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8926 || TREE_CODE (offset) == NOP_EXPR
8927 || TREE_CODE (offset) == CONVERT_EXPR)
8928 offset = TREE_OPERAND (offset, 0);
8930 /* We must now have a BIT_AND_EXPR with a constant that is one less than
8931 power of 2 and which is larger than BIGGEST_ALIGNMENT. */
8932 if (TREE_CODE (offset) != BIT_AND_EXPR
8933 || !host_integerp (TREE_OPERAND (offset, 1), 1)
8934 || compare_tree_int (TREE_OPERAND (offset, 1),
8935 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
8936 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
8937 return 0;
8939 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
8940 It must be NEGATE_EXPR. Then strip any more conversions. */
8941 offset = TREE_OPERAND (offset, 0);
8942 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8943 || TREE_CODE (offset) == NOP_EXPR
8944 || TREE_CODE (offset) == CONVERT_EXPR)
8945 offset = TREE_OPERAND (offset, 0);
8947 if (TREE_CODE (offset) != NEGATE_EXPR)
8948 return 0;
8950 offset = TREE_OPERAND (offset, 0);
8951 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8952 || TREE_CODE (offset) == NOP_EXPR
8953 || TREE_CODE (offset) == CONVERT_EXPR)
8954 offset = TREE_OPERAND (offset, 0);
8956 /* This must now be the address of EXP. */
8957 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
8960 /* Return the tree node if an ARG corresponds to a string constant or zero
8961 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
8962 in bytes within the string that ARG is accessing. The type of the
8963 offset will be `sizetype'. */
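/* For example, given "static const char buf[] = "hello";" that binds
   locally, the argument &buf[3] yields the STRING_CST "hello" with
   *PTR_OFFSET set to 3.  */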
8965 tree
8966 string_constant (tree arg, tree *ptr_offset)
8968 tree array, offset, lower_bound;
8969 STRIP_NOPS (arg);
8971 if (TREE_CODE (arg) == ADDR_EXPR)
8973 if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8975 *ptr_offset = size_zero_node;
8976 return TREE_OPERAND (arg, 0);
8978 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
8980 array = TREE_OPERAND (arg, 0);
8981 offset = size_zero_node;
8983 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
8985 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
8986 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
8987 if (TREE_CODE (array) != STRING_CST
8988 && TREE_CODE (array) != VAR_DECL)
8989 return 0;
8991 /* Check if the array has a nonzero lower bound. */
8992 lower_bound = array_ref_low_bound (TREE_OPERAND (arg, 0));
8993 if (!integer_zerop (lower_bound))
8995 /* If the offset and lower bound aren't both constants, return 0. */
8996 if (TREE_CODE (lower_bound) != INTEGER_CST)
8997 return 0;
8998 if (TREE_CODE (offset) != INTEGER_CST)
8999 return 0;
9000 /* Adjust offset by the lower bound. */
9001 offset = size_diffop (fold_convert (sizetype, offset),
9002 fold_convert (sizetype, lower_bound));
9005 else
9006 return 0;
9008 else if (TREE_CODE (arg) == PLUS_EXPR)
9010 tree arg0 = TREE_OPERAND (arg, 0);
9011 tree arg1 = TREE_OPERAND (arg, 1);
9013 STRIP_NOPS (arg0);
9014 STRIP_NOPS (arg1);
9016 if (TREE_CODE (arg0) == ADDR_EXPR
9017 && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
9018 || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
9020 array = TREE_OPERAND (arg0, 0);
9021 offset = arg1;
9023 else if (TREE_CODE (arg1) == ADDR_EXPR
9024 && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
9025 || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
9027 array = TREE_OPERAND (arg1, 0);
9028 offset = arg0;
9030 else
9031 return 0;
9033 else
9034 return 0;
9036 if (TREE_CODE (array) == STRING_CST)
9038 *ptr_offset = fold_convert (sizetype, offset);
9039 return array;
9041 else if (TREE_CODE (array) == VAR_DECL)
9043 int length;
9045 /* Variables initialized to string literals can be handled too. */
9046 if (DECL_INITIAL (array) == NULL_TREE
9047 || TREE_CODE (DECL_INITIAL (array)) != STRING_CST)
9048 return 0;
9050 /* They must be read-only, non-volatile, and bind locally. */
9051 if (! TREE_READONLY (array)
9052 || TREE_SIDE_EFFECTS (array)
9053 || ! targetm.binds_local_p (array))
9054 return 0;
9056 /* Avoid const char foo[4] = "abcde"; */
9057 if (DECL_SIZE_UNIT (array) == NULL_TREE
9058 || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
9059 || (length = TREE_STRING_LENGTH (DECL_INITIAL (array))) <= 0
9060 || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
9061 return 0;
9063 /* If the variable is bigger than the string literal, OFFSET must be constant
9064 and within the bounds of the string literal. */
9065 offset = fold_convert (sizetype, offset);
9066 if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
9067 && (! host_integerp (offset, 1)
9068 || compare_tree_int (offset, length) >= 0))
9069 return 0;
9071 *ptr_offset = offset;
9072 return DECL_INITIAL (array);
9075 return 0;
9078 /* Generate code to calculate EXP using a store-flag instruction
9079 and return an rtx for the result. EXP is either a comparison
9080 or a TRUTH_NOT_EXPR whose operand is a comparison.
9082 If TARGET is nonzero, store the result there if convenient.
9084 If ONLY_CHEAP is nonzero, only do this if it is likely to be very
9085 cheap.
9087 Return zero if there is no suitable set-flag instruction
9088 available on this machine.
9090 Once expand_expr has been called on the arguments of the comparison,
9091 we are committed to doing the store flag, since it is not safe to
9092 re-evaluate the expression. We emit the store-flag insn by calling
9093 emit_store_flag, but only expand the arguments if we have a reason
9094 to believe that emit_store_flag will be successful. If we think that
9095 it will, but it isn't, we have to simulate the store-flag with a
9096 set/jump/set sequence. */
9098 static rtx
9099 do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
9101 enum rtx_code code;
9102 tree arg0, arg1, type;
9103 tree tem;
9104 enum machine_mode operand_mode;
9105 int invert = 0;
9106 int unsignedp;
9107 rtx op0, op1;
9108 enum insn_code icode;
9109 rtx subtarget = target;
9110 rtx result, label;
9112 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9113 result at the end. We can't simply invert the test since it would
9114 have already been inverted if it were valid. This case occurs for
9115 some floating-point comparisons. */
9117 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
9118 invert = 1, exp = TREE_OPERAND (exp, 0);
9120 arg0 = TREE_OPERAND (exp, 0);
9121 arg1 = TREE_OPERAND (exp, 1);
9123 /* Don't crash if the comparison was erroneous. */
9124 if (arg0 == error_mark_node || arg1 == error_mark_node)
9125 return const0_rtx;
9127 type = TREE_TYPE (arg0);
9128 operand_mode = TYPE_MODE (type);
9129 unsignedp = TYPE_UNSIGNED (type);
9131 /* We won't bother with BLKmode store-flag operations because it would mean
9132 passing a lot of information to emit_store_flag. */
9133 if (operand_mode == BLKmode)
9134 return 0;
9136 /* We won't bother with store-flag operations involving function pointers
9137 when function pointers must be canonicalized before comparisons. */
9138 #ifdef HAVE_canonicalize_funcptr_for_compare
9139 if (HAVE_canonicalize_funcptr_for_compare
9140 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9141 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9142 == FUNCTION_TYPE))
9143 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9144 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9145 == FUNCTION_TYPE))))
9146 return 0;
9147 #endif
9149 STRIP_NOPS (arg0);
9150 STRIP_NOPS (arg1);
9152 /* Get the rtx comparison code to use. We know that EXP is a comparison
9153 operation of some type. Some comparisons against 1 and -1 can be
9154 converted to comparisons with zero. Do so here so that the tests
9155 below will be aware that we have a comparison with zero. These
9156 tests will not catch constants in the first operand, but constants
9157 are rarely passed as the first operand. */
9159 switch (TREE_CODE (exp))
9160 {
9161 case EQ_EXPR:
9162 code = EQ;
9163 break;
9164 case NE_EXPR:
9165 code = NE;
9166 break;
9167 case LT_EXPR:
9168 if (integer_onep (arg1))
9169 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
9170 else
9171 code = unsignedp ? LTU : LT;
9172 break;
9173 case LE_EXPR:
9174 if (! unsignedp && integer_all_onesp (arg1))
9175 arg1 = integer_zero_node, code = LT;
9176 else
9177 code = unsignedp ? LEU : LE;
9178 break;
9179 case GT_EXPR:
9180 if (! unsignedp && integer_all_onesp (arg1))
9181 arg1 = integer_zero_node, code = GE;
9182 else
9183 code = unsignedp ? GTU : GT;
9184 break;
9185 case GE_EXPR:
9186 if (integer_onep (arg1))
9187 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
9188 else
9189 code = unsignedp ? GEU : GE;
9190 break;
9192 case UNORDERED_EXPR:
9193 code = UNORDERED;
9194 break;
9195 case ORDERED_EXPR:
9196 code = ORDERED;
9197 break;
9198 case UNLT_EXPR:
9199 code = UNLT;
9200 break;
9201 case UNLE_EXPR:
9202 code = UNLE;
9203 break;
9204 case UNGT_EXPR:
9205 code = UNGT;
9206 break;
9207 case UNGE_EXPR:
9208 code = UNGE;
9209 break;
9210 case UNEQ_EXPR:
9211 code = UNEQ;
9212 break;
9213 case LTGT_EXPR:
9214 code = LTGT;
9215 break;
9217 default:
9218 gcc_unreachable ();
9219 }
9221 /* Put a constant second. */
9222 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
9223 {
9224 tem = arg0; arg0 = arg1; arg1 = tem;
9225 code = swap_condition (code);
9226 }
9228 /* If this is an equality or inequality test of a single bit, we can
9229 do this by shifting the bit being tested to the low-order bit and
9230 masking the result with the constant 1. If the condition was EQ,
9231 we xor it with 1. This does not require an scc insn and is faster
9232 than an scc insn even if we have it.
9234 The code to make this transformation was moved into fold_single_bit_test,
9235 so we just call into the folder and expand its result. */
9237 if ((code == NE || code == EQ)
9238 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
9239 && integer_pow2p (TREE_OPERAND (arg0, 1)))
9240 {
9241 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
9242 return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
9243 arg0, arg1, type),
9244 target, VOIDmode, EXPAND_NORMAL);
9245 }
9247 /* Now see if we are likely to be able to do this. Return if not. */
9248 if (! can_compare_p (code, operand_mode, ccp_store_flag))
9249 return 0;
9251 icode = setcc_gen_code[(int) code];
9253 if (icode == CODE_FOR_nothing)
9254 {
9255 enum machine_mode wmode;
9257 for (wmode = operand_mode;
9258 icode == CODE_FOR_nothing && wmode != VOIDmode;
9259 wmode = GET_MODE_WIDER_MODE (wmode))
9260 icode = cstore_optab->handlers[(int) wmode].insn_code;
9261 }
9263 if (icode == CODE_FOR_nothing
9264 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
9265 {
9266 /* We can only do this if it is one of the special cases that
9267 can be handled without an scc insn. */
9268 if ((code == LT && integer_zerop (arg1))
9269 || (! only_cheap && code == GE && integer_zerop (arg1)))
9270 ;
9271 else if (! only_cheap && (code == NE || code == EQ)
9272 && TREE_CODE (type) != REAL_TYPE
9273 && ((abs_optab->handlers[(int) operand_mode].insn_code
9274 != CODE_FOR_nothing)
9275 || (ffs_optab->handlers[(int) operand_mode].insn_code
9276 != CODE_FOR_nothing)))
9277 ;
9278 else
9279 return 0;
9280 }
9282 if (! get_subtarget (target)
9283 || GET_MODE (subtarget) != operand_mode)
9284 subtarget = 0;
9286 expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);
9288 if (target == 0)
9289 target = gen_reg_rtx (mode);
9291 result = emit_store_flag (target, code, op0, op1,
9292 operand_mode, unsignedp, 1);
9294 if (result)
9295 {
9296 if (invert)
9297 result = expand_binop (mode, xor_optab, result, const1_rtx,
9298 result, 0, OPTAB_LIB_WIDEN);
9299 return result;
9300 }
9302 /* If this failed, we have to do this with set/compare/jump/set code. */
9303 if (!REG_P (target)
9304 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
9305 target = gen_reg_rtx (GET_MODE (target));
9307 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
9308 label = gen_label_rtx ();
9309 do_compare_rtx_and_jump (op0, op1, code, unsignedp, operand_mode, NULL_RTX,
9310 NULL_RTX, label);
9312 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
9313 emit_label (label);
9315 return target;
9316 }
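/* Editorial sketch, not part of the original file: what do_store_flag is
   for.  A comparison used as a value, e.g. "return a < b;", is ideally a
   single store-flag (scc) instruction with no branch.  When emit_store_flag
   cannot handle it, the fallback emitted above behaves like the hypothetical
   C below (the function and variable names are illustrative only, and the
   non-inverted case is shown).  */
#if 0
static int
store_flag_fallback (int a, int b)
{
  int result = 1;              /* optimistically store the "true" value  */
  if (a < b)
    goto done;                 /* comparison holds: keep the 1  */
  result = 0;                  /* otherwise overwrite with the "false" value  */
 done:
  return result;
}
#endif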
9319 /* Stubs in case we haven't got a casesi insn. */
9320 #ifndef HAVE_casesi
9321 # define HAVE_casesi 0
9322 # define gen_casesi(a, b, c, d, e) (0)
9323 # define CODE_FOR_casesi CODE_FOR_nothing
9324 #endif
9326 /* If the machine does not have a case insn that compares the bounds,
9327 this means extra overhead for dispatch tables, which raises the
9328 threshold for using them. */
9329 #ifndef CASE_VALUES_THRESHOLD
9330 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
9331 #endif /* CASE_VALUES_THRESHOLD */
9333 unsigned int
9334 case_values_threshold (void)
9335 {
9336 return CASE_VALUES_THRESHOLD;
9337 }
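/* Editorial sketch, not part of the original file: how the threshold is
   used.  The switch expander (elsewhere in the compiler) compares, roughly,
   the number of case labels against case_values_threshold (); with the
   default of 4 (5 when there is no casesi pattern), a small switch like the
   hypothetical one below stays a compare/branch chain, while a denser
   switch with more case labels is dispatched through a table via
   try_casesi or try_tablejump.  */
#if 0
static int
small_switch (int c)
{
  switch (c)          /* only 3 case labels: below the threshold  */
    {
    case 0: return 10;
    case 1: return 11;
    case 2: return 12;
    default: return -1;
    }
}
#endif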
9339 /* Attempt to generate a casesi instruction. Returns 1 if successful,
9340 0 otherwise (i.e. if there is no casesi instruction). */
9341 int
9342 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
9343 rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
9344 {
9345 enum machine_mode index_mode = SImode;
9346 int index_bits = GET_MODE_BITSIZE (index_mode);
9347 rtx op1, op2, index;
9348 enum machine_mode op_mode;
9350 if (! HAVE_casesi)
9351 return 0;
9353 /* Convert the index to SImode. */
9354 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
9355 {
9356 enum machine_mode omode = TYPE_MODE (index_type);
9357 rtx rangertx = expand_normal (range);
9359 /* We must handle the endpoints in the original mode. */
9360 index_expr = build2 (MINUS_EXPR, index_type,
9361 index_expr, minval);
9362 minval = integer_zero_node;
9363 index = expand_normal (index_expr);
9364 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
9365 omode, 1, default_label);
9366 /* Now we can safely truncate. */
9367 index = convert_to_mode (index_mode, index, 0);
9368 }
9369 else
9370 {
9371 if (TYPE_MODE (index_type) != index_mode)
9372 {
9373 index_type = lang_hooks.types.type_for_size (index_bits, 0);
9374 index_expr = fold_convert (index_type, index_expr);
9375 }
9377 index = expand_normal (index_expr);
9378 }
9380 do_pending_stack_adjust ();
9382 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
9383 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
9384 (index, op_mode))
9385 index = copy_to_mode_reg (op_mode, index);
9387 op1 = expand_normal (minval);
9389 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
9390 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
9391 op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
9392 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
9393 (op1, op_mode))
9394 op1 = copy_to_mode_reg (op_mode, op1);
9396 op2 = expand_normal (range);
9398 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
9399 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
9400 op2, TYPE_UNSIGNED (TREE_TYPE (range)));
9401 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
9402 (op2, op_mode))
9403 op2 = copy_to_mode_reg (op_mode, op2);
9405 emit_jump_insn (gen_casesi (index, op1, op2,
9406 table_label, default_label));
9407 return 1;
9408 }
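/* Editorial sketch, not part of the original file: the shape of the range
   check set up above.  With the low bound already subtracted, a single
   unsigned comparison rejects indexes below MINVAL and above MINVAL + RANGE
   at the same time.  All names below are hypothetical.  */
#if 0
static int
casesi_equivalent (long index, long minval, unsigned long range,
                   int (*const handlers[]) (void), int (*deflt) (void))
{
  unsigned long i = (unsigned long) (index - minval);  /* 0-based index  */
  if (i > range)                /* one unsigned compare checks both bounds  */
    return deflt ();
  return handlers[i] ();        /* dispatch through the table  */
}
#endif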
9410 /* Attempt to generate a tablejump instruction; same concept. */
9411 #ifndef HAVE_tablejump
9412 #define HAVE_tablejump 0
9413 #define gen_tablejump(x, y) (0)
9414 #endif
9416 /* Subroutine of the next function.
9418 INDEX is the value being switched on, with the lowest value
9419 in the table already subtracted.
9420 MODE is its expected mode (needed if INDEX is constant).
9421 RANGE is the length of the jump table.
9422 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
9424 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
9425 index value is out of range. */
9427 static void
9428 do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
9429 rtx default_label)
9430 {
9431 rtx temp, vector;
9433 if (INTVAL (range) > cfun->max_jumptable_ents)
9434 cfun->max_jumptable_ents = INTVAL (range);
9436 /* Do an unsigned comparison (in the proper mode) between the index
9437 expression and the value which represents the length of the range.
9438 Since we just finished subtracting the lower bound of the range
9439 from the index expression, this comparison allows us to simultaneously
9440 check that the original index expression value is both greater than
9441 or equal to the minimum value of the range and less than or equal to
9442 the maximum value of the range. */
9444 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
9445 default_label);
9447 /* If index is in range, it must fit in Pmode.
9448 Convert to Pmode so we can index with it. */
9449 if (mode != Pmode)
9450 index = convert_to_mode (Pmode, index, 1);
9452 /* Don't let a MEM slip through, because then INDEX that comes
9453 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
9454 and break_out_memory_refs will go to work on it and mess it up. */
9455 #ifdef PIC_CASE_VECTOR_ADDRESS
9456 if (flag_pic && !REG_P (index))
9457 index = copy_to_mode_reg (Pmode, index);
9458 #endif
9460 /* If flag_force_addr were to affect this address
9461 it could interfere with the tricky assumptions made
9462 about addresses that contain label-refs,
9463 which may be valid only very near the tablejump itself. */
9464 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
9465 GET_MODE_SIZE, because this indicates how large insns are. The other
9466 uses should all be Pmode, because they are addresses. This code
9467 could fail if addresses and insns are not the same size. */
9468 index = gen_rtx_PLUS (Pmode,
9469 gen_rtx_MULT (Pmode, index,
9470 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
9471 gen_rtx_LABEL_REF (Pmode, table_label));
9472 #ifdef PIC_CASE_VECTOR_ADDRESS
9473 if (flag_pic)
9474 index = PIC_CASE_VECTOR_ADDRESS (index);
9475 else
9476 #endif
9477 index = memory_address_noforce (CASE_VECTOR_MODE, index);
9478 temp = gen_reg_rtx (CASE_VECTOR_MODE);
9479 vector = gen_const_mem (CASE_VECTOR_MODE, index);
9480 convert_move (temp, vector, 0);
9482 emit_jump_insn (gen_tablejump (temp, table_label));
9484 /* If we are generating PIC code or if the table is PC-relative, the
9485 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
9486 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
9487 emit_barrier ();
9488 }
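/* Editorial sketch, not part of the original file: the effect of the
   sequence built above, written as GNU C with computed gotos.  The real
   table holds CASE_VECTOR_MODE entries addressed as table_label plus index
   times the entry size; the names and the number of labels below are
   illustrative only.  */
#if 0
static int
tablejump_equivalent (unsigned long index)  /* low bound already subtracted  */
{
  static void *table[] = { &&L0, &&L1, &&L2, &&L3 };
  if (index > 3)                /* unsigned compare against the range  */
    goto dflt;
  goto *table[index];           /* load the table entry, jump indirectly  */
 L0: return 0;
 L1: return 1;
 L2: return 2;
 L3: return 3;
 dflt: return -1;
}
#endif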
9490 int
9491 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
9492 rtx table_label, rtx default_label)
9493 {
9494 rtx index;
9496 if (! HAVE_tablejump)
9497 return 0;
9499 index_expr = fold_build2 (MINUS_EXPR, index_type,
9500 fold_convert (index_type, index_expr),
9501 fold_convert (index_type, minval));
9502 index = expand_normal (index_expr);
9503 do_pending_stack_adjust ();
9505 do_tablejump (index, TYPE_MODE (index_type),
9506 convert_modes (TYPE_MODE (index_type),
9507 TYPE_MODE (TREE_TYPE (range)),
9508 expand_normal (range),
9509 TYPE_UNSIGNED (TREE_TYPE (range))),
9510 table_label, default_label);
9511 return 1;
9512 }
9514 /* Nonzero if the mode is a valid vector mode for this architecture.
9515 This returns nonzero even if there is no hardware support for the
9516 vector mode, but we can emulate with narrower modes. */
9518 int
9519 vector_mode_valid_p (enum machine_mode mode)
9520 {
9521 enum mode_class class = GET_MODE_CLASS (mode);
9522 enum machine_mode innermode;
9524 /* Doh! What's going on? Only integer and float vector modes belong here. */
9525 if (class != MODE_VECTOR_INT
9526 && class != MODE_VECTOR_FLOAT)
9527 return 0;
9529 /* Hardware support. Woo hoo! */
9530 if (targetm.vector_mode_supported_p (mode))
9531 return 1;
9533 innermode = GET_MODE_INNER (mode);
9535 /* We should probably return 1 when requesting V4DI and we have no DI
9536 but do have V2DI; however, that case is very unlikely. */
9538 /* If we have support for the inner mode, we can safely emulate it.
9539 We may not have V2DI, but we can emulate it with a pair of DIs. */
9540 return targetm.scalar_mode_supported_p (innermode);
9541 }
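/* Editorial sketch, not part of the original file: the emulation the
   comment above refers to.  A generic vector operation in a mode with no
   hardware support is still accepted here as long as the scalar inner mode
   is supported; the middle end then opens it up into scalar operations.
   The typedef and function below are illustrative only.  */
#if 0
typedef int v2si __attribute__ ((vector_size (8)));

static v2si
add_v2si (v2si a, v2si b)
{
  return a + b;   /* roughly two SImode additions if there is no V2SI add  */
}
#endif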
9543 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
9544 static rtx
9545 const_vector_from_tree (tree exp)
9546 {
9547 rtvec v;
9548 int units, i;
9549 tree link, elt;
9550 enum machine_mode inner, mode;
9552 mode = TYPE_MODE (TREE_TYPE (exp));
9554 if (initializer_zerop (exp))
9555 return CONST0_RTX (mode);
9557 units = GET_MODE_NUNITS (mode);
9558 inner = GET_MODE_INNER (mode);
9560 v = rtvec_alloc (units);
9562 link = TREE_VECTOR_CST_ELTS (exp);
9563 for (i = 0; link; link = TREE_CHAIN (link), ++i)
9564 {
9565 elt = TREE_VALUE (link);
9567 if (TREE_CODE (elt) == REAL_CST)
9568 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
9569 inner);
9570 else
9571 RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
9572 TREE_INT_CST_HIGH (elt),
9573 inner);
9574 }
9576 /* Initialize remaining elements to 0. */
9577 for (; i < units; ++i)
9578 RTVEC_ELT (v, i) = CONST0_RTX (inner);
9580 return gen_rtx_CONST_VECTOR (mode, v);
9581 }
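/* Editorial sketch, not part of the original file: the conversion performed
   above.  A vector constant with fewer explicit elements than the mode has
   lanes, such as the hypothetical one below, roughly becomes a CONST_VECTOR
   whose trailing lanes are zero, e.g. (const_vector:V4SI [1 2 0 0]); an
   all-zero constant is returned directly as CONST0_RTX (mode).  The typedef
   and function names are illustrative only.  */
#if 0
typedef int v4si __attribute__ ((vector_size (16)));

static v4si
make_constant (void)
{
  v4si c = { 1, 2 };   /* two explicit elements, two implicit zeros  */
  return c;
}
#endif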
9582 #include "gt-expr.h"