1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "real.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "flags.h"
31 #include "regs.h"
32 #include "hard-reg-set.h"
33 #include "except.h"
34 #include "function.h"
35 #include "insn-config.h"
36 #include "insn-attr.h"
37 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
38 #include "expr.h"
39 #include "optabs.h"
40 #include "libfuncs.h"
41 #include "recog.h"
42 #include "reload.h"
43 #include "output.h"
44 #include "typeclass.h"
45 #include "toplev.h"
46 #include "ggc.h"
47 #include "langhooks.h"
48 #include "intl.h"
49 #include "tm_p.h"
50 #include "tree-iterator.h"
51 #include "tree-pass.h"
52 #include "tree-flow.h"
53 #include "target.h"
54 #include "timevar.h"
55 #include "df.h"
56 #include "diagnostic.h"
58 /* Decide whether a function's arguments should be processed
59 from first to last or from last to first.
61 They should be processed from last to first if the stack and args grow in
62 opposite directions, but only if we have push insns. */
64 #ifdef PUSH_ROUNDING
66 #ifndef PUSH_ARGS_REVERSED
67 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
68 #define PUSH_ARGS_REVERSED /* If it's last to first. */
69 #endif
70 #endif
72 #endif
74 #ifndef STACK_PUSH_CODE
75 #ifdef STACK_GROWS_DOWNWARD
76 #define STACK_PUSH_CODE PRE_DEC
77 #else
78 #define STACK_PUSH_CODE PRE_INC
79 #endif
80 #endif
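/* A minimal sketch of how STACK_PUSH_CODE shapes a push destination,
   assuming a stack-grows-downward target where it resolves to PRE_DEC;
   the helper name below is only illustrative.  */
#if 0
static rtx
example_push_dest (enum machine_mode mode)
{
  /* (mem:MODE (pre_dec:P (reg sp))): the stack pointer is adjusted by
     the size of MODE before the store.  */
  return gen_rtx_MEM (mode, gen_rtx_PRE_DEC (Pmode, stack_pointer_rtx));
}
#endif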
83 /* If this is nonzero, we do not bother generating VOLATILE
84 around volatile memory references, and we are willing to
85 output indirect addresses. If cse is to follow, we reject
86 indirect addresses so a useful potential cse is generated;
87 if it is used only once, instruction combination will produce
88 the same indirect address eventually. */
89 int cse_not_expected;
91 /* This structure is used by move_by_pieces to describe the move to
92 be performed. */
93 struct move_by_pieces
95 rtx to;
96 rtx to_addr;
97 int autinc_to;
98 int explicit_inc_to;
99 rtx from;
100 rtx from_addr;
101 int autinc_from;
102 int explicit_inc_from;
103 unsigned HOST_WIDE_INT len;
104 HOST_WIDE_INT offset;
105 int reverse;
108 /* This structure is used by store_by_pieces to describe the clear to
109 be performed. */
111 struct store_by_pieces
113 rtx to;
114 rtx to_addr;
115 int autinc_to;
116 int explicit_inc_to;
117 unsigned HOST_WIDE_INT len;
118 HOST_WIDE_INT offset;
119 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
120 void *constfundata;
121 int reverse;
124 static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
125 unsigned int,
126 unsigned int);
127 static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
128 struct move_by_pieces *);
129 static bool block_move_libcall_safe_for_call_parm (void);
130 static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned, unsigned, HOST_WIDE_INT);
131 static tree emit_block_move_libcall_fn (int);
132 static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
133 static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
134 static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
135 static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
136 static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
137 struct store_by_pieces *);
138 static tree clear_storage_libcall_fn (int);
139 static rtx compress_float_constant (rtx, rtx);
140 static rtx get_subtarget (rtx);
141 static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
142 HOST_WIDE_INT, enum machine_mode,
143 tree, tree, int, alias_set_type);
144 static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
145 static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
146 tree, tree, alias_set_type, bool);
148 static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (const_tree, const_tree);
150 static int is_aligning_offset (const_tree, const_tree);
151 static void expand_operands (tree, tree, rtx, rtx*, rtx*,
152 enum expand_modifier);
153 static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
154 static rtx do_store_flag (tree, rtx, enum machine_mode);
155 #ifdef PUSH_ROUNDING
156 static void emit_single_push_insn (enum machine_mode, rtx, tree);
157 #endif
158 static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
159 static rtx const_vector_from_tree (tree);
160 static void write_complex_part (rtx, rtx, bool);
162 /* Record for each mode whether we can move a register directly to or
163 from an object of that mode in memory. If we can't, we won't try
164 to use that mode directly when accessing a field of that mode. */
166 static char direct_load[NUM_MACHINE_MODES];
167 static char direct_store[NUM_MACHINE_MODES];
169 /* Record for each mode whether we can float-extend from memory. */
171 static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
173 /* This macro is used to determine whether move_by_pieces should be called
174 to perform a structure copy. */
175 #ifndef MOVE_BY_PIECES_P
176 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
177 (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
178 < (unsigned int) MOVE_RATIO (optimize_insn_for_speed_p ()))
179 #endif
181 /* This macro is used to determine whether clear_by_pieces should be
182 called to clear storage. */
183 #ifndef CLEAR_BY_PIECES_P
184 #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
185 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
186 < (unsigned int) CLEAR_RATIO (optimize_insn_for_speed_p ()))
187 #endif
189 /* This macro is used to determine whether store_by_pieces should be
190 called to "memset" storage with byte values other than zero. */
191 #ifndef SET_BY_PIECES_P
192 #define SET_BY_PIECES_P(SIZE, ALIGN) \
193 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
194 < (unsigned int) SET_RATIO (optimize_insn_for_speed_p ()))
195 #endif
197 /* This macro is used to determine whether store_by_pieces should be
198 called to "memcpy" storage when the source is a constant string. */
199 #ifndef STORE_BY_PIECES_P
200 #define STORE_BY_PIECES_P(SIZE, ALIGN) \
201 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
202 < (unsigned int) MOVE_RATIO (optimize_insn_for_speed_p ()))
203 #endif
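/* A minimal sketch of how these predicates are meant to be used,
   mirroring emit_block_move_hints further down in this file; X, Y,
   SIZE and ALIGN are assumed to come from the caller.  */
#if 0
static void
example_maybe_copy_by_pieces (rtx x, rtx y, rtx size, unsigned int align)
{
  /* Copy piecewise only when the estimated insn count beats the
     target's MOVE_RATIO; otherwise some other strategy is chosen.  */
  if (GET_CODE (size) == CONST_INT
      && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align, 0);
}
#endif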
205 /* This array records the insn_code of insns to perform block moves. */
206 enum insn_code movmem_optab[NUM_MACHINE_MODES];
208 /* This array records the insn_code of insns to perform block sets. */
209 enum insn_code setmem_optab[NUM_MACHINE_MODES];
211 /* These arrays record the insn_code of three different kinds of insns
212 to perform block compares. */
213 enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
214 enum insn_code cmpstrn_optab[NUM_MACHINE_MODES];
215 enum insn_code cmpmem_optab[NUM_MACHINE_MODES];
217 /* Synchronization primitives. */
218 enum insn_code sync_add_optab[NUM_MACHINE_MODES];
219 enum insn_code sync_sub_optab[NUM_MACHINE_MODES];
220 enum insn_code sync_ior_optab[NUM_MACHINE_MODES];
221 enum insn_code sync_and_optab[NUM_MACHINE_MODES];
222 enum insn_code sync_xor_optab[NUM_MACHINE_MODES];
223 enum insn_code sync_nand_optab[NUM_MACHINE_MODES];
224 enum insn_code sync_old_add_optab[NUM_MACHINE_MODES];
225 enum insn_code sync_old_sub_optab[NUM_MACHINE_MODES];
226 enum insn_code sync_old_ior_optab[NUM_MACHINE_MODES];
227 enum insn_code sync_old_and_optab[NUM_MACHINE_MODES];
228 enum insn_code sync_old_xor_optab[NUM_MACHINE_MODES];
229 enum insn_code sync_old_nand_optab[NUM_MACHINE_MODES];
230 enum insn_code sync_new_add_optab[NUM_MACHINE_MODES];
231 enum insn_code sync_new_sub_optab[NUM_MACHINE_MODES];
232 enum insn_code sync_new_ior_optab[NUM_MACHINE_MODES];
233 enum insn_code sync_new_and_optab[NUM_MACHINE_MODES];
234 enum insn_code sync_new_xor_optab[NUM_MACHINE_MODES];
235 enum insn_code sync_new_nand_optab[NUM_MACHINE_MODES];
236 enum insn_code sync_compare_and_swap[NUM_MACHINE_MODES];
237 enum insn_code sync_lock_test_and_set[NUM_MACHINE_MODES];
238 enum insn_code sync_lock_release[NUM_MACHINE_MODES];
240 /* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow. */
242 #ifndef SLOW_UNALIGNED_ACCESS
243 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
244 #endif
246 /* This is run to set up which modes can be used
247 directly in memory and to initialize the block move optab. It is run
248 at the beginning of compilation and when the target is reinitialized. */
250 void
251 init_expr_target (void)
253 rtx insn, pat;
254 enum machine_mode mode;
255 int num_clobbers;
256 rtx mem, mem1;
257 rtx reg;
259 /* Try indexing by frame ptr and try by stack ptr.
260 It is known that on the Convex the stack ptr isn't a valid index.
261 With luck, one or the other is valid on any machine. */
262 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
263 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
265 /* A scratch register we can modify in-place below to avoid
266 useless RTL allocations. */
267 reg = gen_rtx_REG (VOIDmode, -1);
269 insn = rtx_alloc (INSN);
270 pat = gen_rtx_SET (VOIDmode, NULL_RTX, NULL_RTX);
271 PATTERN (insn) = pat;
273 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
274 mode = (enum machine_mode) ((int) mode + 1))
276 int regno;
278 direct_load[(int) mode] = direct_store[(int) mode] = 0;
279 PUT_MODE (mem, mode);
280 PUT_MODE (mem1, mode);
281 PUT_MODE (reg, mode);
283 /* See if there is some register that can be used in this mode and
284 directly loaded or stored from memory. */
286 if (mode != VOIDmode && mode != BLKmode)
287 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
288 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
289 regno++)
291 if (! HARD_REGNO_MODE_OK (regno, mode))
292 continue;
294 SET_REGNO (reg, regno);
296 SET_SRC (pat) = mem;
297 SET_DEST (pat) = reg;
298 if (recog (pat, insn, &num_clobbers) >= 0)
299 direct_load[(int) mode] = 1;
301 SET_SRC (pat) = mem1;
302 SET_DEST (pat) = reg;
303 if (recog (pat, insn, &num_clobbers) >= 0)
304 direct_load[(int) mode] = 1;
306 SET_SRC (pat) = reg;
307 SET_DEST (pat) = mem;
308 if (recog (pat, insn, &num_clobbers) >= 0)
309 direct_store[(int) mode] = 1;
311 SET_SRC (pat) = reg;
312 SET_DEST (pat) = mem1;
313 if (recog (pat, insn, &num_clobbers) >= 0)
314 direct_store[(int) mode] = 1;
318 mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
320 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
321 mode = GET_MODE_WIDER_MODE (mode))
323 enum machine_mode srcmode;
324 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
325 srcmode = GET_MODE_WIDER_MODE (srcmode))
327 enum insn_code ic;
329 ic = can_extend_p (mode, srcmode, 0);
330 if (ic == CODE_FOR_nothing)
331 continue;
333 PUT_MODE (mem, srcmode);
335 if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
336 float_extend_from_mem[mode][srcmode] = true;
341 /* This is run at the start of compiling a function. */
343 void
344 init_expr (void)
346 memset (&crtl->expr, 0, sizeof (crtl->expr));
349 /* Copy data from FROM to TO, where the machine modes are not the same.
350 Both modes may be integer, or both may be floating, or both may be
351 fixed-point.
352 UNSIGNEDP should be nonzero if FROM is an unsigned type.
353 This causes zero-extension instead of sign-extension. */
355 void
356 convert_move (rtx to, rtx from, int unsignedp)
358 enum machine_mode to_mode = GET_MODE (to);
359 enum machine_mode from_mode = GET_MODE (from);
360 int to_real = SCALAR_FLOAT_MODE_P (to_mode);
361 int from_real = SCALAR_FLOAT_MODE_P (from_mode);
362 enum insn_code code;
363 rtx libcall;
365 /* rtx code for making an equivalent value. */
366 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
367 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
370 gcc_assert (to_real == from_real);
371 gcc_assert (to_mode != BLKmode);
372 gcc_assert (from_mode != BLKmode);
374 /* If the source and destination are already the same, then there's
375 nothing to do. */
376 if (to == from)
377 return;
379 /* If FROM is a SUBREG that indicates that we have already done at least
380 the required extension, strip it. We don't handle such SUBREGs as
381 TO here. */
383 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
384 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
385 >= GET_MODE_SIZE (to_mode))
386 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
387 from = gen_lowpart (to_mode, from), from_mode = to_mode;
389 gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));
391 if (to_mode == from_mode
392 || (from_mode == VOIDmode && CONSTANT_P (from)))
394 emit_move_insn (to, from);
395 return;
398 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
400 gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));
402 if (VECTOR_MODE_P (to_mode))
403 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
404 else
405 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
407 emit_move_insn (to, from);
408 return;
411 if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
413 convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
414 convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
415 return;
418 if (to_real)
420 rtx value, insns;
421 convert_optab tab;
423 gcc_assert ((GET_MODE_PRECISION (from_mode)
424 != GET_MODE_PRECISION (to_mode))
425 || (DECIMAL_FLOAT_MODE_P (from_mode)
426 != DECIMAL_FLOAT_MODE_P (to_mode)));
428 if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
429 /* Conversion between decimal float and binary float, same size. */
430 tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
431 else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
432 tab = sext_optab;
433 else
434 tab = trunc_optab;
436 /* Try converting directly if the insn is supported. */
438 code = convert_optab_handler (tab, to_mode, from_mode)->insn_code;
439 if (code != CODE_FOR_nothing)
441 emit_unop_insn (code, to, from,
442 tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
443 return;
446 /* Otherwise use a libcall. */
447 libcall = convert_optab_libfunc (tab, to_mode, from_mode);
449 /* Is this conversion implemented yet? */
450 gcc_assert (libcall);
452 start_sequence ();
453 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
454 1, from, from_mode);
455 insns = get_insns ();
456 end_sequence ();
457 emit_libcall_block (insns, to, value,
458 tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
459 from)
460 : gen_rtx_FLOAT_EXTEND (to_mode, from));
461 return;
464 /* Handle pointer conversion. */ /* SPEE 900220. */
465 /* Targets are expected to provide conversion insns between PxImode and
466 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
467 if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
469 enum machine_mode full_mode
470 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
472 gcc_assert (convert_optab_handler (trunc_optab, to_mode, full_mode)->insn_code
473 != CODE_FOR_nothing);
475 if (full_mode != from_mode)
476 from = convert_to_mode (full_mode, from, unsignedp);
477 emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, full_mode)->insn_code,
478 to, from, UNKNOWN);
479 return;
481 if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
483 rtx new_from;
484 enum machine_mode full_mode
485 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
487 gcc_assert (convert_optab_handler (sext_optab, full_mode, from_mode)->insn_code
488 != CODE_FOR_nothing);
490 if (to_mode == full_mode)
492 emit_unop_insn (convert_optab_handler (sext_optab, full_mode, from_mode)->insn_code,
493 to, from, UNKNOWN);
494 return;
497 new_from = gen_reg_rtx (full_mode);
498 emit_unop_insn (convert_optab_handler (sext_optab, full_mode, from_mode)->insn_code,
499 new_from, from, UNKNOWN);
501 /* else proceed to integer conversions below. */
502 from_mode = full_mode;
503 from = new_from;
506 /* Make sure both are fixed-point modes or both are not. */
507 gcc_assert (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode) ==
508 ALL_SCALAR_FIXED_POINT_MODE_P (to_mode));
509 if (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode))
511 /* If we widen from_mode to to_mode and they are in the same class,
512 we won't saturate the result.
513 Otherwise, always saturate the result to play safe. */
514 if (GET_MODE_CLASS (from_mode) == GET_MODE_CLASS (to_mode)
515 && GET_MODE_SIZE (from_mode) < GET_MODE_SIZE (to_mode))
516 expand_fixed_convert (to, from, 0, 0);
517 else
518 expand_fixed_convert (to, from, 0, 1);
519 return;
522 /* Now both modes are integers. */
524 /* Handle expanding beyond a word. */
525 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
526 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
528 rtx insns;
529 rtx lowpart;
530 rtx fill_value;
531 rtx lowfrom;
532 int i;
533 enum machine_mode lowpart_mode;
534 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
536 /* Try converting directly if the insn is supported. */
537 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
538 != CODE_FOR_nothing)
540 /* If FROM is a SUBREG, put it into a register. Do this
541 so that we always generate the same set of insns for
542 better cse'ing; if an intermediate assignment occurred,
543 we won't be doing the operation directly on the SUBREG. */
544 if (optimize > 0 && GET_CODE (from) == SUBREG)
545 from = force_reg (from_mode, from);
546 emit_unop_insn (code, to, from, equiv_code);
547 return;
549 /* Next, try converting via full word. */
550 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
551 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
552 != CODE_FOR_nothing))
554 rtx word_to = gen_reg_rtx (word_mode);
555 if (REG_P (to))
557 if (reg_overlap_mentioned_p (to, from))
558 from = force_reg (from_mode, from);
559 emit_clobber (to);
561 convert_move (word_to, from, unsignedp);
562 emit_unop_insn (code, to, word_to, equiv_code);
563 return;
566 /* No special multiword conversion insn; do it by hand. */
567 start_sequence ();
569 /* Since we will turn this into a no conflict block, we must ensure
570 that the source does not overlap the target. */
572 if (reg_overlap_mentioned_p (to, from))
573 from = force_reg (from_mode, from);
575 /* Get a copy of FROM widened to a word, if necessary. */
576 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
577 lowpart_mode = word_mode;
578 else
579 lowpart_mode = from_mode;
581 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
583 lowpart = gen_lowpart (lowpart_mode, to);
584 emit_move_insn (lowpart, lowfrom);
586 /* Compute the value to put in each remaining word. */
587 if (unsignedp)
588 fill_value = const0_rtx;
589 else
590 fill_value = emit_store_flag (gen_reg_rtx (word_mode),
591 LT, lowfrom, const0_rtx,
592 VOIDmode, 0, -1);
594 /* Fill the remaining words. */
595 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
597 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
598 rtx subword = operand_subword (to, index, 1, to_mode);
600 gcc_assert (subword);
602 if (fill_value != subword)
603 emit_move_insn (subword, fill_value);
606 insns = get_insns ();
607 end_sequence ();
609 emit_insn (insns);
610 return;
613 /* Truncating multi-word to a word or less. */
614 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
615 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
617 if (!((MEM_P (from)
618 && ! MEM_VOLATILE_P (from)
619 && direct_load[(int) to_mode]
620 && ! mode_dependent_address_p (XEXP (from, 0)))
621 || REG_P (from)
622 || GET_CODE (from) == SUBREG))
623 from = force_reg (from_mode, from);
624 convert_move (to, gen_lowpart (word_mode, from), 0);
625 return;
628 /* Now follow all the conversions between integers
629 no more than a word long. */
631 /* For truncation, usually we can just refer to FROM in a narrower mode. */
632 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
633 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
634 GET_MODE_BITSIZE (from_mode)))
636 if (!((MEM_P (from)
637 && ! MEM_VOLATILE_P (from)
638 && direct_load[(int) to_mode]
639 && ! mode_dependent_address_p (XEXP (from, 0)))
640 || REG_P (from)
641 || GET_CODE (from) == SUBREG))
642 from = force_reg (from_mode, from);
643 if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
644 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
645 from = copy_to_reg (from);
646 emit_move_insn (to, gen_lowpart (to_mode, from));
647 return;
650 /* Handle extension. */
651 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
653 /* Convert directly if that works. */
654 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
655 != CODE_FOR_nothing)
657 emit_unop_insn (code, to, from, equiv_code);
658 return;
660 else
662 enum machine_mode intermediate;
663 rtx tmp;
664 tree shift_amount;
666 /* Search for a mode to convert via. */
667 for (intermediate = from_mode; intermediate != VOIDmode;
668 intermediate = GET_MODE_WIDER_MODE (intermediate))
669 if (((can_extend_p (to_mode, intermediate, unsignedp)
670 != CODE_FOR_nothing)
671 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
672 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
673 GET_MODE_BITSIZE (intermediate))))
674 && (can_extend_p (intermediate, from_mode, unsignedp)
675 != CODE_FOR_nothing))
677 convert_move (to, convert_to_mode (intermediate, from,
678 unsignedp), unsignedp);
679 return;
682 /* No suitable intermediate mode.
683 Generate what we need with shifts. */
684 shift_amount = build_int_cst (NULL_TREE,
685 GET_MODE_BITSIZE (to_mode)
686 - GET_MODE_BITSIZE (from_mode));
687 from = gen_lowpart (to_mode, force_reg (from_mode, from));
688 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
689 to, unsignedp);
690 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
691 to, unsignedp);
692 if (tmp != to)
693 emit_move_insn (to, tmp);
694 return;
698 /* Support special truncate insns for certain modes. */
699 if (convert_optab_handler (trunc_optab, to_mode, from_mode)->insn_code != CODE_FOR_nothing)
701 emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, from_mode)->insn_code,
702 to, from, UNKNOWN);
703 return;
706 /* Handle truncation of volatile memrefs, and so on;
707 the things that couldn't be truncated directly,
708 and for which there was no special instruction.
710 ??? Code above formerly short-circuited this, for most integer
711 mode pairs, with a force_reg in from_mode followed by a recursive
712 call to this routine. Appears always to have been wrong. */
713 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
715 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
716 emit_move_insn (to, temp);
717 return;
720 /* Mode combination is not recognized. */
721 gcc_unreachable ();
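/* A minimal usage sketch for convert_move with hypothetical pseudo
   registers; UNSIGNEDP selects zero- versus sign-extension when
   widening, as described above.  */
#if 0
static void
example_widen_byte (void)
{
  rtx byte_val = gen_reg_rtx (QImode);
  rtx word_val = gen_reg_rtx (SImode);

  /* Zero-extend the QImode value into the SImode register;
     passing 0 for UNSIGNEDP would sign-extend instead.  */
  convert_move (word_val, byte_val, 1);
}
#endif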
724 /* Return an rtx for a value that would result
725 from converting X to mode MODE.
726 Both X and MODE may be floating, or both integer.
727 UNSIGNEDP is nonzero if X is an unsigned value.
728 This can be done by referring to a part of X in place
729 or by copying to a new temporary with conversion. */
732 convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
734 return convert_modes (mode, VOIDmode, x, unsignedp);
737 /* Return an rtx for a value that would result
738 from converting X from mode OLDMODE to mode MODE.
739 Both modes may be floating, or both integer.
740 UNSIGNEDP is nonzero if X is an unsigned value.
742 This can be done by referring to a part of X in place
743 or by copying to a new temporary with conversion.
745 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode. */
748 convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
750 rtx temp;
752 /* If FROM is a SUBREG that indicates that we have already done at least
753 the required extension, strip it. */
755 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
756 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
757 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
758 x = gen_lowpart (mode, x);
760 if (GET_MODE (x) != VOIDmode)
761 oldmode = GET_MODE (x);
763 if (mode == oldmode)
764 return x;
766 /* There is one case that we must handle specially: If we are converting
767 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
768 we are to interpret the constant as unsigned, gen_lowpart will do
769 the wrong thing if the constant appears negative. What we want to do is
770 make the high-order word of the constant zero, not all ones. */
772 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
773 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
774 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
776 HOST_WIDE_INT val = INTVAL (x);
778 if (oldmode != VOIDmode
779 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
781 int width = GET_MODE_BITSIZE (oldmode);
783 /* We need to zero extend VAL. */
784 val &= ((HOST_WIDE_INT) 1 << width) - 1;
787 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
790 /* We can do this with a gen_lowpart if both desired and current modes
791 are integer, and this is either a constant integer, a register, or a
792 non-volatile MEM. Except for the constant case where MODE is no
793 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
795 if ((GET_CODE (x) == CONST_INT
796 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
797 || (GET_MODE_CLASS (mode) == MODE_INT
798 && GET_MODE_CLASS (oldmode) == MODE_INT
799 && (GET_CODE (x) == CONST_DOUBLE
800 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
801 && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
802 && direct_load[(int) mode])
803 || (REG_P (x)
804 && (! HARD_REGISTER_P (x)
805 || HARD_REGNO_MODE_OK (REGNO (x), mode))
806 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
807 GET_MODE_BITSIZE (GET_MODE (x)))))))))
809 /* ?? If we don't know OLDMODE, we have to assume here that
810 X does not need sign- or zero-extension. This may not be
811 the case, but it's the best we can do. */
812 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
813 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
815 HOST_WIDE_INT val = INTVAL (x);
816 int width = GET_MODE_BITSIZE (oldmode);
818 /* We must sign or zero-extend in this case. Start by
819 zero-extending, then sign extend if we need to. */
820 val &= ((HOST_WIDE_INT) 1 << width) - 1;
821 if (! unsignedp
822 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
823 val |= (HOST_WIDE_INT) (-1) << width;
825 return gen_int_mode (val, mode);
828 return gen_lowpart (mode, x);
831 /* Converting an integer constant into a mode is always equivalent to a
832 subreg operation. */
833 if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
835 gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
836 return simplify_gen_subreg (mode, x, oldmode, 0);
839 temp = gen_reg_rtx (mode);
840 convert_move (temp, x, unsignedp);
841 return temp;
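/* A minimal sketch of the difference UNSIGNEDP makes when widening a
   mode-less constant; the helper below is hypothetical.  */
#if 0
static rtx
example_widen_constant (void)
{
  /* A CONST_INT carries no mode, so OLDMODE must say how to read it.
     Interpreting -1 as an unsigned SImode value and widening to
     DImode yields 0xffffffff, not -1; with UNSIGNEDP == 0 the result
     would stay -1.  */
  return convert_modes (DImode, SImode, GEN_INT (-1), 1);
}
#endif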
844 /* STORE_MAX_PIECES is the number of bytes at a time that we can
845 store efficiently. Due to internal GCC limitations, this is
846 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
847 for an immediate constant. */
849 #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
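/* For example, on a host with a 64-bit HOST_WIDE_INT and a target
   whose MOVE_MAX_PIECES is 8, this is MIN (8, 16), i.e. stores of up
   to 8 bytes at a time can be emitted with immediate constants.  */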
851 /* Determine whether the LEN bytes can be moved by using several move
852 instructions. Return nonzero if a call to move_by_pieces should
853 succeed. */
856 can_move_by_pieces (unsigned HOST_WIDE_INT len,
857 unsigned int align ATTRIBUTE_UNUSED)
859 return MOVE_BY_PIECES_P (len, align);
862 /* Generate several move instructions to copy LEN bytes from block FROM to
863 block TO. (These are MEM rtx's with BLKmode).
865 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
866 used to push FROM to the stack.
868 ALIGN is maximum stack alignment we can assume.
870 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
871 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
872 stpcpy. */
875 move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
876 unsigned int align, int endp)
878 struct move_by_pieces data;
879 rtx to_addr, from_addr = XEXP (from, 0);
880 unsigned int max_size = MOVE_MAX_PIECES + 1;
881 enum machine_mode mode = VOIDmode, tmode;
882 enum insn_code icode;
884 align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
886 data.offset = 0;
887 data.from_addr = from_addr;
888 if (to)
890 to_addr = XEXP (to, 0);
891 data.to = to;
892 data.autinc_to
893 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
894 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
895 data.reverse
896 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
898 else
900 to_addr = NULL_RTX;
901 data.to = NULL_RTX;
902 data.autinc_to = 1;
903 #ifdef STACK_GROWS_DOWNWARD
904 data.reverse = 1;
905 #else
906 data.reverse = 0;
907 #endif
909 data.to_addr = to_addr;
910 data.from = from;
911 data.autinc_from
912 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
913 || GET_CODE (from_addr) == POST_INC
914 || GET_CODE (from_addr) == POST_DEC);
916 data.explicit_inc_from = 0;
917 data.explicit_inc_to = 0;
918 if (data.reverse) data.offset = len;
919 data.len = len;
921 /* If copying requires more than two move insns,
922 copy addresses to registers (to make displacements shorter)
923 and use post-increment if available. */
924 if (!(data.autinc_from && data.autinc_to)
925 && move_by_pieces_ninsns (len, align, max_size) > 2)
927 /* Find the mode of the largest move... */
928 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
929 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
930 if (GET_MODE_SIZE (tmode) < max_size)
931 mode = tmode;
933 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
935 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
936 data.autinc_from = 1;
937 data.explicit_inc_from = -1;
939 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
941 data.from_addr = copy_addr_to_reg (from_addr);
942 data.autinc_from = 1;
943 data.explicit_inc_from = 1;
945 if (!data.autinc_from && CONSTANT_P (from_addr))
946 data.from_addr = copy_addr_to_reg (from_addr);
947 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
949 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
950 data.autinc_to = 1;
951 data.explicit_inc_to = -1;
953 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
955 data.to_addr = copy_addr_to_reg (to_addr);
956 data.autinc_to = 1;
957 data.explicit_inc_to = 1;
959 if (!data.autinc_to && CONSTANT_P (to_addr))
960 data.to_addr = copy_addr_to_reg (to_addr);
963 tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
964 if (align >= GET_MODE_ALIGNMENT (tmode))
965 align = GET_MODE_ALIGNMENT (tmode);
966 else
968 enum machine_mode xmode;
970 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
971 tmode != VOIDmode;
972 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
973 if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
974 || SLOW_UNALIGNED_ACCESS (tmode, align))
975 break;
977 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
980 /* First move what we can in the largest integer mode, then go to
981 successively smaller modes. */
983 while (max_size > 1)
985 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
986 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
987 if (GET_MODE_SIZE (tmode) < max_size)
988 mode = tmode;
990 if (mode == VOIDmode)
991 break;
993 icode = optab_handler (mov_optab, mode)->insn_code;
994 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
995 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
997 max_size = GET_MODE_SIZE (mode);
1000 /* The code above should have handled everything. */
1001 gcc_assert (!data.len);
1003 if (endp)
1005 rtx to1;
1007 gcc_assert (!data.reverse);
1008 if (data.autinc_to)
1010 if (endp == 2)
1012 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
1013 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
1014 else
1015 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
1016 -1));
1018 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
1019 data.offset);
1021 else
1023 if (endp == 2)
1024 --data.offset;
1025 to1 = adjust_address (data.to, QImode, data.offset);
1027 return to1;
1029 else
1030 return data.to;
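/* A minimal sketch of the ENDP convention documented above, assuming
   DST and SRC are BLKmode MEMs prepared by the caller; the helper
   name is illustrative only.  */
#if 0
static rtx
example_mempcpy_by_pieces (rtx dst, rtx src, unsigned HOST_WIDE_INT len,
                           unsigned int align)
{
  /* ENDP == 1 returns a QImode MEM just past the last byte copied
     (mempcpy style); ENDP == 2 would return the byte before that
     (stpcpy style), and ENDP == 0 simply returns DST.  */
  return move_by_pieces (dst, src, len, align, 1);
}
#endif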
1033 /* Return number of insns required to move L bytes by pieces.
1034 ALIGN (in bits) is maximum alignment we can assume. */
1036 static unsigned HOST_WIDE_INT
1037 move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
1038 unsigned int max_size)
1040 unsigned HOST_WIDE_INT n_insns = 0;
1041 enum machine_mode tmode;
1043 tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
1044 if (align >= GET_MODE_ALIGNMENT (tmode))
1045 align = GET_MODE_ALIGNMENT (tmode);
1046 else
1048 enum machine_mode tmode, xmode;
1050 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
1051 tmode != VOIDmode;
1052 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
1053 if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
1054 || SLOW_UNALIGNED_ACCESS (tmode, align))
1055 break;
1057 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
1060 while (max_size > 1)
1062 enum machine_mode mode = VOIDmode;
1063 enum insn_code icode;
1065 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1066 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1067 if (GET_MODE_SIZE (tmode) < max_size)
1068 mode = tmode;
1070 if (mode == VOIDmode)
1071 break;
1073 icode = optab_handler (mov_optab, mode)->insn_code;
1074 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1075 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1077 max_size = GET_MODE_SIZE (mode);
1080 gcc_assert (!l);
1081 return n_insns;
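/* Worked example, assuming a 32-bit target where QImode, HImode and
   SImode moves all exist and the data is 4-byte aligned: for L == 7
   the loop above counts one SImode move (4 bytes), one HImode move
   (2 bytes) and one QImode move (1 byte), so it returns 3.  */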
1084 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1085 with move instructions for mode MODE. GENFUN is the gen_... function
1086 to make a move insn for that mode. DATA has all the other info. */
1088 static void
1089 move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
1090 struct move_by_pieces *data)
1092 unsigned int size = GET_MODE_SIZE (mode);
1093 rtx to1 = NULL_RTX, from1;
1095 while (data->len >= size)
1097 if (data->reverse)
1098 data->offset -= size;
1100 if (data->to)
1102 if (data->autinc_to)
1103 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1104 data->offset);
1105 else
1106 to1 = adjust_address (data->to, mode, data->offset);
1109 if (data->autinc_from)
1110 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1111 data->offset);
1112 else
1113 from1 = adjust_address (data->from, mode, data->offset);
1115 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1116 emit_insn (gen_add2_insn (data->to_addr,
1117 GEN_INT (-(HOST_WIDE_INT)size)));
1118 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1119 emit_insn (gen_add2_insn (data->from_addr,
1120 GEN_INT (-(HOST_WIDE_INT)size)));
1122 if (data->to)
1123 emit_insn ((*genfun) (to1, from1));
1124 else
1126 #ifdef PUSH_ROUNDING
1127 emit_single_push_insn (mode, from1, NULL);
1128 #else
1129 gcc_unreachable ();
1130 #endif
1133 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1134 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1135 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1136 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1138 if (! data->reverse)
1139 data->offset += size;
1141 data->len -= size;
1145 /* Emit code to move a block Y to a block X. This may be done with
1146 string-move instructions, with multiple scalar move instructions,
1147 or with a library call.
1149 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1150 SIZE is an rtx that says how long they are.
1151 ALIGN is the maximum alignment we can assume they have.
1152 METHOD describes what kind of copy this is, and what mechanisms may be used.
1154 Return the address of the new block, if memcpy is called and returns it,
1155 0 otherwise. */
1158 emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method,
1159 unsigned int expected_align, HOST_WIDE_INT expected_size)
1161 bool may_use_call;
1162 rtx retval = 0;
1163 unsigned int align;
1165 switch (method)
1167 case BLOCK_OP_NORMAL:
1168 case BLOCK_OP_TAILCALL:
1169 may_use_call = true;
1170 break;
1172 case BLOCK_OP_CALL_PARM:
1173 may_use_call = block_move_libcall_safe_for_call_parm ();
1175 /* Make inhibit_defer_pop nonzero around the library call
1176 to force it to pop the arguments right away. */
1177 NO_DEFER_POP;
1178 break;
1180 case BLOCK_OP_NO_LIBCALL:
1181 may_use_call = false;
1182 break;
1184 default:
1185 gcc_unreachable ();
1188 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1190 gcc_assert (MEM_P (x));
1191 gcc_assert (MEM_P (y));
1192 gcc_assert (size);
1194 /* Make sure we've got BLKmode addresses; store_one_arg can decide that
1195 block copy is more efficient for other large modes, e.g. DCmode. */
1196 x = adjust_address (x, BLKmode, 0);
1197 y = adjust_address (y, BLKmode, 0);
1199 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1200 can be incorrect is coming from __builtin_memcpy. */
1201 if (GET_CODE (size) == CONST_INT)
1203 if (INTVAL (size) == 0)
1204 return 0;
1206 x = shallow_copy_rtx (x);
1207 y = shallow_copy_rtx (y);
1208 set_mem_size (x, size);
1209 set_mem_size (y, size);
1212 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1213 move_by_pieces (x, y, INTVAL (size), align, 0);
1214 else if (emit_block_move_via_movmem (x, y, size, align,
1215 expected_align, expected_size))
1217 else if (may_use_call)
1218 retval = emit_block_move_via_libcall (x, y, size,
1219 method == BLOCK_OP_TAILCALL);
1220 else
1221 emit_block_move_via_loop (x, y, size, align);
1223 if (method == BLOCK_OP_CALL_PARM)
1224 OK_DEFER_POP;
1226 return retval;
1230 emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
1232 return emit_block_move_hints (x, y, size, method, 0, -1);
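/* A minimal usage sketch for emit_block_move, assuming the caller
   already has two BLKmode MEMs of known size; the names below are
   hypothetical.  */
#if 0
static void
example_copy_block (rtx dst_mem, rtx src_mem, HOST_WIDE_INT nbytes)
{
  /* BLOCK_OP_NORMAL permits any strategy: move_by_pieces, a movmem
     pattern, a memcpy libcall, or the explicit loop fallback.  */
  emit_block_move (dst_mem, src_mem, GEN_INT (nbytes), BLOCK_OP_NORMAL);
}
#endif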
1235 /* A subroutine of emit_block_move. Returns true if calling the
1236 block move libcall will not clobber any parameters which may have
1237 already been placed on the stack. */
1239 static bool
1240 block_move_libcall_safe_for_call_parm (void)
1242 #if defined (REG_PARM_STACK_SPACE)
1243 tree fn;
1244 #endif
1246 /* If arguments are pushed on the stack, then they're safe. */
1247 if (PUSH_ARGS)
1248 return true;
1250 /* If registers go on the stack anyway, any argument is sure to clobber
1251 an outgoing argument. */
1252 #if defined (REG_PARM_STACK_SPACE)
1253 fn = emit_block_move_libcall_fn (false);
1254 if (OUTGOING_REG_PARM_STACK_SPACE ((!fn ? NULL_TREE : TREE_TYPE (fn)))
1255 && REG_PARM_STACK_SPACE (fn) != 0)
1256 return false;
1257 #endif
1259 /* If any argument goes in memory, then it might clobber an outgoing
1260 argument. */
1262 CUMULATIVE_ARGS args_so_far;
1263 tree fn, arg;
1265 fn = emit_block_move_libcall_fn (false);
1266 INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);
1268 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1269 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1271 enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1272 rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
1273 if (!tmp || !REG_P (tmp))
1274 return false;
1275 if (targetm.calls.arg_partial_bytes (&args_so_far, mode, NULL, 1))
1276 return false;
1277 FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
1280 return true;
1283 /* A subroutine of emit_block_move. Expand a movmem pattern;
1284 return true if successful. */
1286 static bool
1287 emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
1288 unsigned int expected_align, HOST_WIDE_INT expected_size)
1290 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1291 int save_volatile_ok = volatile_ok;
1292 enum machine_mode mode;
1294 if (expected_align < align)
1295 expected_align = align;
1297 /* Since this is a move insn, we don't care about volatility. */
1298 volatile_ok = 1;
1300 /* Try the most limited insn first, because there's no point
1301 including more than one in the machine description unless
1302 the more limited one has some advantage. */
1304 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1305 mode = GET_MODE_WIDER_MODE (mode))
1307 enum insn_code code = movmem_optab[(int) mode];
1308 insn_operand_predicate_fn pred;
1310 if (code != CODE_FOR_nothing
1311 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1312 here because if SIZE is less than the mode mask, as it is
1313 returned by the macro, it will definitely be less than the
1314 actual mode mask. */
1315 && ((GET_CODE (size) == CONST_INT
1316 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1317 <= (GET_MODE_MASK (mode) >> 1)))
1318 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1319 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1320 || (*pred) (x, BLKmode))
1321 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1322 || (*pred) (y, BLKmode))
1323 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1324 || (*pred) (opalign, VOIDmode)))
1326 rtx op2;
1327 rtx last = get_last_insn ();
1328 rtx pat;
1330 op2 = convert_to_mode (mode, size, 1);
1331 pred = insn_data[(int) code].operand[2].predicate;
1332 if (pred != 0 && ! (*pred) (op2, mode))
1333 op2 = copy_to_mode_reg (mode, op2);
1335 /* ??? When called via emit_block_move_for_call, it'd be
1336 nice if there were some way to inform the backend, so
1337 that it doesn't fail the expansion because it thinks
1338 emitting the libcall would be more efficient. */
1340 if (insn_data[(int) code].n_operands == 4)
1341 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1342 else
1343 pat = GEN_FCN ((int) code) (x, y, op2, opalign,
1344 GEN_INT (expected_align
1345 / BITS_PER_UNIT),
1346 GEN_INT (expected_size));
1347 if (pat)
1349 emit_insn (pat);
1350 volatile_ok = save_volatile_ok;
1351 return true;
1353 else
1354 delete_insns_since (last);
1358 volatile_ok = save_volatile_ok;
1359 return false;
1362 /* A subroutine of emit_block_move. Expand a call to memcpy.
1363 Return the return value from memcpy, 0 otherwise. */
1366 emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
1368 rtx dst_addr, src_addr;
1369 tree call_expr, fn, src_tree, dst_tree, size_tree;
1370 enum machine_mode size_mode;
1371 rtx retval;
1373 /* Emit code to copy the addresses of DST and SRC, and SIZE itself, into new
1374 pseudos. We can then place those new pseudos into a VAR_DECL and
1375 use them later. */
1377 dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
1378 src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));
1380 dst_addr = convert_memory_address (ptr_mode, dst_addr);
1381 src_addr = convert_memory_address (ptr_mode, src_addr);
1383 dst_tree = make_tree (ptr_type_node, dst_addr);
1384 src_tree = make_tree (ptr_type_node, src_addr);
1386 size_mode = TYPE_MODE (sizetype);
1388 size = convert_to_mode (size_mode, size, 1);
1389 size = copy_to_mode_reg (size_mode, size);
1391 /* It is incorrect to use the libcall calling conventions to call
1392 memcpy in this context. This could be a user call to memcpy and
1393 the user may wish to examine the return value from memcpy. For
1394 targets where libcalls and normal calls have different conventions
1395 for returning pointers, we could end up generating incorrect code. */
1397 size_tree = make_tree (sizetype, size);
1399 fn = emit_block_move_libcall_fn (true);
1400 call_expr = build_call_expr (fn, 3, dst_tree, src_tree, size_tree);
1401 CALL_EXPR_TAILCALL (call_expr) = tailcall;
1403 retval = expand_normal (call_expr);
1405 return retval;
1408 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1409 for the function we use for block copies. The first time FOR_CALL
1410 is true, we call assemble_external. */
1412 static GTY(()) tree block_move_fn;
1414 void
1415 init_block_move_fn (const char *asmspec)
1417 if (!block_move_fn)
1419 tree args, fn;
1421 fn = get_identifier ("memcpy");
1422 args = build_function_type_list (ptr_type_node, ptr_type_node,
1423 const_ptr_type_node, sizetype,
1424 NULL_TREE);
1426 fn = build_decl (FUNCTION_DECL, fn, args);
1427 DECL_EXTERNAL (fn) = 1;
1428 TREE_PUBLIC (fn) = 1;
1429 DECL_ARTIFICIAL (fn) = 1;
1430 TREE_NOTHROW (fn) = 1;
1431 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
1432 DECL_VISIBILITY_SPECIFIED (fn) = 1;
1434 block_move_fn = fn;
1437 if (asmspec)
1438 set_user_assembler_name (block_move_fn, asmspec);
1441 static tree
1442 emit_block_move_libcall_fn (int for_call)
1444 static bool emitted_extern;
1446 if (!block_move_fn)
1447 init_block_move_fn (NULL);
1449 if (for_call && !emitted_extern)
1451 emitted_extern = true;
1452 make_decl_rtl (block_move_fn);
1453 assemble_external (block_move_fn);
1456 return block_move_fn;
1459 /* A subroutine of emit_block_move. Copy the data via an explicit
1460 loop. This is used only when libcalls are forbidden. */
1461 /* ??? It'd be nice to copy in hunks larger than QImode. */
1463 static void
1464 emit_block_move_via_loop (rtx x, rtx y, rtx size,
1465 unsigned int align ATTRIBUTE_UNUSED)
1467 rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
1468 enum machine_mode iter_mode;
1470 iter_mode = GET_MODE (size);
1471 if (iter_mode == VOIDmode)
1472 iter_mode = word_mode;
1474 top_label = gen_label_rtx ();
1475 cmp_label = gen_label_rtx ();
1476 iter = gen_reg_rtx (iter_mode);
1478 emit_move_insn (iter, const0_rtx);
1480 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1481 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1482 do_pending_stack_adjust ();
1484 emit_jump (cmp_label);
1485 emit_label (top_label);
1487 tmp = convert_modes (Pmode, iter_mode, iter, true);
1488 x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
1489 y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
1490 x = change_address (x, QImode, x_addr);
1491 y = change_address (y, QImode, y_addr);
1493 emit_move_insn (x, y);
1495 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1496 true, OPTAB_LIB_WIDEN);
1497 if (tmp != iter)
1498 emit_move_insn (iter, tmp);
1500 emit_label (cmp_label);
1502 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1503 true, top_label);
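/* The loop emitted above is roughly equivalent to the following C,
   with ITER held in a pseudo register:

     for (iter = 0; iter < size; iter++)
       ((unsigned char *) x)[iter] = ((unsigned char *) y)[iter];

   i.e. a byte-at-a-time forward copy guarded by an unsigned
   comparison against SIZE.  */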
1506 /* Copy all or part of a value X into registers starting at REGNO.
1507 The number of registers to be filled is NREGS. */
1509 void
1510 move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
1512 int i;
1513 #ifdef HAVE_load_multiple
1514 rtx pat;
1515 rtx last;
1516 #endif
1518 if (nregs == 0)
1519 return;
1521 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1522 x = validize_mem (force_const_mem (mode, x));
1524 /* See if the machine can do this with a load multiple insn. */
1525 #ifdef HAVE_load_multiple
1526 if (HAVE_load_multiple)
1528 last = get_last_insn ();
1529 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1530 GEN_INT (nregs));
1531 if (pat)
1533 emit_insn (pat);
1534 return;
1536 else
1537 delete_insns_since (last);
1539 #endif
1541 for (i = 0; i < nregs; i++)
1542 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1543 operand_subword_force (x, i, mode));
1546 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1547 The number of registers to be filled is NREGS. */
1549 void
1550 move_block_from_reg (int regno, rtx x, int nregs)
1552 int i;
1554 if (nregs == 0)
1555 return;
1557 /* See if the machine can do this with a store multiple insn. */
1558 #ifdef HAVE_store_multiple
1559 if (HAVE_store_multiple)
1561 rtx last = get_last_insn ();
1562 rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1563 GEN_INT (nregs));
1564 if (pat)
1566 emit_insn (pat);
1567 return;
1569 else
1570 delete_insns_since (last);
1572 #endif
1574 for (i = 0; i < nregs; i++)
1576 rtx tem = operand_subword (x, i, 1, BLKmode);
1578 gcc_assert (tem);
1580 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1584 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1585 ORIG, where ORIG is a non-consecutive group of registers represented by
1586 a PARALLEL. The clone is identical to the original except that the
1587 original set of registers is replaced by a new set of pseudo registers.
1588 The new set has the same modes as the original set. */
1591 gen_group_rtx (rtx orig)
1593 int i, length;
1594 rtx *tmps;
1596 gcc_assert (GET_CODE (orig) == PARALLEL);
1598 length = XVECLEN (orig, 0);
1599 tmps = XALLOCAVEC (rtx, length);
1601 /* Skip a NULL entry in first slot. */
1602 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1604 if (i)
1605 tmps[0] = 0;
1607 for (; i < length; i++)
1609 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1610 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1612 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1615 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
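/* For illustration, a two-register group as handled here typically
   has the shape (register numbers are hypothetical)

     (parallel [(expr_list (reg:DI 3) (const_int 0))
                (expr_list (reg:DI 4) (const_int 8))])

   where each EXPR_LIST pairs a register with its byte offset into
   the value; gen_group_rtx keeps the modes and offsets but replaces
   the registers with fresh pseudos.  */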
1618 /* A subroutine of emit_group_load. Arguments as for emit_group_load,
1619 except that values are placed in TMPS[i], and must later be moved
1620 into the corresponding XEXP (XVECEXP (DST, 0, i), 0) element. */
1622 static void
1623 emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
1625 rtx src;
1626 int start, i;
1627 enum machine_mode m = GET_MODE (orig_src);
1629 gcc_assert (GET_CODE (dst) == PARALLEL);
1631 if (m != VOIDmode
1632 && !SCALAR_INT_MODE_P (m)
1633 && !MEM_P (orig_src)
1634 && GET_CODE (orig_src) != CONCAT)
1636 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
1637 if (imode == BLKmode)
1638 src = assign_stack_temp (GET_MODE (orig_src), ssize, 0);
1639 else
1640 src = gen_reg_rtx (imode);
1641 if (imode != BLKmode)
1642 src = gen_lowpart (GET_MODE (orig_src), src);
1643 emit_move_insn (src, orig_src);
1644 /* ...and back again. */
1645 if (imode != BLKmode)
1646 src = gen_lowpart (imode, src);
1647 emit_group_load_1 (tmps, dst, src, type, ssize);
1648 return;
1651 /* Check for a NULL entry, used to indicate that the parameter goes
1652 both on the stack and in registers. */
1653 if (XEXP (XVECEXP (dst, 0, 0), 0))
1654 start = 0;
1655 else
1656 start = 1;
1658 /* Process the pieces. */
1659 for (i = start; i < XVECLEN (dst, 0); i++)
1661 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1662 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1663 unsigned int bytelen = GET_MODE_SIZE (mode);
1664 int shift = 0;
1666 /* Handle trailing fragments that run over the size of the struct. */
1667 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1669 /* Arrange to shift the fragment to where it belongs.
1670 extract_bit_field loads to the lsb of the reg. */
1671 if (
1672 #ifdef BLOCK_REG_PADDING
1673 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1674 == (BYTES_BIG_ENDIAN ? upward : downward)
1675 #else
1676 BYTES_BIG_ENDIAN
1677 #endif
1679 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1680 bytelen = ssize - bytepos;
1681 gcc_assert (bytelen > 0);
1684 /* If we won't be loading directly from memory, protect the real source
1685 from strange tricks we might play; but make sure that the source can
1686 be loaded directly into the destination. */
1687 src = orig_src;
1688 if (!MEM_P (orig_src)
1689 && (!CONSTANT_P (orig_src)
1690 || (GET_MODE (orig_src) != mode
1691 && GET_MODE (orig_src) != VOIDmode)))
1693 if (GET_MODE (orig_src) == VOIDmode)
1694 src = gen_reg_rtx (mode);
1695 else
1696 src = gen_reg_rtx (GET_MODE (orig_src));
1698 emit_move_insn (src, orig_src);
1701 /* Optimize the access just a bit. */
1702 if (MEM_P (src)
1703 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1704 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
1705 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1706 && bytelen == GET_MODE_SIZE (mode))
1708 tmps[i] = gen_reg_rtx (mode);
1709 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
1711 else if (COMPLEX_MODE_P (mode)
1712 && GET_MODE (src) == mode
1713 && bytelen == GET_MODE_SIZE (mode))
1714 /* Let emit_move_complex do the bulk of the work. */
1715 tmps[i] = src;
1716 else if (GET_CODE (src) == CONCAT)
1718 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1719 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1721 if ((bytepos == 0 && bytelen == slen0)
1722 || (bytepos != 0 && bytepos + bytelen <= slen))
1724 /* The following assumes that the concatenated objects all
1725 have the same size. In this case, a simple calculation
1726 can be used to determine the object and the bit field
1727 to be extracted. */
1728 tmps[i] = XEXP (src, bytepos / slen0);
1729 if (! CONSTANT_P (tmps[i])
1730 && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
1731 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
1732 (bytepos % slen0) * BITS_PER_UNIT,
1733 1, NULL_RTX, mode, mode);
1735 else
1737 rtx mem;
1739 gcc_assert (!bytepos);
1740 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1741 emit_move_insn (mem, src);
1742 tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
1743 0, 1, NULL_RTX, mode, mode);
1746 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1747 SIMD register, which is currently broken. Until we get GCC
1748 to emit proper RTL for these cases, let's dump to memory. */
1749 else if (VECTOR_MODE_P (GET_MODE (dst))
1750 && REG_P (src))
1752 int slen = GET_MODE_SIZE (GET_MODE (src));
1753 rtx mem;
1755 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1756 emit_move_insn (mem, src);
1757 tmps[i] = adjust_address (mem, mode, (int) bytepos);
1759 else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
1760 && XVECLEN (dst, 0) > 1)
1761 tmps[i] = simplify_gen_subreg (mode, src, GET_MODE(dst), bytepos);
1762 else if (CONSTANT_P (src))
1764 HOST_WIDE_INT len = (HOST_WIDE_INT) bytelen;
1766 if (len == ssize)
1767 tmps[i] = src;
1768 else
1770 rtx first, second;
1772 gcc_assert (2 * len == ssize);
1773 split_double (src, &first, &second);
1774 if (i)
1775 tmps[i] = second;
1776 else
1777 tmps[i] = first;
1780 else if (REG_P (src) && GET_MODE (src) == mode)
1781 tmps[i] = src;
1782 else
1783 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1784 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
1785 mode, mode);
1787 if (shift)
1788 tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
1789 build_int_cst (NULL_TREE, shift), tmps[i], 0);
1793 /* Emit code to move a block SRC of type TYPE to a block DST,
1794 where DST is non-consecutive registers represented by a PARALLEL.
1795 SSIZE represents the total size of block SRC in bytes, or -1
1796 if not known. */
1798 void
1799 emit_group_load (rtx dst, rtx src, tree type, int ssize)
1801 rtx *tmps;
1802 int i;
1804 tmps = XALLOCAVEC (rtx, XVECLEN (dst, 0));
1805 emit_group_load_1 (tmps, dst, src, type, ssize);
1807 /* Copy the extracted pieces into the proper (probable) hard regs. */
1808 for (i = 0; i < XVECLEN (dst, 0); i++)
1810 rtx d = XEXP (XVECEXP (dst, 0, i), 0);
1811 if (d == NULL)
1812 continue;
1813 emit_move_insn (d, tmps[i]);
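/* A minimal usage sketch, assuming SRC_MEM is a BLKmode MEM holding a
   16-byte structure and GROUP is a PARALLEL of the shape shown after
   gen_group_rtx above; both names are hypothetical.  */
#if 0
static void
example_load_group (rtx group, rtx src_mem, tree type)
{
  /* Scatter the 16 bytes of SRC_MEM into the registers named by
     GROUP, at the byte offsets recorded in the PARALLEL.  */
  emit_group_load (group, src_mem, type, 16);
}
#endif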
1817 /* Similar, but load SRC into new pseudos in a format that looks like
1818 PARALLEL. This can later be fed to emit_group_move to get things
1819 in the right place. */
1822 emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
1824 rtvec vec;
1825 int i;
1827 vec = rtvec_alloc (XVECLEN (parallel, 0));
1828 emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);
1830 /* Convert the vector to look just like the original PARALLEL, except
1831 with the computed values. */
1832 for (i = 0; i < XVECLEN (parallel, 0); i++)
1834 rtx e = XVECEXP (parallel, 0, i);
1835 rtx d = XEXP (e, 0);
1837 if (d)
1839 d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
1840 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
1842 RTVEC_ELT (vec, i) = e;
1845 return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
1848 /* Emit code to move a block SRC to block DST, where SRC and DST are
1849 non-consecutive groups of registers, each represented by a PARALLEL. */
1851 void
1852 emit_group_move (rtx dst, rtx src)
1854 int i;
1856 gcc_assert (GET_CODE (src) == PARALLEL
1857 && GET_CODE (dst) == PARALLEL
1858 && XVECLEN (src, 0) == XVECLEN (dst, 0));
1860 /* Skip first entry if NULL. */
1861 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1862 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1863 XEXP (XVECEXP (src, 0, i), 0));
1866 /* Move a group of registers represented by a PARALLEL into pseudos. */
1869 emit_group_move_into_temps (rtx src)
1871 rtvec vec = rtvec_alloc (XVECLEN (src, 0));
1872 int i;
1874 for (i = 0; i < XVECLEN (src, 0); i++)
1876 rtx e = XVECEXP (src, 0, i);
1877 rtx d = XEXP (e, 0);
1879 if (d)
1880 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
1881 RTVEC_ELT (vec, i) = e;
1884 return gen_rtx_PARALLEL (GET_MODE (src), vec);
1887 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1888 where SRC is non-consecutive registers represented by a PARALLEL.
1889 SSIZE represents the total size of block ORIG_DST, or -1 if not
1890 known. */
1892 void
1893 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
1895 rtx *tmps, dst;
1896 int start, finish, i;
1897 enum machine_mode m = GET_MODE (orig_dst);
1899 gcc_assert (GET_CODE (src) == PARALLEL);
1901 if (!SCALAR_INT_MODE_P (m)
1902 && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
1904 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
1905 if (imode == BLKmode)
1906 dst = assign_stack_temp (GET_MODE (orig_dst), ssize, 0);
1907 else
1908 dst = gen_reg_rtx (imode);
1909 emit_group_store (dst, src, type, ssize);
1910 if (imode != BLKmode)
1911 dst = gen_lowpart (GET_MODE (orig_dst), dst);
1912 emit_move_insn (orig_dst, dst);
1913 return;
1916 /* Check for a NULL entry, used to indicate that the parameter goes
1917 both on the stack and in registers. */
1918 if (XEXP (XVECEXP (src, 0, 0), 0))
1919 start = 0;
1920 else
1921 start = 1;
1922 finish = XVECLEN (src, 0);
1924 tmps = XALLOCAVEC (rtx, finish);
1926 /* Copy the (probable) hard regs into pseudos. */
1927 for (i = start; i < finish; i++)
1929 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1930 if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
1932 tmps[i] = gen_reg_rtx (GET_MODE (reg));
1933 emit_move_insn (tmps[i], reg);
1935 else
1936 tmps[i] = reg;
1939 /* If we won't be storing directly into memory, protect the real destination
1940 from strange tricks we might play. */
1941 dst = orig_dst;
1942 if (GET_CODE (dst) == PARALLEL)
1944 rtx temp;
1946 /* We can get a PARALLEL dst if there is a conditional expression in
1947 a return statement. In that case, the dst and src are the same,
1948 so no action is necessary. */
1949 if (rtx_equal_p (dst, src))
1950 return;
1952 /* It is unclear if we can ever reach here, but we may as well handle
1953 it. Allocate a temporary, and split this into a store/load to/from
1954 the temporary. */
1956 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
1957 emit_group_store (temp, src, type, ssize);
1958 emit_group_load (dst, temp, type, ssize);
1959 return;
1961 else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
1963 enum machine_mode outer = GET_MODE (dst);
1964 enum machine_mode inner;
1965 HOST_WIDE_INT bytepos;
1966 bool done = false;
1967 rtx temp;
1969 if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
1970 dst = gen_reg_rtx (outer);
1972 /* Make life a bit easier for combine. */
1973 /* If the first element of the vector is the low part
1974 of the destination mode, use a paradoxical subreg to
1975 initialize the destination. */
1976 if (start < finish)
1978 inner = GET_MODE (tmps[start]);
1979 bytepos = subreg_lowpart_offset (inner, outer);
1980 if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
1982 temp = simplify_gen_subreg (outer, tmps[start],
1983 inner, 0);
1984 if (temp)
1986 emit_move_insn (dst, temp);
1987 done = true;
1988 start++;
1993 /* If the first element wasn't the low part, try the last. */
1994 if (!done
1995 && start < finish - 1)
1997 inner = GET_MODE (tmps[finish - 1]);
1998 bytepos = subreg_lowpart_offset (inner, outer);
1999 if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
2001 temp = simplify_gen_subreg (outer, tmps[finish - 1],
2002 inner, 0);
2003 if (temp)
2005 emit_move_insn (dst, temp);
2006 done = true;
2007 finish--;
2012 /* Otherwise, simply initialize the result to zero. */
2013 if (!done)
2014 emit_move_insn (dst, CONST0_RTX (outer));
2017 /* Process the pieces. */
2018 for (i = start; i < finish; i++)
2020 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2021 enum machine_mode mode = GET_MODE (tmps[i]);
2022 unsigned int bytelen = GET_MODE_SIZE (mode);
2023 unsigned int adj_bytelen = bytelen;
2024 rtx dest = dst;
2026 /* Handle trailing fragments that run over the size of the struct. */
2027 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2028 adj_bytelen = ssize - bytepos;
2030 if (GET_CODE (dst) == CONCAT)
2032 if (bytepos + adj_bytelen
2033 <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2034 dest = XEXP (dst, 0);
2035 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2037 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2038 dest = XEXP (dst, 1);
2040 else
2042 enum machine_mode dest_mode = GET_MODE (dest);
2043 enum machine_mode tmp_mode = GET_MODE (tmps[i]);
2045 gcc_assert (bytepos == 0 && XVECLEN (src, 0));
2047 if (GET_MODE_ALIGNMENT (dest_mode)
2048 >= GET_MODE_ALIGNMENT (tmp_mode))
2050 dest = assign_stack_temp (dest_mode,
2051 GET_MODE_SIZE (dest_mode),
2053 emit_move_insn (adjust_address (dest,
2054 tmp_mode,
2055 bytepos),
2056 tmps[i]);
2057 dst = dest;
2059 else
2061 dest = assign_stack_temp (tmp_mode,
2062 GET_MODE_SIZE (tmp_mode),
2064 emit_move_insn (dest, tmps[i]);
2065 dst = adjust_address (dest, dest_mode, bytepos);
2067 break;
2071 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2073 /* store_bit_field always takes its value from the lsb.
2074 Move the fragment to the lsb if it's not already there. */
2075 if (
2076 #ifdef BLOCK_REG_PADDING
2077 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2078 == (BYTES_BIG_ENDIAN ? upward : downward)
2079 #else
2080 BYTES_BIG_ENDIAN
2081 #endif
2084 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2085 tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
2086 build_int_cst (NULL_TREE, shift),
2087 tmps[i], 0);
2089 bytelen = adj_bytelen;
2092 /* Optimize the access just a bit. */
2093 if (MEM_P (dest)
2094 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2095 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2096 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2097 && bytelen == GET_MODE_SIZE (mode))
2098 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2099 else
2100 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2101 mode, tmps[i]);
2104 /* Copy from the pseudo into the (probable) hard reg. */
2105 if (orig_dst != dst)
2106 emit_move_insn (orig_dst, dst);
2109 /* Generate code to copy a BLKmode object of TYPE out of a
2110 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2111 is null, a stack temporary is created. TGTBLK is returned.
2113 The purpose of this routine is to handle functions that return
2114 BLKmode structures in registers. Some machines (the PA for example)
2115 want to return all small structures in registers regardless of the
2116 structure's alignment. */
2119 copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
2121 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2122 rtx src = NULL, dst = NULL;
2123 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2124 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2125 enum machine_mode copy_mode;
2127 if (tgtblk == 0)
2129 tgtblk = assign_temp (build_qualified_type (type,
2130 (TYPE_QUALS (type)
2131 | TYPE_QUAL_CONST)),
2132 0, 1, 1);
2133 preserve_temp_slots (tgtblk);
2136 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2137 into a new pseudo which is a full word. */
2139 if (GET_MODE (srcreg) != BLKmode
2140 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2141 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2143 /* If the structure doesn't take up a whole number of words, see whether
2144 SRCREG is padded on the left or on the right. If it's on the left,
2145 set PADDING_CORRECTION to the number of bits to skip.
2147 In most ABIs, the structure will be returned at the least significant end of
2148 the register, which translates to right padding on little-endian
2149 targets and left padding on big-endian targets. The opposite
2150 holds if the structure is returned at the most significant
2151 end of the register. */
2152 if (bytes % UNITS_PER_WORD != 0
2153 && (targetm.calls.return_in_msb (type)
2154 ? !BYTES_BIG_ENDIAN
2155 : BYTES_BIG_ENDIAN))
2156 padding_correction
2157 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
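  /* Worked example (editorial): with 32-bit words, a 6-byte structure
     returned at the least significant end of big-endian registers gives
     bytes % UNITS_PER_WORD == 2, so PADDING_CORRECTION is
     32 - 2 * BITS_PER_UNIT == 16 bits to skip in the first source word.  */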
2159 /* Copy the structure BITSIZE bits at a time. If the target lives in
2160 memory, take care of not reading/writing past its end by selecting
2161 a copy mode suited to BITSIZE. This should always be possible given
2162 how it is computed.
2164 We could probably emit more efficient code for machines which do not use
2165 strict alignment, but it doesn't seem worth the effort at the current
2166 time. */
2168 copy_mode = word_mode;
2169 if (MEM_P (tgtblk))
2171 enum machine_mode mem_mode = mode_for_size (bitsize, MODE_INT, 1);
2172 if (mem_mode != BLKmode)
2173 copy_mode = mem_mode;
2176 for (bitpos = 0, xbitpos = padding_correction;
2177 bitpos < bytes * BITS_PER_UNIT;
2178 bitpos += bitsize, xbitpos += bitsize)
2180 /* We need a new source operand each time xbitpos is on a
2181 word boundary and when xbitpos == padding_correction
2182 (the first time through). */
2183 if (xbitpos % BITS_PER_WORD == 0
2184 || xbitpos == padding_correction)
2185 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2186 GET_MODE (srcreg));
2188 /* We need a new destination operand each time bitpos is on
2189 a word boundary. */
2190 if (bitpos % BITS_PER_WORD == 0)
2191 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2193 /* Use xbitpos for the source extraction (right justified) and
2194 bitpos for the destination store (left justified). */
2195 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, copy_mode,
2196 extract_bit_field (src, bitsize,
2197 xbitpos % BITS_PER_WORD, 1,
2198 NULL_RTX, copy_mode, copy_mode));
2201 return tgtblk;
2204 /* Add a USE expression for REG to the (possibly empty) list pointed
2205 to by CALL_FUSAGE. REG must denote a hard register. */
2207 void
2208 use_reg (rtx *call_fusage, rtx reg)
2210 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2212 *call_fusage
2213 = gen_rtx_EXPR_LIST (VOIDmode,
2214 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2217 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2218 starting at REGNO. All of these registers must be hard registers. */
2220 void
2221 use_regs (rtx *call_fusage, int regno, int nregs)
2223 int i;
2225 gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2227 for (i = 0; i < nregs; i++)
2228 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2231 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2232 PARALLEL REGS. This is for calls that pass values in multiple
2233 non-contiguous locations. The Irix 6 ABI has examples of this. */
2235 void
2236 use_group_regs (rtx *call_fusage, rtx regs)
2238 int i;
2240 for (i = 0; i < XVECLEN (regs, 0); i++)
2242 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2244 /* A NULL entry means the parameter goes both on the stack and in
2245 registers. This can also be a MEM for targets that pass values
2246 partially on the stack and partially in registers. */
2247 if (reg != 0 && REG_P (reg))
2248 use_reg (call_fusage, reg);
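/* Editorial illustration -- not part of GCC.  A minimal sketch of building
   the list that eventually becomes CALL_INSN_FUNCTION_USAGE for a call that
   passes values in hard registers; register numbers 0, 1 and 2 and the
   SImode choice are assumptions made only for the example.  */
#if 0
static void
example_call_fusage (void)
{
  rtx call_fusage = NULL_RTX;

  use_reg (&call_fusage, gen_rtx_REG (SImode, 0));  /* Hard register 0.  */
  use_regs (&call_fusage, 1, 2);                    /* Hard registers 1 and 2.  */

  /* CALL_FUSAGE would then be attached to the CALL_INSN by the caller.  */
}
#endif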
2253 /* Determine whether the LEN bytes generated by CONSTFUN can be
2254 stored to memory using several move instructions. CONSTFUNDATA is
2255 a pointer which will be passed as argument in every CONSTFUN call.
2256 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2257 a memset operation and false if it's a copy of a constant string.
2258 Return nonzero if a call to store_by_pieces should succeed. */
2261 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2262 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2263 void *constfundata, unsigned int align, bool memsetp)
2265 unsigned HOST_WIDE_INT l;
2266 unsigned int max_size;
2267 HOST_WIDE_INT offset = 0;
2268 enum machine_mode mode, tmode;
2269 enum insn_code icode;
2270 int reverse;
2271 rtx cst;
2273 if (len == 0)
2274 return 1;
2276 if (! (memsetp
2277 ? SET_BY_PIECES_P (len, align)
2278 : STORE_BY_PIECES_P (len, align)))
2279 return 0;
2281 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2282 if (align >= GET_MODE_ALIGNMENT (tmode))
2283 align = GET_MODE_ALIGNMENT (tmode);
2284 else
2286 enum machine_mode xmode;
2288 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2289 tmode != VOIDmode;
2290 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2291 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2292 || SLOW_UNALIGNED_ACCESS (tmode, align))
2293 break;
2295 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2298 /* We would first store what we can in the largest integer mode, then go to
2299 successively smaller modes. */
2301 for (reverse = 0;
2302 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2303 reverse++)
2305 l = len;
2306 mode = VOIDmode;
2307 max_size = STORE_MAX_PIECES + 1;
2308 while (max_size > 1)
2310 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2311 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2312 if (GET_MODE_SIZE (tmode) < max_size)
2313 mode = tmode;
2315 if (mode == VOIDmode)
2316 break;
2318 icode = optab_handler (mov_optab, mode)->insn_code;
2319 if (icode != CODE_FOR_nothing
2320 && align >= GET_MODE_ALIGNMENT (mode))
2322 unsigned int size = GET_MODE_SIZE (mode);
2324 while (l >= size)
2326 if (reverse)
2327 offset -= size;
2329 cst = (*constfun) (constfundata, offset, mode);
2330 if (!LEGITIMATE_CONSTANT_P (cst))
2331 return 0;
2333 if (!reverse)
2334 offset += size;
2336 l -= size;
2340 max_size = GET_MODE_SIZE (mode);
2343 /* The code above should have handled everything. */
2344 gcc_assert (!l);
2347 return 1;
2350 /* Generate several move instructions to store LEN bytes generated by
2351 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2352 pointer which will be passed as argument in every CONSTFUN call.
2353 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2354 a memset operation and false if it's a copy of a constant string.
2355 If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
2356 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
2357 stpcpy. */
2360 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2361 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2362 void *constfundata, unsigned int align, bool memsetp, int endp)
2364 struct store_by_pieces data;
2366 if (len == 0)
2368 gcc_assert (endp != 2);
2369 return to;
2372 gcc_assert (memsetp
2373 ? SET_BY_PIECES_P (len, align)
2374 : STORE_BY_PIECES_P (len, align));
2375 data.constfun = constfun;
2376 data.constfundata = constfundata;
2377 data.len = len;
2378 data.to = to;
2379 store_by_pieces_1 (&data, align);
2380 if (endp)
2382 rtx to1;
2384 gcc_assert (!data.reverse);
2385 if (data.autinc_to)
2387 if (endp == 2)
2389 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2390 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2391 else
2392 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2393 -1));
2395 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2396 data.offset);
2398 else
2400 if (endp == 2)
2401 --data.offset;
2402 to1 = adjust_address (data.to, QImode, data.offset);
2404 return to1;
2406 else
2407 return data.to;
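/* Editorial illustration -- not part of GCC.  A minimal sketch of the
   CONSTFUN protocol: a callback that yields a zero of the requested mode
   for every piece, guarded by can_store_by_pieces before the real call.
   DEST and the 32-byte length are hypothetical.  */
#if 0
static rtx
zero_piece (void *data ATTRIBUTE_UNUSED, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
            enum machine_mode mode)
{
  return CONST0_RTX (mode);
}

static void
example_store_zeros (rtx dest)
{
  if (can_store_by_pieces (32, zero_piece, NULL, MEM_ALIGN (dest), true))
    store_by_pieces (dest, 32, zero_piece, NULL, MEM_ALIGN (dest), true, 0);
}
#endif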
2410 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2411 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2413 static void
2414 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2416 struct store_by_pieces data;
2418 if (len == 0)
2419 return;
2421 data.constfun = clear_by_pieces_1;
2422 data.constfundata = NULL;
2423 data.len = len;
2424 data.to = to;
2425 store_by_pieces_1 (&data, align);
2428 /* Callback routine for clear_by_pieces.
2429 Return const0_rtx unconditionally. */
2431 static rtx
2432 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2433 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2434 enum machine_mode mode ATTRIBUTE_UNUSED)
2436 return const0_rtx;
2439 /* Subroutine of clear_by_pieces and store_by_pieces.
2440 Generate several move instructions to store LEN bytes of block TO. (A MEM
2441 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2443 static void
2444 store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
2445 unsigned int align ATTRIBUTE_UNUSED)
2447 rtx to_addr = XEXP (data->to, 0);
2448 unsigned int max_size = STORE_MAX_PIECES + 1;
2449 enum machine_mode mode = VOIDmode, tmode;
2450 enum insn_code icode;
2452 data->offset = 0;
2453 data->to_addr = to_addr;
2454 data->autinc_to
2455 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2456 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2458 data->explicit_inc_to = 0;
2459 data->reverse
2460 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2461 if (data->reverse)
2462 data->offset = data->len;
2464 /* If storing requires more than two move insns,
2465 copy addresses to registers (to make displacements shorter)
2466 and use post-increment if available. */
2467 if (!data->autinc_to
2468 && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2470 /* Determine the main mode we'll be using. */
2471 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2472 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2473 if (GET_MODE_SIZE (tmode) < max_size)
2474 mode = tmode;
2476 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2478 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2479 data->autinc_to = 1;
2480 data->explicit_inc_to = -1;
2483 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2484 && ! data->autinc_to)
2486 data->to_addr = copy_addr_to_reg (to_addr);
2487 data->autinc_to = 1;
2488 data->explicit_inc_to = 1;
2491 if (!data->autinc_to && CONSTANT_P (to_addr))
2492 data->to_addr = copy_addr_to_reg (to_addr);
2495 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2496 if (align >= GET_MODE_ALIGNMENT (tmode))
2497 align = GET_MODE_ALIGNMENT (tmode);
2498 else
2500 enum machine_mode xmode;
2502 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2503 tmode != VOIDmode;
2504 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2505 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2506 || SLOW_UNALIGNED_ACCESS (tmode, align))
2507 break;
2509 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2512 /* First store what we can in the largest integer mode, then go to
2513 successively smaller modes. */
2515 while (max_size > 1)
2517 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2518 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2519 if (GET_MODE_SIZE (tmode) < max_size)
2520 mode = tmode;
2522 if (mode == VOIDmode)
2523 break;
2525 icode = optab_handler (mov_optab, mode)->insn_code;
2526 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2527 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2529 max_size = GET_MODE_SIZE (mode);
2532 /* The code above should have handled everything. */
2533 gcc_assert (!data->len);
2536 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2537 with move instructions for mode MODE. GENFUN is the gen_... function
2538 to make a move insn for that mode. DATA has all the other info. */
2540 static void
2541 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2542 struct store_by_pieces *data)
2544 unsigned int size = GET_MODE_SIZE (mode);
2545 rtx to1, cst;
2547 while (data->len >= size)
2549 if (data->reverse)
2550 data->offset -= size;
2552 if (data->autinc_to)
2553 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2554 data->offset);
2555 else
2556 to1 = adjust_address (data->to, mode, data->offset);
2558 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2559 emit_insn (gen_add2_insn (data->to_addr,
2560 GEN_INT (-(HOST_WIDE_INT) size)));
2562 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2563 emit_insn ((*genfun) (to1, cst));
2565 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2566 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2568 if (! data->reverse)
2569 data->offset += size;
2571 data->len -= size;
2575 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2576 its length in bytes. */
2579 clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
2580 unsigned int expected_align, HOST_WIDE_INT expected_size)
2582 enum machine_mode mode = GET_MODE (object);
2583 unsigned int align;
2585 gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2587 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2588 just move a zero. Otherwise, do this a piece at a time. */
2589 if (mode != BLKmode
2590 && GET_CODE (size) == CONST_INT
2591 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
2593 rtx zero = CONST0_RTX (mode);
2594 if (zero != NULL)
2596 emit_move_insn (object, zero);
2597 return NULL;
2600 if (COMPLEX_MODE_P (mode))
2602 zero = CONST0_RTX (GET_MODE_INNER (mode));
2603 if (zero != NULL)
2605 write_complex_part (object, zero, 0);
2606 write_complex_part (object, zero, 1);
2607 return NULL;
2612 if (size == const0_rtx)
2613 return NULL;
2615 align = MEM_ALIGN (object);
2617 if (GET_CODE (size) == CONST_INT
2618 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2619 clear_by_pieces (object, INTVAL (size), align);
2620 else if (set_storage_via_setmem (object, size, const0_rtx, align,
2621 expected_align, expected_size))
2623 else
2624 return set_storage_via_libcall (object, size, const0_rtx,
2625 method == BLOCK_OP_TAILCALL);
2627 return NULL;
2631 clear_storage (rtx object, rtx size, enum block_op_methods method)
2633 return clear_storage_hints (object, size, method, 0, -1);
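/* Editorial illustration -- not part of GCC.  Zeroing a hypothetical
   64-byte BLKmode MEM named DEST with the default strategy.  */
#if 0
static void
example_clear_64 (rtx dest)
{
  clear_storage (dest, GEN_INT (64), BLOCK_OP_NORMAL);
}
#endif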
2637 /* A subroutine of clear_storage. Expand a call to memset.
2638 Return the return value of memset, 0 otherwise. */
2641 set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
2643 tree call_expr, fn, object_tree, size_tree, val_tree;
2644 enum machine_mode size_mode;
2645 rtx retval;
2647 /* Emit code to copy OBJECT and SIZE into new pseudos. We can then
2648 place those pseudos into a VAR_DECL and use them later. */
2650 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2652 size_mode = TYPE_MODE (sizetype);
2653 size = convert_to_mode (size_mode, size, 1);
2654 size = copy_to_mode_reg (size_mode, size);
2656 /* It is incorrect to use the libcall calling conventions to call
2657 memset in this context. This could be a user call to memset and
2658 the user may wish to examine the return value from memset. For
2659 targets where libcalls and normal calls have different conventions
2660 for returning pointers, we could end up generating incorrect code. */
2662 object_tree = make_tree (ptr_type_node, object);
2663 if (GET_CODE (val) != CONST_INT)
2664 val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
2665 size_tree = make_tree (sizetype, size);
2666 val_tree = make_tree (integer_type_node, val);
2668 fn = clear_storage_libcall_fn (true);
2669 call_expr = build_call_expr (fn, 3,
2670 object_tree, integer_zero_node, size_tree);
2671 CALL_EXPR_TAILCALL (call_expr) = tailcall;
2673 retval = expand_normal (call_expr);
2675 return retval;
2678 /* A subroutine of set_storage_via_libcall. Create the tree node
2679 for the function we use for block clears. The first time FOR_CALL
2680 is true, we call assemble_external. */
2682 tree block_clear_fn;
2684 void
2685 init_block_clear_fn (const char *asmspec)
2687 if (!block_clear_fn)
2689 tree fn, args;
2691 fn = get_identifier ("memset");
2692 args = build_function_type_list (ptr_type_node, ptr_type_node,
2693 integer_type_node, sizetype,
2694 NULL_TREE);
2696 fn = build_decl (FUNCTION_DECL, fn, args);
2697 DECL_EXTERNAL (fn) = 1;
2698 TREE_PUBLIC (fn) = 1;
2699 DECL_ARTIFICIAL (fn) = 1;
2700 TREE_NOTHROW (fn) = 1;
2701 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
2702 DECL_VISIBILITY_SPECIFIED (fn) = 1;
2704 block_clear_fn = fn;
2707 if (asmspec)
2708 set_user_assembler_name (block_clear_fn, asmspec);
2711 static tree
2712 clear_storage_libcall_fn (int for_call)
2714 static bool emitted_extern;
2716 if (!block_clear_fn)
2717 init_block_clear_fn (NULL);
2719 if (for_call && !emitted_extern)
2721 emitted_extern = true;
2722 make_decl_rtl (block_clear_fn);
2723 assemble_external (block_clear_fn);
2726 return block_clear_fn;
2729 /* Expand a setmem pattern; return true if successful. */
2731 bool
2732 set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
2733 unsigned int expected_align, HOST_WIDE_INT expected_size)
2735 /* Try the most limited insn first, because there's no point
2736 including more than one in the machine description unless
2737 the more limited one has some advantage. */
2739 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2740 enum machine_mode mode;
2742 if (expected_align < align)
2743 expected_align = align;
2745 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2746 mode = GET_MODE_WIDER_MODE (mode))
2748 enum insn_code code = setmem_optab[(int) mode];
2749 insn_operand_predicate_fn pred;
2751 if (code != CODE_FOR_nothing
2752 /* We don't need MODE to be narrower than
2753 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2754 the mode mask, as it is returned by the macro, it will
2755 definitely be less than the actual mode mask. */
2756 && ((GET_CODE (size) == CONST_INT
2757 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2758 <= (GET_MODE_MASK (mode) >> 1)))
2759 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2760 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2761 || (*pred) (object, BLKmode))
2762 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
2763 || (*pred) (opalign, VOIDmode)))
2765 rtx opsize, opchar;
2766 enum machine_mode char_mode;
2767 rtx last = get_last_insn ();
2768 rtx pat;
2770 opsize = convert_to_mode (mode, size, 1);
2771 pred = insn_data[(int) code].operand[1].predicate;
2772 if (pred != 0 && ! (*pred) (opsize, mode))
2773 opsize = copy_to_mode_reg (mode, opsize);
2775 opchar = val;
2776 char_mode = insn_data[(int) code].operand[2].mode;
2777 if (char_mode != VOIDmode)
2779 opchar = convert_to_mode (char_mode, opchar, 1);
2780 pred = insn_data[(int) code].operand[2].predicate;
2781 if (pred != 0 && ! (*pred) (opchar, char_mode))
2782 opchar = copy_to_mode_reg (char_mode, opchar);
2785 if (insn_data[(int) code].n_operands == 4)
2786 pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign);
2787 else
2788 pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign,
2789 GEN_INT (expected_align
2790 / BITS_PER_UNIT),
2791 GEN_INT (expected_size));
2792 if (pat)
2794 emit_insn (pat);
2795 return true;
2797 else
2798 delete_insns_since (last);
2802 return false;
2806 /* Write to one of the components of the complex value CPLX. Write VAL to
2807 the real part if IMAG_P is false, and the imaginary part if it's true. */
2809 static void
2810 write_complex_part (rtx cplx, rtx val, bool imag_p)
2812 enum machine_mode cmode;
2813 enum machine_mode imode;
2814 unsigned ibitsize;
2816 if (GET_CODE (cplx) == CONCAT)
2818 emit_move_insn (XEXP (cplx, imag_p), val);
2819 return;
2822 cmode = GET_MODE (cplx);
2823 imode = GET_MODE_INNER (cmode);
2824 ibitsize = GET_MODE_BITSIZE (imode);
2826 /* For MEMs simplify_gen_subreg may generate an invalid new address
2827 because, e.g., the original address is considered mode-dependent
2828 by the target, which restricts simplify_subreg from invoking
2829 adjust_address_nv. Instead of preparing fallback support for an
2830 invalid address, we call adjust_address_nv directly. */
2831 if (MEM_P (cplx))
2833 emit_move_insn (adjust_address_nv (cplx, imode,
2834 imag_p ? GET_MODE_SIZE (imode) : 0),
2835 val);
2836 return;
2839 /* If the sub-object is at least word sized, then we know that subregging
2840 will work. This special case is important, since store_bit_field
2841 wants to operate on integer modes, and there's rarely an OImode to
2842 correspond to TCmode. */
2843 if (ibitsize >= BITS_PER_WORD
2844 /* For hard regs we have exact predicates. Assume we can split
2845 the original object if it spans an even number of hard regs.
2846 This special case is important for SCmode on 64-bit platforms
2847 where the natural size of floating-point regs is 32-bit. */
2848 || (REG_P (cplx)
2849 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2850 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2852 rtx part = simplify_gen_subreg (imode, cplx, cmode,
2853 imag_p ? GET_MODE_SIZE (imode) : 0);
2854 if (part)
2856 emit_move_insn (part, val);
2857 return;
2859 else
2860 /* simplify_gen_subreg may fail for sub-word MEMs. */
2861 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2864 store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, imode, val);
2867 /* Extract one of the components of the complex value CPLX. Extract the
2868 real part if IMAG_P is false, and the imaginary part if it's true. */
2870 static rtx
2871 read_complex_part (rtx cplx, bool imag_p)
2873 enum machine_mode cmode, imode;
2874 unsigned ibitsize;
2876 if (GET_CODE (cplx) == CONCAT)
2877 return XEXP (cplx, imag_p);
2879 cmode = GET_MODE (cplx);
2880 imode = GET_MODE_INNER (cmode);
2881 ibitsize = GET_MODE_BITSIZE (imode);
2883 /* Special case reads from complex constants that got spilled to memory. */
2884 if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
2886 tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
2887 if (decl && TREE_CODE (decl) == COMPLEX_CST)
2889 tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
2890 if (CONSTANT_CLASS_P (part))
2891 return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
2895 /* For MEMs simplify_gen_subreg may generate an invalid new address
2896 because, e.g., the original address is considered mode-dependent
2897 by the target, which restricts simplify_subreg from invoking
2898 adjust_address_nv. Instead of preparing fallback support for an
2899 invalid address, we call adjust_address_nv directly. */
2900 if (MEM_P (cplx))
2901 return adjust_address_nv (cplx, imode,
2902 imag_p ? GET_MODE_SIZE (imode) : 0);
2904 /* If the sub-object is at least word sized, then we know that subregging
2905 will work. This special case is important, since extract_bit_field
2906 wants to operate on integer modes, and there's rarely an OImode to
2907 correspond to TCmode. */
2908 if (ibitsize >= BITS_PER_WORD
2909 /* For hard regs we have exact predicates. Assume we can split
2910 the original object if it spans an even number of hard regs.
2911 This special case is important for SCmode on 64-bit platforms
2912 where the natural size of floating-point regs is 32-bit. */
2913 || (REG_P (cplx)
2914 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2915 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2917 rtx ret = simplify_gen_subreg (imode, cplx, cmode,
2918 imag_p ? GET_MODE_SIZE (imode) : 0);
2919 if (ret)
2920 return ret;
2921 else
2922 /* simplify_gen_subreg may fail for sub-word MEMs. */
2923 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2926 return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
2927 true, NULL_RTX, imode, imode);
2930 /* A subroutine of emit_move_insn_1. Yet another lowpart generator.
2931 NEW_MODE and OLD_MODE are the same size. Return NULL if X cannot be
2932 represented in NEW_MODE. If FORCE is true, this will never happen, as
2933 we'll force-create a SUBREG if needed. */
2935 static rtx
2936 emit_move_change_mode (enum machine_mode new_mode,
2937 enum machine_mode old_mode, rtx x, bool force)
2939 rtx ret;
2941 if (push_operand (x, GET_MODE (x)))
2943 ret = gen_rtx_MEM (new_mode, XEXP (x, 0));
2944 MEM_COPY_ATTRIBUTES (ret, x);
2946 else if (MEM_P (x))
2948 /* We don't have to worry about changing the address since the
2949 size in bytes is supposed to be the same. */
2950 if (reload_in_progress)
2952 /* Copy the MEM to change the mode and move any
2953 substitutions from the old MEM to the new one. */
2954 ret = adjust_address_nv (x, new_mode, 0);
2955 copy_replacements (x, ret);
2957 else
2958 ret = adjust_address (x, new_mode, 0);
2960 else
2962 /* Note that we do want simplify_subreg's behavior of validating
2963 that the new mode is ok for a hard register. If we were to use
2964 simplify_gen_subreg, we would create the subreg, but would
2965 probably run into the target not being able to implement it. */
2966 /* Except, of course, when FORCE is true, when this is exactly what
2967 we want. Which is needed for CCmodes on some targets. */
2968 if (force)
2969 ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
2970 else
2971 ret = simplify_subreg (new_mode, x, old_mode, 0);
2974 return ret;
2977 /* A subroutine of emit_move_insn_1. Generate a move from Y into X using
2978 an integer mode of the same size as MODE. Returns the instruction
2979 emitted, or NULL if such a move could not be generated. */
2981 static rtx
2982 emit_move_via_integer (enum machine_mode mode, rtx x, rtx y, bool force)
2984 enum machine_mode imode;
2985 enum insn_code code;
2987 /* There must exist a mode of the exact size we require. */
2988 imode = int_mode_for_mode (mode);
2989 if (imode == BLKmode)
2990 return NULL_RTX;
2992 /* The target must support moves in this mode. */
2993 code = optab_handler (mov_optab, imode)->insn_code;
2994 if (code == CODE_FOR_nothing)
2995 return NULL_RTX;
2997 x = emit_move_change_mode (imode, mode, x, force);
2998 if (x == NULL_RTX)
2999 return NULL_RTX;
3000 y = emit_move_change_mode (imode, mode, y, force);
3001 if (y == NULL_RTX)
3002 return NULL_RTX;
3003 return emit_insn (GEN_FCN (code) (x, y));
3006 /* A subroutine of emit_move_insn_1. X is a push_operand in MODE.
3007 Return an equivalent MEM that does not use an auto-increment. */
3009 static rtx
3010 emit_move_resolve_push (enum machine_mode mode, rtx x)
3012 enum rtx_code code = GET_CODE (XEXP (x, 0));
3013 HOST_WIDE_INT adjust;
3014 rtx temp;
3016 adjust = GET_MODE_SIZE (mode);
3017 #ifdef PUSH_ROUNDING
3018 adjust = PUSH_ROUNDING (adjust);
3019 #endif
3020 if (code == PRE_DEC || code == POST_DEC)
3021 adjust = -adjust;
3022 else if (code == PRE_MODIFY || code == POST_MODIFY)
3024 rtx expr = XEXP (XEXP (x, 0), 1);
3025 HOST_WIDE_INT val;
3027 gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
3028 gcc_assert (GET_CODE (XEXP (expr, 1)) == CONST_INT);
3029 val = INTVAL (XEXP (expr, 1));
3030 if (GET_CODE (expr) == MINUS)
3031 val = -val;
3032 gcc_assert (adjust == val || adjust == -val);
3033 adjust = val;
3036 /* Do not use anti_adjust_stack, since we don't want to update
3037 stack_pointer_delta. */
3038 temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
3039 GEN_INT (adjust), stack_pointer_rtx,
3040 0, OPTAB_LIB_WIDEN);
3041 if (temp != stack_pointer_rtx)
3042 emit_move_insn (stack_pointer_rtx, temp);
3044 switch (code)
3046 case PRE_INC:
3047 case PRE_DEC:
3048 case PRE_MODIFY:
3049 temp = stack_pointer_rtx;
3050 break;
3051 case POST_INC:
3052 case POST_DEC:
3053 case POST_MODIFY:
3054 temp = plus_constant (stack_pointer_rtx, -adjust);
3055 break;
3056 default:
3057 gcc_unreachable ();
3060 return replace_equiv_address (x, temp);
3063 /* A subroutine of emit_move_complex. Generate a move from Y into X.
3064 X is known to satisfy push_operand, and MODE is known to be complex.
3065 Returns the last instruction emitted. */
3068 emit_move_complex_push (enum machine_mode mode, rtx x, rtx y)
3070 enum machine_mode submode = GET_MODE_INNER (mode);
3071 bool imag_first;
3073 #ifdef PUSH_ROUNDING
3074 unsigned int submodesize = GET_MODE_SIZE (submode);
3076 /* In case we output to the stack, but the size is smaller than the
3077 machine can push exactly, we need to use move instructions. */
3078 if (PUSH_ROUNDING (submodesize) != submodesize)
3080 x = emit_move_resolve_push (mode, x);
3081 return emit_move_insn (x, y);
3083 #endif
3085 /* Note that the real part always precedes the imag part in memory
3086 regardless of the machine's endianness. */
3087 switch (GET_CODE (XEXP (x, 0)))
3089 case PRE_DEC:
3090 case POST_DEC:
3091 imag_first = true;
3092 break;
3093 case PRE_INC:
3094 case POST_INC:
3095 imag_first = false;
3096 break;
3097 default:
3098 gcc_unreachable ();
3101 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3102 read_complex_part (y, imag_first));
3103 return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3104 read_complex_part (y, !imag_first));
3107 /* A subroutine of emit_move_complex. Perform the move from Y to X
3108 via two moves of the parts. Returns the last instruction emitted. */
3111 emit_move_complex_parts (rtx x, rtx y)
3113 /* Show the output dies here. This is necessary for SUBREGs
3114 of pseudos since we cannot track their lifetimes correctly;
3115 hard regs shouldn't appear here except as return values. */
3116 if (!reload_completed && !reload_in_progress
3117 && REG_P (x) && !reg_overlap_mentioned_p (x, y))
3118 emit_clobber (x);
3120 write_complex_part (x, read_complex_part (y, false), false);
3121 write_complex_part (x, read_complex_part (y, true), true);
3123 return get_last_insn ();
3126 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3127 MODE is known to be complex. Returns the last instruction emitted. */
3129 static rtx
3130 emit_move_complex (enum machine_mode mode, rtx x, rtx y)
3132 bool try_int;
3134 /* Need to take special care for pushes, to maintain proper ordering
3135 of the data, and possibly extra padding. */
3136 if (push_operand (x, mode))
3137 return emit_move_complex_push (mode, x, y);
3139 /* See if we can coerce the target into moving both values at once. */
3141 /* Move floating point as parts. */
3142 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
3143 && optab_handler (mov_optab, GET_MODE_INNER (mode))->insn_code != CODE_FOR_nothing)
3144 try_int = false;
3145 /* Not possible if the values are inherently not adjacent. */
3146 else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
3147 try_int = false;
3148 /* Is possible if both are registers (or subregs of registers). */
3149 else if (register_operand (x, mode) && register_operand (y, mode))
3150 try_int = true;
3151 /* If one of the operands is a memory, and alignment constraints
3152 are friendly enough, we may be able to do combined memory operations.
3153 We do not attempt this if Y is a constant because that combination is
3154 usually better with the by-parts thing below. */
3155 else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
3156 && (!STRICT_ALIGNMENT
3157 || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
3158 try_int = true;
3159 else
3160 try_int = false;
3162 if (try_int)
3164 rtx ret;
3166 /* For memory to memory moves, optimal behavior can be had with the
3167 existing block move logic. */
3168 if (MEM_P (x) && MEM_P (y))
3170 emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
3171 BLOCK_OP_NO_LIBCALL);
3172 return get_last_insn ();
3175 ret = emit_move_via_integer (mode, x, y, true);
3176 if (ret)
3177 return ret;
3180 return emit_move_complex_parts (x, y);
3183 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3184 MODE is known to be MODE_CC. Returns the last instruction emitted. */
3186 static rtx
3187 emit_move_ccmode (enum machine_mode mode, rtx x, rtx y)
3189 rtx ret;
3191 /* Assume all MODE_CC modes are equivalent; if we have movcc, use it. */
3192 if (mode != CCmode)
3194 enum insn_code code = optab_handler (mov_optab, CCmode)->insn_code;
3195 if (code != CODE_FOR_nothing)
3197 x = emit_move_change_mode (CCmode, mode, x, true);
3198 y = emit_move_change_mode (CCmode, mode, y, true);
3199 return emit_insn (GEN_FCN (code) (x, y));
3203 /* Otherwise, find the MODE_INT mode of the same width. */
3204 ret = emit_move_via_integer (mode, x, y, false);
3205 gcc_assert (ret != NULL);
3206 return ret;
3209 /* Return true if word I of OP lies entirely in the
3210 undefined bits of a paradoxical subreg. */
3212 static bool
3213 undefined_operand_subword_p (const_rtx op, int i)
3215 enum machine_mode innermode, innermostmode;
3216 int offset;
3217 if (GET_CODE (op) != SUBREG)
3218 return false;
3219 innermode = GET_MODE (op);
3220 innermostmode = GET_MODE (SUBREG_REG (op));
3221 offset = i * UNITS_PER_WORD + SUBREG_BYTE (op);
3222 /* The SUBREG_BYTE represents offset, as if the value were stored in
3223 memory, except for a paradoxical subreg where we define
3224 SUBREG_BYTE to be 0; undo this exception as in
3225 simplify_subreg. */
3226 if (SUBREG_BYTE (op) == 0
3227 && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
3229 int difference = (GET_MODE_SIZE (innermostmode) - GET_MODE_SIZE (innermode));
3230 if (WORDS_BIG_ENDIAN)
3231 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
3232 if (BYTES_BIG_ENDIAN)
3233 offset += difference % UNITS_PER_WORD;
3235 if (offset >= GET_MODE_SIZE (innermostmode)
3236 || offset <= -GET_MODE_SIZE (word_mode))
3237 return true;
3238 return false;
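/* Editorial example: for (subreg:TI (reg:DI x) 0) with 32-bit words on a
   little-endian target, words 0 and 1 overlap the DImode source while
   words 2 and 3 fall past its end, so the latter are undefined and
   emit_move_multi_word can skip moving them.  */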
3241 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3242 MODE is any multi-word or full-word mode that lacks a move_insn
3243 pattern. Note that you will get better code if you define such
3244 patterns, even if they must turn into multiple assembler instructions. */
3246 static rtx
3247 emit_move_multi_word (enum machine_mode mode, rtx x, rtx y)
3249 rtx last_insn = 0;
3250 rtx seq, inner;
3251 bool need_clobber;
3252 int i;
3254 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
3256 /* If X is a push on the stack, do the push now and replace
3257 X with a reference to the stack pointer. */
3258 if (push_operand (x, mode))
3259 x = emit_move_resolve_push (mode, x);
3261 /* If we are in reload, see if either operand is a MEM whose address
3262 is scheduled for replacement. */
3263 if (reload_in_progress && MEM_P (x)
3264 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3265 x = replace_equiv_address_nv (x, inner);
3266 if (reload_in_progress && MEM_P (y)
3267 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3268 y = replace_equiv_address_nv (y, inner);
3270 start_sequence ();
3272 need_clobber = false;
3273 for (i = 0;
3274 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3275 i++)
3277 rtx xpart = operand_subword (x, i, 1, mode);
3278 rtx ypart;
3280 /* Do not generate code for a move if it would come entirely
3281 from the undefined bits of a paradoxical subreg. */
3282 if (undefined_operand_subword_p (y, i))
3283 continue;
3285 ypart = operand_subword (y, i, 1, mode);
3287 /* If we can't get a part of Y, put Y into memory if it is a
3288 constant. Otherwise, force it into a register. Then we must
3289 be able to get a part of Y. */
3290 if (ypart == 0 && CONSTANT_P (y))
3292 y = use_anchored_address (force_const_mem (mode, y));
3293 ypart = operand_subword (y, i, 1, mode);
3295 else if (ypart == 0)
3296 ypart = operand_subword_force (y, i, mode);
3298 gcc_assert (xpart && ypart);
3300 need_clobber |= (GET_CODE (xpart) == SUBREG);
3302 last_insn = emit_move_insn (xpart, ypart);
3305 seq = get_insns ();
3306 end_sequence ();
3308 /* Show the output dies here. This is necessary for SUBREGs
3309 of pseudos since we cannot track their lifetimes correctly;
3310 hard regs shouldn't appear here except as return values.
3311 We never want to emit such a clobber after reload. */
3312 if (x != y
3313 && ! (reload_in_progress || reload_completed)
3314 && need_clobber != 0)
3315 emit_clobber (x);
3317 emit_insn (seq);
3319 return last_insn;
3322 /* Low level part of emit_move_insn.
3323 Called just like emit_move_insn, but assumes X and Y
3324 are basically valid. */
3327 emit_move_insn_1 (rtx x, rtx y)
3329 enum machine_mode mode = GET_MODE (x);
3330 enum insn_code code;
3332 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3334 code = optab_handler (mov_optab, mode)->insn_code;
3335 if (code != CODE_FOR_nothing)
3336 return emit_insn (GEN_FCN (code) (x, y));
3338 /* Expand complex moves by moving real part and imag part. */
3339 if (COMPLEX_MODE_P (mode))
3340 return emit_move_complex (mode, x, y);
3342 if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT
3343 || ALL_FIXED_POINT_MODE_P (mode))
3345 rtx result = emit_move_via_integer (mode, x, y, true);
3347 /* If we can't find an integer mode, fall back to a multi-word move. */
3348 if (result)
3349 return result;
3350 else
3351 return emit_move_multi_word (mode, x, y);
3354 if (GET_MODE_CLASS (mode) == MODE_CC)
3355 return emit_move_ccmode (mode, x, y);
3357 /* Try using a move pattern for the corresponding integer mode. This is
3358 only safe when simplify_subreg can convert MODE constants into integer
3359 constants. At present, it can only do this reliably if the value
3360 fits within a HOST_WIDE_INT. */
3361 if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3363 rtx ret = emit_move_via_integer (mode, x, y, false);
3364 if (ret)
3365 return ret;
3368 return emit_move_multi_word (mode, x, y);
3371 /* Generate code to copy Y into X.
3372 Both Y and X must have the same mode, except that
3373 Y can be a constant with VOIDmode.
3374 This mode cannot be BLKmode; use emit_block_move for that.
3376 Return the last instruction emitted. */
3379 emit_move_insn (rtx x, rtx y)
3381 enum machine_mode mode = GET_MODE (x);
3382 rtx y_cst = NULL_RTX;
3383 rtx last_insn, set;
3385 gcc_assert (mode != BLKmode
3386 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3388 if (CONSTANT_P (y))
3390 if (optimize
3391 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3392 && (last_insn = compress_float_constant (x, y)))
3393 return last_insn;
3395 y_cst = y;
3397 if (!LEGITIMATE_CONSTANT_P (y))
3399 y = force_const_mem (mode, y);
3401 /* If the target's cannot_force_const_mem prevented the spill,
3402 assume that the target's move expanders will also take care
3403 of the non-legitimate constant. */
3404 if (!y)
3405 y = y_cst;
3406 else
3407 y = use_anchored_address (y);
3411 /* If X or Y are memory references, verify that their addresses are valid
3412 for the machine. */
3413 if (MEM_P (x)
3414 && (! memory_address_p (GET_MODE (x), XEXP (x, 0))
3415 && ! push_operand (x, GET_MODE (x))))
3416 x = validize_mem (x);
3418 if (MEM_P (y)
3419 && ! memory_address_p (GET_MODE (y), XEXP (y, 0)))
3420 y = validize_mem (y);
3422 gcc_assert (mode != BLKmode);
3424 last_insn = emit_move_insn_1 (x, y);
3426 if (y_cst && REG_P (x)
3427 && (set = single_set (last_insn)) != NULL_RTX
3428 && SET_DEST (set) == x
3429 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3430 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
3432 return last_insn;
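/* Editorial illustration -- not part of GCC.  The common pattern of moving
   a constant into a fresh pseudo; the SImode choice and the value 42 are
   arbitrary assumptions for the example.  */
#if 0
static void
example_load_constant (void)
{
  rtx reg = gen_reg_rtx (SImode);

  emit_move_insn (reg, GEN_INT (42));
}
#endif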
3435 /* If Y is representable exactly in a narrower mode, and the target can
3436 perform the extension directly from constant or memory, then emit the
3437 move as an extension. */
3439 static rtx
3440 compress_float_constant (rtx x, rtx y)
3442 enum machine_mode dstmode = GET_MODE (x);
3443 enum machine_mode orig_srcmode = GET_MODE (y);
3444 enum machine_mode srcmode;
3445 REAL_VALUE_TYPE r;
3446 int oldcost, newcost;
3447 bool speed = optimize_insn_for_speed_p ();
3449 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3451 if (LEGITIMATE_CONSTANT_P (y))
3452 oldcost = rtx_cost (y, SET, speed);
3453 else
3454 oldcost = rtx_cost (force_const_mem (dstmode, y), SET, speed);
3456 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3457 srcmode != orig_srcmode;
3458 srcmode = GET_MODE_WIDER_MODE (srcmode))
3460 enum insn_code ic;
3461 rtx trunc_y, last_insn;
3463 /* Skip if the target can't extend this way. */
3464 ic = can_extend_p (dstmode, srcmode, 0);
3465 if (ic == CODE_FOR_nothing)
3466 continue;
3468 /* Skip if the narrowed value isn't exact. */
3469 if (! exact_real_truncate (srcmode, &r))
3470 continue;
3472 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3474 if (LEGITIMATE_CONSTANT_P (trunc_y))
3476 /* Skip if the target needs extra instructions to perform
3477 the extension. */
3478 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3479 continue;
3480 /* This is valid, but may not be cheaper than the original. */
3481 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET, speed);
3482 if (oldcost < newcost)
3483 continue;
3485 else if (float_extend_from_mem[dstmode][srcmode])
3487 trunc_y = force_const_mem (srcmode, trunc_y);
3488 /* This is valid, but may not be cheaper than the original. */
3489 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET, speed);
3490 if (oldcost < newcost)
3491 continue;
3492 trunc_y = validize_mem (trunc_y);
3494 else
3495 continue;
3497 /* For CSE's benefit, force the compressed constant pool entry
3498 into a new pseudo. This constant may be used in different modes,
3499 and if not, combine will put things back together for us. */
3500 trunc_y = force_reg (srcmode, trunc_y);
3501 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3502 last_insn = get_last_insn ();
3504 if (REG_P (x))
3505 set_unique_reg_note (last_insn, REG_EQUAL, y);
3507 return last_insn;
3510 return NULL_RTX;
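/* Editorial example: when moving the DFmode constant 1.0 and the target can
   extend SFmode to DFmode directly, the loop above may rewrite the move as a
   float_extend of the exactly-representable SFmode 1.0, which can be cheaper
   to materialize than a full DFmode constant-pool reference.  */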
3513 /* Pushing data onto the stack. */
3515 /* Push a block of length SIZE (perhaps variable)
3516 and return an rtx to address the beginning of the block.
3517 The value may be virtual_outgoing_args_rtx.
3519 EXTRA is the number of bytes of padding to push in addition to SIZE.
3520 BELOW nonzero means this padding comes at low addresses;
3521 otherwise, the padding comes at high addresses. */
3524 push_block (rtx size, int extra, int below)
3526 rtx temp;
3528 size = convert_modes (Pmode, ptr_mode, size, 1);
3529 if (CONSTANT_P (size))
3530 anti_adjust_stack (plus_constant (size, extra));
3531 else if (REG_P (size) && extra == 0)
3532 anti_adjust_stack (size);
3533 else
3535 temp = copy_to_mode_reg (Pmode, size);
3536 if (extra != 0)
3537 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3538 temp, 0, OPTAB_LIB_WIDEN);
3539 anti_adjust_stack (temp);
3542 #ifndef STACK_GROWS_DOWNWARD
3543 if (0)
3544 #else
3545 if (1)
3546 #endif
3548 temp = virtual_outgoing_args_rtx;
3549 if (extra != 0 && below)
3550 temp = plus_constant (temp, extra);
3552 else
3554 if (GET_CODE (size) == CONST_INT)
3555 temp = plus_constant (virtual_outgoing_args_rtx,
3556 -INTVAL (size) - (below ? 0 : extra));
3557 else if (extra != 0 && !below)
3558 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3559 negate_rtx (Pmode, plus_constant (size, extra)));
3560 else
3561 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3562 negate_rtx (Pmode, size));
3565 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3568 #ifdef PUSH_ROUNDING
3570 /* Emit single push insn. */
3572 static void
3573 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3575 rtx dest_addr;
3576 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3577 rtx dest;
3578 enum insn_code icode;
3579 insn_operand_predicate_fn pred;
3581 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3582 /* If there is a push pattern, use it. Otherwise try the old way of throwing
3583 a MEM representing the push operation to the move expander.
3584 icode = optab_handler (push_optab, mode)->insn_code;
3585 if (icode != CODE_FOR_nothing)
3587 if (((pred = insn_data[(int) icode].operand[0].predicate)
3588 && !((*pred) (x, mode))))
3589 x = force_reg (mode, x);
3590 emit_insn (GEN_FCN (icode) (x));
3591 return;
3593 if (GET_MODE_SIZE (mode) == rounded_size)
3594 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3595 /* If we are to pad downward, adjust the stack pointer first and
3596 then store X into the stack location using an offset. This is
3597 because emit_move_insn does not know how to pad; it does not have
3598 access to type. */
3599 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3601 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3602 HOST_WIDE_INT offset;
3604 emit_move_insn (stack_pointer_rtx,
3605 expand_binop (Pmode,
3606 #ifdef STACK_GROWS_DOWNWARD
3607 sub_optab,
3608 #else
3609 add_optab,
3610 #endif
3611 stack_pointer_rtx,
3612 GEN_INT (rounded_size),
3613 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3615 offset = (HOST_WIDE_INT) padding_size;
3616 #ifdef STACK_GROWS_DOWNWARD
3617 if (STACK_PUSH_CODE == POST_DEC)
3618 /* We have already decremented the stack pointer, so get the
3619 previous value. */
3620 offset += (HOST_WIDE_INT) rounded_size;
3621 #else
3622 if (STACK_PUSH_CODE == POST_INC)
3623 /* We have already incremented the stack pointer, so get the
3624 previous value. */
3625 offset -= (HOST_WIDE_INT) rounded_size;
3626 #endif
3627 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3629 else
3631 #ifdef STACK_GROWS_DOWNWARD
3632 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3633 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3634 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3635 #else
3636 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3637 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3638 GEN_INT (rounded_size));
3639 #endif
3640 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3643 dest = gen_rtx_MEM (mode, dest_addr);
3645 if (type != 0)
3647 set_mem_attributes (dest, type, 1);
3649 if (flag_optimize_sibling_calls)
3650 /* Function incoming arguments may overlap with sibling call
3651 outgoing arguments and we cannot allow reordering of reads
3652 from function arguments with stores to outgoing arguments
3653 of sibling calls. */
3654 set_mem_alias_set (dest, 0);
3656 emit_move_insn (dest, x);
3658 #endif
3660 /* Generate code to push X onto the stack, assuming it has mode MODE and
3661 type TYPE.
3662 MODE is redundant except when X is a CONST_INT (since they don't
3663 carry mode info).
3664 SIZE is an rtx for the size of data to be copied (in bytes),
3665 needed only if X is BLKmode.
3667 ALIGN (in bits) is maximum alignment we can assume.
3669 If PARTIAL and REG are both nonzero, then copy that many of the first
3670 bytes of X into registers starting with REG, and push the rest of X.
3671 The amount of space pushed is decreased by PARTIAL bytes.
3672 REG must be a hard register in this case.
3673 If REG is zero but PARTIAL is not, take all other actions for an
3674 argument partially in registers, but do not actually load any
3675 registers.
3677 EXTRA is the amount in bytes of extra space to leave next to this arg.
3678 This is ignored if an argument block has already been allocated.
3680 On a machine that lacks real push insns, ARGS_ADDR is the address of
3681 the bottom of the argument block for this call. We use indexing off there
3682 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3683 argument block has not been preallocated.
3685 ARGS_SO_FAR is the size of args previously pushed for this call.
3687 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3688 for arguments passed in registers. If nonzero, it will be the number
3689 of bytes required. */
3691 void
3692 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3693 unsigned int align, int partial, rtx reg, int extra,
3694 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3695 rtx alignment_pad)
3697 rtx xinner;
3698 enum direction stack_direction
3699 #ifdef STACK_GROWS_DOWNWARD
3700 = downward;
3701 #else
3702 = upward;
3703 #endif
3705 /* Decide where to pad the argument: `downward' for below,
3706 `upward' for above, or `none' for don't pad it.
3707 Default is below for small data on big-endian machines; else above. */
3708 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3710 /* Invert direction if stack is post-decrement.
3711 FIXME: why? */
3712 if (STACK_PUSH_CODE == POST_DEC)
3713 if (where_pad != none)
3714 where_pad = (where_pad == downward ? upward : downward);
3716 xinner = x;
3718 if (mode == BLKmode
3719 || (STRICT_ALIGNMENT && align < GET_MODE_ALIGNMENT (mode)))
3721 /* Copy a block into the stack, entirely or partially. */
3723 rtx temp;
3724 int used;
3725 int offset;
3726 int skip;
3728 offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3729 used = partial - offset;
3731 if (mode != BLKmode)
3733 /* A value is to be stored in an insufficiently aligned
3734 stack slot; copy via a suitably aligned slot if
3735 necessary. */
3736 size = GEN_INT (GET_MODE_SIZE (mode));
3737 if (!MEM_P (xinner))
3739 temp = assign_temp (type, 0, 1, 1);
3740 emit_move_insn (temp, xinner);
3741 xinner = temp;
3745 gcc_assert (size);
3747 /* USED is now the # of bytes we need not copy to the stack
3748 because registers will take care of them. */
3750 if (partial != 0)
3751 xinner = adjust_address (xinner, BLKmode, used);
3753 /* If the partial register-part of the arg counts in its stack size,
3754 skip the part of stack space corresponding to the registers.
3755 Otherwise, start copying to the beginning of the stack space,
3756 by setting SKIP to 0. */
3757 skip = (reg_parm_stack_space == 0) ? 0 : used;
3759 #ifdef PUSH_ROUNDING
3760 /* Do it with several push insns if that doesn't take lots of insns
3761 and if there is no difficulty with push insns that skip bytes
3762 on the stack for alignment purposes. */
3763 if (args_addr == 0
3764 && PUSH_ARGS
3765 && GET_CODE (size) == CONST_INT
3766 && skip == 0
3767 && MEM_ALIGN (xinner) >= align
3768 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3769 /* Here we avoid the case of a structure whose weak alignment
3770 forces many pushes of a small amount of data,
3771 and such small pushes do rounding that causes trouble. */
3772 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3773 || align >= BIGGEST_ALIGNMENT
3774 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3775 == (align / BITS_PER_UNIT)))
3776 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3778 /* Push padding now if padding above and stack grows down,
3779 or if padding below and stack grows up.
3780 But if space already allocated, this has already been done. */
3781 if (extra && args_addr == 0
3782 && where_pad != none && where_pad != stack_direction)
3783 anti_adjust_stack (GEN_INT (extra));
3785 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3787 else
3788 #endif /* PUSH_ROUNDING */
3790 rtx target;
3792 /* Otherwise make space on the stack and copy the data
3793 to the address of that space. */
3795 /* Deduct words put into registers from the size we must copy. */
3796 if (partial != 0)
3798 if (GET_CODE (size) == CONST_INT)
3799 size = GEN_INT (INTVAL (size) - used);
3800 else
3801 size = expand_binop (GET_MODE (size), sub_optab, size,
3802 GEN_INT (used), NULL_RTX, 0,
3803 OPTAB_LIB_WIDEN);
3806 /* Get the address of the stack space.
3807 In this case, we do not deal with EXTRA separately.
3808 A single stack adjust will do. */
3809 if (! args_addr)
3811 temp = push_block (size, extra, where_pad == downward);
3812 extra = 0;
3814 else if (GET_CODE (args_so_far) == CONST_INT)
3815 temp = memory_address (BLKmode,
3816 plus_constant (args_addr,
3817 skip + INTVAL (args_so_far)));
3818 else
3819 temp = memory_address (BLKmode,
3820 plus_constant (gen_rtx_PLUS (Pmode,
3821 args_addr,
3822 args_so_far),
3823 skip));
3825 if (!ACCUMULATE_OUTGOING_ARGS)
3827 /* If the source is referenced relative to the stack pointer,
3828 copy it to another register to stabilize it. We do not need
3829 to do this if we know that we won't be changing sp. */
3831 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3832 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3833 temp = copy_to_reg (temp);
3836 target = gen_rtx_MEM (BLKmode, temp);
3838 /* We do *not* set_mem_attributes here, because incoming arguments
3839 may overlap with sibling call outgoing arguments and we cannot
3840 allow reordering of reads from function arguments with stores
3841 to outgoing arguments of sibling calls. We do, however, want
3842 to record the alignment of the stack slot. */
3843 /* ALIGN may well be better aligned than TYPE, e.g. due to
3844 PARM_BOUNDARY. Assume the caller isn't lying. */
3845 set_mem_align (target, align);
3847 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3850 else if (partial > 0)
3852 /* Scalar partly in registers. */
3854 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3855 int i;
3856 int not_stack;
3857 /* # bytes of start of argument
3858 that we must make space for but need not store. */
3859 int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3860 int args_offset = INTVAL (args_so_far);
3861 int skip;
3863 /* Push padding now if padding above and stack grows down,
3864 or if padding below and stack grows up.
3865 But if space already allocated, this has already been done. */
3866 if (extra && args_addr == 0
3867 && where_pad != none && where_pad != stack_direction)
3868 anti_adjust_stack (GEN_INT (extra));
3870 /* If we make space by pushing it, we might as well push
3871 the real data. Otherwise, we can leave OFFSET nonzero
3872 and leave the space uninitialized. */
3873 if (args_addr == 0)
3874 offset = 0;
3876 /* Now NOT_STACK gets the number of words that we don't need to
3877 allocate on the stack. Convert OFFSET to words too. */
3878 not_stack = (partial - offset) / UNITS_PER_WORD;
3879 offset /= UNITS_PER_WORD;
3881 /* If the partial register-part of the arg counts in its stack size,
3882 skip the part of stack space corresponding to the registers.
3883 Otherwise, start copying to the beginning of the stack space,
3884 by setting SKIP to 0. */
3885 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3887 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3888 x = validize_mem (force_const_mem (mode, x));
3890 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3891 SUBREGs of such registers are not allowed. */
3892 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3893 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3894 x = copy_to_reg (x);
3896 /* Loop over all the words allocated on the stack for this arg. */
3897 /* We can do it by words, because any scalar bigger than a word
3898 has a size that is a multiple of a word. */
3899 #ifndef PUSH_ARGS_REVERSED
3900 for (i = not_stack; i < size; i++)
3901 #else
3902 for (i = size - 1; i >= not_stack; i--)
3903 #endif
3904 if (i >= not_stack + offset)
3905 emit_push_insn (operand_subword_force (x, i, mode),
3906 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3907 0, args_addr,
3908 GEN_INT (args_offset + ((i - not_stack + skip)
3909 * UNITS_PER_WORD)),
3910 reg_parm_stack_space, alignment_pad);
3912 else
3914 rtx addr;
3915 rtx dest;
3917 /* Push padding now if padding above and stack grows down,
3918 or if padding below and stack grows up.
3919 But if space already allocated, this has already been done. */
3920 if (extra && args_addr == 0
3921 && where_pad != none && where_pad != stack_direction)
3922 anti_adjust_stack (GEN_INT (extra));
3924 #ifdef PUSH_ROUNDING
3925 if (args_addr == 0 && PUSH_ARGS)
3926 emit_single_push_insn (mode, x, type);
3927 else
3928 #endif
3930 if (GET_CODE (args_so_far) == CONST_INT)
3931 addr
3932 = memory_address (mode,
3933 plus_constant (args_addr,
3934 INTVAL (args_so_far)));
3935 else
3936 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3937 args_so_far));
3938 dest = gen_rtx_MEM (mode, addr);
3940 /* We do *not* set_mem_attributes here, because incoming arguments
3941 may overlap with sibling call outgoing arguments and we cannot
3942 allow reordering of reads from function arguments with stores
3943 to outgoing arguments of sibling calls. We do, however, want
3944 to record the alignment of the stack slot. */
3945 /* ALIGN may well be better aligned than TYPE, e.g. due to
3946 PARM_BOUNDARY. Assume the caller isn't lying. */
3947 set_mem_align (dest, align);
3949 emit_move_insn (dest, x);
3953 /* If part should go in registers, copy that part
3954 into the appropriate registers. Do this now, at the end,
3955 since mem-to-mem copies above may do function calls. */
3956 if (partial > 0 && reg != 0)
3958 /* Handle calls that pass values in multiple non-contiguous locations.
3959 The Irix 6 ABI has examples of this. */
3960 if (GET_CODE (reg) == PARALLEL)
3961 emit_group_load (reg, x, type, -1);
3962 else
3964 gcc_assert (partial % UNITS_PER_WORD == 0);
3965 move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode);
3969 if (extra && args_addr == 0 && where_pad == stack_direction)
3970 anti_adjust_stack (GEN_INT (extra));
3972 if (alignment_pad && args_addr == 0)
3973 anti_adjust_stack (alignment_pad);
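/* Illustrative sketch (not part of expr.c): the padding rule applied three
   times in emit_push_insn above, restated as a stand-alone predicate.
   Padding has to be pushed before the data exactly when the pad side
   differs from the direction the stack grows, and only when no argument
   block has been preallocated.  The enum below only loosely mirrors
   `enum direction' and is hypothetical.  */
#if 0
#include <stdbool.h>

enum pad_dir { PAD_NONE, PAD_UPWARD, PAD_DOWNWARD };

/* Return true if EXTRA bytes of padding must be pushed before the
   argument itself.  */
static bool
push_padding_first (int extra, bool args_addr_known,
                    enum pad_dir stack_direction, enum pad_dir where_pad)
{
  if (extra == 0 || args_addr_known)   /* Space already allocated.  */
    return false;
  return where_pad != PAD_NONE && where_pad != stack_direction;
}
#endif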
3976 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3977 operations. */
3979 static rtx
3980 get_subtarget (rtx x)
3982 return (optimize
3983 || x == 0
3984 /* Only registers can be subtargets. */
3985 || !REG_P (x)
3986 /* Don't use hard regs to avoid extending their life. */
3987 || REGNO (x) < FIRST_PSEUDO_REGISTER
3988 ? 0 : x);
3991 /* A subroutine of expand_assignment. Optimize FIELD op= VAL, where
3992 FIELD is a bitfield. Returns true if the optimization was successful,
3993 and there's nothing else to do. */
3995 static bool
3996 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
3997 unsigned HOST_WIDE_INT bitpos,
3998 enum machine_mode mode1, rtx str_rtx,
3999 tree to, tree src)
4001 enum machine_mode str_mode = GET_MODE (str_rtx);
4002 unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
4003 tree op0, op1;
4004 rtx value, result;
4005 optab binop;
4007 if (mode1 != VOIDmode
4008 || bitsize >= BITS_PER_WORD
4009 || str_bitsize > BITS_PER_WORD
4010 || TREE_SIDE_EFFECTS (to)
4011 || TREE_THIS_VOLATILE (to))
4012 return false;
4014 STRIP_NOPS (src);
4015 if (!BINARY_CLASS_P (src)
4016 || TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
4017 return false;
4019 op0 = TREE_OPERAND (src, 0);
4020 op1 = TREE_OPERAND (src, 1);
4021 STRIP_NOPS (op0);
4023 if (!operand_equal_p (to, op0, 0))
4024 return false;
4026 if (MEM_P (str_rtx))
4028 unsigned HOST_WIDE_INT offset1;
4030 if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
4031 str_mode = word_mode;
4032 str_mode = get_best_mode (bitsize, bitpos,
4033 MEM_ALIGN (str_rtx), str_mode, 0);
4034 if (str_mode == VOIDmode)
4035 return false;
4036 str_bitsize = GET_MODE_BITSIZE (str_mode);
4038 offset1 = bitpos;
4039 bitpos %= str_bitsize;
4040 offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
4041 str_rtx = adjust_address (str_rtx, str_mode, offset1);
4043 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
4044 return false;
4046 /* If the bit field covers the whole REG/MEM, store_field
4047 will likely generate better code. */
4048 if (bitsize >= str_bitsize)
4049 return false;
4051 /* We can't handle fields split across multiple entities. */
4052 if (bitpos + bitsize > str_bitsize)
4053 return false;
4055 if (BYTES_BIG_ENDIAN)
4056 bitpos = str_bitsize - bitpos - bitsize;
4058 switch (TREE_CODE (src))
4060 case PLUS_EXPR:
4061 case MINUS_EXPR:
4062 /* For now, just optimize the case of the topmost bitfield
4063 where we don't need to do any masking and also
4064 1 bit bitfields where xor can be used.
4065 We might win by one instruction for the other bitfields
4066 too if insv/extv instructions aren't used, so that
4067 can be added later. */
4068 if (bitpos + bitsize != str_bitsize
4069 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
4070 break;
4072 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4073 value = convert_modes (str_mode,
4074 TYPE_MODE (TREE_TYPE (op1)), value,
4075 TYPE_UNSIGNED (TREE_TYPE (op1)));
4077 /* We may be accessing data outside the field, which means
4078 we can alias adjacent data. */
4079 if (MEM_P (str_rtx))
4081 str_rtx = shallow_copy_rtx (str_rtx);
4082 set_mem_alias_set (str_rtx, 0);
4083 set_mem_expr (str_rtx, 0);
4086 binop = TREE_CODE (src) == PLUS_EXPR ? add_optab : sub_optab;
4087 if (bitsize == 1 && bitpos + bitsize != str_bitsize)
4089 value = expand_and (str_mode, value, const1_rtx, NULL);
4090 binop = xor_optab;
4092 value = expand_shift (LSHIFT_EXPR, str_mode, value,
4093 build_int_cst (NULL_TREE, bitpos),
4094 NULL_RTX, 1);
4095 result = expand_binop (str_mode, binop, str_rtx,
4096 value, str_rtx, 1, OPTAB_WIDEN);
4097 if (result != str_rtx)
4098 emit_move_insn (str_rtx, result);
4099 return true;
4101 case BIT_IOR_EXPR:
4102 case BIT_XOR_EXPR:
4103 if (TREE_CODE (op1) != INTEGER_CST)
4104 break;
4105 value = expand_expr (op1, NULL_RTX, GET_MODE (str_rtx), EXPAND_NORMAL);
4106 value = convert_modes (GET_MODE (str_rtx),
4107 TYPE_MODE (TREE_TYPE (op1)), value,
4108 TYPE_UNSIGNED (TREE_TYPE (op1)));
4110 /* We may be accessing data outside the field, which means
4111 we can alias adjacent data. */
4112 if (MEM_P (str_rtx))
4114 str_rtx = shallow_copy_rtx (str_rtx);
4115 set_mem_alias_set (str_rtx, 0);
4116 set_mem_expr (str_rtx, 0);
4119 binop = TREE_CODE (src) == BIT_IOR_EXPR ? ior_optab : xor_optab;
4120 if (bitpos + bitsize != GET_MODE_BITSIZE (GET_MODE (str_rtx)))
4122 rtx mask = GEN_INT (((unsigned HOST_WIDE_INT) 1 << bitsize)
4123 - 1);
4124 value = expand_and (GET_MODE (str_rtx), value, mask,
4125 NULL_RTX);
4127 value = expand_shift (LSHIFT_EXPR, GET_MODE (str_rtx), value,
4128 build_int_cst (NULL_TREE, bitpos),
4129 NULL_RTX, 1);
4130 result = expand_binop (GET_MODE (str_rtx), binop, str_rtx,
4131 value, str_rtx, 1, OPTAB_WIDEN);
4132 if (result != str_rtx)
4133 emit_move_insn (str_rtx, result);
4134 return true;
4136 default:
4137 break;
4140 return false;
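/* Illustrative sketch (not part of expr.c): the kind of rewrite the
   function above performs, shown on plain C.  For a 1-bit bit-field,
   "s.flag += 1" (or "^= 1") can be done as a single xor of the word that
   contains the field; for a field occupying the topmost bits, a plain add
   works because the carry out of the field falls off the word.  The
   layout assumed here (field in bit 0 of a 32-bit word) is purely for
   exposition.  */
#if 0
#include <stdio.h>
#include <string.h>

struct s { unsigned flag : 1; unsigned rest : 31; };

static void
flip_flag_wordwise (struct s *p, unsigned bitpos)
{
  unsigned word;
  memcpy (&word, p, sizeof word);   /* Load the containing word.  */
  word ^= 1u << bitpos;             /* flag += 1 modulo 2 == flag ^= 1.  */
  memcpy (p, &word, sizeof word);   /* Store it back.  */
}

int
main (void)
{
  struct s x = { 0, 5 };
  flip_flag_wordwise (&x, 0);
  printf ("%u %u\n", (unsigned) x.flag, (unsigned) x.rest);   /* "1 5" on this layout.  */
  return 0;
}
#endif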
4144 /* Expand an assignment that stores the value of FROM into TO. If NONTEMPORAL
4145 is true, try generating a nontemporal store. */
4147 void
4148 expand_assignment (tree to, tree from, bool nontemporal)
4150 rtx to_rtx = 0;
4151 rtx result;
4153 /* Don't crash if the lhs of the assignment was erroneous. */
4154 if (TREE_CODE (to) == ERROR_MARK)
4156 result = expand_normal (from);
4157 return;
4160 /* Optimize away no-op moves without side-effects. */
4161 if (operand_equal_p (to, from, 0))
4162 return;
4164 /* Assignment of a structure component needs special treatment
4165 if the structure component's rtx is not simply a MEM.
4166 Assignment of an array element at a constant index, and assignment of
4167 an array element in an unaligned packed structure field, has the same
4168 problem. */
4169 if (handled_component_p (to)
4170 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4172 enum machine_mode mode1;
4173 HOST_WIDE_INT bitsize, bitpos;
4174 tree offset;
4175 int unsignedp;
4176 int volatilep = 0;
4177 tree tem;
4179 push_temp_slots ();
4180 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4181 &unsignedp, &volatilep, true);
4183 /* If we are going to use store_bit_field and extract_bit_field,
4184 make sure to_rtx will be safe for multiple use. */
4186 to_rtx = expand_normal (tem);
4188 if (offset != 0)
4190 rtx offset_rtx;
4192 if (!MEM_P (to_rtx))
4194 /* We can get constant negative offsets into arrays with broken
4195 user code. Translate this to a trap instead of ICEing. */
4196 gcc_assert (TREE_CODE (offset) == INTEGER_CST);
4197 expand_builtin_trap ();
4198 to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
4201 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4202 #ifdef POINTERS_EXTEND_UNSIGNED
4203 if (GET_MODE (offset_rtx) != Pmode)
4204 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4205 #else
4206 if (GET_MODE (offset_rtx) != ptr_mode)
4207 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4208 #endif
4210 /* A constant address in TO_RTX can have VOIDmode, we must not try
4211 to call force_reg for that case. Avoid that case. */
4212 if (MEM_P (to_rtx)
4213 && GET_MODE (to_rtx) == BLKmode
4214 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
4215 && bitsize > 0
4216 && (bitpos % bitsize) == 0
4217 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4218 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
4220 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4221 bitpos = 0;
4224 to_rtx = offset_address (to_rtx, offset_rtx,
4225 highest_pow2_factor_for_target (to,
4226 offset));
4229 /* Handle expand_expr of a complex value returning a CONCAT. */
4230 if (GET_CODE (to_rtx) == CONCAT)
4232 if (TREE_CODE (TREE_TYPE (from)) == COMPLEX_TYPE)
4234 gcc_assert (bitpos == 0);
4235 result = store_expr (from, to_rtx, false, nontemporal);
4237 else
4239 gcc_assert (bitpos == 0 || bitpos == GET_MODE_BITSIZE (mode1));
4240 result = store_expr (from, XEXP (to_rtx, bitpos != 0), false,
4241 nontemporal);
4244 else
4246 if (MEM_P (to_rtx))
4248 /* If the field is at offset zero, we could have been given the
4249 DECL_RTX of the parent struct. Don't munge it. */
4250 to_rtx = shallow_copy_rtx (to_rtx);
4252 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4254 /* Deal with volatile and readonly fields. The former is only
4255 done for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
4256 if (volatilep)
4257 MEM_VOLATILE_P (to_rtx) = 1;
4258 if (component_uses_parent_alias_set (to))
4259 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4262 if (optimize_bitfield_assignment_op (bitsize, bitpos, mode1,
4263 to_rtx, to, from))
4264 result = NULL;
4265 else
4266 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
4267 TREE_TYPE (tem), get_alias_set (to),
4268 nontemporal);
4271 if (result)
4272 preserve_temp_slots (result);
4273 free_temp_slots ();
4274 pop_temp_slots ();
4275 return;
4278 /* If the rhs is a function call and its value is not an aggregate,
4279 call the function before we start to compute the lhs.
4280 This is needed for correct code for cases such as
4281 val = setjmp (buf) on machines where reference to val
4282 requires loading up part of an address in a separate insn.
4284 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4285 since it might be a promoted variable where the zero- or sign- extension
4286 needs to be done. Handling this in the normal way is safe because no
4287 computation is done before the call. */
4288 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
4289 && COMPLETE_TYPE_P (TREE_TYPE (from))
4290 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
4291 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
4292 && REG_P (DECL_RTL (to))))
4294 rtx value;
4296 push_temp_slots ();
4297 value = expand_normal (from);
4298 if (to_rtx == 0)
4299 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4301 /* Handle calls that return values in multiple non-contiguous locations.
4302 The Irix 6 ABI has examples of this. */
4303 if (GET_CODE (to_rtx) == PARALLEL)
4304 emit_group_load (to_rtx, value, TREE_TYPE (from),
4305 int_size_in_bytes (TREE_TYPE (from)));
4306 else if (GET_MODE (to_rtx) == BLKmode)
4307 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
4308 else
4310 if (POINTER_TYPE_P (TREE_TYPE (to)))
4311 value = convert_memory_address (GET_MODE (to_rtx), value);
4312 emit_move_insn (to_rtx, value);
4314 preserve_temp_slots (to_rtx);
4315 free_temp_slots ();
4316 pop_temp_slots ();
4317 return;
4320 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
4321 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
4323 if (to_rtx == 0)
4324 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4326 /* Don't move directly into a return register. */
4327 if (TREE_CODE (to) == RESULT_DECL
4328 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
4330 rtx temp;
4332 push_temp_slots ();
4333 temp = expand_expr (from, NULL_RTX, GET_MODE (to_rtx), EXPAND_NORMAL);
4335 if (GET_CODE (to_rtx) == PARALLEL)
4336 emit_group_load (to_rtx, temp, TREE_TYPE (from),
4337 int_size_in_bytes (TREE_TYPE (from)));
4338 else
4339 emit_move_insn (to_rtx, temp);
4341 preserve_temp_slots (to_rtx);
4342 free_temp_slots ();
4343 pop_temp_slots ();
4344 return;
4347 /* In case we are returning the contents of an object which overlaps
4348 the place the value is being stored, use a safe function when copying
4349 a value through a pointer into a structure value return block. */
4350 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
4351 && cfun->returns_struct
4352 && !cfun->returns_pcc_struct)
4354 rtx from_rtx, size;
4356 push_temp_slots ();
4357 size = expr_size (from);
4358 from_rtx = expand_normal (from);
4360 emit_library_call (memmove_libfunc, LCT_NORMAL,
4361 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4362 XEXP (from_rtx, 0), Pmode,
4363 convert_to_mode (TYPE_MODE (sizetype),
4364 size, TYPE_UNSIGNED (sizetype)),
4365 TYPE_MODE (sizetype));
4367 preserve_temp_slots (to_rtx);
4368 free_temp_slots ();
4369 pop_temp_slots ();
4370 return;
4373 /* Compute FROM and store the value in the rtx we got. */
4375 push_temp_slots ();
4376 result = store_expr (from, to_rtx, 0, nontemporal);
4377 preserve_temp_slots (result);
4378 free_temp_slots ();
4379 pop_temp_slots ();
4380 return;
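/* Illustrative sketch (not part of expr.c): why the structure-return path
   above goes through memmove rather than a plain block copy.  When the
   function returns "*p" and P happens to point into the very block the
   caller supplied for the return value, source and destination overlap,
   which memcpy does not allow but memmove does.  The helper is
   hypothetical.  */
#if 0
#include <string.h>

struct big { int a[32]; };

static void
copy_return_value (struct big *retblock, const struct big *src)
{
  /* SRC may alias RETBLOCK (e.g. "return *self;"), so use memmove.  */
  memmove (retblock, src, sizeof *retblock);
}
#endif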
4383 /* Emits nontemporal store insn that moves FROM to TO. Returns true if this
4384 succeeded, false otherwise. */
4386 static bool
4387 emit_storent_insn (rtx to, rtx from)
4389 enum machine_mode mode = GET_MODE (to), imode;
4390 enum insn_code code = optab_handler (storent_optab, mode)->insn_code;
4391 rtx pattern;
4393 if (code == CODE_FOR_nothing)
4394 return false;
4396 imode = insn_data[code].operand[0].mode;
4397 if (!insn_data[code].operand[0].predicate (to, imode))
4398 return false;
4400 imode = insn_data[code].operand[1].mode;
4401 if (!insn_data[code].operand[1].predicate (from, imode))
4403 from = copy_to_mode_reg (imode, from);
4404 if (!insn_data[code].operand[1].predicate (from, imode))
4405 return false;
4408 pattern = GEN_FCN (code) (to, from);
4409 if (pattern == NULL_RTX)
4410 return false;
4412 emit_insn (pattern);
4413 return true;
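/* Illustrative sketch (not part of expr.c): what a nontemporal store means
   at the source level.  Assuming an x86 target with SSE2 and the
   intrinsics from <emmintrin.h> (an assumption of this example, not
   something this file depends on), a streaming store writes around the
   cache, which is the behavior the storent patterns selected above are
   meant to expose.  */
#if 0
#include <emmintrin.h>

static void
fill_nontemporal (int *dst, int value, int n)
{
  int i;
  for (i = 0; i < n; i++)
    _mm_stream_si32 (dst + i, value);   /* Bypass the cache.  */
  _mm_sfence ();                        /* Order the streaming stores.  */
}
#endif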
4416 /* Generate code for computing expression EXP,
4417 and storing the value into TARGET.
4419 If the mode is BLKmode then we may return TARGET itself.
4420 It turns out that in BLKmode it doesn't cause a problem,
4421 because C has no operators that could combine two different
4422 assignments into the same BLKmode object with different values
4423 with no sequence point. Will other languages need this to
4424 be more thorough?
4426 If CALL_PARAM_P is nonzero, this is a store into a call param on the
4427 stack, and block moves may need to be treated specially.
4429 If NONTEMPORAL is true, try using a nontemporal store instruction. */
4432 store_expr (tree exp, rtx target, int call_param_p, bool nontemporal)
4434 rtx temp;
4435 rtx alt_rtl = NULL_RTX;
4436 int dont_return_target = 0;
4438 if (VOID_TYPE_P (TREE_TYPE (exp)))
4440 /* C++ can generate ?: expressions with a throw expression in one
4441 branch and an rvalue in the other. Here, we resolve attempts to
4442 store the throw expression's nonexistent result. */
4443 gcc_assert (!call_param_p);
4444 expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
4445 return NULL_RTX;
4447 if (TREE_CODE (exp) == COMPOUND_EXPR)
4449 /* Perform first part of compound expression, then assign from second
4450 part. */
4451 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4452 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4453 return store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
4454 nontemporal);
4456 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4458 /* For conditional expression, get safe form of the target. Then
4459 test the condition, doing the appropriate assignment on either
4460 side. This avoids the creation of unnecessary temporaries.
4461 For non-BLKmode, it is more efficient not to do this. */
4463 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4465 do_pending_stack_adjust ();
4466 NO_DEFER_POP;
4467 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4468 store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
4469 nontemporal);
4470 emit_jump_insn (gen_jump (lab2));
4471 emit_barrier ();
4472 emit_label (lab1);
4473 store_expr (TREE_OPERAND (exp, 2), target, call_param_p,
4474 nontemporal);
4475 emit_label (lab2);
4476 OK_DEFER_POP;
4478 return NULL_RTX;
4480 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4481 /* If this is a scalar in a register that is stored in a wider mode
4482 than the declared mode, compute the result into its declared mode
4483 and then convert to the wider mode. Our value is the computed
4484 expression. */
4486 rtx inner_target = 0;
4488 /* We can do the conversion inside EXP, which will often result
4489 in some optimizations. Do the conversion in two steps: first
4490 change the signedness, if needed, then the extend. But don't
4491 do this if the type of EXP is a subtype of something else
4492 since then the conversion might involve more than just
4493 converting modes. */
4494 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
4495 && TREE_TYPE (TREE_TYPE (exp)) == 0
4496 && GET_MODE_PRECISION (GET_MODE (target))
4497 == TYPE_PRECISION (TREE_TYPE (exp)))
4499 if (TYPE_UNSIGNED (TREE_TYPE (exp))
4500 != SUBREG_PROMOTED_UNSIGNED_P (target))
4502 /* Some types, e.g. Fortran's logical*4, won't have a signed
4503 version, so use the mode instead. */
4504 tree ntype
4505 = (signed_or_unsigned_type_for
4506 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)));
4507 if (ntype == NULL)
4508 ntype = lang_hooks.types.type_for_mode
4509 (TYPE_MODE (TREE_TYPE (exp)),
4510 SUBREG_PROMOTED_UNSIGNED_P (target));
4512 exp = fold_convert (ntype, exp);
4515 exp = fold_convert (lang_hooks.types.type_for_mode
4516 (GET_MODE (SUBREG_REG (target)),
4517 SUBREG_PROMOTED_UNSIGNED_P (target)),
4518 exp);
4520 inner_target = SUBREG_REG (target);
4523 temp = expand_expr (exp, inner_target, VOIDmode,
4524 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4526 /* If TEMP is a VOIDmode constant, use convert_modes to make
4527 sure that we properly convert it. */
4528 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4530 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4531 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4532 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4533 GET_MODE (target), temp,
4534 SUBREG_PROMOTED_UNSIGNED_P (target));
4537 convert_move (SUBREG_REG (target), temp,
4538 SUBREG_PROMOTED_UNSIGNED_P (target));
4540 return NULL_RTX;
4542 else if (TREE_CODE (exp) == STRING_CST
4543 && !nontemporal && !call_param_p
4544 && TREE_STRING_LENGTH (exp) > 0
4545 && TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
4547 /* Optimize initialization of an array with a STRING_CST. */
4548 HOST_WIDE_INT exp_len, str_copy_len;
4549 rtx dest_mem;
4551 exp_len = int_expr_size (exp);
4552 if (exp_len <= 0)
4553 goto normal_expr;
4555 str_copy_len = strlen (TREE_STRING_POINTER (exp));
4556 if (str_copy_len < TREE_STRING_LENGTH (exp) - 1)
4557 goto normal_expr;
4559 str_copy_len = TREE_STRING_LENGTH (exp);
4560 if ((STORE_MAX_PIECES & (STORE_MAX_PIECES - 1)) == 0)
4562 str_copy_len += STORE_MAX_PIECES - 1;
4563 str_copy_len &= ~(STORE_MAX_PIECES - 1);
4565 str_copy_len = MIN (str_copy_len, exp_len);
4566 if (!can_store_by_pieces (str_copy_len, builtin_strncpy_read_str,
4567 CONST_CAST(char *, TREE_STRING_POINTER (exp)),
4568 MEM_ALIGN (target), false))
4569 goto normal_expr;
4571 dest_mem = target;
4573 dest_mem = store_by_pieces (dest_mem,
4574 str_copy_len, builtin_strncpy_read_str,
4575 CONST_CAST(char *, TREE_STRING_POINTER (exp)),
4576 MEM_ALIGN (target), false,
4577 exp_len > str_copy_len ? 1 : 0);
4578 if (exp_len > str_copy_len)
4579 clear_storage (adjust_address (dest_mem, BLKmode, 0),
4580 GEN_INT (exp_len - str_copy_len),
4581 BLOCK_OP_NORMAL);
4582 return NULL_RTX;
4584 else
4586 rtx tmp_target;
4588 normal_expr:
4589 /* If we want to use a nontemporal store, force the value into a
4590 register first. */
4591 tmp_target = nontemporal ? NULL_RTX : target;
4592 temp = expand_expr_real (exp, tmp_target, GET_MODE (target),
4593 (call_param_p
4594 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
4595 &alt_rtl);
4596 /* Return TARGET if it's a specified hardware register.
4597 If TARGET is a volatile mem ref, either return TARGET
4598 or return a reg copied *from* TARGET; ANSI requires this.
4600 Otherwise, if TEMP is not TARGET, return TEMP
4601 if it is constant (for efficiency),
4602 or if we really want the correct value. */
4603 if (!(target && REG_P (target)
4604 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4605 && !(MEM_P (target) && MEM_VOLATILE_P (target))
4606 && ! rtx_equal_p (temp, target)
4607 && CONSTANT_P (temp))
4608 dont_return_target = 1;
4611 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4612 the same as that of TARGET, adjust the constant. This is needed, for
4613 example, in case it is a CONST_DOUBLE and we want only a word-sized
4614 value. */
4615 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4616 && TREE_CODE (exp) != ERROR_MARK
4617 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4618 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4619 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
4621 /* If value was not generated in the target, store it there.
4622 Convert the value to TARGET's type first if necessary and emit the
4623 pending incrementations that have been queued when expanding EXP.
4624 Note that we cannot emit the whole queue blindly because this will
4625 effectively disable the POST_INC optimization later.
4627 If TEMP and TARGET compare equal according to rtx_equal_p, but
4628 one or both of them are volatile memory refs, we have to distinguish
4629 two cases:
4630 - expand_expr has used TARGET. In this case, we must not generate
4631 another copy. This can be detected by TARGET being equal according
4632 to == .
4633 - expand_expr has not used TARGET - that means that the source just
4634 happens to have the same RTX form. Since temp will have been created
4635 by expand_expr, it will compare unequal according to == .
4636 We must generate a copy in this case, to reach the correct number
4637 of volatile memory references. */
4639 if ((! rtx_equal_p (temp, target)
4640 || (temp != target && (side_effects_p (temp)
4641 || side_effects_p (target))))
4642 && TREE_CODE (exp) != ERROR_MARK
4643 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4644 but TARGET is not valid memory reference, TEMP will differ
4645 from TARGET although it is really the same location. */
4646 && !(alt_rtl && rtx_equal_p (alt_rtl, target))
4647 /* If there's nothing to copy, don't bother. Don't call
4648 expr_size unless necessary, because some front-ends (C++)
4649 expr_size-hook must not be given objects that are not
4650 supposed to be bit-copied or bit-initialized. */
4651 && expr_size (exp) != const0_rtx)
4653 if (GET_MODE (temp) != GET_MODE (target)
4654 && GET_MODE (temp) != VOIDmode)
4656 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
4657 if (dont_return_target)
4659 /* In this case, we will return TEMP,
4660 so make sure it has the proper mode.
4661 But don't forget to store the value into TARGET. */
4662 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4663 emit_move_insn (target, temp);
4665 else if (GET_MODE (target) == BLKmode
4666 || GET_MODE (temp) == BLKmode)
4667 emit_block_move (target, temp, expr_size (exp),
4668 (call_param_p
4669 ? BLOCK_OP_CALL_PARM
4670 : BLOCK_OP_NORMAL));
4671 else
4672 convert_move (target, temp, unsignedp);
4675 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4677 /* Handle copying a string constant into an array. The string
4678 constant may be shorter than the array. So copy just the string's
4679 actual length, and clear the rest. First get the size of the data
4680 type of the string, which is actually the size of the target. */
4681 rtx size = expr_size (exp);
4683 if (GET_CODE (size) == CONST_INT
4684 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4685 emit_block_move (target, temp, size,
4686 (call_param_p
4687 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4688 else
4690 /* Compute the size of the data to copy from the string. */
4691 tree copy_size
4692 = size_binop (MIN_EXPR,
4693 make_tree (sizetype, size),
4694 size_int (TREE_STRING_LENGTH (exp)));
4695 rtx copy_size_rtx
4696 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4697 (call_param_p
4698 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4699 rtx label = 0;
4701 /* Copy that much. */
4702 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4703 TYPE_UNSIGNED (sizetype));
4704 emit_block_move (target, temp, copy_size_rtx,
4705 (call_param_p
4706 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4708 /* Figure out how much is left in TARGET that we have to clear.
4709 Do all calculations in ptr_mode. */
4710 if (GET_CODE (copy_size_rtx) == CONST_INT)
4712 size = plus_constant (size, -INTVAL (copy_size_rtx));
4713 target = adjust_address (target, BLKmode,
4714 INTVAL (copy_size_rtx));
4716 else
4718 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4719 copy_size_rtx, NULL_RTX, 0,
4720 OPTAB_LIB_WIDEN);
4722 #ifdef POINTERS_EXTEND_UNSIGNED
4723 if (GET_MODE (copy_size_rtx) != Pmode)
4724 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4725 TYPE_UNSIGNED (sizetype));
4726 #endif
4728 target = offset_address (target, copy_size_rtx,
4729 highest_pow2_factor (copy_size));
4730 label = gen_label_rtx ();
4731 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4732 GET_MODE (size), 0, label);
4735 if (size != const0_rtx)
4736 clear_storage (target, size, BLOCK_OP_NORMAL);
4738 if (label)
4739 emit_label (label);
4742 /* Handle calls that return values in multiple non-contiguous locations.
4743 The Irix 6 ABI has examples of this. */
4744 else if (GET_CODE (target) == PARALLEL)
4745 emit_group_load (target, temp, TREE_TYPE (exp),
4746 int_size_in_bytes (TREE_TYPE (exp)));
4747 else if (GET_MODE (temp) == BLKmode)
4748 emit_block_move (target, temp, expr_size (exp),
4749 (call_param_p
4750 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4751 else if (nontemporal
4752 && emit_storent_insn (target, temp))
4753 /* If we managed to emit a nontemporal store, there is nothing else to
4754 do. */
4756 else
4758 temp = force_operand (temp, target);
4759 if (temp != target)
4760 emit_move_insn (target, temp);
4764 return NULL_RTX;
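/* Illustrative sketch (not part of expr.c): the STRING_CST case handled in
   store_expr above, in plain C.  Initializing a char array from a string
   constant that is shorter than the array amounts to copying the string's
   bytes and clearing whatever is left.  The helper is hypothetical.  */
#if 0
#include <string.h>

static void
init_like_string_cst (char *target, size_t target_len,
                      const char *str, size_t str_len)  /* str_len <= target_len.  */
{
  memcpy (target, str, str_len);                          /* Copy the constant.  */
  if (target_len > str_len)
    memset (target + str_len, 0, target_len - str_len);   /* Clear the rest.  */
}
#endif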
4767 /* Helper for categorize_ctor_elements. Identical interface. */
4769 static bool
4770 categorize_ctor_elements_1 (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
4771 HOST_WIDE_INT *p_elt_count,
4772 bool *p_must_clear)
4774 unsigned HOST_WIDE_INT idx;
4775 HOST_WIDE_INT nz_elts, elt_count;
4776 tree value, purpose;
4778 /* Whether CTOR is a valid constant initializer, in accordance with what
4779 initializer_constant_valid_p does. If inferred from the constructor
4780 elements, true until proven otherwise. */
4781 bool const_from_elts_p = constructor_static_from_elts_p (ctor);
4782 bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);
4784 nz_elts = 0;
4785 elt_count = 0;
4787 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
4789 HOST_WIDE_INT mult;
4791 mult = 1;
4792 if (TREE_CODE (purpose) == RANGE_EXPR)
4794 tree lo_index = TREE_OPERAND (purpose, 0);
4795 tree hi_index = TREE_OPERAND (purpose, 1);
4797 if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
4798 mult = (tree_low_cst (hi_index, 1)
4799 - tree_low_cst (lo_index, 1) + 1);
4802 switch (TREE_CODE (value))
4804 case CONSTRUCTOR:
4806 HOST_WIDE_INT nz = 0, ic = 0;
4808 bool const_elt_p
4809 = categorize_ctor_elements_1 (value, &nz, &ic, p_must_clear);
4811 nz_elts += mult * nz;
4812 elt_count += mult * ic;
4814 if (const_from_elts_p && const_p)
4815 const_p = const_elt_p;
4817 break;
4819 case INTEGER_CST:
4820 case REAL_CST:
4821 case FIXED_CST:
4822 if (!initializer_zerop (value))
4823 nz_elts += mult;
4824 elt_count += mult;
4825 break;
4827 case STRING_CST:
4828 nz_elts += mult * TREE_STRING_LENGTH (value);
4829 elt_count += mult * TREE_STRING_LENGTH (value);
4830 break;
4832 case COMPLEX_CST:
4833 if (!initializer_zerop (TREE_REALPART (value)))
4834 nz_elts += mult;
4835 if (!initializer_zerop (TREE_IMAGPART (value)))
4836 nz_elts += mult;
4837 elt_count += mult;
4838 break;
4840 case VECTOR_CST:
4842 tree v;
4843 for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
4845 if (!initializer_zerop (TREE_VALUE (v)))
4846 nz_elts += mult;
4847 elt_count += mult;
4850 break;
4852 default:
4853 nz_elts += mult;
4854 elt_count += mult;
4856 if (const_from_elts_p && const_p)
4857 const_p = initializer_constant_valid_p (value, TREE_TYPE (value))
4858 != NULL_TREE;
4859 break;
4863 if (!*p_must_clear
4864 && (TREE_CODE (TREE_TYPE (ctor)) == UNION_TYPE
4865 || TREE_CODE (TREE_TYPE (ctor)) == QUAL_UNION_TYPE))
4867 tree init_sub_type;
4868 bool clear_this = true;
4870 if (!VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (ctor)))
4872 /* We don't expect more than one element of the union to be
4873 initialized. Not sure what we should do otherwise... */
4874 gcc_assert (VEC_length (constructor_elt, CONSTRUCTOR_ELTS (ctor))
4875 == 1);
4877 init_sub_type = TREE_TYPE (VEC_index (constructor_elt,
4878 CONSTRUCTOR_ELTS (ctor),
4879 0)->value);
4881 /* ??? We could look at each element of the union, and find the
4882 largest element, which would avoid comparing the size of the
4883 initialized element against any tail padding in the union.
4884 Doesn't seem worth the effort... */
4885 if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (ctor)),
4886 TYPE_SIZE (init_sub_type)) == 1)
4888 /* And now we have to find out if the element itself is fully
4889 constructed. E.g. for union { struct { int a, b; } s; } u
4890 = { .s = { .a = 1 } }. */
4891 if (elt_count == count_type_elements (init_sub_type, false))
4892 clear_this = false;
4896 *p_must_clear = clear_this;
4899 *p_nz_elts += nz_elts;
4900 *p_elt_count += elt_count;
4902 return const_p;
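/* Illustrative sketch (not part of expr.c): the union case that makes
   *p_must_clear true above.  When the one initialized member does not
   cover the whole union, the constructor cannot be relied on to define
   every byte, so store_constructor clears the object first.  In plain C
   terms the effect is roughly the following (hypothetical names).  */
#if 0
#include <string.h>

union u { char c; long pad[4]; };

static void
init_small_member (union u *p)
{
  memset (p, 0, sizeof *p);   /* Clear: the char member covers only one byte.  */
  p->c = 1;                   /* Then store the initialized member.  */
}
#endif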
4905 /* Examine CTOR to discover:
4906 * how many scalar fields are set to nonzero values,
4907 and place it in *P_NZ_ELTS;
4908 * how many scalar fields in total are in CTOR,
4909 and place it in *P_ELT_COUNT.
4910 * if a type is a union, and the initializer from the constructor
4911 is not the largest element in the union, then set *p_must_clear.
4913 Return whether or not CTOR is a valid static constant initializer, the same
4914 as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0". */
4916 bool
4917 categorize_ctor_elements (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
4918 HOST_WIDE_INT *p_elt_count,
4919 bool *p_must_clear)
4921 *p_nz_elts = 0;
4922 *p_elt_count = 0;
4923 *p_must_clear = false;
4925 return
4926 categorize_ctor_elements_1 (ctor, p_nz_elts, p_elt_count, p_must_clear);
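/* Illustrative sketch (not part of expr.c): how a RANGE_EXPR index weights
   the element counts above.  A designated-range initializer such as
   "int a[8] = { [2 ... 5] = 7 };" contributes hi - lo + 1 elements (all
   nonzero here) even though the constructor holds a single
   (purpose, value) pair.  The helper is hypothetical.  */
#if 0
static void
count_range_initializer (long lo_index, long hi_index, int value_is_zero,
                         long *nz_elts, long *elt_count)
{
  long mult = hi_index - lo_index + 1;   /* Number of elements covered.  */
  *elt_count += mult;
  if (!value_is_zero)
    *nz_elts += mult;
}
#endif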
4929 /* Count the number of scalars in TYPE. Return -1 if the count overflows
4930 or TYPE is variable-sized. If ALLOW_FLEXARR is true, don't count a flexible
4931 array member at the end of the structure. */
4933 HOST_WIDE_INT
4934 count_type_elements (const_tree type, bool allow_flexarr)
4936 const HOST_WIDE_INT max = ~((HOST_WIDE_INT)1 << (HOST_BITS_PER_WIDE_INT-1));
4937 switch (TREE_CODE (type))
4939 case ARRAY_TYPE:
4941 tree telts = array_type_nelts (type);
4942 if (telts && host_integerp (telts, 1))
4944 HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1;
4945 HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type), false);
4946 if (n == 0)
4947 return 0;
4948 else if (max / n > m)
4949 return n * m;
4951 return -1;
4954 case RECORD_TYPE:
4956 HOST_WIDE_INT n = 0, t;
4957 tree f;
4959 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
4960 if (TREE_CODE (f) == FIELD_DECL)
4962 t = count_type_elements (TREE_TYPE (f), false);
4963 if (t < 0)
4965 /* Check for structures with flexible array member. */
4966 tree tf = TREE_TYPE (f);
4967 if (allow_flexarr
4968 && TREE_CHAIN (f) == NULL
4969 && TREE_CODE (tf) == ARRAY_TYPE
4970 && TYPE_DOMAIN (tf)
4971 && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
4972 && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
4973 && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
4974 && int_size_in_bytes (type) >= 0)
4975 break;
4977 return -1;
4979 n += t;
4982 return n;
4985 case UNION_TYPE:
4986 case QUAL_UNION_TYPE:
4987 return -1;
4989 case COMPLEX_TYPE:
4990 return 2;
4992 case VECTOR_TYPE:
4993 return TYPE_VECTOR_SUBPARTS (type);
4995 case INTEGER_TYPE:
4996 case REAL_TYPE:
4997 case FIXED_POINT_TYPE:
4998 case ENUMERAL_TYPE:
4999 case BOOLEAN_TYPE:
5000 case POINTER_TYPE:
5001 case OFFSET_TYPE:
5002 case REFERENCE_TYPE:
5003 return 1;
5005 case ERROR_MARK:
5006 return 0;
5008 case VOID_TYPE:
5009 case METHOD_TYPE:
5010 case FUNCTION_TYPE:
5011 case LANG_TYPE:
5012 default:
5013 gcc_unreachable ();
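/* Illustrative sketch (not part of expr.c): the counting rule above on a
   toy type representation.  An array of N elements whose element type
   holds M scalars counts as N * M, guarded against overflow with the same
   "max / n > m" test; a record sums its fields; a scalar counts as 1.
   The descriptor below is hypothetical and exists only for this example.  */
#if 0
#include <limits.h>

enum toy_kind { TOY_SCALAR, TOY_ARRAY, TOY_RECORD };

struct toy_type
{
  enum toy_kind kind;
  long nelts;                          /* TOY_ARRAY: number of elements.  */
  const struct toy_type *elt;          /* TOY_ARRAY: element type.  */
  int nfields;                         /* TOY_RECORD: number of fields.  */
  const struct toy_type *const *fields;
};

static long
toy_count_scalars (const struct toy_type *t)
{
  switch (t->kind)
    {
    case TOY_SCALAR:
      return 1;

    case TOY_ARRAY:
      {
	long m = toy_count_scalars (t->elt);
	if (t->nelts == 0)
	  return 0;
	if (m < 0)
	  return -1;
	return LONG_MAX / t->nelts > m ? t->nelts * m : -1;
      }

    case TOY_RECORD:
      {
	long n = 0;
	int i;
	for (i = 0; i < t->nfields; i++)
	  {
	    long f = toy_count_scalars (t->fields[i]);
	    if (f < 0)
	      return -1;
	    n += f;
	  }
	return n;
      }
    }
  return -1;
}
#endif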
5017 /* Return 1 if EXP contains mostly (3/4) zeros. */
5019 static int
5020 mostly_zeros_p (const_tree exp)
5022 if (TREE_CODE (exp) == CONSTRUCTOR)
5025 HOST_WIDE_INT nz_elts, count, elts;
5026 bool must_clear;
5028 categorize_ctor_elements (exp, &nz_elts, &count, &must_clear);
5029 if (must_clear)
5030 return 1;
5032 elts = count_type_elements (TREE_TYPE (exp), false);
5034 return nz_elts < elts / 4;
5037 return initializer_zerop (exp);
5040 /* Return 1 if EXP contains all zeros. */
5042 static int
5043 all_zeros_p (const_tree exp)
5045 if (TREE_CODE (exp) == CONSTRUCTOR)
5048 HOST_WIDE_INT nz_elts, count;
5049 bool must_clear;
5051 categorize_ctor_elements (exp, &nz_elts, &count, &must_clear);
5052 return nz_elts == 0;
5055 return initializer_zerop (exp);
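/* Illustrative sketch (not part of expr.c): the "mostly zeros" test on a
   flat array of scalars.  An initializer counts as mostly zero when fewer
   than a quarter of its elements are nonzero, the same "nz_elts < elts / 4"
   comparison used above.  The helper is hypothetical.  */
#if 0
#include <stdbool.h>
#include <stddef.h>

static bool
mostly_zeros_array (const int *vals, size_t n)
{
  size_t nz = 0, i;
  for (i = 0; i < n; i++)
    if (vals[i] != 0)
      nz++;
  return nz < n / 4;
}
#endif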
5058 /* Helper function for store_constructor.
5059 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
5060 TYPE is the type of the CONSTRUCTOR, not the element type.
5061 CLEARED is as for store_constructor.
5062 ALIAS_SET is the alias set to use for any stores.
5064 This provides a recursive shortcut back to store_constructor when it isn't
5065 necessary to go through store_field. This is so that we can pass through
5066 the cleared field to let store_constructor know that we may not have to
5067 clear a substructure if the outer structure has already been cleared. */
5069 static void
5070 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
5071 HOST_WIDE_INT bitpos, enum machine_mode mode,
5072 tree exp, tree type, int cleared,
5073 alias_set_type alias_set)
5075 if (TREE_CODE (exp) == CONSTRUCTOR
5076 /* We can only call store_constructor recursively if the size and
5077 bit position are on a byte boundary. */
5078 && bitpos % BITS_PER_UNIT == 0
5079 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
5080 /* If we have a nonzero bitpos for a register target, then we just
5081 let store_field do the bitfield handling. This is unlikely to
5082 generate unnecessary clear instructions anyways. */
5083 && (bitpos == 0 || MEM_P (target)))
5085 if (MEM_P (target))
5086 target
5087 = adjust_address (target,
5088 GET_MODE (target) == BLKmode
5089 || 0 != (bitpos
5090 % GET_MODE_ALIGNMENT (GET_MODE (target)))
5091 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
5094 /* Update the alias set, if required. */
5095 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
5096 && MEM_ALIAS_SET (target) != 0)
5098 target = copy_rtx (target);
5099 set_mem_alias_set (target, alias_set);
5102 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
5104 else
5105 store_field (target, bitsize, bitpos, mode, exp, type, alias_set, false);
5108 /* Store the value of constructor EXP into the rtx TARGET.
5109 TARGET is either a REG or a MEM; we know it cannot conflict, since
5110 safe_from_p has been called.
5111 CLEARED is true if TARGET is known to have been zero'd.
5112 SIZE is the number of bytes of TARGET we are allowed to modify: this
5113 may not be the same as the size of EXP if we are assigning to a field
5114 which has been packed to exclude padding bits. */
5116 static void
5117 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
5119 tree type = TREE_TYPE (exp);
5120 #ifdef WORD_REGISTER_OPERATIONS
5121 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
5122 #endif
5124 switch (TREE_CODE (type))
5126 case RECORD_TYPE:
5127 case UNION_TYPE:
5128 case QUAL_UNION_TYPE:
5130 unsigned HOST_WIDE_INT idx;
5131 tree field, value;
5133 /* If size is zero or the target is already cleared, do nothing. */
5134 if (size == 0 || cleared)
5135 cleared = 1;
5136 /* We either clear the aggregate or indicate the value is dead. */
5137 else if ((TREE_CODE (type) == UNION_TYPE
5138 || TREE_CODE (type) == QUAL_UNION_TYPE)
5139 && ! CONSTRUCTOR_ELTS (exp))
5140 /* If the constructor is empty, clear the union. */
5142 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
5143 cleared = 1;
5146 /* If we are building a static constructor into a register,
5147 set the initial value as zero so we can fold the value into
5148 a constant. But if more than one register is involved,
5149 this probably loses. */
5150 else if (REG_P (target) && TREE_STATIC (exp)
5151 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
5153 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5154 cleared = 1;
5157 /* If the constructor has fewer fields than the structure or
5158 if we are initializing the structure to mostly zeros, clear
5159 the whole structure first. Don't do this if TARGET is a
5160 register whose mode size isn't equal to SIZE since
5161 clear_storage can't handle this case. */
5162 else if (size > 0
5163 && (((int)VEC_length (constructor_elt, CONSTRUCTOR_ELTS (exp))
5164 != fields_length (type))
5165 || mostly_zeros_p (exp))
5166 && (!REG_P (target)
5167 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
5168 == size)))
5170 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5171 cleared = 1;
5174 if (REG_P (target) && !cleared)
5175 emit_clobber (target);
5177 /* Store each element of the constructor into the
5178 corresponding field of TARGET. */
5179 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
5181 enum machine_mode mode;
5182 HOST_WIDE_INT bitsize;
5183 HOST_WIDE_INT bitpos = 0;
5184 tree offset;
5185 rtx to_rtx = target;
5187 /* Just ignore missing fields. We cleared the whole
5188 structure, above, if any fields are missing. */
5189 if (field == 0)
5190 continue;
5192 if (cleared && initializer_zerop (value))
5193 continue;
5195 if (host_integerp (DECL_SIZE (field), 1))
5196 bitsize = tree_low_cst (DECL_SIZE (field), 1);
5197 else
5198 bitsize = -1;
5200 mode = DECL_MODE (field);
5201 if (DECL_BIT_FIELD (field))
5202 mode = VOIDmode;
5204 offset = DECL_FIELD_OFFSET (field);
5205 if (host_integerp (offset, 0)
5206 && host_integerp (bit_position (field), 0))
5208 bitpos = int_bit_position (field);
5209 offset = 0;
5211 else
5212 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
5214 if (offset)
5216 rtx offset_rtx;
5218 offset
5219 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
5220 make_tree (TREE_TYPE (exp),
5221 target));
5223 offset_rtx = expand_normal (offset);
5224 gcc_assert (MEM_P (to_rtx));
5226 #ifdef POINTERS_EXTEND_UNSIGNED
5227 if (GET_MODE (offset_rtx) != Pmode)
5228 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
5229 #else
5230 if (GET_MODE (offset_rtx) != ptr_mode)
5231 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
5232 #endif
5234 to_rtx = offset_address (to_rtx, offset_rtx,
5235 highest_pow2_factor (offset));
5238 #ifdef WORD_REGISTER_OPERATIONS
5239 /* If this initializes a field that is smaller than a
5240 word, at the start of a word, try to widen it to a full
5241 word. This special case allows us to output C++ member
5242 function initializations in a form that the optimizers
5243 can understand. */
5244 if (REG_P (target)
5245 && bitsize < BITS_PER_WORD
5246 && bitpos % BITS_PER_WORD == 0
5247 && GET_MODE_CLASS (mode) == MODE_INT
5248 && TREE_CODE (value) == INTEGER_CST
5249 && exp_size >= 0
5250 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
5252 tree type = TREE_TYPE (value);
5254 if (TYPE_PRECISION (type) < BITS_PER_WORD)
5256 type = lang_hooks.types.type_for_size
5257 (BITS_PER_WORD, TYPE_UNSIGNED (type));
5258 value = fold_convert (type, value);
5261 if (BYTES_BIG_ENDIAN)
5262 value
5263 = fold_build2 (LSHIFT_EXPR, type, value,
5264 build_int_cst (type,
5265 BITS_PER_WORD - bitsize));
5266 bitsize = BITS_PER_WORD;
5267 mode = word_mode;
5269 #endif
5271 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
5272 && DECL_NONADDRESSABLE_P (field))
5274 to_rtx = copy_rtx (to_rtx);
5275 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
5278 store_constructor_field (to_rtx, bitsize, bitpos, mode,
5279 value, type, cleared,
5280 get_alias_set (TREE_TYPE (field)));
5282 break;
5284 case ARRAY_TYPE:
5286 tree value, index;
5287 unsigned HOST_WIDE_INT i;
5288 int need_to_clear;
5289 tree domain;
5290 tree elttype = TREE_TYPE (type);
5291 int const_bounds_p;
5292 HOST_WIDE_INT minelt = 0;
5293 HOST_WIDE_INT maxelt = 0;
5295 domain = TYPE_DOMAIN (type);
5296 const_bounds_p = (TYPE_MIN_VALUE (domain)
5297 && TYPE_MAX_VALUE (domain)
5298 && host_integerp (TYPE_MIN_VALUE (domain), 0)
5299 && host_integerp (TYPE_MAX_VALUE (domain), 0));
5301 /* If we have constant bounds for the range of the type, get them. */
5302 if (const_bounds_p)
5304 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
5305 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
5308 /* If the constructor has fewer elements than the array, clear
5309 the whole array first. Similarly if this is a static
5310 constructor of a non-BLKmode object. */
5311 if (cleared)
5312 need_to_clear = 0;
5313 else if (REG_P (target) && TREE_STATIC (exp))
5314 need_to_clear = 1;
5315 else
5317 unsigned HOST_WIDE_INT idx;
5318 tree index, value;
5319 HOST_WIDE_INT count = 0, zero_count = 0;
5320 need_to_clear = ! const_bounds_p;
5322 /* This loop is a more accurate version of the loop in
5323 mostly_zeros_p (it handles RANGE_EXPR in an index). It
5324 is also needed to check for missing elements. */
5325 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
5327 HOST_WIDE_INT this_node_count;
5329 if (need_to_clear)
5330 break;
5332 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5334 tree lo_index = TREE_OPERAND (index, 0);
5335 tree hi_index = TREE_OPERAND (index, 1);
5337 if (! host_integerp (lo_index, 1)
5338 || ! host_integerp (hi_index, 1))
5340 need_to_clear = 1;
5341 break;
5344 this_node_count = (tree_low_cst (hi_index, 1)
5345 - tree_low_cst (lo_index, 1) + 1);
5347 else
5348 this_node_count = 1;
5350 count += this_node_count;
5351 if (mostly_zeros_p (value))
5352 zero_count += this_node_count;
5355 /* Clear the entire array first if there are any missing
5356 elements, or if the incidence of zero elements is >=
5357 75%. */
5358 if (! need_to_clear
5359 && (count < maxelt - minelt + 1
5360 || 4 * zero_count >= 3 * count))
5361 need_to_clear = 1;
5364 if (need_to_clear && size > 0)
5366 if (REG_P (target))
5367 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5368 else
5369 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5370 cleared = 1;
5373 if (!cleared && REG_P (target))
5374 /* Inform later passes that the old value is dead. */
5375 emit_clobber (target);
5377 /* Store each element of the constructor into the
5378 corresponding element of TARGET, determined by counting the
5379 elements. */
5380 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
5382 enum machine_mode mode;
5383 HOST_WIDE_INT bitsize;
5384 HOST_WIDE_INT bitpos;
5385 int unsignedp;
5386 rtx xtarget = target;
5388 if (cleared && initializer_zerop (value))
5389 continue;
5391 unsignedp = TYPE_UNSIGNED (elttype);
5392 mode = TYPE_MODE (elttype);
5393 if (mode == BLKmode)
5394 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
5395 ? tree_low_cst (TYPE_SIZE (elttype), 1)
5396 : -1);
5397 else
5398 bitsize = GET_MODE_BITSIZE (mode);
5400 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5402 tree lo_index = TREE_OPERAND (index, 0);
5403 tree hi_index = TREE_OPERAND (index, 1);
5404 rtx index_r, pos_rtx;
5405 HOST_WIDE_INT lo, hi, count;
5406 tree position;
5408 /* If the range is constant and "small", unroll the loop. */
5409 if (const_bounds_p
5410 && host_integerp (lo_index, 0)
5411 && host_integerp (hi_index, 0)
5412 && (lo = tree_low_cst (lo_index, 0),
5413 hi = tree_low_cst (hi_index, 0),
5414 count = hi - lo + 1,
5415 (!MEM_P (target)
5416 || count <= 2
5417 || (host_integerp (TYPE_SIZE (elttype), 1)
5418 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
5419 <= 40 * 8)))))
5421 lo -= minelt; hi -= minelt;
5422 for (; lo <= hi; lo++)
5424 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
5426 if (MEM_P (target)
5427 && !MEM_KEEP_ALIAS_SET_P (target)
5428 && TREE_CODE (type) == ARRAY_TYPE
5429 && TYPE_NONALIASED_COMPONENT (type))
5431 target = copy_rtx (target);
5432 MEM_KEEP_ALIAS_SET_P (target) = 1;
5435 store_constructor_field
5436 (target, bitsize, bitpos, mode, value, type, cleared,
5437 get_alias_set (elttype));
5440 else
5442 rtx loop_start = gen_label_rtx ();
5443 rtx loop_end = gen_label_rtx ();
5444 tree exit_cond;
5446 expand_normal (hi_index);
5447 unsignedp = TYPE_UNSIGNED (domain);
5449 index = build_decl (VAR_DECL, NULL_TREE, domain);
5451 index_r
5452 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
5453 &unsignedp, 0));
5454 SET_DECL_RTL (index, index_r);
5455 store_expr (lo_index, index_r, 0, false);
5457 /* Build the head of the loop. */
5458 do_pending_stack_adjust ();
5459 emit_label (loop_start);
5461 /* Assign value to element index. */
5462 position =
5463 fold_convert (ssizetype,
5464 fold_build2 (MINUS_EXPR,
5465 TREE_TYPE (index),
5466 index,
5467 TYPE_MIN_VALUE (domain)));
5469 position =
5470 size_binop (MULT_EXPR, position,
5471 fold_convert (ssizetype,
5472 TYPE_SIZE_UNIT (elttype)));
5474 pos_rtx = expand_normal (position);
5475 xtarget = offset_address (target, pos_rtx,
5476 highest_pow2_factor (position));
5477 xtarget = adjust_address (xtarget, mode, 0);
5478 if (TREE_CODE (value) == CONSTRUCTOR)
5479 store_constructor (value, xtarget, cleared,
5480 bitsize / BITS_PER_UNIT);
5481 else
5482 store_expr (value, xtarget, 0, false);
5484 /* Generate a conditional jump to exit the loop. */
5485 exit_cond = build2 (LT_EXPR, integer_type_node,
5486 index, hi_index);
5487 jumpif (exit_cond, loop_end);
5489 /* Update the loop counter, and jump to the head of
5490 the loop. */
5491 expand_assignment (index,
5492 build2 (PLUS_EXPR, TREE_TYPE (index),
5493 index, integer_one_node),
5494 false);
5496 emit_jump (loop_start);
5498 /* Build the end of the loop. */
5499 emit_label (loop_end);
5502 else if ((index != 0 && ! host_integerp (index, 0))
5503 || ! host_integerp (TYPE_SIZE (elttype), 1))
5505 tree position;
5507 if (index == 0)
5508 index = ssize_int (1);
5510 if (minelt)
5511 index = fold_convert (ssizetype,
5512 fold_build2 (MINUS_EXPR,
5513 TREE_TYPE (index),
5514 index,
5515 TYPE_MIN_VALUE (domain)));
5517 position =
5518 size_binop (MULT_EXPR, index,
5519 fold_convert (ssizetype,
5520 TYPE_SIZE_UNIT (elttype)));
5521 xtarget = offset_address (target,
5522 expand_normal (position),
5523 highest_pow2_factor (position));
5524 xtarget = adjust_address (xtarget, mode, 0);
5525 store_expr (value, xtarget, 0, false);
5527 else
5529 if (index != 0)
5530 bitpos = ((tree_low_cst (index, 0) - minelt)
5531 * tree_low_cst (TYPE_SIZE (elttype), 1));
5532 else
5533 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5535 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
5536 && TREE_CODE (type) == ARRAY_TYPE
5537 && TYPE_NONALIASED_COMPONENT (type))
5539 target = copy_rtx (target);
5540 MEM_KEEP_ALIAS_SET_P (target) = 1;
5542 store_constructor_field (target, bitsize, bitpos, mode, value,
5543 type, cleared, get_alias_set (elttype));
5546 break;
5549 case VECTOR_TYPE:
5551 unsigned HOST_WIDE_INT idx;
5552 constructor_elt *ce;
5553 int i;
5554 int need_to_clear;
5555 int icode = 0;
5556 tree elttype = TREE_TYPE (type);
5557 int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
5558 enum machine_mode eltmode = TYPE_MODE (elttype);
5559 HOST_WIDE_INT bitsize;
5560 HOST_WIDE_INT bitpos;
5561 rtvec vector = NULL;
5562 unsigned n_elts;
5563 alias_set_type alias;
5565 gcc_assert (eltmode != BLKmode);
5567 n_elts = TYPE_VECTOR_SUBPARTS (type);
5568 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
5570 enum machine_mode mode = GET_MODE (target);
5572 icode = (int) optab_handler (vec_init_optab, mode)->insn_code;
5573 if (icode != CODE_FOR_nothing)
5575 unsigned int i;
5577 vector = rtvec_alloc (n_elts);
5578 for (i = 0; i < n_elts; i++)
5579 RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
5583 /* If the constructor has fewer elements than the vector,
5584 clear the whole array first. Similarly if this is a static
5585 constructor of a non-BLKmode object. */
5586 if (cleared)
5587 need_to_clear = 0;
5588 else if (REG_P (target) && TREE_STATIC (exp))
5589 need_to_clear = 1;
5590 else
5592 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
5593 tree value;
5595 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
5597 int n_elts_here = tree_low_cst
5598 (int_const_binop (TRUNC_DIV_EXPR,
5599 TYPE_SIZE (TREE_TYPE (value)),
5600 TYPE_SIZE (elttype), 0), 1);
5602 count += n_elts_here;
5603 if (mostly_zeros_p (value))
5604 zero_count += n_elts_here;
5607 /* Clear the entire vector first if there are any missing elements,
5608 or if the incidence of zero elements is >= 75%. */
5609 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
5612 if (need_to_clear && size > 0 && !vector)
5614 if (REG_P (target))
5615 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5616 else
5617 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5618 cleared = 1;
5621 /* Inform later passes that the old value is dead. */
5622 if (!cleared && !vector && REG_P (target))
5623 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5625 if (MEM_P (target))
5626 alias = MEM_ALIAS_SET (target);
5627 else
5628 alias = get_alias_set (elttype);
5630 /* Store each element of the constructor into the corresponding
5631 element of TARGET, determined by counting the elements. */
5632 for (idx = 0, i = 0;
5633 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
5634 idx++, i += bitsize / elt_size)
5636 HOST_WIDE_INT eltpos;
5637 tree value = ce->value;
5639 bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
5640 if (cleared && initializer_zerop (value))
5641 continue;
5643 if (ce->index)
5644 eltpos = tree_low_cst (ce->index, 1);
5645 else
5646 eltpos = i;
5648 if (vector)
5650 /* Vector CONSTRUCTORs should only be built from smaller
5651 vectors in the case of BLKmode vectors. */
5652 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
5653 RTVEC_ELT (vector, eltpos)
5654 = expand_normal (value);
5656 else
5658 enum machine_mode value_mode =
5659 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
5660 ? TYPE_MODE (TREE_TYPE (value))
5661 : eltmode;
5662 bitpos = eltpos * elt_size;
5663 store_constructor_field (target, bitsize, bitpos,
5664 value_mode, value, type,
5665 cleared, alias);
5669 if (vector)
5670 emit_insn (GEN_FCN (icode)
5671 (target,
5672 gen_rtx_PARALLEL (GET_MODE (target), vector)));
5673 break;
5676 default:
5677 gcc_unreachable ();
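/* Illustrative sketch, not part of this file: the need_to_clear heuristic
   used in the array and vector cases above pre-clears the destination when
   the constructor leaves elements uninitialized or when at least 75% of the
   initialized elements are zero, so that only the nonzero elements need
   individual stores afterwards.  Written out on plain integer types:  */

static int
should_clear_first (unsigned long n_elts, unsigned long count,
		    unsigned long zero_count)
{
  /* "4 * zero_count >= 3 * count" is "zero_count / count >= 3/4" without
     a division; "count < n_elts" means some elements are missing from the
     constructor.  */
  return count < n_elts || 4 * zero_count >= 3 * count;
}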
5681 /* Store the value of EXP (an expression tree)
5682 into a subfield of TARGET which has mode MODE and occupies
5683 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5684 If MODE is VOIDmode, it means that we are storing into a bit-field.
5686 Always return const0_rtx unless we have something particular to
5687 return.
5689 TYPE is the type of the underlying object,
5691 ALIAS_SET is the alias set for the destination. This value will
5692 (in general) be different from that for TARGET, since TARGET is a
5693 reference to the containing structure.
5695 If NONTEMPORAL is true, try generating a nontemporal store. */
5697 static rtx
5698 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5699 enum machine_mode mode, tree exp, tree type,
5700 alias_set_type alias_set, bool nontemporal)
5702 HOST_WIDE_INT width_mask = 0;
5704 if (TREE_CODE (exp) == ERROR_MARK)
5705 return const0_rtx;
5707 /* If we have nothing to store, do nothing unless the expression has
5708 side-effects. */
5709 if (bitsize == 0)
5710 return expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5711 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5712 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5714 /* If we are storing into an unaligned field of an aligned union that is
5715 in a register, we may have the mode of TARGET being an integer mode but
5716 MODE == BLKmode. In that case, get an aligned object whose size and
5717 alignment are the same as TARGET and store TARGET into it (we can avoid
5718 the store if the field being stored is the entire width of TARGET). Then
5719 call ourselves recursively to store the field into a BLKmode version of
5720 that object. Finally, load from the object into TARGET. This is not
5721 very efficient in general, but should only be slightly more expensive
5722 than the otherwise-required unaligned accesses. Perhaps this can be
5723 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5724 twice, once with emit_move_insn and once via store_field. */
5726 if (mode == BLKmode
5727 && (REG_P (target) || GET_CODE (target) == SUBREG))
5729 rtx object = assign_temp (type, 0, 1, 1);
5730 rtx blk_object = adjust_address (object, BLKmode, 0);
5732 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5733 emit_move_insn (object, target);
5735 store_field (blk_object, bitsize, bitpos, mode, exp, type, alias_set,
5736 nontemporal);
5738 emit_move_insn (target, object);
5740 /* We want to return the BLKmode version of the data. */
5741 return blk_object;
5744 if (GET_CODE (target) == CONCAT)
5746 /* We're storing into a struct containing a single __complex. */
5748 gcc_assert (!bitpos);
5749 return store_expr (exp, target, 0, nontemporal);
5752 /* If the structure is in a register or if the component
5753 is a bit field, we cannot use addressing to access it.
5754 Use bit-field techniques or SUBREG to store in it. */
5756 if (mode == VOIDmode
5757 || (mode != BLKmode && ! direct_store[(int) mode]
5758 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5759 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5760 || REG_P (target)
5761 || GET_CODE (target) == SUBREG
5762 /* If the field isn't aligned enough to store as an ordinary memref,
5763 store it as a bit field. */
5764 || (mode != BLKmode
5765 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5766 || bitpos % GET_MODE_ALIGNMENT (mode))
5767 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5768 || (bitpos % BITS_PER_UNIT != 0)))
5769 /* If the RHS and field are a constant size and the size of the
5770 RHS isn't the same size as the bitfield, we must use bitfield
5771 operations. */
5772 || (bitsize >= 0
5773 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5774 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5776 rtx temp;
5778 /* If EXP is a NOP_EXPR of precision less than its mode, then that
5779 implies a mask operation. If the precision is the same size as
5780 the field we're storing into, that mask is redundant. This is
5781 particularly common with bit field assignments generated by the
5782 C front end. */
5783 if (TREE_CODE (exp) == NOP_EXPR)
5785 tree type = TREE_TYPE (exp);
5786 if (INTEGRAL_TYPE_P (type)
5787 && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
5788 && bitsize == TYPE_PRECISION (type))
5790 type = TREE_TYPE (TREE_OPERAND (exp, 0));
5791 if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
5792 exp = TREE_OPERAND (exp, 0);
5796 temp = expand_normal (exp);
5798 /* If BITSIZE is narrower than the size of the type of EXP
5799 we will be narrowing TEMP. Normally, what's wanted are the
5800 low-order bits. However, if EXP's type is a record and this is
5801 a big-endian machine, we want the upper BITSIZE bits.
5802 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5803 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5804 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5805 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5806 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5807 - bitsize),
5808 NULL_RTX, 1);
5810 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5811 MODE. */
5812 if (mode != VOIDmode && mode != BLKmode
5813 && mode != TYPE_MODE (TREE_TYPE (exp)))
5814 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5816 /* If the modes of TEMP and TARGET are both BLKmode, both
5817 must be in memory and BITPOS must be aligned on a byte
5818 boundary. If so, we simply do a block copy. Likewise
5819 for a BLKmode-like TARGET. */
5820 if (GET_MODE (temp) == BLKmode
5821 && (GET_MODE (target) == BLKmode
5822 || (MEM_P (target)
5823 && GET_MODE_CLASS (GET_MODE (target)) == MODE_INT
5824 && (bitpos % BITS_PER_UNIT) == 0
5825 && (bitsize % BITS_PER_UNIT) == 0)))
5827 gcc_assert (MEM_P (target) && MEM_P (temp)
5828 && (bitpos % BITS_PER_UNIT) == 0);
5830 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5831 emit_block_move (target, temp,
5832 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5833 / BITS_PER_UNIT),
5834 BLOCK_OP_NORMAL);
5836 return const0_rtx;
5839 /* Store the value in the bitfield. */
5840 store_bit_field (target, bitsize, bitpos, mode, temp);
5842 return const0_rtx;
5844 else
5846 /* Now build a reference to just the desired component. */
5847 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5849 if (to_rtx == target)
5850 to_rtx = copy_rtx (to_rtx);
5852 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5853 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5854 set_mem_alias_set (to_rtx, alias_set);
5856 return store_expr (exp, to_rtx, 0, nontemporal);
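/* Illustrative sketch, not part of this file: what storing BITSIZE bits of
   a value at bit position BITPOS inside a word amounts to, i.e. the job
   store_bit_field is asked to do above, written as plain mask-and-shift
   arithmetic on a 64-bit word (assumes bitpos + bitsize <= 64).  */

#include <stdint.h>

static uint64_t
insert_bit_field (uint64_t word, unsigned int bitpos, unsigned int bitsize,
		  uint64_t value)
{
  /* The width mask computed above: BITSIZE low-order one bits.  */
  uint64_t mask = bitsize < 64 ? (((uint64_t) 1 << bitsize) - 1)
			       : ~(uint64_t) 0;

  /* Clear the destination field, then OR in the masked value.  */
  return (word & ~(mask << bitpos)) | ((value & mask) << bitpos);
}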
5860 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5861 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5862 codes and find the ultimate containing object, which we return.
5864 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5865 bit position, and *PUNSIGNEDP to the signedness of the field.
5866 If the position of the field is variable, we store a tree
5867 giving the variable offset (in units) in *POFFSET.
5868 This offset is in addition to the bit position.
5869 If the position is not variable, we store 0 in *POFFSET.
5871 If any of the extraction expressions is volatile,
5872 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5874 If the field is a non-BLKmode bit-field, *PMODE is set to VOIDmode.
5875 Otherwise, it is a mode that can be used to access the field.
5877 If the field describes a variable-sized object, *PMODE is set to
5878 BLKmode and *PBITSIZE is set to -1. An access cannot be made in
5879 this case, but the address of the object can be found.
5881 If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
5882 look through nodes that serve as markers of a greater alignment than
5883 the one that can be deduced from the expression. These nodes make it
5884 possible for front-ends to prevent temporaries from being created by
5885 the middle-end on alignment considerations. For that purpose, the
5886 normal operating mode at high-level is to always pass FALSE so that
5887 the ultimate containing object is really returned; moreover, the
5888 associated predicate handled_component_p will always return TRUE
5889 on these nodes, thus indicating that they are essentially handled
5890 by get_inner_reference. TRUE should only be passed when the caller
5891 is scanning the expression in order to build another representation
5892 and specifically knows how to handle these nodes; as such, this is
5893 the normal operating mode in the RTL expanders. */
5895 tree
5896 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5897 HOST_WIDE_INT *pbitpos, tree *poffset,
5898 enum machine_mode *pmode, int *punsignedp,
5899 int *pvolatilep, bool keep_aligning)
5901 tree size_tree = 0;
5902 enum machine_mode mode = VOIDmode;
5903 bool blkmode_bitfield = false;
5904 tree offset = size_zero_node;
5905 tree bit_offset = bitsize_zero_node;
5907 /* First get the mode, signedness, and size. We do this from just the
5908 outermost expression. */
5909 if (TREE_CODE (exp) == COMPONENT_REF)
5911 tree field = TREE_OPERAND (exp, 1);
5912 size_tree = DECL_SIZE (field);
5913 if (!DECL_BIT_FIELD (field))
5914 mode = DECL_MODE (field);
5915 else if (DECL_MODE (field) == BLKmode)
5916 blkmode_bitfield = true;
5918 *punsignedp = DECL_UNSIGNED (field);
5920 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5922 size_tree = TREE_OPERAND (exp, 1);
5923 *punsignedp = (! INTEGRAL_TYPE_P (TREE_TYPE (exp))
5924 || TYPE_UNSIGNED (TREE_TYPE (exp)));
5926 /* For vector types, with the correct size of access, use the mode of
5927 the inner type. */
5928 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == VECTOR_TYPE
5929 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))
5930 && tree_int_cst_equal (size_tree, TYPE_SIZE (TREE_TYPE (exp))))
5931 mode = TYPE_MODE (TREE_TYPE (exp));
5933 else
5935 mode = TYPE_MODE (TREE_TYPE (exp));
5936 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
5938 if (mode == BLKmode)
5939 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5940 else
5941 *pbitsize = GET_MODE_BITSIZE (mode);
5944 if (size_tree != 0)
5946 if (! host_integerp (size_tree, 1))
5947 mode = BLKmode, *pbitsize = -1;
5948 else
5949 *pbitsize = tree_low_cst (size_tree, 1);
5952 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5953 and find the ultimate containing object. */
5954 while (1)
5956 switch (TREE_CODE (exp))
5958 case BIT_FIELD_REF:
5959 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5960 TREE_OPERAND (exp, 2));
5961 break;
5963 case COMPONENT_REF:
5965 tree field = TREE_OPERAND (exp, 1);
5966 tree this_offset = component_ref_field_offset (exp);
5968 /* If this field hasn't been filled in yet, don't go past it.
5969 This should only happen when folding expressions made during
5970 type construction. */
5971 if (this_offset == 0)
5972 break;
5974 offset = size_binop (PLUS_EXPR, offset, this_offset);
5975 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5976 DECL_FIELD_BIT_OFFSET (field));
5978 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5980 break;
5982 case ARRAY_REF:
5983 case ARRAY_RANGE_REF:
5985 tree index = TREE_OPERAND (exp, 1);
5986 tree low_bound = array_ref_low_bound (exp);
5987 tree unit_size = array_ref_element_size (exp);
5989 /* We assume all arrays have sizes that are a multiple of a byte.
5990 First subtract the lower bound, if any, in the type of the
5991 index, then convert to sizetype and multiply by the size of
5992 the array element. */
5993 if (! integer_zerop (low_bound))
5994 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
5995 index, low_bound);
5997 offset = size_binop (PLUS_EXPR, offset,
5998 size_binop (MULT_EXPR,
5999 fold_convert (sizetype, index),
6000 unit_size));
6002 break;
6004 case REALPART_EXPR:
6005 break;
6007 case IMAGPART_EXPR:
6008 bit_offset = size_binop (PLUS_EXPR, bit_offset,
6009 bitsize_int (*pbitsize));
6010 break;
6012 case VIEW_CONVERT_EXPR:
6013 if (keep_aligning && STRICT_ALIGNMENT
6014 && (TYPE_ALIGN (TREE_TYPE (exp))
6015 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
6016 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
6017 < BIGGEST_ALIGNMENT)
6018 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
6019 || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6020 goto done;
6021 break;
6023 default:
6024 goto done;
6027 /* If any reference in the chain is volatile, the effect is volatile. */
6028 if (TREE_THIS_VOLATILE (exp))
6029 *pvolatilep = 1;
6031 exp = TREE_OPERAND (exp, 0);
6033 done:
6035 /* If OFFSET is constant, see if we can return the whole thing as a
6036 constant bit position. Make sure to handle overflow during
6037 this conversion. */
6038 if (host_integerp (offset, 0))
6040 double_int tem = double_int_mul (tree_to_double_int (offset),
6041 uhwi_to_double_int (BITS_PER_UNIT));
6042 tem = double_int_add (tem, tree_to_double_int (bit_offset));
6043 if (double_int_fits_in_shwi_p (tem))
6045 *pbitpos = double_int_to_shwi (tem);
6046 *poffset = offset = NULL_TREE;
6050 /* Otherwise, split it up. */
6051 if (offset)
6053 *pbitpos = tree_low_cst (bit_offset, 0);
6054 *poffset = offset;
6057 /* We can use BLKmode for a byte-aligned BLKmode bitfield. */
6058 if (mode == VOIDmode
6059 && blkmode_bitfield
6060 && (*pbitpos % BITS_PER_UNIT) == 0
6061 && (*pbitsize % BITS_PER_UNIT) == 0)
6062 *pmode = BLKmode;
6063 else
6064 *pmode = mode;
6066 return exp;
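/* Illustrative sketch, not part of this file: the constant-folding step
   above multiplies the byte offset by BITS_PER_UNIT and adds the residual
   bit offset, falling back to the split (*POFFSET, *PBITPOS) representation
   if the result does not fit in a signed HOST_WIDE_INT.  The same overflow
   check, written with plain 64-bit integers:  */

#include <stdint.h>
#include <stdbool.h>

static bool
combine_bit_position (int64_t byte_offset, int64_t bit_offset,
		      int64_t *bitpos)
{
  int64_t bits;

  /* byte_offset * 8 (BITS_PER_UNIT) must not overflow ...  */
  if (byte_offset > INT64_MAX / 8 || byte_offset < INT64_MIN / 8)
    return false;
  bits = byte_offset * 8;

  /* ... and neither must adding the residual bit offset.  */
  if ((bit_offset > 0 && bits > INT64_MAX - bit_offset)
      || (bit_offset < 0 && bits < INT64_MIN - bit_offset))
    return false;

  *bitpos = bits + bit_offset;
  return true;
}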
6069 /* Given an expression EXP that may be a COMPONENT_REF, an ARRAY_REF or an
6070 ARRAY_RANGE_REF, look for whether EXP or any nested component-refs within
6071 EXP is marked as PACKED. */
6073 bool
6074 contains_packed_reference (const_tree exp)
6076 bool packed_p = false;
6078 while (1)
6080 switch (TREE_CODE (exp))
6082 case COMPONENT_REF:
6084 tree field = TREE_OPERAND (exp, 1);
6085 packed_p = DECL_PACKED (field)
6086 || TYPE_PACKED (TREE_TYPE (field))
6087 || TYPE_PACKED (TREE_TYPE (exp));
6088 if (packed_p)
6089 goto done;
6091 break;
6093 case BIT_FIELD_REF:
6094 case ARRAY_REF:
6095 case ARRAY_RANGE_REF:
6096 case REALPART_EXPR:
6097 case IMAGPART_EXPR:
6098 case VIEW_CONVERT_EXPR:
6099 break;
6101 default:
6102 goto done;
6104 exp = TREE_OPERAND (exp, 0);
6106 done:
6107 return packed_p;
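/* Illustrative declarations, not part of this file: a reference that
   contains_packed_reference would flag, using GCC's packed attribute.  */

struct __attribute__ ((packed)) packed_inner { char c; int i; };
struct packed_outer { struct packed_inner p; };

/* For an expression such as q->p.i with struct packed_outer *q, the
   COMPONENT_REF chain includes a field of a packed record, so the
   predicate above returns true.  */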
6110 /* Return a tree of sizetype representing the size, in bytes, of the element
6111 of EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6113 tree
6114 array_ref_element_size (tree exp)
6116 tree aligned_size = TREE_OPERAND (exp, 3);
6117 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6119 /* If a size was specified in the ARRAY_REF, it's the size measured
6120 in alignment units of the element type. So multiply by that value. */
6121 if (aligned_size)
6123 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6124 sizetype from another type of the same width and signedness. */
6125 if (TREE_TYPE (aligned_size) != sizetype)
6126 aligned_size = fold_convert (sizetype, aligned_size);
6127 return size_binop (MULT_EXPR, aligned_size,
6128 size_int (TYPE_ALIGN_UNIT (elmt_type)));
6131 /* Otherwise, take the size from that of the element type. Substitute
6132 any PLACEHOLDER_EXPR that we have. */
6133 else
6134 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
6137 /* Return a tree representing the lower bound of the array mentioned in
6138 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6140 tree
6141 array_ref_low_bound (tree exp)
6143 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
6145 /* If a lower bound is specified in EXP, use it. */
6146 if (TREE_OPERAND (exp, 2))
6147 return TREE_OPERAND (exp, 2);
6149 /* Otherwise, if there is a domain type and it has a lower bound, use it,
6150 substituting for a PLACEHOLDER_EXPR as needed. */
6151 if (domain_type && TYPE_MIN_VALUE (domain_type))
6152 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
6154 /* Otherwise, return a zero of the appropriate type. */
6155 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
6158 /* Return a tree representing the upper bound of the array mentioned in
6159 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6161 tree
6162 array_ref_up_bound (tree exp)
6164 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
6166 /* If there is a domain type and it has an upper bound, use it, substituting
6167 for a PLACEHOLDER_EXPR as needed. */
6168 if (domain_type && TYPE_MAX_VALUE (domain_type))
6169 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
6171 /* Otherwise fail. */
6172 return NULL_TREE;
6175 /* Return a tree representing the offset, in bytes, of the field referenced
6176 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
6178 tree
6179 component_ref_field_offset (tree exp)
6181 tree aligned_offset = TREE_OPERAND (exp, 2);
6182 tree field = TREE_OPERAND (exp, 1);
6184 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
6185 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
6186 value. */
6187 if (aligned_offset)
6189 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6190 sizetype from another type of the same width and signedness. */
6191 if (TREE_TYPE (aligned_offset) != sizetype)
6192 aligned_offset = fold_convert (sizetype, aligned_offset);
6193 return size_binop (MULT_EXPR, aligned_offset,
6194 size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT));
6197 /* Otherwise, take the offset from that of the field. Substitute
6198 any PLACEHOLDER_EXPR that we have. */
6199 else
6200 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
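/* Illustrative sketch, not part of this file: both array_ref_element_size
   and component_ref_field_offset above scale a quantity expressed in
   "alignment units" back into bytes.  On plain integers, with a
   hypothetical 32-bit offset alignment, 3 units become 3 * (32 / 8) = 12
   bytes:  */

static unsigned long
offset_in_bytes (unsigned long aligned_offset, unsigned int offset_align_bits)
{
  /* The MULT_EXPR by DECL_OFFSET_ALIGN / BITS_PER_UNIT, without trees.  */
  return aligned_offset * (offset_align_bits / 8);
}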
6203 /* Given an rtx VALUE that may contain additions and multiplications, return
6204 an equivalent value that just refers to a register, memory, or constant.
6205 This is done by generating instructions to perform the arithmetic and
6206 returning a pseudo-register containing the value.
6208 The returned value may be a REG, SUBREG, MEM or constant. */
6211 force_operand (rtx value, rtx target)
6213 rtx op1, op2;
6214 /* Use subtarget as the target for operand 0 of a binary operation. */
6215 rtx subtarget = get_subtarget (target);
6216 enum rtx_code code = GET_CODE (value);
6218 /* Check for a subreg applied to an expression produced by the loop optimizer. */
6219 if (code == SUBREG
6220 && !REG_P (SUBREG_REG (value))
6221 && !MEM_P (SUBREG_REG (value)))
6223 value
6224 = simplify_gen_subreg (GET_MODE (value),
6225 force_reg (GET_MODE (SUBREG_REG (value)),
6226 force_operand (SUBREG_REG (value),
6227 NULL_RTX)),
6228 GET_MODE (SUBREG_REG (value)),
6229 SUBREG_BYTE (value));
6230 code = GET_CODE (value);
6233 /* Check for a PIC address load. */
6234 if ((code == PLUS || code == MINUS)
6235 && XEXP (value, 0) == pic_offset_table_rtx
6236 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
6237 || GET_CODE (XEXP (value, 1)) == LABEL_REF
6238 || GET_CODE (XEXP (value, 1)) == CONST))
6240 if (!subtarget)
6241 subtarget = gen_reg_rtx (GET_MODE (value));
6242 emit_move_insn (subtarget, value);
6243 return subtarget;
6246 if (ARITHMETIC_P (value))
6248 op2 = XEXP (value, 1);
6249 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
6250 subtarget = 0;
6251 if (code == MINUS && GET_CODE (op2) == CONST_INT)
6253 code = PLUS;
6254 op2 = negate_rtx (GET_MODE (value), op2);
6257 /* Check for an addition with OP2 a constant integer and our first
6258 operand a PLUS of a virtual register and something else. In that
6259 case, we want to emit the sum of the virtual register and the
6260 constant first and then add the other value. This allows virtual
6261 register instantiation to simply modify the constant rather than
6262 creating another one around this addition. */
6263 if (code == PLUS && GET_CODE (op2) == CONST_INT
6264 && GET_CODE (XEXP (value, 0)) == PLUS
6265 && REG_P (XEXP (XEXP (value, 0), 0))
6266 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
6267 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
6269 rtx temp = expand_simple_binop (GET_MODE (value), code,
6270 XEXP (XEXP (value, 0), 0), op2,
6271 subtarget, 0, OPTAB_LIB_WIDEN);
6272 return expand_simple_binop (GET_MODE (value), code, temp,
6273 force_operand (XEXP (XEXP (value,
6274 0), 1), 0),
6275 target, 0, OPTAB_LIB_WIDEN);
6278 op1 = force_operand (XEXP (value, 0), subtarget);
6279 op2 = force_operand (op2, NULL_RTX);
6280 switch (code)
6282 case MULT:
6283 return expand_mult (GET_MODE (value), op1, op2, target, 1);
6284 case DIV:
6285 if (!INTEGRAL_MODE_P (GET_MODE (value)))
6286 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6287 target, 1, OPTAB_LIB_WIDEN);
6288 else
6289 return expand_divmod (0,
6290 FLOAT_MODE_P (GET_MODE (value))
6291 ? RDIV_EXPR : TRUNC_DIV_EXPR,
6292 GET_MODE (value), op1, op2, target, 0);
6293 case MOD:
6294 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6295 target, 0);
6296 case UDIV:
6297 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
6298 target, 1);
6299 case UMOD:
6300 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6301 target, 1);
6302 case ASHIFTRT:
6303 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6304 target, 0, OPTAB_LIB_WIDEN);
6305 default:
6306 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6307 target, 1, OPTAB_LIB_WIDEN);
6310 if (UNARY_P (value))
6312 if (!target)
6313 target = gen_reg_rtx (GET_MODE (value));
6314 op1 = force_operand (XEXP (value, 0), NULL_RTX);
6315 switch (code)
6317 case ZERO_EXTEND:
6318 case SIGN_EXTEND:
6319 case TRUNCATE:
6320 case FLOAT_EXTEND:
6321 case FLOAT_TRUNCATE:
6322 convert_move (target, op1, code == ZERO_EXTEND);
6323 return target;
6325 case FIX:
6326 case UNSIGNED_FIX:
6327 expand_fix (target, op1, code == UNSIGNED_FIX);
6328 return target;
6330 case FLOAT:
6331 case UNSIGNED_FLOAT:
6332 expand_float (target, op1, code == UNSIGNED_FLOAT);
6333 return target;
6335 default:
6336 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
6340 #ifdef INSN_SCHEDULING
6341 /* On machines that have insn scheduling, we want all memory references to be
6342 explicit, so we need to deal with such paradoxical SUBREGs. */
6343 if (GET_CODE (value) == SUBREG && MEM_P (SUBREG_REG (value))
6344 && (GET_MODE_SIZE (GET_MODE (value))
6345 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
6346 value
6347 = simplify_gen_subreg (GET_MODE (value),
6348 force_reg (GET_MODE (SUBREG_REG (value)),
6349 force_operand (SUBREG_REG (value),
6350 NULL_RTX)),
6351 GET_MODE (SUBREG_REG (value)),
6352 SUBREG_BYTE (value));
6353 #endif
6355 return value;
6358 /* Subroutine of expand_expr: return nonzero iff there is no way that
6359 EXP can reference X, which is being modified. TOP_P is nonzero if this
6360 call is going to be used to determine whether we need a temporary
6361 for EXP, as opposed to a recursive call to this function.
6363 It is always safe for this routine to return zero since it merely
6364 searches for optimization opportunities. */
6367 safe_from_p (const_rtx x, tree exp, int top_p)
6369 rtx exp_rtl = 0;
6370 int i, nops;
6372 if (x == 0
6373 /* If EXP has varying size, we MUST use a target since we currently
6374 have no way of allocating temporaries of variable size
6375 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
6376 So we assume here that something at a higher level has prevented a
6377 clash. This is somewhat bogus, but the best we can do. Only
6378 do this when X is BLKmode and when we are at the top level. */
6379 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6380 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
6381 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
6382 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
6383 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
6384 != INTEGER_CST)
6385 && GET_MODE (x) == BLKmode)
6386 /* If X is in the outgoing argument area, it is always safe. */
6387 || (MEM_P (x)
6388 && (XEXP (x, 0) == virtual_outgoing_args_rtx
6389 || (GET_CODE (XEXP (x, 0)) == PLUS
6390 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
6391 return 1;
6393 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
6394 find the underlying pseudo. */
6395 if (GET_CODE (x) == SUBREG)
6397 x = SUBREG_REG (x);
6398 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6399 return 0;
6402 /* Now look at our tree code and possibly recurse. */
6403 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
6405 case tcc_declaration:
6406 exp_rtl = DECL_RTL_IF_SET (exp);
6407 break;
6409 case tcc_constant:
6410 return 1;
6412 case tcc_exceptional:
6413 if (TREE_CODE (exp) == TREE_LIST)
6415 while (1)
6417 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
6418 return 0;
6419 exp = TREE_CHAIN (exp);
6420 if (!exp)
6421 return 1;
6422 if (TREE_CODE (exp) != TREE_LIST)
6423 return safe_from_p (x, exp, 0);
6426 else if (TREE_CODE (exp) == CONSTRUCTOR)
6428 constructor_elt *ce;
6429 unsigned HOST_WIDE_INT idx;
6431 for (idx = 0;
6432 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
6433 idx++)
6434 if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
6435 || !safe_from_p (x, ce->value, 0))
6436 return 0;
6437 return 1;
6439 else if (TREE_CODE (exp) == ERROR_MARK)
6440 return 1; /* An already-visited SAVE_EXPR? */
6441 else
6442 return 0;
6444 case tcc_statement:
6445 /* The only case we look at here is the DECL_INITIAL inside a
6446 DECL_EXPR. */
6447 return (TREE_CODE (exp) != DECL_EXPR
6448 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
6449 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
6450 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
6452 case tcc_binary:
6453 case tcc_comparison:
6454 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
6455 return 0;
6456 /* Fall through. */
6458 case tcc_unary:
6459 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6461 case tcc_expression:
6462 case tcc_reference:
6463 case tcc_vl_exp:
6464 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
6465 the expression. If it is set, we conflict iff we are that rtx or
6466 both are in memory. Otherwise, we check all operands of the
6467 expression recursively. */
6469 switch (TREE_CODE (exp))
6471 case ADDR_EXPR:
6472 /* If the operand is static or we are static, we can't conflict.
6473 Likewise if we don't conflict with the operand at all. */
6474 if (staticp (TREE_OPERAND (exp, 0))
6475 || TREE_STATIC (exp)
6476 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6477 return 1;
6479 /* Otherwise, the only way this can conflict is if we are taking
6480 the address of a DECL whose address is part of X, which is
6481 very rare. */
6482 exp = TREE_OPERAND (exp, 0);
6483 if (DECL_P (exp))
6485 if (!DECL_RTL_SET_P (exp)
6486 || !MEM_P (DECL_RTL (exp)))
6487 return 0;
6488 else
6489 exp_rtl = XEXP (DECL_RTL (exp), 0);
6491 break;
6493 case MISALIGNED_INDIRECT_REF:
6494 case ALIGN_INDIRECT_REF:
6495 case INDIRECT_REF:
6496 if (MEM_P (x)
6497 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
6498 get_alias_set (exp)))
6499 return 0;
6500 break;
6502 case CALL_EXPR:
6503 /* Assume that the call will clobber all hard registers and
6504 all of memory. */
6505 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6506 || MEM_P (x))
6507 return 0;
6508 break;
6510 case WITH_CLEANUP_EXPR:
6511 case CLEANUP_POINT_EXPR:
6512 /* Lowered by gimplify.c. */
6513 gcc_unreachable ();
6515 case SAVE_EXPR:
6516 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6518 default:
6519 break;
6522 /* If we have an rtx, we do not need to scan our operands. */
6523 if (exp_rtl)
6524 break;
6526 nops = TREE_OPERAND_LENGTH (exp);
6527 for (i = 0; i < nops; i++)
6528 if (TREE_OPERAND (exp, i) != 0
6529 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
6530 return 0;
6532 break;
6534 case tcc_type:
6535 /* Should never get a type here. */
6536 gcc_unreachable ();
6539 /* If we have an rtl, find any enclosed object. Then see if we conflict
6540 with it. */
6541 if (exp_rtl)
6543 if (GET_CODE (exp_rtl) == SUBREG)
6545 exp_rtl = SUBREG_REG (exp_rtl);
6546 if (REG_P (exp_rtl)
6547 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
6548 return 0;
6551 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
6552 are memory and they conflict. */
6553 return ! (rtx_equal_p (x, exp_rtl)
6554 || (MEM_P (x) && MEM_P (exp_rtl)
6555 && true_dependence (exp_rtl, VOIDmode, x,
6556 rtx_addr_varies_p)));
6559 /* If we reach here, it is safe. */
6560 return 1;
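/* Illustrative example, not part of this file: the classic case the
   predicate above exists for.  When expanding the right-hand side of

     a = b + foo ();

   the CALL_EXPR case assumes the call clobbers all of memory, so
   safe_from_p returns 0 for a MEM target such as A's rtl and the caller
   evaluates the right-hand side into a temporary instead of into A.  */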
6564 /* Return the highest power of two that EXP is known to be a multiple of.
6565 This is used in updating alignment of MEMs in array references. */
6567 unsigned HOST_WIDE_INT
6568 highest_pow2_factor (const_tree exp)
6570 unsigned HOST_WIDE_INT c0, c1;
6572 switch (TREE_CODE (exp))
6574 case INTEGER_CST:
6575 /* We can find the lowest bit that's a one. If the low
6576 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6577 We need to handle this case since we can find it in a COND_EXPR,
6578 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
6579 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
6580 later ICE. */
6581 if (TREE_OVERFLOW (exp))
6582 return BIGGEST_ALIGNMENT;
6583 else
6585 /* Note: tree_low_cst is intentionally not used here,
6586 we don't care about the upper bits. */
6587 c0 = TREE_INT_CST_LOW (exp);
6588 c0 &= -c0;
6589 return c0 ? c0 : BIGGEST_ALIGNMENT;
6591 break;
6593 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6594 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6595 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6596 return MIN (c0, c1);
6598 case MULT_EXPR:
6599 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6600 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6601 return c0 * c1;
6603 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6604 case CEIL_DIV_EXPR:
6605 if (integer_pow2p (TREE_OPERAND (exp, 1))
6606 && host_integerp (TREE_OPERAND (exp, 1), 1))
6608 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6609 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6610 return MAX (1, c0 / c1);
6612 break;
6614 case BIT_AND_EXPR:
6615 /* The highest power of two of a bit-and expression is the maximum of
6616 that of its operands. We typically get here for a complex LHS and
6617 a constant negative power of two on the RHS to force an explicit
6618 alignment, so don't bother looking at the LHS. */
6619 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6621 CASE_CONVERT:
6622 case SAVE_EXPR:
6623 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6625 case COMPOUND_EXPR:
6626 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6628 case COND_EXPR:
6629 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6630 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6631 return MIN (c0, c1);
6633 default:
6634 break;
6637 return 1;
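/* Illustrative sketch, not part of this file: the INTEGER_CST case above
   isolates the lowest set bit with "c &= -c", which is exactly the largest
   power of two dividing the constant (e.g. 24 = 0b11000 yields 8).  The
   combination rules then follow ordinary arithmetic: MIN for plus/minus,
   product for mult.  Standalone version of the constant case:  */

static unsigned long
pow2_factor_of_constant (unsigned long c)
{
  /* "c & -c" keeps only the lowest one bit; a zero result would mean
     "unbounded", for which the code above substitutes BIGGEST_ALIGNMENT.  */
  return c & -c;
}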
6640 /* Similar, except that the alignment requirements of TARGET are
6641 taken into account. Assume it is at least as aligned as its
6642 type, unless it is a COMPONENT_REF in which case the layout of
6643 the structure gives the alignment. */
6645 static unsigned HOST_WIDE_INT
6646 highest_pow2_factor_for_target (const_tree target, const_tree exp)
6648 unsigned HOST_WIDE_INT target_align, factor;
6650 factor = highest_pow2_factor (exp);
6651 if (TREE_CODE (target) == COMPONENT_REF)
6652 target_align = DECL_ALIGN_UNIT (TREE_OPERAND (target, 1));
6653 else
6654 target_align = TYPE_ALIGN_UNIT (TREE_TYPE (target));
6655 return MAX (factor, target_align);
6658 /* Return &VAR expression for emulated thread local VAR. */
6660 static tree
6661 emutls_var_address (tree var)
6663 tree emuvar = emutls_decl (var);
6664 tree fn = built_in_decls [BUILT_IN_EMUTLS_GET_ADDRESS];
6665 tree arg = build_fold_addr_expr_with_type (emuvar, ptr_type_node);
6666 tree arglist = build_tree_list (NULL_TREE, arg);
6667 tree call = build_function_call_expr (fn, arglist);
6668 return fold_convert (build_pointer_type (TREE_TYPE (var)), call);
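/* Illustrative source-level picture of the rewrite above (the name of the
   control object is hypothetical; it is whatever emutls_decl creates for
   VAR).  For

     __thread int var;
     int *p = &var;

   the address is expanded roughly as if the source had been

     extern void *__emutls_get_address (void *);
     int *p = (int *) __emutls_get_address (&__emutls_control_for_var);  */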
6672 /* Subroutine of expand_expr. Expand the two operands of a binary
6673 expression EXP0 and EXP1 placing the results in OP0 and OP1.
6674 The value may be stored in TARGET if TARGET is nonzero. The
6675 MODIFIER argument is as documented by expand_expr. */
6677 static void
6678 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
6679 enum expand_modifier modifier)
6681 if (! safe_from_p (target, exp1, 1))
6682 target = 0;
6683 if (operand_equal_p (exp0, exp1, 0))
6685 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6686 *op1 = copy_rtx (*op0);
6688 else
6690 /* If we need to preserve evaluation order, copy exp0 into its own
6691 temporary variable so that it can't be clobbered by exp1. */
6692 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
6693 exp0 = save_expr (exp0);
6694 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6695 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
6700 /* Return a MEM that contains constant EXP. DEFER is as for
6701 output_constant_def and MODIFIER is as for expand_expr. */
6703 static rtx
6704 expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
6706 rtx mem;
6708 mem = output_constant_def (exp, defer);
6709 if (modifier != EXPAND_INITIALIZER)
6710 mem = use_anchored_address (mem);
6711 return mem;
6714 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
6715 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6717 static rtx
6718 expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
6719 enum expand_modifier modifier)
6721 rtx result, subtarget;
6722 tree inner, offset;
6723 HOST_WIDE_INT bitsize, bitpos;
6724 int volatilep, unsignedp;
6725 enum machine_mode mode1;
6727 /* If we are taking the address of a constant and are at the top level,
6728 we have to use output_constant_def since we can't call force_const_mem
6729 at top level. */
6730 /* ??? This should be considered a front-end bug. We should not be
6731 generating ADDR_EXPR of something that isn't an LVALUE. The only
6732 exception here is STRING_CST. */
6733 if (CONSTANT_CLASS_P (exp))
6734 return XEXP (expand_expr_constant (exp, 0, modifier), 0);
6736 /* Everything must be something allowed by is_gimple_addressable. */
6737 switch (TREE_CODE (exp))
6739 case INDIRECT_REF:
6740 /* This case will happen via recursion for &a->b. */
6741 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6743 case CONST_DECL:
6744 /* Recurse and make the output_constant_def clause above handle this. */
6745 return expand_expr_addr_expr_1 (DECL_INITIAL (exp), target,
6746 tmode, modifier);
6748 case REALPART_EXPR:
6749 /* The real part of the complex number is always first, therefore
6750 the address is the same as the address of the parent object. */
6751 offset = 0;
6752 bitpos = 0;
6753 inner = TREE_OPERAND (exp, 0);
6754 break;
6756 case IMAGPART_EXPR:
6757 /* The imaginary part of the complex number is always second.
6758 The expression is therefore always offset by the size of the
6759 scalar type. */
6760 offset = 0;
6761 bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
6762 inner = TREE_OPERAND (exp, 0);
6763 break;
6765 case VAR_DECL:
6766 /* TLS emulation hook - replace __thread VAR's &VAR with
6767 __emutls_get_address (&_emutls.VAR). */
6768 if (! targetm.have_tls
6769 && TREE_CODE (exp) == VAR_DECL
6770 && DECL_THREAD_LOCAL_P (exp))
6772 exp = emutls_var_address (exp);
6773 return expand_expr (exp, target, tmode, modifier);
6775 /* Fall through. */
6777 default:
6778 /* If the object is a DECL, then expand it for its rtl. Don't bypass
6779 expand_expr, as that can have various side effects; LABEL_DECLs for
6780 example, may not have their DECL_RTL set yet. Expand the rtl of
6781 CONSTRUCTORs too, which should yield a memory reference for the
6782 constructor's contents. Assume language specific tree nodes can
6783 be expanded in some interesting way. */
6784 gcc_assert (TREE_CODE (exp) < LAST_AND_UNUSED_TREE_CODE);
6785 if (DECL_P (exp)
6786 || TREE_CODE (exp) == CONSTRUCTOR
6787 || TREE_CODE (exp) == COMPOUND_LITERAL_EXPR)
6789 result = expand_expr (exp, target, tmode,
6790 modifier == EXPAND_INITIALIZER
6791 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
6793 /* If the DECL isn't in memory, then the DECL wasn't properly
6794 marked TREE_ADDRESSABLE, which will be either a front-end
6795 or a tree optimizer bug. */
6796 gcc_assert (MEM_P (result));
6797 result = XEXP (result, 0);
6799 /* ??? Is this needed anymore? */
6800 if (DECL_P (exp) && ! TREE_USED (exp))
6802 assemble_external (exp);
6803 TREE_USED (exp) = 1;
6806 if (modifier != EXPAND_INITIALIZER
6807 && modifier != EXPAND_CONST_ADDRESS)
6808 result = force_operand (result, target);
6809 return result;
6812 /* Pass FALSE as the last argument to get_inner_reference although
6813 we are expanding to RTL. The rationale is that we know how to
6814 handle "aligning nodes" here: we can just bypass them because
6815 they won't change the final object whose address will be returned
6816 (they actually exist only for that purpose). */
6817 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6818 &mode1, &unsignedp, &volatilep, false);
6819 break;
6822 /* We must have made progress. */
6823 gcc_assert (inner != exp);
6825 subtarget = offset || bitpos ? NULL_RTX : target;
6826 /* For VIEW_CONVERT_EXPR, where the outer alignment is bigger than
6827 inner alignment, force the inner to be sufficiently aligned. */
6828 if (CONSTANT_CLASS_P (inner)
6829 && TYPE_ALIGN (TREE_TYPE (inner)) < TYPE_ALIGN (TREE_TYPE (exp)))
6831 inner = copy_node (inner);
6832 TREE_TYPE (inner) = copy_node (TREE_TYPE (inner));
6833 TYPE_ALIGN (TREE_TYPE (inner)) = TYPE_ALIGN (TREE_TYPE (exp));
6834 TYPE_USER_ALIGN (TREE_TYPE (inner)) = 1;
6836 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier);
6838 if (offset)
6840 rtx tmp;
6842 if (modifier != EXPAND_NORMAL)
6843 result = force_operand (result, NULL);
6844 tmp = expand_expr (offset, NULL_RTX, tmode,
6845 modifier == EXPAND_INITIALIZER
6846 ? EXPAND_INITIALIZER : EXPAND_NORMAL);
6848 result = convert_memory_address (tmode, result);
6849 tmp = convert_memory_address (tmode, tmp);
6851 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6852 result = gen_rtx_PLUS (tmode, result, tmp);
6853 else
6855 subtarget = bitpos ? NULL_RTX : target;
6856 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
6857 1, OPTAB_LIB_WIDEN);
6861 if (bitpos)
6863 /* Someone beforehand should have rejected taking the address
6864 of such an object. */
6865 gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
6867 result = plus_constant (result, bitpos / BITS_PER_UNIT);
6868 if (modifier < EXPAND_SUM)
6869 result = force_operand (result, target);
6872 return result;
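/* Illustrative sketch, not part of this file: the address assembled above
   is  base + variable offset + constant bitpos / BITS_PER_UNIT.  For a C
   access like &s->a[i].f that corresponds to the following plain pointer
   arithmetic (hypothetical struct layout):  */

#include <stddef.h>

struct addr_inner { int f; int g; };
struct addr_outer { char pad; struct addr_inner a[10]; };

static int *
address_of_field (struct addr_outer *s, size_t i)
{
  /* offsetof supplies the constant part (bitpos / BITS_PER_UNIT);
     i * sizeof (struct addr_inner) is the variable offset returned in
     *POFFSET by get_inner_reference.  */
  return (int *) ((char *) s
		  + offsetof (struct addr_outer, a)
		  + i * sizeof (struct addr_inner)
		  + offsetof (struct addr_inner, f));
}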
6875 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
6876 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6878 static rtx
6879 expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
6880 enum expand_modifier modifier)
6882 enum machine_mode rmode;
6883 rtx result;
6885 /* Target mode of VOIDmode says "whatever's natural". */
6886 if (tmode == VOIDmode)
6887 tmode = TYPE_MODE (TREE_TYPE (exp));
6889 /* We can get called with some Weird Things if the user does silliness
6890 like "(short) &a". In that case, convert_memory_address won't do
6891 the right thing, so ignore the given target mode. */
6892 if (tmode != Pmode && tmode != ptr_mode)
6893 tmode = Pmode;
6895 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
6896 tmode, modifier);
6898 /* Despite expand_expr's claims about ignoring TMODE when not
6899 strictly convenient, stuff breaks if we don't honor it. Note
6900 that combined with the above, we only do this for pointer modes. */
6901 rmode = GET_MODE (result);
6902 if (rmode == VOIDmode)
6903 rmode = tmode;
6904 if (rmode != tmode)
6905 result = convert_memory_address (tmode, result);
6907 return result;
6910 /* Generate code for computing CONSTRUCTOR EXP.
6911 An rtx for the computed value is returned. If AVOID_TEMP_MEM
6912 is TRUE, instead of creating a temporary variable in memory
6913 NULL is returned and the caller needs to handle it differently. */
6915 static rtx
6916 expand_constructor (tree exp, rtx target, enum expand_modifier modifier,
6917 bool avoid_temp_mem)
6919 tree type = TREE_TYPE (exp);
6920 enum machine_mode mode = TYPE_MODE (type);
6922 /* Try to avoid creating a temporary at all. This is possible
6923 if all of the initializer is zero.
6924 FIXME: try to handle all [0..255] initializers we can handle
6925 with memset. */
6926 if (TREE_STATIC (exp)
6927 && !TREE_ADDRESSABLE (exp)
6928 && target != 0 && mode == BLKmode
6929 && all_zeros_p (exp))
6931 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
6932 return target;
6935 /* All elts simple constants => refer to a constant in memory. But
6936 if this is a non-BLKmode mode, let it store a field at a time
6937 since that should make a CONST_INT or CONST_DOUBLE when we
6938 fold. Likewise, if we have a target we can use, it is best to
6939 store directly into the target unless the type is large enough
6940 that memcpy will be used. If we are making an initializer and
6941 all operands are constant, put it in memory as well.
6943 FIXME: Avoid trying to fill vector constructors piece-meal.
6944 Output them with output_constant_def below unless we're sure
6945 they're zeros. This should go away when vector initializers
6946 are treated like VECTOR_CST instead of arrays. */
6947 if ((TREE_STATIC (exp)
6948 && ((mode == BLKmode
6949 && ! (target != 0 && safe_from_p (target, exp, 1)))
6950 || TREE_ADDRESSABLE (exp)
6951 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6952 && (! MOVE_BY_PIECES_P
6953 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6954 TYPE_ALIGN (type)))
6955 && ! mostly_zeros_p (exp))))
6956 || ((modifier == EXPAND_INITIALIZER || modifier == EXPAND_CONST_ADDRESS)
6957 && TREE_CONSTANT (exp)))
6959 rtx constructor;
6961 if (avoid_temp_mem)
6962 return NULL_RTX;
6964 constructor = expand_expr_constant (exp, 1, modifier);
6966 if (modifier != EXPAND_CONST_ADDRESS
6967 && modifier != EXPAND_INITIALIZER
6968 && modifier != EXPAND_SUM)
6969 constructor = validize_mem (constructor);
6971 return constructor;
6974 /* Handle calls that pass values in multiple non-contiguous
6975 locations. The Irix 6 ABI has examples of this. */
6976 if (target == 0 || ! safe_from_p (target, exp, 1)
6977 || GET_CODE (target) == PARALLEL || modifier == EXPAND_STACK_PARM)
6979 if (avoid_temp_mem)
6980 return NULL_RTX;
6982 target
6983 = assign_temp (build_qualified_type (type, (TYPE_QUALS (type)
6984 | (TREE_READONLY (exp)
6985 * TYPE_QUAL_CONST))),
6986 0, TREE_ADDRESSABLE (exp), 1);
6989 store_constructor (exp, target, 0, int_expr_size (exp));
6990 return target;
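/* Illustrative sketch, not part of this file: the all_zeros_p shortcut
   above corresponds, at the source level, to expanding an initializer such
   as "struct big b = { 0 };" as a single block clear of the object rather
   than one store per field:  */

#include <string.h>

struct big_sketch { int a[64]; double d[8]; };

static void
clear_like_constructor (struct big_sketch *b)
{
  /* The source-level analogue of clear_storage (target, expr_size (exp),
     BLOCK_OP_NORMAL): one block clear instead of a store per element.  */
  memset (b, 0, sizeof *b);
}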
6994 /* expand_expr: generate code for computing expression EXP.
6995 An rtx for the computed value is returned. The value is never null.
6996 In the case of a void EXP, const0_rtx is returned.
6998 The value may be stored in TARGET if TARGET is nonzero.
6999 TARGET is just a suggestion; callers must assume that
7000 the rtx returned may not be the same as TARGET.
7002 If TARGET is CONST0_RTX, it means that the value will be ignored.
7004 If TMODE is not VOIDmode, it suggests generating the
7005 result in mode TMODE. But this is done only when convenient.
7006 Otherwise, TMODE is ignored and the value is generated in its natural mode.
7007 TMODE is just a suggestion; callers must assume that
7008 the rtx returned may not have mode TMODE.
7010 Note that TARGET may have neither TMODE nor MODE. In that case, it
7011 probably will not be used.
7013 If MODIFIER is EXPAND_SUM then when EXP is an addition
7014 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
7015 or a nest of (PLUS ...) and (MINUS ...) where the terms are
7016 products as above, or REG or MEM, or constant.
7017 Ordinarily in such cases we would output mul or add instructions
7018 and then return a pseudo reg containing the sum.
7020 EXPAND_INITIALIZER is much like EXPAND_SUM except that
7021 it also marks a label as absolutely required (it can't be dead).
7022 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
7023 This is used for outputting expressions used in initializers.
7025 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
7026 with a constant address even if that address is not normally legitimate.
7027 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
7029 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
7030 a call parameter. Such targets require special care as we haven't yet
7031 marked TARGET so that it's safe from being trashed by libcalls. We
7032 don't want to use TARGET for anything but the final result;
7033 Intermediate values must go elsewhere. Additionally, calls to
7034 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
7036 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
7037 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
7038 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
7039 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
7040 recursively. */
7042 static rtx expand_expr_real_1 (tree, rtx, enum machine_mode,
7043 enum expand_modifier, rtx *);
7046 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
7047 enum expand_modifier modifier, rtx *alt_rtl)
7049 int rn = -1;
7050 rtx ret, last = NULL;
7052 /* Handle ERROR_MARK before anybody tries to access its type. */
7053 if (TREE_CODE (exp) == ERROR_MARK
7054 || (TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK))
7056 ret = CONST0_RTX (tmode);
7057 return ret ? ret : const0_rtx;
7060 if (flag_non_call_exceptions)
7062 rn = lookup_expr_eh_region (exp);
7064 /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't throw. */
7065 if (rn >= 0)
7066 last = get_last_insn ();
7069 /* If this is an expression of some kind and it has an associated line
7070 number, then emit the line number before expanding the expression.
7072 We need to save and restore the file and line information so that
7073 errors discovered during expansion are emitted with the right
7074 information. It would be better if the diagnostic routines
7075 used the file/line information embedded in the tree nodes rather
7076 than globals. */
7077 if (cfun && EXPR_HAS_LOCATION (exp))
7079 location_t saved_location = input_location;
7080 input_location = EXPR_LOCATION (exp);
7081 set_curr_insn_source_location (input_location);
7083 /* Record where the insns produced belong. */
7084 set_curr_insn_block (TREE_BLOCK (exp));
7086 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
7088 input_location = saved_location;
7090 else
7092 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
7095 /* If using non-call exceptions, mark all insns that may trap.
7096 expand_call() will mark CALL_INSNs before we get to this code,
7097 but it doesn't handle libcalls, and these may trap. */
7098 if (rn >= 0)
7100 rtx insn;
7101 for (insn = next_real_insn (last); insn;
7102 insn = next_real_insn (insn))
7104 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
7105 /* If we want exceptions for non-call insns, any
7106 may_trap_p instruction may throw. */
7107 && GET_CODE (PATTERN (insn)) != CLOBBER
7108 && GET_CODE (PATTERN (insn)) != USE
7109 && (CALL_P (insn) || may_trap_p (PATTERN (insn))))
7110 add_reg_note (insn, REG_EH_REGION, GEN_INT (rn));
7114 return ret;
7117 static rtx
7118 expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
7119 enum expand_modifier modifier, rtx *alt_rtl)
7121 rtx op0, op1, op2, temp, decl_rtl;
7122 tree type;
7123 int unsignedp;
7124 enum machine_mode mode;
7125 enum tree_code code = TREE_CODE (exp);
7126 optab this_optab;
7127 rtx subtarget, original_target;
7128 int ignore;
7129 tree context, subexp0, subexp1;
7130 bool reduce_bit_field;
7131 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field \
7132 ? reduce_to_bit_field_precision ((expr), \
7133 target, \
7134 type) \
7135 : (expr))
7137 type = TREE_TYPE (exp);
7138 mode = TYPE_MODE (type);
7139 unsignedp = TYPE_UNSIGNED (type);
7141 ignore = (target == const0_rtx
7142 || ((CONVERT_EXPR_CODE_P (code)
7143 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
7144 && TREE_CODE (type) == VOID_TYPE));
7146 /* An operation in what may be a bit-field type needs the
7147 result to be reduced to the precision of the bit-field type,
7148 which is narrower than that of the type's mode. */
7149 reduce_bit_field = (!ignore
7150 && TREE_CODE (type) == INTEGER_TYPE
7151 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
7153 /* If we are going to ignore this result, we need only do something
7154 if there is a side-effect somewhere in the expression. If there
7155 is, short-circuit the most common cases here. Note that we must
7156 not call expand_expr with anything but const0_rtx in case this
7157 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
7159 if (ignore)
7161 if (! TREE_SIDE_EFFECTS (exp))
7162 return const0_rtx;
7164 /* Ensure we reference a volatile object even if value is ignored, but
7165 don't do this if all we are doing is taking its address. */
7166 if (TREE_THIS_VOLATILE (exp)
7167 && TREE_CODE (exp) != FUNCTION_DECL
7168 && mode != VOIDmode && mode != BLKmode
7169 && modifier != EXPAND_CONST_ADDRESS)
7171 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
7172 if (MEM_P (temp))
7173 temp = copy_to_reg (temp);
7174 return const0_rtx;
7177 if (TREE_CODE_CLASS (code) == tcc_unary
7178 || code == COMPONENT_REF || code == INDIRECT_REF)
7179 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
7180 modifier);
7182 else if (TREE_CODE_CLASS (code) == tcc_binary
7183 || TREE_CODE_CLASS (code) == tcc_comparison
7184 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
7186 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
7187 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
7188 return const0_rtx;
7190 else if (code == BIT_FIELD_REF)
7192 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
7193 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
7194 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
7195 return const0_rtx;
7198 target = 0;
7201 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
7202 target = 0;
7204 /* Use subtarget as the target for operand 0 of a binary operation. */
7205 subtarget = get_subtarget (target);
7206 original_target = target;
7208 switch (code)
7210 case LABEL_DECL:
7212 tree function = decl_function_context (exp);
7214 temp = label_rtx (exp);
7215 temp = gen_rtx_LABEL_REF (Pmode, temp);
7217 if (function != current_function_decl
7218 && function != 0)
7219 LABEL_REF_NONLOCAL_P (temp) = 1;
7221 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
7222 return temp;
7225 case SSA_NAME:
7226 return expand_expr_real_1 (SSA_NAME_VAR (exp), target, tmode, modifier,
7227 NULL);
7229 case PARM_DECL:
7230 case VAR_DECL:
7231 /* If a static var's type was incomplete when the decl was written,
7232 but the type is complete now, lay out the decl now. */
7233 if (DECL_SIZE (exp) == 0
7234 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
7235 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
7236 layout_decl (exp, 0);
7238 /* TLS emulation hook - replace __thread vars with
7239 *__emutls_get_address (&_emutls.var). */
7240 if (! targetm.have_tls
7241 && TREE_CODE (exp) == VAR_DECL
7242 && DECL_THREAD_LOCAL_P (exp))
7244 exp = build_fold_indirect_ref (emutls_var_address (exp));
7245 return expand_expr_real_1 (exp, target, tmode, modifier, NULL);
7248 /* ... fall through ... */
7250 case FUNCTION_DECL:
7251 case RESULT_DECL:
7252 decl_rtl = DECL_RTL (exp);
7253 gcc_assert (decl_rtl);
7254 decl_rtl = copy_rtx (decl_rtl);
7256 /* Ensure variable marked as used even if it doesn't go through
7257 a parser. If it hasn't been used yet, write out an external
7258 definition. */
7259 if (! TREE_USED (exp))
7261 assemble_external (exp);
7262 TREE_USED (exp) = 1;
7265 /* Show we haven't gotten RTL for this yet. */
7266 temp = 0;
7268 /* Variables inherited from containing functions should have
7269 been lowered by this point. */
7270 context = decl_function_context (exp);
7271 gcc_assert (!context
7272 || context == current_function_decl
7273 || TREE_STATIC (exp)
7274 /* ??? C++ creates functions that are not TREE_STATIC. */
7275 || TREE_CODE (exp) == FUNCTION_DECL);
7277 /* This is the case of an array whose size is to be determined
7278 from its initializer, while the initializer is still being parsed.
7279 See expand_decl. */
7281 if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
7282 temp = validize_mem (decl_rtl);
7284 /* If DECL_RTL is memory, we are in the normal case and the
7285 address is not valid, get the address into a register. */
7287 else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
7289 if (alt_rtl)
7290 *alt_rtl = decl_rtl;
7291 decl_rtl = use_anchored_address (decl_rtl);
7292 if (modifier != EXPAND_CONST_ADDRESS
7293 && modifier != EXPAND_SUM
7294 && !memory_address_p (DECL_MODE (exp), XEXP (decl_rtl, 0)))
7295 temp = replace_equiv_address (decl_rtl,
7296 copy_rtx (XEXP (decl_rtl, 0)));
7299 /* If we got something, return it. But first, set the alignment
7300 if the address is a register. */
7301 if (temp != 0)
7303 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
7304 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
7306 return temp;
7309 /* If the mode of DECL_RTL does not match that of the decl, it
7310 must be a promoted value. We return a SUBREG of the wanted mode,
7311 but mark it so that we know that it was already extended. */
7313 if (REG_P (decl_rtl)
7314 && GET_MODE (decl_rtl) != DECL_MODE (exp))
7316 enum machine_mode pmode;
7318 /* Get the signedness used for this variable. Ensure we get the
7319 same mode we got when the variable was declared. */
7320 pmode = promote_mode (type, DECL_MODE (exp), &unsignedp,
7321 (TREE_CODE (exp) == RESULT_DECL
7322 || TREE_CODE (exp) == PARM_DECL) ? 1 : 0);
7323 gcc_assert (GET_MODE (decl_rtl) == pmode);
7325 temp = gen_lowpart_SUBREG (mode, decl_rtl);
7326 SUBREG_PROMOTED_VAR_P (temp) = 1;
7327 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
7328 return temp;
7331 return decl_rtl;
7333 case INTEGER_CST:
7334 temp = immed_double_const (TREE_INT_CST_LOW (exp),
7335 TREE_INT_CST_HIGH (exp), mode);
7337 return temp;
7339 case VECTOR_CST:
7341 tree tmp = NULL_TREE;
7342 if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT
7343 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT
7344 || GET_MODE_CLASS (mode) == MODE_VECTOR_FRACT
7345 || GET_MODE_CLASS (mode) == MODE_VECTOR_UFRACT
7346 || GET_MODE_CLASS (mode) == MODE_VECTOR_ACCUM
7347 || GET_MODE_CLASS (mode) == MODE_VECTOR_UACCUM)
7348 return const_vector_from_tree (exp);
7349 if (GET_MODE_CLASS (mode) == MODE_INT)
7351 tree type_for_mode = lang_hooks.types.type_for_mode (mode, 1);
7352 if (type_for_mode)
7353 tmp = fold_unary (VIEW_CONVERT_EXPR, type_for_mode, exp);
7355 if (!tmp)
7356 tmp = build_constructor_from_list (type,
7357 TREE_VECTOR_CST_ELTS (exp));
7358 return expand_expr (tmp, ignore ? const0_rtx : target,
7359 tmode, modifier);
7362 case CONST_DECL:
7363 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
7365 case REAL_CST:
7366 /* If optimized, generate immediate CONST_DOUBLE
7367 which will be turned into memory by reload if necessary.
7369 We used to force a register so that loop.c could see it. But
7370 this does not allow gen_* patterns to perform optimizations with
7371 the constants. It also produces two insns in cases like "x = 1.0;".
7372 On most machines, floating-point constants are not permitted in
7373 many insns, so we'd end up copying it to a register in any case.
7375 Now, we do the copying in expand_binop, if appropriate. */
7376 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
7377 TYPE_MODE (TREE_TYPE (exp)));
7379 case FIXED_CST:
7380 return CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (exp),
7381 TYPE_MODE (TREE_TYPE (exp)));
7383 case COMPLEX_CST:
7384 /* Handle evaluating a complex constant in a CONCAT target. */
7385 if (original_target && GET_CODE (original_target) == CONCAT)
7387 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7388 rtx rtarg, itarg;
7390 rtarg = XEXP (original_target, 0);
7391 itarg = XEXP (original_target, 1);
7393 /* Move the real and imaginary parts separately. */
7394 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, EXPAND_NORMAL);
7395 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, EXPAND_NORMAL);
7397 if (op0 != rtarg)
7398 emit_move_insn (rtarg, op0);
7399 if (op1 != itarg)
7400 emit_move_insn (itarg, op1);
7402 return original_target;
7405 /* ... fall through ... */
7407 case STRING_CST:
7408 temp = expand_expr_constant (exp, 1, modifier);
7410 /* temp contains a constant address.
7411 On RISC machines where a constant address isn't valid,
7412 make some insns to get that address into a register. */
7413 if (modifier != EXPAND_CONST_ADDRESS
7414 && modifier != EXPAND_INITIALIZER
7415 && modifier != EXPAND_SUM
7416 && ! memory_address_p (mode, XEXP (temp, 0)))
7417 return replace_equiv_address (temp,
7418 copy_rtx (XEXP (temp, 0)));
7419 return temp;
7421 case SAVE_EXPR:
7423 tree val = TREE_OPERAND (exp, 0);
7424 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);
7426 if (!SAVE_EXPR_RESOLVED_P (exp))
7428 /* We can indeed still hit this case, typically via builtin
7429 expanders calling save_expr immediately before expanding
7430 something. Assume this means that we only have to deal
7431 with non-BLKmode values. */
7432 gcc_assert (GET_MODE (ret) != BLKmode);
7434 val = build_decl (VAR_DECL, NULL, TREE_TYPE (exp));
7435 DECL_ARTIFICIAL (val) = 1;
7436 DECL_IGNORED_P (val) = 1;
7437 TREE_OPERAND (exp, 0) = val;
7438 SAVE_EXPR_RESOLVED_P (exp) = 1;
7440 if (!CONSTANT_P (ret))
7441 ret = copy_to_reg (ret);
7442 SET_DECL_RTL (val, ret);
7445 return ret;
7448 case GOTO_EXPR:
7449 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
7450 expand_goto (TREE_OPERAND (exp, 0));
7451 else
7452 expand_computed_goto (TREE_OPERAND (exp, 0));
7453 return const0_rtx;
7455 case CONSTRUCTOR:
7456 /* If we don't need the result, just ensure we evaluate any
7457 subexpressions. */
7458 if (ignore)
7460 unsigned HOST_WIDE_INT idx;
7461 tree value;
7463 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
7464 expand_expr (value, const0_rtx, VOIDmode, EXPAND_NORMAL);
7466 return const0_rtx;
7469 return expand_constructor (exp, target, modifier, false);
7471 case MISALIGNED_INDIRECT_REF:
7472 case ALIGN_INDIRECT_REF:
7473 case INDIRECT_REF:
7475 tree exp1 = TREE_OPERAND (exp, 0);
7477 if (modifier != EXPAND_WRITE)
7479 tree t;
7481 t = fold_read_from_constant_string (exp);
7482 if (t)
7483 return expand_expr (t, target, tmode, modifier);
7486 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
7487 op0 = memory_address (mode, op0);
7489 if (code == ALIGN_INDIRECT_REF)
7491 int align = TYPE_ALIGN_UNIT (type);
7492 op0 = gen_rtx_AND (Pmode, op0, GEN_INT (-align));
7493 op0 = memory_address (mode, op0);
7496 temp = gen_rtx_MEM (mode, op0);
7498 set_mem_attributes (temp, exp, 0);
7500 /* Resolve the misalignment now, so that we don't have to remember
7501 to resolve it later. Of course, this only works for reads. */
7502 /* ??? When we get around to supporting writes, we'll have to handle
7503 this in store_expr directly. The vectorizer isn't generating
7504 those yet, however. */
7505 if (code == MISALIGNED_INDIRECT_REF)
7507 int icode;
7508 rtx reg, insn;
7510 gcc_assert (modifier == EXPAND_NORMAL
7511 || modifier == EXPAND_STACK_PARM);
7513 /* The vectorizer should have already checked the mode. */
7514 icode = optab_handler (movmisalign_optab, mode)->insn_code;
7515 gcc_assert (icode != CODE_FOR_nothing);
7517 /* We've already validated the memory, and we're creating a
7518 new pseudo destination. The predicates really can't fail. */
7519 reg = gen_reg_rtx (mode);
7521 /* Nor can the insn generator. */
7522 insn = GEN_FCN (icode) (reg, temp);
7523 emit_insn (insn);
7525 return reg;
7528 return temp;
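/* Editorial sketch (illustration only, not part of expr.c): the two address
   tricks used just above, written in plain C.  ALIGN_INDIRECT_REF rounds
   the address down with an AND against -align, and a MISALIGNED_INDIRECT_REF
   load can be modelled portably as a memcpy into an aligned temporary (the
   movmisalign pattern is the efficient form of the same thing).  */
#if 0
#include <stdint.h>
#include <string.h>

static const int *
align_down (const int *p, uintptr_t align)	/* align is a power of two */
{
  return (const int *) ((uintptr_t) p & -align);
}

static int
load_misaligned (const void *p)
{
  int tmp;
  memcpy (&tmp, p, sizeof tmp);			/* unaligned read, made safe */
  return tmp;
}
#endif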
7531 case TARGET_MEM_REF:
7533 struct mem_address addr;
7535 get_address_description (exp, &addr);
7536 op0 = addr_for_mem_ref (&addr, true);
7537 op0 = memory_address (mode, op0);
7538 temp = gen_rtx_MEM (mode, op0);
7539 set_mem_attributes (temp, TMR_ORIGINAL (exp), 0);
7541 return temp;
7543 case ARRAY_REF:
7546 tree array = TREE_OPERAND (exp, 0);
7547 tree index = TREE_OPERAND (exp, 1);
7549 /* Fold an expression like: "foo"[2].
7550 This is not done in fold so it won't happen inside &.
7551 Don't fold if this is for wide characters since it's too
7552 difficult to do correctly and this is a very rare case. */
7554 if (modifier != EXPAND_CONST_ADDRESS
7555 && modifier != EXPAND_INITIALIZER
7556 && modifier != EXPAND_MEMORY)
7558 tree t = fold_read_from_constant_string (exp);
7560 if (t)
7561 return expand_expr (t, target, tmode, modifier);
7564 /* If this is a constant index into a constant array,
7565 just get the value from the array. Handle both the cases when
7566 we have an explicit constructor and when our operand is a variable
7567 that was declared const. */
7569 if (modifier != EXPAND_CONST_ADDRESS
7570 && modifier != EXPAND_INITIALIZER
7571 && modifier != EXPAND_MEMORY
7572 && TREE_CODE (array) == CONSTRUCTOR
7573 && ! TREE_SIDE_EFFECTS (array)
7574 && TREE_CODE (index) == INTEGER_CST)
7576 unsigned HOST_WIDE_INT ix;
7577 tree field, value;
7579 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
7580 field, value)
7581 if (tree_int_cst_equal (field, index))
7583 if (!TREE_SIDE_EFFECTS (value))
7584 return expand_expr (fold (value), target, tmode, modifier);
7585 break;
7589 else if (optimize >= 1
7590 && modifier != EXPAND_CONST_ADDRESS
7591 && modifier != EXPAND_INITIALIZER
7592 && modifier != EXPAND_MEMORY
7593 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
7594 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
7595 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
7596 && targetm.binds_local_p (array))
7598 if (TREE_CODE (index) == INTEGER_CST)
7600 tree init = DECL_INITIAL (array);
7602 if (TREE_CODE (init) == CONSTRUCTOR)
7604 unsigned HOST_WIDE_INT ix;
7605 tree field, value;
7607 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
7608 field, value)
7609 if (tree_int_cst_equal (field, index))
7611 if (TREE_SIDE_EFFECTS (value))
7612 break;
7614 if (TREE_CODE (value) == CONSTRUCTOR)
7616 /* If VALUE is a CONSTRUCTOR, this
7617 optimization is only useful if
7618 this doesn't store the CONSTRUCTOR
7619 into memory. If it does, it is more
7620 efficient to just load the data from
7621 the array directly. */
7622 rtx ret = expand_constructor (value, target,
7623 modifier, true);
7624 if (ret == NULL_RTX)
7625 break;
7628 return expand_expr (fold (value), target, tmode,
7629 modifier);
7632 else if (TREE_CODE (init) == STRING_CST)
7634 tree index1 = index;
7635 tree low_bound = array_ref_low_bound (exp);
7636 index1 = fold_convert (sizetype, TREE_OPERAND (exp, 1));
7638 /* Optimize the special-case of a zero lower bound.
7640 We convert the low_bound to sizetype to avoid some problems
7641 with constant folding. (E.g. suppose the lower bound is 1,
7642 and its mode is QI. Without the conversion, (ARRAY
7643 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
7644 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
7646 if (! integer_zerop (low_bound))
7647 index1 = size_diffop (index1, fold_convert (sizetype,
7648 low_bound));
7650 if (0 > compare_tree_int (index1,
7651 TREE_STRING_LENGTH (init)))
7653 tree type = TREE_TYPE (TREE_TYPE (init));
7654 enum machine_mode mode = TYPE_MODE (type);
7656 if (GET_MODE_CLASS (mode) == MODE_INT
7657 && GET_MODE_SIZE (mode) == 1)
7658 return gen_int_mode (TREE_STRING_POINTER (init)
7659 [TREE_INT_CST_LOW (index1)],
7660 mode);
7666 goto normal_inner_ref;
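/* Editorial sketch (illustration only, not part of expr.c): the "Oops" in
   the comment above, worked out.  If the lower bound keeps its narrow
   unsigned type, negating it during reassociation wraps to 255; doing the
   arithmetic in sizetype (a wide type) keeps the -1.  */
#if 0
#include <stdio.h>

int
main (void)
{
  unsigned char low = 1;
  unsigned char narrow_neg = (unsigned char) -low;	/* wraps to 255 */
  long wide_neg = -(long) low;				/* stays -1 */
  printf ("%d %ld\n", (int) narrow_neg, wide_neg);	/* prints: 255 -1 */
  return 0;
}
#endif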
7668 case COMPONENT_REF:
7669 /* If the operand is a CONSTRUCTOR, we can just extract the
7670 appropriate field if it is present. */
7671 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
7673 unsigned HOST_WIDE_INT idx;
7674 tree field, value;
7676 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
7677 idx, field, value)
7678 if (field == TREE_OPERAND (exp, 1)
7679 /* We can normally use the value of the field in the
7680 CONSTRUCTOR. However, if this is a bitfield in
7681 an integral mode that we can fit in a HOST_WIDE_INT,
7682 we must mask only the number of bits in the bitfield,
7683 since this is done implicitly by the constructor. If
7684 the bitfield does not meet either of those conditions,
7685 we can't do this optimization. */
7686 && (! DECL_BIT_FIELD (field)
7687 || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
7688 && (GET_MODE_BITSIZE (DECL_MODE (field))
7689 <= HOST_BITS_PER_WIDE_INT))))
7691 if (DECL_BIT_FIELD (field)
7692 && modifier == EXPAND_STACK_PARM)
7693 target = 0;
7694 op0 = expand_expr (value, target, tmode, modifier);
7695 if (DECL_BIT_FIELD (field))
7697 HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
7698 enum machine_mode imode = TYPE_MODE (TREE_TYPE (field));
7700 if (TYPE_UNSIGNED (TREE_TYPE (field)))
7702 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
7703 op0 = expand_and (imode, op0, op1, target);
7705 else
7707 tree count
7708 = build_int_cst (NULL_TREE,
7709 GET_MODE_BITSIZE (imode) - bitsize);
7711 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
7712 target, 0);
7713 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
7714 target, 0);
7718 return op0;
7721 goto normal_inner_ref;
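/* Editorial sketch (illustration only, not part of expr.c): the bit-field
   fixups applied above, in standalone form.  An unsigned field is masked
   with ((1 << bitsize) - 1); a signed field is sign-extended by a left
   shift followed by an arithmetic right shift.  The sketch assumes a
   32-bit int and two's-complement arithmetic shifts, which is what the
   generated RTL shifts provide by construction.  */
#if 0
static unsigned int
mask_unsigned_field (unsigned int v, int bits)	/* 0 < bits < 32 */
{
  return v & ((1u << bits) - 1);
}

static int
sign_extend_field (int v, int bits)		/* 0 < bits < 32 */
{
  int count = 32 - bits;
  return (v << count) >> count;
}
#endif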
7723 case BIT_FIELD_REF:
7724 case ARRAY_RANGE_REF:
7725 normal_inner_ref:
7727 enum machine_mode mode1, mode2;
7728 HOST_WIDE_INT bitsize, bitpos;
7729 tree offset;
7730 int volatilep = 0, must_force_mem;
7731 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7732 &mode1, &unsignedp, &volatilep, true);
7733 rtx orig_op0, memloc;
7735 /* If we got back the original object, something is wrong. Perhaps
7736 we are evaluating an expression too early. In any event, don't
7737 infinitely recurse. */
7738 gcc_assert (tem != exp);
7740 /* If TEM's type is a union of variable size, pass TARGET to the inner
7741 computation, since it will need a temporary and TARGET is known
7742 to have to do. This occurs in unchecked conversion in Ada. */
7743 orig_op0 = op0
7744 = expand_expr (tem,
7745 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7746 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7747 != INTEGER_CST)
7748 && modifier != EXPAND_STACK_PARM
7749 ? target : NULL_RTX),
7750 VOIDmode,
7751 (modifier == EXPAND_INITIALIZER
7752 || modifier == EXPAND_CONST_ADDRESS
7753 || modifier == EXPAND_STACK_PARM)
7754 ? modifier : EXPAND_NORMAL);
7756 mode2
7757 = CONSTANT_P (op0) ? TYPE_MODE (TREE_TYPE (tem)) : GET_MODE (op0);
7759 /* If we have either an offset, a BLKmode result, or a reference
7760 outside the underlying object, we must force it to memory.
7761 Such a case can occur in Ada if we have unchecked conversion
7762 of an expression from a scalar type to an aggregate type or
7763 for an ARRAY_RANGE_REF whose type is BLKmode, or if we were
7764 passed a partially uninitialized object or a view-conversion
7765 to a larger size. */
7766 must_force_mem = (offset
7767 || mode1 == BLKmode
7768 || bitpos + bitsize > GET_MODE_BITSIZE (mode2));
7770 /* If this is a constant, put it in a register if it is a legitimate
7771 constant and we don't need a memory reference. */
7772 if (CONSTANT_P (op0)
7773 && mode2 != BLKmode
7774 && LEGITIMATE_CONSTANT_P (op0)
7775 && !must_force_mem)
7776 op0 = force_reg (mode2, op0);
7778 /* Otherwise, if this is a constant, try to force it to the constant
7779 pool. Note that back-ends, e.g. MIPS, may refuse to do so if it
7780 is a legitimate constant. */
7781 else if (CONSTANT_P (op0) && (memloc = force_const_mem (mode2, op0)))
7782 op0 = validize_mem (memloc);
7784 /* Otherwise, if this is a constant or the object is not in memory
7785 and need be, put it there. */
7786 else if (CONSTANT_P (op0) || (!MEM_P (op0) && must_force_mem))
7788 tree nt = build_qualified_type (TREE_TYPE (tem),
7789 (TYPE_QUALS (TREE_TYPE (tem))
7790 | TYPE_QUAL_CONST));
7791 memloc = assign_temp (nt, 1, 1, 1);
7792 emit_move_insn (memloc, op0);
7793 op0 = memloc;
7796 if (offset)
7798 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
7799 EXPAND_SUM);
7801 gcc_assert (MEM_P (op0));
7803 #ifdef POINTERS_EXTEND_UNSIGNED
7804 if (GET_MODE (offset_rtx) != Pmode)
7805 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
7806 #else
7807 if (GET_MODE (offset_rtx) != ptr_mode)
7808 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7809 #endif
7811 if (GET_MODE (op0) == BLKmode
7812 /* A constant address in OP0 can have VOIDmode; we must
7813 not try to call force_reg in that case. */
7814 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7815 && bitsize != 0
7816 && (bitpos % bitsize) == 0
7817 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7818 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7820 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7821 bitpos = 0;
7824 op0 = offset_address (op0, offset_rtx,
7825 highest_pow2_factor (offset));
7828 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7829 record its alignment as BIGGEST_ALIGNMENT. */
7830 if (MEM_P (op0) && bitpos == 0 && offset != 0
7831 && is_aligning_offset (offset, tem))
7832 set_mem_align (op0, BIGGEST_ALIGNMENT);
7834 /* Don't forget about volatility even if this is a bitfield. */
7835 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
7837 if (op0 == orig_op0)
7838 op0 = copy_rtx (op0);
7840 MEM_VOLATILE_P (op0) = 1;
7843 /* The following code doesn't handle CONCAT.
7844 Assume only bitpos == 0 can be used for CONCAT, due to
7845 one-element arrays having the same mode as their element. */
7846 if (GET_CODE (op0) == CONCAT)
7848 gcc_assert (bitpos == 0
7849 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)));
7850 return op0;
7853 /* In cases where an aligned union has an unaligned object
7854 as a field, we might be extracting a BLKmode value from
7855 an integer-mode (e.g., SImode) object. Handle this case
7856 by doing the extract into an object as wide as the field
7857 (which we know to be the width of a basic mode), then
7858 storing into memory, and changing the mode to BLKmode. */
7859 if (mode1 == VOIDmode
7860 || REG_P (op0) || GET_CODE (op0) == SUBREG
7861 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7862 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7863 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7864 && modifier != EXPAND_CONST_ADDRESS
7865 && modifier != EXPAND_INITIALIZER)
7866 /* If the field isn't aligned enough to fetch as a memref,
7867 fetch it as a bit field. */
7868 || (mode1 != BLKmode
7869 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
7870 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
7871 || (MEM_P (op0)
7872 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
7873 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
7874 && ((modifier == EXPAND_CONST_ADDRESS
7875 || modifier == EXPAND_INITIALIZER)
7876 ? STRICT_ALIGNMENT
7877 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
7878 || (bitpos % BITS_PER_UNIT != 0)))
7879 /* If the type and the field are a constant size and the
7880 size of the type isn't the same size as the bitfield,
7881 we must use bitfield operations. */
7882 || (bitsize >= 0
7883 && TYPE_SIZE (TREE_TYPE (exp))
7884 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
7885 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7886 bitsize)))
7888 enum machine_mode ext_mode = mode;
7890 if (ext_mode == BLKmode
7891 && ! (target != 0 && MEM_P (op0)
7892 && MEM_P (target)
7893 && bitpos % BITS_PER_UNIT == 0))
7894 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7896 if (ext_mode == BLKmode)
7898 if (target == 0)
7899 target = assign_temp (type, 0, 1, 1);
7901 if (bitsize == 0)
7902 return target;
7904 /* In this case, BITPOS must start at a byte boundary and
7905 TARGET, if specified, must be a MEM. */
7906 gcc_assert (MEM_P (op0)
7907 && (!target || MEM_P (target))
7908 && !(bitpos % BITS_PER_UNIT));
7910 emit_block_move (target,
7911 adjust_address (op0, VOIDmode,
7912 bitpos / BITS_PER_UNIT),
7913 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7914 / BITS_PER_UNIT),
7915 (modifier == EXPAND_STACK_PARM
7916 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7918 return target;
7921 op0 = validize_mem (op0);
7923 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
7924 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7926 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7927 (modifier == EXPAND_STACK_PARM
7928 ? NULL_RTX : target),
7929 ext_mode, ext_mode);
7931 /* If the result is a record type and BITSIZE is narrower than
7932 the mode of OP0, an integral mode, and this is a big endian
7933 machine, we must put the field into the high-order bits. */
7934 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7935 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7936 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7937 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7938 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7939 - bitsize),
7940 op0, 1);
7942 /* If the result type is BLKmode, store the data into a temporary
7943 of the appropriate type, but with the mode corresponding to the
7944 mode for the data we have (op0's mode). It's tempting to make
7945 this a constant type, since we know it's only being stored once,
7946 but that can cause problems if we are taking the address of this
7947 COMPONENT_REF because the MEM of any reference via that address
7948 will have flags corresponding to the type, which will not
7949 necessarily be constant. */
7950 if (mode == BLKmode)
7952 HOST_WIDE_INT size = GET_MODE_BITSIZE (ext_mode);
7953 rtx new_rtx;
7955 /* If the reference doesn't use the alias set of its type,
7956 we cannot create the temporary using that type. */
7957 if (component_uses_parent_alias_set (exp))
7959 new_rtx = assign_stack_local (ext_mode, size, 0);
7960 set_mem_alias_set (new_rtx, get_alias_set (exp));
7962 else
7963 new_rtx = assign_stack_temp_for_type (ext_mode, size, 0, type);
7965 emit_move_insn (new_rtx, op0);
7966 op0 = copy_rtx (new_rtx);
7967 PUT_MODE (op0, BLKmode);
7968 set_mem_attributes (op0, exp, 1);
7971 return op0;
7974 /* If the result is BLKmode, use that to access the object
7975 now as well. */
7976 if (mode == BLKmode)
7977 mode1 = BLKmode;
7979 /* Get a reference to just this component. */
7980 if (modifier == EXPAND_CONST_ADDRESS
7981 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7982 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7983 else
7984 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7986 if (op0 == orig_op0)
7987 op0 = copy_rtx (op0);
7989 set_mem_attributes (op0, exp, 0);
7990 if (REG_P (XEXP (op0, 0)))
7991 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7993 MEM_VOLATILE_P (op0) |= volatilep;
7994 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7995 || modifier == EXPAND_CONST_ADDRESS
7996 || modifier == EXPAND_INITIALIZER)
7997 return op0;
7998 else if (target == 0)
7999 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8001 convert_move (target, op0, unsignedp);
8002 return target;
8005 case OBJ_TYPE_REF:
8006 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
8008 case CALL_EXPR:
8009 /* All valid uses of __builtin_va_arg_pack () are removed during
8010 inlining. */
8011 if (CALL_EXPR_VA_ARG_PACK (exp))
8012 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
8014 tree fndecl = get_callee_fndecl (exp), attr;
8016 if (fndecl
8017 && (attr = lookup_attribute ("error",
8018 DECL_ATTRIBUTES (fndecl))) != NULL)
8019 error ("%Kcall to %qs declared with attribute error: %s",
8020 exp, lang_hooks.decl_printable_name (fndecl, 1),
8021 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
8022 if (fndecl
8023 && (attr = lookup_attribute ("warning",
8024 DECL_ATTRIBUTES (fndecl))) != NULL)
8025 warning_at (tree_nonartificial_location (exp),
8026 0, "%Kcall to %qs declared with attribute warning: %s",
8027 exp, lang_hooks.decl_printable_name (fndecl, 1),
8028 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
8030 /* Check for a built-in function. */
8031 if (fndecl && DECL_BUILT_IN (fndecl))
8033 gcc_assert (DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_FRONTEND);
8034 return expand_builtin (exp, target, subtarget, tmode, ignore);
8037 return expand_call (exp, target, ignore);
8039 case PAREN_EXPR:
8040 CASE_CONVERT:
8041 if (TREE_OPERAND (exp, 0) == error_mark_node)
8042 return const0_rtx;
8044 if (TREE_CODE (type) == UNION_TYPE)
8046 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
8048 /* If both input and output are BLKmode, this conversion isn't doing
8049 anything except possibly changing memory attribute. */
8050 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
8052 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
8053 modifier);
8055 result = copy_rtx (result);
8056 set_mem_attributes (result, exp, 0);
8057 return result;
8060 if (target == 0)
8062 if (TYPE_MODE (type) != BLKmode)
8063 target = gen_reg_rtx (TYPE_MODE (type));
8064 else
8065 target = assign_temp (type, 0, 1, 1);
8068 if (MEM_P (target))
8069 /* Store data into beginning of memory target. */
8070 store_expr (TREE_OPERAND (exp, 0),
8071 adjust_address (target, TYPE_MODE (valtype), 0),
8072 modifier == EXPAND_STACK_PARM,
8073 false);
8075 else
8077 gcc_assert (REG_P (target));
8079 /* Store this field into a union of the proper type. */
8080 store_field (target,
8081 MIN ((int_size_in_bytes (TREE_TYPE
8082 (TREE_OPERAND (exp, 0)))
8083 * BITS_PER_UNIT),
8084 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
8085 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
8086 type, 0, false);
8089 /* Return the entire union. */
8090 return target;
8093 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8095 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
8096 modifier);
8098 /* If the signedness of the conversion differs and OP0 is
8099 a promoted SUBREG, clear that indication since we now
8100 have to do the proper extension. */
8101 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
8102 && GET_CODE (op0) == SUBREG)
8103 SUBREG_PROMOTED_VAR_P (op0) = 0;
8105 return REDUCE_BIT_FIELD (op0);
8108 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode,
8109 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
8110 if (GET_MODE (op0) == mode)
8113 /* If OP0 is a constant, just convert it into the proper mode. */
8114 else if (CONSTANT_P (op0))
8116 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8117 enum machine_mode inner_mode = TYPE_MODE (inner_type);
8119 if (modifier == EXPAND_INITIALIZER)
8120 op0 = simplify_gen_subreg (mode, op0, inner_mode,
8121 subreg_lowpart_offset (mode,
8122 inner_mode));
8123 else
8124 op0 = convert_modes (mode, inner_mode, op0,
8125 TYPE_UNSIGNED (inner_type));
8128 else if (modifier == EXPAND_INITIALIZER)
8129 op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
8131 else if (target == 0)
8132 op0 = convert_to_mode (mode, op0,
8133 TYPE_UNSIGNED (TREE_TYPE
8134 (TREE_OPERAND (exp, 0))));
8135 else
8137 convert_move (target, op0,
8138 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8139 op0 = target;
8142 return REDUCE_BIT_FIELD (op0);
8144 case VIEW_CONVERT_EXPR:
8145 op0 = NULL_RTX;
8147 /* If we are converting to BLKmode, try to avoid an intermediate
8148 temporary by fetching an inner memory reference. */
8149 if (mode == BLKmode
8150 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
8151 && TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != BLKmode
8152 && handled_component_p (TREE_OPERAND (exp, 0)))
8154 enum machine_mode mode1;
8155 HOST_WIDE_INT bitsize, bitpos;
8156 tree offset;
8157 int unsignedp;
8158 int volatilep = 0;
8159 tree tem
8160 = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, &bitpos,
8161 &offset, &mode1, &unsignedp, &volatilep,
8162 true);
8163 rtx orig_op0;
8165 /* ??? We should work harder and deal with non-zero offsets. */
8166 if (!offset
8167 && (bitpos % BITS_PER_UNIT) == 0
8168 && bitsize >= 0
8169 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) == 0)
8171 /* See the normal_inner_ref case for the rationale. */
8172 orig_op0
8173 = expand_expr (tem,
8174 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
8175 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
8176 != INTEGER_CST)
8177 && modifier != EXPAND_STACK_PARM
8178 ? target : NULL_RTX),
8179 VOIDmode,
8180 (modifier == EXPAND_INITIALIZER
8181 || modifier == EXPAND_CONST_ADDRESS
8182 || modifier == EXPAND_STACK_PARM)
8183 ? modifier : EXPAND_NORMAL);
8185 if (MEM_P (orig_op0))
8187 op0 = orig_op0;
8189 /* Get a reference to just this component. */
8190 if (modifier == EXPAND_CONST_ADDRESS
8191 || modifier == EXPAND_SUM
8192 || modifier == EXPAND_INITIALIZER)
8193 op0 = adjust_address_nv (op0, mode, bitpos / BITS_PER_UNIT);
8194 else
8195 op0 = adjust_address (op0, mode, bitpos / BITS_PER_UNIT);
8197 if (op0 == orig_op0)
8198 op0 = copy_rtx (op0);
8200 set_mem_attributes (op0, TREE_OPERAND (exp, 0), 0);
8201 if (REG_P (XEXP (op0, 0)))
8202 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
8204 MEM_VOLATILE_P (op0) |= volatilep;
8209 if (!op0)
8210 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
8212 /* If the input and output modes are both the same, we are done. */
8213 if (mode == GET_MODE (op0))
8215 /* If neither mode is BLKmode, and both modes are the same size
8216 then we can use gen_lowpart. */
8217 else if (mode != BLKmode && GET_MODE (op0) != BLKmode
8218 && GET_MODE_SIZE (mode) == GET_MODE_SIZE (GET_MODE (op0)))
8220 if (GET_CODE (op0) == SUBREG)
8221 op0 = force_reg (GET_MODE (op0), op0);
8222 op0 = gen_lowpart (mode, op0);
8224 /* If both modes are integral, then we can convert from one to the
8225 other. */
8226 else if (SCALAR_INT_MODE_P (GET_MODE (op0)) && SCALAR_INT_MODE_P (mode))
8227 op0 = convert_modes (mode, GET_MODE (op0), op0,
8228 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8229 /* As a last resort, spill op0 to memory, and reload it in a
8230 different mode. */
8231 else if (!MEM_P (op0))
8233 /* If the operand is not a MEM, force it into memory. Since we
8234 are going to be changing the mode of the MEM, don't call
8235 force_const_mem for constants because we don't allow pool
8236 constants to change mode. */
8237 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8239 gcc_assert (!TREE_ADDRESSABLE (exp));
8241 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
8242 target
8243 = assign_stack_temp_for_type
8244 (TYPE_MODE (inner_type),
8245 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
8247 emit_move_insn (target, op0);
8248 op0 = target;
8251 /* At this point, OP0 is in the correct mode. If the output type is
8252 such that the operand is known to be aligned, indicate that it is.
8253 Otherwise, we need only be concerned about alignment for non-BLKmode
8254 results. */
8255 if (MEM_P (op0))
8257 op0 = copy_rtx (op0);
8259 if (TYPE_ALIGN_OK (type))
8260 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
8261 else if (STRICT_ALIGNMENT
8262 && mode != BLKmode
8263 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode))
8265 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8266 HOST_WIDE_INT temp_size
8267 = MAX (int_size_in_bytes (inner_type),
8268 (HOST_WIDE_INT) GET_MODE_SIZE (mode));
8269 rtx new_rtx
8270 = assign_stack_temp_for_type (mode, temp_size, 0, type);
8271 rtx new_with_op0_mode
8272 = adjust_address (new_rtx, GET_MODE (op0), 0);
8274 gcc_assert (!TREE_ADDRESSABLE (exp));
8276 if (GET_MODE (op0) == BLKmode)
8277 emit_block_move (new_with_op0_mode, op0,
8278 GEN_INT (GET_MODE_SIZE (mode)),
8279 (modifier == EXPAND_STACK_PARM
8280 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
8281 else
8282 emit_move_insn (new_with_op0_mode, op0);
8284 op0 = new_rtx;
8287 op0 = adjust_address (op0, mode, 0);
8290 return op0;
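/* Editorial sketch (illustration only, not part of expr.c): the last-resort
   VIEW_CONVERT_EXPR strategy above -- store the value in its old mode and
   reload it in the new one -- is the classic reinterpretation through
   memory, e.g.:  */
#if 0
#include <stdint.h>
#include <string.h>

static uint32_t
float_bits (float f)
{
  uint32_t u;
  memcpy (&u, &f, sizeof u);	/* spill in one mode, reload in another */
  return u;
}
#endif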
8292 case POINTER_PLUS_EXPR:
8293 /* Even though the sizetype mode and the pointer's mode can be different,
8294 expand is able to handle this correctly and get the correct result out
8295 of the PLUS_EXPR code. */
8296 /* Make sure to sign-extend the sizetype offset in a POINTER_PLUS_EXPR
8297 if sizetype precision is smaller than pointer precision. */
8298 if (TYPE_PRECISION (sizetype) < TYPE_PRECISION (type))
8299 exp = build2 (PLUS_EXPR, type,
8300 TREE_OPERAND (exp, 0),
8301 fold_convert (type,
8302 fold_convert (ssizetype,
8303 TREE_OPERAND (exp, 1))));
8304 case PLUS_EXPR:
8306 /* Check if this is a case for multiplication and addition. */
8307 if ((TREE_CODE (type) == INTEGER_TYPE
8308 || TREE_CODE (type) == FIXED_POINT_TYPE)
8309 && TREE_CODE (TREE_OPERAND (exp, 0)) == MULT_EXPR)
8311 tree subsubexp0, subsubexp1;
8312 enum tree_code code0, code1, this_code;
8314 subexp0 = TREE_OPERAND (exp, 0);
8315 subsubexp0 = TREE_OPERAND (subexp0, 0);
8316 subsubexp1 = TREE_OPERAND (subexp0, 1);
8317 code0 = TREE_CODE (subsubexp0);
8318 code1 = TREE_CODE (subsubexp1);
8319 this_code = TREE_CODE (type) == INTEGER_TYPE ? NOP_EXPR
8320 : FIXED_CONVERT_EXPR;
8321 if (code0 == this_code && code1 == this_code
8322 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
8323 < TYPE_PRECISION (TREE_TYPE (subsubexp0)))
8324 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
8325 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp1, 0))))
8326 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
8327 == TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subsubexp1, 0)))))
8329 tree op0type = TREE_TYPE (TREE_OPERAND (subsubexp0, 0));
8330 enum machine_mode innermode = TYPE_MODE (op0type);
8331 bool zextend_p = TYPE_UNSIGNED (op0type);
8332 bool sat_p = TYPE_SATURATING (TREE_TYPE (subsubexp0));
8333 if (sat_p == 0)
8334 this_optab = zextend_p ? umadd_widen_optab : smadd_widen_optab;
8335 else
8336 this_optab = zextend_p ? usmadd_widen_optab
8337 : ssmadd_widen_optab;
8338 if (mode == GET_MODE_2XWIDER_MODE (innermode)
8339 && (optab_handler (this_optab, mode)->insn_code
8340 != CODE_FOR_nothing))
8342 expand_operands (TREE_OPERAND (subsubexp0, 0),
8343 TREE_OPERAND (subsubexp1, 0),
8344 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8345 op2 = expand_expr (TREE_OPERAND (exp, 1), subtarget,
8346 VOIDmode, EXPAND_NORMAL);
8347 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
8348 target, unsignedp);
8349 gcc_assert (temp);
8350 return REDUCE_BIT_FIELD (temp);
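/* Editorial sketch (illustration only, not part of expr.c): the source
   shape matched above is a product of two operands widened from a type
   half as wide as the result, added to an accumulator of the wide type.
   When the target provides the corresponding (u)madd_widen pattern, the
   whole expression becomes one multiply-accumulate.  */
#if 0
#include <stdint.h>

static uint64_t
widening_umadd (uint32_t a, uint32_t b, uint64_t acc)
{
  return (uint64_t) a * (uint64_t) b + acc;	/* umadd_widen shape */
}
#endif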
8355 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
8356 something else, make sure we add the register to the constant and
8357 then to the other thing. This case can occur during strength
8358 reduction and doing it this way will produce better code if the
8359 frame pointer or argument pointer is eliminated.
8361 fold-const.c will ensure that the constant is always in the inner
8362 PLUS_EXPR, so the only case we need to do anything about is if
8363 sp, ap, or fp is our second argument, in which case we must swap
8364 the innermost first argument and our second argument. */
8366 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
8367 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
8368 && TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL
8369 && (DECL_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
8370 || DECL_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
8371 || DECL_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
8373 tree t = TREE_OPERAND (exp, 1);
8375 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
8376 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
8379 /* If the result is to be ptr_mode and we are adding an integer to
8380 something, we might be forming a constant. So try to use
8381 plus_constant. If it produces a sum and we can't accept it,
8382 use force_operand. This allows P = &ARR[const] to generate
8383 efficient code on machines where a SYMBOL_REF is not a valid
8384 address.
8386 If this is an EXPAND_SUM call, always return the sum. */
8387 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
8388 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
8390 if (modifier == EXPAND_STACK_PARM)
8391 target = 0;
8392 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
8393 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
8394 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
8396 rtx constant_part;
8398 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
8399 EXPAND_SUM);
8400 /* Use immed_double_const to ensure that the constant is
8401 truncated according to the mode of OP1, then sign extended
8402 to a HOST_WIDE_INT. Using the constant directly can result
8403 in non-canonical RTL in a 64x32 cross compile. */
8404 constant_part
8405 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
8406 (HOST_WIDE_INT) 0,
8407 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
8408 op1 = plus_constant (op1, INTVAL (constant_part));
8409 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8410 op1 = force_operand (op1, target);
8411 return REDUCE_BIT_FIELD (op1);
8414 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8415 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
8416 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
8418 rtx constant_part;
8420 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8421 (modifier == EXPAND_INITIALIZER
8422 ? EXPAND_INITIALIZER : EXPAND_SUM));
8423 if (! CONSTANT_P (op0))
8425 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
8426 VOIDmode, modifier);
8427 /* Return a PLUS if modifier says it's OK. */
8428 if (modifier == EXPAND_SUM
8429 || modifier == EXPAND_INITIALIZER)
8430 return simplify_gen_binary (PLUS, mode, op0, op1);
8431 goto binop2;
8433 /* Use immed_double_const to ensure that the constant is
8434 truncated according to the mode of OP1, then sign extended
8435 to a HOST_WIDE_INT. Using the constant directly can result
8436 in non-canonical RTL in a 64x32 cross compile. */
8437 constant_part
8438 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
8439 (HOST_WIDE_INT) 0,
8440 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
8441 op0 = plus_constant (op0, INTVAL (constant_part));
8442 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8443 op0 = force_operand (op0, target);
8444 return REDUCE_BIT_FIELD (op0);
8448 /* No sense saving up arithmetic to be done
8449 if it's all in the wrong mode to form part of an address.
8450 And force_operand won't know whether to sign-extend or
8451 zero-extend. */
8452 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8453 || mode != ptr_mode)
8455 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8456 subtarget, &op0, &op1, EXPAND_NORMAL);
8457 if (op0 == const0_rtx)
8458 return op1;
8459 if (op1 == const0_rtx)
8460 return op0;
8461 goto binop2;
8464 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8465 subtarget, &op0, &op1, modifier);
8466 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
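/* Editorial sketch (illustration only, not part of expr.c): what the
   plus_constant path above buys.  The initializer below must be a single
   link-time constant (the array's symbol plus a byte offset), which is
   exactly the SYMBOL_REF-plus-offset form the EXPAND_SUM/EXPAND_INITIALIZER
   handling produces instead of a run-time addition.  */
#if 0
static int arr[10];
static int *p = &arr[5];	/* symbol + 5 * sizeof (int), folded at expand time */
#endif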
8468 case MINUS_EXPR:
8469 /* Check if this is a case for multiplication and subtraction. */
8470 if ((TREE_CODE (type) == INTEGER_TYPE
8471 || TREE_CODE (type) == FIXED_POINT_TYPE)
8472 && TREE_CODE (TREE_OPERAND (exp, 1)) == MULT_EXPR)
8474 tree subsubexp0, subsubexp1;
8475 enum tree_code code0, code1, this_code;
8477 subexp1 = TREE_OPERAND (exp, 1);
8478 subsubexp0 = TREE_OPERAND (subexp1, 0);
8479 subsubexp1 = TREE_OPERAND (subexp1, 1);
8480 code0 = TREE_CODE (subsubexp0);
8481 code1 = TREE_CODE (subsubexp1);
8482 this_code = TREE_CODE (type) == INTEGER_TYPE ? NOP_EXPR
8483 : FIXED_CONVERT_EXPR;
8484 if (code0 == this_code && code1 == this_code
8485 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
8486 < TYPE_PRECISION (TREE_TYPE (subsubexp0)))
8487 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
8488 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp1, 0))))
8489 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
8490 == TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subsubexp1, 0)))))
8492 tree op0type = TREE_TYPE (TREE_OPERAND (subsubexp0, 0));
8493 enum machine_mode innermode = TYPE_MODE (op0type);
8494 bool zextend_p = TYPE_UNSIGNED (op0type);
8495 bool sat_p = TYPE_SATURATING (TREE_TYPE (subsubexp0));
8496 if (sat_p == 0)
8497 this_optab = zextend_p ? umsub_widen_optab : smsub_widen_optab;
8498 else
8499 this_optab = zextend_p ? usmsub_widen_optab
8500 : ssmsub_widen_optab;
8501 if (mode == GET_MODE_2XWIDER_MODE (innermode)
8502 && (optab_handler (this_optab, mode)->insn_code
8503 != CODE_FOR_nothing))
8505 expand_operands (TREE_OPERAND (subsubexp0, 0),
8506 TREE_OPERAND (subsubexp1, 0),
8507 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8508 op2 = expand_expr (TREE_OPERAND (exp, 0), subtarget,
8509 VOIDmode, EXPAND_NORMAL);
8510 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
8511 target, unsignedp);
8512 gcc_assert (temp);
8513 return REDUCE_BIT_FIELD (temp);
8518 /* For initializers, we are allowed to return a MINUS of two
8519 symbolic constants. Here we handle all cases when both operands
8520 are constant. */
8521 /* Handle difference of two symbolic constants,
8522 for the sake of an initializer. */
8523 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8524 && really_constant_p (TREE_OPERAND (exp, 0))
8525 && really_constant_p (TREE_OPERAND (exp, 1)))
8527 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8528 NULL_RTX, &op0, &op1, modifier);
8530 /* If the last operand is a CONST_INT, use plus_constant of
8531 the negated constant. Else make the MINUS. */
8532 if (GET_CODE (op1) == CONST_INT)
8533 return REDUCE_BIT_FIELD (plus_constant (op0, - INTVAL (op1)));
8534 else
8535 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
8538 /* No sense saving up arithmetic to be done
8539 if it's all in the wrong mode to form part of an address.
8540 And force_operand won't know whether to sign-extend or
8541 zero-extend. */
8542 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8543 || mode != ptr_mode)
8544 goto binop;
8546 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8547 subtarget, &op0, &op1, modifier);
8549 /* Convert A - const to A + (-const). */
8550 if (GET_CODE (op1) == CONST_INT)
8552 op1 = negate_rtx (mode, op1);
8553 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8556 goto binop2;
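/* Editorial sketch (illustration only, not part of expr.c): the rewrite
   just above at the source level -- a subtraction of a constant is emitted
   as an addition of its negation, so the PLUS-based address and combine
   machinery can see it.  */
#if 0
static long
sub_const (long x)
{
  return x + (-5L);	/* same value as x - 5 */
}
#endif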
8558 case MULT_EXPR:
8559 /* If this is a fixed-point operation, then we cannot use the code
8560 below because "expand_mult" doesn't support sat/no-sat fixed-point
8561 multiplications. */
8562 if (ALL_FIXED_POINT_MODE_P (mode))
8563 goto binop;
8565 /* If first operand is constant, swap them.
8566 Thus the following special case checks need only
8567 check the second operand. */
8568 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
8570 tree t1 = TREE_OPERAND (exp, 0);
8571 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
8572 TREE_OPERAND (exp, 1) = t1;
8575 /* Attempt to return something suitable for generating an
8576 indexed address, for machines that support that. */
8578 if (modifier == EXPAND_SUM && mode == ptr_mode
8579 && host_integerp (TREE_OPERAND (exp, 1), 0))
8581 tree exp1 = TREE_OPERAND (exp, 1);
8583 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8584 EXPAND_SUM);
8586 if (!REG_P (op0))
8587 op0 = force_operand (op0, NULL_RTX);
8588 if (!REG_P (op0))
8589 op0 = copy_to_mode_reg (mode, op0);
8591 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
8592 gen_int_mode (tree_low_cst (exp1, 0),
8593 TYPE_MODE (TREE_TYPE (exp1)))));
8596 if (modifier == EXPAND_STACK_PARM)
8597 target = 0;
8599 /* Check for multiplying things that have been extended
8600 from a narrower type. If this machine supports multiplying
8601 in that narrower type with a result in the desired type,
8602 do it that way, and avoid the explicit type-conversion. */
8604 subexp0 = TREE_OPERAND (exp, 0);
8605 subexp1 = TREE_OPERAND (exp, 1);
8606 /* First, check if we have a multiplication of one signed and one
8607 unsigned operand. */
8608 if (TREE_CODE (subexp0) == NOP_EXPR
8609 && TREE_CODE (subexp1) == NOP_EXPR
8610 && TREE_CODE (type) == INTEGER_TYPE
8611 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
8612 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
8613 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
8614 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp1, 0))))
8615 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
8616 != TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp1, 0)))))
8618 enum machine_mode innermode
8619 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (subexp0, 0)));
8620 this_optab = usmul_widen_optab;
8621 if (mode == GET_MODE_WIDER_MODE (innermode))
8623 if (optab_handler (this_optab, mode)->insn_code != CODE_FOR_nothing)
8625 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp0, 0))))
8626 expand_operands (TREE_OPERAND (subexp0, 0),
8627 TREE_OPERAND (subexp1, 0),
8628 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8629 else
8630 expand_operands (TREE_OPERAND (subexp0, 0),
8631 TREE_OPERAND (subexp1, 0),
8632 NULL_RTX, &op1, &op0, EXPAND_NORMAL);
8634 goto binop3;
8638 /* Check for a multiplication with matching signedness. */
8639 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
8640 && TREE_CODE (type) == INTEGER_TYPE
8641 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8642 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
8643 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8644 && int_fits_type_p (TREE_OPERAND (exp, 1),
8645 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8646 /* Don't use a widening multiply if a shift will do. */
8647 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
8648 > HOST_BITS_PER_WIDE_INT)
8649 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
8651 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8652 && (TYPE_PRECISION (TREE_TYPE
8653 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8654 == TYPE_PRECISION (TREE_TYPE
8655 (TREE_OPERAND
8656 (TREE_OPERAND (exp, 0), 0))))
8657 /* If both operands are extended, they must either both
8658 be zero-extended or both be sign-extended. */
8659 && (TYPE_UNSIGNED (TREE_TYPE
8660 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8661 == TYPE_UNSIGNED (TREE_TYPE
8662 (TREE_OPERAND
8663 (TREE_OPERAND (exp, 0), 0)))))))
8665 tree op0type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
8666 enum machine_mode innermode = TYPE_MODE (op0type);
8667 bool zextend_p = TYPE_UNSIGNED (op0type);
8668 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
8669 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
8671 if (mode == GET_MODE_2XWIDER_MODE (innermode))
8673 if (optab_handler (this_optab, mode)->insn_code != CODE_FOR_nothing)
8675 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8676 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8677 TREE_OPERAND (exp, 1),
8678 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8679 else
8680 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8681 TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
8682 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8683 goto binop3;
8685 else if (optab_handler (other_optab, mode)->insn_code != CODE_FOR_nothing
8686 && innermode == word_mode)
8688 rtx htem, hipart;
8689 op0 = expand_normal (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
8690 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8691 op1 = convert_modes (innermode, mode,
8692 expand_normal (TREE_OPERAND (exp, 1)),
8693 unsignedp);
8694 else
8695 op1 = expand_normal (TREE_OPERAND (TREE_OPERAND (exp, 1), 0));
8696 temp = expand_binop (mode, other_optab, op0, op1, target,
8697 unsignedp, OPTAB_LIB_WIDEN);
8698 hipart = gen_highpart (innermode, temp);
8699 htem = expand_mult_highpart_adjust (innermode, hipart,
8700 op0, op1, hipart,
8701 zextend_p);
8702 if (htem != hipart)
8703 emit_move_insn (hipart, htem);
8704 return REDUCE_BIT_FIELD (temp);
8708 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8709 subtarget, &op0, &op1, EXPAND_NORMAL);
8710 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
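/* Editorial sketch (illustration only, not part of expr.c): the widening
   multiplications recognized above.  Both operands are extensions from a
   type exactly half the width of the result, so a single umul_widen /
   smul_widen (or usmul_widen for mixed signedness) instruction can produce
   the full double-width product.  */
#if 0
#include <stdint.h>

static uint64_t
widening_umul (uint32_t a, uint32_t b)
{
  return (uint64_t) a * (uint64_t) b;	/* umul_widen shape */
}

static int64_t
widening_usmul (uint32_t a, int32_t b)
{
  return (int64_t) a * (int64_t) b;	/* usmul_widen shape */
}
#endif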
8712 case TRUNC_DIV_EXPR:
8713 case FLOOR_DIV_EXPR:
8714 case CEIL_DIV_EXPR:
8715 case ROUND_DIV_EXPR:
8716 case EXACT_DIV_EXPR:
8717 /* If this is a fixed-point operation, then we cannot use the code
8718 below because "expand_divmod" doesn't support sat/no-sat fixed-point
8719 divisions. */
8720 if (ALL_FIXED_POINT_MODE_P (mode))
8721 goto binop;
8723 if (modifier == EXPAND_STACK_PARM)
8724 target = 0;
8725 /* Possible optimization: compute the dividend with EXPAND_SUM
8726 then if the divisor is constant can optimize the case
8727 where some terms of the dividend have coeffs divisible by it. */
8728 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8729 subtarget, &op0, &op1, EXPAND_NORMAL);
8730 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8732 case RDIV_EXPR:
8733 goto binop;
8735 case TRUNC_MOD_EXPR:
8736 case FLOOR_MOD_EXPR:
8737 case CEIL_MOD_EXPR:
8738 case ROUND_MOD_EXPR:
8739 if (modifier == EXPAND_STACK_PARM)
8740 target = 0;
8741 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8742 subtarget, &op0, &op1, EXPAND_NORMAL);
8743 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8745 case FIXED_CONVERT_EXPR:
8746 op0 = expand_normal (TREE_OPERAND (exp, 0));
8747 if (target == 0 || modifier == EXPAND_STACK_PARM)
8748 target = gen_reg_rtx (mode);
8750 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == INTEGER_TYPE
8751 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
8752 || (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type)))
8753 expand_fixed_convert (target, op0, 1, TYPE_SATURATING (type));
8754 else
8755 expand_fixed_convert (target, op0, 0, TYPE_SATURATING (type));
8756 return target;
8758 case FIX_TRUNC_EXPR:
8759 op0 = expand_normal (TREE_OPERAND (exp, 0));
8760 if (target == 0 || modifier == EXPAND_STACK_PARM)
8761 target = gen_reg_rtx (mode);
8762 expand_fix (target, op0, unsignedp);
8763 return target;
8765 case FLOAT_EXPR:
8766 op0 = expand_normal (TREE_OPERAND (exp, 0));
8767 if (target == 0 || modifier == EXPAND_STACK_PARM)
8768 target = gen_reg_rtx (mode);
8769 /* expand_float can't figure out what to do if FROM has VOIDmode.
8770 So give it the correct mode. With -O, cse will optimize this. */
8771 if (GET_MODE (op0) == VOIDmode)
8772 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8773 op0);
8774 expand_float (target, op0,
8775 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8776 return target;
8778 case NEGATE_EXPR:
8779 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget,
8780 VOIDmode, EXPAND_NORMAL);
8781 if (modifier == EXPAND_STACK_PARM)
8782 target = 0;
8783 temp = expand_unop (mode,
8784 optab_for_tree_code (NEGATE_EXPR, type,
8785 optab_default),
8786 op0, target, 0);
8787 gcc_assert (temp);
8788 return REDUCE_BIT_FIELD (temp);
8790 case ABS_EXPR:
8791 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget,
8792 VOIDmode, EXPAND_NORMAL);
8793 if (modifier == EXPAND_STACK_PARM)
8794 target = 0;
8796 /* ABS_EXPR is not valid for complex arguments. */
8797 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
8798 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
8800 /* Unsigned abs is simply the operand. Testing here means we don't
8801 risk generating incorrect code below. */
8802 if (TYPE_UNSIGNED (type))
8803 return op0;
8805 return expand_abs (mode, op0, target, unsignedp,
8806 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
8808 case MAX_EXPR:
8809 case MIN_EXPR:
8810 target = original_target;
8811 if (target == 0
8812 || modifier == EXPAND_STACK_PARM
8813 || (MEM_P (target) && MEM_VOLATILE_P (target))
8814 || GET_MODE (target) != mode
8815 || (REG_P (target)
8816 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8817 target = gen_reg_rtx (mode);
8818 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8819 target, &op0, &op1, EXPAND_NORMAL);
8821 /* First try to do it with a special MIN or MAX instruction.
8822 If that does not win, use a conditional jump to select the proper
8823 value. */
8824 this_optab = optab_for_tree_code (code, type, optab_default);
8825 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8826 OPTAB_WIDEN);
8827 if (temp != 0)
8828 return temp;
8830 /* At this point, a MEM target is no longer useful; we will get better
8831 code without it. */
8833 if (! REG_P (target))
8834 target = gen_reg_rtx (mode);
8836 /* If op1 was placed in target, swap op0 and op1. */
8837 if (target != op0 && target == op1)
8839 temp = op0;
8840 op0 = op1;
8841 op1 = temp;
8844 /* We generate better code and avoid problems with op1 mentioning
8845 target by forcing op1 into a pseudo if it isn't a constant. */
8846 if (! CONSTANT_P (op1))
8847 op1 = force_reg (mode, op1);
8850 enum rtx_code comparison_code;
8851 rtx cmpop1 = op1;
8853 if (code == MAX_EXPR)
8854 comparison_code = unsignedp ? GEU : GE;
8855 else
8856 comparison_code = unsignedp ? LEU : LE;
8858 /* Canonicalize to comparisons against 0. */
8859 if (op1 == const1_rtx)
8861 /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
8862 or (a != 0 ? a : 1) for unsigned.
8863 For MIN we are safe converting (a <= 1 ? a : 1)
8864 into (a <= 0 ? a : 1) */
8865 cmpop1 = const0_rtx;
8866 if (code == MAX_EXPR)
8867 comparison_code = unsignedp ? NE : GT;
8869 if (op1 == constm1_rtx && !unsignedp)
8871 /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
8872 and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
8873 cmpop1 = const0_rtx;
8874 if (code == MIN_EXPR)
8875 comparison_code = LT;
8877 #ifdef HAVE_conditional_move
8878 /* Use a conditional move if possible. */
8879 if (can_conditionally_move_p (mode))
8881 rtx insn;
8883 /* ??? Same problem as in expmed.c: emit_conditional_move
8884 forces a stack adjustment via compare_from_rtx, and we
8885 lose the stack adjustment if the sequence we are about
8886 to create is discarded. */
8887 do_pending_stack_adjust ();
8889 start_sequence ();
8891 /* Try to emit the conditional move. */
8892 insn = emit_conditional_move (target, comparison_code,
8893 op0, cmpop1, mode,
8894 op0, op1, mode,
8895 unsignedp);
8897 /* If we could do the conditional move, emit the sequence,
8898 and return. */
8899 if (insn)
8901 rtx seq = get_insns ();
8902 end_sequence ();
8903 emit_insn (seq);
8904 return target;
8907 /* Otherwise discard the sequence and fall back to code with
8908 branches. */
8909 end_sequence ();
8911 #endif
8912 if (target != op0)
8913 emit_move_insn (target, op0);
8915 temp = gen_label_rtx ();
8916 do_compare_rtx_and_jump (target, cmpop1, comparison_code,
8917 unsignedp, mode, NULL_RTX, NULL_RTX, temp);
8919 emit_move_insn (target, op1);
8920 emit_label (temp);
8921 return target;
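/* Editorial sketch (illustration only, not part of expr.c): when neither a
   MIN/MAX instruction nor a conditional move is available, the code above
   falls back to a compare-and-jump around a second move.  Written out by
   hand for a signed MAX_EXPR:  */
#if 0
static int
max_by_branch (int op0, int op1)
{
  int target = op0;
  if (target >= op1)	/* do_compare_rtx_and_jump with GE */
    goto done;
  target = op1;		/* emit_move_insn (target, op1) */
 done:
  return target;
}
#endif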
8923 case BIT_NOT_EXPR:
8924 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget,
8925 VOIDmode, EXPAND_NORMAL);
8926 if (modifier == EXPAND_STACK_PARM)
8927 target = 0;
8928 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8929 gcc_assert (temp);
8930 return temp;
8932 /* ??? Can optimize bitwise operations with one arg constant.
8933 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8934 and (a bitwise1 b) bitwise2 b (etc)
8935 but that is probably not worthwhile. */
8937 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8938 boolean values when we want in all cases to compute both of them. In
8939 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8940 as actual zero-or-1 values and then bitwise anding. In cases where
8941 there cannot be any side effects, better code would be made by
8942 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8943 how to recognize those cases. */
8945 case TRUTH_AND_EXPR:
8946 code = BIT_AND_EXPR;
8947 case BIT_AND_EXPR:
8948 goto binop;
8950 case TRUTH_OR_EXPR:
8951 code = BIT_IOR_EXPR;
8952 case BIT_IOR_EXPR:
8953 goto binop;
8955 case TRUTH_XOR_EXPR:
8956 code = BIT_XOR_EXPR;
8957 case BIT_XOR_EXPR:
8958 goto binop;
8960 case LROTATE_EXPR:
8961 case RROTATE_EXPR:
8962 gcc_assert (VECTOR_MODE_P (TYPE_MODE (type))
8963 || (GET_MODE_PRECISION (TYPE_MODE (type))
8964 == TYPE_PRECISION (type)));
8965 /* fall through */
8967 case LSHIFT_EXPR:
8968 case RSHIFT_EXPR:
8969 /* If this is a fixed-point operation, then we cannot use the code
8970 below because "expand_shift" doesn't support sat/no-sat fixed-point
8971 shifts. */
8972 if (ALL_FIXED_POINT_MODE_P (mode))
8973 goto binop;
8975 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8976 subtarget = 0;
8977 if (modifier == EXPAND_STACK_PARM)
8978 target = 0;
8979 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget,
8980 VOIDmode, EXPAND_NORMAL);
8981 temp = expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8982 unsignedp);
8983 if (code == LSHIFT_EXPR)
8984 temp = REDUCE_BIT_FIELD (temp);
8985 return temp;
8987 /* Could determine the answer when only additive constants differ. Also,
8988 the addition of one can be handled by changing the condition. */
8989 case LT_EXPR:
8990 case LE_EXPR:
8991 case GT_EXPR:
8992 case GE_EXPR:
8993 case EQ_EXPR:
8994 case NE_EXPR:
8995 case UNORDERED_EXPR:
8996 case ORDERED_EXPR:
8997 case UNLT_EXPR:
8998 case UNLE_EXPR:
8999 case UNGT_EXPR:
9000 case UNGE_EXPR:
9001 case UNEQ_EXPR:
9002 case LTGT_EXPR:
9003 temp = do_store_flag (exp,
9004 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
9005 tmode != VOIDmode ? tmode : mode);
9006 if (temp != 0)
9007 return temp;
9009 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
9010 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
9011 && original_target
9012 && REG_P (original_target)
9013 && (GET_MODE (original_target)
9014 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9016 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
9017 VOIDmode, EXPAND_NORMAL);
9019 /* If temp is constant, we can just compute the result. */
9020 if (GET_CODE (temp) == CONST_INT)
9022 if (INTVAL (temp) != 0)
9023 emit_move_insn (target, const1_rtx);
9024 else
9025 emit_move_insn (target, const0_rtx);
9027 return target;
9030 if (temp != original_target)
9032 enum machine_mode mode1 = GET_MODE (temp);
9033 if (mode1 == VOIDmode)
9034 mode1 = tmode != VOIDmode ? tmode : mode;
9036 temp = copy_to_mode_reg (mode1, temp);
9039 op1 = gen_label_rtx ();
9040 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
9041 GET_MODE (temp), unsignedp, op1);
9042 emit_move_insn (temp, const1_rtx);
9043 emit_label (op1);
9044 return temp;
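/* Editorial sketch (illustration only, not part of expr.c): the "foo != 0"
   fallback above, spelled out -- load the value, then skip the "load 1"
   when it is already zero.  */
#if 0
static int
ne_zero (int foo)
{
  int temp = foo;
  if (temp == 0)	/* emit_cmp_and_jump_insns (..., EQ, ...) */
    goto over;
  temp = 1;		/* emit_move_insn (temp, const1_rtx) */
 over:
  return temp;
}
#endif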
9047 /* If no set-flag instruction, must generate a conditional store
9048 into a temporary variable. Drop through and handle this
9049 like && and ||. */
9050 /* Although TRUTH_{AND,OR}IF_EXPR aren't present in GIMPLE, they
9051 are occasionally created by folding during expansion. */
9052 case TRUTH_ANDIF_EXPR:
9053 case TRUTH_ORIF_EXPR:
9054 if (! ignore
9055 && (target == 0
9056 || modifier == EXPAND_STACK_PARM
9057 || ! safe_from_p (target, exp, 1)
9058 /* Make sure we don't have a hard reg (such as function's return
9059 value) live across basic blocks, if not optimizing. */
9060 || (!optimize && REG_P (target)
9061 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
9062 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
9064 if (target)
9065 emit_move_insn (target, const0_rtx);
9067 op1 = gen_label_rtx ();
9068 jumpifnot (exp, op1);
9070 if (target)
9071 emit_move_insn (target, const1_rtx);
9073 emit_label (op1);
9074 return ignore ? const0_rtx : target;
9076 case TRUTH_NOT_EXPR:
9077 if (modifier == EXPAND_STACK_PARM)
9078 target = 0;
9079 op0 = expand_expr (TREE_OPERAND (exp, 0), target,
9080 VOIDmode, EXPAND_NORMAL);
9081 /* The parser is careful to generate TRUTH_NOT_EXPR
9082 only with operands that are always zero or one. */
9083 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
9084 target, 1, OPTAB_LIB_WIDEN);
9085 gcc_assert (temp);
9086 return temp;
9088 case STATEMENT_LIST:
9090 tree_stmt_iterator iter;
9092 gcc_assert (ignore);
9094 for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
9095 expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
9097 return const0_rtx;
9099 case COND_EXPR:
9100 /* A COND_EXPR with its type being VOID_TYPE represents a
9101 conditional jump and is handled in
9102 expand_gimple_cond_expr. */
9103 gcc_assert (!VOID_TYPE_P (TREE_TYPE (exp)));
9105 /* Note that COND_EXPRs whose type is a structure or union
9106 are required to be constructed to contain assignments to
9107 a temporary variable, so that we can evaluate them here
9108 for side effects only. If the type is void, we must do likewise. */
9110 gcc_assert (!TREE_ADDRESSABLE (type)
9111 && !ignore
9112 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node
9113 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node);
9115 /* If we are not to produce a result, we have no target. Otherwise,
9116 if a target was specified use it; it will not be used as an
9117 intermediate target unless it is safe. If no target, use a
9118 temporary. */
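/* Roughly, "temp = cond ? a : b" is expanded below as:
       if (!cond) goto L0;
       temp = a; goto L1;
   L0: temp = b;
   L1: ;  */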
9120 if (modifier != EXPAND_STACK_PARM
9121 && original_target
9122 && safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
9123 && GET_MODE (original_target) == mode
9124 #ifdef HAVE_conditional_move
9125 && (! can_conditionally_move_p (mode)
9126 || REG_P (original_target))
9127 #endif
9128 && !MEM_P (original_target))
9129 temp = original_target;
9130 else
9131 temp = assign_temp (type, 0, 0, 1);
9133 do_pending_stack_adjust ();
9134 NO_DEFER_POP;
9135 op0 = gen_label_rtx ();
9136 op1 = gen_label_rtx ();
9137 jumpifnot (TREE_OPERAND (exp, 0), op0);
9138 store_expr (TREE_OPERAND (exp, 1), temp,
9139 modifier == EXPAND_STACK_PARM,
9140 false);
9142 emit_jump_insn (gen_jump (op1));
9143 emit_barrier ();
9144 emit_label (op0);
9145 store_expr (TREE_OPERAND (exp, 2), temp,
9146 modifier == EXPAND_STACK_PARM,
9147 false);
9149 emit_label (op1);
9150 OK_DEFER_POP;
9151 return temp;
9153 case VEC_COND_EXPR:
9154 target = expand_vec_cond_expr (exp, target);
9155 return target;
9157 case MODIFY_EXPR:
9159 tree lhs = TREE_OPERAND (exp, 0);
9160 tree rhs = TREE_OPERAND (exp, 1);
9161 gcc_assert (ignore);
9163 /* Check for |= or &= of a bitfield of size one into another bitfield
9164 of size 1. In this case, (unless we need the result of the
9165 assignment) we can do this more efficiently with a
9166 test followed by an assignment, if necessary.
9168 ??? At this point, we can't get a BIT_FIELD_REF here. But if
9169 things change so that we do, this code should be enhanced to
9170 support it. */
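/* For example, with one-bit fields a and b, "s.a |= s.b" can be
   expanded roughly as "if (s.b) s.a = 1;" and "s.a &= s.b" as
   "if (!s.b) s.a = 0;".  */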
9171 if (TREE_CODE (lhs) == COMPONENT_REF
9172 && (TREE_CODE (rhs) == BIT_IOR_EXPR
9173 || TREE_CODE (rhs) == BIT_AND_EXPR)
9174 && TREE_OPERAND (rhs, 0) == lhs
9175 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
9176 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
9177 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
9179 rtx label = gen_label_rtx ();
9180 int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
9181 do_jump (TREE_OPERAND (rhs, 1),
9182 value ? label : 0,
9183 value ? 0 : label);
9184 expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value),
9185 MOVE_NONTEMPORAL (exp));
9186 do_pending_stack_adjust ();
9187 emit_label (label);
9188 return const0_rtx;
9191 expand_assignment (lhs, rhs, MOVE_NONTEMPORAL (exp));
9192 return const0_rtx;
9195 case RETURN_EXPR:
9196 if (!TREE_OPERAND (exp, 0))
9197 expand_null_return ();
9198 else
9199 expand_return (TREE_OPERAND (exp, 0));
9200 return const0_rtx;
9202 case ADDR_EXPR:
9203 return expand_expr_addr_expr (exp, target, tmode, modifier);
9205 case COMPLEX_EXPR:
9206 /* Get the rtx code of the operands. */
9207 op0 = expand_normal (TREE_OPERAND (exp, 0));
9208 op1 = expand_normal (TREE_OPERAND (exp, 1));
9210 if (!target)
9211 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
9213 /* Move the real (op0) and imaginary (op1) parts to their location. */
9214 write_complex_part (target, op0, false);
9215 write_complex_part (target, op1, true);
9217 return target;
9219 case REALPART_EXPR:
9220 op0 = expand_normal (TREE_OPERAND (exp, 0));
9221 return read_complex_part (op0, false);
9223 case IMAGPART_EXPR:
9224 op0 = expand_normal (TREE_OPERAND (exp, 0));
9225 return read_complex_part (op0, true);
9227 case RESX_EXPR:
9228 expand_resx_expr (exp);
9229 return const0_rtx;
9231 case TRY_CATCH_EXPR:
9232 case CATCH_EXPR:
9233 case EH_FILTER_EXPR:
9234 case TRY_FINALLY_EXPR:
9235 /* Lowered by tree-eh.c. */
9236 gcc_unreachable ();
9238 case WITH_CLEANUP_EXPR:
9239 case CLEANUP_POINT_EXPR:
9240 case TARGET_EXPR:
9241 case CASE_LABEL_EXPR:
9242 case VA_ARG_EXPR:
9243 case BIND_EXPR:
9244 case INIT_EXPR:
9245 case CONJ_EXPR:
9246 case COMPOUND_EXPR:
9247 case PREINCREMENT_EXPR:
9248 case PREDECREMENT_EXPR:
9249 case POSTINCREMENT_EXPR:
9250 case POSTDECREMENT_EXPR:
9251 case LOOP_EXPR:
9252 case EXIT_EXPR:
9253 /* Lowered by gimplify.c. */
9254 gcc_unreachable ();
9256 case CHANGE_DYNAMIC_TYPE_EXPR:
9257 /* This is ignored at the RTL level. The tree level has already set
9258 DECL_POINTER_ALIAS_SET of any affected variable to 0, which is
9259 overkill for the RTL layer but is all that we can
9260 represent. */
9261 return const0_rtx;
9263 case EXC_PTR_EXPR:
9264 return get_exception_pointer ();
9266 case FILTER_EXPR:
9267 return get_exception_filter ();
9269 case FDESC_EXPR:
9270 /* Function descriptors are not valid except as
9271 initialization constants, and should not be expanded. */
9272 gcc_unreachable ();
9274 case SWITCH_EXPR:
9275 expand_case (exp);
9276 return const0_rtx;
9278 case LABEL_EXPR:
9279 expand_label (TREE_OPERAND (exp, 0));
9280 return const0_rtx;
9282 case ASM_EXPR:
9283 expand_asm_expr (exp);
9284 return const0_rtx;
9286 case WITH_SIZE_EXPR:
9287 /* WITH_SIZE_EXPR expands to its first argument. The caller should
9288 have pulled out the size to use in whatever context it needed. */
9289 return expand_expr_real (TREE_OPERAND (exp, 0), original_target, tmode,
9290 modifier, alt_rtl);
9292 case REALIGN_LOAD_EXPR:
9294 tree oprnd0 = TREE_OPERAND (exp, 0);
9295 tree oprnd1 = TREE_OPERAND (exp, 1);
9296 tree oprnd2 = TREE_OPERAND (exp, 2);
9297 rtx op2;
9299 this_optab = optab_for_tree_code (code, type, optab_default);
9300 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9301 op2 = expand_normal (oprnd2);
9302 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
9303 target, unsignedp);
9304 gcc_assert (temp);
9305 return temp;
9308 case DOT_PROD_EXPR:
9310 tree oprnd0 = TREE_OPERAND (exp, 0);
9311 tree oprnd1 = TREE_OPERAND (exp, 1);
9312 tree oprnd2 = TREE_OPERAND (exp, 2);
9313 rtx op2;
9315 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9316 op2 = expand_normal (oprnd2);
9317 target = expand_widen_pattern_expr (exp, op0, op1, op2,
9318 target, unsignedp);
9319 return target;
9322 case WIDEN_SUM_EXPR:
9324 tree oprnd0 = TREE_OPERAND (exp, 0);
9325 tree oprnd1 = TREE_OPERAND (exp, 1);
9327 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9328 target = expand_widen_pattern_expr (exp, op0, NULL_RTX, op1,
9329 target, unsignedp);
9330 return target;
9333 case REDUC_MAX_EXPR:
9334 case REDUC_MIN_EXPR:
9335 case REDUC_PLUS_EXPR:
9337 op0 = expand_normal (TREE_OPERAND (exp, 0));
9338 this_optab = optab_for_tree_code (code, type, optab_default);
9339 temp = expand_unop (mode, this_optab, op0, target, unsignedp);
9340 gcc_assert (temp);
9341 return temp;
9344 case VEC_EXTRACT_EVEN_EXPR:
9345 case VEC_EXTRACT_ODD_EXPR:
9347 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
9348 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9349 this_optab = optab_for_tree_code (code, type, optab_default);
9350 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
9351 OPTAB_WIDEN);
9352 gcc_assert (temp);
9353 return temp;
9356 case VEC_INTERLEAVE_HIGH_EXPR:
9357 case VEC_INTERLEAVE_LOW_EXPR:
9359 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
9360 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9361 this_optab = optab_for_tree_code (code, type, optab_default);
9362 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
9363 OPTAB_WIDEN);
9364 gcc_assert (temp);
9365 return temp;
9368 case VEC_LSHIFT_EXPR:
9369 case VEC_RSHIFT_EXPR:
9371 target = expand_vec_shift_expr (exp, target);
9372 return target;
9375 case VEC_UNPACK_HI_EXPR:
9376 case VEC_UNPACK_LO_EXPR:
9378 op0 = expand_normal (TREE_OPERAND (exp, 0));
9379 this_optab = optab_for_tree_code (code, type, optab_default);
9380 temp = expand_widen_pattern_expr (exp, op0, NULL_RTX, NULL_RTX,
9381 target, unsignedp);
9382 gcc_assert (temp);
9383 return temp;
9386 case VEC_UNPACK_FLOAT_HI_EXPR:
9387 case VEC_UNPACK_FLOAT_LO_EXPR:
9389 op0 = expand_normal (TREE_OPERAND (exp, 0));
9390 /* The signedness is determined from the input operand. */
9391 this_optab = optab_for_tree_code (code,
9392 TREE_TYPE (TREE_OPERAND (exp, 0)),
9393 optab_default);
9394 temp = expand_widen_pattern_expr
9395 (exp, op0, NULL_RTX, NULL_RTX,
9396 target, TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
9398 gcc_assert (temp);
9399 return temp;
9402 case VEC_WIDEN_MULT_HI_EXPR:
9403 case VEC_WIDEN_MULT_LO_EXPR:
9405 tree oprnd0 = TREE_OPERAND (exp, 0);
9406 tree oprnd1 = TREE_OPERAND (exp, 1);
9408 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9409 target = expand_widen_pattern_expr (exp, op0, op1, NULL_RTX,
9410 target, unsignedp);
9411 gcc_assert (target);
9412 return target;
9415 case VEC_PACK_TRUNC_EXPR:
9416 case VEC_PACK_SAT_EXPR:
9417 case VEC_PACK_FIX_TRUNC_EXPR:
9418 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9419 goto binop;
9421 case COMPOUND_LITERAL_EXPR:
9423 /* Initialize the anonymous variable declared in the compound
9424 literal, then return the variable. */
9425 tree decl = COMPOUND_LITERAL_EXPR_DECL (exp);
9427 /* Create RTL for this variable. */
9428 if (!DECL_RTL_SET_P (decl))
9430 if (DECL_HARD_REGISTER (decl))
9431 /* The user specified an assembler name for this variable.
9432 Set that up now. */
9433 rest_of_decl_compilation (decl, 0, 0);
9434 else
9435 expand_decl (decl);
9438 return expand_expr_real (decl, original_target, tmode,
9439 modifier, alt_rtl);
9442 default:
9443 gcc_unreachable ();
9446 /* Here to do an ordinary binary operator. */
9447 binop:
9448 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
9449 subtarget, &op0, &op1, EXPAND_NORMAL);
9450 binop2:
9451 this_optab = optab_for_tree_code (code, type, optab_default);
9452 binop3:
9453 if (modifier == EXPAND_STACK_PARM)
9454 target = 0;
9455 temp = expand_binop (mode, this_optab, op0, op1, target,
9456 unsignedp, OPTAB_LIB_WIDEN);
9457 gcc_assert (temp);
9458 return REDUCE_BIT_FIELD (temp);
9460 #undef REDUCE_BIT_FIELD
9462 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
9463 signedness of TYPE), possibly returning the result in TARGET. */
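/* For example, reducing to a 3-bit unsigned type masks EXP with 7,
   while reducing to a 3-bit signed type shifts EXP left by
   (bitsize - 3) and then arithmetic-shifts it back right, so the
   value is sign-extended from bit 2.  */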
9464 static rtx
9465 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
9467 HOST_WIDE_INT prec = TYPE_PRECISION (type);
9468 if (target && GET_MODE (target) != GET_MODE (exp))
9469 target = 0;
9470 /* For constant values, reduce using build_int_cst_type. */
9471 if (GET_CODE (exp) == CONST_INT)
9473 HOST_WIDE_INT value = INTVAL (exp);
9474 tree t = build_int_cst_type (type, value);
9475 return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
9477 else if (TYPE_UNSIGNED (type))
9479 rtx mask;
9480 if (prec < HOST_BITS_PER_WIDE_INT)
9481 mask = immed_double_const (((unsigned HOST_WIDE_INT) 1 << prec) - 1, 0,
9482 GET_MODE (exp));
9483 else
9484 mask = immed_double_const ((unsigned HOST_WIDE_INT) -1,
9485 ((unsigned HOST_WIDE_INT) 1
9486 << (prec - HOST_BITS_PER_WIDE_INT)) - 1,
9487 GET_MODE (exp));
9488 return expand_and (GET_MODE (exp), exp, mask, target);
9490 else
9492 tree count = build_int_cst (NULL_TREE,
9493 GET_MODE_BITSIZE (GET_MODE (exp)) - prec);
9494 exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
9495 return expand_shift (RSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
9499 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
9500 when applied to the address of EXP produces an address known to be
9501 aligned more than BIGGEST_ALIGNMENT. */
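/* The offsets recognized here are roughly of the form
       -(sizetype) &EXP & (ALIGN - 1)
   with ALIGN a power of 2 exceeding BIGGEST_ALIGNMENT, i.e. the
   adjustment that rounds &EXP up to an ALIGN-byte boundary.  */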
9503 static int
9504 is_aligning_offset (const_tree offset, const_tree exp)
9506 /* Strip off any conversions. */
9507 while (CONVERT_EXPR_P (offset))
9508 offset = TREE_OPERAND (offset, 0);
9510 /* We must now have a BIT_AND_EXPR with a constant that is one less than
9511 a power of 2 and which is larger than BIGGEST_ALIGNMENT. */
9512 if (TREE_CODE (offset) != BIT_AND_EXPR
9513 || !host_integerp (TREE_OPERAND (offset, 1), 1)
9514 || compare_tree_int (TREE_OPERAND (offset, 1),
9515 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
9516 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
9517 return 0;
9519 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9520 It must be NEGATE_EXPR. Then strip any more conversions. */
9521 offset = TREE_OPERAND (offset, 0);
9522 while (CONVERT_EXPR_P (offset))
9523 offset = TREE_OPERAND (offset, 0);
9525 if (TREE_CODE (offset) != NEGATE_EXPR)
9526 return 0;
9528 offset = TREE_OPERAND (offset, 0);
9529 while (CONVERT_EXPR_P (offset))
9530 offset = TREE_OPERAND (offset, 0);
9532 /* This must now be the address of EXP. */
9533 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
9536 /* Return the tree node if ARG corresponds to a string constant, or zero
9537 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
9538 in bytes within the string that ARG is accessing. The type of the
9539 offset will be `sizetype'. */
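/* For example, for ARG of the form &"hello"[2] or "hello" + 2 this
   returns the STRING_CST "hello" and sets *PTR_OFFSET to a sizetype
   constant 2.  */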
9541 tree
9542 string_constant (tree arg, tree *ptr_offset)
9544 tree array, offset, lower_bound;
9545 STRIP_NOPS (arg);
9547 if (TREE_CODE (arg) == ADDR_EXPR)
9549 if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9551 *ptr_offset = size_zero_node;
9552 return TREE_OPERAND (arg, 0);
9554 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
9556 array = TREE_OPERAND (arg, 0);
9557 offset = size_zero_node;
9559 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
9561 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
9562 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
9563 if (TREE_CODE (array) != STRING_CST
9564 && TREE_CODE (array) != VAR_DECL)
9565 return 0;
9567 /* Check if the array has a nonzero lower bound. */
9568 lower_bound = array_ref_low_bound (TREE_OPERAND (arg, 0));
9569 if (!integer_zerop (lower_bound))
9571 /* If the offset and lower bound aren't both constants, return 0. */
9572 if (TREE_CODE (lower_bound) != INTEGER_CST)
9573 return 0;
9574 if (TREE_CODE (offset) != INTEGER_CST)
9575 return 0;
9576 /* Adjust offset by the lower bound. */
9577 offset = size_diffop (fold_convert (sizetype, offset),
9578 fold_convert (sizetype, lower_bound));
9581 else
9582 return 0;
9584 else if (TREE_CODE (arg) == PLUS_EXPR || TREE_CODE (arg) == POINTER_PLUS_EXPR)
9586 tree arg0 = TREE_OPERAND (arg, 0);
9587 tree arg1 = TREE_OPERAND (arg, 1);
9589 STRIP_NOPS (arg0);
9590 STRIP_NOPS (arg1);
9592 if (TREE_CODE (arg0) == ADDR_EXPR
9593 && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
9594 || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
9596 array = TREE_OPERAND (arg0, 0);
9597 offset = arg1;
9599 else if (TREE_CODE (arg1) == ADDR_EXPR
9600 && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
9601 || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
9603 array = TREE_OPERAND (arg1, 0);
9604 offset = arg0;
9606 else
9607 return 0;
9609 else
9610 return 0;
9612 if (TREE_CODE (array) == STRING_CST)
9614 *ptr_offset = fold_convert (sizetype, offset);
9615 return array;
9617 else if (TREE_CODE (array) == VAR_DECL)
9619 int length;
9621 /* Variables initialized to string literals can be handled too. */
9622 if (DECL_INITIAL (array) == NULL_TREE
9623 || TREE_CODE (DECL_INITIAL (array)) != STRING_CST)
9624 return 0;
9626 /* They must be read-only, non-volatile, and bind locally. */
9627 if (! TREE_READONLY (array)
9628 || TREE_SIDE_EFFECTS (array)
9629 || ! targetm.binds_local_p (array))
9630 return 0;
9632 /* Avoid const char foo[4] = "abcde"; */
9633 if (DECL_SIZE_UNIT (array) == NULL_TREE
9634 || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
9635 || (length = TREE_STRING_LENGTH (DECL_INITIAL (array))) <= 0
9636 || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
9637 return 0;
9639 /* If the variable is bigger than the string literal, OFFSET must be constant
9640 and within the bounds of the string literal. */
9641 offset = fold_convert (sizetype, offset);
9642 if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
9643 && (! host_integerp (offset, 1)
9644 || compare_tree_int (offset, length) >= 0))
9645 return 0;
9647 *ptr_offset = offset;
9648 return DECL_INITIAL (array);
9651 return 0;
9654 /* Generate code to calculate EXP using a store-flag instruction
9655 and return an rtx for the result. EXP is either a comparison
9656 or a TRUTH_NOT_EXPR whose operand is a comparison.
9658 If TARGET is nonzero, store the result there if convenient.
9660 Return zero if there is no suitable set-flag instruction
9661 available on this machine.
9663 Once expand_expr has been called on the arguments of the comparison,
9664 we are committed to doing the store flag, since it is not safe to
9665 re-evaluate the expression. We emit the store-flag insn by calling
9666 emit_store_flag, but only expand the arguments if we have a reason
9667 to believe that emit_store_flag will be successful. If we think that
9668 it will, but it isn't, we have to simulate the store-flag with a
9669 set/jump/set sequence. */
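/* The set/jump/set fallback at the end of this function emits roughly:
       target = 1;
       if (op0 <cond> op1) goto L;
       target = 0;
    L: ;
   with the two constants swapped when the result must be inverted.  */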
9671 static rtx
9672 do_store_flag (tree exp, rtx target, enum machine_mode mode)
9674 enum rtx_code code;
9675 tree arg0, arg1, type;
9676 tree tem;
9677 enum machine_mode operand_mode;
9678 int invert = 0;
9679 int unsignedp;
9680 rtx op0, op1;
9681 rtx subtarget = target;
9682 rtx result, label;
9684 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9685 result at the end. We can't simply invert the test since it would
9686 have already been inverted if it were valid. This case occurs for
9687 some floating-point comparisons. */
9689 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
9690 invert = 1, exp = TREE_OPERAND (exp, 0);
9692 arg0 = TREE_OPERAND (exp, 0);
9693 arg1 = TREE_OPERAND (exp, 1);
9695 /* Don't crash if the comparison was erroneous. */
9696 if (arg0 == error_mark_node || arg1 == error_mark_node)
9697 return const0_rtx;
9699 type = TREE_TYPE (arg0);
9700 operand_mode = TYPE_MODE (type);
9701 unsignedp = TYPE_UNSIGNED (type);
9703 /* We won't bother with BLKmode store-flag operations because it would mean
9704 passing a lot of information to emit_store_flag. */
9705 if (operand_mode == BLKmode)
9706 return 0;
9708 /* We won't bother with store-flag operations involving function pointers
9709 when function pointers must be canonicalized before comparisons. */
9710 #ifdef HAVE_canonicalize_funcptr_for_compare
9711 if (HAVE_canonicalize_funcptr_for_compare
9712 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9713 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9714 == FUNCTION_TYPE))
9715 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9716 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9717 == FUNCTION_TYPE))))
9718 return 0;
9719 #endif
9721 STRIP_NOPS (arg0);
9722 STRIP_NOPS (arg1);
9724 /* Get the rtx comparison code to use. We know that EXP is a comparison
9725 operation of some type. Some comparisons against 1 and -1 can be
9726 converted to comparisons with zero. Do so here so that the tests
9727 below will be aware that we have a comparison with zero. These
9728 tests will not catch constants in the first operand, but constants
9729 are rarely passed as the first operand. */
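/* For example, "x < 1" becomes "x <= 0" and a signed "x > -1" becomes
   "x >= 0", so the zero-comparison checks below can recognize them.  */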
9731 switch (TREE_CODE (exp))
9733 case EQ_EXPR:
9734 code = EQ;
9735 break;
9736 case NE_EXPR:
9737 code = NE;
9738 break;
9739 case LT_EXPR:
9740 if (integer_onep (arg1))
9741 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
9742 else
9743 code = unsignedp ? LTU : LT;
9744 break;
9745 case LE_EXPR:
9746 if (! unsignedp && integer_all_onesp (arg1))
9747 arg1 = integer_zero_node, code = LT;
9748 else
9749 code = unsignedp ? LEU : LE;
9750 break;
9751 case GT_EXPR:
9752 if (! unsignedp && integer_all_onesp (arg1))
9753 arg1 = integer_zero_node, code = GE;
9754 else
9755 code = unsignedp ? GTU : GT;
9756 break;
9757 case GE_EXPR:
9758 if (integer_onep (arg1))
9759 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
9760 else
9761 code = unsignedp ? GEU : GE;
9762 break;
9764 case UNORDERED_EXPR:
9765 code = UNORDERED;
9766 break;
9767 case ORDERED_EXPR:
9768 code = ORDERED;
9769 break;
9770 case UNLT_EXPR:
9771 code = UNLT;
9772 break;
9773 case UNLE_EXPR:
9774 code = UNLE;
9775 break;
9776 case UNGT_EXPR:
9777 code = UNGT;
9778 break;
9779 case UNGE_EXPR:
9780 code = UNGE;
9781 break;
9782 case UNEQ_EXPR:
9783 code = UNEQ;
9784 break;
9785 case LTGT_EXPR:
9786 code = LTGT;
9787 break;
9789 default:
9790 gcc_unreachable ();
9793 /* Put a constant second. */
9794 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST
9795 || TREE_CODE (arg0) == FIXED_CST)
9797 tem = arg0; arg0 = arg1; arg1 = tem;
9798 code = swap_condition (code);
9801 /* If this is an equality or inequality test of a single bit, we can
9802 do this by shifting the bit being tested to the low-order bit and
9803 masking the result with the constant 1. If the condition was EQ,
9804 we xor it with 1. This does not require an scc insn and is faster
9805 than an scc insn even if we have it.
9807 The code to make this transformation was moved into fold_single_bit_test,
9808 so we just call into the folder and expand its result. */
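/* For example, "(x & 8) != 0" expands roughly to "(x >> 3) & 1",
   and "(x & 8) == 0" to "((x >> 3) & 1) ^ 1".  */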
9810 if ((code == NE || code == EQ)
9811 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
9812 && integer_pow2p (TREE_OPERAND (arg0, 1)))
9814 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
9815 return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
9816 arg0, arg1, type),
9817 target, VOIDmode, EXPAND_NORMAL);
9820 /* Now see if we are likely to be able to do this. Return if not. */
9821 if (! can_compare_p (code, operand_mode, ccp_store_flag))
9822 return 0;
9824 if (! get_subtarget (target)
9825 || GET_MODE (subtarget) != operand_mode)
9826 subtarget = 0;
9828 expand_operands (arg0, arg1, subtarget, &op0, &op1, EXPAND_NORMAL);
9830 if (target == 0)
9831 target = gen_reg_rtx (mode);
9833 result = emit_store_flag (target, code, op0, op1,
9834 operand_mode, unsignedp, 1);
9836 if (result)
9838 if (invert)
9839 result = expand_binop (mode, xor_optab, result, const1_rtx,
9840 result, 0, OPTAB_LIB_WIDEN);
9841 return result;
9844 /* If this failed, we have to do this with set/compare/jump/set code. */
9845 if (!REG_P (target)
9846 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
9847 target = gen_reg_rtx (GET_MODE (target));
9849 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
9850 label = gen_label_rtx ();
9851 do_compare_rtx_and_jump (op0, op1, code, unsignedp, operand_mode, NULL_RTX,
9852 NULL_RTX, label);
9854 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
9855 emit_label (label);
9857 return target;
9861 /* Stubs in case we haven't got a casesi insn. */
9862 #ifndef HAVE_casesi
9863 # define HAVE_casesi 0
9864 # define gen_casesi(a, b, c, d, e) (0)
9865 # define CODE_FOR_casesi CODE_FOR_nothing
9866 #endif
9868 /* If the machine does not have a case insn that compares the bounds,
9869 this means extra overhead for dispatch tables, which raises the
9870 threshold for using them. */
9871 #ifndef CASE_VALUES_THRESHOLD
9872 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
9873 #endif /* CASE_VALUES_THRESHOLD */
9875 unsigned int
9876 case_values_threshold (void)
9878 return CASE_VALUES_THRESHOLD;
9881 /* Attempt to generate a casesi instruction. Returns 1 if successful,
9882 0 otherwise (i.e. if there is no casesi instruction). */
9884 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
9885 rtx table_label ATTRIBUTE_UNUSED, rtx default_label,
9886 rtx fallback_label ATTRIBUTE_UNUSED)
9888 enum machine_mode index_mode = SImode;
9889 int index_bits = GET_MODE_BITSIZE (index_mode);
9890 rtx op1, op2, index;
9891 enum machine_mode op_mode;
9893 if (! HAVE_casesi)
9894 return 0;
9896 /* Convert the index to SImode. */
9897 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
9899 enum machine_mode omode = TYPE_MODE (index_type);
9900 rtx rangertx = expand_normal (range);
9902 /* We must handle the endpoints in the original mode. */
9903 index_expr = build2 (MINUS_EXPR, index_type,
9904 index_expr, minval);
9905 minval = integer_zero_node;
9906 index = expand_normal (index_expr);
9907 if (default_label)
9908 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
9909 omode, 1, default_label);
9910 /* Now we can safely truncate. */
9911 index = convert_to_mode (index_mode, index, 0);
9913 else
9915 if (TYPE_MODE (index_type) != index_mode)
9917 index_type = lang_hooks.types.type_for_size (index_bits, 0);
9918 index_expr = fold_convert (index_type, index_expr);
9921 index = expand_normal (index_expr);
9924 do_pending_stack_adjust ();
9926 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
9927 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
9928 (index, op_mode))
9929 index = copy_to_mode_reg (op_mode, index);
9931 op1 = expand_normal (minval);
9933 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
9934 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
9935 op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
9936 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
9937 (op1, op_mode))
9938 op1 = copy_to_mode_reg (op_mode, op1);
9940 op2 = expand_normal (range);
9942 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
9943 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
9944 op2, TYPE_UNSIGNED (TREE_TYPE (range)));
9945 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
9946 (op2, op_mode))
9947 op2 = copy_to_mode_reg (op_mode, op2);
9949 emit_jump_insn (gen_casesi (index, op1, op2,
9950 table_label, !default_label
9951 ? fallback_label : default_label));
9952 return 1;
9955 /* Attempt to generate a tablejump instruction; same concept. */
9956 #ifndef HAVE_tablejump
9957 #define HAVE_tablejump 0
9958 #define gen_tablejump(x, y) (0)
9959 #endif
9961 /* Subroutine of the next function.
9963 INDEX is the value being switched on, with the lowest value
9964 in the table already subtracted.
9965 MODE is its expected mode (needed if INDEX is constant).
9966 RANGE is the length of the jump table.
9967 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
9969 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
9970 index value is out of range. */
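/* Roughly, the emitted sequence is:
       if ((unsigned) INDEX > RANGE) goto DEFAULT_LABEL;
       temp = *(TABLE_LABEL + INDEX * entry_size);
       goto *temp;
   with the table access built from CASE_VECTOR_MODE and, for PIC,
   PIC_CASE_VECTOR_ADDRESS.  */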
9972 static void
9973 do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
9974 rtx default_label)
9976 rtx temp, vector;
9978 if (INTVAL (range) > cfun->cfg->max_jumptable_ents)
9979 cfun->cfg->max_jumptable_ents = INTVAL (range);
9981 /* Do an unsigned comparison (in the proper mode) between the index
9982 expression and the value which represents the length of the range.
9983 Since we just finished subtracting the lower bound of the range
9984 from the index expression, this comparison allows us to simultaneously
9985 check that the original index expression value is both greater than
9986 or equal to the minimum value of the range and less than or equal to
9987 the maximum value of the range. */
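/* For example, with case values 10 .. 15 the caller passes INDEX - 10
   and a RANGE of 5; an original index of 7 wraps to a huge unsigned
   value, so the single GTU test also rejects values below the
   minimum.  */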
9989 if (default_label)
9990 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
9991 default_label);
9993 /* If index is in range, it must fit in Pmode.
9994 Convert to Pmode so we can index with it. */
9995 if (mode != Pmode)
9996 index = convert_to_mode (Pmode, index, 1);
9998 /* Don't let a MEM slip through, because then INDEX that comes
9999 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10000 and break_out_memory_refs will go to work on it and mess it up. */
10001 #ifdef PIC_CASE_VECTOR_ADDRESS
10002 if (flag_pic && !REG_P (index))
10003 index = copy_to_mode_reg (Pmode, index);
10004 #endif
10006 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10007 GET_MODE_SIZE, because this indicates how large insns are. The other
10008 uses should all be Pmode, because they are addresses. This code
10009 could fail if addresses and insns are not the same size. */
10010 index = gen_rtx_PLUS (Pmode,
10011 gen_rtx_MULT (Pmode, index,
10012 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10013 gen_rtx_LABEL_REF (Pmode, table_label));
10014 #ifdef PIC_CASE_VECTOR_ADDRESS
10015 if (flag_pic)
10016 index = PIC_CASE_VECTOR_ADDRESS (index);
10017 else
10018 #endif
10019 index = memory_address (CASE_VECTOR_MODE, index);
10020 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10021 vector = gen_const_mem (CASE_VECTOR_MODE, index);
10022 convert_move (temp, vector, 0);
10024 emit_jump_insn (gen_tablejump (temp, table_label));
10026 /* If we are generating PIC code or if the table is PC-relative, the
10027 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10028 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10029 emit_barrier ();
10033 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
10034 rtx table_label, rtx default_label)
10036 rtx index;
10038 if (! HAVE_tablejump)
10039 return 0;
10041 index_expr = fold_build2 (MINUS_EXPR, index_type,
10042 fold_convert (index_type, index_expr),
10043 fold_convert (index_type, minval));
10044 index = expand_normal (index_expr);
10045 do_pending_stack_adjust ();
10047 do_tablejump (index, TYPE_MODE (index_type),
10048 convert_modes (TYPE_MODE (index_type),
10049 TYPE_MODE (TREE_TYPE (range)),
10050 expand_normal (range),
10051 TYPE_UNSIGNED (TREE_TYPE (range))),
10052 table_label, default_label);
10053 return 1;
10056 /* Nonzero if the mode is a valid vector mode for this architecture.
10057 This returns nonzero even if there is no hardware support for the
10058 vector mode, but we can emulate with narrower modes. */
10061 vector_mode_valid_p (enum machine_mode mode)
10063 enum mode_class mclass = GET_MODE_CLASS (mode);
10064 enum machine_mode innermode;
10066 /* Doh! What's going on? */
10067 if (mclass != MODE_VECTOR_INT
10068 && mclass != MODE_VECTOR_FLOAT
10069 && mclass != MODE_VECTOR_FRACT
10070 && mclass != MODE_VECTOR_UFRACT
10071 && mclass != MODE_VECTOR_ACCUM
10072 && mclass != MODE_VECTOR_UACCUM)
10073 return 0;
10075 /* Hardware support. Woo hoo! */
10076 if (targetm.vector_mode_supported_p (mode))
10077 return 1;
10079 innermode = GET_MODE_INNER (mode);
10081 /* We should probably return 1 if requesting V4DI and we have no DI,
10082 but do have V2DI; that case is probably very unlikely, though. */
10084 /* If we have support for the inner mode, we can safely emulate it.
10085 We may not have V2DI, but we can emulate it with a pair of DIs. */
10086 return targetm.scalar_mode_supported_p (innermode);
10089 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
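/* For example, a V4SImode VECTOR_CST {1, 2, 3, 4} becomes
   (const_vector:V4SI [1 2 3 4]); any missing trailing elements are
   filled with zero.  */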
10090 static rtx
10091 const_vector_from_tree (tree exp)
10093 rtvec v;
10094 int units, i;
10095 tree link, elt;
10096 enum machine_mode inner, mode;
10098 mode = TYPE_MODE (TREE_TYPE (exp));
10100 if (initializer_zerop (exp))
10101 return CONST0_RTX (mode);
10103 units = GET_MODE_NUNITS (mode);
10104 inner = GET_MODE_INNER (mode);
10106 v = rtvec_alloc (units);
10108 link = TREE_VECTOR_CST_ELTS (exp);
10109 for (i = 0; link; link = TREE_CHAIN (link), ++i)
10111 elt = TREE_VALUE (link);
10113 if (TREE_CODE (elt) == REAL_CST)
10114 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
10115 inner);
10116 else if (TREE_CODE (elt) == FIXED_CST)
10117 RTVEC_ELT (v, i) = CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (elt),
10118 inner);
10119 else
10120 RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
10121 TREE_INT_CST_HIGH (elt),
10122 inner);
10125 /* Initialize remaining elements to 0. */
10126 for (; i < units; ++i)
10127 RTVEC_ELT (v, i) = CONST0_RTX (inner);
10129 return gen_rtx_CONST_VECTOR (mode, v);
10131 #include "gt-expr.h"