re PR target/29978 (redundant jumps)
[official-gcc.git] / gcc / expr.c
/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "machmode.h"
#include "real.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "except.h"
#include "function.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "recog.h"
#include "reload.h"
#include "output.h"
#include "typeclass.h"
#include "toplev.h"
#include "ggc.h"
#include "langhooks.h"
#include "intl.h"
#include "tm_p.h"
#include "tree-iterator.h"
#include "tree-pass.h"
#include "tree-flow.h"
#include "target.h"
#include "timevar.h"
#include "df.h"
#include "diagnostic.h"

/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#ifndef PUSH_ARGS_REVERSED
#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif
#endif

#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};

/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */

struct store_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
  void *constfundata;
  int reverse;
};

static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
						     unsigned int,
						     unsigned int);
static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
			      struct move_by_pieces *);
static bool block_move_libcall_safe_for_call_parm (void);
static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned, unsigned, HOST_WIDE_INT);
static tree emit_block_move_libcall_fn (int);
static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
			       struct store_by_pieces *);
static tree clear_storage_libcall_fn (int);
static rtx compress_float_constant (rtx, rtx);
static rtx get_subtarget (rtx);
static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
				     HOST_WIDE_INT, enum machine_mode,
				     tree, tree, int, alias_set_type);
static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
			tree, tree, alias_set_type, bool);

static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (const_tree, const_tree);

static int is_aligning_offset (const_tree, const_tree);
static void expand_operands (tree, tree, rtx, rtx*, rtx*,
			     enum expand_modifier);
static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
static rtx do_store_flag (tree, rtx, enum machine_mode, int);
#ifdef PUSH_ROUNDING
static void emit_single_push_insn (enum machine_mode, rtx, tree);
#endif
static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
static rtx const_vector_from_tree (tree);
static void write_complex_part (rtx, rtx, bool);

/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* Record for each mode whether we can float-extend from memory.  */

static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];

/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
   < (unsigned int) MOVE_RATIO)
#endif

/* This macro is used to determine whether clear_by_pieces should be
   called to clear storage.  */
#ifndef CLEAR_BY_PIECES_P
#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) CLEAR_RATIO)
#endif

/* This macro is used to determine whether store_by_pieces should be
   called to "memset" storage with byte values other than zero.  */
#ifndef SET_BY_PIECES_P
#define SET_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) SET_RATIO)
#endif

/* This macro is used to determine whether store_by_pieces should be
   called to "memcpy" storage when the source is a constant string.  */
#ifndef STORE_BY_PIECES_P
#define STORE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) MOVE_RATIO)
#endif

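/* Worked example of how these macros gate the inline strategy (the
   numbers here are hypothetical, not from any particular target): with
   MOVE_MAX_PIECES == 4 and MOVE_RATIO == 8, a 16-byte copy at 32-bit
   alignment costs

     move_by_pieces_ninsns (16, 32, 5) == 4

   word-sized moves, so MOVE_BY_PIECES_P (16, 32) evaluates 4 < 8, i.e.
   nonzero, and the copy is expanded inline instead of going through the
   memcpy libcall.  */
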
/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movmem_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block sets.  */
enum insn_code setmem_optab[NUM_MACHINE_MODES];

/* These arrays record the insn_code of three different kinds of insns
   to perform block compares.  */
enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
enum insn_code cmpstrn_optab[NUM_MACHINE_MODES];
enum insn_code cmpmem_optab[NUM_MACHINE_MODES];

/* Synchronization primitives.  */
enum insn_code sync_add_optab[NUM_MACHINE_MODES];
enum insn_code sync_sub_optab[NUM_MACHINE_MODES];
enum insn_code sync_ior_optab[NUM_MACHINE_MODES];
enum insn_code sync_and_optab[NUM_MACHINE_MODES];
enum insn_code sync_xor_optab[NUM_MACHINE_MODES];
enum insn_code sync_nand_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_add_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_sub_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_ior_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_and_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_xor_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_nand_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_add_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_sub_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_ior_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_and_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_xor_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_nand_optab[NUM_MACHINE_MODES];
enum insn_code sync_compare_and_swap[NUM_MACHINE_MODES];
enum insn_code sync_compare_and_swap_cc[NUM_MACHINE_MODES];
enum insn_code sync_lock_test_and_set[NUM_MACHINE_MODES];
enum insn_code sync_lock_release[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif

/* This is run to set up which modes can be used
   directly in memory and to initialize the block move optab.  It is run
   at the beginning of compilation and when the target is reinitialized.  */

void
init_expr_target (void)
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;
  rtx reg;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (VOIDmode, -1);

  insn = rtx_alloc (INSN);
  pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);
      PUT_MODE (reg, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    SET_REGNO (reg, regno);

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum machine_mode srcmode;
      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
	   srcmode = GET_MODE_WIDER_MODE (srcmode))
	{
	  enum insn_code ic;

	  ic = can_extend_p (mode, srcmode, 0);
	  if (ic == CODE_FOR_nothing)
	    continue;

	  PUT_MODE (mem, srcmode);

	  if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
	    float_extend_from_mem[mode][srcmode] = true;
	}
    }
}

/* This is run at the start of compiling a function.  */

void
init_expr (void)
{
  cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
}

/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating, or both may be
   fixed-point.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (rtx to, rtx from, int unsignedp)
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = SCALAR_FLOAT_MODE_P (to_mode);
  int from_real = SCALAR_FLOAT_MODE_P (from_mode);
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
			      : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));

  gcc_assert (to_real == from_real);
  gcc_assert (to_mode != BLKmode);
  gcc_assert (from_mode != BLKmode);

  /* If the source and destination are already the same, then there's
     nothing to do.  */
  if (to == from)
    return;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));

      if (VECTOR_MODE_P (to_mode))
	from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
	to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);
      return;
    }

  if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
    {
      convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
      convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
      return;
    }

  if (to_real)
    {
      rtx value, insns;
      convert_optab tab;

      gcc_assert ((GET_MODE_PRECISION (from_mode)
		   != GET_MODE_PRECISION (to_mode))
		  || (DECIMAL_FLOAT_MODE_P (from_mode)
		      != DECIMAL_FLOAT_MODE_P (to_mode)));

      if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
	/* Conversion between decimal float and binary float, same size.  */
	tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
      else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
	tab = sext_optab;
      else
	tab = trunc_optab;

      /* Try converting directly if the insn is supported.  */

      code = convert_optab_handler (tab, to_mode, from_mode)->insn_code;
      if (code != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from,
			  tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
	  return;
	}

      /* Otherwise use a libcall.  */
      libcall = convert_optab_libfunc (tab, to_mode, from_mode);

      /* Is this conversion implemented yet?  */
      gcc_assert (libcall);

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
				       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value,
			  tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
								       from)
			  : gen_rtx_FLOAT_EXTEND (to_mode, from));
      return;
    }

  /* Handle pointer conversion.  */			/* SPEE 900220.  */
  /* Targets are expected to provide conversion insns between PxImode and
     xImode for all MODE_PARTIAL_INT modes they use, but no others.  */
  if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
    {
      enum machine_mode full_mode
	= smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);

      gcc_assert (convert_optab_handler (trunc_optab, to_mode, full_mode)->insn_code
		  != CODE_FOR_nothing);

      if (full_mode != from_mode)
	from = convert_to_mode (full_mode, from, unsignedp);
      emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, full_mode)->insn_code,
		      to, from, UNKNOWN);
      return;
    }
  if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
    {
      rtx new_from;
      enum machine_mode full_mode
	= smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);

      gcc_assert (convert_optab_handler (sext_optab, full_mode, from_mode)->insn_code
		  != CODE_FOR_nothing);

      if (to_mode == full_mode)
	{
	  emit_unop_insn (convert_optab_handler (sext_optab, full_mode, from_mode)->insn_code,
			  to, from, UNKNOWN);
	  return;
	}

      new_from = gen_reg_rtx (full_mode);
      emit_unop_insn (convert_optab_handler (sext_optab, full_mode, from_mode)->insn_code,
		      new_from, from, UNKNOWN);

      /* else proceed to integer conversions below.  */
      from_mode = full_mode;
      from = new_from;
    }

  /* Make sure both are fixed-point modes or both are not.  */
  gcc_assert (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode) ==
	      ALL_SCALAR_FIXED_POINT_MODE_P (to_mode));
  if (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode))
    {
      /* If we widen from_mode to to_mode and they are in the same class,
	 we won't saturate the result.
	 Otherwise, always saturate the result to play safe.  */
      if (GET_MODE_CLASS (from_mode) == GET_MODE_CLASS (to_mode)
	  && GET_MODE_SIZE (from_mode) < GET_MODE_SIZE (to_mode))
	expand_fixed_convert (to, from, 0, 0);
      else
	expand_fixed_convert (to, from, 0, 1);
      return;
    }

  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  if (REG_P (to))
	    {
	      if (reg_overlap_mentioned_p (to, from))
		from = force_reg (from_mode, from);
	      emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
	    }
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	{
#ifdef HAVE_slt
	  if (HAVE_slt
	      && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
	      && STORE_FLAG_VALUE == -1)
	    {
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			     lowpart_mode, 0);
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));
	    }
	  else
#endif
	    {
	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				NULL_RTX, 0);
	      fill_value = convert_to_mode (word_mode, fill_value, 1);
	    }
	}

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  gcc_assert (subword);

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
      return;
    }

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((MEM_P (from)
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || REG_P (from)
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((MEM_P (from)
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || REG_P (from)
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;
	  rtx tmp;
	  tree shift_amount;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
					       GET_MODE_BITSIZE (intermediate))))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.
	     Generate what we need with shifts.  */
	  shift_amount = build_int_cst (NULL_TREE,
					GET_MODE_BITSIZE (to_mode)
					- GET_MODE_BITSIZE (from_mode));
	  from = gen_lowpart (to_mode, force_reg (from_mode, from));
	  tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
			      to, unsignedp);
	  tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
			      to, unsignedp);
	  if (tmp != to)
	    emit_move_insn (to, tmp);
	  return;
	}
    }

  /* Support special truncate insns for certain modes.  */
  if (convert_optab_handler (trunc_optab, to_mode, from_mode)->insn_code != CODE_FOR_nothing)
    {
      emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, from_mode)->insn_code,
		      to, from, UNKNOWN);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.

     ??? Code above formerly short-circuited this, for most integer
     mode pairs, with a force_reg in from_mode followed by a recursive
     call to this routine.  Appears always to have been wrong.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  gcc_unreachable ();
}

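/* A minimal usage sketch for convert_move (the pseudos and modes here
   are illustrative, not taken from a caller in this file):

     rtx dst = gen_reg_rtx (SImode);
     rtx src = gen_reg_rtx (HImode);
     convert_move (dst, src, 1);

   This emits a zero-extension from HImode to SImode, using a direct
   extend insn when can_extend_p finds one, otherwise an intermediate
   mode or the final shift-pair fallback above.  */
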
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.  */

rtx
convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}

/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.  */

rtx
convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
{
  rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do
     is make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
	  && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
	{
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We need to zero extend VAL.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	}

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (REG_P (x)
			  && (! HARD_REGISTER_P (x)
			      || HARD_REGNO_MODE_OK (REGNO (x), mode))
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.  This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return gen_int_mode (val, mode);
	}

      return gen_lowpart (mode, x);
    }

  /* Converting an integer constant into MODE is always equivalent to a
     subreg operation.  */
  if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
    {
      gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
      return simplify_gen_subreg (mode, x, oldmode, 0);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}

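/* Illustrative constant case (the values are chosen for the example):
   converting (const_int -1), known to be in QImode, to HImode as
   unsigned takes the CONST_INT branch above, masks the value down to
   0xff, and returns gen_int_mode (0xff, HImode), i.e. the constant is
   zero-extended rather than handed to gen_lowpart unchanged.  */
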
/* STORE_MAX_PIECES is the number of bytes at a time that we can
   store efficiently.  Due to internal GCC limitations, this is
   MOVE_MAX_PIECES limited by the number of bytes GCC can represent
   for an immediate constant.  */

#define STORE_MAX_PIECES  MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))

/* Determine whether the LEN bytes can be moved by using several move
   instructions.  Return nonzero if a call to move_by_pieces should
   succeed.  */

int
can_move_by_pieces (unsigned HOST_WIDE_INT len,
		    unsigned int align ATTRIBUTE_UNUSED)
{
  return MOVE_BY_PIECES_P (len, align);
}

/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum stack alignment we can assume.

   If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory the end minus one byte ala
   stpcpy.  */

rtx
move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
		unsigned int align, int endp)
{
  struct move_by_pieces data;
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));

  data.offset = 0;
  data.from_addr = from_addr;
  if (to)
    {
      to_addr = XEXP (to, 0);
      data.to = to;
      data.autinc_to
	= (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
	   || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
      data.reverse
	= (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
    }
  else
    {
      to_addr = NULL_RTX;
      data.to = NULL_RTX;
      data.autinc_to = 1;
#ifdef STACK_GROWS_DOWNWARD
      data.reverse = 1;
#else
      data.reverse = 0;
#endif
    }
  data.to_addr = to_addr;
  data.from = from;
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align, max_size) > 2)
    {
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      enum machine_mode xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
	   tmode != VOIDmode;
	   xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
	    || SLOW_UNALIGNED_ACCESS (tmode, align))
	  break;

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = optab_handler (mov_optab, mode)->insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  gcc_assert (!data.len);

  if (endp)
    {
      rtx to1;

      gcc_assert (!data.reverse);
      if (data.autinc_to)
	{
	  if (endp == 2)
	    {
	      if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
		emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
	      else
		data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
								-1));
	    }
	  to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
					   data.offset);
	}
      else
	{
	  if (endp == 2)
	    --data.offset;
	  to1 = adjust_address (data.to, QImode, data.offset);
	}
      return to1;
    }
  else
    return data.to;
}

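/* ENDP semantics in brief (a sketch; TO and FROM are BLKmode MEMs of
   LEN bytes as described above):

     move_by_pieces (to, from, len, align, 0);   returns TO itself
     move_by_pieces (to, from, len, align, 1);   returns a QImode MEM
                                                 addressing TO + LEN
     move_by_pieces (to, from, len, align, 2);   returns a QImode MEM
                                                 addressing TO + LEN - 1

   matching the mempcpy and stpcpy conventions named in the comment.  */
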
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */

static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
		       unsigned int max_size)
{
  unsigned HOST_WIDE_INT n_insns = 0;
  enum machine_mode tmode;

  tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      enum machine_mode tmode, xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
	   tmode != VOIDmode;
	   xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
	    || SLOW_UNALIGNED_ACCESS (tmode, align))
	  break;

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = optab_handler (mov_optab, mode)->insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  gcc_assert (!l);
  return n_insns;
}

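/* Worked example (a hypothetical 32-bit target with MOVE_MAX_PIECES == 4
   and alignment high enough for every mode): for l == 11 the loop above
   counts 11/4 = 2 SImode moves (remainder 3), then 3/2 = 1 HImode move
   (remainder 1), then 1/1 = 1 QImode move, so the function returns 4.  */
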
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
		  struct move_by_pieces *data)
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
    {
      if (data->reverse)
	data->offset -= size;

      if (data->to)
	{
	  if (data->autinc_to)
	    to1 = adjust_automodify_address (data->to, mode, data->to_addr,
					     data->offset);
	  else
	    to1 = adjust_address (data->to, mode, data->offset);
	}

      if (data->autinc_from)
	from1 = adjust_automodify_address (data->from, mode, data->from_addr,
					   data->offset);
      else
	from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));

      if (data->to)
	emit_insn ((*genfun) (to1, from1));
      else
	{
#ifdef PUSH_ROUNDING
	  emit_single_push_insn (mode, from1, NULL);
#else
	  gcc_unreachable ();
#endif
	}

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
	data->offset += size;

      data->len -= size;
    }
}

/* Emit code to move a block Y to a block X.  This may be done with
   string-move instructions, with multiple scalar move instructions,
   or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.
   METHOD describes what kind of copy this is, and what mechanisms may be used.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method,
		       unsigned int expected_align, HOST_WIDE_INT expected_size)
{
  bool may_use_call;
  rtx retval = 0;
  unsigned int align;

  switch (method)
    {
    case BLOCK_OP_NORMAL:
    case BLOCK_OP_TAILCALL:
      may_use_call = true;
      break;

    case BLOCK_OP_CALL_PARM:
      may_use_call = block_move_libcall_safe_for_call_parm ();

      /* Make inhibit_defer_pop nonzero around the library call
	 to force it to pop the arguments right away.  */
      NO_DEFER_POP;
      break;

    case BLOCK_OP_NO_LIBCALL:
      may_use_call = false;
      break;

    default:
      gcc_unreachable ();
    }

  align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));

  gcc_assert (MEM_P (x));
  gcc_assert (MEM_P (y));
  gcc_assert (size);

  /* Make sure we've got BLKmode addresses; store_one_arg can decide that
     block copy is more efficient for other large modes, e.g. DCmode.  */
  x = adjust_address (x, BLKmode, 0);
  y = adjust_address (y, BLKmode, 0);

  /* Set MEM_SIZE as appropriate for this block copy.  The main place this
     can be incorrect is coming from __builtin_memcpy.  */
  if (GET_CODE (size) == CONST_INT)
    {
      if (INTVAL (size) == 0)
	return 0;

      x = shallow_copy_rtx (x);
      y = shallow_copy_rtx (y);
      set_mem_size (x, size);
      set_mem_size (y, size);
    }

  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align, 0);
  else if (emit_block_move_via_movmem (x, y, size, align,
				       expected_align, expected_size))
    ;
  else if (may_use_call)
    retval = emit_block_move_via_libcall (x, y, size,
					  method == BLOCK_OP_TAILCALL);
  else
    emit_block_move_via_loop (x, y, size, align);

  if (method == BLOCK_OP_CALL_PARM)
    OK_DEFER_POP;

  return retval;
}

rtx
emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
{
  return emit_block_move_hints (x, y, size, method, 0, -1);
}

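/* A minimal caller sketch (dst_addr and src_addr stand for
   already-computed Pmode addresses; the 32-byte size is illustrative):

     rtx x = gen_rtx_MEM (BLKmode, dst_addr);
     rtx y = gen_rtx_MEM (BLKmode, src_addr);
     emit_block_move (x, y, GEN_INT (32), BLOCK_OP_NORMAL);

   With a constant size the copy is expanded by pieces when
   MOVE_BY_PIECES_P allows it, failing that via a movmem pattern, and
   only then through the memcpy libcall or the QImode loop.  */
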
/* A subroutine of emit_block_move.  Returns true if calling the
   block move libcall will not clobber any parameters which may have
   already been placed on the stack.  */

static bool
block_move_libcall_safe_for_call_parm (void)
{
  /* If arguments are pushed on the stack, then they're safe.  */
  if (PUSH_ARGS)
    return true;

  /* If registers go on the stack anyway, any argument is sure to clobber
     an outgoing argument.  */
#if defined (REG_PARM_STACK_SPACE)
  if (OUTGOING_REG_PARM_STACK_SPACE)
    {
      tree fn;
      fn = emit_block_move_libcall_fn (false);
      if (REG_PARM_STACK_SPACE (fn) != 0)
	return false;
    }
#endif

  /* If any argument goes in memory, then it might clobber an outgoing
     argument.  */
  {
    CUMULATIVE_ARGS args_so_far;
    tree fn, arg;

    fn = emit_block_move_libcall_fn (false);
    INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);

    arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
    for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
      {
	enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
	rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
	if (!tmp || !REG_P (tmp))
	  return false;
	if (targetm.calls.arg_partial_bytes (&args_so_far, mode, NULL, 1))
	  return false;
	FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
      }
  }
  return true;
}

/* A subroutine of emit_block_move.  Expand a movmem pattern;
   return true if successful.  */

static bool
emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
			    unsigned int expected_align, HOST_WIDE_INT expected_size)
{
  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
  int save_volatile_ok = volatile_ok;
  enum machine_mode mode;

  if (expected_align < align)
    expected_align = align;

  /* Since this is a move insn, we don't care about volatility.  */
  volatile_ok = 1;

  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum insn_code code = movmem_optab[(int) mode];
      insn_operand_predicate_fn pred;

      if (code != CODE_FOR_nothing
	  /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
	     here because if SIZE is less than the mode mask, as it is
	     returned by the macro, it will definitely be less than the
	     actual mode mask.  */
	  && ((GET_CODE (size) == CONST_INT
	       && ((unsigned HOST_WIDE_INT) INTVAL (size)
		   <= (GET_MODE_MASK (mode) >> 1)))
	      || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
	  && ((pred = insn_data[(int) code].operand[0].predicate) == 0
	      || (*pred) (x, BLKmode))
	  && ((pred = insn_data[(int) code].operand[1].predicate) == 0
	      || (*pred) (y, BLKmode))
	  && ((pred = insn_data[(int) code].operand[3].predicate) == 0
	      || (*pred) (opalign, VOIDmode)))
	{
	  rtx op2;
	  rtx last = get_last_insn ();
	  rtx pat;

	  op2 = convert_to_mode (mode, size, 1);
	  pred = insn_data[(int) code].operand[2].predicate;
	  if (pred != 0 && ! (*pred) (op2, mode))
	    op2 = copy_to_mode_reg (mode, op2);

	  /* ??? When called via emit_block_move_for_call, it'd be
	     nice if there were some way to inform the backend, so
	     that it doesn't fail the expansion because it thinks
	     emitting the libcall would be more efficient.  */

	  if (insn_data[(int) code].n_operands == 4)
	    pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	  else
	    pat = GEN_FCN ((int) code) (x, y, op2, opalign,
					GEN_INT (expected_align),
					GEN_INT (expected_size));
	  if (pat)
	    {
	      emit_insn (pat);
	      volatile_ok = save_volatile_ok;
	      return true;
	    }
	  else
	    delete_insns_since (last);
	}
    }

  volatile_ok = save_volatile_ok;
  return false;
}

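/* Operand layout assumed of a movmemM pattern by the code above:
   operand 0 is the destination BLKmode MEM, operand 1 the source MEM,
   operand 2 the length converted to mode M, operand 3 the alignment in
   bytes (OPALIGN), and, for six-operand patterns, operands 4 and 5 the
   expected-alignment and expected-size hints.  */
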
/* A subroutine of emit_block_move.  Expand a call to memcpy.
   Return the return value from memcpy, 0 otherwise.  */

rtx
emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
{
  rtx dst_addr, src_addr;
  tree call_expr, fn, src_tree, dst_tree, size_tree;
  enum machine_mode size_mode;
  rtx retval;

  /* Emit code to copy the addresses of DST and SRC and SIZE into new
     pseudos.  We can then place those new pseudos into a VAR_DECL and
     use them later.  */

  dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
  src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));

  dst_addr = convert_memory_address (ptr_mode, dst_addr);
  src_addr = convert_memory_address (ptr_mode, src_addr);

  dst_tree = make_tree (ptr_type_node, dst_addr);
  src_tree = make_tree (ptr_type_node, src_addr);

  size_mode = TYPE_MODE (sizetype);

  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memcpy in this context.  This could be a user call to memcpy and
     the user may wish to examine the return value from memcpy.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.  */

  size_tree = make_tree (sizetype, size);

  fn = emit_block_move_libcall_fn (true);
  call_expr = build_call_expr (fn, 3, dst_tree, src_tree, size_tree);
  CALL_EXPR_TAILCALL (call_expr) = tailcall;

  retval = expand_normal (call_expr);

  return retval;
}

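/* The call built above is equivalent to this C source (a sketch; dst,
   src and size name the copied-to-pseudo values computed in the
   function):

     retval = memcpy (dst, src, size);

   with the return value of memcpy expanded into RETVAL, which is why
   the normal call conventions, not the libcall ones, must be used.  */
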
/* A subroutine of emit_block_move_via_libcall.  Create the tree node
   for the function we use for block copies.  The first time FOR_CALL
   is true, we call assemble_external.  */

static GTY(()) tree block_move_fn;

void
init_block_move_fn (const char *asmspec)
{
  if (!block_move_fn)
    {
      tree args, fn;

      fn = get_identifier ("memcpy");
      args = build_function_type_list (ptr_type_node, ptr_type_node,
				       const_ptr_type_node, sizetype,
				       NULL_TREE);

      fn = build_decl (FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;
      DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
      DECL_VISIBILITY_SPECIFIED (fn) = 1;

      block_move_fn = fn;
    }

  if (asmspec)
    set_user_assembler_name (block_move_fn, asmspec);
}

static tree
emit_block_move_libcall_fn (int for_call)
{
  static bool emitted_extern;

  if (!block_move_fn)
    init_block_move_fn (NULL);

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (block_move_fn);
      assemble_external (block_move_fn);
    }

  return block_move_fn;
}

/* A subroutine of emit_block_move.  Copy the data via an explicit
   loop.  This is used only when libcalls are forbidden.  */
/* ??? It'd be nice to copy in hunks larger than QImode.  */

static void
emit_block_move_via_loop (rtx x, rtx y, rtx size,
			  unsigned int align ATTRIBUTE_UNUSED)
{
  rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
  enum machine_mode iter_mode;

  iter_mode = GET_MODE (size);
  if (iter_mode == VOIDmode)
    iter_mode = word_mode;

  top_label = gen_label_rtx ();
  cmp_label = gen_label_rtx ();
  iter = gen_reg_rtx (iter_mode);

  emit_move_insn (iter, const0_rtx);

  x_addr = force_operand (XEXP (x, 0), NULL_RTX);
  y_addr = force_operand (XEXP (y, 0), NULL_RTX);
  do_pending_stack_adjust ();

  emit_jump (cmp_label);
  emit_label (top_label);

  tmp = convert_modes (Pmode, iter_mode, iter, true);
  x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
  y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
  x = change_address (x, QImode, x_addr);
  y = change_address (y, QImode, y_addr);

  emit_move_insn (x, y);

  tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
			     true, OPTAB_LIB_WIDEN);
  if (tmp != iter)
    emit_move_insn (iter, tmp);

  emit_label (cmp_label);

  emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
			   true, top_label);
}

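/* The RTL emitted above corresponds to this C skeleton (a sketch):

     iter = 0;
     goto cmp;
   top:
     ((char *) x)[iter] = ((char *) y)[iter];
     iter = iter + 1;
   cmp:
     if (iter < size)
       goto top;
*/
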
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
{
  int i;
#ifdef HAVE_load_multiple
  rtx pat;
  rtx last;
#endif

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
			       GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}

/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_from_reg (int regno, rtx x, int nregs)
{
  int i;

  if (nregs == 0)
    return;

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      rtx last = get_last_insn ();
      rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
				    GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      gcc_assert (tem);

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}

/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
   ORIG, where ORIG is a non-consecutive group of registers represented by
   a PARALLEL.  The clone is identical to the original except in that the
   original set of registers is replaced by a new set of pseudo registers.
   The new set has the same modes as the original set.  */

rtx
gen_group_rtx (rtx orig)
{
  int i, length;
  rtx *tmps;

  gcc_assert (GET_CODE (orig) == PARALLEL);

  length = XVECLEN (orig, 0);
  tmps = alloca (sizeof (rtx) * length);

  /* Skip a NULL entry in first slot.  */
  i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;

  if (i)
    tmps[0] = 0;

  for (; i < length; i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
      rtx offset = XEXP (XVECEXP (orig, 0, i), 1);

      tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
    }

  return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
}

/* A subroutine of emit_group_load.  Arguments as for emit_group_load,
   except that values are placed in TMPS[i], and must later be moved
   into corresponding XEXP (XVECEXP (DST, 0, i), 0) element.  */

static void
emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
{
  rtx src;
  int start, i;
  enum machine_mode m = GET_MODE (orig_src);

  gcc_assert (GET_CODE (dst) == PARALLEL);

  if (m != VOIDmode
      && !SCALAR_INT_MODE_P (m)
      && !MEM_P (orig_src)
      && GET_CODE (orig_src) != CONCAT)
    {
      enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
      if (imode == BLKmode)
	src = assign_stack_temp (GET_MODE (orig_src), ssize, 0);
      else
	src = gen_reg_rtx (imode);
      if (imode != BLKmode)
	src = gen_lowpart (GET_MODE (orig_src), src);
      emit_move_insn (src, orig_src);
      /* ...and back again.  */
      if (imode != BLKmode)
	src = gen_lowpart (imode, src);
      emit_group_load_1 (tmps, dst, src, type, ssize);
      return;
    }

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (dst, 0, 0), 0))
    start = 0;
  else
    start = 1;

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
      unsigned int bytelen = GET_MODE_SIZE (mode);
      int shift = 0;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
	{
	  /* Arrange to shift the fragment to where it belongs.
	     extract_bit_field loads to the lsb of the reg.  */
	  if (
#ifdef BLOCK_REG_PADDING
	      BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
	      == (BYTES_BIG_ENDIAN ? upward : downward)
#else
	      BYTES_BIG_ENDIAN
#endif
	      )
	    shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	  bytelen = ssize - bytepos;
	  gcc_assert (bytelen > 0);
	}

      /* If we won't be loading directly from memory, protect the real source
	 from strange tricks we might play; but make sure that the source can
	 be loaded directly into the destination.  */
      src = orig_src;
      if (!MEM_P (orig_src)
	  && (!CONSTANT_P (orig_src)
	      || (GET_MODE (orig_src) != mode
		  && GET_MODE (orig_src) != VOIDmode)))
	{
	  if (GET_MODE (orig_src) == VOIDmode)
	    src = gen_reg_rtx (mode);
	  else
	    src = gen_reg_rtx (GET_MODE (orig_src));

	  emit_move_insn (src, orig_src);
	}

      /* Optimize the access just a bit.  */
      if (MEM_P (src)
	  && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
	      || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
	  && bytelen == GET_MODE_SIZE (mode))
	{
	  tmps[i] = gen_reg_rtx (mode);
	  emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
	}
      else if (COMPLEX_MODE_P (mode)
	       && GET_MODE (src) == mode
	       && bytelen == GET_MODE_SIZE (mode))
	/* Let emit_move_complex do the bulk of the work.  */
	tmps[i] = src;
      else if (GET_CODE (src) == CONCAT)
	{
	  unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
	  unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));

	  if ((bytepos == 0 && bytelen == slen0)
	      || (bytepos != 0 && bytepos + bytelen <= slen))
	    {
	      /* The following assumes that the concatenated objects all
		 have the same size.  In this case, a simple calculation
		 can be used to determine the object and the bit field
		 to be extracted.  */
	      tmps[i] = XEXP (src, bytepos / slen0);
	      if (! CONSTANT_P (tmps[i])
		  && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
		tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
					     (bytepos % slen0) * BITS_PER_UNIT,
					     1, NULL_RTX, mode, mode);
	    }
	  else
	    {
	      rtx mem;

	      gcc_assert (!bytepos);
	      mem = assign_stack_temp (GET_MODE (src), slen, 0);
	      emit_move_insn (mem, src);
	      tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
					   0, 1, NULL_RTX, mode, mode);
	    }
	}
      /* FIXME: A SIMD parallel will eventually lead to a subreg of a
	 SIMD register, which is currently broken.  While we get GCC
	 to emit proper RTL for these cases, let's dump to memory.  */
      else if (VECTOR_MODE_P (GET_MODE (dst))
	       && REG_P (src))
	{
	  int slen = GET_MODE_SIZE (GET_MODE (src));
	  rtx mem;

	  mem = assign_stack_temp (GET_MODE (src), slen, 0);
	  emit_move_insn (mem, src);
	  tmps[i] = adjust_address (mem, mode, (int) bytepos);
	}
      else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
	       && XVECLEN (dst, 0) > 1)
	tmps[i] = simplify_gen_subreg (mode, src, GET_MODE(dst), bytepos);
      else if (CONSTANT_P (src))
	{
	  HOST_WIDE_INT len = (HOST_WIDE_INT) bytelen;

	  if (len == ssize)
	    tmps[i] = src;
	  else
	    {
	      rtx first, second;

	      gcc_assert (2 * len == ssize);
	      split_double (src, &first, &second);
	      if (i)
		tmps[i] = second;
	      else
		tmps[i] = first;
	    }
	}
      else if (REG_P (src) && GET_MODE (src) == mode)
	tmps[i] = src;
      else
	tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
				     bytepos * BITS_PER_UNIT, 1, NULL_RTX,
				     mode, mode);

      if (shift)
	tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
				build_int_cst (NULL_TREE, shift), tmps[i], 0);
    }
}

/* Emit code to move a block SRC of type TYPE to a block DST,
   where DST is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_SRC in bytes, or -1
   if not known.  */

void
emit_group_load (rtx dst, rtx src, tree type, int ssize)
{
  rtx *tmps;
  int i;

  tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));
  emit_group_load_1 (tmps, dst, src, type, ssize);

  /* Copy the extracted pieces into the proper (probable) hard regs.  */
  for (i = 0; i < XVECLEN (dst, 0); i++)
    {
      rtx d = XEXP (XVECEXP (dst, 0, i), 0);
      if (d == NULL)
	continue;
      emit_move_insn (d, tmps[i]);
    }
}

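/* Shape of a typical DST accepted here (a sketch; the registers and
   offsets are illustrative): a structure returned in two registers
   might be

     (parallel [(expr_list (reg:DI 0) (const_int 0))
		(expr_list (reg:DI 1) (const_int 8))])

   where each element pairs a destination register with its byte offset
   into the block, and emit_group_load_1 extracts the matching piece of
   SRC for each element.  */
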
/* Similar, but load SRC into new pseudos in a format that looks like
   PARALLEL.  This can later be fed to emit_group_move to get things
   in the right place.  */

rtx
emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
{
  rtvec vec;
  int i;

  vec = rtvec_alloc (XVECLEN (parallel, 0));
  emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);

  /* Convert the vector to look just like the original PARALLEL, except
     with the computed values.  */
  for (i = 0; i < XVECLEN (parallel, 0); i++)
    {
      rtx e = XVECEXP (parallel, 0, i);
      rtx d = XEXP (e, 0);

      if (d)
	{
	  d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
	  e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
	}
      RTVEC_ELT (vec, i) = e;
    }

  return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
}

1866 /* Emit code to move a block SRC to block DST, where SRC and DST are
1867 non-consecutive groups of registers, each represented by a PARALLEL. */
1869 void
1870 emit_group_move (rtx dst, rtx src)
1872 int i;
1874 gcc_assert (GET_CODE (src) == PARALLEL
1875 && GET_CODE (dst) == PARALLEL
1876 && XVECLEN (src, 0) == XVECLEN (dst, 0));
1878 /* Skip first entry if NULL. */
1879 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1880 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1881 XEXP (XVECEXP (src, 0, i), 0));
1884 /* Move a group of registers represented by a PARALLEL into pseudos. */
1886 rtx
1887 emit_group_move_into_temps (rtx src)
1889 rtvec vec = rtvec_alloc (XVECLEN (src, 0));
1890 int i;
1892 for (i = 0; i < XVECLEN (src, 0); i++)
1894 rtx e = XVECEXP (src, 0, i);
1895 rtx d = XEXP (e, 0);
1897 if (d)
1898 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
1899 RTVEC_ELT (vec, i) = e;
1902 return gen_rtx_PARALLEL (GET_MODE (src), vec);
1905 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1906 where SRC is non-consecutive registers represented by a PARALLEL.
1907 SSIZE represents the total size of block ORIG_DST, or -1 if not
1908 known. */
1910 void
1911 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
1913 rtx *tmps, dst;
1914 int start, finish, i;
1915 enum machine_mode m = GET_MODE (orig_dst);
1917 gcc_assert (GET_CODE (src) == PARALLEL);
1919 if (!SCALAR_INT_MODE_P (m)
1920 && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
1922 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
1923 if (imode == BLKmode)
1924 dst = assign_stack_temp (GET_MODE (orig_dst), ssize, 0);
1925 else
1926 dst = gen_reg_rtx (imode);
1927 emit_group_store (dst, src, type, ssize);
1928 if (imode != BLKmode)
1929 dst = gen_lowpart (GET_MODE (orig_dst), dst);
1930 emit_move_insn (orig_dst, dst);
1931 return;
1934 /* Check for a NULL entry, used to indicate that the parameter goes
1935 both on the stack and in registers. */
1936 if (XEXP (XVECEXP (src, 0, 0), 0))
1937 start = 0;
1938 else
1939 start = 1;
1940 finish = XVECLEN (src, 0);
1942 tmps = alloca (sizeof (rtx) * finish);
1944 /* Copy the (probable) hard regs into pseudos. */
1945 for (i = start; i < finish; i++)
1947 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1948 if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
1950 tmps[i] = gen_reg_rtx (GET_MODE (reg));
1951 emit_move_insn (tmps[i], reg);
1953 else
1954 tmps[i] = reg;
1957 /* If we won't be storing directly into memory, protect the real destination
1958 from strange tricks we might play. */
1959 dst = orig_dst;
1960 if (GET_CODE (dst) == PARALLEL)
1962 rtx temp;
1964 /* We can get a PARALLEL dst if there is a conditional expression in
1965 a return statement. In that case, the dst and src are the same,
1966 so no action is necessary. */
1967 if (rtx_equal_p (dst, src))
1968 return;
1970 /* It is unclear if we can ever reach here, but we may as well handle
1971 it. Allocate a temporary, and split this into a store/load to/from
1972 the temporary. */
1974 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
1975 emit_group_store (temp, src, type, ssize);
1976 emit_group_load (dst, temp, type, ssize);
1977 return;
1979 else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
1981 enum machine_mode outer = GET_MODE (dst);
1982 enum machine_mode inner;
1983 HOST_WIDE_INT bytepos;
1984 bool done = false;
1985 rtx temp;
1987 if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
1988 dst = gen_reg_rtx (outer);
1990 /* Make life a bit easier for combine. */
1991 /* If the first element of the vector is the low part
1992 of the destination mode, use a paradoxical subreg to
1993 initialize the destination. */
1994 if (start < finish)
1996 inner = GET_MODE (tmps[start]);
1997 bytepos = subreg_lowpart_offset (inner, outer);
1998 if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
2000 temp = simplify_gen_subreg (outer, tmps[start],
2001 inner, 0);
2002 if (temp)
2004 emit_move_insn (dst, temp);
2005 done = true;
2006 start++;
2011 /* If the first element wasn't the low part, try the last. */
2012 if (!done
2013 && start < finish - 1)
2015 inner = GET_MODE (tmps[finish - 1]);
2016 bytepos = subreg_lowpart_offset (inner, outer);
2017 if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
2019 temp = simplify_gen_subreg (outer, tmps[finish - 1],
2020 inner, 0);
2021 if (temp)
2023 emit_move_insn (dst, temp);
2024 done = true;
2025 finish--;
2030 /* Otherwise, simply initialize the result to zero. */
2031 if (!done)
2032 emit_move_insn (dst, CONST0_RTX (outer));
2035 /* Process the pieces. */
2036 for (i = start; i < finish; i++)
2038 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2039 enum machine_mode mode = GET_MODE (tmps[i]);
2040 unsigned int bytelen = GET_MODE_SIZE (mode);
2041 rtx dest = dst;
2043 /* Handle trailing fragments that run over the size of the struct. */
2044 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2046 /* store_bit_field always takes its value from the lsb.
2047 Move the fragment to the lsb if it's not already there. */
2048 if (
2049 #ifdef BLOCK_REG_PADDING
2050 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2051 == (BYTES_BIG_ENDIAN ? upward : downward)
2052 #else
2053 BYTES_BIG_ENDIAN
2054 #endif
2057 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2058 tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
2059 build_int_cst (NULL_TREE, shift),
2060 tmps[i], 0);
2062 bytelen = ssize - bytepos;
2065 if (GET_CODE (dst) == CONCAT)
2067 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2068 dest = XEXP (dst, 0);
2069 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2071 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2072 dest = XEXP (dst, 1);
2074 else
2076 gcc_assert (bytepos == 0 && XVECLEN (src, 0));
2077 dest = assign_stack_temp (GET_MODE (dest),
2078 GET_MODE_SIZE (GET_MODE (dest)), 0);
2079 emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
2080 tmps[i]);
2081 dst = dest;
2082 break;
2086 /* Optimize the access just a bit. */
2087 if (MEM_P (dest)
2088 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2089 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2090 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2091 && bytelen == GET_MODE_SIZE (mode))
2092 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2093 else
2094 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2095 mode, tmps[i]);
2098 /* Copy from the pseudo into the (probable) hard reg. */
2099 if (orig_dst != dst)
2100 emit_move_insn (orig_dst, dst);
2103 /* Generate code to copy a BLKmode object of TYPE out of a
2104 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2105 is null, a stack temporary is created. TGTBLK is returned.
2107 The purpose of this routine is to handle functions that return
2108 BLKmode structures in registers. Some machines (the PA for example)
2109 want to return all small structures in registers regardless of the
2110 structure's alignment. */
2112 rtx
2113 copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
2115 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2116 rtx src = NULL, dst = NULL;
2117 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2118 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2120 if (tgtblk == 0)
2122 tgtblk = assign_temp (build_qualified_type (type,
2123 (TYPE_QUALS (type)
2124 | TYPE_QUAL_CONST)),
2125 0, 1, 1);
2126 preserve_temp_slots (tgtblk);
2129 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2130 into a new pseudo which is a full word. */
2132 if (GET_MODE (srcreg) != BLKmode
2133 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2134 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2136 /* If the structure doesn't take up a whole number of words, see whether
2137 SRCREG is padded on the left or on the right. If it's on the left,
2138 set PADDING_CORRECTION to the number of bits to skip.
2140 In most ABIs, the structure will be returned at the least significant end of
2141 the register, which translates to right padding on little-endian
2142 targets and left padding on big-endian targets. The opposite
2143 holds if the structure is returned at the most significant
2144 end of the register. */
2145 if (bytes % UNITS_PER_WORD != 0
2146 && (targetm.calls.return_in_msb (type)
2147 ? !BYTES_BIG_ENDIAN
2148 : BYTES_BIG_ENDIAN))
2149 padding_correction
2150 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2152 /* Copy the structure BITSIZE bits at a time.
2154 We could probably emit more efficient code for machines which do not use
2155 strict alignment, but it doesn't seem worth the effort at the current
2156 time. */
2157 for (bitpos = 0, xbitpos = padding_correction;
2158 bitpos < bytes * BITS_PER_UNIT;
2159 bitpos += bitsize, xbitpos += bitsize)
2161 /* We need a new source operand each time xbitpos is on a
2162 word boundary and when xbitpos == padding_correction
2163 (the first time through). */
2164 if (xbitpos % BITS_PER_WORD == 0
2165 || xbitpos == padding_correction)
2166 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2167 GET_MODE (srcreg));
2169 /* We need a new destination operand each time bitpos is on
2170 a word boundary. */
2171 if (bitpos % BITS_PER_WORD == 0)
2172 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2174 /* Use xbitpos for the source extraction (right justified) and
2175 bitpos for the destination store (left justified). */
2176 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2177 extract_bit_field (src, bitsize,
2178 xbitpos % BITS_PER_WORD, 1,
2179 NULL_RTX, word_mode, word_mode));
2182 return tgtblk;
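/* Worked example (illustrative): for a 3-byte structure returned in a
   32-bit register on a big-endian target that does not return_in_msb,
   bytes % UNITS_PER_WORD is 3, so padding_correction above is
   32 - 3 * 8 = 8, and the copy loop skips the 8 pad bits at the most
   significant end of the source word.  */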
2185 /* Add a USE expression for REG to the (possibly empty) list pointed
2186 to by CALL_FUSAGE. REG must denote a hard register. */
2188 void
2189 use_reg (rtx *call_fusage, rtx reg)
2191 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2193 *call_fusage
2194 = gen_rtx_EXPR_LIST (VOIDmode,
2195 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2198 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2199 starting at REGNO. All of these registers must be hard registers. */
2201 void
2202 use_regs (rtx *call_fusage, int regno, int nregs)
2204 int i;
2206 gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2208 for (i = 0; i < nregs; i++)
2209 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2212 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2213 PARALLEL REGS. This is for calls that pass values in multiple
2214 non-contiguous locations. The Irix 6 ABI has examples of this. */
2216 void
2217 use_group_regs (rtx *call_fusage, rtx regs)
2219 int i;
2221 for (i = 0; i < XVECLEN (regs, 0); i++)
2223 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2225 /* A NULL entry means the parameter goes both on the stack and in
2226 registers. This can also be a MEM for targets that pass values
2227 partially on the stack and partially in registers. */
2228 if (reg != 0 && REG_P (reg))
2229 use_reg (call_fusage, reg);
2234 /* Determine whether the LEN bytes generated by CONSTFUN can be
2235 stored to memory using several move instructions. CONSTFUNDATA is
2236 a pointer which will be passed as argument in every CONSTFUN call.
2237 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2238 a memset operation and false if it's a copy of a constant string.
2239 Return nonzero if a call to store_by_pieces should succeed. */
2241 int
2242 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2243 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2244 void *constfundata, unsigned int align, bool memsetp)
2246 unsigned HOST_WIDE_INT l;
2247 unsigned int max_size;
2248 HOST_WIDE_INT offset = 0;
2249 enum machine_mode mode, tmode;
2250 enum insn_code icode;
2251 int reverse;
2252 rtx cst;
2254 if (len == 0)
2255 return 1;
2257 if (! (memsetp
2258 ? SET_BY_PIECES_P (len, align)
2259 : STORE_BY_PIECES_P (len, align)))
2260 return 0;
2262 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2263 if (align >= GET_MODE_ALIGNMENT (tmode))
2264 align = GET_MODE_ALIGNMENT (tmode);
2265 else
2267 enum machine_mode xmode;
2269 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2270 tmode != VOIDmode;
2271 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2272 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2273 || SLOW_UNALIGNED_ACCESS (tmode, align))
2274 break;
2276 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2279 /* We would first store what we can in the largest integer mode, then go to
2280 successively smaller modes. */
2282 for (reverse = 0;
2283 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2284 reverse++)
2286 l = len;
2287 mode = VOIDmode;
2288 max_size = STORE_MAX_PIECES + 1;
2289 while (max_size > 1)
2291 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2292 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2293 if (GET_MODE_SIZE (tmode) < max_size)
2294 mode = tmode;
2296 if (mode == VOIDmode)
2297 break;
2299 icode = optab_handler (mov_optab, mode)->insn_code;
2300 if (icode != CODE_FOR_nothing
2301 && align >= GET_MODE_ALIGNMENT (mode))
2303 unsigned int size = GET_MODE_SIZE (mode);
2305 while (l >= size)
2307 if (reverse)
2308 offset -= size;
2310 cst = (*constfun) (constfundata, offset, mode);
2311 if (!LEGITIMATE_CONSTANT_P (cst))
2312 return 0;
2314 if (!reverse)
2315 offset += size;
2317 l -= size;
2321 max_size = GET_MODE_SIZE (mode);
2324 /* The code above should have handled everything. */
2325 gcc_assert (!l);
2328 return 1;
2331 /* Generate several move instructions to store LEN bytes generated by
2332 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2333 pointer which will be passed as argument in every CONSTFUN call.
2334 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2335 a memset operation and false if it's a copy of a constant string.
2336 If ENDP is 0 return TO; if ENDP is 1 return the memory at the end,
2337 a la mempcpy; and if ENDP is 2 return the memory at the end minus
2338 one byte, a la stpcpy. */
2340 rtx
2341 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2342 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2343 void *constfundata, unsigned int align, bool memsetp, int endp)
2345 struct store_by_pieces data;
2347 if (len == 0)
2349 gcc_assert (endp != 2);
2350 return to;
2353 gcc_assert (memsetp
2354 ? SET_BY_PIECES_P (len, align)
2355 : STORE_BY_PIECES_P (len, align));
2356 data.constfun = constfun;
2357 data.constfundata = constfundata;
2358 data.len = len;
2359 data.to = to;
2360 store_by_pieces_1 (&data, align);
2361 if (endp)
2363 rtx to1;
2365 gcc_assert (!data.reverse);
2366 if (data.autinc_to)
2368 if (endp == 2)
2370 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2371 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2372 else
2373 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2374 -1));
2376 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2377 data.offset);
2379 else
2381 if (endp == 2)
2382 --data.offset;
2383 to1 = adjust_address (data.to, QImode, data.offset);
2385 return to1;
2387 else
2388 return data.to;
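/* Usage sketch (illustrative): a caller that wants to fill N bytes with
   zeros could supply a callback matching the CONSTFUN contract,

       static rtx
       all_zeros (void *data, HOST_WIDE_INT offset, enum machine_mode mode)
       {
         return CONST0_RTX (mode);
       }

   and call store_by_pieces (to, n, all_zeros, NULL, align, true, 0)
   after can_store_by_pieces has approved it.  clear_by_pieces below is
   essentially this pattern, with clear_by_pieces_1 as the callback.
   The name all_zeros is hypothetical.  */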
2391 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2392 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2394 static void
2395 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2397 struct store_by_pieces data;
2399 if (len == 0)
2400 return;
2402 data.constfun = clear_by_pieces_1;
2403 data.constfundata = NULL;
2404 data.len = len;
2405 data.to = to;
2406 store_by_pieces_1 (&data, align);
2409 /* Callback routine for clear_by_pieces.
2410 Return const0_rtx unconditionally. */
2412 static rtx
2413 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2414 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2415 enum machine_mode mode ATTRIBUTE_UNUSED)
2417 return const0_rtx;
2420 /* Subroutine of clear_by_pieces and store_by_pieces.
2421 Generate several move instructions to store LEN bytes of block TO. (A MEM
2422 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2424 static void
2425 store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
2426 unsigned int align ATTRIBUTE_UNUSED)
2428 rtx to_addr = XEXP (data->to, 0);
2429 unsigned int max_size = STORE_MAX_PIECES + 1;
2430 enum machine_mode mode = VOIDmode, tmode;
2431 enum insn_code icode;
2433 data->offset = 0;
2434 data->to_addr = to_addr;
2435 data->autinc_to
2436 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2437 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2439 data->explicit_inc_to = 0;
2440 data->reverse
2441 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2442 if (data->reverse)
2443 data->offset = data->len;
2445 /* If storing requires more than two move insns,
2446 copy addresses to registers (to make displacements shorter)
2447 and use post-increment if available. */
2448 if (!data->autinc_to
2449 && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2451 /* Determine the main mode we'll be using. */
2452 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2453 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2454 if (GET_MODE_SIZE (tmode) < max_size)
2455 mode = tmode;
2457 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2459 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2460 data->autinc_to = 1;
2461 data->explicit_inc_to = -1;
2464 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2465 && ! data->autinc_to)
2467 data->to_addr = copy_addr_to_reg (to_addr);
2468 data->autinc_to = 1;
2469 data->explicit_inc_to = 1;
2472 if ( !data->autinc_to && CONSTANT_P (to_addr))
2473 data->to_addr = copy_addr_to_reg (to_addr);
2476 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2477 if (align >= GET_MODE_ALIGNMENT (tmode))
2478 align = GET_MODE_ALIGNMENT (tmode);
2479 else
2481 enum machine_mode xmode;
2483 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2484 tmode != VOIDmode;
2485 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2486 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2487 || SLOW_UNALIGNED_ACCESS (tmode, align))
2488 break;
2490 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2493 /* First store what we can in the largest integer mode, then go to
2494 successively smaller modes. */
2496 while (max_size > 1)
2498 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2499 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2500 if (GET_MODE_SIZE (tmode) < max_size)
2501 mode = tmode;
2503 if (mode == VOIDmode)
2504 break;
2506 icode = optab_handler (mov_optab, mode)->insn_code;
2507 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2508 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2510 max_size = GET_MODE_SIZE (mode);
2513 /* The code above should have handled everything. */
2514 gcc_assert (!data->len);
2517 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2518 with move instructions for mode MODE. GENFUN is the gen_... function
2519 to make a move insn for that mode. DATA has all the other info. */
2521 static void
2522 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2523 struct store_by_pieces *data)
2525 unsigned int size = GET_MODE_SIZE (mode);
2526 rtx to1, cst;
2528 while (data->len >= size)
2530 if (data->reverse)
2531 data->offset -= size;
2533 if (data->autinc_to)
2534 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2535 data->offset);
2536 else
2537 to1 = adjust_address (data->to, mode, data->offset);
2539 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2540 emit_insn (gen_add2_insn (data->to_addr,
2541 GEN_INT (-(HOST_WIDE_INT) size)));
2543 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2544 emit_insn ((*genfun) (to1, cst));
2546 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2547 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2549 if (! data->reverse)
2550 data->offset += size;
2552 data->len -= size;
2556 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2557 its length in bytes. */
2559 rtx
2560 clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
2561 unsigned int expected_align, HOST_WIDE_INT expected_size)
2563 enum machine_mode mode = GET_MODE (object);
2564 unsigned int align;
2566 gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2568 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2569 just move a zero. Otherwise, do this a piece at a time. */
2570 if (mode != BLKmode
2571 && GET_CODE (size) == CONST_INT
2572 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
2574 rtx zero = CONST0_RTX (mode);
2575 if (zero != NULL)
2577 emit_move_insn (object, zero);
2578 return NULL;
2581 if (COMPLEX_MODE_P (mode))
2583 zero = CONST0_RTX (GET_MODE_INNER (mode));
2584 if (zero != NULL)
2586 write_complex_part (object, zero, 0);
2587 write_complex_part (object, zero, 1);
2588 return NULL;
2593 if (size == const0_rtx)
2594 return NULL;
2596 align = MEM_ALIGN (object);
2598 if (GET_CODE (size) == CONST_INT
2599 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2600 clear_by_pieces (object, INTVAL (size), align);
2601 else if (set_storage_via_setmem (object, size, const0_rtx, align,
2602 expected_align, expected_size))
2604 else
2605 return set_storage_via_libcall (object, size, const0_rtx,
2606 method == BLOCK_OP_TAILCALL);
2608 return NULL;
2611 rtx
2612 clear_storage (rtx object, rtx size, enum block_op_methods method)
2614 return clear_storage_hints (object, size, method, 0, -1);
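/* Example (illustrative): zeroing a 32-byte BLKmode MEM called OBJ would
   be written clear_storage (obj, GEN_INT (32), BLOCK_OP_NORMAL); the
   helpers above then try clear_by_pieces, a target setmem pattern, and
   finally the memset libcall, in that order.  */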
2618 /* A subroutine of clear_storage. Expand a call to memset.
2619 Return the return value of memset, 0 otherwise. */
2621 rtx
2622 set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
2624 tree call_expr, fn, object_tree, size_tree, val_tree;
2625 enum machine_mode size_mode;
2626 rtx retval;
2628 /* Emit code to copy OBJECT and SIZE into new pseudos. We can then
2629 place those new pseudos into a VAR_DECL and use them later. */
2631 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2633 size_mode = TYPE_MODE (sizetype);
2634 size = convert_to_mode (size_mode, size, 1);
2635 size = copy_to_mode_reg (size_mode, size);
2637 /* It is incorrect to use the libcall calling conventions to call
2638 memset in this context. This could be a user call to memset and
2639 the user may wish to examine the return value from memset. For
2640 targets where libcalls and normal calls have different conventions
2641 for returning pointers, we could end up generating incorrect code. */
2643 object_tree = make_tree (ptr_type_node, object);
2644 if (GET_CODE (val) != CONST_INT)
2645 val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
2646 size_tree = make_tree (sizetype, size);
2647 val_tree = make_tree (integer_type_node, val);
2649 fn = clear_storage_libcall_fn (true);
2650 call_expr = build_call_expr (fn, 3,
2651 object_tree, val_tree, size_tree);
2652 CALL_EXPR_TAILCALL (call_expr) = tailcall;
2654 retval = expand_normal (call_expr);
2656 return retval;
2659 /* A subroutine of set_storage_via_libcall. Create the tree node
2660 for the function we use for block clears. The first time FOR_CALL
2661 is true, we call assemble_external. */
2663 static GTY(()) tree block_clear_fn;
2665 void
2666 init_block_clear_fn (const char *asmspec)
2668 if (!block_clear_fn)
2670 tree fn, args;
2672 fn = get_identifier ("memset");
2673 args = build_function_type_list (ptr_type_node, ptr_type_node,
2674 integer_type_node, sizetype,
2675 NULL_TREE);
2677 fn = build_decl (FUNCTION_DECL, fn, args);
2678 DECL_EXTERNAL (fn) = 1;
2679 TREE_PUBLIC (fn) = 1;
2680 DECL_ARTIFICIAL (fn) = 1;
2681 TREE_NOTHROW (fn) = 1;
2682 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
2683 DECL_VISIBILITY_SPECIFIED (fn) = 1;
2685 block_clear_fn = fn;
2688 if (asmspec)
2689 set_user_assembler_name (block_clear_fn, asmspec);
2692 static tree
2693 clear_storage_libcall_fn (int for_call)
2695 static bool emitted_extern;
2697 if (!block_clear_fn)
2698 init_block_clear_fn (NULL);
2700 if (for_call && !emitted_extern)
2702 emitted_extern = true;
2703 make_decl_rtl (block_clear_fn);
2704 assemble_external (block_clear_fn);
2707 return block_clear_fn;
2710 /* Expand a setmem pattern; return true if successful. */
2712 bool
2713 set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
2714 unsigned int expected_align, HOST_WIDE_INT expected_size)
2716 /* Try the most limited insn first, because there's no point
2717 including more than one in the machine description unless
2718 the more limited one has some advantage. */
2720 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2721 enum machine_mode mode;
2723 if (expected_align < align)
2724 expected_align = align;
2726 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2727 mode = GET_MODE_WIDER_MODE (mode))
2729 enum insn_code code = setmem_optab[(int) mode];
2730 insn_operand_predicate_fn pred;
2732 if (code != CODE_FOR_nothing
2733 /* We don't need MODE to be narrower than
2734 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2735 the mode mask, as it is returned by the macro, it will
2736 definitely be less than the actual mode mask. */
2737 && ((GET_CODE (size) == CONST_INT
2738 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2739 <= (GET_MODE_MASK (mode) >> 1)))
2740 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2741 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2742 || (*pred) (object, BLKmode))
2743 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
2744 || (*pred) (opalign, VOIDmode)))
2746 rtx opsize, opchar;
2747 enum machine_mode char_mode;
2748 rtx last = get_last_insn ();
2749 rtx pat;
2751 opsize = convert_to_mode (mode, size, 1);
2752 pred = insn_data[(int) code].operand[1].predicate;
2753 if (pred != 0 && ! (*pred) (opsize, mode))
2754 opsize = copy_to_mode_reg (mode, opsize);
2756 opchar = val;
2757 char_mode = insn_data[(int) code].operand[2].mode;
2758 if (char_mode != VOIDmode)
2760 opchar = convert_to_mode (char_mode, opchar, 1);
2761 pred = insn_data[(int) code].operand[2].predicate;
2762 if (pred != 0 && ! (*pred) (opchar, char_mode))
2763 opchar = copy_to_mode_reg (char_mode, opchar);
2766 if (insn_data[(int) code].n_operands == 4)
2767 pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign);
2768 else
2769 pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign,
2770 GEN_INT (expected_align),
2771 GEN_INT (expected_size));
2772 if (pat)
2774 emit_insn (pat);
2775 return true;
2777 else
2778 delete_insns_since (last);
2782 return false;
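/* Note (illustrative): if the target's setmem expander returns a null
   pattern to signal failure, delete_insns_since above discards any insns
   it partially emitted and the caller falls back to the libcall path.  */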
2786 /* Write to one of the components of the complex value CPLX. Write VAL to
2787 the real part if IMAG_P is false, and the imaginary part if it's true. */
2789 static void
2790 write_complex_part (rtx cplx, rtx val, bool imag_p)
2792 enum machine_mode cmode;
2793 enum machine_mode imode;
2794 unsigned ibitsize;
2796 if (GET_CODE (cplx) == CONCAT)
2798 emit_move_insn (XEXP (cplx, imag_p), val);
2799 return;
2802 cmode = GET_MODE (cplx);
2803 imode = GET_MODE_INNER (cmode);
2804 ibitsize = GET_MODE_BITSIZE (imode);
2806 /* For MEMs simplify_gen_subreg may generate an invalid new address
2807 because, e.g., the original address is considered mode-dependent
2808 by the target, which restricts simplify_subreg from invoking
2809 adjust_address_nv. Instead of preparing fallback support for an
2810 invalid address, we call adjust_address_nv directly. */
2811 if (MEM_P (cplx))
2813 emit_move_insn (adjust_address_nv (cplx, imode,
2814 imag_p ? GET_MODE_SIZE (imode) : 0),
2815 val);
2816 return;
2819 /* If the sub-object is at least word sized, then we know that subregging
2820 will work. This special case is important, since store_bit_field
2821 wants to operate on integer modes, and there's rarely an OImode to
2822 correspond to TCmode. */
2823 if (ibitsize >= BITS_PER_WORD
2824 /* For hard regs we have exact predicates. Assume we can split
2825 the original object if it spans an even number of hard regs.
2826 This special case is important for SCmode on 64-bit platforms
2827 where the natural size of floating-point regs is 32-bit. */
2828 || (REG_P (cplx)
2829 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2830 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2832 rtx part = simplify_gen_subreg (imode, cplx, cmode,
2833 imag_p ? GET_MODE_SIZE (imode) : 0);
2834 if (part)
2836 emit_move_insn (part, val);
2837 return;
2839 else
2840 /* simplify_gen_subreg may fail for sub-word MEMs. */
2841 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2844 store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, imode, val);
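/* Example (illustrative): for an SCmode CPLX on a target with 4-byte
   SFmode, IMODE is SFmode and the imaginary part lives 4 bytes in, so
   write_complex_part (cplx, val, true) becomes a move to the MEM at
   offset 4, or to (subreg:SF cplx 4) when CPLX is a suitable REG.  */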
2847 /* Extract one of the components of the complex value CPLX. Extract the
2848 real part if IMAG_P is false, and the imaginary part if it's true. */
2850 static rtx
2851 read_complex_part (rtx cplx, bool imag_p)
2853 enum machine_mode cmode, imode;
2854 unsigned ibitsize;
2856 if (GET_CODE (cplx) == CONCAT)
2857 return XEXP (cplx, imag_p);
2859 cmode = GET_MODE (cplx);
2860 imode = GET_MODE_INNER (cmode);
2861 ibitsize = GET_MODE_BITSIZE (imode);
2863 /* Special case reads from complex constants that got spilled to memory. */
2864 if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
2866 tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
2867 if (decl && TREE_CODE (decl) == COMPLEX_CST)
2869 tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
2870 if (CONSTANT_CLASS_P (part))
2871 return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
2875 /* For MEMs simplify_gen_subreg may generate an invalid new address
2876 because, e.g., the original address is considered mode-dependent
2877 by the target, which restricts simplify_subreg from invoking
2878 adjust_address_nv. Instead of preparing fallback support for an
2879 invalid address, we call adjust_address_nv directly. */
2880 if (MEM_P (cplx))
2881 return adjust_address_nv (cplx, imode,
2882 imag_p ? GET_MODE_SIZE (imode) : 0);
2884 /* If the sub-object is at least word sized, then we know that subregging
2885 will work. This special case is important, since extract_bit_field
2886 wants to operate on integer modes, and there's rarely an OImode to
2887 correspond to TCmode. */
2888 if (ibitsize >= BITS_PER_WORD
2889 /* For hard regs we have exact predicates. Assume we can split
2890 the original object if it spans an even number of hard regs.
2891 This special case is important for SCmode on 64-bit platforms
2892 where the natural size of floating-point regs is 32-bit. */
2893 || (REG_P (cplx)
2894 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2895 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2897 rtx ret = simplify_gen_subreg (imode, cplx, cmode,
2898 imag_p ? GET_MODE_SIZE (imode) : 0);
2899 if (ret)
2900 return ret;
2901 else
2902 /* simplify_gen_subreg may fail for sub-word MEMs. */
2903 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2906 return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
2907 true, NULL_RTX, imode, imode);
2910 /* A subroutine of emit_move_insn_1. Yet another lowpart generator.
2911 NEW_MODE and OLD_MODE are the same size. Return NULL if X cannot be
2912 represented in NEW_MODE. If FORCE is true, this will never happen, as
2913 we'll force-create a SUBREG if needed. */
2915 static rtx
2916 emit_move_change_mode (enum machine_mode new_mode,
2917 enum machine_mode old_mode, rtx x, bool force)
2919 rtx ret;
2921 if (push_operand (x, GET_MODE (x)))
2923 ret = gen_rtx_MEM (new_mode, XEXP (x, 0));
2924 MEM_COPY_ATTRIBUTES (ret, x);
2926 else if (MEM_P (x))
2928 /* We don't have to worry about changing the address since the
2929 size in bytes is supposed to be the same. */
2930 if (reload_in_progress)
2932 /* Copy the MEM to change the mode and move any
2933 substitutions from the old MEM to the new one. */
2934 ret = adjust_address_nv (x, new_mode, 0);
2935 copy_replacements (x, ret);
2937 else
2938 ret = adjust_address (x, new_mode, 0);
2940 else
2942 /* Note that we do want simplify_subreg's behavior of validating
2943 that the new mode is ok for a hard register. If we were to use
2944 simplify_gen_subreg, we would create the subreg, but would
2945 probably run into the target not being able to implement it. */
2946 /* Except, of course, when FORCE is true, when this is exactly what
2947 we want. Which is needed for CCmodes on some targets. */
2948 if (force)
2949 ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
2950 else
2951 ret = simplify_subreg (new_mode, x, old_mode, 0);
2954 return ret;
2957 /* A subroutine of emit_move_insn_1. Generate a move from Y into X using
2958 an integer mode of the same size as MODE. Returns the instruction
2959 emitted, or NULL if such a move could not be generated. */
2961 static rtx
2962 emit_move_via_integer (enum machine_mode mode, rtx x, rtx y, bool force)
2964 enum machine_mode imode;
2965 enum insn_code code;
2967 /* There must exist a mode of the exact size we require. */
2968 imode = int_mode_for_mode (mode);
2969 if (imode == BLKmode)
2970 return NULL_RTX;
2972 /* The target must support moves in this mode. */
2973 code = optab_handler (mov_optab, imode)->insn_code;
2974 if (code == CODE_FOR_nothing)
2975 return NULL_RTX;
2977 x = emit_move_change_mode (imode, mode, x, force);
2978 if (x == NULL_RTX)
2979 return NULL_RTX;
2980 y = emit_move_change_mode (imode, mode, y, force);
2981 if (y == NULL_RTX)
2982 return NULL_RTX;
2983 return emit_insn (GEN_FCN (code) (x, y));
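/* Example (illustrative): moving an SFmode value on a target that lacks
   a movsf pattern: int_mode_for_mode (SFmode) yields SImode, both
   operands are recast by emit_move_change_mode, and the movsi pattern
   performs the copy.  */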
2986 /* A subroutine of emit_move_insn_1. X is a push_operand in MODE.
2987 Return an equivalent MEM that does not use an auto-increment. */
2989 static rtx
2990 emit_move_resolve_push (enum machine_mode mode, rtx x)
2992 enum rtx_code code = GET_CODE (XEXP (x, 0));
2993 HOST_WIDE_INT adjust;
2994 rtx temp;
2996 adjust = GET_MODE_SIZE (mode);
2997 #ifdef PUSH_ROUNDING
2998 adjust = PUSH_ROUNDING (adjust);
2999 #endif
3000 if (code == PRE_DEC || code == POST_DEC)
3001 adjust = -adjust;
3002 else if (code == PRE_MODIFY || code == POST_MODIFY)
3004 rtx expr = XEXP (XEXP (x, 0), 1);
3005 HOST_WIDE_INT val;
3007 gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
3008 gcc_assert (GET_CODE (XEXP (expr, 1)) == CONST_INT);
3009 val = INTVAL (XEXP (expr, 1));
3010 if (GET_CODE (expr) == MINUS)
3011 val = -val;
3012 gcc_assert (adjust == val || adjust == -val);
3013 adjust = val;
3016 /* Do not use anti_adjust_stack, since we don't want to update
3017 stack_pointer_delta. */
3018 temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
3019 GEN_INT (adjust), stack_pointer_rtx,
3020 0, OPTAB_LIB_WIDEN);
3021 if (temp != stack_pointer_rtx)
3022 emit_move_insn (stack_pointer_rtx, temp);
3024 switch (code)
3026 case PRE_INC:
3027 case PRE_DEC:
3028 case PRE_MODIFY:
3029 temp = stack_pointer_rtx;
3030 break;
3031 case POST_INC:
3032 case POST_DEC:
3033 case POST_MODIFY:
3034 temp = plus_constant (stack_pointer_rtx, -adjust);
3035 break;
3036 default:
3037 gcc_unreachable ();
3040 return replace_equiv_address (x, temp);
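/* Worked example (illustrative): for a DImode (mem:DI (pre_dec (reg sp)))
   on a downward-growing stack, ADJUST becomes -8; the code above emits
   sp = sp - 8 and returns (mem:DI (reg sp)).  For a post_dec address the
   equivalent MEM is at sp + 8, the value sp held before the adjustment.  */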
3043 /* A subroutine of emit_move_complex. Generate a move from Y into X.
3044 X is known to satisfy push_operand, and MODE is known to be complex.
3045 Returns the last instruction emitted. */
3047 rtx
3048 emit_move_complex_push (enum machine_mode mode, rtx x, rtx y)
3050 enum machine_mode submode = GET_MODE_INNER (mode);
3051 bool imag_first;
3053 #ifdef PUSH_ROUNDING
3054 unsigned int submodesize = GET_MODE_SIZE (submode);
3056 /* In case we push to the stack, but the size is smaller than what the
3057 machine can push exactly, we need to use move instructions. */
3058 if (PUSH_ROUNDING (submodesize) != submodesize)
3060 x = emit_move_resolve_push (mode, x);
3061 return emit_move_insn (x, y);
3063 #endif
3065 /* Note that the real part always precedes the imag part in memory
3066 regardless of the machine's endianness. */
3067 switch (GET_CODE (XEXP (x, 0)))
3069 case PRE_DEC:
3070 case POST_DEC:
3071 imag_first = true;
3072 break;
3073 case PRE_INC:
3074 case POST_INC:
3075 imag_first = false;
3076 break;
3077 default:
3078 gcc_unreachable ();
3081 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3082 read_complex_part (y, imag_first));
3083 return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3084 read_complex_part (y, !imag_first));
3087 /* A subroutine of emit_move_complex. Perform the move from Y to X
3088 via two moves of the parts. Returns the last instruction emitted. */
3090 rtx
3091 emit_move_complex_parts (rtx x, rtx y)
3093 /* Show the output dies here. This is necessary for SUBREGs
3094 of pseudos since we cannot track their lifetimes correctly;
3095 hard regs shouldn't appear here except as return values. */
3096 if (!reload_completed && !reload_in_progress
3097 && REG_P (x) && !reg_overlap_mentioned_p (x, y))
3098 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3100 write_complex_part (x, read_complex_part (y, false), false);
3101 write_complex_part (x, read_complex_part (y, true), true);
3103 return get_last_insn ();
3106 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3107 MODE is known to be complex. Returns the last instruction emitted. */
3109 static rtx
3110 emit_move_complex (enum machine_mode mode, rtx x, rtx y)
3112 bool try_int;
3114 /* Need to take special care for pushes, to maintain proper ordering
3115 of the data, and possibly extra padding. */
3116 if (push_operand (x, mode))
3117 return emit_move_complex_push (mode, x, y);
3119 /* See if we can coerce the target into moving both values at once. */
3121 /* Move floating point as parts. */
3122 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
3123 && optab_handler (mov_optab, GET_MODE_INNER (mode))->insn_code != CODE_FOR_nothing)
3124 try_int = false;
3125 /* Not possible if the values are inherently not adjacent. */
3126 else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
3127 try_int = false;
3128 /* Is possible if both are registers (or subregs of registers). */
3129 else if (register_operand (x, mode) && register_operand (y, mode))
3130 try_int = true;
3131 /* If one of the operands is a memory, and alignment constraints
3132 are friendly enough, we may be able to do combined memory operations.
3133 We do not attempt this if Y is a constant because that combination is
3134 usually better with the by-parts thing below. */
3135 else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
3136 && (!STRICT_ALIGNMENT
3137 || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
3138 try_int = true;
3139 else
3140 try_int = false;
3142 if (try_int)
3144 rtx ret;
3146 /* For memory to memory moves, optimal behavior can be had with the
3147 existing block move logic. */
3148 if (MEM_P (x) && MEM_P (y))
3150 emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
3151 BLOCK_OP_NO_LIBCALL);
3152 return get_last_insn ();
3155 ret = emit_move_via_integer (mode, x, y, true);
3156 if (ret)
3157 return ret;
3160 return emit_move_complex_parts (x, y);
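/* Example (illustrative): a CSImode copy between two MEMs on a target
   where alignment permits goes through the block-move path above as a
   single 8-byte emit_block_move; a copy involving a CONCAT, by contrast,
   is always done part by part.  */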
3163 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3164 MODE is known to be MODE_CC. Returns the last instruction emitted. */
3166 static rtx
3167 emit_move_ccmode (enum machine_mode mode, rtx x, rtx y)
3169 rtx ret;
3171 /* Assume all MODE_CC modes are equivalent; if we have movcc, use it. */
3172 if (mode != CCmode)
3174 enum insn_code code = optab_handler (mov_optab, CCmode)->insn_code;
3175 if (code != CODE_FOR_nothing)
3177 x = emit_move_change_mode (CCmode, mode, x, true);
3178 y = emit_move_change_mode (CCmode, mode, y, true);
3179 return emit_insn (GEN_FCN (code) (x, y));
3183 /* Otherwise, find the MODE_INT mode of the same width. */
3184 ret = emit_move_via_integer (mode, x, y, false);
3185 gcc_assert (ret != NULL);
3186 return ret;
3189 /* Return true if word I of OP lies entirely in the
3190 undefined bits of a paradoxical subreg. */
3192 static bool
3193 undefined_operand_subword_p (const_rtx op, int i)
3195 enum machine_mode innermode, innermostmode;
3196 int offset;
3197 if (GET_CODE (op) != SUBREG)
3198 return false;
3199 innermode = GET_MODE (op);
3200 innermostmode = GET_MODE (SUBREG_REG (op));
3201 offset = i * UNITS_PER_WORD + SUBREG_BYTE (op);
3202 /* The SUBREG_BYTE represents offset, as if the value were stored in
3203 memory, except for a paradoxical subreg where we define
3204 SUBREG_BYTE to be 0; undo this exception as in
3205 simplify_subreg. */
3206 if (SUBREG_BYTE (op) == 0
3207 && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
3209 int difference = (GET_MODE_SIZE (innermostmode) - GET_MODE_SIZE (innermode));
3210 if (WORDS_BIG_ENDIAN)
3211 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
3212 if (BYTES_BIG_ENDIAN)
3213 offset += difference % UNITS_PER_WORD;
3215 if (offset >= GET_MODE_SIZE (innermostmode)
3216 || offset <= -GET_MODE_SIZE (word_mode))
3217 return true;
3218 return false;
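/* Worked example (illustrative): (subreg:DI (reg:SI x) 0) on a 32-bit
   little-endian target is paradoxical; for word I == 1 the computed
   offset is 4, which is not below GET_MODE_SIZE (SImode), so the word is
   entirely undefined and its move can be skipped.  */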
3221 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3222 MODE is any multi-word or full-word mode that lacks a move_insn
3223 pattern. Note that you will get better code if you define such
3224 patterns, even if they must turn into multiple assembler instructions. */
3226 static rtx
3227 emit_move_multi_word (enum machine_mode mode, rtx x, rtx y)
3229 rtx last_insn = 0;
3230 rtx seq, inner;
3231 bool need_clobber;
3232 int i;
3234 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
3236 /* If X is a push on the stack, do the push now and replace
3237 X with a reference to the stack pointer. */
3238 if (push_operand (x, mode))
3239 x = emit_move_resolve_push (mode, x);
3241 /* If we are in reload, see if either operand is a MEM whose address
3242 is scheduled for replacement. */
3243 if (reload_in_progress && MEM_P (x)
3244 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3245 x = replace_equiv_address_nv (x, inner);
3246 if (reload_in_progress && MEM_P (y)
3247 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3248 y = replace_equiv_address_nv (y, inner);
3250 start_sequence ();
3252 need_clobber = false;
3253 for (i = 0;
3254 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3255 i++)
3257 rtx xpart = operand_subword (x, i, 1, mode);
3258 rtx ypart;
3260 /* Do not generate code for a move if it would come entirely
3261 from the undefined bits of a paradoxical subreg. */
3262 if (undefined_operand_subword_p (y, i))
3263 continue;
3265 ypart = operand_subword (y, i, 1, mode);
3267 /* If we can't get a part of Y, put Y into memory if it is a
3268 constant. Otherwise, force it into a register. Then we must
3269 be able to get a part of Y. */
3270 if (ypart == 0 && CONSTANT_P (y))
3272 y = use_anchored_address (force_const_mem (mode, y));
3273 ypart = operand_subword (y, i, 1, mode);
3275 else if (ypart == 0)
3276 ypart = operand_subword_force (y, i, mode);
3278 gcc_assert (xpart && ypart);
3280 need_clobber |= (GET_CODE (xpart) == SUBREG);
3282 last_insn = emit_move_insn (xpart, ypart);
3285 seq = get_insns ();
3286 end_sequence ();
3288 /* Show the output dies here. This is necessary for SUBREGs
3289 of pseudos since we cannot track their lifetimes correctly;
3290 hard regs shouldn't appear here except as return values.
3291 We never want to emit such a clobber after reload. */
3292 if (x != y
3293 && ! (reload_in_progress || reload_completed)
3294 && need_clobber != 0)
3295 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3297 emit_insn (seq);
3299 return last_insn;
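/* Example (illustrative): a DImode move on a 32-bit target without a
   movdi pattern becomes two SImode word moves here, preceded by a
   clobber of the destination when a word is accessed through a SUBREG,
   so that liveness of the full pseudo is tracked correctly.  */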
3302 /* Low level part of emit_move_insn.
3303 Called just like emit_move_insn, but assumes X and Y
3304 are basically valid. */
3306 rtx
3307 emit_move_insn_1 (rtx x, rtx y)
3309 enum machine_mode mode = GET_MODE (x);
3310 enum insn_code code;
3312 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3314 code = optab_handler (mov_optab, mode)->insn_code;
3315 if (code != CODE_FOR_nothing)
3316 return emit_insn (GEN_FCN (code) (x, y));
3318 /* Expand complex moves by moving real part and imag part. */
3319 if (COMPLEX_MODE_P (mode))
3320 return emit_move_complex (mode, x, y);
3322 if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT
3323 || ALL_FIXED_POINT_MODE_P (mode))
3325 rtx result = emit_move_via_integer (mode, x, y, true);
3327 /* If we can't find an integer mode, use multi words. */
3328 if (result)
3329 return result;
3330 else
3331 return emit_move_multi_word (mode, x, y);
3334 if (GET_MODE_CLASS (mode) == MODE_CC)
3335 return emit_move_ccmode (mode, x, y);
3337 /* Try using a move pattern for the corresponding integer mode. This is
3338 only safe when simplify_subreg can convert MODE constants into integer
3339 constants. At present, it can only do this reliably if the value
3340 fits within a HOST_WIDE_INT. */
3341 if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3343 rtx ret = emit_move_via_integer (mode, x, y, false);
3344 if (ret)
3345 return ret;
3348 return emit_move_multi_word (mode, x, y);
3351 /* Generate code to copy Y into X.
3352 Both Y and X must have the same mode, except that
3353 Y can be a constant with VOIDmode.
3354 This mode cannot be BLKmode; use emit_block_move for that.
3356 Return the last instruction emitted. */
3358 rtx
3359 emit_move_insn (rtx x, rtx y)
3361 enum machine_mode mode = GET_MODE (x);
3362 rtx y_cst = NULL_RTX;
3363 rtx last_insn, set;
3365 gcc_assert (mode != BLKmode
3366 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3368 if (CONSTANT_P (y))
3370 if (optimize
3371 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3372 && (last_insn = compress_float_constant (x, y)))
3373 return last_insn;
3375 y_cst = y;
3377 if (!LEGITIMATE_CONSTANT_P (y))
3379 y = force_const_mem (mode, y);
3381 /* If the target's cannot_force_const_mem prevented the spill,
3382 assume that the target's move expanders will also take care
3383 of the non-legitimate constant. */
3384 if (!y)
3385 y = y_cst;
3386 else
3387 y = use_anchored_address (y);
3391 /* If X or Y are memory references, verify that their addresses are valid
3392 for the machine. */
3393 if (MEM_P (x)
3394 && (! memory_address_p (GET_MODE (x), XEXP (x, 0))
3395 && ! push_operand (x, GET_MODE (x))))
3396 x = validize_mem (x);
3398 if (MEM_P (y)
3399 && ! memory_address_p (GET_MODE (y), XEXP (y, 0)))
3400 y = validize_mem (y);
3402 gcc_assert (mode != BLKmode);
3404 last_insn = emit_move_insn_1 (x, y);
3406 if (y_cst && REG_P (x)
3407 && (set = single_set (last_insn)) != NULL_RTX
3408 && SET_DEST (set) == x
3409 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3410 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
3412 return last_insn;
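/* Usage sketch (illustrative):

       rtx r = gen_reg_rtx (SImode);
       emit_move_insn (r, GEN_INT (42));

   A constant that fails LEGITIMATE_CONSTANT_P is first forced into the
   constant pool, and when the destination is a register a REG_EQUAL note
   recording the original constant is attached to the move.  */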
3415 /* If Y is representable exactly in a narrower mode, and the target can
3416 perform the extension directly from constant or memory, then emit the
3417 move as an extension. */
3419 static rtx
3420 compress_float_constant (rtx x, rtx y)
3422 enum machine_mode dstmode = GET_MODE (x);
3423 enum machine_mode orig_srcmode = GET_MODE (y);
3424 enum machine_mode srcmode;
3425 REAL_VALUE_TYPE r;
3426 int oldcost, newcost;
3428 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3430 if (LEGITIMATE_CONSTANT_P (y))
3431 oldcost = rtx_cost (y, SET);
3432 else
3433 oldcost = rtx_cost (force_const_mem (dstmode, y), SET);
3435 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3436 srcmode != orig_srcmode;
3437 srcmode = GET_MODE_WIDER_MODE (srcmode))
3439 enum insn_code ic;
3440 rtx trunc_y, last_insn;
3442 /* Skip if the target can't extend this way. */
3443 ic = can_extend_p (dstmode, srcmode, 0);
3444 if (ic == CODE_FOR_nothing)
3445 continue;
3447 /* Skip if the narrowed value isn't exact. */
3448 if (! exact_real_truncate (srcmode, &r))
3449 continue;
3451 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3453 if (LEGITIMATE_CONSTANT_P (trunc_y))
3455 /* Skip if the target needs extra instructions to perform
3456 the extension. */
3457 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3458 continue;
3459 /* This is valid, but may not be cheaper than the original. */
3460 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET);
3461 if (oldcost < newcost)
3462 continue;
3464 else if (float_extend_from_mem[dstmode][srcmode])
3466 trunc_y = force_const_mem (srcmode, trunc_y);
3467 /* This is valid, but may not be cheaper than the original. */
3468 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET);
3469 if (oldcost < newcost)
3470 continue;
3471 trunc_y = validize_mem (trunc_y);
3473 else
3474 continue;
3476 /* For CSE's benefit, force the compressed constant pool entry
3477 into a new pseudo. This constant may be used in different modes,
3478 and if not, combine will put things back together for us. */
3479 trunc_y = force_reg (srcmode, trunc_y);
3480 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3481 last_insn = get_last_insn ();
3483 if (REG_P (x))
3484 set_unique_reg_note (last_insn, REG_EQUAL, y);
3486 return last_insn;
3489 return NULL_RTX;
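/* Worked example (illustrative): moving the DFmode constant 0.5 on a
   target that can extend SFmode to DFmode: 0.5 truncates to SFmode
   exactly, so the move can be emitted as a float_extend of the SFmode
   constant, provided rtx_cost does not rate that above loading the
   original DFmode constant.  */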
3492 /* Pushing data onto the stack. */
3494 /* Push a block of length SIZE (perhaps variable)
3495 and return an rtx to address the beginning of the block.
3496 The value may be virtual_outgoing_args_rtx.
3498 EXTRA is the number of bytes of padding to push in addition to SIZE.
3499 BELOW nonzero means this padding comes at low addresses;
3500 otherwise, the padding comes at high addresses. */
3502 rtx
3503 push_block (rtx size, int extra, int below)
3505 rtx temp;
3507 size = convert_modes (Pmode, ptr_mode, size, 1);
3508 if (CONSTANT_P (size))
3509 anti_adjust_stack (plus_constant (size, extra));
3510 else if (REG_P (size) && extra == 0)
3511 anti_adjust_stack (size);
3512 else
3514 temp = copy_to_mode_reg (Pmode, size);
3515 if (extra != 0)
3516 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3517 temp, 0, OPTAB_LIB_WIDEN);
3518 anti_adjust_stack (temp);
3521 #ifndef STACK_GROWS_DOWNWARD
3522 if (0)
3523 #else
3524 if (1)
3525 #endif
3527 temp = virtual_outgoing_args_rtx;
3528 if (extra != 0 && below)
3529 temp = plus_constant (temp, extra);
3531 else
3533 if (GET_CODE (size) == CONST_INT)
3534 temp = plus_constant (virtual_outgoing_args_rtx,
3535 -INTVAL (size) - (below ? 0 : extra));
3536 else if (extra != 0 && !below)
3537 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3538 negate_rtx (Pmode, plus_constant (size, extra)));
3539 else
3540 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3541 negate_rtx (Pmode, size));
3544 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
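/* Example (illustrative): push_block (GEN_INT (16), 0, 0) on a
   STACK_GROWS_DOWNWARD target emits an anti_adjust_stack of 16 bytes and
   returns virtual_outgoing_args_rtx, which after that adjustment
   addresses the base of the newly allocated block.  */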
3547 #ifdef PUSH_ROUNDING
3549 /* Emit single push insn. */
3551 static void
3552 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3554 rtx dest_addr;
3555 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3556 rtx dest;
3557 enum insn_code icode;
3558 insn_operand_predicate_fn pred;
3560 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3561 /* If there is a push pattern, use it. Otherwise try the old way of
3562 throwing a MEM representing the push operation to the move expander. */
3563 icode = optab_handler (push_optab, mode)->insn_code;
3564 if (icode != CODE_FOR_nothing)
3566 if (((pred = insn_data[(int) icode].operand[0].predicate)
3567 && !((*pred) (x, mode))))
3568 x = force_reg (mode, x);
3569 emit_insn (GEN_FCN (icode) (x));
3570 return;
3572 if (GET_MODE_SIZE (mode) == rounded_size)
3573 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3574 /* If we are to pad downward, adjust the stack pointer first and
3575 then store X into the stack location using an offset. This is
3576 because emit_move_insn does not know how to pad; it does not have
3577 access to type. */
3578 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3580 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3581 HOST_WIDE_INT offset;
3583 emit_move_insn (stack_pointer_rtx,
3584 expand_binop (Pmode,
3585 #ifdef STACK_GROWS_DOWNWARD
3586 sub_optab,
3587 #else
3588 add_optab,
3589 #endif
3590 stack_pointer_rtx,
3591 GEN_INT (rounded_size),
3592 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3594 offset = (HOST_WIDE_INT) padding_size;
3595 #ifdef STACK_GROWS_DOWNWARD
3596 if (STACK_PUSH_CODE == POST_DEC)
3597 /* We have already decremented the stack pointer, so get the
3598 previous value. */
3599 offset += (HOST_WIDE_INT) rounded_size;
3600 #else
3601 if (STACK_PUSH_CODE == POST_INC)
3602 /* We have already incremented the stack pointer, so get the
3603 previous value. */
3604 offset -= (HOST_WIDE_INT) rounded_size;
3605 #endif
3606 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3608 else
3610 #ifdef STACK_GROWS_DOWNWARD
3611 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3612 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3613 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3614 #else
3615 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3616 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3617 GEN_INT (rounded_size));
3618 #endif
3619 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3622 dest = gen_rtx_MEM (mode, dest_addr);
3624 if (type != 0)
3626 set_mem_attributes (dest, type, 1);
3628 if (flag_optimize_sibling_calls)
3629 /* Function incoming arguments may overlap with sibling call
3630 outgoing arguments and we cannot allow reordering of reads
3631 from function arguments with stores to outgoing arguments
3632 of sibling calls. */
3633 set_mem_alias_set (dest, 0);
3635 emit_move_insn (dest, x);
3637 #endif
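/* Example (illustrative): pushing an HImode value when PUSH_ROUNDING
   rounds 2 bytes up to 4 and FUNCTION_ARG_PADDING says downward: on a
   downward-growing stack the code above first drops the stack pointer by
   4, then stores the value at offset 2, leaving the padding at the lower
   addresses.  */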
3639 /* Generate code to push X onto the stack, assuming it has mode MODE and
3640 type TYPE.
3641 MODE is redundant except when X is a CONST_INT (since they don't
3642 carry mode info).
3643 SIZE is an rtx for the size of data to be copied (in bytes),
3644 needed only if X is BLKmode.
3646 ALIGN (in bits) is maximum alignment we can assume.
3648 If PARTIAL and REG are both nonzero, then copy that many of the first
3649 bytes of X into registers starting with REG, and push the rest of X.
3650 The amount of space pushed is decreased by PARTIAL bytes.
3651 REG must be a hard register in this case.
3652 If REG is zero but PARTIAL is not, take all other actions for an
3653 argument partially in registers, but do not actually load any
3654 registers.
3656 EXTRA is the amount in bytes of extra space to leave next to this arg.
3657 This is ignored if an argument block has already been allocated.
3659 On a machine that lacks real push insns, ARGS_ADDR is the address of
3660 the bottom of the argument block for this call. We use indexing off there
3661 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3662 argument block has not been preallocated.
3664 ARGS_SO_FAR is the size of args previously pushed for this call.
3666 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3667 for arguments passed in registers. If nonzero, it will be the number
3668 of bytes required. */
3670 void
3671 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3672 unsigned int align, int partial, rtx reg, int extra,
3673 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3674 rtx alignment_pad)
3676 rtx xinner;
3677 enum direction stack_direction
3678 #ifdef STACK_GROWS_DOWNWARD
3679 = downward;
3680 #else
3681 = upward;
3682 #endif
3684 /* Decide where to pad the argument: `downward' for below,
3685 `upward' for above, or `none' for don't pad it.
3686 Default is below for small data on big-endian machines; else above. */
3687 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3689 /* Invert direction if stack is post-decrement.
3690 FIXME: why? */
3691 if (STACK_PUSH_CODE == POST_DEC)
3692 if (where_pad != none)
3693 where_pad = (where_pad == downward ? upward : downward);
3695 xinner = x;
3697 if (mode == BLKmode
3698 || (STRICT_ALIGNMENT && align < GET_MODE_ALIGNMENT (mode)))
3700 /* Copy a block into the stack, entirely or partially. */
3702 rtx temp;
3703 int used;
3704 int offset;
3705 int skip;
3707 offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3708 used = partial - offset;
3710 if (mode != BLKmode)
3712 /* A value is to be stored in an insufficiently aligned
3713 stack slot; copy via a suitably aligned slot if
3714 necessary. */
3715 size = GEN_INT (GET_MODE_SIZE (mode));
3716 if (!MEM_P (xinner))
3718 temp = assign_temp (type, 0, 1, 1);
3719 emit_move_insn (temp, xinner);
3720 xinner = temp;
3724 gcc_assert (size);
3726 /* USED is now the # of bytes we need not copy to the stack
3727 because registers will take care of them. */
3729 if (partial != 0)
3730 xinner = adjust_address (xinner, BLKmode, used);
3732 /* If the partial register-part of the arg counts in its stack size,
3733 skip the part of stack space corresponding to the registers.
3734 Otherwise, start copying to the beginning of the stack space,
3735 by setting SKIP to 0. */
3736 skip = (reg_parm_stack_space == 0) ? 0 : used;
3738 #ifdef PUSH_ROUNDING
3739 /* Do it with several push insns if that doesn't take lots of insns
3740 and if there is no difficulty with push insns that skip bytes
3741 on the stack for alignment purposes. */
3742 if (args_addr == 0
3743 && PUSH_ARGS
3744 && GET_CODE (size) == CONST_INT
3745 && skip == 0
3746 && MEM_ALIGN (xinner) >= align
3747 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3748 /* Here we avoid the case of a structure whose weak alignment
3749 forces many pushes of a small amount of data,
3750 and such small pushes do rounding that causes trouble. */
3751 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3752 || align >= BIGGEST_ALIGNMENT
3753 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3754 == (align / BITS_PER_UNIT)))
3755 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3757 /* Push padding now if padding above and stack grows down,
3758 or if padding below and stack grows up.
3759 But if space already allocated, this has already been done. */
3760 if (extra && args_addr == 0
3761 && where_pad != none && where_pad != stack_direction)
3762 anti_adjust_stack (GEN_INT (extra));
3764 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3766 else
3767 #endif /* PUSH_ROUNDING */
3769 rtx target;
3771 /* Otherwise make space on the stack and copy the data
3772 to the address of that space. */
3774 /* Deduct words put into registers from the size we must copy. */
3775 if (partial != 0)
3777 if (GET_CODE (size) == CONST_INT)
3778 size = GEN_INT (INTVAL (size) - used);
3779 else
3780 size = expand_binop (GET_MODE (size), sub_optab, size,
3781 GEN_INT (used), NULL_RTX, 0,
3782 OPTAB_LIB_WIDEN);
3785 /* Get the address of the stack space.
3786 In this case, we do not deal with EXTRA separately.
3787 A single stack adjust will do. */
3788 if (! args_addr)
3790 temp = push_block (size, extra, where_pad == downward);
3791 extra = 0;
3793 else if (GET_CODE (args_so_far) == CONST_INT)
3794 temp = memory_address (BLKmode,
3795 plus_constant (args_addr,
3796 skip + INTVAL (args_so_far)));
3797 else
3798 temp = memory_address (BLKmode,
3799 plus_constant (gen_rtx_PLUS (Pmode,
3800 args_addr,
3801 args_so_far),
3802 skip));
3804 if (!ACCUMULATE_OUTGOING_ARGS)
3806 /* If the source is referenced relative to the stack pointer,
3807 copy it to another register to stabilize it. We do not need
3808 to do this if we know that we won't be changing sp. */
3810 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3811 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3812 temp = copy_to_reg (temp);
3815 target = gen_rtx_MEM (BLKmode, temp);
3817 /* We do *not* set_mem_attributes here, because incoming arguments
3818 may overlap with sibling call outgoing arguments and we cannot
3819 allow reordering of reads from function arguments with stores
3820 to outgoing arguments of sibling calls. We do, however, want
3821 to record the alignment of the stack slot. */
3822 /* ALIGN may well be better aligned than TYPE, e.g. due to
3823 PARM_BOUNDARY. Assume the caller isn't lying. */
3824 set_mem_align (target, align);
3826 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3829 else if (partial > 0)
3831 /* Scalar partly in registers. */
3833 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3834 int i;
3835 int not_stack;
3836 /* # bytes of start of argument
3837 that we must make space for but need not store. */
3838 int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3839 int args_offset = INTVAL (args_so_far);
3840 int skip;
3842 /* Push padding now if padding above and stack grows down,
3843 or if padding below and stack grows up.
3844 But if space already allocated, this has already been done. */
3845 if (extra && args_addr == 0
3846 && where_pad != none && where_pad != stack_direction)
3847 anti_adjust_stack (GEN_INT (extra));
3849 /* If we make space by pushing it, we might as well push
3850 the real data. Otherwise, we can leave OFFSET nonzero
3851 and leave the space uninitialized. */
3852 if (args_addr == 0)
3853 offset = 0;
3855 /* Now NOT_STACK gets the number of words that we don't need to
3856 allocate on the stack. Convert OFFSET to words too. */
3857 not_stack = (partial - offset) / UNITS_PER_WORD;
3858 offset /= UNITS_PER_WORD;
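      /* E.g. (assumed values) with UNITS_PER_WORD == 4, PARTIAL == 8
	 and OFFSET == 0, NOT_STACK becomes 2: the first two words are
	 in registers, and the word-by-word loop below starts at word
	 index 2.  */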
3860 /* If the partial register-part of the arg counts in its stack size,
3861 skip the part of stack space corresponding to the registers.
3862 Otherwise, start copying to the beginning of the stack space,
3863 by setting SKIP to 0. */
3864 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3866 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3867 x = validize_mem (force_const_mem (mode, x));
3869 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3870 SUBREGs of such registers are not allowed. */
3871 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3872 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3873 x = copy_to_reg (x);
3875 /* Loop over all the words allocated on the stack for this arg. */
3876 /* We can do it by words, because any scalar bigger than a word
3877 has a size that is a multiple of a word. */
3878 #ifndef PUSH_ARGS_REVERSED
3879 for (i = not_stack; i < size; i++)
3880 #else
3881 for (i = size - 1; i >= not_stack; i--)
3882 #endif
3883 if (i >= not_stack + offset)
3884 emit_push_insn (operand_subword_force (x, i, mode),
3885 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3886 0, args_addr,
3887 GEN_INT (args_offset + ((i - not_stack + skip)
3888 * UNITS_PER_WORD)),
3889 reg_parm_stack_space, alignment_pad);
3891 else
3893 rtx addr;
3894 rtx dest;
3896 /* Push padding now if padding above and stack grows down,
3897 or if padding below and stack grows up.
3898 But if space already allocated, this has already been done. */
3899 if (extra && args_addr == 0
3900 && where_pad != none && where_pad != stack_direction)
3901 anti_adjust_stack (GEN_INT (extra));
3903 #ifdef PUSH_ROUNDING
3904 if (args_addr == 0 && PUSH_ARGS)
3905 emit_single_push_insn (mode, x, type);
3906 else
3907 #endif
3909 if (GET_CODE (args_so_far) == CONST_INT)
3910 addr
3911 = memory_address (mode,
3912 plus_constant (args_addr,
3913 INTVAL (args_so_far)));
3914 else
3915 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3916 args_so_far));
3917 dest = gen_rtx_MEM (mode, addr);
3919 /* We do *not* set_mem_attributes here, because incoming arguments
3920 may overlap with sibling call outgoing arguments and we cannot
3921 allow reordering of reads from function arguments with stores
3922 to outgoing arguments of sibling calls. We do, however, want
3923 to record the alignment of the stack slot. */
3924 /* ALIGN may well be better aligned than TYPE, e.g. due to
3925 PARM_BOUNDARY. Assume the caller isn't lying. */
3926 set_mem_align (dest, align);
3928 emit_move_insn (dest, x);
3932 /* If part should go in registers, copy that part
3933 into the appropriate registers. Do this now, at the end,
3934 since mem-to-mem copies above may do function calls. */
3935 if (partial > 0 && reg != 0)
3937 /* Handle calls that pass values in multiple non-contiguous locations.
3938 The Irix 6 ABI has examples of this. */
3939 if (GET_CODE (reg) == PARALLEL)
3940 emit_group_load (reg, x, type, -1);
3941 else
3943 gcc_assert (partial % UNITS_PER_WORD == 0);
3944 move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode);
3948 if (extra && args_addr == 0 && where_pad == stack_direction)
3949 anti_adjust_stack (GEN_INT (extra));
3951 if (alignment_pad && args_addr == 0)
3952 anti_adjust_stack (alignment_pad);
3955 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3956 operations. */
3958 static rtx
3959 get_subtarget (rtx x)
3961 return (optimize
3962 || x == 0
3963 /* Only registers can be subtargets. */
3964 || !REG_P (x)
3965 /* Don't use hard regs to avoid extending their life. */
3966 || REGNO (x) < FIRST_PSEUDO_REGISTER
3967 ? 0 : x);
3970 /* A subroutine of expand_assignment. Optimize FIELD op= VAL, where
3971 FIELD is a bitfield. Returns true if the optimization was successful,
3972 and there's nothing else to do. */
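/* A sketch of the rewrite this enables, under an assumed layout (the
   exact bit positions depend on the target's endianness):

     struct { unsigned int rest : 24, top : 8; } s;
     s.top += 1;

   With TOP occupying the most significant bits of a 32-bit word, the
   increment can be applied to the containing word directly as

     word += (unsigned int) 1 << 24;

   because carries out of the field simply fall off the end of the
   word, so no masking is needed.  A 1-bit field can similarly be
   toggled with xor.  */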
3974 static bool
3975 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
3976 unsigned HOST_WIDE_INT bitpos,
3977 enum machine_mode mode1, rtx str_rtx,
3978 tree to, tree src)
3980 enum machine_mode str_mode = GET_MODE (str_rtx);
3981 unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
3982 tree op0, op1;
3983 rtx value, result;
3984 optab binop;
3986 if (mode1 != VOIDmode
3987 || bitsize >= BITS_PER_WORD
3988 || str_bitsize > BITS_PER_WORD
3989 || TREE_SIDE_EFFECTS (to)
3990 || TREE_THIS_VOLATILE (to))
3991 return false;
3993 STRIP_NOPS (src);
3994 if (!BINARY_CLASS_P (src)
3995 || TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
3996 return false;
3998 op0 = TREE_OPERAND (src, 0);
3999 op1 = TREE_OPERAND (src, 1);
4000 STRIP_NOPS (op0);
4002 if (!operand_equal_p (to, op0, 0))
4003 return false;
4005 if (MEM_P (str_rtx))
4007 unsigned HOST_WIDE_INT offset1;
4009 if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
4010 str_mode = word_mode;
4011 str_mode = get_best_mode (bitsize, bitpos,
4012 MEM_ALIGN (str_rtx), str_mode, 0);
4013 if (str_mode == VOIDmode)
4014 return false;
4015 str_bitsize = GET_MODE_BITSIZE (str_mode);
4017 offset1 = bitpos;
4018 bitpos %= str_bitsize;
4019 offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
4020 str_rtx = adjust_address (str_rtx, str_mode, offset1);
4022 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
4023 return false;
4025 /* If the bit field covers the whole REG/MEM, store_field
4026 will likely generate better code. */
4027 if (bitsize >= str_bitsize)
4028 return false;
4030 /* We can't handle fields split across multiple entities. */
4031 if (bitpos + bitsize > str_bitsize)
4032 return false;
4034 if (BYTES_BIG_ENDIAN)
4035 bitpos = str_bitsize - bitpos - bitsize;
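    /* E.g. (assumed 32-bit STR_BITSIZE) a field with BITPOS == 0 and
       BITSIZE == 8 becomes BITPOS == 24 here: on a big-endian target
       the lowest-numbered bits are the most significant, so the shift
       count below must be taken from the other end of the word.  */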
4037 switch (TREE_CODE (src))
4039 case PLUS_EXPR:
4040 case MINUS_EXPR:
4041 /* For now, just optimize the case of the topmost bitfield
4042 where we don't need to do any masking and also
4043 1-bit bitfields where xor can be used.
4044 We might win by one instruction for the other bitfields
4045 too if insv/extv instructions aren't used, so that
4046 can be added later. */
4047 if (bitpos + bitsize != str_bitsize
4048 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
4049 break;
4051 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4052 value = convert_modes (str_mode,
4053 TYPE_MODE (TREE_TYPE (op1)), value,
4054 TYPE_UNSIGNED (TREE_TYPE (op1)));
4056 /* We may be accessing data outside the field, which means
4057 we can alias adjacent data. */
4058 if (MEM_P (str_rtx))
4060 str_rtx = shallow_copy_rtx (str_rtx);
4061 set_mem_alias_set (str_rtx, 0);
4062 set_mem_expr (str_rtx, 0);
4065 binop = TREE_CODE (src) == PLUS_EXPR ? add_optab : sub_optab;
4066 if (bitsize == 1 && bitpos + bitsize != str_bitsize)
4068 value = expand_and (str_mode, value, const1_rtx, NULL);
4069 binop = xor_optab;
4071 value = expand_shift (LSHIFT_EXPR, str_mode, value,
4072 build_int_cst (NULL_TREE, bitpos),
4073 NULL_RTX, 1);
4074 result = expand_binop (str_mode, binop, str_rtx,
4075 value, str_rtx, 1, OPTAB_WIDEN);
4076 if (result != str_rtx)
4077 emit_move_insn (str_rtx, result);
4078 return true;
4080 case BIT_IOR_EXPR:
4081 case BIT_XOR_EXPR:
4082 if (TREE_CODE (op1) != INTEGER_CST)
4083 break;
4084 value = expand_expr (op1, NULL_RTX, GET_MODE (str_rtx), EXPAND_NORMAL);
4085 value = convert_modes (GET_MODE (str_rtx),
4086 TYPE_MODE (TREE_TYPE (op1)), value,
4087 TYPE_UNSIGNED (TREE_TYPE (op1)));
4089 /* We may be accessing data outside the field, which means
4090 we can alias adjacent data. */
4091 if (MEM_P (str_rtx))
4093 str_rtx = shallow_copy_rtx (str_rtx);
4094 set_mem_alias_set (str_rtx, 0);
4095 set_mem_expr (str_rtx, 0);
4098 binop = TREE_CODE (src) == BIT_IOR_EXPR ? ior_optab : xor_optab;
4099 if (bitpos + bitsize != GET_MODE_BITSIZE (GET_MODE (str_rtx)))
4101 rtx mask = GEN_INT (((unsigned HOST_WIDE_INT) 1 << bitsize)
4102 - 1);
4103 value = expand_and (GET_MODE (str_rtx), value, mask,
4104 NULL_RTX);
4106 value = expand_shift (LSHIFT_EXPR, GET_MODE (str_rtx), value,
4107 build_int_cst (NULL_TREE, bitpos),
4108 NULL_RTX, 1);
4109 result = expand_binop (GET_MODE (str_rtx), binop, str_rtx,
4110 value, str_rtx, 1, OPTAB_WIDEN);
4111 if (result != str_rtx)
4112 emit_move_insn (str_rtx, result);
4113 return true;
4115 default:
4116 break;
4119 return false;
4123 /* Expand an assignment that stores the value of FROM into TO. If NONTEMPORAL
4124 is true, try generating a nontemporal store. */
4126 void
4127 expand_assignment (tree to, tree from, bool nontemporal)
4129 rtx to_rtx = 0;
4130 rtx result;
4132 /* Don't crash if the lhs of the assignment was erroneous. */
4133 if (TREE_CODE (to) == ERROR_MARK)
4135 result = expand_normal (from);
4136 return;
4139 /* Optimize away no-op moves without side-effects. */
4140 if (operand_equal_p (to, from, 0))
4141 return;
4143 /* Assignment of a structure component needs special treatment
4144 if the structure component's rtx is not simply a MEM.
4145 Assignment of an array element at a constant index, and assignment of
4146 an array element in an unaligned packed structure field, have the same
4147 problem. */
4148 if (handled_component_p (to)
4149 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4151 enum machine_mode mode1;
4152 HOST_WIDE_INT bitsize, bitpos;
4153 tree offset;
4154 int unsignedp;
4155 int volatilep = 0;
4156 tree tem;
4158 push_temp_slots ();
4159 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4160 &unsignedp, &volatilep, true);
4162 /* If we are going to use store_bit_field and extract_bit_field,
4163 make sure to_rtx will be safe for multiple use. */
4165 to_rtx = expand_normal (tem);
4167 if (offset != 0)
4169 rtx offset_rtx;
4171 if (!MEM_P (to_rtx))
4173 /* We can get constant negative offsets into arrays with broken
4174 user code. Translate this to a trap instead of ICEing. */
4175 gcc_assert (TREE_CODE (offset) == INTEGER_CST);
4176 expand_builtin_trap ();
4177 to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
4180 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4181 #ifdef POINTERS_EXTEND_UNSIGNED
4182 if (GET_MODE (offset_rtx) != Pmode)
4183 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4184 #else
4185 if (GET_MODE (offset_rtx) != ptr_mode)
4186 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4187 #endif
4189 /* A constant address in TO_RTX can have VOIDmode; we must not try
4190 to call force_reg in that case, so avoid it. */
4191 if (MEM_P (to_rtx)
4192 && GET_MODE (to_rtx) == BLKmode
4193 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
4194 && bitsize > 0
4195 && (bitpos % bitsize) == 0
4196 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4197 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
4199 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4200 bitpos = 0;
4203 to_rtx = offset_address (to_rtx, offset_rtx,
4204 highest_pow2_factor_for_target (to,
4205 offset));
4208 /* Handle expand_expr of a complex value returning a CONCAT. */
4209 if (GET_CODE (to_rtx) == CONCAT)
4211 if (TREE_CODE (TREE_TYPE (from)) == COMPLEX_TYPE)
4213 gcc_assert (bitpos == 0);
4214 result = store_expr (from, to_rtx, false, nontemporal);
4216 else
4218 gcc_assert (bitpos == 0 || bitpos == GET_MODE_BITSIZE (mode1));
4219 result = store_expr (from, XEXP (to_rtx, bitpos != 0), false,
4220 nontemporal);
4223 else
4225 if (MEM_P (to_rtx))
4227 /* If the field is at offset zero, we could have been given the
4228 DECL_RTX of the parent struct. Don't munge it. */
4229 to_rtx = shallow_copy_rtx (to_rtx);
4231 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4233 /* Deal with volatile and readonly fields. The former is only
4234 done for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
4235 if (volatilep)
4236 MEM_VOLATILE_P (to_rtx) = 1;
4237 if (component_uses_parent_alias_set (to))
4238 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4241 if (optimize_bitfield_assignment_op (bitsize, bitpos, mode1,
4242 to_rtx, to, from))
4243 result = NULL;
4244 else
4245 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
4246 TREE_TYPE (tem), get_alias_set (to),
4247 nontemporal);
4250 if (result)
4251 preserve_temp_slots (result);
4252 free_temp_slots ();
4253 pop_temp_slots ();
4254 return;
4257 /* If the rhs is a function call and its value is not an aggregate,
4258 call the function before we start to compute the lhs.
4259 This is needed for correct code for cases such as
4260 val = setjmp (buf) on machines where reference to val
4261 requires loading up part of an address in a separate insn.
4263 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4264 since it might be a promoted variable where the zero- or sign- extension
4265 needs to be done. Handling this in the normal way is safe because no
4266 computation is done before the call. */
4267 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
4268 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
4269 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
4270 && REG_P (DECL_RTL (to))))
4272 rtx value;
4274 push_temp_slots ();
4275 value = expand_normal (from);
4276 if (to_rtx == 0)
4277 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4279 /* Handle calls that return values in multiple non-contiguous locations.
4280 The Irix 6 ABI has examples of this. */
4281 if (GET_CODE (to_rtx) == PARALLEL)
4282 emit_group_load (to_rtx, value, TREE_TYPE (from),
4283 int_size_in_bytes (TREE_TYPE (from)));
4284 else if (GET_MODE (to_rtx) == BLKmode)
4285 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
4286 else
4288 if (POINTER_TYPE_P (TREE_TYPE (to)))
4289 value = convert_memory_address (GET_MODE (to_rtx), value);
4290 emit_move_insn (to_rtx, value);
4292 preserve_temp_slots (to_rtx);
4293 free_temp_slots ();
4294 pop_temp_slots ();
4295 return;
4298 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
4299 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
4301 if (to_rtx == 0)
4302 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4304 /* Don't move directly into a return register. */
4305 if (TREE_CODE (to) == RESULT_DECL
4306 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
4308 rtx temp;
4310 push_temp_slots ();
4311 temp = expand_expr (from, NULL_RTX, GET_MODE (to_rtx), EXPAND_NORMAL);
4313 if (GET_CODE (to_rtx) == PARALLEL)
4314 emit_group_load (to_rtx, temp, TREE_TYPE (from),
4315 int_size_in_bytes (TREE_TYPE (from)));
4316 else
4317 emit_move_insn (to_rtx, temp);
4319 preserve_temp_slots (to_rtx);
4320 free_temp_slots ();
4321 pop_temp_slots ();
4322 return;
4325 /* In case we are returning the contents of an object which overlaps
4326 the place the value is being stored, use a safe function when copying
4327 a value through a pointer into a structure value return block. */
4328 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
4329 && current_function_returns_struct
4330 && !current_function_returns_pcc_struct)
4332 rtx from_rtx, size;
4334 push_temp_slots ();
4335 size = expr_size (from);
4336 from_rtx = expand_normal (from);
4338 emit_library_call (memmove_libfunc, LCT_NORMAL,
4339 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4340 XEXP (from_rtx, 0), Pmode,
4341 convert_to_mode (TYPE_MODE (sizetype),
4342 size, TYPE_UNSIGNED (sizetype)),
4343 TYPE_MODE (sizetype));
4345 preserve_temp_slots (to_rtx);
4346 free_temp_slots ();
4347 pop_temp_slots ();
4348 return;
4351 /* Compute FROM and store the value in the rtx we got. */
4353 push_temp_slots ();
4354 result = store_expr (from, to_rtx, 0, nontemporal);
4355 preserve_temp_slots (result);
4356 free_temp_slots ();
4357 pop_temp_slots ();
4358 return;
4361 /* Emits nontemporal store insn that moves FROM to TO. Returns true if this
4362 succeeded, false otherwise. */
4364 static bool
4365 emit_storent_insn (rtx to, rtx from)
4367 enum machine_mode mode = GET_MODE (to), imode;
4368 enum insn_code code = optab_handler (storent_optab, mode)->insn_code;
4369 rtx pattern;
4371 if (code == CODE_FOR_nothing)
4372 return false;
4374 imode = insn_data[code].operand[0].mode;
4375 if (!insn_data[code].operand[0].predicate (to, imode))
4376 return false;
4378 imode = insn_data[code].operand[1].mode;
4379 if (!insn_data[code].operand[1].predicate (from, imode))
4381 from = copy_to_mode_reg (imode, from);
4382 if (!insn_data[code].operand[1].predicate (from, imode))
4383 return false;
4386 pattern = GEN_FCN (code) (to, from);
4387 if (pattern == NULL_RTX)
4388 return false;
4390 emit_insn (pattern);
4391 return true;
4394 /* Generate code for computing expression EXP,
4395 and storing the value into TARGET.
4397 If the mode is BLKmode then we may return TARGET itself.
4398 It turns out that in BLKmode it doesn't cause a problem,
4399 because C has no operators that could combine two different
4400 assignments into the same BLKmode object with different values
4401 with no sequence point. Will other languages need this to
4402 be more thorough?
4404 If CALL_PARAM_P is nonzero, this is a store into a call param on the
4405 stack, and block moves may need to be treated specially.
4407 If NONTEMPORAL is true, try using a nontemporal store instruction. */
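/* A typical use (sketch): expand_assignment above lowers a plain
   assignment  d = s  to essentially

     rtx to_rtx = expand_expr (d, NULL_RTX, VOIDmode, EXPAND_WRITE);
     store_expr (s, to_rtx, 0, false);

   once the special cases (bitfields, calls, return registers) have
   been dealt with.  */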
4409 rtx
4410 store_expr (tree exp, rtx target, int call_param_p, bool nontemporal)
4412 rtx temp;
4413 rtx alt_rtl = NULL_RTX;
4414 int dont_return_target = 0;
4416 if (VOID_TYPE_P (TREE_TYPE (exp)))
4418 /* C++ can generate ?: expressions with a throw expression in one
4419 branch and an rvalue in the other. Here, we resolve attempts to
4420 store the throw expression's nonexistent result. */
4421 gcc_assert (!call_param_p);
4422 expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
4423 return NULL_RTX;
4425 if (TREE_CODE (exp) == COMPOUND_EXPR)
4427 /* Perform first part of compound expression, then assign from second
4428 part. */
4429 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4430 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4431 return store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
4432 nontemporal);
4434 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4436 /* For conditional expression, get safe form of the target. Then
4437 test the condition, doing the appropriate assignment on either
4438 side. This avoids the creation of unnecessary temporaries.
4439 For non-BLKmode, it is more efficient not to do this. */
4441 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4443 do_pending_stack_adjust ();
4444 NO_DEFER_POP;
4445 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4446 store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
4447 nontemporal);
4448 emit_jump_insn (gen_jump (lab2));
4449 emit_barrier ();
4450 emit_label (lab1);
4451 store_expr (TREE_OPERAND (exp, 2), target, call_param_p,
4452 nontemporal);
4453 emit_label (lab2);
4454 OK_DEFER_POP;
4456 return NULL_RTX;
4458 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4459 /* If this is a scalar in a register that is stored in a wider mode
4460 than the declared mode, compute the result into its declared mode
4461 and then convert to the wider mode. Our value is the computed
4462 expression. */
4464 rtx inner_target = 0;
4466 /* We can do the conversion inside EXP, which will often result
4467 in some optimizations. Do the conversion in two steps: first
4468 change the signedness, if needed, then the extend. But don't
4469 do this if the type of EXP is a subtype of something else
4470 since then the conversion might involve more than just
4471 converting modes. */
4472 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
4473 && TREE_TYPE (TREE_TYPE (exp)) == 0
4474 && (!lang_hooks.reduce_bit_field_operations
4475 || (GET_MODE_PRECISION (GET_MODE (target))
4476 == TYPE_PRECISION (TREE_TYPE (exp)))))
4478 if (TYPE_UNSIGNED (TREE_TYPE (exp))
4479 != SUBREG_PROMOTED_UNSIGNED_P (target))
4481 /* Some types, e.g. Fortran's logical*4, won't have a signed
4482 version, so use the mode instead. */
4483 tree ntype
4484 = (signed_or_unsigned_type_for
4485 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)));
4486 if (ntype == NULL)
4487 ntype = lang_hooks.types.type_for_mode
4488 (TYPE_MODE (TREE_TYPE (exp)),
4489 SUBREG_PROMOTED_UNSIGNED_P (target));
4491 exp = fold_convert (ntype, exp);
4494 exp = fold_convert (lang_hooks.types.type_for_mode
4495 (GET_MODE (SUBREG_REG (target)),
4496 SUBREG_PROMOTED_UNSIGNED_P (target)),
4497 exp);
4499 inner_target = SUBREG_REG (target);
4502 temp = expand_expr (exp, inner_target, VOIDmode,
4503 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4505 /* If TEMP is a VOIDmode constant, use convert_modes to make
4506 sure that we properly convert it. */
4507 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4509 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4510 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4511 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4512 GET_MODE (target), temp,
4513 SUBREG_PROMOTED_UNSIGNED_P (target));
4516 convert_move (SUBREG_REG (target), temp,
4517 SUBREG_PROMOTED_UNSIGNED_P (target));
4519 return NULL_RTX;
4521 else if (TREE_CODE (exp) == STRING_CST
4522 && !nontemporal && !call_param_p
4523 && TREE_STRING_LENGTH (exp) > 0
4524 && TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
4526 /* Optimize initialization of an array with a STRING_CST. */
4527 HOST_WIDE_INT exp_len, str_copy_len;
4528 rtx dest_mem;
4530 exp_len = int_expr_size (exp);
4531 if (exp_len <= 0)
4532 goto normal_expr;
4534 str_copy_len = strlen (TREE_STRING_POINTER (exp));
4535 if (str_copy_len < TREE_STRING_LENGTH (exp) - 1)
4536 goto normal_expr;
4538 str_copy_len = TREE_STRING_LENGTH (exp);
4539 if ((STORE_MAX_PIECES & (STORE_MAX_PIECES - 1)) == 0)
4541 str_copy_len += STORE_MAX_PIECES - 1;
4542 str_copy_len &= ~(STORE_MAX_PIECES - 1);
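      /* For instance, with STORE_MAX_PIECES == 8 (a power of two), a
	 13-byte string rounds up to (13 + 7) & ~7 == 16 bytes; the MIN
	 against EXP_LEN below keeps this within the target object.  */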
4544 str_copy_len = MIN (str_copy_len, exp_len);
4545 if (!can_store_by_pieces (str_copy_len, builtin_strncpy_read_str,
4546 (void *) TREE_STRING_POINTER (exp),
4547 MEM_ALIGN (target), false))
4548 goto normal_expr;
4550 dest_mem = target;
4552 dest_mem = store_by_pieces (dest_mem,
4553 str_copy_len, builtin_strncpy_read_str,
4554 (void *) TREE_STRING_POINTER (exp),
4555 MEM_ALIGN (target), false,
4556 exp_len > str_copy_len ? 1 : 0);
4557 if (exp_len > str_copy_len)
4558 clear_storage (adjust_address (dest_mem, BLKmode, 0),
4559 GEN_INT (exp_len - str_copy_len),
4560 BLOCK_OP_NORMAL);
4561 return NULL_RTX;
4563 else
4565 rtx tmp_target;
4567 normal_expr:
4568 /* If we want to use a nontemporal store, force the value to
4569 register first. */
4570 tmp_target = nontemporal ? NULL_RTX : target;
4571 temp = expand_expr_real (exp, tmp_target, GET_MODE (target),
4572 (call_param_p
4573 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
4574 &alt_rtl);
4575 /* Return TARGET if it's a specified hardware register.
4576 If TARGET is a volatile mem ref, either return TARGET
4577 or return a reg copied *from* TARGET; ANSI requires this.
4579 Otherwise, if TEMP is not TARGET, return TEMP
4580 if it is constant (for efficiency),
4581 or if we really want the correct value. */
4582 if (!(target && REG_P (target)
4583 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4584 && !(MEM_P (target) && MEM_VOLATILE_P (target))
4585 && ! rtx_equal_p (temp, target)
4586 && CONSTANT_P (temp))
4587 dont_return_target = 1;
4590 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4591 the same as that of TARGET, adjust the constant. This is needed, for
4592 example, in case it is a CONST_DOUBLE and we want only a word-sized
4593 value. */
4594 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4595 && TREE_CODE (exp) != ERROR_MARK
4596 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4597 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4598 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
4600 /* If value was not generated in the target, store it there.
4601 Convert the value to TARGET's type first if necessary and emit the
4602 pending incrementations that were queued while expanding EXP.
4603 Note that we cannot emit the whole queue blindly because this will
4604 effectively disable the POST_INC optimization later.
4606 If TEMP and TARGET compare equal according to rtx_equal_p, but
4607 one or both of them are volatile memory refs, we have to distinguish
4608 two cases:
4609 - expand_expr has used TARGET. In this case, we must not generate
4610 another copy. This can be detected by TARGET being equal according
4611 to == .
4612 - expand_expr has not used TARGET - that means that the source just
4613 happens to have the same RTX form. Since temp will have been created
4614 by expand_expr, it will compare unequal according to == .
4615 We must generate a copy in this case, to reach the correct number
4616 of volatile memory references. */
4618 if ((! rtx_equal_p (temp, target)
4619 || (temp != target && (side_effects_p (temp)
4620 || side_effects_p (target))))
4621 && TREE_CODE (exp) != ERROR_MARK
4622 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4623 but TARGET is not valid memory reference, TEMP will differ
4624 from TARGET although it is really the same location. */
4625 && !(alt_rtl && rtx_equal_p (alt_rtl, target))
4626 /* If there's nothing to copy, don't bother. Don't call
4627 expr_size unless necessary, because for some front-ends (C++)
4628 the expr_size hook must not be given objects that are not
4629 supposed to be bit-copied or bit-initialized. */
4630 && expr_size (exp) != const0_rtx)
4632 if (GET_MODE (temp) != GET_MODE (target)
4633 && GET_MODE (temp) != VOIDmode)
4635 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
4636 if (dont_return_target)
4638 /* In this case, we will return TEMP,
4639 so make sure it has the proper mode.
4640 But don't forget to store the value into TARGET. */
4641 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4642 emit_move_insn (target, temp);
4644 else if (GET_MODE (target) == BLKmode)
4645 emit_block_move (target, temp, expr_size (exp),
4646 (call_param_p
4647 ? BLOCK_OP_CALL_PARM
4648 : BLOCK_OP_NORMAL));
4649 else
4650 convert_move (target, temp, unsignedp);
4653 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4655 /* Handle copying a string constant into an array. The string
4656 constant may be shorter than the array. So copy just the string's
4657 actual length, and clear the rest. First get the size of the data
4658 type of the string, which is actually the size of the target. */
4659 rtx size = expr_size (exp);
4661 if (GET_CODE (size) == CONST_INT
4662 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4663 emit_block_move (target, temp, size,
4664 (call_param_p
4665 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4666 else
4668 /* Compute the size of the data to copy from the string. */
4669 tree copy_size
4670 = size_binop (MIN_EXPR,
4671 make_tree (sizetype, size),
4672 size_int (TREE_STRING_LENGTH (exp)));
4673 rtx copy_size_rtx
4674 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4675 (call_param_p
4676 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4677 rtx label = 0;
4679 /* Copy that much. */
4680 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4681 TYPE_UNSIGNED (sizetype));
4682 emit_block_move (target, temp, copy_size_rtx,
4683 (call_param_p
4684 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4686 /* Figure out how much is left in TARGET that we have to clear.
4687 Do all calculations in ptr_mode. */
4688 if (GET_CODE (copy_size_rtx) == CONST_INT)
4690 size = plus_constant (size, -INTVAL (copy_size_rtx));
4691 target = adjust_address (target, BLKmode,
4692 INTVAL (copy_size_rtx));
4694 else
4696 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4697 copy_size_rtx, NULL_RTX, 0,
4698 OPTAB_LIB_WIDEN);
4700 #ifdef POINTERS_EXTEND_UNSIGNED
4701 if (GET_MODE (copy_size_rtx) != Pmode)
4702 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4703 TYPE_UNSIGNED (sizetype));
4704 #endif
4706 target = offset_address (target, copy_size_rtx,
4707 highest_pow2_factor (copy_size));
4708 label = gen_label_rtx ();
4709 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4710 GET_MODE (size), 0, label);
4713 if (size != const0_rtx)
4714 clear_storage (target, size, BLOCK_OP_NORMAL);
4716 if (label)
4717 emit_label (label);
4720 /* Handle calls that return values in multiple non-contiguous locations.
4721 The Irix 6 ABI has examples of this. */
4722 else if (GET_CODE (target) == PARALLEL)
4723 emit_group_load (target, temp, TREE_TYPE (exp),
4724 int_size_in_bytes (TREE_TYPE (exp)));
4725 else if (GET_MODE (temp) == BLKmode)
4726 emit_block_move (target, temp, expr_size (exp),
4727 (call_param_p
4728 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4729 else if (nontemporal
4730 && emit_storent_insn (target, temp))
4731 /* If we managed to emit a nontemporal store, there is nothing else to
4732 do. */
4734 else
4736 temp = force_operand (temp, target);
4737 if (temp != target)
4738 emit_move_insn (target, temp);
4742 return NULL_RTX;
4745 /* Helper for categorize_ctor_elements. Identical interface. */
4747 static bool
4748 categorize_ctor_elements_1 (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
4749 HOST_WIDE_INT *p_elt_count,
4750 bool *p_must_clear)
4752 unsigned HOST_WIDE_INT idx;
4753 HOST_WIDE_INT nz_elts, elt_count;
4754 tree value, purpose;
4756 /* Whether CTOR is a valid constant initializer, in accordance with what
4757 initializer_constant_valid_p does. If inferred from the constructor
4758 elements, true until proven otherwise. */
4759 bool const_from_elts_p = constructor_static_from_elts_p (ctor);
4760 bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);
4762 nz_elts = 0;
4763 elt_count = 0;
4765 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
4767 HOST_WIDE_INT mult;
4769 mult = 1;
4770 if (TREE_CODE (purpose) == RANGE_EXPR)
4772 tree lo_index = TREE_OPERAND (purpose, 0);
4773 tree hi_index = TREE_OPERAND (purpose, 1);
4775 if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
4776 mult = (tree_low_cst (hi_index, 1)
4777 - tree_low_cst (lo_index, 1) + 1);
4780 switch (TREE_CODE (value))
4782 case CONSTRUCTOR:
4784 HOST_WIDE_INT nz = 0, ic = 0;
4786 bool const_elt_p
4787 = categorize_ctor_elements_1 (value, &nz, &ic, p_must_clear);
4789 nz_elts += mult * nz;
4790 elt_count += mult * ic;
4792 if (const_from_elts_p && const_p)
4793 const_p = const_elt_p;
4795 break;
4797 case INTEGER_CST:
4798 case REAL_CST:
4799 case FIXED_CST:
4800 if (!initializer_zerop (value))
4801 nz_elts += mult;
4802 elt_count += mult;
4803 break;
4805 case STRING_CST:
4806 nz_elts += mult * TREE_STRING_LENGTH (value);
4807 elt_count += mult * TREE_STRING_LENGTH (value);
4808 break;
4810 case COMPLEX_CST:
4811 if (!initializer_zerop (TREE_REALPART (value)))
4812 nz_elts += mult;
4813 if (!initializer_zerop (TREE_IMAGPART (value)))
4814 nz_elts += mult;
4815 elt_count += mult;
4816 break;
4818 case VECTOR_CST:
4820 tree v;
4821 for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
4823 if (!initializer_zerop (TREE_VALUE (v)))
4824 nz_elts += mult;
4825 elt_count += mult;
4828 break;
4830 default:
4831 nz_elts += mult;
4832 elt_count += mult;
4834 if (const_from_elts_p && const_p)
4835 const_p = initializer_constant_valid_p (value, TREE_TYPE (value))
4836 != NULL_TREE;
4837 break;
4841 if (!*p_must_clear
4842 && (TREE_CODE (TREE_TYPE (ctor)) == UNION_TYPE
4843 || TREE_CODE (TREE_TYPE (ctor)) == QUAL_UNION_TYPE))
4845 tree init_sub_type;
4846 bool clear_this = true;
4848 if (!VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (ctor)))
4850 /* We don't expect more than one element of the union to be
4851 initialized. Not sure what we should do otherwise... */
4852 gcc_assert (VEC_length (constructor_elt, CONSTRUCTOR_ELTS (ctor))
4853 == 1);
4855 init_sub_type = TREE_TYPE (VEC_index (constructor_elt,
4856 CONSTRUCTOR_ELTS (ctor),
4857 0)->value);
4859 /* ??? We could look at each element of the union, and find the
4860 largest element. Which would avoid comparing the size of the
4861 initialized element against any tail padding in the union.
4862 Doesn't seem worth the effort... */
4863 if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (ctor)),
4864 TYPE_SIZE (init_sub_type)) == 1)
4866 /* And now we have to find out if the element itself is fully
4867 constructed. E.g. for union { struct { int a, b; } s; } u
4868 = { .s = { .a = 1 } }. */
4869 if (elt_count == count_type_elements (init_sub_type, false))
4870 clear_this = false;
4874 *p_must_clear = clear_this;
4877 *p_nz_elts += nz_elts;
4878 *p_elt_count += elt_count;
4880 return const_p;
4883 /* Examine CTOR to discover:
4884 * how many scalar fields are set to nonzero values,
4885 and place it in *P_NZ_ELTS;
4886 * how many scalar fields in total are in CTOR,
4887 and place it in *P_ELT_COUNT.
4888 * if a type is a union, and the initializer from the constructor
4889 is not the largest element in the union, then set *p_must_clear.
4891 Return whether or not CTOR is a valid static constant initializer, the same
4892 as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0". */
4894 bool
4895 categorize_ctor_elements (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
4896 HOST_WIDE_INT *p_elt_count,
4897 bool *p_must_clear)
4899 *p_nz_elts = 0;
4900 *p_elt_count = 0;
4901 *p_must_clear = false;
4903 return
4904 categorize_ctor_elements_1 (ctor, p_nz_elts, p_elt_count, p_must_clear);
4907 /* Count the number of scalars in TYPE. Return -1 on overflow or
4908 if TYPE is variable-sized.  If ALLOW_FLEXARR is true, don't count a
4909 flexible array member at the end of the structure. */
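/* E.g. struct { int a; int b[3]; } counts as 4 scalars, while any
   type whose size is not known at compile time yields -1.  */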
4911 HOST_WIDE_INT
4912 count_type_elements (const_tree type, bool allow_flexarr)
4914 const HOST_WIDE_INT max = ~((HOST_WIDE_INT)1 << (HOST_BITS_PER_WIDE_INT-1));
4915 switch (TREE_CODE (type))
4917 case ARRAY_TYPE:
4919 tree telts = array_type_nelts (type);
4920 if (telts && host_integerp (telts, 1))
4922 HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1;
4923 HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type), false);
4924 if (n == 0)
4925 return 0;
4926 else if (max / n > m)
4927 return n * m;
4929 return -1;
4932 case RECORD_TYPE:
4934 HOST_WIDE_INT n = 0, t;
4935 tree f;
4937 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
4938 if (TREE_CODE (f) == FIELD_DECL)
4940 t = count_type_elements (TREE_TYPE (f), false);
4941 if (t < 0)
4943 /* Check for structures with flexible array member. */
4944 tree tf = TREE_TYPE (f);
4945 if (allow_flexarr
4946 && TREE_CHAIN (f) == NULL
4947 && TREE_CODE (tf) == ARRAY_TYPE
4948 && TYPE_DOMAIN (tf)
4949 && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
4950 && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
4951 && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
4952 && int_size_in_bytes (type) >= 0)
4953 break;
4955 return -1;
4957 n += t;
4960 return n;
4963 case UNION_TYPE:
4964 case QUAL_UNION_TYPE:
4966 /* Ho hum. How in the world do we guess here? Clearly it isn't
4967 right to count the fields. Guess based on the number of words. */
4968 HOST_WIDE_INT n = int_size_in_bytes (type);
4969 if (n < 0)
4970 return -1;
4971 return n / UNITS_PER_WORD;
4974 case COMPLEX_TYPE:
4975 return 2;
4977 case VECTOR_TYPE:
4978 return TYPE_VECTOR_SUBPARTS (type);
4980 case INTEGER_TYPE:
4981 case REAL_TYPE:
4982 case FIXED_POINT_TYPE:
4983 case ENUMERAL_TYPE:
4984 case BOOLEAN_TYPE:
4985 case POINTER_TYPE:
4986 case OFFSET_TYPE:
4987 case REFERENCE_TYPE:
4988 return 1;
4990 case VOID_TYPE:
4991 case METHOD_TYPE:
4992 case FUNCTION_TYPE:
4993 case LANG_TYPE:
4994 default:
4995 gcc_unreachable ();
4999 /* Return 1 if EXP contains mostly (3/4) zeros. */
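/* E.g. an 8-element array constructor with one nonzero entry
   qualifies (1 < 8/4), while one with three nonzero entries
   does not.  */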
5001 static int
5002 mostly_zeros_p (const_tree exp)
5004 if (TREE_CODE (exp) == CONSTRUCTOR)
5007 HOST_WIDE_INT nz_elts, count, elts;
5008 bool must_clear;
5010 categorize_ctor_elements (exp, &nz_elts, &count, &must_clear);
5011 if (must_clear)
5012 return 1;
5014 elts = count_type_elements (TREE_TYPE (exp), false);
5016 return nz_elts < elts / 4;
5019 return initializer_zerop (exp);
5022 /* Return 1 if EXP contains all zeros. */
5024 static int
5025 all_zeros_p (const_tree exp)
5027 if (TREE_CODE (exp) == CONSTRUCTOR)
5030 HOST_WIDE_INT nz_elts, count;
5031 bool must_clear;
5033 categorize_ctor_elements (exp, &nz_elts, &count, &must_clear);
5034 return nz_elts == 0;
5037 return initializer_zerop (exp);
5040 /* Helper function for store_constructor.
5041 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
5042 TYPE is the type of the CONSTRUCTOR, not the element type.
5043 CLEARED is as for store_constructor.
5044 ALIAS_SET is the alias set to use for any stores.
5046 This provides a recursive shortcut back to store_constructor when it isn't
5047 necessary to go through store_field. This is so that we can pass through
5048 the cleared field to let store_constructor know that we may not have to
5049 clear a substructure if the outer structure has already been cleared. */
5051 static void
5052 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
5053 HOST_WIDE_INT bitpos, enum machine_mode mode,
5054 tree exp, tree type, int cleared,
5055 alias_set_type alias_set)
5057 if (TREE_CODE (exp) == CONSTRUCTOR
5058 /* We can only call store_constructor recursively if the size and
5059 bit position are on a byte boundary. */
5060 && bitpos % BITS_PER_UNIT == 0
5061 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
5062 /* If we have a nonzero bitpos for a register target, then we just
5063 let store_field do the bitfield handling. This is unlikely to
5064 generate unnecessary clear instructions anyways. */
5065 && (bitpos == 0 || MEM_P (target)))
5067 if (MEM_P (target))
5068 target
5069 = adjust_address (target,
5070 GET_MODE (target) == BLKmode
5071 || 0 != (bitpos
5072 % GET_MODE_ALIGNMENT (GET_MODE (target)))
5073 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
5076 /* Update the alias set, if required. */
5077 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
5078 && MEM_ALIAS_SET (target) != 0)
5080 target = copy_rtx (target);
5081 set_mem_alias_set (target, alias_set);
5084 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
5086 else
5087 store_field (target, bitsize, bitpos, mode, exp, type, alias_set, false);
5090 /* Store the value of constructor EXP into the rtx TARGET.
5091 TARGET is either a REG or a MEM; we know it cannot conflict, since
5092 safe_from_p has been called.
5093 CLEARED is true if TARGET is known to have been zero'd.
5094 SIZE is the number of bytes of TARGET we are allowed to modify: this
5095 may not be the same as the size of EXP if we are assigning to a field
5096 which has been packed to exclude padding bits. */
5098 static void
5099 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
5101 tree type = TREE_TYPE (exp);
5102 #ifdef WORD_REGISTER_OPERATIONS
5103 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
5104 #endif
5106 switch (TREE_CODE (type))
5108 case RECORD_TYPE:
5109 case UNION_TYPE:
5110 case QUAL_UNION_TYPE:
5112 unsigned HOST_WIDE_INT idx;
5113 tree field, value;
5115 /* If size is zero or the target is already cleared, do nothing. */
5116 if (size == 0 || cleared)
5117 cleared = 1;
5118 /* We either clear the aggregate or indicate the value is dead. */
5119 else if ((TREE_CODE (type) == UNION_TYPE
5120 || TREE_CODE (type) == QUAL_UNION_TYPE)
5121 && ! CONSTRUCTOR_ELTS (exp))
5122 /* If the constructor is empty, clear the union. */
5124 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
5125 cleared = 1;
5128 /* If we are building a static constructor into a register,
5129 set the initial value as zero so we can fold the value into
5130 a constant. But if more than one register is involved,
5131 this probably loses. */
5132 else if (REG_P (target) && TREE_STATIC (exp)
5133 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
5135 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5136 cleared = 1;
5139 /* If the constructor has fewer fields than the structure or
5140 if we are initializing the structure to mostly zeros, clear
5141 the whole structure first. Don't do this if TARGET is a
5142 register whose mode size isn't equal to SIZE since
5143 clear_storage can't handle this case. */
5144 else if (size > 0
5145 && (((int)VEC_length (constructor_elt, CONSTRUCTOR_ELTS (exp))
5146 != fields_length (type))
5147 || mostly_zeros_p (exp))
5148 && (!REG_P (target)
5149 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
5150 == size)))
5152 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5153 cleared = 1;
5156 if (REG_P (target) && !cleared)
5157 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
5159 /* Store each element of the constructor into the
5160 corresponding field of TARGET. */
5161 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
5163 enum machine_mode mode;
5164 HOST_WIDE_INT bitsize;
5165 HOST_WIDE_INT bitpos = 0;
5166 tree offset;
5167 rtx to_rtx = target;
5169 /* Just ignore missing fields. We cleared the whole
5170 structure, above, if any fields are missing. */
5171 if (field == 0)
5172 continue;
5174 if (cleared && initializer_zerop (value))
5175 continue;
5177 if (host_integerp (DECL_SIZE (field), 1))
5178 bitsize = tree_low_cst (DECL_SIZE (field), 1);
5179 else
5180 bitsize = -1;
5182 mode = DECL_MODE (field);
5183 if (DECL_BIT_FIELD (field))
5184 mode = VOIDmode;
5186 offset = DECL_FIELD_OFFSET (field);
5187 if (host_integerp (offset, 0)
5188 && host_integerp (bit_position (field), 0))
5190 bitpos = int_bit_position (field);
5191 offset = 0;
5193 else
5194 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
5196 if (offset)
5198 rtx offset_rtx;
5200 offset
5201 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
5202 make_tree (TREE_TYPE (exp),
5203 target));
5205 offset_rtx = expand_normal (offset);
5206 gcc_assert (MEM_P (to_rtx));
5208 #ifdef POINTERS_EXTEND_UNSIGNED
5209 if (GET_MODE (offset_rtx) != Pmode)
5210 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
5211 #else
5212 if (GET_MODE (offset_rtx) != ptr_mode)
5213 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
5214 #endif
5216 to_rtx = offset_address (to_rtx, offset_rtx,
5217 highest_pow2_factor (offset));
5220 #ifdef WORD_REGISTER_OPERATIONS
5221 /* If this initializes a field that is smaller than a
5222 word, at the start of a word, try to widen it to a full
5223 word. This special case allows us to output C++ member
5224 function initializations in a form that the optimizers
5225 can understand. */
5226 if (REG_P (target)
5227 && bitsize < BITS_PER_WORD
5228 && bitpos % BITS_PER_WORD == 0
5229 && GET_MODE_CLASS (mode) == MODE_INT
5230 && TREE_CODE (value) == INTEGER_CST
5231 && exp_size >= 0
5232 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
5234 tree type = TREE_TYPE (value);
5236 if (TYPE_PRECISION (type) < BITS_PER_WORD)
5238 type = lang_hooks.types.type_for_size
5239 (BITS_PER_WORD, TYPE_UNSIGNED (type));
5240 value = fold_convert (type, value);
5243 if (BYTES_BIG_ENDIAN)
5244 value
5245 = fold_build2 (LSHIFT_EXPR, type, value,
5246 build_int_cst (type,
5247 BITS_PER_WORD - bitsize));
5248 bitsize = BITS_PER_WORD;
5249 mode = word_mode;
5251 #endif
5253 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
5254 && DECL_NONADDRESSABLE_P (field))
5256 to_rtx = copy_rtx (to_rtx);
5257 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
5260 store_constructor_field (to_rtx, bitsize, bitpos, mode,
5261 value, type, cleared,
5262 get_alias_set (TREE_TYPE (field)));
5264 break;
5266 case ARRAY_TYPE:
5268 tree value, index;
5269 unsigned HOST_WIDE_INT i;
5270 int need_to_clear;
5271 tree domain;
5272 tree elttype = TREE_TYPE (type);
5273 int const_bounds_p;
5274 HOST_WIDE_INT minelt = 0;
5275 HOST_WIDE_INT maxelt = 0;
5277 domain = TYPE_DOMAIN (type);
5278 const_bounds_p = (TYPE_MIN_VALUE (domain)
5279 && TYPE_MAX_VALUE (domain)
5280 && host_integerp (TYPE_MIN_VALUE (domain), 0)
5281 && host_integerp (TYPE_MAX_VALUE (domain), 0));
5283 /* If we have constant bounds for the range of the type, get them. */
5284 if (const_bounds_p)
5286 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
5287 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
5290 /* If the constructor has fewer elements than the array, clear
5291 the whole array first. Similarly if this is static
5292 constructor of a non-BLKmode object. */
5293 if (cleared)
5294 need_to_clear = 0;
5295 else if (REG_P (target) && TREE_STATIC (exp))
5296 need_to_clear = 1;
5297 else
5299 unsigned HOST_WIDE_INT idx;
5300 tree index, value;
5301 HOST_WIDE_INT count = 0, zero_count = 0;
5302 need_to_clear = ! const_bounds_p;
5304 /* This loop is a more accurate version of the loop in
5305 mostly_zeros_p (it handles RANGE_EXPR in an index). It
5306 is also needed to check for missing elements. */
5307 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
5309 HOST_WIDE_INT this_node_count;
5311 if (need_to_clear)
5312 break;
5314 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5316 tree lo_index = TREE_OPERAND (index, 0);
5317 tree hi_index = TREE_OPERAND (index, 1);
5319 if (! host_integerp (lo_index, 1)
5320 || ! host_integerp (hi_index, 1))
5322 need_to_clear = 1;
5323 break;
5326 this_node_count = (tree_low_cst (hi_index, 1)
5327 - tree_low_cst (lo_index, 1) + 1);
5329 else
5330 this_node_count = 1;
5332 count += this_node_count;
5333 if (mostly_zeros_p (value))
5334 zero_count += this_node_count;
5337 /* Clear the entire array first if there are any missing
5338 elements, or if the incidence of zero elements is >=
5339 75%. */
5340 if (! need_to_clear
5341 && (count < maxelt - minelt + 1
5342 || 4 * zero_count >= 3 * count))
5343 need_to_clear = 1;
5346 if (need_to_clear && size > 0)
5348 if (REG_P (target))
5349 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5350 else
5351 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5352 cleared = 1;
5355 if (!cleared && REG_P (target))
5356 /* Inform later passes that the old value is dead. */
5357 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
5359 /* Store each element of the constructor into the
5360 corresponding element of TARGET, determined by counting the
5361 elements. */
5362 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
5364 enum machine_mode mode;
5365 HOST_WIDE_INT bitsize;
5366 HOST_WIDE_INT bitpos;
5367 int unsignedp;
5368 rtx xtarget = target;
5370 if (cleared && initializer_zerop (value))
5371 continue;
5373 unsignedp = TYPE_UNSIGNED (elttype);
5374 mode = TYPE_MODE (elttype);
5375 if (mode == BLKmode)
5376 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
5377 ? tree_low_cst (TYPE_SIZE (elttype), 1)
5378 : -1);
5379 else
5380 bitsize = GET_MODE_BITSIZE (mode);
5382 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5384 tree lo_index = TREE_OPERAND (index, 0);
5385 tree hi_index = TREE_OPERAND (index, 1);
5386 rtx index_r, pos_rtx;
5387 HOST_WIDE_INT lo, hi, count;
5388 tree position;
5390 /* If the range is constant and "small", unroll the loop. */
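	      /* "Small" here (see the test below) means: always for a
		 non-MEM target, and for a MEM target at most two
		 elements, or at most 40 * 8 bits, i.e. 40 bytes, of
		 element data in total.  */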
5391 if (const_bounds_p
5392 && host_integerp (lo_index, 0)
5393 && host_integerp (hi_index, 0)
5394 && (lo = tree_low_cst (lo_index, 0),
5395 hi = tree_low_cst (hi_index, 0),
5396 count = hi - lo + 1,
5397 (!MEM_P (target)
5398 || count <= 2
5399 || (host_integerp (TYPE_SIZE (elttype), 1)
5400 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
5401 <= 40 * 8)))))
5403 lo -= minelt; hi -= minelt;
5404 for (; lo <= hi; lo++)
5406 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
5408 if (MEM_P (target)
5409 && !MEM_KEEP_ALIAS_SET_P (target)
5410 && TREE_CODE (type) == ARRAY_TYPE
5411 && TYPE_NONALIASED_COMPONENT (type))
5413 target = copy_rtx (target);
5414 MEM_KEEP_ALIAS_SET_P (target) = 1;
5417 store_constructor_field
5418 (target, bitsize, bitpos, mode, value, type, cleared,
5419 get_alias_set (elttype));
5422 else
5424 rtx loop_start = gen_label_rtx ();
5425 rtx loop_end = gen_label_rtx ();
5426 tree exit_cond;
5428 expand_normal (hi_index);
5429 unsignedp = TYPE_UNSIGNED (domain);
5431 index = build_decl (VAR_DECL, NULL_TREE, domain);
5433 index_r
5434 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
5435 &unsignedp, 0));
5436 SET_DECL_RTL (index, index_r);
5437 store_expr (lo_index, index_r, 0, false);
5439 /* Build the head of the loop. */
5440 do_pending_stack_adjust ();
5441 emit_label (loop_start);
5443 /* Assign value to element index. */
5444 position =
5445 fold_convert (ssizetype,
5446 fold_build2 (MINUS_EXPR,
5447 TREE_TYPE (index),
5448 index,
5449 TYPE_MIN_VALUE (domain)));
5451 position =
5452 size_binop (MULT_EXPR, position,
5453 fold_convert (ssizetype,
5454 TYPE_SIZE_UNIT (elttype)));
5456 pos_rtx = expand_normal (position);
5457 xtarget = offset_address (target, pos_rtx,
5458 highest_pow2_factor (position));
5459 xtarget = adjust_address (xtarget, mode, 0);
5460 if (TREE_CODE (value) == CONSTRUCTOR)
5461 store_constructor (value, xtarget, cleared,
5462 bitsize / BITS_PER_UNIT);
5463 else
5464 store_expr (value, xtarget, 0, false);
5466 /* Generate a conditional jump to exit the loop. */
5467 exit_cond = build2 (LT_EXPR, integer_type_node,
5468 index, hi_index);
5469 jumpif (exit_cond, loop_end);
5471 /* Update the loop counter, and jump to the head of
5472 the loop. */
5473 expand_assignment (index,
5474 build2 (PLUS_EXPR, TREE_TYPE (index),
5475 index, integer_one_node),
5476 false);
5478 emit_jump (loop_start);
5480 /* Build the end of the loop. */
5481 emit_label (loop_end);
5484 else if ((index != 0 && ! host_integerp (index, 0))
5485 || ! host_integerp (TYPE_SIZE (elttype), 1))
5487 tree position;
5489 if (index == 0)
5490 index = ssize_int (1);
5492 if (minelt)
5493 index = fold_convert (ssizetype,
5494 fold_build2 (MINUS_EXPR,
5495 TREE_TYPE (index),
5496 index,
5497 TYPE_MIN_VALUE (domain)));
5499 position =
5500 size_binop (MULT_EXPR, index,
5501 fold_convert (ssizetype,
5502 TYPE_SIZE_UNIT (elttype)));
5503 xtarget = offset_address (target,
5504 expand_normal (position),
5505 highest_pow2_factor (position));
5506 xtarget = adjust_address (xtarget, mode, 0);
5507 store_expr (value, xtarget, 0, false);
5509 else
5511 if (index != 0)
5512 bitpos = ((tree_low_cst (index, 0) - minelt)
5513 * tree_low_cst (TYPE_SIZE (elttype), 1));
5514 else
5515 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5517 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
5518 && TREE_CODE (type) == ARRAY_TYPE
5519 && TYPE_NONALIASED_COMPONENT (type))
5521 target = copy_rtx (target);
5522 MEM_KEEP_ALIAS_SET_P (target) = 1;
5524 store_constructor_field (target, bitsize, bitpos, mode, value,
5525 type, cleared, get_alias_set (elttype));
5528 break;
5531 case VECTOR_TYPE:
5533 unsigned HOST_WIDE_INT idx;
5534 constructor_elt *ce;
5535 int i;
5536 int need_to_clear;
5537 int icode = 0;
5538 tree elttype = TREE_TYPE (type);
5539 int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
5540 enum machine_mode eltmode = TYPE_MODE (elttype);
5541 HOST_WIDE_INT bitsize;
5542 HOST_WIDE_INT bitpos;
5543 rtvec vector = NULL;
5544 unsigned n_elts;
5546 gcc_assert (eltmode != BLKmode);
5548 n_elts = TYPE_VECTOR_SUBPARTS (type);
5549 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
5551 enum machine_mode mode = GET_MODE (target);
5553 icode = (int) optab_handler (vec_init_optab, mode)->insn_code;
5554 if (icode != CODE_FOR_nothing)
5556 unsigned int i;
5558 vector = rtvec_alloc (n_elts);
5559 for (i = 0; i < n_elts; i++)
5560 RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
5564 /* If the constructor has fewer elements than the vector,
5565 clear the whole array first. Similarly if this is static
5566 constructor of a non-BLKmode object. */
5567 if (cleared)
5568 need_to_clear = 0;
5569 else if (REG_P (target) && TREE_STATIC (exp))
5570 need_to_clear = 1;
5571 else
5573 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
5574 tree value;
5576 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
5578 int n_elts_here = tree_low_cst
5579 (int_const_binop (TRUNC_DIV_EXPR,
5580 TYPE_SIZE (TREE_TYPE (value)),
5581 TYPE_SIZE (elttype), 0), 1);
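	      /* E.g. (BLKmode case, where vector constructors may be
		 built from smaller vectors) a V4SI sub-vector appearing
		 as one element of a V8SI constructor contributes
		 N_ELTS_HERE == 4.  */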
5583 count += n_elts_here;
5584 if (mostly_zeros_p (value))
5585 zero_count += n_elts_here;
5588 /* Clear the entire vector first if there are any missing elements,
5589 or if the incidence of zero elements is >= 75%. */
5590 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
5593 if (need_to_clear && size > 0 && !vector)
5595 if (REG_P (target))
5596 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5597 else
5598 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5599 cleared = 1;
5602 /* Inform later passes that the old value is dead. */
5603 if (!cleared && !vector && REG_P (target))
5604 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5606 /* Store each element of the constructor into the corresponding
5607 element of TARGET, determined by counting the elements. */
5608 for (idx = 0, i = 0;
5609 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
5610 idx++, i += bitsize / elt_size)
5612 HOST_WIDE_INT eltpos;
5613 tree value = ce->value;
5615 bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
5616 if (cleared && initializer_zerop (value))
5617 continue;
5619 if (ce->index)
5620 eltpos = tree_low_cst (ce->index, 1);
5621 else
5622 eltpos = i;
5624 if (vector)
5626 /* Vector CONSTRUCTORs should only be built from smaller
5627 vectors in the case of BLKmode vectors. */
5628 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
5629 RTVEC_ELT (vector, eltpos)
5630 = expand_normal (value);
5632 else
5634 enum machine_mode value_mode =
5635 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
5636 ? TYPE_MODE (TREE_TYPE (value))
5637 : eltmode;
5638 bitpos = eltpos * elt_size;
5639 store_constructor_field (target, bitsize, bitpos,
5640 value_mode, value, type,
5641 cleared, get_alias_set (elttype));
5645 if (vector)
5646 emit_insn (GEN_FCN (icode)
5647 (target,
5648 gen_rtx_PARALLEL (GET_MODE (target), vector)));
5649 break;
5652 default:
5653 gcc_unreachable ();
5657 /* Store the value of EXP (an expression tree)
5658 into a subfield of TARGET which has mode MODE and occupies
5659 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5660 If MODE is VOIDmode, it means that we are storing into a bit-field.
5662 Always return const0_rtx unless we have something particular to
5663 return.
5665 TYPE is the type of the underlying object.
5667 ALIAS_SET is the alias set for the destination. This value will
5668 (in general) be different from that for TARGET, since TARGET is a
5669 reference to the containing structure.
5671 If NONTEMPORAL is true, try generating a nontemporal store. */
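/* Editor's sketch of a typical call (hypothetical values, not from the
   original source): storing EXP into an 8-bit field that starts 16 bits
   into TARGET could look like

     store_field (target, 8, 16, QImode, exp, TREE_TYPE (exp),
                  get_alias_set (field_type), false);

   where FIELD_TYPE stands for the field's own type, so the alias set is
   that of the field rather than of the containing structure.  */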
5673 static rtx
5674 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5675 enum machine_mode mode, tree exp, tree type,
5676 alias_set_type alias_set, bool nontemporal)
5678 HOST_WIDE_INT width_mask = 0;
5680 if (TREE_CODE (exp) == ERROR_MARK)
5681 return const0_rtx;
5683 /* If we have nothing to store, do nothing unless the expression has
5684 side-effects. */
5685 if (bitsize == 0)
5686 return expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5687 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5688 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5690 /* If we are storing into an unaligned field of an aligned union that is
5691 in a register, we may have the mode of TARGET being an integer mode but
5692 MODE == BLKmode. In that case, get an aligned object whose size and
5693 alignment are the same as TARGET and store TARGET into it (we can avoid
5694 the store if the field being stored is the entire width of TARGET). Then
5695 call ourselves recursively to store the field into a BLKmode version of
5696 that object. Finally, load from the object into TARGET. This is not
5697 very efficient in general, but should only be slightly more expensive
5698 than the otherwise-required unaligned accesses. Perhaps this can be
5699 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5700 twice, once with emit_move_insn and once via store_field. */
5702 if (mode == BLKmode
5703 && (REG_P (target) || GET_CODE (target) == SUBREG))
5705 rtx object = assign_temp (type, 0, 1, 1);
5706 rtx blk_object = adjust_address (object, BLKmode, 0);
5708 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5709 emit_move_insn (object, target);
5711 store_field (blk_object, bitsize, bitpos, mode, exp, type, alias_set,
5712 nontemporal);
5714 emit_move_insn (target, object);
5716 /* We want to return the BLKmode version of the data. */
5717 return blk_object;
5720 if (GET_CODE (target) == CONCAT)
5722 /* We're storing into a struct containing a single __complex. */
5724 gcc_assert (!bitpos);
5725 return store_expr (exp, target, 0, nontemporal);
5728 /* If the structure is in a register or if the component
5729 is a bit field, we cannot use addressing to access it.
5730 Use bit-field techniques or SUBREG to store in it. */
5732 if (mode == VOIDmode
5733 || (mode != BLKmode && ! direct_store[(int) mode]
5734 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5735 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5736 || REG_P (target)
5737 || GET_CODE (target) == SUBREG
5738 /* If the field isn't aligned enough to store as an ordinary memref,
5739 store it as a bit field. */
5740 || (mode != BLKmode
5741 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5742 || bitpos % GET_MODE_ALIGNMENT (mode))
5743 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5744 || (bitpos % BITS_PER_UNIT != 0)))
5745 /* If the RHS and the field both have constant size and the size of
5746 the RHS isn't the same as that of the bitfield, we must use bitfield
5747 operations. */
5748 || (bitsize >= 0
5749 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5750 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5752 rtx temp;
5754 /* If EXP is a NOP_EXPR of precision less than its mode, then that
5755 implies a mask operation. If the precision is the same size as
5756 the field we're storing into, that mask is redundant. This is
5757 particularly common with bit field assignments generated by the
5758 C front end. */
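/* (Editor's example, not in the original source: for an assignment to a
   3-bit bit-field, the C front end wraps the RHS in a NOP_EXPR to a
   3-bit integral type whose mode is still, say, SImode.  Since exactly
   bitsize == 3 bits are stored anyway, the masking implied by that
   conversion is redundant and the unconverted operand can be used.)  */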
5759 if (TREE_CODE (exp) == NOP_EXPR)
5761 tree type = TREE_TYPE (exp);
5762 if (INTEGRAL_TYPE_P (type)
5763 && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
5764 && bitsize == TYPE_PRECISION (type))
5766 type = TREE_TYPE (TREE_OPERAND (exp, 0));
5767 if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
5768 exp = TREE_OPERAND (exp, 0);
5772 temp = expand_normal (exp);
5774 /* If BITSIZE is narrower than the size of the type of EXP
5775 we will be narrowing TEMP. Normally, what's wanted are the
5776 low-order bits. However, if EXP's type is a record and this is a
5777 big-endian machine, we want the upper BITSIZE bits. */
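/* (Editor's illustration, assumed sizes: narrowing a 32-bit SImode
   record value to a 16-bit field on a big-endian target shifts right
   by 32 - 16 == 16 bits, so the wanted upper bits land in the
   low-order position that the store below expects.)  */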
5778 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5779 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5780 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5781 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5782 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5783 - bitsize),
5784 NULL_RTX, 1);
5786 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5787 MODE. */
5788 if (mode != VOIDmode && mode != BLKmode
5789 && mode != TYPE_MODE (TREE_TYPE (exp)))
5790 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5792 /* If the modes of TARGET and TEMP are both BLKmode, both
5793 must be in memory and BITPOS must be aligned on a byte
5794 boundary. If so, we simply do a block copy. */
5795 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5797 gcc_assert (MEM_P (target) && MEM_P (temp)
5798 && !(bitpos % BITS_PER_UNIT));
5800 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5801 emit_block_move (target, temp,
5802 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5803 / BITS_PER_UNIT),
5804 BLOCK_OP_NORMAL);
5806 return const0_rtx;
5809 /* Store the value in the bitfield. */
5810 store_bit_field (target, bitsize, bitpos, mode, temp);
5812 return const0_rtx;
5814 else
5816 /* Now build a reference to just the desired component. */
5817 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5819 if (to_rtx == target)
5820 to_rtx = copy_rtx (to_rtx);
5822 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5823 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5824 set_mem_alias_set (to_rtx, alias_set);
5826 return store_expr (exp, to_rtx, 0, nontemporal);
5830 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5831 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5832 codes and find the ultimate containing object, which we return.
5834 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5835 bit position, and *PUNSIGNEDP to the signedness of the field.
5836 If the position of the field is variable, we store a tree
5837 giving the variable offset (in units) in *POFFSET.
5838 This offset is in addition to the bit position.
5839 If the position is not variable, we store 0 in *POFFSET.
5841 If any of the extraction expressions is volatile,
5842 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5844 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5845 is a mode that can be used to access the field. In that case, *PBITSIZE
5846 is redundant.
5848 If the field describes a variable-sized object, *PMODE is set to
5849 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5850 this case, but the address of the object can be found.
5852 If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
5853 look through nodes that serve as markers of a greater alignment than
5854 the one that can be deduced from the expression. These nodes make it
5855 possible for front-ends to prevent temporaries from being created by
5856 the middle-end on alignment considerations. For that purpose, the
5857 normal operating mode at the high level is to always pass FALSE so that
5858 the ultimate containing object is really returned; moreover, the
5859 associated predicate handled_component_p will always return TRUE
5860 on these nodes, thus indicating that they are essentially handled
5861 by get_inner_reference. TRUE should only be passed when the caller
5862 is scanning the expression in order to build another representation
5863 and specifically knows how to handle these nodes; as such, this is
5864 the normal operating mode in the RTL expanders. */
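/* Editor's usage sketch (hypothetical caller, not part of the original
   source):

     HOST_WIDE_INT bitsize, bitpos;
     tree offset;
     enum machine_mode mode;
     int unsignedp, volatilep = 0;
     tree base = get_inner_reference (exp, &bitsize, &bitpos, &offset,
                                      &mode, &unsignedp, &volatilep,
                                      false);

   For EXP == a.b[i].c, BASE is the VAR_DECL for "a", *PBITPOS
   accumulates the constant bit offsets of "b" and "c", and *POFFSET
   holds the variable part "i * size of element" in units, or NULL_TREE
   when the whole position folds to a constant.  */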
5866 tree
5867 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5868 HOST_WIDE_INT *pbitpos, tree *poffset,
5869 enum machine_mode *pmode, int *punsignedp,
5870 int *pvolatilep, bool keep_aligning)
5872 tree size_tree = 0;
5873 enum machine_mode mode = VOIDmode;
5874 tree offset = size_zero_node;
5875 tree bit_offset = bitsize_zero_node;
5877 /* First get the mode, signedness, and size. We do this from just the
5878 outermost expression. */
5879 if (TREE_CODE (exp) == COMPONENT_REF)
5881 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5882 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5883 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5885 *punsignedp = DECL_UNSIGNED (TREE_OPERAND (exp, 1));
5887 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5889 size_tree = TREE_OPERAND (exp, 1);
5890 *punsignedp = BIT_FIELD_REF_UNSIGNED (exp);
5892 /* For vector types, with the correct size of access, use the mode of
5893 inner type. */
5894 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == VECTOR_TYPE
5895 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))
5896 && tree_int_cst_equal (size_tree, TYPE_SIZE (TREE_TYPE (exp))))
5897 mode = TYPE_MODE (TREE_TYPE (exp));
5899 else
5901 mode = TYPE_MODE (TREE_TYPE (exp));
5902 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
5904 if (mode == BLKmode)
5905 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5906 else
5907 *pbitsize = GET_MODE_BITSIZE (mode);
5910 if (size_tree != 0)
5912 if (! host_integerp (size_tree, 1))
5913 mode = BLKmode, *pbitsize = -1;
5914 else
5915 *pbitsize = tree_low_cst (size_tree, 1);
5918 *pmode = mode;
5920 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5921 and find the ultimate containing object. */
5922 while (1)
5924 switch (TREE_CODE (exp))
5926 case BIT_FIELD_REF:
5927 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5928 TREE_OPERAND (exp, 2));
5929 break;
5931 case COMPONENT_REF:
5933 tree field = TREE_OPERAND (exp, 1);
5934 tree this_offset = component_ref_field_offset (exp);
5936 /* If this field hasn't been filled in yet, don't go past it.
5937 This should only happen when folding expressions made during
5938 type construction. */
5939 if (this_offset == 0)
5940 break;
5942 offset = size_binop (PLUS_EXPR, offset, this_offset);
5943 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5944 DECL_FIELD_BIT_OFFSET (field));
5946 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5948 break;
5950 case ARRAY_REF:
5951 case ARRAY_RANGE_REF:
5953 tree index = TREE_OPERAND (exp, 1);
5954 tree low_bound = array_ref_low_bound (exp);
5955 tree unit_size = array_ref_element_size (exp);
5957 /* We assume all arrays have sizes that are a multiple of a byte.
5958 First subtract the lower bound, if any, in the type of the
5959 index, then convert to sizetype and multiply by the size of
5960 the array element. */
5961 if (! integer_zerop (low_bound))
5962 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
5963 index, low_bound);
5965 offset = size_binop (PLUS_EXPR, offset,
5966 size_binop (MULT_EXPR,
5967 fold_convert (sizetype, index),
5968 unit_size));
5970 break;
5972 case REALPART_EXPR:
5973 break;
5975 case IMAGPART_EXPR:
5976 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5977 bitsize_int (*pbitsize));
5978 break;
5980 case VIEW_CONVERT_EXPR:
5981 if (keep_aligning && STRICT_ALIGNMENT
5982 && (TYPE_ALIGN (TREE_TYPE (exp))
5983 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
5984 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5985 < BIGGEST_ALIGNMENT)
5986 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5987 || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
5988 goto done;
5989 break;
5991 default:
5992 goto done;
5995 /* If any reference in the chain is volatile, the effect is volatile. */
5996 if (TREE_THIS_VOLATILE (exp))
5997 *pvolatilep = 1;
5999 exp = TREE_OPERAND (exp, 0);
6001 done:
6003 /* If OFFSET is constant, see if we can return the whole thing as a
6004 constant bit position. Make sure to handle overflow during
6005 this conversion. */
6006 if (host_integerp (offset, 0))
6008 double_int tem = double_int_mul (tree_to_double_int (offset),
6009 uhwi_to_double_int (BITS_PER_UNIT));
6010 tem = double_int_add (tem, tree_to_double_int (bit_offset));
6011 if (double_int_fits_in_shwi_p (tem))
6013 *pbitpos = double_int_to_shwi (tem);
6014 *poffset = NULL_TREE;
6015 return exp;
6019 /* Otherwise, split it up. */
6020 *pbitpos = tree_low_cst (bit_offset, 0);
6021 *poffset = offset;
6023 return exp;
6026 /* Given an expression EXP that may be a COMPONENT_REF or an ARRAY_REF,
6027 look for whether EXP or any nested component-ref within EXP is marked
6028 as PACKED. */
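/* Editor's example (assumed declarations, not from the source): given

     struct s { int x; } __attribute__ ((packed)) *p;

   contains_packed_reference returns true for the COMPONENT_REF p->x,
   since the field and its containing type are marked packed.  */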
6030 bool
6031 contains_packed_reference (const_tree exp)
6033 bool packed_p = false;
6035 while (1)
6037 switch (TREE_CODE (exp))
6039 case COMPONENT_REF:
6041 tree field = TREE_OPERAND (exp, 1);
6042 packed_p = DECL_PACKED (field)
6043 || TYPE_PACKED (TREE_TYPE (field))
6044 || TYPE_PACKED (TREE_TYPE (exp));
6045 if (packed_p)
6046 goto done;
6048 break;
6050 case BIT_FIELD_REF:
6051 case ARRAY_REF:
6052 case ARRAY_RANGE_REF:
6053 case REALPART_EXPR:
6054 case IMAGPART_EXPR:
6055 case VIEW_CONVERT_EXPR:
6056 break;
6058 default:
6059 goto done;
6061 exp = TREE_OPERAND (exp, 0);
6063 done:
6064 return packed_p;
6067 /* Return a tree of sizetype representing the size, in bytes, of the element
6068 of EXP, an ARRAY_REF. */
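/* (Editor's worked example, assumed numbers: for an element type of 12
   bytes with 4-byte alignment, an ARRAY_REF that supplies the size
   operand stores 3 there, and 3 * TYPE_ALIGN_UNIT == 3 * 4 == 12
   bytes.)  */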
6070 tree
6071 array_ref_element_size (tree exp)
6073 tree aligned_size = TREE_OPERAND (exp, 3);
6074 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6076 /* If a size was specified in the ARRAY_REF, it's the size measured
6077 in alignment units of the element type. So multiply by that value. */
6078 if (aligned_size)
6080 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6081 sizetype from another type of the same width and signedness. */
6082 if (TREE_TYPE (aligned_size) != sizetype)
6083 aligned_size = fold_convert (sizetype, aligned_size);
6084 return size_binop (MULT_EXPR, aligned_size,
6085 size_int (TYPE_ALIGN_UNIT (elmt_type)));
6088 /* Otherwise, take the size from that of the element type. Substitute
6089 any PLACEHOLDER_EXPR that we have. */
6090 else
6091 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
6094 /* Return a tree representing the lower bound of the array mentioned in
6095 EXP, an ARRAY_REF. */
6097 tree
6098 array_ref_low_bound (tree exp)
6100 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
6102 /* If a lower bound is specified in EXP, use it. */
6103 if (TREE_OPERAND (exp, 2))
6104 return TREE_OPERAND (exp, 2);
6106 /* Otherwise, if there is a domain type and it has a lower bound, use it,
6107 substituting for a PLACEHOLDER_EXPR as needed. */
6108 if (domain_type && TYPE_MIN_VALUE (domain_type))
6109 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
6111 /* Otherwise, return a zero of the appropriate type. */
6112 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
6115 /* Return a tree representing the upper bound of the array mentioned in
6116 EXP, an ARRAY_REF. */
6118 tree
6119 array_ref_up_bound (tree exp)
6121 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
6123 /* If there is a domain type and it has an upper bound, use it, substituting
6124 for a PLACEHOLDER_EXPR as needed. */
6125 if (domain_type && TYPE_MAX_VALUE (domain_type))
6126 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
6128 /* Otherwise fail. */
6129 return NULL_TREE;
6132 /* Return a tree representing the offset, in bytes, of the field referenced
6133 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
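/* (Editor's worked example, assumed numbers: with DECL_OFFSET_ALIGN of
   64 bits, an explicit offset operand of 2 denotes 2 * (64 / 8) == 16
   bytes from the start of the record.)  */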
6135 tree
6136 component_ref_field_offset (tree exp)
6138 tree aligned_offset = TREE_OPERAND (exp, 2);
6139 tree field = TREE_OPERAND (exp, 1);
6141 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
6142 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
6143 value. */
6144 if (aligned_offset)
6146 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6147 sizetype from another type of the same width and signedness. */
6148 if (TREE_TYPE (aligned_offset) != sizetype)
6149 aligned_offset = fold_convert (sizetype, aligned_offset);
6150 return size_binop (MULT_EXPR, aligned_offset,
6151 size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT));
6154 /* Otherwise, take the offset from that of the field. Substitute
6155 any PLACEHOLDER_EXPR that we have. */
6156 else
6157 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
6160 /* Return 1 if T is an expression that get_inner_reference handles. */
6162 int
6163 handled_component_p (const_tree t)
6165 switch (TREE_CODE (t))
6167 case BIT_FIELD_REF:
6168 case COMPONENT_REF:
6169 case ARRAY_REF:
6170 case ARRAY_RANGE_REF:
6171 case VIEW_CONVERT_EXPR:
6172 case REALPART_EXPR:
6173 case IMAGPART_EXPR:
6174 return 1;
6176 default:
6177 return 0;
6181 /* Given an rtx VALUE that may contain additions and multiplications, return
6182 an equivalent value that just refers to a register, memory, or constant.
6183 This is done by generating instructions to perform the arithmetic and
6184 returning a pseudo-register containing the value.
6186 The returned value may be a REG, SUBREG, MEM or constant. */
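/* Editor's sketch (hypothetical operands, not from the original
   source):

     rtx sum = gen_rtx_PLUS (SImode, some_reg, GEN_INT (4));
     rtx val = force_operand (sum, NULL_RTX);

   emits an add instruction and returns a pseudo register holding
   SOME_REG + 4, which is then valid wherever a general operand is
   required.  */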
6188 rtx
6189 force_operand (rtx value, rtx target)
6191 rtx op1, op2;
6192 /* Use subtarget as the target for operand 0 of a binary operation. */
6193 rtx subtarget = get_subtarget (target);
6194 enum rtx_code code = GET_CODE (value);
6196 /* Check for a subreg applied to an expression produced by the loop optimizer. */
6197 if (code == SUBREG
6198 && !REG_P (SUBREG_REG (value))
6199 && !MEM_P (SUBREG_REG (value)))
6201 value
6202 = simplify_gen_subreg (GET_MODE (value),
6203 force_reg (GET_MODE (SUBREG_REG (value)),
6204 force_operand (SUBREG_REG (value),
6205 NULL_RTX)),
6206 GET_MODE (SUBREG_REG (value)),
6207 SUBREG_BYTE (value));
6208 code = GET_CODE (value);
6211 /* Check for a PIC address load. */
6212 if ((code == PLUS || code == MINUS)
6213 && XEXP (value, 0) == pic_offset_table_rtx
6214 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
6215 || GET_CODE (XEXP (value, 1)) == LABEL_REF
6216 || GET_CODE (XEXP (value, 1)) == CONST))
6218 if (!subtarget)
6219 subtarget = gen_reg_rtx (GET_MODE (value));
6220 emit_move_insn (subtarget, value);
6221 return subtarget;
6224 if (ARITHMETIC_P (value))
6226 op2 = XEXP (value, 1);
6227 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
6228 subtarget = 0;
6229 if (code == MINUS && GET_CODE (op2) == CONST_INT)
6231 code = PLUS;
6232 op2 = negate_rtx (GET_MODE (value), op2);
6235 /* Check for an addition with OP2 a constant integer and our first
6236 operand a PLUS of a virtual register and something else. In that
6237 case, we want to emit the sum of the virtual register and the
6238 constant first and then add the other value. This allows virtual
6239 register instantiation to simply modify the constant rather than
6240 creating another one around this addition. */
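/* (Editor's illustration, not in the original source: given
   (plus (plus (reg virtual-stack-vars) (reg X)) (const_int 8)),
   we first emit virtual-stack-vars + 8, which instantiation can
   later rewrite as a single frame-pointer offset, and only then
   add in X.)  */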
6241 if (code == PLUS && GET_CODE (op2) == CONST_INT
6242 && GET_CODE (XEXP (value, 0)) == PLUS
6243 && REG_P (XEXP (XEXP (value, 0), 0))
6244 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
6245 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
6247 rtx temp = expand_simple_binop (GET_MODE (value), code,
6248 XEXP (XEXP (value, 0), 0), op2,
6249 subtarget, 0, OPTAB_LIB_WIDEN);
6250 return expand_simple_binop (GET_MODE (value), code, temp,
6251 force_operand (XEXP (XEXP (value,
6252 0), 1), 0),
6253 target, 0, OPTAB_LIB_WIDEN);
6256 op1 = force_operand (XEXP (value, 0), subtarget);
6257 op2 = force_operand (op2, NULL_RTX);
6258 switch (code)
6260 case MULT:
6261 return expand_mult (GET_MODE (value), op1, op2, target, 1);
6262 case DIV:
6263 if (!INTEGRAL_MODE_P (GET_MODE (value)))
6264 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6265 target, 1, OPTAB_LIB_WIDEN);
6266 else
6267 return expand_divmod (0,
6268 FLOAT_MODE_P (GET_MODE (value))
6269 ? RDIV_EXPR : TRUNC_DIV_EXPR,
6270 GET_MODE (value), op1, op2, target, 0);
6271 case MOD:
6272 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6273 target, 0);
6274 case UDIV:
6275 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
6276 target, 1);
6277 case UMOD:
6278 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6279 target, 1);
6280 case ASHIFTRT:
6281 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6282 target, 0, OPTAB_LIB_WIDEN);
6283 default:
6284 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6285 target, 1, OPTAB_LIB_WIDEN);
6288 if (UNARY_P (value))
6290 if (!target)
6291 target = gen_reg_rtx (GET_MODE (value));
6292 op1 = force_operand (XEXP (value, 0), NULL_RTX);
6293 switch (code)
6295 case ZERO_EXTEND:
6296 case SIGN_EXTEND:
6297 case TRUNCATE:
6298 case FLOAT_EXTEND:
6299 case FLOAT_TRUNCATE:
6300 convert_move (target, op1, code == ZERO_EXTEND);
6301 return target;
6303 case FIX:
6304 case UNSIGNED_FIX:
6305 expand_fix (target, op1, code == UNSIGNED_FIX);
6306 return target;
6308 case FLOAT:
6309 case UNSIGNED_FLOAT:
6310 expand_float (target, op1, code == UNSIGNED_FLOAT);
6311 return target;
6313 default:
6314 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
6318 #ifdef INSN_SCHEDULING
6319 /* On machines that have insn scheduling, we want all memory references to be
6320 explicit, so we need to deal with such paradoxical SUBREGs. */
6321 if (GET_CODE (value) == SUBREG && MEM_P (SUBREG_REG (value))
6322 && (GET_MODE_SIZE (GET_MODE (value))
6323 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
6324 value
6325 = simplify_gen_subreg (GET_MODE (value),
6326 force_reg (GET_MODE (SUBREG_REG (value)),
6327 force_operand (SUBREG_REG (value),
6328 NULL_RTX)),
6329 GET_MODE (SUBREG_REG (value)),
6330 SUBREG_BYTE (value));
6331 #endif
6333 return value;
6336 /* Subroutine of expand_expr: return nonzero iff there is no way that
6337 EXP can reference X, which is being modified. TOP_P is nonzero if this
6338 call is going to be used to determine whether we need a temporary
6339 for EXP, as opposed to a recursive call to this function.
6341 It is always safe for this routine to return zero since it merely
6342 searches for optimization opportunities. */
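/* (Editor's note: a representative caller is expand_operands below,
   which does "if (! safe_from_p (target, exp1, 1)) target = 0;" so
   that TARGET is only suggested when expanding EXP1 cannot clobber
   it.)  */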
6344 static int
6345 safe_from_p (const_rtx x, tree exp, int top_p)
6347 rtx exp_rtl = 0;
6348 int i, nops;
6350 if (x == 0
6351 /* If EXP has varying size, we MUST use a target since we currently
6352 have no way of allocating temporaries of variable size
6353 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
6354 So we assume here that something at a higher level has prevented a
6355 clash. This is somewhat bogus, but the best we can do. Only
6356 do this when X is BLKmode and when we are at the top level. */
6357 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6358 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
6359 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
6360 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
6361 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
6362 != INTEGER_CST)
6363 && GET_MODE (x) == BLKmode)
6364 /* If X is in the outgoing argument area, it is always safe. */
6365 || (MEM_P (x)
6366 && (XEXP (x, 0) == virtual_outgoing_args_rtx
6367 || (GET_CODE (XEXP (x, 0)) == PLUS
6368 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
6369 return 1;
6371 /* If this is a subreg of a hard register, declare it unsafe; otherwise,
6372 find the underlying pseudo. */
6373 if (GET_CODE (x) == SUBREG)
6375 x = SUBREG_REG (x);
6376 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6377 return 0;
6380 /* Now look at our tree code and possibly recurse. */
6381 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
6383 case tcc_declaration:
6384 exp_rtl = DECL_RTL_IF_SET (exp);
6385 break;
6387 case tcc_constant:
6388 return 1;
6390 case tcc_exceptional:
6391 if (TREE_CODE (exp) == TREE_LIST)
6393 while (1)
6395 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
6396 return 0;
6397 exp = TREE_CHAIN (exp);
6398 if (!exp)
6399 return 1;
6400 if (TREE_CODE (exp) != TREE_LIST)
6401 return safe_from_p (x, exp, 0);
6404 else if (TREE_CODE (exp) == CONSTRUCTOR)
6406 constructor_elt *ce;
6407 unsigned HOST_WIDE_INT idx;
6409 for (idx = 0;
6410 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
6411 idx++)
6412 if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
6413 || !safe_from_p (x, ce->value, 0))
6414 return 0;
6415 return 1;
6417 else if (TREE_CODE (exp) == ERROR_MARK)
6418 return 1; /* An already-visited SAVE_EXPR? */
6419 else
6420 return 0;
6422 case tcc_statement:
6423 /* The only case we look at here is the DECL_INITIAL inside a
6424 DECL_EXPR. */
6425 return (TREE_CODE (exp) != DECL_EXPR
6426 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
6427 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
6428 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
6430 case tcc_binary:
6431 case tcc_comparison:
6432 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
6433 return 0;
6434 /* Fall through. */
6436 case tcc_unary:
6437 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6439 case tcc_expression:
6440 case tcc_reference:
6441 case tcc_vl_exp:
6442 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
6443 the expression. If it is set, we conflict iff we are that rtx or
6444 both are in memory. Otherwise, we check all operands of the
6445 expression recursively. */
6447 switch (TREE_CODE (exp))
6449 case ADDR_EXPR:
6450 /* If the operand is static or we are static, we can't conflict.
6451 Likewise if we don't conflict with the operand at all. */
6452 if (staticp (TREE_OPERAND (exp, 0))
6453 || TREE_STATIC (exp)
6454 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6455 return 1;
6457 /* Otherwise, the only way this can conflict is if we are taking
6458 the address of a DECL whose address is part of X, which is
6459 very rare. */
6460 exp = TREE_OPERAND (exp, 0);
6461 if (DECL_P (exp))
6463 if (!DECL_RTL_SET_P (exp)
6464 || !MEM_P (DECL_RTL (exp)))
6465 return 0;
6466 else
6467 exp_rtl = XEXP (DECL_RTL (exp), 0);
6469 break;
6471 case MISALIGNED_INDIRECT_REF:
6472 case ALIGN_INDIRECT_REF:
6473 case INDIRECT_REF:
6474 if (MEM_P (x)
6475 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
6476 get_alias_set (exp)))
6477 return 0;
6478 break;
6480 case CALL_EXPR:
6481 /* Assume that the call will clobber all hard registers and
6482 all of memory. */
6483 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6484 || MEM_P (x))
6485 return 0;
6486 break;
6488 case WITH_CLEANUP_EXPR:
6489 case CLEANUP_POINT_EXPR:
6490 /* Lowered by gimplify.c. */
6491 gcc_unreachable ();
6493 case SAVE_EXPR:
6494 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6496 default:
6497 break;
6500 /* If we have an rtx, we do not need to scan our operands. */
6501 if (exp_rtl)
6502 break;
6504 nops = TREE_OPERAND_LENGTH (exp);
6505 for (i = 0; i < nops; i++)
6506 if (TREE_OPERAND (exp, i) != 0
6507 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
6508 return 0;
6510 break;
6512 case tcc_type:
6513 /* Should never get a type here. */
6514 gcc_unreachable ();
6516 case tcc_gimple_stmt:
6517 gcc_unreachable ();
6520 /* If we have an rtl, find any enclosed object. Then see if we conflict
6521 with it. */
6522 if (exp_rtl)
6524 if (GET_CODE (exp_rtl) == SUBREG)
6526 exp_rtl = SUBREG_REG (exp_rtl);
6527 if (REG_P (exp_rtl)
6528 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
6529 return 0;
6532 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
6533 are memory and they conflict. */
6534 return ! (rtx_equal_p (x, exp_rtl)
6535 || (MEM_P (x) && MEM_P (exp_rtl)
6536 && true_dependence (exp_rtl, VOIDmode, x,
6537 rtx_addr_varies_p)));
6540 /* If we reach here, it is safe. */
6541 return 1;
6545 /* Return the highest power of two that EXP is known to be a multiple of.
6546 This is used in updating alignment of MEMs in array references. */
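/* (Editor's worked example: for i * 12 + 8 the MULT_EXPR contributes
   highest_pow2_factor (i) * highest_pow2_factor (12) == 1 * 4 == 4,
   and the PLUS_EXPR then takes MIN (4, 8) == 4, so an address computed
   from this expression is known to be 4-byte aligned.)  */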
6548 unsigned HOST_WIDE_INT
6549 highest_pow2_factor (const_tree exp)
6551 unsigned HOST_WIDE_INT c0, c1;
6553 switch (TREE_CODE (exp))
6555 case INTEGER_CST:
6556 /* We can find the lowest bit that's a one. If the low
6557 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6558 We need to handle this case since we can find it in a COND_EXPR,
6559 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
6560 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
6561 later ICE. */
6562 if (TREE_OVERFLOW (exp))
6563 return BIGGEST_ALIGNMENT;
6564 else
6566 /* Note: tree_low_cst is intentionally not used here;
6567 we don't care about the upper bits. */
6568 c0 = TREE_INT_CST_LOW (exp);
6569 c0 &= -c0;
6570 return c0 ? c0 : BIGGEST_ALIGNMENT;
6572 break;
6574 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6575 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6576 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6577 return MIN (c0, c1);
6579 case MULT_EXPR:
6580 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6581 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6582 return c0 * c1;
6584 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6585 case CEIL_DIV_EXPR:
6586 if (integer_pow2p (TREE_OPERAND (exp, 1))
6587 && host_integerp (TREE_OPERAND (exp, 1), 1))
6589 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6590 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6591 return MAX (1, c0 / c1);
6593 break;
6595 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6596 case SAVE_EXPR:
6597 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6599 case COMPOUND_EXPR:
6600 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6602 case COND_EXPR:
6603 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6604 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6605 return MIN (c0, c1);
6607 default:
6608 break;
6611 return 1;
6614 /* Similar, except that the alignment requirements of TARGET are
6615 taken into account. Assume it is at least as aligned as its
6616 type, unless it is a COMPONENT_REF in which case the layout of
6617 the structure gives the alignment. */
6619 static unsigned HOST_WIDE_INT
6620 highest_pow2_factor_for_target (const_tree target, const_tree exp)
6622 unsigned HOST_WIDE_INT target_align, factor;
6624 factor = highest_pow2_factor (exp);
6625 if (TREE_CODE (target) == COMPONENT_REF)
6626 target_align = DECL_ALIGN_UNIT (TREE_OPERAND (target, 1));
6627 else
6628 target_align = TYPE_ALIGN_UNIT (TREE_TYPE (target));
6629 return MAX (factor, target_align);
6632 /* Return &VAR expression for emulated thread local VAR. */
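/* (Editor's note: the tree built below is, in effect,
   (T *) __emutls_get_address (&CONTROL), where CONTROL stands for the
   emulated-TLS control object that emutls_decl returns for VAR and T
   is VAR's type.)  */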
6634 static tree
6635 emutls_var_address (tree var)
6637 tree emuvar = emutls_decl (var);
6638 tree fn = built_in_decls [BUILT_IN_EMUTLS_GET_ADDRESS];
6639 tree arg = build_fold_addr_expr_with_type (emuvar, ptr_type_node);
6640 tree arglist = build_tree_list (NULL_TREE, arg);
6641 tree call = build_function_call_expr (fn, arglist);
6642 return fold_convert (build_pointer_type (TREE_TYPE (var)), call);
6645 /* Expands variable VAR. */
6647 void
6648 expand_var (tree var)
6650 if (DECL_EXTERNAL (var))
6651 return;
6653 if (TREE_STATIC (var))
6654 /* If this is an inlined copy of a static local variable,
6655 look up the original decl. */
6656 var = DECL_ORIGIN (var);
6658 if (TREE_STATIC (var)
6659 ? !TREE_ASM_WRITTEN (var)
6660 : !DECL_RTL_SET_P (var))
6662 if (TREE_CODE (var) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (var))
6663 /* Should be ignored. */;
6664 else if (lang_hooks.expand_decl (var))
6665 /* OK. */;
6666 else if (TREE_CODE (var) == VAR_DECL && !TREE_STATIC (var))
6667 expand_decl (var);
6668 else if (TREE_CODE (var) == VAR_DECL && TREE_STATIC (var))
6669 rest_of_decl_compilation (var, 0, 0);
6670 else
6671 /* No expansion needed. */
6672 gcc_assert (TREE_CODE (var) == TYPE_DECL
6673 || TREE_CODE (var) == CONST_DECL
6674 || TREE_CODE (var) == FUNCTION_DECL
6675 || TREE_CODE (var) == LABEL_DECL);
6679 /* Subroutine of expand_expr. Expand the two operands of a binary
6680 expression EXP0 and EXP1 placing the results in OP0 and OP1.
6681 The value may be stored in TARGET if TARGET is nonzero. The
6682 MODIFIER argument is as documented by expand_expr. */
6684 static void
6685 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
6686 enum expand_modifier modifier)
6688 if (! safe_from_p (target, exp1, 1))
6689 target = 0;
6690 if (operand_equal_p (exp0, exp1, 0))
6692 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6693 *op1 = copy_rtx (*op0);
6695 else
6697 /* If we need to preserve evaluation order, copy exp0 into its own
6698 temporary variable so that it can't be clobbered by exp1. */
6699 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
6700 exp0 = save_expr (exp0);
6701 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6702 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
6707 /* Return a MEM that contains constant EXP. DEFER is as for
6708 output_constant_def and MODIFIER is as for expand_expr. */
6710 static rtx
6711 expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
6713 rtx mem;
6715 mem = output_constant_def (exp, defer);
6716 if (modifier != EXPAND_INITIALIZER)
6717 mem = use_anchored_address (mem);
6718 return mem;
6721 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
6722 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6724 static rtx
6725 expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
6726 enum expand_modifier modifier)
6728 rtx result, subtarget;
6729 tree inner, offset;
6730 HOST_WIDE_INT bitsize, bitpos;
6731 int volatilep, unsignedp;
6732 enum machine_mode mode1;
6734 /* If we are taking the address of a constant and are at the top level,
6735 we have to use output_constant_def since we can't call force_const_mem
6736 at top level. */
6737 /* ??? This should be considered a front-end bug. We should not be
6738 generating ADDR_EXPR of something that isn't an LVALUE. The only
6739 exception here is STRING_CST. */
6740 if (CONSTANT_CLASS_P (exp))
6741 return XEXP (expand_expr_constant (exp, 0, modifier), 0);
6743 /* Everything must be something allowed by is_gimple_addressable. */
6744 switch (TREE_CODE (exp))
6746 case INDIRECT_REF:
6747 /* This case will happen via recursion for &a->b. */
6748 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6750 case CONST_DECL:
6751 /* Recurse and make the output_constant_def clause above handle this. */
6752 return expand_expr_addr_expr_1 (DECL_INITIAL (exp), target,
6753 tmode, modifier);
6755 case REALPART_EXPR:
6756 /* The real part of the complex number is always first; therefore
6757 the address is the same as the address of the parent object. */
6758 offset = 0;
6759 bitpos = 0;
6760 inner = TREE_OPERAND (exp, 0);
6761 break;
6763 case IMAGPART_EXPR:
6764 /* The imaginary part of the complex number is always second.
6765 The expression is therefore always offset by the size of the
6766 scalar type. */
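/* (Editor's example: for a _Complex double built from 64-bit DFmode
   parts, the imaginary part sits 64 bits, i.e. 8 bytes, past the
   real part.)  */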
6767 offset = 0;
6768 bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
6769 inner = TREE_OPERAND (exp, 0);
6770 break;
6772 case VAR_DECL:
6773 /* TLS emulation hook - replace __thread VAR's &VAR with
6774 __emutls_get_address (&_emutls.VAR). */
6775 if (! targetm.have_tls
6776 && TREE_CODE (exp) == VAR_DECL
6777 && DECL_THREAD_LOCAL_P (exp))
6779 exp = emutls_var_address (exp);
6780 return expand_expr (exp, target, tmode, modifier);
6782 /* Fall through. */
6784 default:
6785 /* If the object is a DECL, then expand it for its rtl. Don't bypass
6786 expand_expr, as that can have various side effects; LABEL_DECLs for
6787 example, may not have their DECL_RTL set yet. Expand the rtl of
6788 CONSTRUCTORs too, which should yield a memory reference for the
6789 constructor's contents. Assume language specific tree nodes can
6790 be expanded in some interesting way. */
6791 if (DECL_P (exp)
6792 || TREE_CODE (exp) == CONSTRUCTOR
6793 || TREE_CODE (exp) >= LAST_AND_UNUSED_TREE_CODE)
6795 result = expand_expr (exp, target, tmode,
6796 modifier == EXPAND_INITIALIZER
6797 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
6799 /* If the DECL isn't in memory, then the DECL wasn't properly
6800 marked TREE_ADDRESSABLE, which will be either a front-end
6801 or a tree optimizer bug. */
6802 gcc_assert (MEM_P (result));
6803 result = XEXP (result, 0);
6805 /* ??? Is this needed anymore? */
6806 if (DECL_P (exp) && ! TREE_USED (exp))
6808 assemble_external (exp);
6809 TREE_USED (exp) = 1;
6812 if (modifier != EXPAND_INITIALIZER
6813 && modifier != EXPAND_CONST_ADDRESS)
6814 result = force_operand (result, target);
6815 return result;
6818 /* Pass FALSE as the last argument to get_inner_reference although
6819 we are expanding to RTL. The rationale is that we know how to
6820 handle "aligning nodes" here: we can just bypass them because
6821 they won't change the final object whose address will be returned
6822 (they actually exist only for that purpose). */
6823 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6824 &mode1, &unsignedp, &volatilep, false);
6825 break;
6828 /* We must have made progress. */
6829 gcc_assert (inner != exp);
6831 subtarget = offset || bitpos ? NULL_RTX : target;
6832 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier);
6834 if (offset)
6836 rtx tmp;
6838 if (modifier != EXPAND_NORMAL)
6839 result = force_operand (result, NULL);
6840 tmp = expand_expr (offset, NULL_RTX, tmode,
6841 modifier == EXPAND_INITIALIZER
6842 ? EXPAND_INITIALIZER : EXPAND_NORMAL);
6844 result = convert_memory_address (tmode, result);
6845 tmp = convert_memory_address (tmode, tmp);
6847 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6848 result = gen_rtx_PLUS (tmode, result, tmp);
6849 else
6851 subtarget = bitpos ? NULL_RTX : target;
6852 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
6853 1, OPTAB_LIB_WIDEN);
6857 if (bitpos)
6859 /* Someone beforehand should have rejected taking the address
6860 of such an object. */
6861 gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
6863 result = plus_constant (result, bitpos / BITS_PER_UNIT);
6864 if (modifier < EXPAND_SUM)
6865 result = force_operand (result, target);
6868 return result;
6871 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
6872 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6874 static rtx
6875 expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
6876 enum expand_modifier modifier)
6878 enum machine_mode rmode;
6879 rtx result;
6881 /* Target mode of VOIDmode says "whatever's natural". */
6882 if (tmode == VOIDmode)
6883 tmode = TYPE_MODE (TREE_TYPE (exp));
6885 /* We can get called with some Weird Things if the user does silliness
6886 like "(short) &a". In that case, convert_memory_address won't do
6887 the right thing, so ignore the given target mode. */
6888 if (tmode != Pmode && tmode != ptr_mode)
6889 tmode = Pmode;
6891 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
6892 tmode, modifier);
6894 /* Despite expand_expr's claim that TMODE may be ignored when not
6895 strictly convenient, stuff breaks if we don't honor it. Note
6896 that combined with the above, we only do this for pointer modes. */
6897 rmode = GET_MODE (result);
6898 if (rmode == VOIDmode)
6899 rmode = tmode;
6900 if (rmode != tmode)
6901 result = convert_memory_address (tmode, result);
6903 return result;
6906 /* Generate code for computing CONSTRUCTOR EXP.
6907 An rtx for the computed value is returned. If AVOID_TEMP_MEM
6908 is TRUE, instead of creating a temporary variable in memory
6909 NULL is returned and the caller needs to handle it differently. */
6911 static rtx
6912 expand_constructor (tree exp, rtx target, enum expand_modifier modifier,
6913 bool avoid_temp_mem)
6915 tree type = TREE_TYPE (exp);
6916 enum machine_mode mode = TYPE_MODE (type);
6918 /* Try to avoid creating a temporary at all. This is possible
6919 if all of the initializer is zero.
6920 FIXME: try to handle all [0..255] initializers we can handle
6921 with memset. */
6922 if (TREE_STATIC (exp)
6923 && !TREE_ADDRESSABLE (exp)
6924 && target != 0 && mode == BLKmode
6925 && all_zeros_p (exp))
6927 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
6928 return target;
6931 /* All elts simple constants => refer to a constant in memory. But
6932 if this is a non-BLKmode mode, let it store a field at a time
6933 since that should make a CONST_INT or CONST_DOUBLE when we
6934 fold. Likewise, if we have a target we can use, it is best to
6935 store directly into the target unless the type is large enough
6936 that memcpy will be used. If we are making an initializer and
6937 all operands are constant, put it in memory as well.
6939 FIXME: Avoid trying to fill vector constructors piece-meal.
6940 Output them with output_constant_def below unless we're sure
6941 they're zeros. This should go away when vector initializers
6942 are treated like VECTOR_CST instead of arrays. */
6943 if ((TREE_STATIC (exp)
6944 && ((mode == BLKmode
6945 && ! (target != 0 && safe_from_p (target, exp, 1)))
6946 || TREE_ADDRESSABLE (exp)
6947 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6948 && (! MOVE_BY_PIECES_P
6949 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6950 TYPE_ALIGN (type)))
6951 && ! mostly_zeros_p (exp))))
6952 || ((modifier == EXPAND_INITIALIZER || modifier == EXPAND_CONST_ADDRESS)
6953 && TREE_CONSTANT (exp)))
6955 rtx constructor;
6957 if (avoid_temp_mem)
6958 return NULL_RTX;
6960 constructor = expand_expr_constant (exp, 1, modifier);
6962 if (modifier != EXPAND_CONST_ADDRESS
6963 && modifier != EXPAND_INITIALIZER
6964 && modifier != EXPAND_SUM)
6965 constructor = validize_mem (constructor);
6967 return constructor;
6970 /* Handle calls that pass values in multiple non-contiguous
6971 locations. The Irix 6 ABI has examples of this. */
6972 if (target == 0 || ! safe_from_p (target, exp, 1)
6973 || GET_CODE (target) == PARALLEL || modifier == EXPAND_STACK_PARM)
6975 if (avoid_temp_mem)
6976 return NULL_RTX;
6978 target
6979 = assign_temp (build_qualified_type (type, (TYPE_QUALS (type)
6980 | (TREE_READONLY (exp)
6981 * TYPE_QUAL_CONST))),
6982 0, TREE_ADDRESSABLE (exp), 1);
6985 store_constructor (exp, target, 0, int_expr_size (exp));
6986 return target;
6990 /* expand_expr: generate code for computing expression EXP.
6991 An rtx for the computed value is returned. The value is never null.
6992 In the case of a void EXP, const0_rtx is returned.
6994 The value may be stored in TARGET if TARGET is nonzero.
6995 TARGET is just a suggestion; callers must assume that
6996 the rtx returned may not be the same as TARGET.
6998 If TARGET is CONST0_RTX, it means that the value will be ignored.
7000 If TMODE is not VOIDmode, it suggests generating the
7001 result in mode TMODE. But this is done only when convenient.
7002 Otherwise, TMODE is ignored and the value is generated in its natural mode.
7003 TMODE is just a suggestion; callers must assume that
7004 the rtx returned may not have mode TMODE.
7006 Note that TARGET may have neither TMODE nor MODE. In that case, it
7007 probably will not be used.
7009 If MODIFIER is EXPAND_SUM then when EXP is an addition
7010 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
7011 or a nest of (PLUS ...) and (MINUS ...) where the terms are
7012 products as above, or REG or MEM, or constant.
7013 Ordinarily in such cases we would output mul or add instructions
7014 and then return a pseudo reg containing the sum.
7016 EXPAND_INITIALIZER is much like EXPAND_SUM except that
7017 it also marks a label as absolutely required (it can't be dead).
7018 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
7019 This is used for outputting expressions used in initializers.
7021 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
7022 with a constant address even if that address is not normally legitimate.
7023 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
7025 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
7026 a call parameter. Such targets require special care as we haven't yet
7027 marked TARGET so that it's safe from being trashed by libcalls. We
7028 don't want to use TARGET for anything but the final result;
7029 intermediate values must go elsewhere. Additionally, calls to
7030 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
7032 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
7033 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
7034 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
7035 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
7036 recursively. */
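/* Editor's illustration (hypothetical RTL, not from the original
   source): under EXPAND_SUM, expanding "a + 8" may simply return

     (plus:SI (reg:SI 58) (const_int 8))

   without emitting an add insn, leaving the caller, typically
   address formation, to fold the constant into an addressing
   mode.  */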
7038 static rtx expand_expr_real_1 (tree, rtx, enum machine_mode,
7039 enum expand_modifier, rtx *);
7041 rtx
7042 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
7043 enum expand_modifier modifier, rtx *alt_rtl)
7045 int rn = -1;
7046 rtx ret, last = NULL;
7048 /* Handle ERROR_MARK before anybody tries to access its type. */
7049 if (TREE_CODE (exp) == ERROR_MARK
7050 || (!GIMPLE_TUPLE_P (exp) && TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK))
7052 ret = CONST0_RTX (tmode);
7053 return ret ? ret : const0_rtx;
7056 if (flag_non_call_exceptions)
7058 rn = lookup_stmt_eh_region (exp);
7059 /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't throw. */
7060 if (rn >= 0)
7061 last = get_last_insn ();
7064 /* If this is an expression of some kind and it has an associated line
7065 number, then emit the line number before expanding the expression.
7067 We need to save and restore the file and line information so that
7068 errors discovered during expansion are emitted with the right
7069 information. It would be better if the diagnostic routines
7070 used the file/line information embedded in the tree nodes rather
7071 than globals. */
7072 if (cfun && EXPR_HAS_LOCATION (exp))
7074 location_t saved_location = input_location;
7075 input_location = EXPR_LOCATION (exp);
7076 set_curr_insn_source_location (input_location);
7078 /* Record where the insns produced belong. */
7079 set_curr_insn_block (TREE_BLOCK (exp));
7081 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
7083 input_location = saved_location;
7085 else
7087 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
7090 /* If using non-call exceptions, mark all insns that may trap.
7091 expand_call() will mark CALL_INSNs before we get to this code,
7092 but it doesn't handle libcalls, and these may trap. */
7093 if (rn >= 0)
7095 rtx insn;
7096 for (insn = next_real_insn (last); insn;
7097 insn = next_real_insn (insn))
7099 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
7100 /* If we want exceptions for non-call insns, any
7101 may_trap_p instruction may throw. */
7102 && GET_CODE (PATTERN (insn)) != CLOBBER
7103 && GET_CODE (PATTERN (insn)) != USE
7104 && (CALL_P (insn) || may_trap_p (PATTERN (insn))))
7106 REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (rn),
7107 REG_NOTES (insn));
7112 return ret;
7115 static rtx
7116 expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
7117 enum expand_modifier modifier, rtx *alt_rtl)
7119 rtx op0, op1, op2, temp, decl_rtl;
7120 tree type;
7121 int unsignedp;
7122 enum machine_mode mode;
7123 enum tree_code code = TREE_CODE (exp);
7124 optab this_optab;
7125 rtx subtarget, original_target;
7126 int ignore;
7127 tree context, subexp0, subexp1;
7128 bool reduce_bit_field = false;
7129 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field && !ignore \
7130 ? reduce_to_bit_field_precision ((expr), \
7131 target, \
7132 type) \
7133 : (expr))
7135 if (GIMPLE_STMT_P (exp))
7137 type = void_type_node;
7138 mode = VOIDmode;
7139 unsignedp = 0;
7141 else
7143 type = TREE_TYPE (exp);
7144 mode = TYPE_MODE (type);
7145 unsignedp = TYPE_UNSIGNED (type);
7147 if (lang_hooks.reduce_bit_field_operations
7148 && TREE_CODE (type) == INTEGER_TYPE
7149 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type))
7151 /* An operation in what may be a bit-field type needs the
7152 result to be reduced to the precision of the bit-field type,
7153 which is narrower than that of the type's mode. */
7154 reduce_bit_field = true;
7155 if (modifier == EXPAND_STACK_PARM)
7156 target = 0;
7159 /* Use subtarget as the target for operand 0 of a binary operation. */
7160 subtarget = get_subtarget (target);
7161 original_target = target;
7162 ignore = (target == const0_rtx
7163 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
7164 || code == CONVERT_EXPR || code == COND_EXPR
7165 || code == VIEW_CONVERT_EXPR)
7166 && TREE_CODE (type) == VOID_TYPE));
7168 /* If we are going to ignore this result, we need only do something
7169 if there is a side-effect somewhere in the expression. If there
7170 is, short-circuit the most common cases here. Note that we must
7171 not call expand_expr with anything but const0_rtx in case this
7172 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
7174 if (ignore)
7176 if (! TREE_SIDE_EFFECTS (exp))
7177 return const0_rtx;
7179 /* Ensure we reference a volatile object even if value is ignored, but
7180 don't do this if all we are doing is taking its address. */
7181 if (TREE_THIS_VOLATILE (exp)
7182 && TREE_CODE (exp) != FUNCTION_DECL
7183 && mode != VOIDmode && mode != BLKmode
7184 && modifier != EXPAND_CONST_ADDRESS)
7186 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
7187 if (MEM_P (temp))
7188 temp = copy_to_reg (temp);
7189 return const0_rtx;
7192 if (TREE_CODE_CLASS (code) == tcc_unary
7193 || code == COMPONENT_REF || code == INDIRECT_REF)
7194 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
7195 modifier);
7197 else if (TREE_CODE_CLASS (code) == tcc_binary
7198 || TREE_CODE_CLASS (code) == tcc_comparison
7199 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
7201 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
7202 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
7203 return const0_rtx;
7205 else if (code == BIT_FIELD_REF)
7207 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
7208 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
7209 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
7210 return const0_rtx;
7213 target = 0;
7217 switch (code)
7219 case LABEL_DECL:
7221 tree function = decl_function_context (exp);
7223 temp = label_rtx (exp);
7224 temp = gen_rtx_LABEL_REF (Pmode, temp);
7226 if (function != current_function_decl
7227 && function != 0)
7228 LABEL_REF_NONLOCAL_P (temp) = 1;
7230 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
7231 return temp;
7234 case SSA_NAME:
7235 return expand_expr_real_1 (SSA_NAME_VAR (exp), target, tmode, modifier,
7236 NULL);
7238 case PARM_DECL:
7239 case VAR_DECL:
7240 /* If a static var's type was incomplete when the decl was written,
7241 but the type is complete now, lay out the decl now. */
7242 if (DECL_SIZE (exp) == 0
7243 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
7244 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
7245 layout_decl (exp, 0);
7247 /* TLS emulation hook - replace __thread vars with
7248 *__emutls_get_address (&_emutls.var). */
7249 if (! targetm.have_tls
7250 && TREE_CODE (exp) == VAR_DECL
7251 && DECL_THREAD_LOCAL_P (exp))
7253 exp = build_fold_indirect_ref (emutls_var_address (exp));
7254 return expand_expr_real_1 (exp, target, tmode, modifier, NULL);
7257 /* ... fall through ... */
7259 case FUNCTION_DECL:
7260 case RESULT_DECL:
7261 decl_rtl = DECL_RTL (exp);
7262 gcc_assert (decl_rtl);
7263 decl_rtl = copy_rtx (decl_rtl);
7265 /* Ensure the variable is marked as used even if it doesn't go through
7266 a parser. If it hasn't been used yet, write out an external
7267 definition. */
7268 if (! TREE_USED (exp))
7270 assemble_external (exp);
7271 TREE_USED (exp) = 1;
7274 /* Show we haven't gotten RTL for this yet. */
7275 temp = 0;
7277 /* Variables inherited from containing functions should have
7278 been lowered by this point. */
7279 context = decl_function_context (exp);
7280 gcc_assert (!context
7281 || context == current_function_decl
7282 || TREE_STATIC (exp)
7283 /* ??? C++ creates functions that are not TREE_STATIC. */
7284 || TREE_CODE (exp) == FUNCTION_DECL);
7286 /* This is the case of an array whose size is to be determined
7287 from its initializer, while the initializer is still being parsed.
7288 See expand_decl. */
7290 if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
7291 temp = validize_mem (decl_rtl);
7293 /* If DECL_RTL is memory, we are in the normal case and the
7294 address is not valid, get the address into a register. */
7296 else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
7298 if (alt_rtl)
7299 *alt_rtl = decl_rtl;
7300 decl_rtl = use_anchored_address (decl_rtl);
7301 if (modifier != EXPAND_CONST_ADDRESS
7302 && modifier != EXPAND_SUM
7303 && !memory_address_p (DECL_MODE (exp), XEXP (decl_rtl, 0)))
7304 temp = replace_equiv_address (decl_rtl,
7305 copy_rtx (XEXP (decl_rtl, 0)));
7308 /* If we got something, return it. But first, set the alignment
7309 if the address is a register. */
7310 if (temp != 0)
7312 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
7313 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
7315 return temp;
7318 /* If the mode of DECL_RTL does not match that of the decl, it
7319 must be a promoted value. We return a SUBREG of the wanted mode,
7320 but mark it so that we know that it was already extended. */
7322 if (REG_P (decl_rtl)
7323 && GET_MODE (decl_rtl) != DECL_MODE (exp))
7325 enum machine_mode pmode;
7327 /* Get the signedness used for this variable. Ensure we get the
7328 same mode we got when the variable was declared. */
7329 pmode = promote_mode (type, DECL_MODE (exp), &unsignedp,
7330 (TREE_CODE (exp) == RESULT_DECL
7331 || TREE_CODE (exp) == PARM_DECL) ? 1 : 0);
7332 gcc_assert (GET_MODE (decl_rtl) == pmode);
7334 temp = gen_lowpart_SUBREG (mode, decl_rtl);
7335 SUBREG_PROMOTED_VAR_P (temp) = 1;
7336 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
7337 return temp;
7340 return decl_rtl;
7342 case INTEGER_CST:
7343 temp = immed_double_const (TREE_INT_CST_LOW (exp),
7344 TREE_INT_CST_HIGH (exp), mode);
7346 return temp;
7348 case VECTOR_CST:
7350 tree tmp = NULL_TREE;
7351 if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT
7352 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT
7353 || GET_MODE_CLASS (mode) == MODE_VECTOR_FRACT
7354 || GET_MODE_CLASS (mode) == MODE_VECTOR_UFRACT
7355 || GET_MODE_CLASS (mode) == MODE_VECTOR_ACCUM
7356 || GET_MODE_CLASS (mode) == MODE_VECTOR_UACCUM)
7357 return const_vector_from_tree (exp);
7358 if (GET_MODE_CLASS (mode) == MODE_INT)
7360 tree type_for_mode = lang_hooks.types.type_for_mode (mode, 1);
7361 if (type_for_mode)
7362 tmp = fold_unary (VIEW_CONVERT_EXPR, type_for_mode, exp);
7364 if (!tmp)
7365 tmp = build_constructor_from_list (type,
7366 TREE_VECTOR_CST_ELTS (exp));
7367 return expand_expr (tmp, ignore ? const0_rtx : target,
7368 tmode, modifier);
7371 case CONST_DECL:
7372 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
7374 case REAL_CST:
7375 /* If optimized, generate immediate CONST_DOUBLE
7376 which will be turned into memory by reload if necessary.
7378 We used to force a register so that loop.c could see it. But
7379 this does not allow gen_* patterns to perform optimizations with
7380 the constants. It also produces two insns in cases like "x = 1.0;".
7381 On most machines, floating-point constants are not permitted in
7382 many insns, so we'd end up copying it to a register in any case.
7384 Now, we do the copying in expand_binop, if appropriate. */
7385 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
7386 TYPE_MODE (TREE_TYPE (exp)));
7388 case FIXED_CST:
7389 return CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (exp),
7390 TYPE_MODE (TREE_TYPE (exp)));
7392 case COMPLEX_CST:
7393 /* Handle evaluating a complex constant in a CONCAT target. */
7394 if (original_target && GET_CODE (original_target) == CONCAT)
7396 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7397 rtx rtarg, itarg;
7399 rtarg = XEXP (original_target, 0);
7400 itarg = XEXP (original_target, 1);
7402 /* Move the real and imaginary parts separately. */
7403 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, EXPAND_NORMAL);
7404 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, EXPAND_NORMAL);
7406 if (op0 != rtarg)
7407 emit_move_insn (rtarg, op0);
7408 if (op1 != itarg)
7409 emit_move_insn (itarg, op1);
7411 return original_target;
7414 /* ... fall through ... */
7416 case STRING_CST:
7417 temp = expand_expr_constant (exp, 1, modifier);
7419 /* temp contains a constant address.
7420 On RISC machines where a constant address isn't valid,
7421 make some insns to get that address into a register. */
7422 if (modifier != EXPAND_CONST_ADDRESS
7423 && modifier != EXPAND_INITIALIZER
7424 && modifier != EXPAND_SUM
7425 && ! memory_address_p (mode, XEXP (temp, 0)))
7426 return replace_equiv_address (temp,
7427 copy_rtx (XEXP (temp, 0)));
7428 return temp;
7430 case SAVE_EXPR:
7432 tree val = TREE_OPERAND (exp, 0);
7433 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);
7435 if (!SAVE_EXPR_RESOLVED_P (exp))
7437 /* We can indeed still hit this case, typically via builtin
7438 expanders calling save_expr immediately before expanding
7439 something. Assume this means that we only have to deal
7440 with non-BLKmode values. */
7441 gcc_assert (GET_MODE (ret) != BLKmode);
7443 val = build_decl (VAR_DECL, NULL, TREE_TYPE (exp));
7444 DECL_ARTIFICIAL (val) = 1;
7445 DECL_IGNORED_P (val) = 1;
7446 TREE_OPERAND (exp, 0) = val;
7447 SAVE_EXPR_RESOLVED_P (exp) = 1;
7449 if (!CONSTANT_P (ret))
7450 ret = copy_to_reg (ret);
7451 SET_DECL_RTL (val, ret);
7454 return ret;
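/* For example: a builtin expander may wrap an operand in a SAVE_EXPR
   so that it is evaluated exactly once. The first expansion above
   pins the value in a pseudo and records it on an artificial
   VAR_DECL; any later expansion of the same SAVE_EXPR then simply
   reads the DECL_RTL of that decl instead of re-evaluating the
   operand. */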
7457 case GOTO_EXPR:
7458 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
7459 expand_goto (TREE_OPERAND (exp, 0));
7460 else
7461 expand_computed_goto (TREE_OPERAND (exp, 0));
7462 return const0_rtx;
7464 case CONSTRUCTOR:
7465 /* If we don't need the result, just ensure we evaluate any
7466 subexpressions. */
7467 if (ignore)
7469 unsigned HOST_WIDE_INT idx;
7470 tree value;
7472 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
7473 expand_expr (value, const0_rtx, VOIDmode, EXPAND_NORMAL);
7475 return const0_rtx;
7478 return expand_constructor (exp, target, modifier, false);
7480 case MISALIGNED_INDIRECT_REF:
7481 case ALIGN_INDIRECT_REF:
7482 case INDIRECT_REF:
7484 tree exp1 = TREE_OPERAND (exp, 0);
7486 if (modifier != EXPAND_WRITE)
7488 tree t;
7490 t = fold_read_from_constant_string (exp);
7491 if (t)
7492 return expand_expr (t, target, tmode, modifier);
7495 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
7496 op0 = memory_address (mode, op0);
7498 if (code == ALIGN_INDIRECT_REF)
7500 int align = TYPE_ALIGN_UNIT (type);
7501 op0 = gen_rtx_AND (Pmode, op0, GEN_INT (-align));
7502 op0 = memory_address (mode, op0);
7505 temp = gen_rtx_MEM (mode, op0);
7507 set_mem_attributes (temp, exp, 0);
7509 /* Resolve the misalignment now, so that we don't have to remember
7510 to resolve it later. Of course, this only works for reads. */
7511 /* ??? When we get around to supporting writes, we'll have to handle
7512 this in store_expr directly. The vectorizer isn't generating
7513 those yet, however. */
7514 if (code == MISALIGNED_INDIRECT_REF)
7516 int icode;
7517 rtx reg, insn;
7519 gcc_assert (modifier == EXPAND_NORMAL
7520 || modifier == EXPAND_STACK_PARM);
7522 /* The vectorizer should have already checked the mode. */
7523 icode = optab_handler (movmisalign_optab, mode)->insn_code;
7524 gcc_assert (icode != CODE_FOR_nothing);
7526 /* We've already validated the memory, and we're creating a
7527 new pseudo destination. The predicates really can't fail. */
7528 reg = gen_reg_rtx (mode);
7530 /* Nor can the insn generator. */
7531 insn = GEN_FCN (icode) (reg, temp);
7532 emit_insn (insn);
7534 return reg;
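/* For example (hypothetical use): when the vectorizer emits a
   MISALIGNED_INDIRECT_REF load of, say, a V4SF value through a
   pointer with no alignment guarantee, the code above expands it via
   the target's movmisalign pattern into a fresh pseudo, so the
   misalignment is resolved once here rather than remembered by every
   later use of the value. */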
7537 return temp;
7540 case TARGET_MEM_REF:
7542 struct mem_address addr;
7544 get_address_description (exp, &addr);
7545 op0 = addr_for_mem_ref (&addr, true);
7546 op0 = memory_address (mode, op0);
7547 temp = gen_rtx_MEM (mode, op0);
7548 set_mem_attributes (temp, TMR_ORIGINAL (exp), 0);
7550 return temp;
7552 case ARRAY_REF:
7555 tree array = TREE_OPERAND (exp, 0);
7556 tree index = TREE_OPERAND (exp, 1);
7558 /* Fold an expression like: "foo"[2].
7559 This is not done in fold so it won't happen inside &.
7560 Don't fold if this is for wide characters since it's too
7561 difficult to do correctly and this is a very rare case. */
7563 if (modifier != EXPAND_CONST_ADDRESS
7564 && modifier != EXPAND_INITIALIZER
7565 && modifier != EXPAND_MEMORY)
7567 tree t = fold_read_from_constant_string (exp);
7569 if (t)
7570 return expand_expr (t, target, tmode, modifier);
7573 /* If this is a constant index into a constant array,
7574 just get the value from the array. Handle both the case where
7575 we have an explicit constructor and the case where our operand is
7576 a variable that was declared const. */
7578 if (modifier != EXPAND_CONST_ADDRESS
7579 && modifier != EXPAND_INITIALIZER
7580 && modifier != EXPAND_MEMORY
7581 && TREE_CODE (array) == CONSTRUCTOR
7582 && ! TREE_SIDE_EFFECTS (array)
7583 && TREE_CODE (index) == INTEGER_CST)
7585 unsigned HOST_WIDE_INT ix;
7586 tree field, value;
7588 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
7589 field, value)
7590 if (tree_int_cst_equal (field, index))
7592 if (!TREE_SIDE_EFFECTS (value))
7593 return expand_expr (fold (value), target, tmode, modifier);
7594 break;
7598 else if (optimize >= 1
7599 && modifier != EXPAND_CONST_ADDRESS
7600 && modifier != EXPAND_INITIALIZER
7601 && modifier != EXPAND_MEMORY
7602 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
7603 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
7604 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
7605 && targetm.binds_local_p (array))
7607 if (TREE_CODE (index) == INTEGER_CST)
7609 tree init = DECL_INITIAL (array);
7611 if (TREE_CODE (init) == CONSTRUCTOR)
7613 unsigned HOST_WIDE_INT ix;
7614 tree field, value;
7616 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
7617 field, value)
7618 if (tree_int_cst_equal (field, index))
7620 if (TREE_SIDE_EFFECTS (value))
7621 break;
7623 if (TREE_CODE (value) == CONSTRUCTOR)
7625 /* If VALUE is a CONSTRUCTOR, this
7626 optimization is only useful if
7627 this doesn't store the CONSTRUCTOR
7628 into memory. If it does, it is more
7629 efficient to just load the data from
7630 the array directly. */
7631 rtx ret = expand_constructor (value, target,
7632 modifier, true);
7633 if (ret == NULL_RTX)
7634 break;
7637 return expand_expr (fold (value), target, tmode,
7638 modifier);
7641 else if (TREE_CODE (init) == STRING_CST)
7643 tree index1 = index;
7644 tree low_bound = array_ref_low_bound (exp);
7645 index1 = fold_convert (sizetype, TREE_OPERAND (exp, 1));
7647 /* Optimize the special case of a zero lower bound.
7649 We convert the low_bound to sizetype to avoid some problems
7650 with constant folding. (E.g. suppose the lower bound is 1,
7651 and its mode is QI. Without the conversion, (ARRAY
7652 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
7653 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
7655 if (! integer_zerop (low_bound))
7656 index1 = size_diffop (index1, fold_convert (sizetype,
7657 low_bound));
7659 if (0 > compare_tree_int (index1,
7660 TREE_STRING_LENGTH (init)))
7662 tree type = TREE_TYPE (TREE_TYPE (init));
7663 enum machine_mode mode = TYPE_MODE (type);
7665 if (GET_MODE_CLASS (mode) == MODE_INT
7666 && GET_MODE_SIZE (mode) == 1)
7667 return gen_int_mode (TREE_STRING_POINTER (init)
7668 [TREE_INT_CST_LOW (index1)],
7669 mode);
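/* For example (hypothetical declaration): given
     static const char msg[] = "abc";
   a read of msg[1] when optimizing reaches this point with INIT
   being the STRING_CST "abc", and the bounds check above lets us
   return the byte 'b' as an immediate instead of emitting a load
   from memory. */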
7675 goto normal_inner_ref;
7677 case COMPONENT_REF:
7678 /* If the operand is a CONSTRUCTOR, we can just extract the
7679 appropriate field if it is present. */
7680 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
7682 unsigned HOST_WIDE_INT idx;
7683 tree field, value;
7685 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
7686 idx, field, value)
7687 if (field == TREE_OPERAND (exp, 1)
7688 /* We can normally use the value of the field in the
7689 CONSTRUCTOR. However, if this is a bitfield in
7690 an integral mode that we can fit in a HOST_WIDE_INT,
7691 we must mask only the number of bits in the bitfield,
7692 since this is done implicitly by the constructor. If
7693 the bitfield does not meet either of those conditions,
7694 we can't do this optimization. */
7695 && (! DECL_BIT_FIELD (field)
7696 || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
7697 && (GET_MODE_BITSIZE (DECL_MODE (field))
7698 <= HOST_BITS_PER_WIDE_INT))))
7700 if (DECL_BIT_FIELD (field)
7701 && modifier == EXPAND_STACK_PARM)
7702 target = 0;
7703 op0 = expand_expr (value, target, tmode, modifier);
7704 if (DECL_BIT_FIELD (field))
7706 HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
7707 enum machine_mode imode = TYPE_MODE (TREE_TYPE (field));
7709 if (TYPE_UNSIGNED (TREE_TYPE (field)))
7711 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
7712 op0 = expand_and (imode, op0, op1, target);
7714 else
7716 tree count
7717 = build_int_cst (NULL_TREE,
7718 GET_MODE_BITSIZE (imode) - bitsize);
7720 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
7721 target, 0);
7722 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
7723 target, 0);
7727 return op0;
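/* For example (hypothetical type): extracting .f from a CONSTRUCTOR
   for
     struct s { int f : 3; };
   initialized with { -1 } takes the branch above: the stored value
   is masked down to 3 bits for an unsigned field, or shifted left
   and arithmetically back for a signed one, reproducing the implicit
   truncation that a real store and reload of the bitfield would have
   performed. */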
7730 goto normal_inner_ref;
7732 case BIT_FIELD_REF:
7733 case ARRAY_RANGE_REF:
7734 normal_inner_ref:
7736 enum machine_mode mode1;
7737 HOST_WIDE_INT bitsize, bitpos;
7738 tree offset;
7739 int volatilep = 0;
7740 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7741 &mode1, &unsignedp, &volatilep, true);
7742 rtx orig_op0;
7744 /* If we got back the original object, something is wrong. Perhaps
7745 we are evaluating an expression too early. In any event, don't
7746 infinitely recurse. */
7747 gcc_assert (tem != exp);
7749 /* If TEM's type is a union of variable size, pass TARGET to the inner
7750 computation, since it will need a temporary and TARGET is known
7751 to be usable as one. This occurs in unchecked conversion in Ada. */
7753 orig_op0 = op0
7754 = expand_expr (tem,
7755 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7756 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7757 != INTEGER_CST)
7758 && modifier != EXPAND_STACK_PARM
7759 ? target : NULL_RTX),
7760 VOIDmode,
7761 (modifier == EXPAND_INITIALIZER
7762 || modifier == EXPAND_CONST_ADDRESS
7763 || modifier == EXPAND_STACK_PARM)
7764 ? modifier : EXPAND_NORMAL);
7766 /* If this is a constant, put it into a register if it is a legitimate
7767 constant, OFFSET is 0, and we won't try to extract outside the
7768 register (in case we were passed a partially uninitialized object
7769 or a view_conversion to a larger size). Force the constant to
7770 memory otherwise. */
7771 if (CONSTANT_P (op0))
7773 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7774 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7775 && offset == 0
7776 && bitpos + bitsize <= GET_MODE_BITSIZE (mode))
7777 op0 = force_reg (mode, op0);
7778 else
7779 op0 = validize_mem (force_const_mem (mode, op0));
7782 /* Otherwise, if this object is not in memory and we either have an
7783 offset, a BLKmode result, or a reference outside the object, put it
7784 there. Such cases can occur in Ada if we have unchecked conversion
7785 of an expression from a scalar type to an array or record type or
7786 for an ARRAY_RANGE_REF whose type is BLKmode. */
7787 else if (!MEM_P (op0)
7788 && (offset != 0
7789 || (bitpos + bitsize > GET_MODE_BITSIZE (GET_MODE (op0)))
7790 || (code == ARRAY_RANGE_REF && mode == BLKmode)))
7792 tree nt = build_qualified_type (TREE_TYPE (tem),
7793 (TYPE_QUALS (TREE_TYPE (tem))
7794 | TYPE_QUAL_CONST));
7795 rtx memloc = assign_temp (nt, 1, 1, 1);
7797 emit_move_insn (memloc, op0);
7798 op0 = memloc;
7801 if (offset != 0)
7803 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
7804 EXPAND_SUM);
7806 gcc_assert (MEM_P (op0));
7808 #ifdef POINTERS_EXTEND_UNSIGNED
7809 if (GET_MODE (offset_rtx) != Pmode)
7810 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
7811 #else
7812 if (GET_MODE (offset_rtx) != ptr_mode)
7813 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7814 #endif
7816 if (GET_MODE (op0) == BLKmode
7817 /* A constant address in OP0 can have VOIDmode; we must
7818 not try to call force_reg in that case. */
7819 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7820 && bitsize != 0
7821 && (bitpos % bitsize) == 0
7822 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7823 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7825 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7826 bitpos = 0;
7829 op0 = offset_address (op0, offset_rtx,
7830 highest_pow2_factor (offset));
7833 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7834 record its alignment as BIGGEST_ALIGNMENT. */
7835 if (MEM_P (op0) && bitpos == 0 && offset != 0
7836 && is_aligning_offset (offset, tem))
7837 set_mem_align (op0, BIGGEST_ALIGNMENT);
7839 /* Don't forget about volatility even if this is a bitfield. */
7840 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
7842 if (op0 == orig_op0)
7843 op0 = copy_rtx (op0);
7845 MEM_VOLATILE_P (op0) = 1;
7848 /* The following code doesn't handle CONCAT.
7849 Assume only bitpos == 0 can be used for CONCAT, due to
7850 one-element arrays having the same mode as their element. */
7851 if (GET_CODE (op0) == CONCAT)
7853 gcc_assert (bitpos == 0
7854 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)));
7855 return op0;
7858 /* In cases where an aligned union has an unaligned object
7859 as a field, we might be extracting a BLKmode value from
7860 an integer-mode (e.g., SImode) object. Handle this case
7861 by doing the extract into an object as wide as the field
7862 (which we know to be the width of a basic mode), then
7863 storing into memory, and changing the mode to BLKmode. */
7864 if (mode1 == VOIDmode
7865 || REG_P (op0) || GET_CODE (op0) == SUBREG
7866 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7867 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7868 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7869 && modifier != EXPAND_CONST_ADDRESS
7870 && modifier != EXPAND_INITIALIZER)
7871 /* If the field isn't aligned enough to fetch as a memref,
7872 fetch it as a bit field. */
7873 || (mode1 != BLKmode
7874 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
7875 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
7876 || (MEM_P (op0)
7877 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
7878 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
7879 && ((modifier == EXPAND_CONST_ADDRESS
7880 || modifier == EXPAND_INITIALIZER)
7881 ? STRICT_ALIGNMENT
7882 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
7883 || (bitpos % BITS_PER_UNIT != 0)))
7884 /* If the type and the field are a constant size and the
7885 size of the type isn't the same size as the bitfield,
7886 we must use bitfield operations. */
7887 || (bitsize >= 0
7888 && TYPE_SIZE (TREE_TYPE (exp))
7889 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
7890 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7891 bitsize)))
7893 enum machine_mode ext_mode = mode;
7895 if (ext_mode == BLKmode
7896 && ! (target != 0 && MEM_P (op0)
7897 && MEM_P (target)
7898 && bitpos % BITS_PER_UNIT == 0))
7899 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7901 if (ext_mode == BLKmode)
7903 if (target == 0)
7904 target = assign_temp (type, 0, 1, 1);
7906 if (bitsize == 0)
7907 return target;
7909 /* In this case, BITPOS must start at a byte boundary and
7910 TARGET, if specified, must be a MEM. */
7911 gcc_assert (MEM_P (op0)
7912 && (!target || MEM_P (target))
7913 && !(bitpos % BITS_PER_UNIT));
7915 emit_block_move (target,
7916 adjust_address (op0, VOIDmode,
7917 bitpos / BITS_PER_UNIT),
7918 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7919 / BITS_PER_UNIT),
7920 (modifier == EXPAND_STACK_PARM
7921 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7923 return target;
7926 op0 = validize_mem (op0);
7928 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
7929 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7931 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7932 (modifier == EXPAND_STACK_PARM
7933 ? NULL_RTX : target),
7934 ext_mode, ext_mode);
7936 /* If the result is a record type and BITSIZE is narrower than
7937 the mode of OP0, an integral mode, and this is a big endian
7938 machine, we must put the field into the high-order bits. */
7939 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7940 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7941 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7942 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7943 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7944 - bitsize),
7945 op0, 1);
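/* For example: on a 32-bit big-endian target, a 24-bit field
   extracted into an SImode register lands in the low-order bits; the
   shift just above moves it into the high-order bits, matching the
   memory image order that the BLKmode record store below expects. */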
7947 /* If the result type is BLKmode, store the data into a temporary
7948 of the appropriate type, but with the mode corresponding to the
7949 mode for the data we have (op0's mode). It's tempting to make
7950 this a constant type, since we know it's only being stored once,
7951 but that can cause problems if we are taking the address of this
7952 COMPONENT_REF because the MEM of any reference via that address
7953 will have flags corresponding to the type, which will not
7954 necessarily be constant. */
7955 if (mode == BLKmode)
7957 HOST_WIDE_INT size = GET_MODE_BITSIZE (ext_mode);
7958 rtx new;
7960 /* If the reference doesn't use the alias set of its type,
7961 we cannot create the temporary using that type. */
7962 if (component_uses_parent_alias_set (exp))
7964 new = assign_stack_local (ext_mode, size, 0);
7965 set_mem_alias_set (new, get_alias_set (exp));
7967 else
7968 new = assign_stack_temp_for_type (ext_mode, size, 0, type);
7970 emit_move_insn (new, op0);
7971 op0 = copy_rtx (new);
7972 PUT_MODE (op0, BLKmode);
7973 set_mem_attributes (op0, exp, 1);
7976 return op0;
7979 /* If the result is BLKmode, use that to access the object
7980 now as well. */
7981 if (mode == BLKmode)
7982 mode1 = BLKmode;
7984 /* Get a reference to just this component. */
7985 if (modifier == EXPAND_CONST_ADDRESS
7986 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7987 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7988 else
7989 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7991 if (op0 == orig_op0)
7992 op0 = copy_rtx (op0);
7994 set_mem_attributes (op0, exp, 0);
7995 if (REG_P (XEXP (op0, 0)))
7996 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7998 MEM_VOLATILE_P (op0) |= volatilep;
7999 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
8000 || modifier == EXPAND_CONST_ADDRESS
8001 || modifier == EXPAND_INITIALIZER)
8002 return op0;
8003 else if (target == 0)
8004 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8006 convert_move (target, op0, unsignedp);
8007 return target;
8010 case OBJ_TYPE_REF:
8011 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
8013 case CALL_EXPR:
8014 /* All valid uses of __builtin_va_arg_pack () are removed during
8015 inlining. */
8016 if (CALL_EXPR_VA_ARG_PACK (exp))
8017 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
8019 tree fndecl = get_callee_fndecl (exp), attr;
8021 if (fndecl
8022 && (attr = lookup_attribute ("error",
8023 DECL_ATTRIBUTES (fndecl))) != NULL)
8024 error ("%Kcall to %qs declared with attribute error: %s",
8025 exp, lang_hooks.decl_printable_name (fndecl, 1),
8026 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
8027 if (fndecl
8028 && (attr = lookup_attribute ("warning",
8029 DECL_ATTRIBUTES (fndecl))) != NULL)
8030 warning (0, "%Kcall to %qs declared with attribute warning: %s",
8031 exp, lang_hooks.decl_printable_name (fndecl, 1),
8032 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
8034 /* Check for a built-in function. */
8035 if (fndecl && DECL_BUILT_IN (fndecl))
8037 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_FRONTEND)
8038 return lang_hooks.expand_expr (exp, original_target,
8039 tmode, modifier, alt_rtl);
8040 else
8041 return expand_builtin (exp, target, subtarget, tmode, ignore);
8044 return expand_call (exp, target, ignore);
8046 case NON_LVALUE_EXPR:
8047 case NOP_EXPR:
8048 case CONVERT_EXPR:
8049 if (TREE_OPERAND (exp, 0) == error_mark_node)
8050 return const0_rtx;
8052 if (TREE_CODE (type) == UNION_TYPE)
8054 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
8056 /* If both input and output are BLKmode, this conversion isn't doing
8057 anything except possibly changing memory attributes. */
8058 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
8060 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
8061 modifier);
8063 result = copy_rtx (result);
8064 set_mem_attributes (result, exp, 0);
8065 return result;
8068 if (target == 0)
8070 if (TYPE_MODE (type) != BLKmode)
8071 target = gen_reg_rtx (TYPE_MODE (type));
8072 else
8073 target = assign_temp (type, 0, 1, 1);
8076 if (MEM_P (target))
8077 /* Store data into beginning of memory target. */
8078 store_expr (TREE_OPERAND (exp, 0),
8079 adjust_address (target, TYPE_MODE (valtype), 0),
8080 modifier == EXPAND_STACK_PARM,
8081 false);
8083 else
8085 gcc_assert (REG_P (target));
8087 /* Store this field into a union of the proper type. */
8088 store_field (target,
8089 MIN ((int_size_in_bytes (TREE_TYPE
8090 (TREE_OPERAND (exp, 0)))
8091 * BITS_PER_UNIT),
8092 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
8093 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
8094 type, 0, false);
8097 /* Return the entire union. */
8098 return target;
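/* For example (hypothetical type): converting an int value to
     union u { int i; char c[4]; };
   comes through here: the operand is stored into the start of a
   fresh union temporary (store_expr for a MEM target, store_field
   for a REG), and the whole union is returned rather than any single
   member of it. */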
8101 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8103 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
8104 modifier);
8106 /* If the signedness of the conversion differs and OP0 is
8107 a promoted SUBREG, clear that indication since we now
8108 have to do the proper extension. */
8109 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
8110 && GET_CODE (op0) == SUBREG)
8111 SUBREG_PROMOTED_VAR_P (op0) = 0;
8113 return REDUCE_BIT_FIELD (op0);
8116 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode,
8117 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
8118 if (GET_MODE (op0) == mode)
8121 /* If OP0 is a constant, just convert it into the proper mode. */
8122 else if (CONSTANT_P (op0))
8124 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8125 enum machine_mode inner_mode = TYPE_MODE (inner_type);
8127 if (modifier == EXPAND_INITIALIZER)
8128 op0 = simplify_gen_subreg (mode, op0, inner_mode,
8129 subreg_lowpart_offset (mode,
8130 inner_mode));
8131 else
8132 op0 = convert_modes (mode, inner_mode, op0,
8133 TYPE_UNSIGNED (inner_type));
8136 else if (modifier == EXPAND_INITIALIZER)
8137 op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
8139 else if (target == 0)
8140 op0 = convert_to_mode (mode, op0,
8141 TYPE_UNSIGNED (TREE_TYPE
8142 (TREE_OPERAND (exp, 0))));
8143 else
8145 convert_move (target, op0,
8146 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8147 op0 = target;
8150 return REDUCE_BIT_FIELD (op0);
8152 case VIEW_CONVERT_EXPR:
8153 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
8155 /* If the input and output modes are both the same, we are done. */
8156 if (TYPE_MODE (type) == GET_MODE (op0))
8158 /* If neither mode is BLKmode and both modes are the same size,
8159 then we can use gen_lowpart. */
8160 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
8161 && GET_MODE_SIZE (TYPE_MODE (type))
8162 == GET_MODE_SIZE (GET_MODE (op0)))
8164 if (GET_CODE (op0) == SUBREG)
8165 op0 = force_reg (GET_MODE (op0), op0);
8166 op0 = gen_lowpart (TYPE_MODE (type), op0);
8168 /* If both modes are integral, then we can convert from one to the
8169 other. */
8170 else if (SCALAR_INT_MODE_P (GET_MODE (op0))
8171 && SCALAR_INT_MODE_P (TYPE_MODE (type)))
8172 op0 = convert_modes (TYPE_MODE (type), GET_MODE (op0), op0,
8173 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8174 /* As a last resort, spill op0 to memory, and reload it in a
8175 different mode. */
8176 else if (!MEM_P (op0))
8178 /* If the operand is not a MEM, force it into memory. Since we
8179 are going to be changing the mode of the MEM, don't call
8180 force_const_mem for constants because we don't allow pool
8181 constants to change mode. */
8182 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8184 gcc_assert (!TREE_ADDRESSABLE (exp));
8186 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
8187 target
8188 = assign_stack_temp_for_type
8189 (TYPE_MODE (inner_type),
8190 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
8192 emit_move_insn (target, op0);
8193 op0 = target;
8196 /* At this point, OP0 is in the correct mode. If the output type is such
8197 that the operand is known to be aligned, indicate that it is.
8198 Otherwise, we need only be concerned about alignment for non-BLKmode
8199 results. */
8200 if (MEM_P (op0))
8202 op0 = copy_rtx (op0);
8204 if (TYPE_ALIGN_OK (type))
8205 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
8206 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
8207 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
8209 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8210 HOST_WIDE_INT temp_size
8211 = MAX (int_size_in_bytes (inner_type),
8212 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
8213 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
8214 temp_size, 0, type);
8215 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
8217 gcc_assert (!TREE_ADDRESSABLE (exp));
8219 if (GET_MODE (op0) == BLKmode)
8220 emit_block_move (new_with_op0_mode, op0,
8221 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
8222 (modifier == EXPAND_STACK_PARM
8223 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
8224 else
8225 emit_move_insn (new_with_op0_mode, op0);
8227 op0 = new;
8230 op0 = adjust_address (op0, TYPE_MODE (type), 0);
8233 return op0;
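/* For example (hypothetical expression): a reinterpretation such as
     *(int *) &some_float
   can reach this code as a VIEW_CONVERT_EXPR; SFmode and SImode have
   equal size, so gen_lowpart reinterprets the bits in place, and
   only mode pairs of different sizes fall back to the
   stack-temporary spill-and-reload path above. */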
8235 case POINTER_PLUS_EXPR:
8236 /* Even though the sizetype mode and the pointer's mode can be different,
8237 expand is able to handle this correctly and get the correct result out
8238 of the PLUS_EXPR code. */
8239 case PLUS_EXPR:
8241 /* Check if this is a case for multiplication and addition. */
8242 if ((TREE_CODE (type) == INTEGER_TYPE
8243 || TREE_CODE (type) == FIXED_POINT_TYPE)
8244 && TREE_CODE (TREE_OPERAND (exp, 0)) == MULT_EXPR)
8246 tree subsubexp0, subsubexp1;
8247 enum tree_code code0, code1, this_code;
8249 subexp0 = TREE_OPERAND (exp, 0);
8250 subsubexp0 = TREE_OPERAND (subexp0, 0);
8251 subsubexp1 = TREE_OPERAND (subexp0, 1);
8252 code0 = TREE_CODE (subsubexp0);
8253 code1 = TREE_CODE (subsubexp1);
8254 this_code = TREE_CODE (type) == INTEGER_TYPE ? NOP_EXPR
8255 : FIXED_CONVERT_EXPR;
8256 if (code0 == this_code && code1 == this_code
8257 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
8258 < TYPE_PRECISION (TREE_TYPE (subsubexp0)))
8259 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
8260 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp1, 0))))
8261 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
8262 == TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subsubexp1, 0)))))
8264 tree op0type = TREE_TYPE (TREE_OPERAND (subsubexp0, 0));
8265 enum machine_mode innermode = TYPE_MODE (op0type);
8266 bool zextend_p = TYPE_UNSIGNED (op0type);
8267 bool sat_p = TYPE_SATURATING (TREE_TYPE (subsubexp0));
8268 if (sat_p == 0)
8269 this_optab = zextend_p ? umadd_widen_optab : smadd_widen_optab;
8270 else
8271 this_optab = zextend_p ? usmadd_widen_optab
8272 : ssmadd_widen_optab;
8273 if (mode == GET_MODE_2XWIDER_MODE (innermode)
8274 && (optab_handler (this_optab, mode)->insn_code
8275 != CODE_FOR_nothing))
8277 expand_operands (TREE_OPERAND (subsubexp0, 0),
8278 TREE_OPERAND (subsubexp1, 0),
8279 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8280 op2 = expand_expr (TREE_OPERAND (exp, 1), subtarget,
8281 VOIDmode, EXPAND_NORMAL);
8282 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
8283 target, unsignedp);
8284 gcc_assert (temp);
8285 return REDUCE_BIT_FIELD (temp);
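/* For example: with 16-bit short and 32-bit int, a source expression
   of the shape
     (int) a * (int) b + c    (a, b short; c int)
   matches the test above, and on a target providing
   smadd_widen_optab it becomes a single widening multiply-accumulate
   instead of two extensions, a multiply, and an add. */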
8290 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
8291 something else, make sure we add the register to the constant and
8292 then to the other thing. This case can occur during strength
8293 reduction and doing it this way will produce better code if the
8294 frame pointer or argument pointer is eliminated.
8296 fold-const.c will ensure that the constant is always in the inner
8297 PLUS_EXPR, so the only case we need to do anything about is if
8298 sp, ap, or fp is our second argument, in which case we must swap
8299 the innermost first argument and our second argument. */
8301 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
8302 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
8303 && TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL
8304 && (DECL_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
8305 || DECL_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
8306 || DECL_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
8308 tree t = TREE_OPERAND (exp, 1);
8310 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
8311 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
8314 /* If the result is to be ptr_mode and we are adding an integer to
8315 something, we might be forming a constant. So try to use
8316 plus_constant. If it produces a sum and we can't accept it,
8317 use force_operand. This allows P = &ARR[const] to generate
8318 efficient code on machines where a SYMBOL_REF is not a valid
8319 address.
8321 If this is an EXPAND_SUM call, always return the sum. */
8322 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
8323 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
8325 if (modifier == EXPAND_STACK_PARM)
8326 target = 0;
8327 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
8328 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
8329 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
8331 rtx constant_part;
8333 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
8334 EXPAND_SUM);
8335 /* Use immed_double_const to ensure that the constant is
8336 truncated according to the mode of OP1, then sign extended
8337 to a HOST_WIDE_INT. Using the constant directly can result
8338 in non-canonical RTL in a 64x32 cross compile. */
8339 constant_part
8340 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
8341 (HOST_WIDE_INT) 0,
8342 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
8343 op1 = plus_constant (op1, INTVAL (constant_part));
8344 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8345 op1 = force_operand (op1, target);
8346 return REDUCE_BIT_FIELD (op1);
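/* For example (hypothetical names): expanding &arr[10] under
   EXPAND_SUM can yield (plus (symbol_ref "arr") (const_int 40))
   directly, assuming 4-byte elements; callers that cannot accept
   such a sum run it through force_operand, which is still cheaper
   than materializing each addend separately on machines where a bare
   SYMBOL_REF is not a valid address. */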
8349 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8350 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
8351 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
8353 rtx constant_part;
8355 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8356 (modifier == EXPAND_INITIALIZER
8357 ? EXPAND_INITIALIZER : EXPAND_SUM));
8358 if (! CONSTANT_P (op0))
8360 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
8361 VOIDmode, modifier);
8362 /* Return a PLUS if modifier says it's OK. */
8363 if (modifier == EXPAND_SUM
8364 || modifier == EXPAND_INITIALIZER)
8365 return simplify_gen_binary (PLUS, mode, op0, op1);
8366 goto binop2;
8368 /* Use immed_double_const to ensure that the constant is
8369 truncated according to the mode of OP0, then sign extended
8370 to a HOST_WIDE_INT. Using the constant directly can result
8371 in non-canonical RTL in a 64x32 cross compile. */
8372 constant_part
8373 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
8374 (HOST_WIDE_INT) 0,
8375 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
8376 op0 = plus_constant (op0, INTVAL (constant_part));
8377 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8378 op0 = force_operand (op0, target);
8379 return REDUCE_BIT_FIELD (op0);
8383 /* No sense saving up arithmetic to be done
8384 if it's all in the wrong mode to form part of an address.
8385 And force_operand won't know whether to sign-extend or
8386 zero-extend. */
8387 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8388 || mode != ptr_mode)
8390 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8391 subtarget, &op0, &op1, 0);
8392 if (op0 == const0_rtx)
8393 return op1;
8394 if (op1 == const0_rtx)
8395 return op0;
8396 goto binop2;
8399 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8400 subtarget, &op0, &op1, modifier);
8401 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8403 case MINUS_EXPR:
8404 /* Check if this is a case for multiplication and subtraction. */
8405 if ((TREE_CODE (type) == INTEGER_TYPE
8406 || TREE_CODE (type) == FIXED_POINT_TYPE)
8407 && TREE_CODE (TREE_OPERAND (exp, 1)) == MULT_EXPR)
8409 tree subsubexp0, subsubexp1;
8410 enum tree_code code0, code1, this_code;
8412 subexp1 = TREE_OPERAND (exp, 1);
8413 subsubexp0 = TREE_OPERAND (subexp1, 0);
8414 subsubexp1 = TREE_OPERAND (subexp1, 1);
8415 code0 = TREE_CODE (subsubexp0);
8416 code1 = TREE_CODE (subsubexp1);
8417 this_code = TREE_CODE (type) == INTEGER_TYPE ? NOP_EXPR
8418 : FIXED_CONVERT_EXPR;
8419 if (code0 == this_code && code1 == this_code
8420 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
8421 < TYPE_PRECISION (TREE_TYPE (subsubexp0)))
8422 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
8423 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp1, 0))))
8424 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
8425 == TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subsubexp1, 0)))))
8427 tree op0type = TREE_TYPE (TREE_OPERAND (subsubexp0, 0));
8428 enum machine_mode innermode = TYPE_MODE (op0type);
8429 bool zextend_p = TYPE_UNSIGNED (op0type);
8430 bool sat_p = TYPE_SATURATING (TREE_TYPE (subsubexp0));
8431 if (sat_p == 0)
8432 this_optab = zextend_p ? umsub_widen_optab : smsub_widen_optab;
8433 else
8434 this_optab = zextend_p ? usmsub_widen_optab
8435 : ssmsub_widen_optab;
8436 if (mode == GET_MODE_2XWIDER_MODE (innermode)
8437 && (optab_handler (this_optab, mode)->insn_code
8438 != CODE_FOR_nothing))
8440 expand_operands (TREE_OPERAND (subsubexp0, 0),
8441 TREE_OPERAND (subsubexp1, 0),
8442 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8443 op2 = expand_expr (TREE_OPERAND (exp, 0), subtarget,
8444 VOIDmode, EXPAND_NORMAL);
8445 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
8446 target, unsignedp);
8447 gcc_assert (temp);
8448 return REDUCE_BIT_FIELD (temp);
8453 /* For initializers, we are allowed to return a MINUS of two
8454 symbolic constants; handle here all cases where both operands
8455 are constant. */
8458 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8459 && really_constant_p (TREE_OPERAND (exp, 0))
8460 && really_constant_p (TREE_OPERAND (exp, 1)))
8462 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8463 NULL_RTX, &op0, &op1, modifier);
8465 /* If the last operand is a CONST_INT, use plus_constant of
8466 the negated constant. Else make the MINUS. */
8467 if (GET_CODE (op1) == CONST_INT)
8468 return REDUCE_BIT_FIELD (plus_constant (op0, - INTVAL (op1)));
8469 else
8470 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
8473 /* No sense saving up arithmetic to be done
8474 if it's all in the wrong mode to form part of an address.
8475 And force_operand won't know whether to sign-extend or
8476 zero-extend. */
8477 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8478 || mode != ptr_mode)
8479 goto binop;
8481 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8482 subtarget, &op0, &op1, modifier);
8484 /* Convert A - const to A + (-const). */
8485 if (GET_CODE (op1) == CONST_INT)
8487 op1 = negate_rtx (mode, op1);
8488 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8491 goto binop2;
8493 case MULT_EXPR:
8494 /* If this is a fixed-point operation, then we cannot use the code
8495 below because "expand_mult" doesn't support sat/no-sat fixed-point
8496 multiplications. */
8497 if (ALL_FIXED_POINT_MODE_P (mode))
8498 goto binop;
8500 /* If the first operand is constant, swap them.
8501 Thus the following special-case checks need only
8502 check the second operand. */
8503 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
8505 tree t1 = TREE_OPERAND (exp, 0);
8506 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
8507 TREE_OPERAND (exp, 1) = t1;
8510 /* Attempt to return something suitable for generating an
8511 indexed address, for machines that support that. */
8513 if (modifier == EXPAND_SUM && mode == ptr_mode
8514 && host_integerp (TREE_OPERAND (exp, 1), 0))
8516 tree exp1 = TREE_OPERAND (exp, 1);
8518 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8519 EXPAND_SUM);
8521 if (!REG_P (op0))
8522 op0 = force_operand (op0, NULL_RTX);
8523 if (!REG_P (op0))
8524 op0 = copy_to_mode_reg (mode, op0);
8526 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
8527 gen_int_mode (tree_low_cst (exp1, 0),
8528 TYPE_MODE (TREE_TYPE (exp1)))));
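/* For example: under EXPAND_SUM with a constant multiplier, an index
   computation such as i * 4 arising from arr[i] is returned as
   (mult (reg) (const_int 4)), so the caller can fold it into a
   scaled or indexed addressing mode instead of committing to a
   separate multiply instruction here. */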
8531 if (modifier == EXPAND_STACK_PARM)
8532 target = 0;
8534 /* Check for multiplying things that have been extended
8535 from a narrower type. If this machine supports multiplying
8536 in that narrower type with a result in the desired type,
8537 do it that way, and avoid the explicit type-conversion. */
8539 subexp0 = TREE_OPERAND (exp, 0);
8540 subexp1 = TREE_OPERAND (exp, 1);
8541 /* First, check if we have a multiplication of one signed and one
8542 unsigned operand. */
8543 if (TREE_CODE (subexp0) == NOP_EXPR
8544 && TREE_CODE (subexp1) == NOP_EXPR
8545 && TREE_CODE (type) == INTEGER_TYPE
8546 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
8547 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
8548 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
8549 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp1, 0))))
8550 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
8551 != TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp1, 0)))))
8553 enum machine_mode innermode
8554 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (subexp0, 0)));
8555 this_optab = usmul_widen_optab;
8556 if (mode == GET_MODE_WIDER_MODE (innermode))
8558 if (optab_handler (this_optab, mode)->insn_code != CODE_FOR_nothing)
8560 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp0, 0))))
8561 expand_operands (TREE_OPERAND (subexp0, 0),
8562 TREE_OPERAND (subexp1, 0),
8563 NULL_RTX, &op0, &op1, 0);
8564 else
8565 expand_operands (TREE_OPERAND (subexp0, 0),
8566 TREE_OPERAND (subexp1, 0),
8567 NULL_RTX, &op1, &op0, 0);
8569 goto binop3;
8573 /* Check for a multiplication with matching signedness. */
8574 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
8575 && TREE_CODE (type) == INTEGER_TYPE
8576 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8577 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
8578 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8579 && int_fits_type_p (TREE_OPERAND (exp, 1),
8580 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8581 /* Don't use a widening multiply if a shift will do. */
8582 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
8583 > HOST_BITS_PER_WIDE_INT)
8584 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
8586 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8587 && (TYPE_PRECISION (TREE_TYPE
8588 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8589 == TYPE_PRECISION (TREE_TYPE
8590 (TREE_OPERAND
8591 (TREE_OPERAND (exp, 0), 0))))
8592 /* If both operands are extended, they must either both
8593 be zero-extended or both be sign-extended. */
8594 && (TYPE_UNSIGNED (TREE_TYPE
8595 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8596 == TYPE_UNSIGNED (TREE_TYPE
8597 (TREE_OPERAND
8598 (TREE_OPERAND (exp, 0), 0)))))))
8600 tree op0type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
8601 enum machine_mode innermode = TYPE_MODE (op0type);
8602 bool zextend_p = TYPE_UNSIGNED (op0type);
8603 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
8604 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
8606 if (mode == GET_MODE_2XWIDER_MODE (innermode))
8608 if (optab_handler (this_optab, mode)->insn_code != CODE_FOR_nothing)
8610 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8611 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8612 TREE_OPERAND (exp, 1),
8613 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8614 else
8615 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8616 TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
8617 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8618 goto binop3;
8620 else if (optab_handler (other_optab, mode)->insn_code != CODE_FOR_nothing
8621 && innermode == word_mode)
8623 rtx htem, hipart;
8624 op0 = expand_normal (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
8625 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8626 op1 = convert_modes (innermode, mode,
8627 expand_normal (TREE_OPERAND (exp, 1)),
8628 unsignedp);
8629 else
8630 op1 = expand_normal (TREE_OPERAND (TREE_OPERAND (exp, 1), 0));
8631 temp = expand_binop (mode, other_optab, op0, op1, target,
8632 unsignedp, OPTAB_LIB_WIDEN);
8633 hipart = gen_highpart (innermode, temp);
8634 htem = expand_mult_highpart_adjust (innermode, hipart,
8635 op0, op1, hipart,
8636 zextend_p);
8637 if (htem != hipart)
8638 emit_move_insn (hipart, htem);
8639 return REDUCE_BIT_FIELD (temp);
8643 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8644 subtarget, &op0, &op1, 0);
8645 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8647 case TRUNC_DIV_EXPR:
8648 case FLOOR_DIV_EXPR:
8649 case CEIL_DIV_EXPR:
8650 case ROUND_DIV_EXPR:
8651 case EXACT_DIV_EXPR:
8652 /* If this is a fixed-point operation, then we cannot use the code
8653 below because "expand_divmod" doesn't support sat/no-sat fixed-point
8654 divisions. */
8655 if (ALL_FIXED_POINT_MODE_P (mode))
8656 goto binop;
8658 if (modifier == EXPAND_STACK_PARM)
8659 target = 0;
8660 /* Possible optimization: compute the dividend with EXPAND_SUM;
8661 then, if the divisor is constant, we can optimize the case
8662 where some terms of the dividend have coefficients divisible by it. */
8663 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8664 subtarget, &op0, &op1, 0);
8665 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8667 case RDIV_EXPR:
8668 goto binop;
8670 case TRUNC_MOD_EXPR:
8671 case FLOOR_MOD_EXPR:
8672 case CEIL_MOD_EXPR:
8673 case ROUND_MOD_EXPR:
8674 if (modifier == EXPAND_STACK_PARM)
8675 target = 0;
8676 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8677 subtarget, &op0, &op1, 0);
8678 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8680 case FIXED_CONVERT_EXPR:
8681 op0 = expand_normal (TREE_OPERAND (exp, 0));
8682 if (target == 0 || modifier == EXPAND_STACK_PARM)
8683 target = gen_reg_rtx (mode);
8685 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == INTEGER_TYPE
8686 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
8687 || (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type)))
8688 expand_fixed_convert (target, op0, 1, TYPE_SATURATING (type));
8689 else
8690 expand_fixed_convert (target, op0, 0, TYPE_SATURATING (type));
8691 return target;
8693 case FIX_TRUNC_EXPR:
8694 op0 = expand_normal (TREE_OPERAND (exp, 0));
8695 if (target == 0 || modifier == EXPAND_STACK_PARM)
8696 target = gen_reg_rtx (mode);
8697 expand_fix (target, op0, unsignedp);
8698 return target;
8700 case FLOAT_EXPR:
8701 op0 = expand_normal (TREE_OPERAND (exp, 0));
8702 if (target == 0 || modifier == EXPAND_STACK_PARM)
8703 target = gen_reg_rtx (mode);
8704 /* expand_float can't figure out what to do if FROM has VOIDmode.
8705 So give it the correct mode. With -O, cse will optimize this. */
8706 if (GET_MODE (op0) == VOIDmode)
8707 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8708 op0);
8709 expand_float (target, op0,
8710 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8711 return target;
8713 case NEGATE_EXPR:
8714 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget,
8715 VOIDmode, EXPAND_NORMAL);
8716 if (modifier == EXPAND_STACK_PARM)
8717 target = 0;
8718 temp = expand_unop (mode,
8719 optab_for_tree_code (NEGATE_EXPR, type),
8720 op0, target, 0);
8721 gcc_assert (temp);
8722 return REDUCE_BIT_FIELD (temp);
8724 case ABS_EXPR:
8725 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget,
8726 VOIDmode, EXPAND_NORMAL);
8727 if (modifier == EXPAND_STACK_PARM)
8728 target = 0;
8730 /* ABS_EXPR is not valid for complex arguments. */
8731 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
8732 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
8734 /* Unsigned abs is simply the operand. Testing here means we don't
8735 risk generating incorrect code below. */
8736 if (TYPE_UNSIGNED (type))
8737 return op0;
8739 return expand_abs (mode, op0, target, unsignedp,
8740 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
8742 case MAX_EXPR:
8743 case MIN_EXPR:
8744 target = original_target;
8745 if (target == 0
8746 || modifier == EXPAND_STACK_PARM
8747 || (MEM_P (target) && MEM_VOLATILE_P (target))
8748 || GET_MODE (target) != mode
8749 || (REG_P (target)
8750 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8751 target = gen_reg_rtx (mode);
8752 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8753 target, &op0, &op1, 0);
8755 /* First try to do it with a special MIN or MAX instruction.
8756 If that does not win, use a conditional jump to select the proper
8757 value. */
8758 this_optab = optab_for_tree_code (code, type);
8759 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8760 OPTAB_WIDEN);
8761 if (temp != 0)
8762 return temp;
8764 /* At this point, a MEM target is no longer useful; we will get better
8765 code without it. */
8767 if (! REG_P (target))
8768 target = gen_reg_rtx (mode);
8770 /* If op1 was placed in target, swap op0 and op1. */
8771 if (target != op0 && target == op1)
8773 temp = op0;
8774 op0 = op1;
8775 op1 = temp;
8778 /* We generate better code and avoid problems with op1 mentioning
8779 target by forcing op1 into a pseudo if it isn't a constant. */
8780 if (! CONSTANT_P (op1))
8781 op1 = force_reg (mode, op1);
8784 enum rtx_code comparison_code;
8785 rtx cmpop1 = op1;
8787 if (code == MAX_EXPR)
8788 comparison_code = unsignedp ? GEU : GE;
8789 else
8790 comparison_code = unsignedp ? LEU : LE;
8792 /* Canonicalize to comparisons against 0. */
8793 if (op1 == const1_rtx)
8795 /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
8796 or (a != 0 ? a : 1) for unsigned.
8797 For MIN we are safe converting (a <= 1 ? a : 1)
8798 into (a <= 0 ? a : 1) */
8799 cmpop1 = const0_rtx;
8800 if (code == MAX_EXPR)
8801 comparison_code = unsignedp ? NE : GT;
8803 if (op1 == constm1_rtx && !unsignedp)
8805 /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
8806 and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
8807 cmpop1 = const0_rtx;
8808 if (code == MIN_EXPR)
8809 comparison_code = LT;
8811 #ifdef HAVE_conditional_move
8812 /* Use a conditional move if possible. */
8813 if (can_conditionally_move_p (mode))
8815 rtx insn;
8817 /* ??? Same problem as in expmed.c: emit_conditional_move
8818 forces a stack adjustment via compare_from_rtx, and we
8819 lose the stack adjustment if the sequence we are about
8820 to create is discarded. */
8821 do_pending_stack_adjust ();
8823 start_sequence ();
8825 /* Try to emit the conditional move. */
8826 insn = emit_conditional_move (target, comparison_code,
8827 op0, cmpop1, mode,
8828 op0, op1, mode,
8829 unsignedp);
8831 /* If we could do the conditional move, emit the sequence,
8832 and return. */
8833 if (insn)
8835 rtx seq = get_insns ();
8836 end_sequence ();
8837 emit_insn (seq);
8838 return target;
8841 /* Otherwise discard the sequence and fall back to code with
8842 branches. */
8843 end_sequence ();
8845 #endif
8846 if (target != op0)
8847 emit_move_insn (target, op0);
8849 temp = gen_label_rtx ();
8850 do_compare_rtx_and_jump (target, cmpop1, comparison_code,
8851 unsignedp, mode, NULL_RTX, NULL_RTX, temp);
8853 emit_move_insn (target, op1);
8854 emit_label (temp);
8855 return target;
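/* For example: when neither a min/max instruction nor a conditional
   move is available, the fallback above for MAX_EXPR emits, in
   effect:
       target = op0;
       if (target >= op1) goto done;
       target = op1;
     done:
   with GE/GEU chosen by signedness, and with the canonicalized
   compare against zero when op1 was 1 or -1. */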
8857 case BIT_NOT_EXPR:
8858 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget,
8859 VOIDmode, EXPAND_NORMAL);
8860 if (modifier == EXPAND_STACK_PARM)
8861 target = 0;
8862 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8863 gcc_assert (temp);
8864 return temp;
8866 /* ??? Can optimize bitwise operations with one arg constant.
8867 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8868 and (a bitwise1 b) bitwise2 b (etc)
8869 but that is probably not worthwhile. */
8871 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8872 boolean values when we want in all cases to compute both of them. In
8873 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8874 as actual zero-or-1 values and then bitwise anding. In cases where
8875 there cannot be any side effects, better code would be made by
8876 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8877 how to recognize those cases. */
8879 case TRUTH_AND_EXPR:
8880 code = BIT_AND_EXPR;
8881 case BIT_AND_EXPR:
8882 goto binop;
8884 case TRUTH_OR_EXPR:
8885 code = BIT_IOR_EXPR;
8886 case BIT_IOR_EXPR:
8887 goto binop;
8889 case TRUTH_XOR_EXPR:
8890 code = BIT_XOR_EXPR;
8891 case BIT_XOR_EXPR:
8892 goto binop;
8894 case LSHIFT_EXPR:
8895 case RSHIFT_EXPR:
8896 case LROTATE_EXPR:
8897 case RROTATE_EXPR:
8898 /* If this is a fixed-point operation, then we cannot use the code
8899 below because "expand_shift" doesn't support sat/no-sat fixed-point
8900 shifts. */
8901 if (ALL_FIXED_POINT_MODE_P (mode))
8902 goto binop;
8904 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8905 subtarget = 0;
8906 if (modifier == EXPAND_STACK_PARM)
8907 target = 0;
8908 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget,
8909 VOIDmode, EXPAND_NORMAL);
8910 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8911 unsignedp);
8913 /* Could determine the answer when only additive constants differ. Also,
8914 the addition of one can be handled by changing the condition. */
8915 case LT_EXPR:
8916 case LE_EXPR:
8917 case GT_EXPR:
8918 case GE_EXPR:
8919 case EQ_EXPR:
8920 case NE_EXPR:
8921 case UNORDERED_EXPR:
8922 case ORDERED_EXPR:
8923 case UNLT_EXPR:
8924 case UNLE_EXPR:
8925 case UNGT_EXPR:
8926 case UNGE_EXPR:
8927 case UNEQ_EXPR:
8928 case LTGT_EXPR:
8929 temp = do_store_flag (exp,
8930 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8931 tmode != VOIDmode ? tmode : mode, 0);
8932 if (temp != 0)
8933 return temp;
8935 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8936 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8937 && original_target
8938 && REG_P (original_target)
8939 && (GET_MODE (original_target)
8940 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8942 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8943 VOIDmode, EXPAND_NORMAL);
8945 /* If temp is constant, we can just compute the result. */
8946 if (GET_CODE (temp) == CONST_INT)
8948 if (INTVAL (temp) != 0)
8949 emit_move_insn (target, const1_rtx);
8950 else
8951 emit_move_insn (target, const0_rtx);
8953 return target;
8956 if (temp != original_target)
8958 enum machine_mode mode1 = GET_MODE (temp);
8959 if (mode1 == VOIDmode)
8960 mode1 = tmode != VOIDmode ? tmode : mode;
8962 temp = copy_to_mode_reg (mode1, temp);
8965 op1 = gen_label_rtx ();
8966 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8967 GET_MODE (temp), unsignedp, op1);
8968 emit_move_insn (temp, const1_rtx);
8969 emit_label (op1);
8970 return temp;
8973 /* If no set-flag instruction, must generate a conditional store
8974 into a temporary variable. Drop through and handle this
8975 like && and ||. */
8977 if (! ignore
8978 && (target == 0
8979 || modifier == EXPAND_STACK_PARM
8980 || ! safe_from_p (target, exp, 1)
8981 /* Make sure we don't have a hard reg (such as the function's return
8982 value) live across basic blocks, if not optimizing. */
8983 || (!optimize && REG_P (target)
8984 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8985 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8987 if (target)
8988 emit_move_insn (target, const0_rtx);
8990 op1 = gen_label_rtx ();
8991 jumpifnot (exp, op1);
8993 if (target)
8994 emit_move_insn (target, const1_rtx);
8996 emit_label (op1);
8997 return ignore ? const0_rtx : target;
8999 case TRUTH_NOT_EXPR:
9000 if (modifier == EXPAND_STACK_PARM)
9001 target = 0;
9002 op0 = expand_expr (TREE_OPERAND (exp, 0), target,
9003 VOIDmode, EXPAND_NORMAL);
9004 /* The parser is careful to generate TRUTH_NOT_EXPR
9005 only with operands that are always zero or one. */
9006 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
9007 target, 1, OPTAB_LIB_WIDEN);
9008 gcc_assert (temp);
9009 return temp;
9011 case STATEMENT_LIST:
9013 tree_stmt_iterator iter;
9015 gcc_assert (ignore);
9017 for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
9018 expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
9020 return const0_rtx;
9022 case COND_EXPR:
9023 /* A COND_EXPR with its type being VOID_TYPE represents a
9024 conditional jump and is handled in
9025 expand_gimple_cond_expr. */
9026 gcc_assert (!VOID_TYPE_P (TREE_TYPE (exp)));
9028 /* Note that COND_EXPRs whose type is a structure or union
9029 are required to be constructed to contain assignments of
9030 a temporary variable, so that we can evaluate them here
9031 for side effect only. If type is void, we must do likewise. */
9033 gcc_assert (!TREE_ADDRESSABLE (type)
9034 && !ignore
9035 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node
9036 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node);
9038 /* If we are not to produce a result, we have no target. Otherwise,
9039 if a target was specified use it; it will not be used as an
9040 intermediate target unless it is safe. If no target, use a
9041 temporary. */
9043 if (modifier != EXPAND_STACK_PARM
9044 && original_target
9045 && safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
9046 && GET_MODE (original_target) == mode
9047 #ifdef HAVE_conditional_move
9048 && (! can_conditionally_move_p (mode)
9049 || REG_P (original_target))
9050 #endif
9051 && !MEM_P (original_target))
9052 temp = original_target;
9053 else
9054 temp = assign_temp (type, 0, 0, 1);
9056 do_pending_stack_adjust ();
9057 NO_DEFER_POP;
9058 op0 = gen_label_rtx ();
9059 op1 = gen_label_rtx ();
9060 jumpifnot (TREE_OPERAND (exp, 0), op0);
9061 store_expr (TREE_OPERAND (exp, 1), temp,
9062 modifier == EXPAND_STACK_PARM,
9063 false);
9065 emit_jump_insn (gen_jump (op1));
9066 emit_barrier ();
9067 emit_label (op0);
9068 store_expr (TREE_OPERAND (exp, 2), temp,
9069 modifier == EXPAND_STACK_PARM,
9070 false);
9072 emit_label (op1);
9073 OK_DEFER_POP;
9074 return temp;
9076 case VEC_COND_EXPR:
9077 target = expand_vec_cond_expr (exp, target);
9078 return target;
9080 case MODIFY_EXPR:
9082 tree lhs = TREE_OPERAND (exp, 0);
9083 tree rhs = TREE_OPERAND (exp, 1);
9084 gcc_assert (ignore);
9085 expand_assignment (lhs, rhs, false);
9086 return const0_rtx;
9089 case GIMPLE_MODIFY_STMT:
9091 tree lhs = GIMPLE_STMT_OPERAND (exp, 0);
9092 tree rhs = GIMPLE_STMT_OPERAND (exp, 1);
9094 gcc_assert (ignore);
9096 /* Check for |= or &= of a bitfield of size 1 into another bitfield
9097 of size 1. In this case, (unless we need the result of the
9098 assignment) we can do this more efficiently with a
9099 test followed by an assignment, if necessary.
9101 ??? At this point, we can't get a BIT_FIELD_REF here. But if
9102 things change so we do, this code should be enhanced to
9103 support it. */
9104 if (TREE_CODE (lhs) == COMPONENT_REF
9105 && (TREE_CODE (rhs) == BIT_IOR_EXPR
9106 || TREE_CODE (rhs) == BIT_AND_EXPR)
9107 && TREE_OPERAND (rhs, 0) == lhs
9108 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
9109 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
9110 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
9112 rtx label = gen_label_rtx ();
9113 int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
9114 do_jump (TREE_OPERAND (rhs, 1),
9115 value ? label : 0,
9116 value ? 0 : label);
9117 expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value),
9118 MOVE_NONTEMPORAL (exp));
9119 do_pending_stack_adjust ();
9120 emit_label (label);
9121 return const0_rtx;
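/* An example of the special case just above (hypothetical source, not
   from this file): for one-bit fields,

	struct s { unsigned a : 1, b : 1; } *p;
	p->a |= p->b;

   is expanded as if it were

	if (p->b)
	  p->a = 1;

   replacing a read-modify-write of the destination bitfield with a
   test of the source bit and a store of a constant.  */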
9124 expand_assignment (lhs, rhs, MOVE_NONTEMPORAL (exp));
9125 return const0_rtx;
9128 case RETURN_EXPR:
9129 if (!TREE_OPERAND (exp, 0))
9130 expand_null_return ();
9131 else
9132 expand_return (TREE_OPERAND (exp, 0));
9133 return const0_rtx;
9135 case ADDR_EXPR:
9136 return expand_expr_addr_expr (exp, target, tmode, modifier);
9138 case COMPLEX_EXPR:
9139 /* Get the rtx of the operands. */
9140 op0 = expand_normal (TREE_OPERAND (exp, 0));
9141 op1 = expand_normal (TREE_OPERAND (exp, 1));
9143 if (!target)
9144 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
9146 /* Move the real (op0) and imaginary (op1) parts to their location. */
9147 write_complex_part (target, op0, false);
9148 write_complex_part (target, op1, true);
9150 return target;
9152 case REALPART_EXPR:
9153 op0 = expand_normal (TREE_OPERAND (exp, 0));
9154 return read_complex_part (op0, false);
9156 case IMAGPART_EXPR:
9157 op0 = expand_normal (TREE_OPERAND (exp, 0));
9158 return read_complex_part (op0, true);
9160 case RESX_EXPR:
9161 expand_resx_expr (exp);
9162 return const0_rtx;
9164 case TRY_CATCH_EXPR:
9165 case CATCH_EXPR:
9166 case EH_FILTER_EXPR:
9167 case TRY_FINALLY_EXPR:
9168 /* Lowered by tree-eh.c. */
9169 gcc_unreachable ();
9171 case WITH_CLEANUP_EXPR:
9172 case CLEANUP_POINT_EXPR:
9173 case TARGET_EXPR:
9174 case CASE_LABEL_EXPR:
9175 case VA_ARG_EXPR:
9176 case BIND_EXPR:
9177 case INIT_EXPR:
9178 case CONJ_EXPR:
9179 case COMPOUND_EXPR:
9180 case PREINCREMENT_EXPR:
9181 case PREDECREMENT_EXPR:
9182 case POSTINCREMENT_EXPR:
9183 case POSTDECREMENT_EXPR:
9184 case LOOP_EXPR:
9185 case EXIT_EXPR:
9186 case TRUTH_ANDIF_EXPR:
9187 case TRUTH_ORIF_EXPR:
9188 /* Lowered by gimplify.c. */
9189 gcc_unreachable ();
9191 case CHANGE_DYNAMIC_TYPE_EXPR:
9192 /* This is ignored at the RTL level. The tree level has already
9193 set DECL_POINTER_ALIAS_SET of any affected variable to 0, which
9194 is overkill for the RTL layer but is all that we can
9195 represent. */
9196 return const0_rtx;
9198 case EXC_PTR_EXPR:
9199 return get_exception_pointer (cfun);
9201 case FILTER_EXPR:
9202 return get_exception_filter (cfun);
9204 case FDESC_EXPR:
9205 /* Function descriptors are not valid except as
9206 initialization constants, and should not be expanded. */
9207 gcc_unreachable ();
9209 case SWITCH_EXPR:
9210 expand_case (exp);
9211 return const0_rtx;
9213 case LABEL_EXPR:
9214 expand_label (TREE_OPERAND (exp, 0));
9215 return const0_rtx;
9217 case ASM_EXPR:
9218 expand_asm_expr (exp);
9219 return const0_rtx;
9221 case WITH_SIZE_EXPR:
9222 /* WITH_SIZE_EXPR expands to its first argument. The caller should
9223 have pulled out the size to use in whatever context it needed. */
9224 return expand_expr_real (TREE_OPERAND (exp, 0), original_target, tmode,
9225 modifier, alt_rtl);
9227 case REALIGN_LOAD_EXPR:
9229 tree oprnd0 = TREE_OPERAND (exp, 0);
9230 tree oprnd1 = TREE_OPERAND (exp, 1);
9231 tree oprnd2 = TREE_OPERAND (exp, 2);
9232 rtx op2;
9234 this_optab = optab_for_tree_code (code, type);
9235 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9236 op2 = expand_normal (oprnd2);
9237 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
9238 target, unsignedp);
9239 gcc_assert (temp);
9240 return temp;
9243 case DOT_PROD_EXPR:
9245 tree oprnd0 = TREE_OPERAND (exp, 0);
9246 tree oprnd1 = TREE_OPERAND (exp, 1);
9247 tree oprnd2 = TREE_OPERAND (exp, 2);
9248 rtx op2;
9250 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9251 op2 = expand_normal (oprnd2);
9252 target = expand_widen_pattern_expr (exp, op0, op1, op2,
9253 target, unsignedp);
9254 return target;
9257 case WIDEN_SUM_EXPR:
9259 tree oprnd0 = TREE_OPERAND (exp, 0);
9260 tree oprnd1 = TREE_OPERAND (exp, 1);
9262 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, 0);
9263 target = expand_widen_pattern_expr (exp, op0, NULL_RTX, op1,
9264 target, unsignedp);
9265 return target;
9268 case REDUC_MAX_EXPR:
9269 case REDUC_MIN_EXPR:
9270 case REDUC_PLUS_EXPR:
9272 op0 = expand_normal (TREE_OPERAND (exp, 0));
9273 this_optab = optab_for_tree_code (code, type);
9274 temp = expand_unop (mode, this_optab, op0, target, unsignedp);
9275 gcc_assert (temp);
9276 return temp;
9279 case VEC_EXTRACT_EVEN_EXPR:
9280 case VEC_EXTRACT_ODD_EXPR:
9282 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
9283 NULL_RTX, &op0, &op1, 0);
9284 this_optab = optab_for_tree_code (code, type);
9285 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
9286 OPTAB_WIDEN);
9287 gcc_assert (temp);
9288 return temp;
9291 case VEC_INTERLEAVE_HIGH_EXPR:
9292 case VEC_INTERLEAVE_LOW_EXPR:
9294 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
9295 NULL_RTX, &op0, &op1, 0);
9296 this_optab = optab_for_tree_code (code, type);
9297 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
9298 OPTAB_WIDEN);
9299 gcc_assert (temp);
9300 return temp;
9303 case VEC_LSHIFT_EXPR:
9304 case VEC_RSHIFT_EXPR:
9306 target = expand_vec_shift_expr (exp, target);
9307 return target;
9310 case VEC_UNPACK_HI_EXPR:
9311 case VEC_UNPACK_LO_EXPR:
9313 op0 = expand_normal (TREE_OPERAND (exp, 0));
9314 this_optab = optab_for_tree_code (code, type);
9315 temp = expand_widen_pattern_expr (exp, op0, NULL_RTX, NULL_RTX,
9316 target, unsignedp);
9317 gcc_assert (temp);
9318 return temp;
9321 case VEC_UNPACK_FLOAT_HI_EXPR:
9322 case VEC_UNPACK_FLOAT_LO_EXPR:
9324 op0 = expand_normal (TREE_OPERAND (exp, 0));
9325 /* The signedness is determined from the input operand. */
9326 this_optab = optab_for_tree_code (code,
9327 TREE_TYPE (TREE_OPERAND (exp, 0)));
9328 temp = expand_widen_pattern_expr
9329 (exp, op0, NULL_RTX, NULL_RTX,
9330 target, TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
9332 gcc_assert (temp);
9333 return temp;
9336 case VEC_WIDEN_MULT_HI_EXPR:
9337 case VEC_WIDEN_MULT_LO_EXPR:
9339 tree oprnd0 = TREE_OPERAND (exp, 0);
9340 tree oprnd1 = TREE_OPERAND (exp, 1);
9342 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, 0);
9343 target = expand_widen_pattern_expr (exp, op0, op1, NULL_RTX,
9344 target, unsignedp);
9345 gcc_assert (target);
9346 return target;
9349 case VEC_PACK_TRUNC_EXPR:
9350 case VEC_PACK_SAT_EXPR:
9351 case VEC_PACK_FIX_TRUNC_EXPR:
9353 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9354 goto binop;
9357 case OMP_ATOMIC_LOAD:
9358 case OMP_ATOMIC_STORE:
9359 /* OMP expansion is not run when there have been errors, so these
9360 codes can get here. */
9361 gcc_assert (errorcount != 0);
9362 return NULL_RTX;
9364 default:
9365 return lang_hooks.expand_expr (exp, original_target, tmode,
9366 modifier, alt_rtl);
9369 /* Here to do an ordinary binary operator. */
9370 binop:
9371 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
9372 subtarget, &op0, &op1, 0);
9373 binop2:
9374 this_optab = optab_for_tree_code (code, type);
9375 binop3:
9376 if (modifier == EXPAND_STACK_PARM)
9377 target = 0;
9378 temp = expand_binop (mode, this_optab, op0, op1, target,
9379 unsignedp, OPTAB_LIB_WIDEN);
9380 gcc_assert (temp);
9381 return REDUCE_BIT_FIELD (temp);
9383 #undef REDUCE_BIT_FIELD
9385 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
9386 signedness of TYPE), possibly returning the result in TARGET. */
9387 static rtx
9388 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
9390 HOST_WIDE_INT prec = TYPE_PRECISION (type);
9391 if (target && GET_MODE (target) != GET_MODE (exp))
9392 target = 0;
9393 /* For constant values, reduce using build_int_cst_type. */
9394 if (GET_CODE (exp) == CONST_INT)
9396 HOST_WIDE_INT value = INTVAL (exp);
9397 tree t = build_int_cst_type (type, value);
9398 return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
9400 else if (TYPE_UNSIGNED (type))
9402 rtx mask;
9403 if (prec < HOST_BITS_PER_WIDE_INT)
9404 mask = immed_double_const (((unsigned HOST_WIDE_INT) 1 << prec) - 1, 0,
9405 GET_MODE (exp));
9406 else
9407 mask = immed_double_const ((unsigned HOST_WIDE_INT) -1,
9408 ((unsigned HOST_WIDE_INT) 1
9409 << (prec - HOST_BITS_PER_WIDE_INT)) - 1,
9410 GET_MODE (exp));
9411 return expand_and (GET_MODE (exp), exp, mask, target);
9413 else
9415 tree count = build_int_cst (NULL_TREE,
9416 GET_MODE_BITSIZE (GET_MODE (exp)) - prec);
9417 exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
9418 return expand_shift (RSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
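/* A worked example (illustrative): reducing a 32-bit value to a 3-bit
   field, i.e. prec == 3:

	unsigned type:	x & 7			[mask (1 << 3) - 1]
	signed type:	(x << 29) >> 29		[arithmetic right shift]

   The shift pair replicates bit 2 into the upper bits, yielding a
   sign-extended result in the range [-4, 3].  */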
9422 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that,
9423 when applied to the address of EXP, produces an address known to be
9424 aligned to more than BIGGEST_ALIGNMENT. */
9426 static int
9427 is_aligning_offset (const_tree offset, const_tree exp)
9429 /* Strip off any conversions. */
9430 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9431 || TREE_CODE (offset) == NOP_EXPR
9432 || TREE_CODE (offset) == CONVERT_EXPR)
9433 offset = TREE_OPERAND (offset, 0);
9435 /* We must now have a BIT_AND_EXPR with a constant that is one less than
9436 a power of 2 and which is larger than BIGGEST_ALIGNMENT. */
9437 if (TREE_CODE (offset) != BIT_AND_EXPR
9438 || !host_integerp (TREE_OPERAND (offset, 1), 1)
9439 || compare_tree_int (TREE_OPERAND (offset, 1),
9440 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
9441 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
9442 return 0;
9444 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9445 It must be NEGATE_EXPR. Then strip any more conversions. */
9446 offset = TREE_OPERAND (offset, 0);
9447 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9448 || TREE_CODE (offset) == NOP_EXPR
9449 || TREE_CODE (offset) == CONVERT_EXPR)
9450 offset = TREE_OPERAND (offset, 0);
9452 if (TREE_CODE (offset) != NEGATE_EXPR)
9453 return 0;
9455 offset = TREE_OPERAND (offset, 0);
9456 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9457 || TREE_CODE (offset) == NOP_EXPR
9458 || TREE_CODE (offset) == CONVERT_EXPR)
9459 offset = TREE_OPERAND (offset, 0);
9461 /* This must now be the address of EXP. */
9462 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
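/* The pattern recognized above corresponds to source of roughly this
   shape (illustrative; ALIGN is a hypothetical power of 2 larger than
   BIGGEST_ALIGNMENT):

	offset = -(unsigned long) &exp & (ALIGN - 1);

   that is, a BIT_AND_EXPR whose first operand is a NEGATE_EXPR of
   EXP's address and whose second operand is ALIGN - 1.  Adding such
   an OFFSET to &exp produces an ALIGN-aligned address.  */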
9465 /* Return the tree node if ARG corresponds to a string constant, or zero
9466 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
9467 in bytes within the string that ARG is accessing. The type of the
9468 offset will be `sizetype'. */
9470 tree
9471 string_constant (tree arg, tree *ptr_offset)
9473 tree array, offset, lower_bound;
9474 STRIP_NOPS (arg);
9476 if (TREE_CODE (arg) == ADDR_EXPR)
9478 if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9480 *ptr_offset = size_zero_node;
9481 return TREE_OPERAND (arg, 0);
9483 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
9485 array = TREE_OPERAND (arg, 0);
9486 offset = size_zero_node;
9488 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
9490 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
9491 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
9492 if (TREE_CODE (array) != STRING_CST
9493 && TREE_CODE (array) != VAR_DECL)
9494 return 0;
9496 /* Check if the array has a nonzero lower bound. */
9497 lower_bound = array_ref_low_bound (TREE_OPERAND (arg, 0));
9498 if (!integer_zerop (lower_bound))
9500 /* If the offset and lower bound aren't both constants, return 0. */
9501 if (TREE_CODE (lower_bound) != INTEGER_CST)
9502 return 0;
9503 if (TREE_CODE (offset) != INTEGER_CST)
9504 return 0;
9505 /* Adjust offset by the lower bound. */
9506 offset = size_diffop (fold_convert (sizetype, offset),
9507 fold_convert (sizetype, lower_bound));
9510 else
9511 return 0;
9513 else if (TREE_CODE (arg) == PLUS_EXPR || TREE_CODE (arg) == POINTER_PLUS_EXPR)
9515 tree arg0 = TREE_OPERAND (arg, 0);
9516 tree arg1 = TREE_OPERAND (arg, 1);
9518 STRIP_NOPS (arg0);
9519 STRIP_NOPS (arg1);
9521 if (TREE_CODE (arg0) == ADDR_EXPR
9522 && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
9523 || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
9525 array = TREE_OPERAND (arg0, 0);
9526 offset = arg1;
9528 else if (TREE_CODE (arg1) == ADDR_EXPR
9529 && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
9530 || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
9532 array = TREE_OPERAND (arg1, 0);
9533 offset = arg0;
9535 else
9536 return 0;
9538 else
9539 return 0;
9541 if (TREE_CODE (array) == STRING_CST)
9543 *ptr_offset = fold_convert (sizetype, offset);
9544 return array;
9546 else if (TREE_CODE (array) == VAR_DECL)
9548 int length;
9550 /* Variables initialized to string literals can be handled too. */
9551 if (DECL_INITIAL (array) == NULL_TREE
9552 || TREE_CODE (DECL_INITIAL (array)) != STRING_CST)
9553 return 0;
9555 /* Only handle arrays that are read-only, non-volatile, and bind locally. */
9556 if (! TREE_READONLY (array)
9557 || TREE_SIDE_EFFECTS (array)
9558 || ! targetm.binds_local_p (array))
9559 return 0;
9561 /* Avoid const char foo[4] = "abcde"; */
9562 if (DECL_SIZE_UNIT (array) == NULL_TREE
9563 || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
9564 || (length = TREE_STRING_LENGTH (DECL_INITIAL (array))) <= 0
9565 || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
9566 return 0;
9568 /* If the variable is bigger than the string literal, OFFSET must be
9569 constant and within the bounds of the string literal. */
9570 offset = fold_convert (sizetype, offset);
9571 if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
9572 && (! host_integerp (offset, 1)
9573 || compare_tree_int (offset, length) >= 0))
9574 return 0;
9576 *ptr_offset = offset;
9577 return DECL_INITIAL (array);
9580 return 0;
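/* A usage sketch (hypothetical caller, not from this file): for an
   argument such as &"hello world"[6] (or "hello world" + 6),

	tree off;
	tree str = string_constant (arg, &off);

   sets STR to the STRING_CST "hello world" and OFF to the sizetype
   constant 6, letting callers read through to the literal's bytes.  */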
9583 /* Generate code to calculate EXP using a store-flag instruction
9584 and return an rtx for the result. EXP is either a comparison
9585 or a TRUTH_NOT_EXPR whose operand is a comparison.
9587 If TARGET is nonzero, store the result there if convenient.
9589 If ONLY_CHEAP is nonzero, only do this if it is likely to be very
9590 cheap.
9592 Return zero if there is no suitable set-flag instruction
9593 available on this machine.
9595 Once expand_expr has been called on the arguments of the comparison,
9596 we are committed to doing the store flag, since it is not safe to
9597 re-evaluate the expression. We emit the store-flag insn by calling
9598 emit_store_flag, but only expand the arguments if we have a reason
9599 to believe that emit_store_flag will be successful. If we think that
9600 it will, but it isn't, we have to simulate the store-flag with a
9601 set/jump/set sequence. */
9603 static rtx
9604 do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
9606 enum rtx_code code;
9607 tree arg0, arg1, type;
9608 tree tem;
9609 enum machine_mode operand_mode;
9610 int invert = 0;
9611 int unsignedp;
9612 rtx op0, op1;
9613 enum insn_code icode;
9614 rtx subtarget = target;
9615 rtx result, label;
9617 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9618 result at the end. We can't simply invert the test since it would
9619 have already been inverted if it were valid. This case occurs for
9620 some floating-point comparisons. */
9622 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
9623 invert = 1, exp = TREE_OPERAND (exp, 0);
9625 arg0 = TREE_OPERAND (exp, 0);
9626 arg1 = TREE_OPERAND (exp, 1);
9628 /* Don't crash if the comparison was erroneous. */
9629 if (arg0 == error_mark_node || arg1 == error_mark_node)
9630 return const0_rtx;
9632 type = TREE_TYPE (arg0);
9633 operand_mode = TYPE_MODE (type);
9634 unsignedp = TYPE_UNSIGNED (type);
9636 /* We won't bother with BLKmode store-flag operations because it would mean
9637 passing a lot of information to emit_store_flag. */
9638 if (operand_mode == BLKmode)
9639 return 0;
9641 /* We won't bother with store-flag operations involving function pointers
9642 when function pointers must be canonicalized before comparisons. */
9643 #ifdef HAVE_canonicalize_funcptr_for_compare
9644 if (HAVE_canonicalize_funcptr_for_compare
9645 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9646 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9647 == FUNCTION_TYPE))
9648 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9649 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9650 == FUNCTION_TYPE))))
9651 return 0;
9652 #endif
9654 STRIP_NOPS (arg0);
9655 STRIP_NOPS (arg1);
9657 /* Get the rtx comparison code to use. We know that EXP is a comparison
9658 operation of some type. Some comparisons against 1 and -1 can be
9659 converted to comparisons with zero. Do so here so that the tests
9660 below will be aware that we have a comparison with zero. These
9661 tests will not catch constants in the first operand, but constants
9662 are rarely passed as the first operand. */
9664 switch (TREE_CODE (exp))
9666 case EQ_EXPR:
9667 code = EQ;
9668 break;
9669 case NE_EXPR:
9670 code = NE;
9671 break;
9672 case LT_EXPR:
9673 if (integer_onep (arg1))
9674 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
9675 else
9676 code = unsignedp ? LTU : LT;
9677 break;
9678 case LE_EXPR:
9679 if (! unsignedp && integer_all_onesp (arg1))
9680 arg1 = integer_zero_node, code = LT;
9681 else
9682 code = unsignedp ? LEU : LE;
9683 break;
9684 case GT_EXPR:
9685 if (! unsignedp && integer_all_onesp (arg1))
9686 arg1 = integer_zero_node, code = GE;
9687 else
9688 code = unsignedp ? GTU : GT;
9689 break;
9690 case GE_EXPR:
9691 if (integer_onep (arg1))
9692 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
9693 else
9694 code = unsignedp ? GEU : GE;
9695 break;
9697 case UNORDERED_EXPR:
9698 code = UNORDERED;
9699 break;
9700 case ORDERED_EXPR:
9701 code = ORDERED;
9702 break;
9703 case UNLT_EXPR:
9704 code = UNLT;
9705 break;
9706 case UNLE_EXPR:
9707 code = UNLE;
9708 break;
9709 case UNGT_EXPR:
9710 code = UNGT;
9711 break;
9712 case UNGE_EXPR:
9713 code = UNGE;
9714 break;
9715 case UNEQ_EXPR:
9716 code = UNEQ;
9717 break;
9718 case LTGT_EXPR:
9719 code = LTGT;
9720 break;
9722 default:
9723 gcc_unreachable ();
9726 /* Put a constant second. */
9727 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST
9728 || TREE_CODE (arg0) == FIXED_CST)
9730 tem = arg0; arg0 = arg1; arg1 = tem;
9731 code = swap_condition (code);
9734 /* If this is an equality or inequality test of a single bit, we can
9735 do this by shifting the bit being tested to the low-order bit and
9736 masking the result with the constant 1. If the condition was EQ,
9737 we xor it with 1. This does not require an scc insn and is faster
9738 than an scc insn even if we have it.
9740 The code to make this transformation was moved into fold_single_bit_test,
9741 so we just call into the folder and expand its result. */
9743 if ((code == NE || code == EQ)
9744 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
9745 && integer_pow2p (TREE_OPERAND (arg0, 1)))
9747 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
9748 return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
9749 arg0, arg1, type),
9750 target, VOIDmode, EXPAND_NORMAL);
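/* An illustration of that transformation (hypothetical C): testing a
   single bit needs no store-flag instruction, e.g.

	(x & 8) != 0   ==>   (x >> 3) & 1
	(x & 8) == 0   ==>   ((x >> 3) & 1) ^ 1

   so this case is peeled off before we ask whether an scc insn exists
   for the comparison.  */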
9753 /* Now see if we are likely to be able to do this. Return if not. */
9754 if (! can_compare_p (code, operand_mode, ccp_store_flag))
9755 return 0;
9757 icode = setcc_gen_code[(int) code];
9759 if (icode == CODE_FOR_nothing)
9761 enum machine_mode wmode;
9763 for (wmode = operand_mode;
9764 icode == CODE_FOR_nothing && wmode != VOIDmode;
9765 wmode = GET_MODE_WIDER_MODE (wmode))
9766 icode = optab_handler (cstore_optab, wmode)->insn_code;
9769 if (icode == CODE_FOR_nothing
9770 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
9772 /* We can only do this if it is one of the special cases that
9773 can be handled without an scc insn. */
9774 if ((code == LT && integer_zerop (arg1))
9775 || (! only_cheap && code == GE && integer_zerop (arg1)))
9777 else if (! only_cheap && (code == NE || code == EQ)
9778 && TREE_CODE (type) != REAL_TYPE
9779 && ((optab_handler (abs_optab, operand_mode)->insn_code
9780 != CODE_FOR_nothing)
9781 || (optab_handler (ffs_optab, operand_mode)->insn_code
9782 != CODE_FOR_nothing)))
9784 else
9785 return 0;
9788 if (! get_subtarget (target)
9789 || GET_MODE (subtarget) != operand_mode)
9790 subtarget = 0;
9792 expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);
9794 if (target == 0)
9795 target = gen_reg_rtx (mode);
9797 result = emit_store_flag (target, code, op0, op1,
9798 operand_mode, unsignedp, 1);
9800 if (result)
9802 if (invert)
9803 result = expand_binop (mode, xor_optab, result, const1_rtx,
9804 result, 0, OPTAB_LIB_WIDEN);
9805 return result;
9808 /* If this failed, we have to do this with set/compare/jump/set code. */
9809 if (!REG_P (target)
9810 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
9811 target = gen_reg_rtx (GET_MODE (target));
9813 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
9814 label = gen_label_rtx ();
9815 do_compare_rtx_and_jump (op0, op1, code, unsignedp, operand_mode, NULL_RTX,
9816 NULL_RTX, label);
9818 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
9819 emit_label (label);
9821 return target;
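/* A sketch of the fallback sequence emitted just above, for INVERT == 0
   (illustrative pseudo-code; L is the label created above):

	  target = 1;
	  if (op0 <code> op1) goto L;
	  target = 0;
	L:

   i.e. when no usable store-flag insn exists, the flag value is
   materialized around a conditional jump.  */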
9825 /* Stubs in case we haven't got a casesi insn. */
9826 #ifndef HAVE_casesi
9827 # define HAVE_casesi 0
9828 # define gen_casesi(a, b, c, d, e) (0)
9829 # define CODE_FOR_casesi CODE_FOR_nothing
9830 #endif
9832 /* If the machine does not have a case insn that compares the bounds,
9833 this means extra overhead for dispatch tables, which raises the
9834 threshold for using them. */
9835 #ifndef CASE_VALUES_THRESHOLD
9836 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
9837 #endif /* CASE_VALUES_THRESHOLD */
9839 unsigned int
9840 case_values_threshold (void)
9842 return CASE_VALUES_THRESHOLD;
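/* For example (under the default definition above): a switch with
   three case values is expanded as a compare-and-branch chain, while
   one with five or more becomes a dispatch table; machines lacking a
   bounds-checking casesi insn must emit the range comparison
   explicitly, hence their higher threshold of 5.  */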
9845 /* Attempt to generate a casesi instruction. Returns 1 if successful,
9846 0 otherwise (i.e. if there is no casesi instruction). */
9847 int
9848 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
9849 rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
9851 enum machine_mode index_mode = SImode;
9852 int index_bits = GET_MODE_BITSIZE (index_mode);
9853 rtx op1, op2, index;
9854 enum machine_mode op_mode;
9856 if (! HAVE_casesi)
9857 return 0;
9859 /* Convert the index to SImode. */
9860 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
9862 enum machine_mode omode = TYPE_MODE (index_type);
9863 rtx rangertx = expand_normal (range);
9865 /* We must handle the endpoints in the original mode. */
9866 index_expr = build2 (MINUS_EXPR, index_type,
9867 index_expr, minval);
9868 minval = integer_zero_node;
9869 index = expand_normal (index_expr);
9870 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
9871 omode, 1, default_label);
9872 /* Now we can safely truncate. */
9873 index = convert_to_mode (index_mode, index, 0);
9875 else
9877 if (TYPE_MODE (index_type) != index_mode)
9879 index_type = lang_hooks.types.type_for_size (index_bits, 0);
9880 index_expr = fold_convert (index_type, index_expr);
9883 index = expand_normal (index_expr);
9886 do_pending_stack_adjust ();
9888 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
9889 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
9890 (index, op_mode))
9891 index = copy_to_mode_reg (op_mode, index);
9893 op1 = expand_normal (minval);
9895 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
9896 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
9897 op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
9898 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
9899 (op1, op_mode))
9900 op1 = copy_to_mode_reg (op_mode, op1);
9902 op2 = expand_normal (range);
9904 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
9905 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
9906 op2, TYPE_UNSIGNED (TREE_TYPE (range)));
9907 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
9908 (op2, op_mode))
9909 op2 = copy_to_mode_reg (op_mode, op2);
9911 emit_jump_insn (gen_casesi (index, op1, op2,
9912 table_label, default_label));
9913 return 1;
9916 /* Attempt to generate a tablejump instruction; same concept. */
9917 #ifndef HAVE_tablejump
9918 #define HAVE_tablejump 0
9919 #define gen_tablejump(x, y) (0)
9920 #endif
9922 /* Subroutine of the next function.
9924 INDEX is the value being switched on, with the lowest value
9925 in the table already subtracted.
9926 MODE is its expected mode (needed if INDEX is constant).
9927 RANGE is the length of the jump table.
9928 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
9930 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
9931 index value is out of range. */
9933 static void
9934 do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
9935 rtx default_label)
9937 rtx temp, vector;
9939 if (INTVAL (range) > cfun->max_jumptable_ents)
9940 cfun->max_jumptable_ents = INTVAL (range);
9942 /* Do an unsigned comparison (in the proper mode) between the index
9943 expression and the value which represents the length of the range.
9944 Since we just finished subtracting the lower bound of the range
9945 from the index expression, this comparison allows us to simultaneously
9946 check that the original index expression value is both greater than
9947 or equal to the minimum value of the range and less than or equal to
9948 the maximum value of the range. */
9950 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
9951 default_label);
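/* This is the classic unsigned range-check idiom (illustrative C):

	if ((unsigned) (x - lo) > (unsigned) (hi - lo))
	  goto default_label;

   once LO has been subtracted, any X below LO wraps around to a very
   large unsigned value, so the single GTU branch rejects both X < LO
   and X > HI.  */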
9953 /* If index is in range, it must fit in Pmode.
9954 Convert to Pmode so we can index with it. */
9955 if (mode != Pmode)
9956 index = convert_to_mode (Pmode, index, 1);
9958 /* Don't let a MEM slip through, because then INDEX that comes
9959 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
9960 and break_out_memory_refs will go to work on it and mess it up. */
9961 #ifdef PIC_CASE_VECTOR_ADDRESS
9962 if (flag_pic && !REG_P (index))
9963 index = copy_to_mode_reg (Pmode, index);
9964 #endif
9966 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
9967 GET_MODE_SIZE, because this indicates how large insns are. The other
9968 uses should all be Pmode, because they are addresses. This code
9969 could fail if addresses and insns are not the same size. */
9970 index = gen_rtx_PLUS (Pmode,
9971 gen_rtx_MULT (Pmode, index,
9972 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
9973 gen_rtx_LABEL_REF (Pmode, table_label));
9974 #ifdef PIC_CASE_VECTOR_ADDRESS
9975 if (flag_pic)
9976 index = PIC_CASE_VECTOR_ADDRESS (index);
9977 else
9978 #endif
9979 index = memory_address (CASE_VECTOR_MODE, index);
9980 temp = gen_reg_rtx (CASE_VECTOR_MODE);
9981 vector = gen_const_mem (CASE_VECTOR_MODE, index);
9982 convert_move (temp, vector, 0);
9984 emit_jump_insn (gen_tablejump (temp, table_label));
9986 /* If we are generating PIC code or if the table is PC-relative, the
9987 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
9988 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
9989 emit_barrier ();
9992 int
9993 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
9994 rtx table_label, rtx default_label)
9996 rtx index;
9998 if (! HAVE_tablejump)
9999 return 0;
10001 index_expr = fold_build2 (MINUS_EXPR, index_type,
10002 fold_convert (index_type, index_expr),
10003 fold_convert (index_type, minval));
10004 index = expand_normal (index_expr);
10005 do_pending_stack_adjust ();
10007 do_tablejump (index, TYPE_MODE (index_type),
10008 convert_modes (TYPE_MODE (index_type),
10009 TYPE_MODE (TREE_TYPE (range)),
10010 expand_normal (range),
10011 TYPE_UNSIGNED (TREE_TYPE (range))),
10012 table_label, default_label);
10013 return 1;
10016 /* Nonzero if the mode is a valid vector mode for this architecture.
10017 This returns nonzero even if there is no hardware support for the
10018 vector mode, but we can emulate with narrower modes. */
10020 int
10021 vector_mode_valid_p (enum machine_mode mode)
10023 enum mode_class class = GET_MODE_CLASS (mode);
10024 enum machine_mode innermode;
10026 /* Doh! What's going on? */
10027 if (class != MODE_VECTOR_INT
10028 && class != MODE_VECTOR_FLOAT
10029 && class != MODE_VECTOR_FRACT
10030 && class != MODE_VECTOR_UFRACT
10031 && class != MODE_VECTOR_ACCUM
10032 && class != MODE_VECTOR_UACCUM)
10033 return 0;
10035 /* Hardware support. Woo hoo! */
10036 if (targetm.vector_mode_supported_p (mode))
10037 return 1;
10039 innermode = GET_MODE_INNER (mode);
10041 /* We should probably return 1 if requesting V4DI and we have no DI,
10042 but do have V2DI; however, that case is probably very unlikely. */
10044 /* If we have support for the inner mode, we can safely emulate it.
10045 We may not have V2DI, but we can emulate with a pair of DIs. */
10046 return targetm.scalar_mode_supported_p (innermode);
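/* Example (illustrative): on a target with DImode integer support but
   no vector unit, V2DImode still counts as valid here, since a V2DI
   value can be represented and operated on as a pair of DImode
   words.  */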
10049 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
10050 static rtx
10051 const_vector_from_tree (tree exp)
10053 rtvec v;
10054 int units, i;
10055 tree link, elt;
10056 enum machine_mode inner, mode;
10058 mode = TYPE_MODE (TREE_TYPE (exp));
10060 if (initializer_zerop (exp))
10061 return CONST0_RTX (mode);
10063 units = GET_MODE_NUNITS (mode);
10064 inner = GET_MODE_INNER (mode);
10066 v = rtvec_alloc (units);
10068 link = TREE_VECTOR_CST_ELTS (exp);
10069 for (i = 0; link; link = TREE_CHAIN (link), ++i)
10071 elt = TREE_VALUE (link);
10073 if (TREE_CODE (elt) == REAL_CST)
10074 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
10075 inner);
10076 else if (TREE_CODE (elt) == FIXED_CST)
10077 RTVEC_ELT (v, i) = CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (elt),
10078 inner);
10079 else
10080 RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
10081 TREE_INT_CST_HIGH (elt),
10082 inner);
10085 /* Initialize remaining elements to 0. */
10086 for (; i < units; ++i)
10087 RTVEC_ELT (v, i) = CONST0_RTX (inner);
10089 return gen_rtx_CONST_VECTOR (mode, v);
10091 #include "gt-expr.h"