gcc/expr.c
1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "real.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "flags.h"
31 #include "regs.h"
32 #include "hard-reg-set.h"
33 #include "except.h"
34 #include "function.h"
35 #include "insn-config.h"
36 #include "insn-attr.h"
37 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
38 #include "expr.h"
39 #include "optabs.h"
40 #include "libfuncs.h"
41 #include "recog.h"
42 #include "reload.h"
43 #include "output.h"
44 #include "typeclass.h"
45 #include "toplev.h"
46 #include "ggc.h"
47 #include "langhooks.h"
48 #include "intl.h"
49 #include "tm_p.h"
50 #include "tree-iterator.h"
51 #include "tree-pass.h"
52 #include "tree-flow.h"
53 #include "target.h"
54 #include "timevar.h"
55 #include "df.h"
56 #include "diagnostic.h"
57 #include "ssaexpand.h"
59 /* Decide whether a function's arguments should be processed
60 from first to last or from last to first.
62 They should if the stack and args grow in opposite directions, but
63 only if we have push insns. */
65 #ifdef PUSH_ROUNDING
67 #ifndef PUSH_ARGS_REVERSED
68 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
69 #define PUSH_ARGS_REVERSED /* If it's last to first. */
70 #endif
71 #endif
73 #endif
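/* Worked example under hypothetical target settings: if PUSH_ROUNDING and
   STACK_GROWS_DOWNWARD are defined while ARGS_GROW_DOWNWARD is not, the
   two defined() tests above differ, so PUSH_ARGS_REVERSED gets defined and
   call arguments are processed from last to first.  If the stack and the
   args grow the same way, the macro stays undefined and arguments are
   processed first to last.  */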
75 #ifndef STACK_PUSH_CODE
76 #ifdef STACK_GROWS_DOWNWARD
77 #define STACK_PUSH_CODE PRE_DEC
78 #else
79 #define STACK_PUSH_CODE PRE_INC
80 #endif
81 #endif
84 /* If this is nonzero, we do not bother generating VOLATILE
85 around volatile memory references, and we are willing to
86 output indirect addresses. If cse is to follow, we reject
87 indirect addresses so a useful potential cse is generated;
88 if it is used only once, instruction combination will produce
89 the same indirect address eventually. */
90 int cse_not_expected;
92 /* This structure is used by move_by_pieces to describe the move to
93 be performed. */
94 struct move_by_pieces_d
95 {
96 rtx to;
97 rtx to_addr;
98 int autinc_to;
99 int explicit_inc_to;
100 rtx from;
101 rtx from_addr;
102 int autinc_from;
103 int explicit_inc_from;
104 unsigned HOST_WIDE_INT len;
105 HOST_WIDE_INT offset;
106 int reverse;
107 };
109 /* This structure is used by store_by_pieces to describe the store to
110 be performed. */
112 struct store_by_pieces_d
113 {
114 rtx to;
115 rtx to_addr;
116 int autinc_to;
117 int explicit_inc_to;
118 unsigned HOST_WIDE_INT len;
119 HOST_WIDE_INT offset;
120 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
121 void *constfundata;
122 int reverse;
123 };
125 static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
126 unsigned int,
127 unsigned int);
128 static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
129 struct move_by_pieces_d *);
130 static bool block_move_libcall_safe_for_call_parm (void);
131 static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned, unsigned, HOST_WIDE_INT);
132 static tree emit_block_move_libcall_fn (int);
133 static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
134 static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
135 static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
136 static void store_by_pieces_1 (struct store_by_pieces_d *, unsigned int);
137 static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
138 struct store_by_pieces_d *);
139 static tree clear_storage_libcall_fn (int);
140 static rtx compress_float_constant (rtx, rtx);
141 static rtx get_subtarget (rtx);
142 static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
143 HOST_WIDE_INT, enum machine_mode,
144 tree, tree, int, alias_set_type);
145 static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
146 static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
147 tree, tree, alias_set_type, bool);
149 static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (const_tree, const_tree);
151 static int is_aligning_offset (const_tree, const_tree);
152 static void expand_operands (tree, tree, rtx, rtx*, rtx*,
153 enum expand_modifier);
154 static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
155 static rtx do_store_flag (sepops, rtx, enum machine_mode);
156 #ifdef PUSH_ROUNDING
157 static void emit_single_push_insn (enum machine_mode, rtx, tree);
158 #endif
159 static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
160 static rtx const_vector_from_tree (tree);
161 static void write_complex_part (rtx, rtx, bool);
163 /* Record for each mode whether we can move a register directly to or
164 from an object of that mode in memory. If we can't, we won't try
165 to use that mode directly when accessing a field of that mode. */
167 static char direct_load[NUM_MACHINE_MODES];
168 static char direct_store[NUM_MACHINE_MODES];
170 /* Record for each mode whether we can float-extend from memory. */
172 static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
174 /* This macro is used to determine whether move_by_pieces should be called
175 to perform a structure copy. */
176 #ifndef MOVE_BY_PIECES_P
177 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
178 (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
179 < (unsigned int) MOVE_RATIO (optimize_insn_for_speed_p ()))
180 #endif
182 /* This macro is used to determine whether clear_by_pieces should be
183 called to clear storage. */
184 #ifndef CLEAR_BY_PIECES_P
185 #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
186 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
187 < (unsigned int) CLEAR_RATIO (optimize_insn_for_speed_p ()))
188 #endif
190 /* This macro is used to determine whether store_by_pieces should be
191 called to "memset" storage with byte values other than zero. */
192 #ifndef SET_BY_PIECES_P
193 #define SET_BY_PIECES_P(SIZE, ALIGN) \
194 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
195 < (unsigned int) SET_RATIO (optimize_insn_for_speed_p ()))
196 #endif
198 /* This macro is used to determine whether store_by_pieces should be
199 called to "memcpy" storage when the source is a constant string. */
200 #ifndef STORE_BY_PIECES_P
201 #define STORE_BY_PIECES_P(SIZE, ALIGN) \
202 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
203 < (unsigned int) MOVE_RATIO (optimize_insn_for_speed_p ()))
204 #endif
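/* A rough sketch of how the *_BY_PIECES_P heuristics are applied, using
   hypothetical numbers: for a 16-byte copy whose alignment permits 8-byte
   word moves, move_by_pieces_ninsns returns 2; if MOVE_RATIO for the
   current speed/size setting is, say, 4, MOVE_BY_PIECES_P is true and the
   copy is expanded inline by move_by_pieces instead of going through a
   movmem pattern or a memcpy libcall (see emit_block_move_hints below).
   The actual cut-off depends entirely on the target's MOVE_MAX_PIECES and
   MOVE_RATIO / CLEAR_RATIO / SET_RATIO definitions.  */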
206 /* This array records the insn_code of insns to perform block moves. */
207 enum insn_code movmem_optab[NUM_MACHINE_MODES];
209 /* This array records the insn_code of insns to perform block sets. */
210 enum insn_code setmem_optab[NUM_MACHINE_MODES];
212 /* These arrays record the insn_code of three different kinds of insns
213 to perform block compares. */
214 enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
215 enum insn_code cmpstrn_optab[NUM_MACHINE_MODES];
216 enum insn_code cmpmem_optab[NUM_MACHINE_MODES];
218 /* Synchronization primitives. */
219 enum insn_code sync_add_optab[NUM_MACHINE_MODES];
220 enum insn_code sync_sub_optab[NUM_MACHINE_MODES];
221 enum insn_code sync_ior_optab[NUM_MACHINE_MODES];
222 enum insn_code sync_and_optab[NUM_MACHINE_MODES];
223 enum insn_code sync_xor_optab[NUM_MACHINE_MODES];
224 enum insn_code sync_nand_optab[NUM_MACHINE_MODES];
225 enum insn_code sync_old_add_optab[NUM_MACHINE_MODES];
226 enum insn_code sync_old_sub_optab[NUM_MACHINE_MODES];
227 enum insn_code sync_old_ior_optab[NUM_MACHINE_MODES];
228 enum insn_code sync_old_and_optab[NUM_MACHINE_MODES];
229 enum insn_code sync_old_xor_optab[NUM_MACHINE_MODES];
230 enum insn_code sync_old_nand_optab[NUM_MACHINE_MODES];
231 enum insn_code sync_new_add_optab[NUM_MACHINE_MODES];
232 enum insn_code sync_new_sub_optab[NUM_MACHINE_MODES];
233 enum insn_code sync_new_ior_optab[NUM_MACHINE_MODES];
234 enum insn_code sync_new_and_optab[NUM_MACHINE_MODES];
235 enum insn_code sync_new_xor_optab[NUM_MACHINE_MODES];
236 enum insn_code sync_new_nand_optab[NUM_MACHINE_MODES];
237 enum insn_code sync_compare_and_swap[NUM_MACHINE_MODES];
238 enum insn_code sync_lock_test_and_set[NUM_MACHINE_MODES];
239 enum insn_code sync_lock_release[NUM_MACHINE_MODES];
241 /* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow. */
243 #ifndef SLOW_UNALIGNED_ACCESS
244 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
245 #endif
247 /* This is run to set up which modes can be used
248 directly in memory and to initialize the block move optab. It is run
249 at the beginning of compilation and when the target is reinitialized. */
251 void
252 init_expr_target (void)
254 rtx insn, pat;
255 enum machine_mode mode;
256 int num_clobbers;
257 rtx mem, mem1;
258 rtx reg;
260 /* Try indexing by frame ptr and try by stack ptr.
261 It is known that on the Convex the stack ptr isn't a valid index.
262 With luck, one or the other is valid on any machine. */
263 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
264 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
266 /* A scratch register we can modify in-place below to avoid
267 useless RTL allocations. */
268 reg = gen_rtx_REG (VOIDmode, -1);
270 insn = rtx_alloc (INSN);
271 pat = gen_rtx_SET (VOIDmode, NULL_RTX, NULL_RTX);
272 PATTERN (insn) = pat;
274 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
275 mode = (enum machine_mode) ((int) mode + 1))
277 int regno;
279 direct_load[(int) mode] = direct_store[(int) mode] = 0;
280 PUT_MODE (mem, mode);
281 PUT_MODE (mem1, mode);
282 PUT_MODE (reg, mode);
284 /* See if there is some register that can be used in this mode and
285 directly loaded or stored from memory. */
287 if (mode != VOIDmode && mode != BLKmode)
288 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
289 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
290 regno++)
292 if (! HARD_REGNO_MODE_OK (regno, mode))
293 continue;
295 SET_REGNO (reg, regno);
297 SET_SRC (pat) = mem;
298 SET_DEST (pat) = reg;
299 if (recog (pat, insn, &num_clobbers) >= 0)
300 direct_load[(int) mode] = 1;
302 SET_SRC (pat) = mem1;
303 SET_DEST (pat) = reg;
304 if (recog (pat, insn, &num_clobbers) >= 0)
305 direct_load[(int) mode] = 1;
307 SET_SRC (pat) = reg;
308 SET_DEST (pat) = mem;
309 if (recog (pat, insn, &num_clobbers) >= 0)
310 direct_store[(int) mode] = 1;
312 SET_SRC (pat) = reg;
313 SET_DEST (pat) = mem1;
314 if (recog (pat, insn, &num_clobbers) >= 0)
315 direct_store[(int) mode] = 1;
319 mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
321 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
322 mode = GET_MODE_WIDER_MODE (mode))
324 enum machine_mode srcmode;
325 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
326 srcmode = GET_MODE_WIDER_MODE (srcmode))
328 enum insn_code ic;
330 ic = can_extend_p (mode, srcmode, 0);
331 if (ic == CODE_FOR_nothing)
332 continue;
334 PUT_MODE (mem, srcmode);
336 if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
337 float_extend_from_mem[mode][srcmode] = true;
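/* The tables filled in above are consulted during later expansion.  As a
   condensed sketch of one such use (simplified from convert_move below),
   a narrowing conversion reads a MEM in the narrower mode only when that
   mode can be loaded directly:

     if ((MEM_P (from) && !MEM_VOLATILE_P (from)
          && direct_load[(int) to_mode]
          && !mode_dependent_address_p (XEXP (from, 0)))
         || REG_P (from) || GET_CODE (from) == SUBREG)
       use gen_lowpart (to_mode, from) in place;
     else
       from = force_reg (from_mode, from);

   float_extend_from_mem likewise records, per mode pair, whether a float
   extension can take its operand straight from memory.  */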
342 /* This is run at the start of compiling a function. */
344 void
345 init_expr (void)
347 memset (&crtl->expr, 0, sizeof (crtl->expr));
350 /* Copy data from FROM to TO, where the machine modes are not the same.
351 Both modes may be integer, or both may be floating, or both may be
352 fixed-point.
353 UNSIGNEDP should be nonzero if FROM is an unsigned type.
354 This causes zero-extension instead of sign-extension. */
356 void
357 convert_move (rtx to, rtx from, int unsignedp)
359 enum machine_mode to_mode = GET_MODE (to);
360 enum machine_mode from_mode = GET_MODE (from);
361 int to_real = SCALAR_FLOAT_MODE_P (to_mode);
362 int from_real = SCALAR_FLOAT_MODE_P (from_mode);
363 enum insn_code code;
364 rtx libcall;
366 /* rtx code for making an equivalent value. */
367 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
368 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
371 gcc_assert (to_real == from_real);
372 gcc_assert (to_mode != BLKmode);
373 gcc_assert (from_mode != BLKmode);
375 /* If the source and destination are already the same, then there's
376 nothing to do. */
377 if (to == from)
378 return;
380 /* If FROM is a SUBREG that indicates that we have already done at least
381 the required extension, strip it. We don't handle such SUBREGs as
382 TO here. */
384 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
385 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
386 >= GET_MODE_SIZE (to_mode))
387 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
388 from = gen_lowpart (to_mode, from), from_mode = to_mode;
390 gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));
392 if (to_mode == from_mode
393 || (from_mode == VOIDmode && CONSTANT_P (from)))
395 emit_move_insn (to, from);
396 return;
399 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
401 gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));
403 if (VECTOR_MODE_P (to_mode))
404 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
405 else
406 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
408 emit_move_insn (to, from);
409 return;
412 if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
414 convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
415 convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
416 return;
419 if (to_real)
421 rtx value, insns;
422 convert_optab tab;
424 gcc_assert ((GET_MODE_PRECISION (from_mode)
425 != GET_MODE_PRECISION (to_mode))
426 || (DECIMAL_FLOAT_MODE_P (from_mode)
427 != DECIMAL_FLOAT_MODE_P (to_mode)));
429 if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
430 /* Conversion between decimal float and binary float, same size. */
431 tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
432 else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
433 tab = sext_optab;
434 else
435 tab = trunc_optab;
437 /* Try converting directly if the insn is supported. */
439 code = convert_optab_handler (tab, to_mode, from_mode)->insn_code;
440 if (code != CODE_FOR_nothing)
442 emit_unop_insn (code, to, from,
443 tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
444 return;
447 /* Otherwise use a libcall. */
448 libcall = convert_optab_libfunc (tab, to_mode, from_mode);
450 /* Is this conversion implemented yet? */
451 gcc_assert (libcall);
453 start_sequence ();
454 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
455 1, from, from_mode);
456 insns = get_insns ();
457 end_sequence ();
458 emit_libcall_block (insns, to, value,
459 tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
460 from)
461 : gen_rtx_FLOAT_EXTEND (to_mode, from));
462 return;
465 /* Handle pointer conversion. */ /* SPEE 900220. */
466 /* Targets are expected to provide conversion insns between PxImode and
467 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
468 if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
470 enum machine_mode full_mode
471 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
473 gcc_assert (convert_optab_handler (trunc_optab, to_mode, full_mode)->insn_code
474 != CODE_FOR_nothing);
476 if (full_mode != from_mode)
477 from = convert_to_mode (full_mode, from, unsignedp);
478 emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, full_mode)->insn_code,
479 to, from, UNKNOWN);
480 return;
482 if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
484 rtx new_from;
485 enum machine_mode full_mode
486 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
488 gcc_assert (convert_optab_handler (sext_optab, full_mode, from_mode)->insn_code
489 != CODE_FOR_nothing);
491 if (to_mode == full_mode)
493 emit_unop_insn (convert_optab_handler (sext_optab, full_mode, from_mode)->insn_code,
494 to, from, UNKNOWN);
495 return;
498 new_from = gen_reg_rtx (full_mode);
499 emit_unop_insn (convert_optab_handler (sext_optab, full_mode, from_mode)->insn_code,
500 new_from, from, UNKNOWN);
502 /* else proceed to integer conversions below. */
503 from_mode = full_mode;
504 from = new_from;
507 /* Make sure both are fixed-point modes or both are not. */
508 gcc_assert (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode) ==
509 ALL_SCALAR_FIXED_POINT_MODE_P (to_mode));
510 if (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode))
512 /* If we widen from_mode to to_mode and they are in the same class,
513 we won't saturate the result.
514 Otherwise, always saturate the result to play safe. */
515 if (GET_MODE_CLASS (from_mode) == GET_MODE_CLASS (to_mode)
516 && GET_MODE_SIZE (from_mode) < GET_MODE_SIZE (to_mode))
517 expand_fixed_convert (to, from, 0, 0);
518 else
519 expand_fixed_convert (to, from, 0, 1);
520 return;
523 /* Now both modes are integers. */
525 /* Handle expanding beyond a word. */
526 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
527 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
529 rtx insns;
530 rtx lowpart;
531 rtx fill_value;
532 rtx lowfrom;
533 int i;
534 enum machine_mode lowpart_mode;
535 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
537 /* Try converting directly if the insn is supported. */
538 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
539 != CODE_FOR_nothing)
541 /* If FROM is a SUBREG, put it into a register. Do this
542 so that we always generate the same set of insns for
543 better cse'ing; if an intermediate assignment occurred,
544 we won't be doing the operation directly on the SUBREG. */
545 if (optimize > 0 && GET_CODE (from) == SUBREG)
546 from = force_reg (from_mode, from);
547 emit_unop_insn (code, to, from, equiv_code);
548 return;
550 /* Next, try converting via full word. */
551 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
552 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
553 != CODE_FOR_nothing))
555 rtx word_to = gen_reg_rtx (word_mode);
556 if (REG_P (to))
558 if (reg_overlap_mentioned_p (to, from))
559 from = force_reg (from_mode, from);
560 emit_clobber (to);
562 convert_move (word_to, from, unsignedp);
563 emit_unop_insn (code, to, word_to, equiv_code);
564 return;
567 /* No special multiword conversion insn; do it by hand. */
568 start_sequence ();
570 /* Since we will turn this into a no conflict block, we must ensure
571 that the source does not overlap the target. */
573 if (reg_overlap_mentioned_p (to, from))
574 from = force_reg (from_mode, from);
576 /* Get a copy of FROM widened to a word, if necessary. */
577 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
578 lowpart_mode = word_mode;
579 else
580 lowpart_mode = from_mode;
582 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
584 lowpart = gen_lowpart (lowpart_mode, to);
585 emit_move_insn (lowpart, lowfrom);
587 /* Compute the value to put in each remaining word. */
588 if (unsignedp)
589 fill_value = const0_rtx;
590 else
591 fill_value = emit_store_flag (gen_reg_rtx (word_mode),
592 LT, lowfrom, const0_rtx,
593 VOIDmode, 0, -1);
595 /* Fill the remaining words. */
596 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
598 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
599 rtx subword = operand_subword (to, index, 1, to_mode);
601 gcc_assert (subword);
603 if (fill_value != subword)
604 emit_move_insn (subword, fill_value);
607 insns = get_insns ();
608 end_sequence ();
610 emit_insn (insns);
611 return;
614 /* Truncating multi-word to a word or less. */
615 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
616 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
618 if (!((MEM_P (from)
619 && ! MEM_VOLATILE_P (from)
620 && direct_load[(int) to_mode]
621 && ! mode_dependent_address_p (XEXP (from, 0)))
622 || REG_P (from)
623 || GET_CODE (from) == SUBREG))
624 from = force_reg (from_mode, from);
625 convert_move (to, gen_lowpart (word_mode, from), 0);
626 return;
629 /* Now follow all the conversions between integers
630 no more than a word long. */
632 /* For truncation, usually we can just refer to FROM in a narrower mode. */
633 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
634 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
635 GET_MODE_BITSIZE (from_mode)))
637 if (!((MEM_P (from)
638 && ! MEM_VOLATILE_P (from)
639 && direct_load[(int) to_mode]
640 && ! mode_dependent_address_p (XEXP (from, 0)))
641 || REG_P (from)
642 || GET_CODE (from) == SUBREG))
643 from = force_reg (from_mode, from);
644 if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
645 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
646 from = copy_to_reg (from);
647 emit_move_insn (to, gen_lowpart (to_mode, from));
648 return;
651 /* Handle extension. */
652 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
654 /* Convert directly if that works. */
655 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
656 != CODE_FOR_nothing)
658 emit_unop_insn (code, to, from, equiv_code);
659 return;
661 else
663 enum machine_mode intermediate;
664 rtx tmp;
665 tree shift_amount;
667 /* Search for a mode to convert via. */
668 for (intermediate = from_mode; intermediate != VOIDmode;
669 intermediate = GET_MODE_WIDER_MODE (intermediate))
670 if (((can_extend_p (to_mode, intermediate, unsignedp)
671 != CODE_FOR_nothing)
672 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
673 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
674 GET_MODE_BITSIZE (intermediate))))
675 && (can_extend_p (intermediate, from_mode, unsignedp)
676 != CODE_FOR_nothing))
678 convert_move (to, convert_to_mode (intermediate, from,
679 unsignedp), unsignedp);
680 return;
683 /* No suitable intermediate mode.
684 Generate what we need with shifts. */
685 shift_amount = build_int_cst (NULL_TREE,
686 GET_MODE_BITSIZE (to_mode)
687 - GET_MODE_BITSIZE (from_mode));
688 from = gen_lowpart (to_mode, force_reg (from_mode, from));
689 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
690 to, unsignedp);
691 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
692 to, unsignedp);
693 if (tmp != to)
694 emit_move_insn (to, tmp);
695 return;
699 /* Support special truncate insns for certain modes. */
700 if (convert_optab_handler (trunc_optab, to_mode, from_mode)->insn_code != CODE_FOR_nothing)
702 emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, from_mode)->insn_code,
703 to, from, UNKNOWN);
704 return;
707 /* Handle truncation of volatile memrefs, and so on;
708 the things that couldn't be truncated directly,
709 and for which there was no special instruction.
711 ??? Code above formerly short-circuited this, for most integer
712 mode pairs, with a force_reg in from_mode followed by a recursive
713 call to this routine. Appears always to have been wrong. */
714 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
716 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
717 emit_move_insn (to, temp);
718 return;
721 /* Mode combination is not recognized. */
722 gcc_unreachable ();
725 /* Return an rtx for a value that would result
726 from converting X to mode MODE.
727 Both X and MODE may be floating, or both integer.
728 UNSIGNEDP is nonzero if X is an unsigned value.
729 This can be done by referring to a part of X in place
730 or by copying to a new temporary with conversion. */
732 rtx
733 convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
735 return convert_modes (mode, VOIDmode, x, unsignedp);
738 /* Return an rtx for a value that would result
739 from converting X from mode OLDMODE to mode MODE.
740 Both modes may be floating, or both integer.
741 UNSIGNEDP is nonzero if X is an unsigned value.
743 This can be done by referring to a part of X in place
744 or by copying to a new temporary with conversion.
746 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode. */
748 rtx
749 convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
751 rtx temp;
753 /* If FROM is a SUBREG that indicates that we have already done at least
754 the required extension, strip it. */
756 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
757 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
758 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
759 x = gen_lowpart (mode, x);
761 if (GET_MODE (x) != VOIDmode)
762 oldmode = GET_MODE (x);
764 if (mode == oldmode)
765 return x;
767 /* There is one case that we must handle specially: If we are converting
768 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
769 we are to interpret the constant as unsigned, gen_lowpart will do
770 the wrong thing if the constant appears negative. What we want to do is
771 make the high-order word of the constant zero, not all ones. */
773 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
774 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
775 && CONST_INT_P (x) && INTVAL (x) < 0)
777 double_int val = uhwi_to_double_int (INTVAL (x));
779 /* We need to zero extend VAL. */
780 if (oldmode != VOIDmode)
781 val = double_int_zext (val, GET_MODE_BITSIZE (oldmode));
783 return immed_double_int_const (val, mode);
786 /* We can do this with a gen_lowpart if both desired and current modes
787 are integer, and this is either a constant integer, a register, or a
788 non-volatile MEM. Except for the constant case where MODE is no
789 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
791 if ((CONST_INT_P (x)
792 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
793 || (GET_MODE_CLASS (mode) == MODE_INT
794 && GET_MODE_CLASS (oldmode) == MODE_INT
795 && (GET_CODE (x) == CONST_DOUBLE
796 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
797 && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
798 && direct_load[(int) mode])
799 || (REG_P (x)
800 && (! HARD_REGISTER_P (x)
801 || HARD_REGNO_MODE_OK (REGNO (x), mode))
802 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
803 GET_MODE_BITSIZE (GET_MODE (x)))))))))
805 /* ?? If we don't know OLDMODE, we have to assume here that
806 X does not need sign- or zero-extension. This may not be
807 the case, but it's the best we can do. */
808 if (CONST_INT_P (x) && oldmode != VOIDmode
809 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
811 HOST_WIDE_INT val = INTVAL (x);
812 int width = GET_MODE_BITSIZE (oldmode);
814 /* We must sign or zero-extend in this case. Start by
815 zero-extending, then sign extend if we need to. */
816 val &= ((HOST_WIDE_INT) 1 << width) - 1;
817 if (! unsignedp
818 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
819 val |= (HOST_WIDE_INT) (-1) << width;
821 return gen_int_mode (val, mode);
824 return gen_lowpart (mode, x);
827 /* Converting from an integer constant into MODE is always equivalent to a
828 subreg operation. */
829 if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
831 gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
832 return simplify_gen_subreg (mode, x, oldmode, 0);
835 temp = gen_reg_rtx (mode);
836 convert_move (temp, x, unsignedp);
837 return temp;
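/* A minimal usage sketch (si_reg is a hypothetical SImode pseudo):
   widening a signed SImode value into a fresh DImode value can be written

     rtx wide = convert_to_mode (DImode, si_reg, 0);

   while convert_modes lets the caller name the source mode explicitly,
   which matters when X is a mode-less CONST_INT:

     rtx wide = convert_modes (DImode, SImode, GEN_INT (-1), 0);

   Either call returns X itself, a low part of X, or a new pseudo holding
   the converted value, as described above.  */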
840 /* STORE_MAX_PIECES is the number of bytes at a time that we can
841 store efficiently. Due to internal GCC limitations, this is
842 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
843 for an immediate constant. */
845 #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
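/* For instance, with a 64-bit HOST_WIDE_INT and a target whose
   MOVE_MAX_PIECES is 8 (hypothetical but typical values), this works out
   to MIN (8, 2 * 8) == 8, so constant stores are emitted at most 8 bytes
   at a time.  */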
847 /* Determine whether the LEN bytes can be moved by using several move
848 instructions. Return nonzero if a call to move_by_pieces should
849 succeed. */
851 int
852 can_move_by_pieces (unsigned HOST_WIDE_INT len,
853 unsigned int align ATTRIBUTE_UNUSED)
855 return MOVE_BY_PIECES_P (len, align);
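/* Caller-side sketch (dst, src and n are placeholder names for two BLKmode
   MEMs and a known byte count): a caller that wants the inline expansion
   only when the target heuristics approve can write

     unsigned int align = MIN (MEM_ALIGN (dst), MEM_ALIGN (src));
     if (can_move_by_pieces (n, align))
       move_by_pieces (dst, src, n, align, 0);
     else
       emit_block_move (dst, src, GEN_INT (n), BLOCK_OP_NORMAL);

   where the final 0 asks move_by_pieces to return the original
   destination rather than an end-of-block address.  */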
858 /* Generate several move instructions to copy LEN bytes from block FROM to
859 block TO. (These are MEM rtx's with BLKmode).
861 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
862 used to push FROM to the stack.
864 ALIGN is maximum stack alignment we can assume.
866 If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
867 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
868 stpcpy. */
870 rtx
871 move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
872 unsigned int align, int endp)
874 struct move_by_pieces_d data;
875 enum machine_mode to_addr_mode, from_addr_mode
876 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (from));
877 rtx to_addr, from_addr = XEXP (from, 0);
878 unsigned int max_size = MOVE_MAX_PIECES + 1;
879 enum machine_mode mode = VOIDmode, tmode;
880 enum insn_code icode;
882 align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
884 data.offset = 0;
885 data.from_addr = from_addr;
886 if (to)
888 to_addr_mode = targetm.addr_space.address_mode (MEM_ADDR_SPACE (to));
889 to_addr = XEXP (to, 0);
890 data.to = to;
891 data.autinc_to
892 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
893 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
894 data.reverse
895 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
897 else
899 to_addr_mode = VOIDmode;
900 to_addr = NULL_RTX;
901 data.to = NULL_RTX;
902 data.autinc_to = 1;
903 #ifdef STACK_GROWS_DOWNWARD
904 data.reverse = 1;
905 #else
906 data.reverse = 0;
907 #endif
909 data.to_addr = to_addr;
910 data.from = from;
911 data.autinc_from
912 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
913 || GET_CODE (from_addr) == POST_INC
914 || GET_CODE (from_addr) == POST_DEC);
916 data.explicit_inc_from = 0;
917 data.explicit_inc_to = 0;
918 if (data.reverse) data.offset = len;
919 data.len = len;
921 /* If copying requires more than two move insns,
922 copy addresses to registers (to make displacements shorter)
923 and use post-increment if available. */
924 if (!(data.autinc_from && data.autinc_to)
925 && move_by_pieces_ninsns (len, align, max_size) > 2)
927 /* Find the mode of the largest move... */
928 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
929 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
930 if (GET_MODE_SIZE (tmode) < max_size)
931 mode = tmode;
933 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
935 data.from_addr = copy_to_mode_reg (from_addr_mode,
936 plus_constant (from_addr, len));
937 data.autinc_from = 1;
938 data.explicit_inc_from = -1;
940 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
942 data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
943 data.autinc_from = 1;
944 data.explicit_inc_from = 1;
946 if (!data.autinc_from && CONSTANT_P (from_addr))
947 data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
948 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
950 data.to_addr = copy_to_mode_reg (to_addr_mode,
951 plus_constant (to_addr, len));
952 data.autinc_to = 1;
953 data.explicit_inc_to = -1;
955 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
957 data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
958 data.autinc_to = 1;
959 data.explicit_inc_to = 1;
961 if (!data.autinc_to && CONSTANT_P (to_addr))
962 data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
965 tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
966 if (align >= GET_MODE_ALIGNMENT (tmode))
967 align = GET_MODE_ALIGNMENT (tmode);
968 else
970 enum machine_mode xmode;
972 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
973 tmode != VOIDmode;
974 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
975 if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
976 || SLOW_UNALIGNED_ACCESS (tmode, align))
977 break;
979 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
982 /* First move what we can in the largest integer mode, then go to
983 successively smaller modes. */
985 while (max_size > 1)
987 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
988 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
989 if (GET_MODE_SIZE (tmode) < max_size)
990 mode = tmode;
992 if (mode == VOIDmode)
993 break;
995 icode = optab_handler (mov_optab, mode)->insn_code;
996 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
997 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
999 max_size = GET_MODE_SIZE (mode);
1002 /* The code above should have handled everything. */
1003 gcc_assert (!data.len);
1005 if (endp)
1007 rtx to1;
1009 gcc_assert (!data.reverse);
1010 if (data.autinc_to)
1012 if (endp == 2)
1014 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
1015 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
1016 else
1017 data.to_addr = copy_to_mode_reg (to_addr_mode,
1018 plus_constant (data.to_addr,
1019 -1));
1021 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
1022 data.offset);
1024 else
1026 if (endp == 2)
1027 --data.offset;
1028 to1 = adjust_address (data.to, QImode, data.offset);
1030 return to1;
1032 else
1033 return data.to;
1036 /* Return number of insns required to move L bytes by pieces.
1037 ALIGN (in bits) is maximum alignment we can assume. */
1039 static unsigned HOST_WIDE_INT
1040 move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
1041 unsigned int max_size)
1043 unsigned HOST_WIDE_INT n_insns = 0;
1044 enum machine_mode tmode;
1046 tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
1047 if (align >= GET_MODE_ALIGNMENT (tmode))
1048 align = GET_MODE_ALIGNMENT (tmode);
1049 else
1051 enum machine_mode tmode, xmode;
1053 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
1054 tmode != VOIDmode;
1055 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
1056 if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
1057 || SLOW_UNALIGNED_ACCESS (tmode, align))
1058 break;
1060 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
1063 while (max_size > 1)
1065 enum machine_mode mode = VOIDmode;
1066 enum insn_code icode;
1068 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1069 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1070 if (GET_MODE_SIZE (tmode) < max_size)
1071 mode = tmode;
1073 if (mode == VOIDmode)
1074 break;
1076 icode = optab_handler (mov_optab, mode)->insn_code;
1077 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1078 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1080 max_size = GET_MODE_SIZE (mode);
1083 gcc_assert (!l);
1084 return n_insns;
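/* Worked example with hypothetical mode sizes (QI/HI/SI/DI = 1/2/4/8
   bytes, all fully supported at the given alignment) and MAX_SIZE ==
   MOVE_MAX_PIECES + 1 == 9: for L == 11 the loop counts 11 / 8 = 1 DImode
   move leaving 3 bytes, 3 / 4 = 0 SImode moves, 3 / 2 = 1 HImode move
   leaving 1 byte, and 1 QImode move, for a total of 3 insns.  */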
1087 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1088 with move instructions for mode MODE. GENFUN is the gen_... function
1089 to make a move insn for that mode. DATA has all the other info. */
1091 static void
1092 move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
1093 struct move_by_pieces_d *data)
1095 unsigned int size = GET_MODE_SIZE (mode);
1096 rtx to1 = NULL_RTX, from1;
1098 while (data->len >= size)
1100 if (data->reverse)
1101 data->offset -= size;
1103 if (data->to)
1105 if (data->autinc_to)
1106 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1107 data->offset);
1108 else
1109 to1 = adjust_address (data->to, mode, data->offset);
1112 if (data->autinc_from)
1113 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1114 data->offset);
1115 else
1116 from1 = adjust_address (data->from, mode, data->offset);
1118 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1119 emit_insn (gen_add2_insn (data->to_addr,
1120 GEN_INT (-(HOST_WIDE_INT)size)));
1121 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1122 emit_insn (gen_add2_insn (data->from_addr,
1123 GEN_INT (-(HOST_WIDE_INT)size)));
1125 if (data->to)
1126 emit_insn ((*genfun) (to1, from1));
1127 else
1129 #ifdef PUSH_ROUNDING
1130 emit_single_push_insn (mode, from1, NULL);
1131 #else
1132 gcc_unreachable ();
1133 #endif
1136 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1137 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1138 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1139 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1141 if (! data->reverse)
1142 data->offset += size;
1144 data->len -= size;
1148 /* Emit code to move a block Y to a block X. This may be done with
1149 string-move instructions, with multiple scalar move instructions,
1150 or with a library call.
1152 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1153 SIZE is an rtx that says how long they are.
1154 ALIGN is the maximum alignment we can assume they have.
1155 METHOD describes what kind of copy this is, and what mechanisms may be used.
1157 Return the address of the new block, if memcpy is called and returns it,
1158 0 otherwise. */
1160 rtx
1161 emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method,
1162 unsigned int expected_align, HOST_WIDE_INT expected_size)
1164 bool may_use_call;
1165 rtx retval = 0;
1166 unsigned int align;
1168 switch (method)
1170 case BLOCK_OP_NORMAL:
1171 case BLOCK_OP_TAILCALL:
1172 may_use_call = true;
1173 break;
1175 case BLOCK_OP_CALL_PARM:
1176 may_use_call = block_move_libcall_safe_for_call_parm ();
1178 /* Make inhibit_defer_pop nonzero around the library call
1179 to force it to pop the arguments right away. */
1180 NO_DEFER_POP;
1181 break;
1183 case BLOCK_OP_NO_LIBCALL:
1184 may_use_call = false;
1185 break;
1187 default:
1188 gcc_unreachable ();
1191 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1192 gcc_assert (align >= BITS_PER_UNIT);
1194 gcc_assert (MEM_P (x));
1195 gcc_assert (MEM_P (y));
1196 gcc_assert (size);
1198 /* Make sure we've got BLKmode addresses; store_one_arg can decide that
1199 block copy is more efficient for other large modes, e.g. DCmode. */
1200 x = adjust_address (x, BLKmode, 0);
1201 y = adjust_address (y, BLKmode, 0);
1203 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1204 can be incorrect is coming from __builtin_memcpy. */
1205 if (CONST_INT_P (size))
1207 if (INTVAL (size) == 0)
1208 return 0;
1210 x = shallow_copy_rtx (x);
1211 y = shallow_copy_rtx (y);
1212 set_mem_size (x, size);
1213 set_mem_size (y, size);
1216 if (CONST_INT_P (size) && MOVE_BY_PIECES_P (INTVAL (size), align))
1217 move_by_pieces (x, y, INTVAL (size), align, 0);
1218 else if (emit_block_move_via_movmem (x, y, size, align,
1219 expected_align, expected_size))
1221 else if (may_use_call
1222 && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x))
1223 && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (y)))
1224 retval = emit_block_move_via_libcall (x, y, size,
1225 method == BLOCK_OP_TAILCALL);
1226 else
1227 emit_block_move_via_loop (x, y, size, align);
1229 if (method == BLOCK_OP_CALL_PARM)
1230 OK_DEFER_POP;
1232 return retval;
1235 rtx
1236 emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
1238 return emit_block_move_hints (x, y, size, method, 0, -1);
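/* Minimal sketch of a typical call (dst_mem and src_mem stand for BLKmode
   MEM operands already built by the caller):

     emit_block_move (dst_mem, src_mem, GEN_INT (32), BLOCK_OP_NORMAL);

   which, per emit_block_move_hints above, tries move_by_pieces, then a
   movmem pattern, then a memcpy libcall, and finally an explicit byte
   loop, in that order.  */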
1241 /* A subroutine of emit_block_move. Returns true if calling the
1242 block move libcall will not clobber any parameters which may have
1243 already been placed on the stack. */
1245 static bool
1246 block_move_libcall_safe_for_call_parm (void)
1248 #if defined (REG_PARM_STACK_SPACE)
1249 tree fn;
1250 #endif
1252 /* If arguments are pushed on the stack, then they're safe. */
1253 if (PUSH_ARGS)
1254 return true;
1256 /* If registers go on the stack anyway, any argument is sure to clobber
1257 an outgoing argument. */
1258 #if defined (REG_PARM_STACK_SPACE)
1259 fn = emit_block_move_libcall_fn (false);
1260 /* Avoid set but not used warning if *REG_PARM_STACK_SPACE doesn't
1261 depend on its argument. */
1262 (void) fn;
1263 if (OUTGOING_REG_PARM_STACK_SPACE ((!fn ? NULL_TREE : TREE_TYPE (fn)))
1264 && REG_PARM_STACK_SPACE (fn) != 0)
1265 return false;
1266 #endif
1268 /* If any argument goes in memory, then it might clobber an outgoing
1269 argument. */
1271 CUMULATIVE_ARGS args_so_far;
1272 tree fn, arg;
1274 fn = emit_block_move_libcall_fn (false);
1275 INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);
1277 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1278 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1280 enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1281 rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
1282 if (!tmp || !REG_P (tmp))
1283 return false;
1284 if (targetm.calls.arg_partial_bytes (&args_so_far, mode, NULL, 1))
1285 return false;
1286 FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
1289 return true;
1292 /* A subroutine of emit_block_move. Expand a movmem pattern;
1293 return true if successful. */
1295 static bool
1296 emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
1297 unsigned int expected_align, HOST_WIDE_INT expected_size)
1299 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1300 int save_volatile_ok = volatile_ok;
1301 enum machine_mode mode;
1303 if (expected_align < align)
1304 expected_align = align;
1306 /* Since this is a move insn, we don't care about volatility. */
1307 volatile_ok = 1;
1309 /* Try the most limited insn first, because there's no point
1310 including more than one in the machine description unless
1311 the more limited one has some advantage. */
1313 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1314 mode = GET_MODE_WIDER_MODE (mode))
1316 enum insn_code code = movmem_optab[(int) mode];
1317 insn_operand_predicate_fn pred;
1319 if (code != CODE_FOR_nothing
1320 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1321 here because if SIZE is less than the mode mask, as it is
1322 returned by the macro, it will definitely be less than the
1323 actual mode mask. */
1324 && ((CONST_INT_P (size)
1325 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1326 <= (GET_MODE_MASK (mode) >> 1)))
1327 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1328 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1329 || (*pred) (x, BLKmode))
1330 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1331 || (*pred) (y, BLKmode))
1332 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1333 || (*pred) (opalign, VOIDmode)))
1335 rtx op2;
1336 rtx last = get_last_insn ();
1337 rtx pat;
1339 op2 = convert_to_mode (mode, size, 1);
1340 pred = insn_data[(int) code].operand[2].predicate;
1341 if (pred != 0 && ! (*pred) (op2, mode))
1342 op2 = copy_to_mode_reg (mode, op2);
1344 /* ??? When called via emit_block_move_for_call, it'd be
1345 nice if there were some way to inform the backend, so
1346 that it doesn't fail the expansion because it thinks
1347 emitting the libcall would be more efficient. */
1349 if (insn_data[(int) code].n_operands == 4)
1350 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1351 else
1352 pat = GEN_FCN ((int) code) (x, y, op2, opalign,
1353 GEN_INT (expected_align
1354 / BITS_PER_UNIT),
1355 GEN_INT (expected_size));
1356 if (pat)
1358 emit_insn (pat);
1359 volatile_ok = save_volatile_ok;
1360 return true;
1362 else
1363 delete_insns_since (last);
1367 volatile_ok = save_volatile_ok;
1368 return false;
1371 /* A subroutine of emit_block_move. Expand a call to memcpy.
1372 Return the return value from memcpy, 0 otherwise. */
1374 rtx
1375 emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
1377 rtx dst_addr, src_addr;
1378 tree call_expr, fn, src_tree, dst_tree, size_tree;
1379 enum machine_mode size_mode;
1380 rtx retval;
1382 /* Emit code to copy the addresses of DST and SRC and SIZE into new
1383 pseudos. We can then place those new pseudos into a VAR_DECL and
1384 use them later. */
1386 dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
1387 src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));
1389 dst_addr = convert_memory_address (ptr_mode, dst_addr);
1390 src_addr = convert_memory_address (ptr_mode, src_addr);
1392 dst_tree = make_tree (ptr_type_node, dst_addr);
1393 src_tree = make_tree (ptr_type_node, src_addr);
1395 size_mode = TYPE_MODE (sizetype);
1397 size = convert_to_mode (size_mode, size, 1);
1398 size = copy_to_mode_reg (size_mode, size);
1400 /* It is incorrect to use the libcall calling conventions to call
1401 memcpy in this context. This could be a user call to memcpy and
1402 the user may wish to examine the return value from memcpy. For
1403 targets where libcalls and normal calls have different conventions
1404 for returning pointers, we could end up generating incorrect code. */
1406 size_tree = make_tree (sizetype, size);
1408 fn = emit_block_move_libcall_fn (true);
1409 call_expr = build_call_expr (fn, 3, dst_tree, src_tree, size_tree);
1410 CALL_EXPR_TAILCALL (call_expr) = tailcall;
1412 retval = expand_normal (call_expr);
1414 return retval;
1417 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1418 for the function we use for block copies. The first time FOR_CALL
1419 is true, we call assemble_external. */
1421 static GTY(()) tree block_move_fn;
1423 void
1424 init_block_move_fn (const char *asmspec)
1426 if (!block_move_fn)
1428 tree args, fn;
1430 fn = get_identifier ("memcpy");
1431 args = build_function_type_list (ptr_type_node, ptr_type_node,
1432 const_ptr_type_node, sizetype,
1433 NULL_TREE);
1435 fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
1436 DECL_EXTERNAL (fn) = 1;
1437 TREE_PUBLIC (fn) = 1;
1438 DECL_ARTIFICIAL (fn) = 1;
1439 TREE_NOTHROW (fn) = 1;
1440 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
1441 DECL_VISIBILITY_SPECIFIED (fn) = 1;
1443 block_move_fn = fn;
1446 if (asmspec)
1447 set_user_assembler_name (block_move_fn, asmspec);
1450 static tree
1451 emit_block_move_libcall_fn (int for_call)
1453 static bool emitted_extern;
1455 if (!block_move_fn)
1456 init_block_move_fn (NULL);
1458 if (for_call && !emitted_extern)
1460 emitted_extern = true;
1461 make_decl_rtl (block_move_fn);
1462 assemble_external (block_move_fn);
1465 return block_move_fn;
1468 /* A subroutine of emit_block_move. Copy the data via an explicit
1469 loop. This is used only when libcalls are forbidden. */
1470 /* ??? It'd be nice to copy in hunks larger than QImode. */
1472 static void
1473 emit_block_move_via_loop (rtx x, rtx y, rtx size,
1474 unsigned int align ATTRIBUTE_UNUSED)
1476 rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
1477 enum machine_mode x_addr_mode
1478 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (x));
1479 enum machine_mode y_addr_mode
1480 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (y));
1481 enum machine_mode iter_mode;
1483 iter_mode = GET_MODE (size);
1484 if (iter_mode == VOIDmode)
1485 iter_mode = word_mode;
1487 top_label = gen_label_rtx ();
1488 cmp_label = gen_label_rtx ();
1489 iter = gen_reg_rtx (iter_mode);
1491 emit_move_insn (iter, const0_rtx);
1493 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1494 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1495 do_pending_stack_adjust ();
1497 emit_jump (cmp_label);
1498 emit_label (top_label);
1500 tmp = convert_modes (x_addr_mode, iter_mode, iter, true);
1501 x_addr = gen_rtx_PLUS (x_addr_mode, x_addr, tmp);
1503 if (x_addr_mode != y_addr_mode)
1504 tmp = convert_modes (y_addr_mode, iter_mode, iter, true);
1505 y_addr = gen_rtx_PLUS (y_addr_mode, y_addr, tmp);
1507 x = change_address (x, QImode, x_addr);
1508 y = change_address (y, QImode, y_addr);
1510 emit_move_insn (x, y);
1512 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1513 true, OPTAB_LIB_WIDEN);
1514 if (tmp != iter)
1515 emit_move_insn (iter, tmp);
1517 emit_label (cmp_label);
1519 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1520 true, top_label);
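/* The RTL emitted above behaves like this C sketch, where iter, x and y
   stand in for the generated pseudo and the two adjusted addresses:

     iter = 0;
     goto cmp;
   top:
     ((char *) x)[iter] = ((char *) y)[iter];
     iter = iter + 1;
   cmp:
     if (iter < size)
       goto top;

   so a SIZE of zero copies nothing and the copy proceeds one byte at a
   time.  */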
1523 /* Copy all or part of a value X into registers starting at REGNO.
1524 The number of registers to be filled is NREGS. */
1526 void
1527 move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
1529 int i;
1530 #ifdef HAVE_load_multiple
1531 rtx pat;
1532 rtx last;
1533 #endif
1535 if (nregs == 0)
1536 return;
1538 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1539 x = validize_mem (force_const_mem (mode, x));
1541 /* See if the machine can do this with a load multiple insn. */
1542 #ifdef HAVE_load_multiple
1543 if (HAVE_load_multiple)
1545 last = get_last_insn ();
1546 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1547 GEN_INT (nregs));
1548 if (pat)
1550 emit_insn (pat);
1551 return;
1553 else
1554 delete_insns_since (last);
1556 #endif
1558 for (i = 0; i < nregs; i++)
1559 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1560 operand_subword_force (x, i, mode));
1563 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1564 The number of registers to be filled is NREGS. */
1566 void
1567 move_block_from_reg (int regno, rtx x, int nregs)
1569 int i;
1571 if (nregs == 0)
1572 return;
1574 /* See if the machine can do this with a store multiple insn. */
1575 #ifdef HAVE_store_multiple
1576 if (HAVE_store_multiple)
1578 rtx last = get_last_insn ();
1579 rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1580 GEN_INT (nregs));
1581 if (pat)
1583 emit_insn (pat);
1584 return;
1586 else
1587 delete_insns_since (last);
1589 #endif
1591 for (i = 0; i < nregs; i++)
1593 rtx tem = operand_subword (x, i, 1, BLKmode);
1595 gcc_assert (tem);
1597 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1601 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1602 ORIG, where ORIG is a non-consecutive group of registers represented by
1603 a PARALLEL. The clone is identical to the original except in that the
1604 original set of registers is replaced by a new set of pseudo registers.
1605 The new set has the same modes as the original set. */
1607 rtx
1608 gen_group_rtx (rtx orig)
1610 int i, length;
1611 rtx *tmps;
1613 gcc_assert (GET_CODE (orig) == PARALLEL);
1615 length = XVECLEN (orig, 0);
1616 tmps = XALLOCAVEC (rtx, length);
1618 /* Skip a NULL entry in first slot. */
1619 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1621 if (i)
1622 tmps[0] = 0;
1624 for (; i < length; i++)
1626 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1627 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1629 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1632 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
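/* For reference, a register group of the kind handled here looks roughly
   like the following for a hypothetical 16-byte value passed in two
   DImode registers:

     (parallel [(expr_list (reg:DI 3) (const_int 0))
                (expr_list (reg:DI 4) (const_int 8))])

   where each EXPR_LIST pairs a register with its byte offset into the
   whole value; gen_group_rtx returns the same shape with fresh pseudos
   substituted for the original registers.  */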
1635 /* A subroutine of emit_group_load. Arguments as for emit_group_load,
1636 except that values are placed in TMPS[i], and must later be moved
1637 into corresponding XEXP (XVECEXP (DST, 0, i), 0) element. */
1639 static void
1640 emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
1642 rtx src;
1643 int start, i;
1644 enum machine_mode m = GET_MODE (orig_src);
1646 gcc_assert (GET_CODE (dst) == PARALLEL);
1648 if (m != VOIDmode
1649 && !SCALAR_INT_MODE_P (m)
1650 && !MEM_P (orig_src)
1651 && GET_CODE (orig_src) != CONCAT)
1653 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
1654 if (imode == BLKmode)
1655 src = assign_stack_temp (GET_MODE (orig_src), ssize, 0);
1656 else
1657 src = gen_reg_rtx (imode);
1658 if (imode != BLKmode)
1659 src = gen_lowpart (GET_MODE (orig_src), src);
1660 emit_move_insn (src, orig_src);
1661 /* ...and back again. */
1662 if (imode != BLKmode)
1663 src = gen_lowpart (imode, src);
1664 emit_group_load_1 (tmps, dst, src, type, ssize);
1665 return;
1668 /* Check for a NULL entry, used to indicate that the parameter goes
1669 both on the stack and in registers. */
1670 if (XEXP (XVECEXP (dst, 0, 0), 0))
1671 start = 0;
1672 else
1673 start = 1;
1675 /* Process the pieces. */
1676 for (i = start; i < XVECLEN (dst, 0); i++)
1678 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1679 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1680 unsigned int bytelen = GET_MODE_SIZE (mode);
1681 int shift = 0;
1683 /* Handle trailing fragments that run over the size of the struct. */
1684 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1686 /* Arrange to shift the fragment to where it belongs.
1687 extract_bit_field loads to the lsb of the reg. */
1688 if (
1689 #ifdef BLOCK_REG_PADDING
1690 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1691 == (BYTES_BIG_ENDIAN ? upward : downward)
1692 #else
1693 BYTES_BIG_ENDIAN
1694 #endif
1696 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1697 bytelen = ssize - bytepos;
1698 gcc_assert (bytelen > 0);
1701 /* If we won't be loading directly from memory, protect the real source
1702 from strange tricks we might play; but make sure that the source can
1703 be loaded directly into the destination. */
1704 src = orig_src;
1705 if (!MEM_P (orig_src)
1706 && (!CONSTANT_P (orig_src)
1707 || (GET_MODE (orig_src) != mode
1708 && GET_MODE (orig_src) != VOIDmode)))
1710 if (GET_MODE (orig_src) == VOIDmode)
1711 src = gen_reg_rtx (mode);
1712 else
1713 src = gen_reg_rtx (GET_MODE (orig_src));
1715 emit_move_insn (src, orig_src);
1718 /* Optimize the access just a bit. */
1719 if (MEM_P (src)
1720 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1721 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
1722 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1723 && bytelen == GET_MODE_SIZE (mode))
1725 tmps[i] = gen_reg_rtx (mode);
1726 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
1728 else if (COMPLEX_MODE_P (mode)
1729 && GET_MODE (src) == mode
1730 && bytelen == GET_MODE_SIZE (mode))
1731 /* Let emit_move_complex do the bulk of the work. */
1732 tmps[i] = src;
1733 else if (GET_CODE (src) == CONCAT)
1735 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1736 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1738 if ((bytepos == 0 && bytelen == slen0)
1739 || (bytepos != 0 && bytepos + bytelen <= slen))
1741 /* The following assumes that the concatenated objects all
1742 have the same size. In this case, a simple calculation
1743 can be used to determine the object and the bit field
1744 to be extracted. */
1745 tmps[i] = XEXP (src, bytepos / slen0);
1746 if (! CONSTANT_P (tmps[i])
1747 && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
1748 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
1749 (bytepos % slen0) * BITS_PER_UNIT,
1750 1, NULL_RTX, mode, mode);
1752 else
1754 rtx mem;
1756 gcc_assert (!bytepos);
1757 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1758 emit_move_insn (mem, src);
1759 tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
1760 0, 1, NULL_RTX, mode, mode);
1763 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1764 SIMD register, which is currently broken. Until we get GCC
1765 to emit proper RTL for these cases, let's dump to memory. */
1766 else if (VECTOR_MODE_P (GET_MODE (dst))
1767 && REG_P (src))
1769 int slen = GET_MODE_SIZE (GET_MODE (src));
1770 rtx mem;
1772 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1773 emit_move_insn (mem, src);
1774 tmps[i] = adjust_address (mem, mode, (int) bytepos);
1776 else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
1777 && XVECLEN (dst, 0) > 1)
1778 tmps[i] = simplify_gen_subreg (mode, src, GET_MODE(dst), bytepos);
1779 else if (CONSTANT_P (src))
1781 HOST_WIDE_INT len = (HOST_WIDE_INT) bytelen;
1783 if (len == ssize)
1784 tmps[i] = src;
1785 else
1787 rtx first, second;
1789 gcc_assert (2 * len == ssize);
1790 split_double (src, &first, &second);
1791 if (i)
1792 tmps[i] = second;
1793 else
1794 tmps[i] = first;
1797 else if (REG_P (src) && GET_MODE (src) == mode)
1798 tmps[i] = src;
1799 else
1800 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1801 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
1802 mode, mode);
1804 if (shift)
1805 tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
1806 build_int_cst (NULL_TREE, shift), tmps[i], 0);
1810 /* Emit code to move a block SRC of type TYPE to a block DST,
1811 where DST is non-consecutive registers represented by a PARALLEL.
1812 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
1813 if not known. */
1815 void
1816 emit_group_load (rtx dst, rtx src, tree type, int ssize)
1818 rtx *tmps;
1819 int i;
1821 tmps = XALLOCAVEC (rtx, XVECLEN (dst, 0));
1822 emit_group_load_1 (tmps, dst, src, type, ssize);
1824 /* Copy the extracted pieces into the proper (probable) hard regs. */
1825 for (i = 0; i < XVECLEN (dst, 0); i++)
1827 rtx d = XEXP (XVECEXP (dst, 0, i), 0);
1828 if (d == NULL)
1829 continue;
1830 emit_move_insn (d, tmps[i]);
1834 /* Similar, but load SRC into new pseudos in a format that looks like
1835 PARALLEL. This can later be fed to emit_group_move to get things
1836 in the right place. */
1839 emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
1841 rtvec vec;
1842 int i;
1844 vec = rtvec_alloc (XVECLEN (parallel, 0));
1845 emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);
1847 /* Convert the vector to look just like the original PARALLEL, except
1848 with the computed values. */
1849 for (i = 0; i < XVECLEN (parallel, 0); i++)
1851 rtx e = XVECEXP (parallel, 0, i);
1852 rtx d = XEXP (e, 0);
1854 if (d)
1856 d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
1857 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
1859 RTVEC_ELT (vec, i) = e;
1862 return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
1865 /* Emit code to move a block SRC to block DST, where SRC and DST are
1866 non-consecutive groups of registers, each represented by a PARALLEL. */
1868 void
1869 emit_group_move (rtx dst, rtx src)
1871 int i;
1873 gcc_assert (GET_CODE (src) == PARALLEL
1874 && GET_CODE (dst) == PARALLEL
1875 && XVECLEN (src, 0) == XVECLEN (dst, 0));
1877 /* Skip first entry if NULL. */
1878 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1879 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1880 XEXP (XVECEXP (src, 0, i), 0));
1883 /* Move a group of registers represented by a PARALLEL into pseudos. */
1886 emit_group_move_into_temps (rtx src)
1888 rtvec vec = rtvec_alloc (XVECLEN (src, 0));
1889 int i;
1891 for (i = 0; i < XVECLEN (src, 0); i++)
1893 rtx e = XVECEXP (src, 0, i);
1894 rtx d = XEXP (e, 0);
1896 if (d)
1897 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
1898 RTVEC_ELT (vec, i) = e;
1901 return gen_rtx_PARALLEL (GET_MODE (src), vec);
1904 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1905 where SRC is non-consecutive registers represented by a PARALLEL.
1906 SSIZE represents the total size of block ORIG_DST, or -1 if not
1907 known. */
1909 void
1910 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
1912 rtx *tmps, dst;
1913 int start, finish, i;
1914 enum machine_mode m = GET_MODE (orig_dst);
1916 gcc_assert (GET_CODE (src) == PARALLEL);
1918 if (!SCALAR_INT_MODE_P (m)
1919 && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
1921 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
1922 if (imode == BLKmode)
1923 dst = assign_stack_temp (GET_MODE (orig_dst), ssize, 0);
1924 else
1925 dst = gen_reg_rtx (imode);
1926 emit_group_store (dst, src, type, ssize);
1927 if (imode != BLKmode)
1928 dst = gen_lowpart (GET_MODE (orig_dst), dst);
1929 emit_move_insn (orig_dst, dst);
1930 return;
1933 /* Check for a NULL entry, used to indicate that the parameter goes
1934 both on the stack and in registers. */
1935 if (XEXP (XVECEXP (src, 0, 0), 0))
1936 start = 0;
1937 else
1938 start = 1;
1939 finish = XVECLEN (src, 0);
1941 tmps = XALLOCAVEC (rtx, finish);
1943 /* Copy the (probable) hard regs into pseudos. */
1944 for (i = start; i < finish; i++)
1946 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1947 if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
1949 tmps[i] = gen_reg_rtx (GET_MODE (reg));
1950 emit_move_insn (tmps[i], reg);
1952 else
1953 tmps[i] = reg;
1956 /* If we won't be storing directly into memory, protect the real destination
1957 from strange tricks we might play. */
1958 dst = orig_dst;
1959 if (GET_CODE (dst) == PARALLEL)
1961 rtx temp;
1963 /* We can get a PARALLEL dst if there is a conditional expression in
1964 a return statement. In that case, the dst and src are the same,
1965 so no action is necessary. */
1966 if (rtx_equal_p (dst, src))
1967 return;
1969 /* It is unclear if we can ever reach here, but we may as well handle
1970 it. Allocate a temporary, and split this into a store/load to/from
1971 the temporary. */
1973 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
1974 emit_group_store (temp, src, type, ssize);
1975 emit_group_load (dst, temp, type, ssize);
1976 return;
1978 else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
1980 enum machine_mode outer = GET_MODE (dst);
1981 enum machine_mode inner;
1982 HOST_WIDE_INT bytepos;
1983 bool done = false;
1984 rtx temp;
1986 if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
1987 dst = gen_reg_rtx (outer);
1989 /* Make life a bit easier for combine. */
1990 /* If the first element of the vector is the low part
1991 of the destination mode, use a paradoxical subreg to
1992 initialize the destination. */
1993 if (start < finish)
1995 inner = GET_MODE (tmps[start]);
1996 bytepos = subreg_lowpart_offset (inner, outer);
1997 if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
1999 temp = simplify_gen_subreg (outer, tmps[start],
2000 inner, 0);
2001 if (temp)
2003 emit_move_insn (dst, temp);
2004 done = true;
2005 start++;
2010 /* If the first element wasn't the low part, try the last. */
2011 if (!done
2012 && start < finish - 1)
2014 inner = GET_MODE (tmps[finish - 1]);
2015 bytepos = subreg_lowpart_offset (inner, outer);
2016 if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
2018 temp = simplify_gen_subreg (outer, tmps[finish - 1],
2019 inner, 0);
2020 if (temp)
2022 emit_move_insn (dst, temp);
2023 done = true;
2024 finish--;
2029 /* Otherwise, simply initialize the result to zero. */
2030 if (!done)
2031 emit_move_insn (dst, CONST0_RTX (outer));
2034 /* Process the pieces. */
2035 for (i = start; i < finish; i++)
2037 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2038 enum machine_mode mode = GET_MODE (tmps[i]);
2039 unsigned int bytelen = GET_MODE_SIZE (mode);
2040 unsigned int adj_bytelen = bytelen;
2041 rtx dest = dst;
2043 /* Handle trailing fragments that run over the size of the struct. */
2044 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2045 adj_bytelen = ssize - bytepos;
2047 if (GET_CODE (dst) == CONCAT)
2049 if (bytepos + adj_bytelen
2050 <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2051 dest = XEXP (dst, 0);
2052 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2054 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2055 dest = XEXP (dst, 1);
2057 else
2059 enum machine_mode dest_mode = GET_MODE (dest);
2060 enum machine_mode tmp_mode = GET_MODE (tmps[i]);
2062 gcc_assert (bytepos == 0 && XVECLEN (src, 0));
2064 if (GET_MODE_ALIGNMENT (dest_mode)
2065 >= GET_MODE_ALIGNMENT (tmp_mode))
2067 dest = assign_stack_temp (dest_mode,
2068 GET_MODE_SIZE (dest_mode),
2070 emit_move_insn (adjust_address (dest,
2071 tmp_mode,
2072 bytepos),
2073 tmps[i]);
2074 dst = dest;
2076 else
2078 dest = assign_stack_temp (tmp_mode,
2079 GET_MODE_SIZE (tmp_mode),
2081 emit_move_insn (dest, tmps[i]);
2082 dst = adjust_address (dest, dest_mode, bytepos);
2084 break;
2088 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2090 /* store_bit_field always takes its value from the lsb.
2091 Move the fragment to the lsb if it's not already there. */
2092 if (
2093 #ifdef BLOCK_REG_PADDING
2094 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2095 == (BYTES_BIG_ENDIAN ? upward : downward)
2096 #else
2097 BYTES_BIG_ENDIAN
2098 #endif
2101 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2102 tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
2103 build_int_cst (NULL_TREE, shift),
2104 tmps[i], 0);
2106 bytelen = adj_bytelen;
2109 /* Optimize the access just a bit. */
2110 if (MEM_P (dest)
2111 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2112 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2113 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2114 && bytelen == GET_MODE_SIZE (mode))
2115 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2116 else
2117 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2118 mode, tmps[i]);
2121 /* Copy from the pseudo into the (probable) hard reg. */
2122 if (orig_dst != dst)
2123 emit_move_insn (orig_dst, dst);
2126 /* Generate code to copy a BLKmode object of TYPE out of a
2127 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2128 is null, a stack temporary is created. TGTBLK is returned.
2130 The purpose of this routine is to handle functions that return
2131 BLKmode structures in registers. Some machines (the PA for example)
2132 want to return all small structures in registers regardless of the
2133 structure's alignment. */
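/* Worked example, an illustration only and not from the original sources:
   with 4-byte words, a 6-byte structure gives bytes % UNITS_PER_WORD == 2,
   so when SRCREG is padded on the left PADDING_CORRECTION becomes
   (4 - 2) * BITS_PER_UNIT == 16; the copy loop below then starts reading
   16 bits into the first source word while writing from bit 0 of TGTBLK.  */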
2136 copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
2138 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2139 rtx src = NULL, dst = NULL;
2140 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2141 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2142 enum machine_mode copy_mode;
2144 if (tgtblk == 0)
2146 tgtblk = assign_temp (build_qualified_type (type,
2147 (TYPE_QUALS (type)
2148 | TYPE_QUAL_CONST)),
2149 0, 1, 1);
2150 preserve_temp_slots (tgtblk);
2153 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2154 into a new pseudo which is a full word. */
2156 if (GET_MODE (srcreg) != BLKmode
2157 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2158 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2160 /* If the structure doesn't take up a whole number of words, see whether
2161 SRCREG is padded on the left or on the right. If it's on the left,
2162 set PADDING_CORRECTION to the number of bits to skip.
2164 In most ABIs, the structure will be returned at the least significant end of
2165 the register, which translates to right padding on little-endian
2166 targets and left padding on big-endian targets. The opposite
2167 holds if the structure is returned at the most significant
2168 end of the register. */
2169 if (bytes % UNITS_PER_WORD != 0
2170 && (targetm.calls.return_in_msb (type)
2171 ? !BYTES_BIG_ENDIAN
2172 : BYTES_BIG_ENDIAN))
2173 padding_correction
2174 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2176 /* Copy the structure BITSIZE bits at a time. If the target lives in
2177 memory, take care of not reading/writing past its end by selecting
2178 a copy mode suited to BITSIZE. This should always be possible given
2179 how it is computed.
2181 We could probably emit more efficient code for machines which do not use
2182 strict alignment, but it doesn't seem worth the effort at the current
2183 time. */
2185 copy_mode = word_mode;
2186 if (MEM_P (tgtblk))
2188 enum machine_mode mem_mode = mode_for_size (bitsize, MODE_INT, 1);
2189 if (mem_mode != BLKmode)
2190 copy_mode = mem_mode;
2193 for (bitpos = 0, xbitpos = padding_correction;
2194 bitpos < bytes * BITS_PER_UNIT;
2195 bitpos += bitsize, xbitpos += bitsize)
2197 /* We need a new source operand each time xbitpos is on a
2198 word boundary and when xbitpos == padding_correction
2199 (the first time through). */
2200 if (xbitpos % BITS_PER_WORD == 0
2201 || xbitpos == padding_correction)
2202 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2203 GET_MODE (srcreg));
2205 /* We need a new destination operand each time bitpos is on
2206 a word boundary. */
2207 if (bitpos % BITS_PER_WORD == 0)
2208 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2210 /* Use xbitpos for the source extraction (right justified) and
2211 bitpos for the destination store (left justified). */
2212 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, copy_mode,
2213 extract_bit_field (src, bitsize,
2214 xbitpos % BITS_PER_WORD, 1,
2215 NULL_RTX, copy_mode, copy_mode));
2218 return tgtblk;
2221 /* Add a USE expression for REG to the (possibly empty) list pointed
2222 to by CALL_FUSAGE. REG must denote a hard register. */
2224 void
2225 use_reg (rtx *call_fusage, rtx reg)
2227 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2229 *call_fusage
2230 = gen_rtx_EXPR_LIST (VOIDmode,
2231 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2234 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2235 starting at REGNO. All of these registers must be hard registers. */
2237 void
2238 use_regs (rtx *call_fusage, int regno, int nregs)
2240 int i;
2242 gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2244 for (i = 0; i < nregs; i++)
2245 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2248 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2249 PARALLEL REGS. This is for calls that pass values in multiple
2250 non-contiguous locations. The Irix 6 ABI has examples of this. */
2252 void
2253 use_group_regs (rtx *call_fusage, rtx regs)
2255 int i;
2257 for (i = 0; i < XVECLEN (regs, 0); i++)
2259 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2261 /* A NULL entry means the parameter goes both on the stack and in
2262 registers. This can also be a MEM for targets that pass values
2263 partially on the stack and partially in registers. */
2264 if (reg != 0 && REG_P (reg))
2265 use_reg (call_fusage, reg);
2269 /* Return the defining gimple statement for SSA_NAME NAME if it is an
2270 assignment and the code of the expression on the RHS is CODE. Return
2271 NULL otherwise. */
2273 static gimple
2274 get_def_for_expr (tree name, enum tree_code code)
2276 gimple def_stmt;
2278 if (TREE_CODE (name) != SSA_NAME)
2279 return NULL;
2281 def_stmt = get_gimple_for_ssa_name (name);
2282 if (!def_stmt
2283 || gimple_assign_rhs_code (def_stmt) != code)
2284 return NULL;
2286 return def_stmt;
2290 /* Determine whether the LEN bytes generated by CONSTFUN can be
2291 stored to memory using several move instructions. CONSTFUNDATA is
2292 a pointer which will be passed as argument in every CONSTFUN call.
2293 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2294 a memset operation and false if it's a copy of a constant string.
2295 Return nonzero if a call to store_by_pieces should succeed. */
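/* A hedged usage sketch, not taken from the original sources: callers
   typically pair this predicate with store_by_pieces, e.g.

     if (can_store_by_pieces (len, builtin_memset_read_str, &c,
			      dest_align, true))
       store_by_pieces (dest_mem, len, builtin_memset_read_str, &c,
			dest_align, true, 0);

   builtin_memset_read_str is the constfun used by the memset expander in
   builtins.c; the surrounding variable names are assumptions here.  */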
2298 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2299 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2300 void *constfundata, unsigned int align, bool memsetp)
2302 unsigned HOST_WIDE_INT l;
2303 unsigned int max_size;
2304 HOST_WIDE_INT offset = 0;
2305 enum machine_mode mode, tmode;
2306 enum insn_code icode;
2307 int reverse;
2308 rtx cst;
2310 if (len == 0)
2311 return 1;
2313 if (! (memsetp
2314 ? SET_BY_PIECES_P (len, align)
2315 : STORE_BY_PIECES_P (len, align)))
2316 return 0;
2318 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2319 if (align >= GET_MODE_ALIGNMENT (tmode))
2320 align = GET_MODE_ALIGNMENT (tmode);
2321 else
2323 enum machine_mode xmode;
2325 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2326 tmode != VOIDmode;
2327 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2328 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2329 || SLOW_UNALIGNED_ACCESS (tmode, align))
2330 break;
2332 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2335 /* We would first store what we can in the largest integer mode, then go to
2336 successively smaller modes. */
2338 for (reverse = 0;
2339 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2340 reverse++)
2342 l = len;
2343 mode = VOIDmode;
2344 max_size = STORE_MAX_PIECES + 1;
2345 while (max_size > 1)
2347 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2348 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2349 if (GET_MODE_SIZE (tmode) < max_size)
2350 mode = tmode;
2352 if (mode == VOIDmode)
2353 break;
2355 icode = optab_handler (mov_optab, mode)->insn_code;
2356 if (icode != CODE_FOR_nothing
2357 && align >= GET_MODE_ALIGNMENT (mode))
2359 unsigned int size = GET_MODE_SIZE (mode);
2361 while (l >= size)
2363 if (reverse)
2364 offset -= size;
2366 cst = (*constfun) (constfundata, offset, mode);
2367 if (!LEGITIMATE_CONSTANT_P (cst))
2368 return 0;
2370 if (!reverse)
2371 offset += size;
2373 l -= size;
2377 max_size = GET_MODE_SIZE (mode);
2380 /* The code above should have handled everything. */
2381 gcc_assert (!l);
2384 return 1;
2387 /* Generate several move instructions to store LEN bytes generated by
2388 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2389 pointer which will be passed as argument in every CONSTFUN call.
2390 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2391 a memset operation and false if it's a copy of a constant string.
2392 If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
2393 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
2394 stpcpy. */
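/* Concrete illustration, not from the original sources: for LEN == 16,
   ENDP == 0 returns TO itself, ENDP == 1 returns a QImode MEM addressing
   TO + 16 (the byte just past the store), and ENDP == 2 one addressing
   TO + 15.  */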
2397 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2398 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2399 void *constfundata, unsigned int align, bool memsetp, int endp)
2401 enum machine_mode to_addr_mode
2402 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (to));
2403 struct store_by_pieces_d data;
2405 if (len == 0)
2407 gcc_assert (endp != 2);
2408 return to;
2411 gcc_assert (memsetp
2412 ? SET_BY_PIECES_P (len, align)
2413 : STORE_BY_PIECES_P (len, align));
2414 data.constfun = constfun;
2415 data.constfundata = constfundata;
2416 data.len = len;
2417 data.to = to;
2418 store_by_pieces_1 (&data, align);
2419 if (endp)
2421 rtx to1;
2423 gcc_assert (!data.reverse);
2424 if (data.autinc_to)
2426 if (endp == 2)
2428 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2429 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2430 else
2431 data.to_addr = copy_to_mode_reg (to_addr_mode,
2432 plus_constant (data.to_addr,
2433 -1));
2435 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2436 data.offset);
2438 else
2440 if (endp == 2)
2441 --data.offset;
2442 to1 = adjust_address (data.to, QImode, data.offset);
2444 return to1;
2446 else
2447 return data.to;
2450 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2451 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2453 static void
2454 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2456 struct store_by_pieces_d data;
2458 if (len == 0)
2459 return;
2461 data.constfun = clear_by_pieces_1;
2462 data.constfundata = NULL;
2463 data.len = len;
2464 data.to = to;
2465 store_by_pieces_1 (&data, align);
2468 /* Callback routine for clear_by_pieces.
2469 Return const0_rtx unconditionally. */
2471 static rtx
2472 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2473 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2474 enum machine_mode mode ATTRIBUTE_UNUSED)
2476 return const0_rtx;
2479 /* Subroutine of clear_by_pieces and store_by_pieces.
2480 Generate several move instructions to store LEN bytes of block TO. (A MEM
2481 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2483 static void
2484 store_by_pieces_1 (struct store_by_pieces_d *data ATTRIBUTE_UNUSED,
2485 unsigned int align ATTRIBUTE_UNUSED)
2487 enum machine_mode to_addr_mode
2488 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (data->to));
2489 rtx to_addr = XEXP (data->to, 0);
2490 unsigned int max_size = STORE_MAX_PIECES + 1;
2491 enum machine_mode mode = VOIDmode, tmode;
2492 enum insn_code icode;
2494 data->offset = 0;
2495 data->to_addr = to_addr;
2496 data->autinc_to
2497 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2498 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2500 data->explicit_inc_to = 0;
2501 data->reverse
2502 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2503 if (data->reverse)
2504 data->offset = data->len;
2506 /* If storing requires more than two move insns,
2507 copy addresses to registers (to make displacements shorter)
2508 and use post-increment if available. */
2509 if (!data->autinc_to
2510 && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2512 /* Determine the main mode we'll be using. */
2513 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2514 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2515 if (GET_MODE_SIZE (tmode) < max_size)
2516 mode = tmode;
2518 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2520 data->to_addr = copy_to_mode_reg (to_addr_mode,
2521 plus_constant (to_addr, data->len));
2522 data->autinc_to = 1;
2523 data->explicit_inc_to = -1;
2526 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2527 && ! data->autinc_to)
2529 data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2530 data->autinc_to = 1;
2531 data->explicit_inc_to = 1;
2534 if ( !data->autinc_to && CONSTANT_P (to_addr))
2535 data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2538 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2539 if (align >= GET_MODE_ALIGNMENT (tmode))
2540 align = GET_MODE_ALIGNMENT (tmode);
2541 else
2543 enum machine_mode xmode;
2545 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2546 tmode != VOIDmode;
2547 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2548 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2549 || SLOW_UNALIGNED_ACCESS (tmode, align))
2550 break;
2552 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2555 /* First store what we can in the largest integer mode, then go to
2556 successively smaller modes. */
2558 while (max_size > 1)
2560 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2561 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2562 if (GET_MODE_SIZE (tmode) < max_size)
2563 mode = tmode;
2565 if (mode == VOIDmode)
2566 break;
2568 icode = optab_handler (mov_optab, mode)->insn_code;
2569 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2570 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2572 max_size = GET_MODE_SIZE (mode);
2575 /* The code above should have handled everything. */
2576 gcc_assert (!data->len);
2579 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2580 with move instructions for mode MODE. GENFUN is the gen_... function
2581 to make a move insn for that mode. DATA has all the other info. */
2583 static void
2584 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2585 struct store_by_pieces_d *data)
2587 unsigned int size = GET_MODE_SIZE (mode);
2588 rtx to1, cst;
2590 while (data->len >= size)
2592 if (data->reverse)
2593 data->offset -= size;
2595 if (data->autinc_to)
2596 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2597 data->offset);
2598 else
2599 to1 = adjust_address (data->to, mode, data->offset);
2601 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2602 emit_insn (gen_add2_insn (data->to_addr,
2603 GEN_INT (-(HOST_WIDE_INT) size)));
2605 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2606 emit_insn ((*genfun) (to1, cst));
2608 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2609 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2611 if (! data->reverse)
2612 data->offset += size;
2614 data->len -= size;
2618 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2619 its length in bytes. */
2622 clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
2623 unsigned int expected_align, HOST_WIDE_INT expected_size)
2625 enum machine_mode mode = GET_MODE (object);
2626 unsigned int align;
2628 gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2630 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2631 just move a zero. Otherwise, do this a piece at a time. */
2632 if (mode != BLKmode
2633 && CONST_INT_P (size)
2634 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
2636 rtx zero = CONST0_RTX (mode);
2637 if (zero != NULL)
2639 emit_move_insn (object, zero);
2640 return NULL;
2643 if (COMPLEX_MODE_P (mode))
2645 zero = CONST0_RTX (GET_MODE_INNER (mode));
2646 if (zero != NULL)
2648 write_complex_part (object, zero, 0);
2649 write_complex_part (object, zero, 1);
2650 return NULL;
2655 if (size == const0_rtx)
2656 return NULL;
2658 align = MEM_ALIGN (object);
2660 if (CONST_INT_P (size)
2661 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2662 clear_by_pieces (object, INTVAL (size), align);
2663 else if (set_storage_via_setmem (object, size, const0_rtx, align,
2664 expected_align, expected_size))
2666 else if (ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (object)))
2667 return set_storage_via_libcall (object, size, const0_rtx,
2668 method == BLOCK_OP_TAILCALL);
2669 else
2670 gcc_unreachable ();
2672 return NULL;
2676 clear_storage (rtx object, rtx size, enum block_op_methods method)
2678 return clear_storage_hints (object, size, method, 0, -1);
2682 /* A subroutine of clear_storage. Expand a call to memset.
2683 Return the return value of memset, 0 otherwise. */
2686 set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
2688 tree call_expr, fn, object_tree, size_tree, val_tree;
2689 enum machine_mode size_mode;
2690 rtx retval;
2692 /* Emit code to copy OBJECT and SIZE into new pseudos. We can then
2693 place those pseudos into a VAR_DECL and use them later. */
2695 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2697 size_mode = TYPE_MODE (sizetype);
2698 size = convert_to_mode (size_mode, size, 1);
2699 size = copy_to_mode_reg (size_mode, size);
2701 /* It is incorrect to use the libcall calling conventions to call
2702 memset in this context. This could be a user call to memset and
2703 the user may wish to examine the return value from memset. For
2704 targets where libcalls and normal calls have different conventions
2705 for returning pointers, we could end up generating incorrect code. */
2707 object_tree = make_tree (ptr_type_node, object);
2708 if (!CONST_INT_P (val))
2709 val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
2710 size_tree = make_tree (sizetype, size);
2711 val_tree = make_tree (integer_type_node, val);
2713 fn = clear_storage_libcall_fn (true);
2714 call_expr = build_call_expr (fn, 3, object_tree, val_tree, size_tree);
2715 CALL_EXPR_TAILCALL (call_expr) = tailcall;
2717 retval = expand_normal (call_expr);
2719 return retval;
2722 /* A subroutine of set_storage_via_libcall. Create the tree node
2723 for the function we use for block clears. The first time FOR_CALL
2724 is true, we call assemble_external. */
2726 tree block_clear_fn;
2728 void
2729 init_block_clear_fn (const char *asmspec)
2731 if (!block_clear_fn)
2733 tree fn, args;
2735 fn = get_identifier ("memset");
2736 args = build_function_type_list (ptr_type_node, ptr_type_node,
2737 integer_type_node, sizetype,
2738 NULL_TREE);
2740 fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
2741 DECL_EXTERNAL (fn) = 1;
2742 TREE_PUBLIC (fn) = 1;
2743 DECL_ARTIFICIAL (fn) = 1;
2744 TREE_NOTHROW (fn) = 1;
2745 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
2746 DECL_VISIBILITY_SPECIFIED (fn) = 1;
2748 block_clear_fn = fn;
2751 if (asmspec)
2752 set_user_assembler_name (block_clear_fn, asmspec);
2755 static tree
2756 clear_storage_libcall_fn (int for_call)
2758 static bool emitted_extern;
2760 if (!block_clear_fn)
2761 init_block_clear_fn (NULL);
2763 if (for_call && !emitted_extern)
2765 emitted_extern = true;
2766 make_decl_rtl (block_clear_fn);
2767 assemble_external (block_clear_fn);
2770 return block_clear_fn;
2773 /* Expand a setmem pattern; return true if successful. */
2775 bool
2776 set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
2777 unsigned int expected_align, HOST_WIDE_INT expected_size)
2779 /* Try the most limited insn first, because there's no point
2780 including more than one in the machine description unless
2781 the more limited one has some advantage. */
2783 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2784 enum machine_mode mode;
2786 if (expected_align < align)
2787 expected_align = align;
2789 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2790 mode = GET_MODE_WIDER_MODE (mode))
2792 enum insn_code code = setmem_optab[(int) mode];
2793 insn_operand_predicate_fn pred;
2795 if (code != CODE_FOR_nothing
2796 /* We don't need MODE to be narrower than
2797 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2798 the mode mask, as it is returned by the macro, it will
2799 definitely be less than the actual mode mask. */
2800 && ((CONST_INT_P (size)
2801 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2802 <= (GET_MODE_MASK (mode) >> 1)))
2803 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2804 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2805 || (*pred) (object, BLKmode))
2806 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
2807 || (*pred) (opalign, VOIDmode)))
2809 rtx opsize, opchar;
2810 enum machine_mode char_mode;
2811 rtx last = get_last_insn ();
2812 rtx pat;
2814 opsize = convert_to_mode (mode, size, 1);
2815 pred = insn_data[(int) code].operand[1].predicate;
2816 if (pred != 0 && ! (*pred) (opsize, mode))
2817 opsize = copy_to_mode_reg (mode, opsize);
2819 opchar = val;
2820 char_mode = insn_data[(int) code].operand[2].mode;
2821 if (char_mode != VOIDmode)
2823 opchar = convert_to_mode (char_mode, opchar, 1);
2824 pred = insn_data[(int) code].operand[2].predicate;
2825 if (pred != 0 && ! (*pred) (opchar, char_mode))
2826 opchar = copy_to_mode_reg (char_mode, opchar);
2829 if (insn_data[(int) code].n_operands == 4)
2830 pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign);
2831 else
2832 pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign,
2833 GEN_INT (expected_align
2834 / BITS_PER_UNIT),
2835 GEN_INT (expected_size));
2836 if (pat)
2838 emit_insn (pat);
2839 return true;
2841 else
2842 delete_insns_since (last);
2846 return false;
2850 /* Write to one of the components of the complex value CPLX. Write VAL to
2851 the real part if IMAG_P is false, and the imaginary part if it's true. */
2853 static void
2854 write_complex_part (rtx cplx, rtx val, bool imag_p)
2856 enum machine_mode cmode;
2857 enum machine_mode imode;
2858 unsigned ibitsize;
2860 if (GET_CODE (cplx) == CONCAT)
2862 emit_move_insn (XEXP (cplx, imag_p), val);
2863 return;
2866 cmode = GET_MODE (cplx);
2867 imode = GET_MODE_INNER (cmode);
2868 ibitsize = GET_MODE_BITSIZE (imode);
2870 /* For MEMs simplify_gen_subreg may generate an invalid new address
2871 because, e.g., the original address is considered mode-dependent
2872 by the target, which restricts simplify_subreg from invoking
2873 adjust_address_nv. Instead of preparing fallback support for an
2874 invalid address, we call adjust_address_nv directly. */
2875 if (MEM_P (cplx))
2877 emit_move_insn (adjust_address_nv (cplx, imode,
2878 imag_p ? GET_MODE_SIZE (imode) : 0),
2879 val);
2880 return;
2883 /* If the sub-object is at least word sized, then we know that subregging
2884 will work. This special case is important, since store_bit_field
2885 wants to operate on integer modes, and there's rarely an OImode to
2886 correspond to TCmode. */
2887 if (ibitsize >= BITS_PER_WORD
2888 /* For hard regs we have exact predicates. Assume we can split
2889 the original object if it spans an even number of hard regs.
2890 This special case is important for SCmode on 64-bit platforms
2891 where the natural size of floating-point regs is 32-bit. */
2892 || (REG_P (cplx)
2893 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2894 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2896 rtx part = simplify_gen_subreg (imode, cplx, cmode,
2897 imag_p ? GET_MODE_SIZE (imode) : 0);
2898 if (part)
2900 emit_move_insn (part, val);
2901 return;
2903 else
2904 /* simplify_gen_subreg may fail for sub-word MEMs. */
2905 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2908 store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, imode, val);
2911 /* Extract one of the components of the complex value CPLX. Extract the
2912 real part if IMAG_P is false, and the imaginary part if it's true. */
2914 static rtx
2915 read_complex_part (rtx cplx, bool imag_p)
2917 enum machine_mode cmode, imode;
2918 unsigned ibitsize;
2920 if (GET_CODE (cplx) == CONCAT)
2921 return XEXP (cplx, imag_p);
2923 cmode = GET_MODE (cplx);
2924 imode = GET_MODE_INNER (cmode);
2925 ibitsize = GET_MODE_BITSIZE (imode);
2927 /* Special case reads from complex constants that got spilled to memory. */
2928 if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
2930 tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
2931 if (decl && TREE_CODE (decl) == COMPLEX_CST)
2933 tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
2934 if (CONSTANT_CLASS_P (part))
2935 return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
2939 /* For MEMs simplify_gen_subreg may generate an invalid new address
2940 because, e.g., the original address is considered mode-dependent
2941 by the target, which restricts simplify_subreg from invoking
2942 adjust_address_nv. Instead of preparing fallback support for an
2943 invalid address, we call adjust_address_nv directly. */
2944 if (MEM_P (cplx))
2945 return adjust_address_nv (cplx, imode,
2946 imag_p ? GET_MODE_SIZE (imode) : 0);
2948 /* If the sub-object is at least word sized, then we know that subregging
2949 will work. This special case is important, since extract_bit_field
2950 wants to operate on integer modes, and there's rarely an OImode to
2951 correspond to TCmode. */
2952 if (ibitsize >= BITS_PER_WORD
2953 /* For hard regs we have exact predicates. Assume we can split
2954 the original object if it spans an even number of hard regs.
2955 This special case is important for SCmode on 64-bit platforms
2956 where the natural size of floating-point regs is 32-bit. */
2957 || (REG_P (cplx)
2958 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2959 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2961 rtx ret = simplify_gen_subreg (imode, cplx, cmode,
2962 imag_p ? GET_MODE_SIZE (imode) : 0);
2963 if (ret)
2964 return ret;
2965 else
2966 /* simplify_gen_subreg may fail for sub-word MEMs. */
2967 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2970 return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
2971 true, NULL_RTX, imode, imode);
2974 /* A subroutine of emit_move_insn_1. Yet another lowpart generator.
2975 NEW_MODE and OLD_MODE are the same size. Return NULL if X cannot be
2976 represented in NEW_MODE. If FORCE is true, this will never happen, as
2977 we'll force-create a SUBREG if needed. */
2979 static rtx
2980 emit_move_change_mode (enum machine_mode new_mode,
2981 enum machine_mode old_mode, rtx x, bool force)
2983 rtx ret;
2985 if (push_operand (x, GET_MODE (x)))
2987 ret = gen_rtx_MEM (new_mode, XEXP (x, 0));
2988 MEM_COPY_ATTRIBUTES (ret, x);
2990 else if (MEM_P (x))
2992 /* We don't have to worry about changing the address since the
2993 size in bytes is supposed to be the same. */
2994 if (reload_in_progress)
2996 /* Copy the MEM to change the mode and move any
2997 substitutions from the old MEM to the new one. */
2998 ret = adjust_address_nv (x, new_mode, 0);
2999 copy_replacements (x, ret);
3001 else
3002 ret = adjust_address (x, new_mode, 0);
3004 else
3006 /* Note that we do want simplify_subreg's behavior of validating
3007 that the new mode is ok for a hard register. If we were to use
3008 simplify_gen_subreg, we would create the subreg, but would
3009 probably run into the target not being able to implement it. */
3010 /* Except, of course, when FORCE is true, when this is exactly what
3011 we want. Which is needed for CCmodes on some targets. */
3012 if (force)
3013 ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
3014 else
3015 ret = simplify_subreg (new_mode, x, old_mode, 0);
3018 return ret;
3021 /* A subroutine of emit_move_insn_1. Generate a move from Y into X using
3022 an integer mode of the same size as MODE. Returns the instruction
3023 emitted, or NULL if such a move could not be generated. */
3025 static rtx
3026 emit_move_via_integer (enum machine_mode mode, rtx x, rtx y, bool force)
3028 enum machine_mode imode;
3029 enum insn_code code;
3031 /* There must exist a mode of the exact size we require. */
3032 imode = int_mode_for_mode (mode);
3033 if (imode == BLKmode)
3034 return NULL_RTX;
3036 /* The target must support moves in this mode. */
3037 code = optab_handler (mov_optab, imode)->insn_code;
3038 if (code == CODE_FOR_nothing)
3039 return NULL_RTX;
3041 x = emit_move_change_mode (imode, mode, x, force);
3042 if (x == NULL_RTX)
3043 return NULL_RTX;
3044 y = emit_move_change_mode (imode, mode, y, force);
3045 if (y == NULL_RTX)
3046 return NULL_RTX;
3047 return emit_insn (GEN_FCN (code) (x, y));
3050 /* A subroutine of emit_move_insn_1. X is a push_operand in MODE.
3051 Return an equivalent MEM that does not use an auto-increment. */
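/* Sketch of the transformation, for illustration only and not from the
   original sources: given X == (mem:SI (pre_dec (reg sp))) on a target that
   pushes 4 bytes, the code below emits an explicit sp := sp - 4 and returns
   (mem:SI (reg sp)); a post_dec form instead yields a MEM at sp + 4, the
   location the auto-decrement would have addressed.  */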
3053 static rtx
3054 emit_move_resolve_push (enum machine_mode mode, rtx x)
3056 enum rtx_code code = GET_CODE (XEXP (x, 0));
3057 HOST_WIDE_INT adjust;
3058 rtx temp;
3060 adjust = GET_MODE_SIZE (mode);
3061 #ifdef PUSH_ROUNDING
3062 adjust = PUSH_ROUNDING (adjust);
3063 #endif
3064 if (code == PRE_DEC || code == POST_DEC)
3065 adjust = -adjust;
3066 else if (code == PRE_MODIFY || code == POST_MODIFY)
3068 rtx expr = XEXP (XEXP (x, 0), 1);
3069 HOST_WIDE_INT val;
3071 gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
3072 gcc_assert (CONST_INT_P (XEXP (expr, 1)));
3073 val = INTVAL (XEXP (expr, 1));
3074 if (GET_CODE (expr) == MINUS)
3075 val = -val;
3076 gcc_assert (adjust == val || adjust == -val);
3077 adjust = val;
3080 /* Do not use anti_adjust_stack, since we don't want to update
3081 stack_pointer_delta. */
3082 temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
3083 GEN_INT (adjust), stack_pointer_rtx,
3084 0, OPTAB_LIB_WIDEN);
3085 if (temp != stack_pointer_rtx)
3086 emit_move_insn (stack_pointer_rtx, temp);
3088 switch (code)
3090 case PRE_INC:
3091 case PRE_DEC:
3092 case PRE_MODIFY:
3093 temp = stack_pointer_rtx;
3094 break;
3095 case POST_INC:
3096 case POST_DEC:
3097 case POST_MODIFY:
3098 temp = plus_constant (stack_pointer_rtx, -adjust);
3099 break;
3100 default:
3101 gcc_unreachable ();
3104 return replace_equiv_address (x, temp);
3107 /* A subroutine of emit_move_complex. Generate a move from Y into X.
3108 X is known to satisfy push_operand, and MODE is known to be complex.
3109 Returns the last instruction emitted. */
3112 emit_move_complex_push (enum machine_mode mode, rtx x, rtx y)
3114 enum machine_mode submode = GET_MODE_INNER (mode);
3115 bool imag_first;
3117 #ifdef PUSH_ROUNDING
3118 unsigned int submodesize = GET_MODE_SIZE (submode);
3120 /* In case we output to the stack, but the size is smaller than the
3121 machine can push exactly, we need to use move instructions. */
3122 if (PUSH_ROUNDING (submodesize) != submodesize)
3124 x = emit_move_resolve_push (mode, x);
3125 return emit_move_insn (x, y);
3127 #endif
3129 /* Note that the real part always precedes the imag part in memory
3130 regardless of the machine's endianness. */
3131 switch (GET_CODE (XEXP (x, 0)))
3133 case PRE_DEC:
3134 case POST_DEC:
3135 imag_first = true;
3136 break;
3137 case PRE_INC:
3138 case POST_INC:
3139 imag_first = false;
3140 break;
3141 default:
3142 gcc_unreachable ();
3145 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3146 read_complex_part (y, imag_first));
3147 return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3148 read_complex_part (y, !imag_first));
3151 /* A subroutine of emit_move_complex. Perform the move from Y to X
3152 via two moves of the parts. Returns the last instruction emitted. */
3155 emit_move_complex_parts (rtx x, rtx y)
3157 /* Show the output dies here. This is necessary for SUBREGs
3158 of pseudos since we cannot track their lifetimes correctly;
3159 hard regs shouldn't appear here except as return values. */
3160 if (!reload_completed && !reload_in_progress
3161 && REG_P (x) && !reg_overlap_mentioned_p (x, y))
3162 emit_clobber (x);
3164 write_complex_part (x, read_complex_part (y, false), false);
3165 write_complex_part (x, read_complex_part (y, true), true);
3167 return get_last_insn ();
3170 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3171 MODE is known to be complex. Returns the last instruction emitted. */
3173 static rtx
3174 emit_move_complex (enum machine_mode mode, rtx x, rtx y)
3176 bool try_int;
3178 /* Need to take special care for pushes, to maintain proper ordering
3179 of the data, and possibly extra padding. */
3180 if (push_operand (x, mode))
3181 return emit_move_complex_push (mode, x, y);
3183 /* See if we can coerce the target into moving both values at once. */
3185 /* Move floating point as parts. */
3186 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
3187 && optab_handler (mov_optab, GET_MODE_INNER (mode))->insn_code != CODE_FOR_nothing)
3188 try_int = false;
3189 /* Not possible if the values are inherently not adjacent. */
3190 else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
3191 try_int = false;
3192 /* Is possible if both are registers (or subregs of registers). */
3193 else if (register_operand (x, mode) && register_operand (y, mode))
3194 try_int = true;
3195 /* If one of the operands is a memory, and alignment constraints
3196 are friendly enough, we may be able to do combined memory operations.
3197 We do not attempt this if Y is a constant because that combination is
3198 usually better with the by-parts thing below. */
3199 else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
3200 && (!STRICT_ALIGNMENT
3201 || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
3202 try_int = true;
3203 else
3204 try_int = false;
3206 if (try_int)
3208 rtx ret;
3210 /* For memory to memory moves, optimal behavior can be had with the
3211 existing block move logic. */
3212 if (MEM_P (x) && MEM_P (y))
3214 emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
3215 BLOCK_OP_NO_LIBCALL);
3216 return get_last_insn ();
3219 ret = emit_move_via_integer (mode, x, y, true);
3220 if (ret)
3221 return ret;
3224 return emit_move_complex_parts (x, y);
3227 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3228 MODE is known to be MODE_CC. Returns the last instruction emitted. */
3230 static rtx
3231 emit_move_ccmode (enum machine_mode mode, rtx x, rtx y)
3233 rtx ret;
3235 /* Assume all MODE_CC modes are equivalent; if we have movcc, use it. */
3236 if (mode != CCmode)
3238 enum insn_code code = optab_handler (mov_optab, CCmode)->insn_code;
3239 if (code != CODE_FOR_nothing)
3241 x = emit_move_change_mode (CCmode, mode, x, true);
3242 y = emit_move_change_mode (CCmode, mode, y, true);
3243 return emit_insn (GEN_FCN (code) (x, y));
3247 /* Otherwise, find the MODE_INT mode of the same width. */
3248 ret = emit_move_via_integer (mode, x, y, false);
3249 gcc_assert (ret != NULL);
3250 return ret;
3253 /* Return true if word I of OP lies entirely in the
3254 undefined bits of a paradoxical subreg. */
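/* Example, given purely as an illustration and not from the original
   sources: on a little-endian 32-bit target, (subreg:DI (reg:SI x) 0) is
   paradoxical; word 0 holds the SImode value while word 1 lies entirely in
   the undefined upper bits, so this predicate returns true for I == 1 and
   emit_move_multi_word can skip generating a move for that word.  */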
3256 static bool
3257 undefined_operand_subword_p (const_rtx op, int i)
3259 enum machine_mode innermode, innermostmode;
3260 int offset;
3261 if (GET_CODE (op) != SUBREG)
3262 return false;
3263 innermode = GET_MODE (op);
3264 innermostmode = GET_MODE (SUBREG_REG (op));
3265 offset = i * UNITS_PER_WORD + SUBREG_BYTE (op);
3266 /* The SUBREG_BYTE represents the offset, as if the value were stored in
3267 memory, except for a paradoxical subreg where we define
3268 SUBREG_BYTE to be 0; undo this exception as in
3269 simplify_subreg. */
3270 if (SUBREG_BYTE (op) == 0
3271 && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
3273 int difference = (GET_MODE_SIZE (innermostmode) - GET_MODE_SIZE (innermode));
3274 if (WORDS_BIG_ENDIAN)
3275 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
3276 if (BYTES_BIG_ENDIAN)
3277 offset += difference % UNITS_PER_WORD;
3279 if (offset >= GET_MODE_SIZE (innermostmode)
3280 || offset <= -GET_MODE_SIZE (word_mode))
3281 return true;
3282 return false;
3285 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3286 MODE is any multi-word or full-word mode that lacks a move_insn
3287 pattern. Note that you will get better code if you define such
3288 patterns, even if they must turn into multiple assembler instructions. */
3290 static rtx
3291 emit_move_multi_word (enum machine_mode mode, rtx x, rtx y)
3293 rtx last_insn = 0;
3294 rtx seq, inner;
3295 bool need_clobber;
3296 int i;
3298 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
3300 /* If X is a push on the stack, do the push now and replace
3301 X with a reference to the stack pointer. */
3302 if (push_operand (x, mode))
3303 x = emit_move_resolve_push (mode, x);
3305 /* If we are in reload, see if either operand is a MEM whose address
3306 is scheduled for replacement. */
3307 if (reload_in_progress && MEM_P (x)
3308 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3309 x = replace_equiv_address_nv (x, inner);
3310 if (reload_in_progress && MEM_P (y)
3311 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3312 y = replace_equiv_address_nv (y, inner);
3314 start_sequence ();
3316 need_clobber = false;
3317 for (i = 0;
3318 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3319 i++)
3321 rtx xpart = operand_subword (x, i, 1, mode);
3322 rtx ypart;
3324 /* Do not generate code for a move if it would come entirely
3325 from the undefined bits of a paradoxical subreg. */
3326 if (undefined_operand_subword_p (y, i))
3327 continue;
3329 ypart = operand_subword (y, i, 1, mode);
3331 /* If we can't get a part of Y, put Y into memory if it is a
3332 constant. Otherwise, force it into a register. Then we must
3333 be able to get a part of Y. */
3334 if (ypart == 0 && CONSTANT_P (y))
3336 y = use_anchored_address (force_const_mem (mode, y));
3337 ypart = operand_subword (y, i, 1, mode);
3339 else if (ypart == 0)
3340 ypart = operand_subword_force (y, i, mode);
3342 gcc_assert (xpart && ypart);
3344 need_clobber |= (GET_CODE (xpart) == SUBREG);
3346 last_insn = emit_move_insn (xpart, ypart);
3349 seq = get_insns ();
3350 end_sequence ();
3352 /* Show the output dies here. This is necessary for SUBREGs
3353 of pseudos since we cannot track their lifetimes correctly;
3354 hard regs shouldn't appear here except as return values.
3355 We never want to emit such a clobber after reload. */
3356 if (x != y
3357 && ! (reload_in_progress || reload_completed)
3358 && need_clobber != 0)
3359 emit_clobber (x);
3361 emit_insn (seq);
3363 return last_insn;
3366 /* Low level part of emit_move_insn.
3367 Called just like emit_move_insn, but assumes X and Y
3368 are basically valid. */
3371 emit_move_insn_1 (rtx x, rtx y)
3373 enum machine_mode mode = GET_MODE (x);
3374 enum insn_code code;
3376 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3378 code = optab_handler (mov_optab, mode)->insn_code;
3379 if (code != CODE_FOR_nothing)
3380 return emit_insn (GEN_FCN (code) (x, y));
3382 /* Expand complex moves by moving real part and imag part. */
3383 if (COMPLEX_MODE_P (mode))
3384 return emit_move_complex (mode, x, y);
3386 if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT
3387 || ALL_FIXED_POINT_MODE_P (mode))
3389 rtx result = emit_move_via_integer (mode, x, y, true);
3391 /* If we can't find an integer mode, fall back to a multi-word move. */
3392 if (result)
3393 return result;
3394 else
3395 return emit_move_multi_word (mode, x, y);
3398 if (GET_MODE_CLASS (mode) == MODE_CC)
3399 return emit_move_ccmode (mode, x, y);
3401 /* Try using a move pattern for the corresponding integer mode. This is
3402 only safe when simplify_subreg can convert MODE constants into integer
3403 constants. At present, it can only do this reliably if the value
3404 fits within a HOST_WIDE_INT. */
3405 if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3407 rtx ret = emit_move_via_integer (mode, x, y, false);
3408 if (ret)
3409 return ret;
3412 return emit_move_multi_word (mode, x, y);
3415 /* Generate code to copy Y into X.
3416 Both Y and X must have the same mode, except that
3417 Y can be a constant with VOIDmode.
3418 This mode cannot be BLKmode; use emit_block_move for that.
3420 Return the last instruction emitted. */
3423 emit_move_insn (rtx x, rtx y)
3425 enum machine_mode mode = GET_MODE (x);
3426 rtx y_cst = NULL_RTX;
3427 rtx last_insn, set;
3429 gcc_assert (mode != BLKmode
3430 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3432 if (CONSTANT_P (y))
3434 if (optimize
3435 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3436 && (last_insn = compress_float_constant (x, y)))
3437 return last_insn;
3439 y_cst = y;
3441 if (!LEGITIMATE_CONSTANT_P (y))
3443 y = force_const_mem (mode, y);
3445 /* If the target's cannot_force_const_mem prevented the spill,
3446 assume that the target's move expanders will also take care
3447 of the non-legitimate constant. */
3448 if (!y)
3449 y = y_cst;
3450 else
3451 y = use_anchored_address (y);
3455 /* If X or Y are memory references, verify that their addresses are valid
3456 for the machine. */
3457 if (MEM_P (x)
3458 && (! memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0),
3459 MEM_ADDR_SPACE (x))
3460 && ! push_operand (x, GET_MODE (x))))
3461 x = validize_mem (x);
3463 if (MEM_P (y)
3464 && ! memory_address_addr_space_p (GET_MODE (y), XEXP (y, 0),
3465 MEM_ADDR_SPACE (y)))
3466 y = validize_mem (y);
3468 gcc_assert (mode != BLKmode);
3470 last_insn = emit_move_insn_1 (x, y);
3472 if (y_cst && REG_P (x)
3473 && (set = single_set (last_insn)) != NULL_RTX
3474 && SET_DEST (set) == x
3475 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3476 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
3478 return last_insn;
3481 /* If Y is representable exactly in a narrower mode, and the target can
3482 perform the extension directly from constant or memory, then emit the
3483 move as an extension. */
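/* Hypothetical example, not from the original sources: moving the DFmode
   constant 1.5 into a register on a target with a cheap extendsfdf2 can be
   done by loading the exactly representable SFmode constant 1.5 and
   extending it, which is often cheaper than materializing the full
   double-precision bit pattern; the loop below checks exactness with
   exact_real_truncate and compares the rtx_cost of the two alternatives.  */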
3485 static rtx
3486 compress_float_constant (rtx x, rtx y)
3488 enum machine_mode dstmode = GET_MODE (x);
3489 enum machine_mode orig_srcmode = GET_MODE (y);
3490 enum machine_mode srcmode;
3491 REAL_VALUE_TYPE r;
3492 int oldcost, newcost;
3493 bool speed = optimize_insn_for_speed_p ();
3495 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3497 if (LEGITIMATE_CONSTANT_P (y))
3498 oldcost = rtx_cost (y, SET, speed);
3499 else
3500 oldcost = rtx_cost (force_const_mem (dstmode, y), SET, speed);
3502 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3503 srcmode != orig_srcmode;
3504 srcmode = GET_MODE_WIDER_MODE (srcmode))
3506 enum insn_code ic;
3507 rtx trunc_y, last_insn;
3509 /* Skip if the target can't extend this way. */
3510 ic = can_extend_p (dstmode, srcmode, 0);
3511 if (ic == CODE_FOR_nothing)
3512 continue;
3514 /* Skip if the narrowed value isn't exact. */
3515 if (! exact_real_truncate (srcmode, &r))
3516 continue;
3518 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3520 if (LEGITIMATE_CONSTANT_P (trunc_y))
3522 /* Skip if the target needs extra instructions to perform
3523 the extension. */
3524 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3525 continue;
3526 /* This is valid, but may not be cheaper than the original. */
3527 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET, speed);
3528 if (oldcost < newcost)
3529 continue;
3531 else if (float_extend_from_mem[dstmode][srcmode])
3533 trunc_y = force_const_mem (srcmode, trunc_y);
3534 /* This is valid, but may not be cheaper than the original. */
3535 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET, speed);
3536 if (oldcost < newcost)
3537 continue;
3538 trunc_y = validize_mem (trunc_y);
3540 else
3541 continue;
3543 /* For CSE's benefit, force the compressed constant pool entry
3544 into a new pseudo. This constant may be used in different modes,
3545 and if not, combine will put things back together for us. */
3546 trunc_y = force_reg (srcmode, trunc_y);
3547 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3548 last_insn = get_last_insn ();
3550 if (REG_P (x))
3551 set_unique_reg_note (last_insn, REG_EQUAL, y);
3553 return last_insn;
3556 return NULL_RTX;
3559 /* Pushing data onto the stack. */
3561 /* Push a block of length SIZE (perhaps variable)
3562 and return an rtx to address the beginning of the block.
3563 The value may be virtual_outgoing_args_rtx.
3565 EXTRA is the number of bytes of padding to push in addition to SIZE.
3566 BELOW nonzero means this padding comes at low addresses;
3567 otherwise, the padding comes at high addresses. */
3570 push_block (rtx size, int extra, int below)
3572 rtx temp;
3574 size = convert_modes (Pmode, ptr_mode, size, 1);
3575 if (CONSTANT_P (size))
3576 anti_adjust_stack (plus_constant (size, extra));
3577 else if (REG_P (size) && extra == 0)
3578 anti_adjust_stack (size);
3579 else
3581 temp = copy_to_mode_reg (Pmode, size);
3582 if (extra != 0)
3583 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3584 temp, 0, OPTAB_LIB_WIDEN);
3585 anti_adjust_stack (temp);
3588 #ifndef STACK_GROWS_DOWNWARD
3589 if (0)
3590 #else
3591 if (1)
3592 #endif
3594 temp = virtual_outgoing_args_rtx;
3595 if (extra != 0 && below)
3596 temp = plus_constant (temp, extra);
3598 else
3600 if (CONST_INT_P (size))
3601 temp = plus_constant (virtual_outgoing_args_rtx,
3602 -INTVAL (size) - (below ? 0 : extra));
3603 else if (extra != 0 && !below)
3604 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3605 negate_rtx (Pmode, plus_constant (size, extra)));
3606 else
3607 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3608 negate_rtx (Pmode, size));
3611 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3614 #ifdef PUSH_ROUNDING
3616 /* Emit single push insn. */
3618 static void
3619 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3621 rtx dest_addr;
3622 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3623 rtx dest;
3624 enum insn_code icode;
3625 insn_operand_predicate_fn pred;
3627 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3628 /* If there is a push pattern, use it. Otherwise fall back to the old way
3629 of handing a MEM representing the push operation to the move expander. */
3630 icode = optab_handler (push_optab, mode)->insn_code;
3631 if (icode != CODE_FOR_nothing)
3633 if (((pred = insn_data[(int) icode].operand[0].predicate)
3634 && !((*pred) (x, mode))))
3635 x = force_reg (mode, x);
3636 emit_insn (GEN_FCN (icode) (x));
3637 return;
3639 if (GET_MODE_SIZE (mode) == rounded_size)
3640 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3641 /* If we are to pad downward, adjust the stack pointer first and
3642 then store X into the stack location using an offset. This is
3643 because emit_move_insn does not know how to pad; it does not have
3644 access to type. */
3645 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3647 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3648 HOST_WIDE_INT offset;
3650 emit_move_insn (stack_pointer_rtx,
3651 expand_binop (Pmode,
3652 #ifdef STACK_GROWS_DOWNWARD
3653 sub_optab,
3654 #else
3655 add_optab,
3656 #endif
3657 stack_pointer_rtx,
3658 GEN_INT (rounded_size),
3659 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3661 offset = (HOST_WIDE_INT) padding_size;
3662 #ifdef STACK_GROWS_DOWNWARD
3663 if (STACK_PUSH_CODE == POST_DEC)
3664 /* We have already decremented the stack pointer, so get the
3665 previous value. */
3666 offset += (HOST_WIDE_INT) rounded_size;
3667 #else
3668 if (STACK_PUSH_CODE == POST_INC)
3669 /* We have already incremented the stack pointer, so get the
3670 previous value. */
3671 offset -= (HOST_WIDE_INT) rounded_size;
3672 #endif
3673 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3675 else
3677 #ifdef STACK_GROWS_DOWNWARD
3678 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3679 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3680 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3681 #else
3682 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3683 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3684 GEN_INT (rounded_size));
3685 #endif
3686 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3689 dest = gen_rtx_MEM (mode, dest_addr);
3691 if (type != 0)
3693 set_mem_attributes (dest, type, 1);
3695 if (flag_optimize_sibling_calls)
3696 /* Function incoming arguments may overlap with sibling call
3697 outgoing arguments and we cannot allow reordering of reads
3698 from function arguments with stores to outgoing arguments
3699 of sibling calls. */
3700 set_mem_alias_set (dest, 0);
3702 emit_move_insn (dest, x);
3704 #endif
3706 /* Generate code to push X onto the stack, assuming it has mode MODE and
3707 type TYPE.
3708 MODE is redundant except when X is a CONST_INT (since they don't
3709 carry mode info).
3710 SIZE is an rtx for the size of data to be copied (in bytes),
3711 needed only if X is BLKmode.
3713 ALIGN (in bits) is maximum alignment we can assume.
3715 If PARTIAL and REG are both nonzero, then copy that many of the first
3716 bytes of X into registers starting with REG, and push the rest of X.
3717 The amount of space pushed is decreased by PARTIAL bytes.
3718 REG must be a hard register in this case.
3719 If REG is zero but PARTIAL is not, take all other actions for an
3720 argument partially in registers, but do not actually load any
3721 registers.
3723 EXTRA is the amount in bytes of extra space to leave next to this arg.
3724 This is ignored if an argument block has already been allocated.
3726 On a machine that lacks real push insns, ARGS_ADDR is the address of
3727 the bottom of the argument block for this call. We use indexing off there
3728 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3729 argument block has not been preallocated.
3731 ARGS_SO_FAR is the size of args previously pushed for this call.
3733 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3734 for arguments passed in registers. If nonzero, it will be the number
3735 of bytes required. */
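/* Illustrative example (hypothetical 32-bit target): with PARTIAL == 8
   and REG a hard register, the first 8 bytes of X are passed in REG and
   the following register, only the remainder of X is placed on the
   stack, and the stack space reserved for the argument is reduced by
   those 8 bytes.  */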
3737 void
3738 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3739 unsigned int align, int partial, rtx reg, int extra,
3740 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3741 rtx alignment_pad)
3743 rtx xinner;
3744 enum direction stack_direction
3745 #ifdef STACK_GROWS_DOWNWARD
3746 = downward;
3747 #else
3748 = upward;
3749 #endif
3751 /* Decide where to pad the argument: `downward' for below,
3752 `upward' for above, or `none' for don't pad it.
3753 Default is below for small data on big-endian machines; else above. */
3754 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3756 /* Invert direction if stack is post-decrement.
3757 FIXME: why? */
3758 if (STACK_PUSH_CODE == POST_DEC)
3759 if (where_pad != none)
3760 where_pad = (where_pad == downward ? upward : downward);
3762 xinner = x;
3764 if (mode == BLKmode
3765 || (STRICT_ALIGNMENT && align < GET_MODE_ALIGNMENT (mode)))
3767 /* Copy a block into the stack, entirely or partially. */
3769 rtx temp;
3770 int used;
3771 int offset;
3772 int skip;
3774 offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3775 used = partial - offset;
3777 if (mode != BLKmode)
3779 /* A value is to be stored in an insufficiently aligned
3780 stack slot; copy via a suitably aligned slot if
3781 necessary. */
3782 size = GEN_INT (GET_MODE_SIZE (mode));
3783 if (!MEM_P (xinner))
3785 temp = assign_temp (type, 0, 1, 1);
3786 emit_move_insn (temp, xinner);
3787 xinner = temp;
3791 gcc_assert (size);
3793 /* USED is now the # of bytes we need not copy to the stack
3794 because registers will take care of them. */
3796 if (partial != 0)
3797 xinner = adjust_address (xinner, BLKmode, used);
3799 /* If the partial register-part of the arg counts in its stack size,
3800 skip the part of stack space corresponding to the registers.
3801 Otherwise, start copying to the beginning of the stack space,
3802 by setting SKIP to 0. */
3803 skip = (reg_parm_stack_space == 0) ? 0 : used;
3805 #ifdef PUSH_ROUNDING
3806 /* Do it with several push insns if that doesn't take lots of insns
3807 and if there is no difficulty with push insns that skip bytes
3808 on the stack for alignment purposes. */
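/* E.g. (illustrative): a 12-byte BLKmode argument that satisfies the
   checks below is emitted by the move_by_pieces call as three
   word-sized pushes on a target with 4-byte push insns, instead of a
   stack adjustment followed by a block copy.  */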
3809 if (args_addr == 0
3810 && PUSH_ARGS
3811 && CONST_INT_P (size)
3812 && skip == 0
3813 && MEM_ALIGN (xinner) >= align
3814 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3815 /* Here we avoid the case of a structure whose weak alignment
3816 would force many pushes of small amounts of data, where the
3817 rounding done by those small pushes causes trouble. */
3818 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3819 || align >= BIGGEST_ALIGNMENT
3820 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3821 == (align / BITS_PER_UNIT)))
3822 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3824 /* Push padding now if padding above and stack grows down,
3825 or if padding below and stack grows up.
3826 But if space already allocated, this has already been done. */
3827 if (extra && args_addr == 0
3828 && where_pad != none && where_pad != stack_direction)
3829 anti_adjust_stack (GEN_INT (extra));
3831 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3833 else
3834 #endif /* PUSH_ROUNDING */
3836 rtx target;
3838 /* Otherwise make space on the stack and copy the data
3839 to the address of that space. */
3841 /* Deduct words put into registers from the size we must copy. */
3842 if (partial != 0)
3844 if (CONST_INT_P (size))
3845 size = GEN_INT (INTVAL (size) - used);
3846 else
3847 size = expand_binop (GET_MODE (size), sub_optab, size,
3848 GEN_INT (used), NULL_RTX, 0,
3849 OPTAB_LIB_WIDEN);
3852 /* Get the address of the stack space.
3853 In this case, we do not deal with EXTRA separately.
3854 A single stack adjust will do. */
3855 if (! args_addr)
3857 temp = push_block (size, extra, where_pad == downward);
3858 extra = 0;
3860 else if (CONST_INT_P (args_so_far))
3861 temp = memory_address (BLKmode,
3862 plus_constant (args_addr,
3863 skip + INTVAL (args_so_far)));
3864 else
3865 temp = memory_address (BLKmode,
3866 plus_constant (gen_rtx_PLUS (Pmode,
3867 args_addr,
3868 args_so_far),
3869 skip));
3871 if (!ACCUMULATE_OUTGOING_ARGS)
3873 /* If the source is referenced relative to the stack pointer,
3874 copy it to another register to stabilize it. We do not need
3875 to do this if we know that we won't be changing sp. */
3877 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3878 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3879 temp = copy_to_reg (temp);
3882 target = gen_rtx_MEM (BLKmode, temp);
3884 /* We do *not* set_mem_attributes here, because incoming arguments
3885 may overlap with sibling call outgoing arguments and we cannot
3886 allow reordering of reads from function arguments with stores
3887 to outgoing arguments of sibling calls. We do, however, want
3888 to record the alignment of the stack slot. */
3889 /* ALIGN may well be better aligned than TYPE, e.g. due to
3890 PARM_BOUNDARY. Assume the caller isn't lying. */
3891 set_mem_align (target, align);
3893 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3896 else if (partial > 0)
3898 /* Scalar partly in registers. */
3900 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3901 int i;
3902 int not_stack;
3903 /* Number of bytes at the start of the argument
3904 that we must make space for but need not store. */
3905 int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3906 int args_offset = INTVAL (args_so_far);
3907 int skip;
3909 /* Push padding now if padding above and stack grows down,
3910 or if padding below and stack grows up.
3911 But if space already allocated, this has already been done. */
3912 if (extra && args_addr == 0
3913 && where_pad != none && where_pad != stack_direction)
3914 anti_adjust_stack (GEN_INT (extra));
3916 /* If we make space by pushing it, we might as well push
3917 the real data. Otherwise, we can leave OFFSET nonzero
3918 and leave the space uninitialized. */
3919 if (args_addr == 0)
3920 offset = 0;
3922 /* Now NOT_STACK gets the number of words that we don't need to
3923 allocate on the stack. Convert OFFSET to words too. */
3924 not_stack = (partial - offset) / UNITS_PER_WORD;
3925 offset /= UNITS_PER_WORD;
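/* Worked example (hypothetical target with UNITS_PER_WORD == 4 and
   PARM_BOUNDARY == 64, and a preallocated argument block so OFFSET was
   not zeroed above): PARTIAL == 12 gives OFFSET == 12 % 8 == 4 bytes,
   so NOT_STACK == (12 - 4) / 4 == 2 words need no stack slot at all,
   and OFFSET == 4 / 4 == 1 word gets stack space but is not stored,
   since its bytes travel in registers.  */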
3927 /* If the partial register-part of the arg counts in its stack size,
3928 skip the part of stack space corresponding to the registers.
3929 Otherwise, start copying to the beginning of the stack space,
3930 by setting SKIP to 0. */
3931 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3933 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3934 x = validize_mem (force_const_mem (mode, x));
3936 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3937 SUBREGs of such registers are not allowed. */
3938 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3939 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3940 x = copy_to_reg (x);
3942 /* Loop over all the words allocated on the stack for this arg. */
3943 /* We can do it by words, because any scalar bigger than a word
3944 has a size that is a multiple of a word. */
3945 #ifndef PUSH_ARGS_REVERSED
3946 for (i = not_stack; i < size; i++)
3947 #else
3948 for (i = size - 1; i >= not_stack; i--)
3949 #endif
3950 if (i >= not_stack + offset)
3951 emit_push_insn (operand_subword_force (x, i, mode),
3952 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3953 0, args_addr,
3954 GEN_INT (args_offset + ((i - not_stack + skip)
3955 * UNITS_PER_WORD)),
3956 reg_parm_stack_space, alignment_pad);
3958 else
3960 rtx addr;
3961 rtx dest;
3963 /* Push padding now if padding above and stack grows down,
3964 or if padding below and stack grows up.
3965 But if space already allocated, this has already been done. */
3966 if (extra && args_addr == 0
3967 && where_pad != none && where_pad != stack_direction)
3968 anti_adjust_stack (GEN_INT (extra));
3970 #ifdef PUSH_ROUNDING
3971 if (args_addr == 0 && PUSH_ARGS)
3972 emit_single_push_insn (mode, x, type);
3973 else
3974 #endif
3976 if (CONST_INT_P (args_so_far))
3977 addr
3978 = memory_address (mode,
3979 plus_constant (args_addr,
3980 INTVAL (args_so_far)));
3981 else
3982 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3983 args_so_far));
3984 dest = gen_rtx_MEM (mode, addr);
3986 /* We do *not* set_mem_attributes here, because incoming arguments
3987 may overlap with sibling call outgoing arguments and we cannot
3988 allow reordering of reads from function arguments with stores
3989 to outgoing arguments of sibling calls. We do, however, want
3990 to record the alignment of the stack slot. */
3991 /* ALIGN may well be better aligned than TYPE, e.g. due to
3992 PARM_BOUNDARY. Assume the caller isn't lying. */
3993 set_mem_align (dest, align);
3995 emit_move_insn (dest, x);
3999 /* If part should go in registers, copy that part
4000 into the appropriate registers. Do this now, at the end,
4001 since mem-to-mem copies above may do function calls. */
4002 if (partial > 0 && reg != 0)
4004 /* Handle calls that pass values in multiple non-contiguous locations.
4005 The Irix 6 ABI has examples of this. */
4006 if (GET_CODE (reg) == PARALLEL)
4007 emit_group_load (reg, x, type, -1);
4008 else
4010 gcc_assert (partial % UNITS_PER_WORD == 0);
4011 move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode);
4015 if (extra && args_addr == 0 && where_pad == stack_direction)
4016 anti_adjust_stack (GEN_INT (extra));
4018 if (alignment_pad && args_addr == 0)
4019 anti_adjust_stack (alignment_pad);
4022 /* Return X if X can be used as a subtarget in a sequence of arithmetic
4023 operations. */
4025 static rtx
4026 get_subtarget (rtx x)
4028 return (optimize
4029 || x == 0
4030 /* Only registers can be subtargets. */
4031 || !REG_P (x)
4032 /* Don't use hard regs to avoid extending their life. */
4033 || REGNO (x) < FIRST_PSEUDO_REGISTER
4034 ? 0 : x);
4037 /* A subroutine of expand_assignment. Optimize FIELD op= VAL, where
4038 FIELD is a bitfield. Returns true if the optimization was successful,
4039 and there's nothing else to do. */
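/* Illustrative example of the transformation: for
     struct { unsigned f : 3; unsigned rest : 29; } s;
     s.f |= 4;
   instead of extracting the bit-field, OR-ing, and re-inserting it,
   the routine below IORs the constant 4, shifted to the field's bit
   position, directly into the word that holds the field.  This is a
   sketch of the idea; whether it applies depends on the checks made
   below.  */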
4041 static bool
4042 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
4043 unsigned HOST_WIDE_INT bitpos,
4044 enum machine_mode mode1, rtx str_rtx,
4045 tree to, tree src)
4047 enum machine_mode str_mode = GET_MODE (str_rtx);
4048 unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
4049 tree op0, op1;
4050 rtx value, result;
4051 optab binop;
4053 if (mode1 != VOIDmode
4054 || bitsize >= BITS_PER_WORD
4055 || str_bitsize > BITS_PER_WORD
4056 || TREE_SIDE_EFFECTS (to)
4057 || TREE_THIS_VOLATILE (to))
4058 return false;
4060 STRIP_NOPS (src);
4061 if (!BINARY_CLASS_P (src)
4062 || TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
4063 return false;
4065 op0 = TREE_OPERAND (src, 0);
4066 op1 = TREE_OPERAND (src, 1);
4067 STRIP_NOPS (op0);
4069 if (!operand_equal_p (to, op0, 0))
4070 return false;
4072 if (MEM_P (str_rtx))
4074 unsigned HOST_WIDE_INT offset1;
4076 if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
4077 str_mode = word_mode;
4078 str_mode = get_best_mode (bitsize, bitpos,
4079 MEM_ALIGN (str_rtx), str_mode, 0);
4080 if (str_mode == VOIDmode)
4081 return false;
4082 str_bitsize = GET_MODE_BITSIZE (str_mode);
4084 offset1 = bitpos;
4085 bitpos %= str_bitsize;
4086 offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
4087 str_rtx = adjust_address (str_rtx, str_mode, offset1);
4089 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
4090 return false;
4092 /* If the bit field covers the whole REG/MEM, store_field
4093 will likely generate better code. */
4094 if (bitsize >= str_bitsize)
4095 return false;
4097 /* We can't handle fields split across multiple entities. */
4098 if (bitpos + bitsize > str_bitsize)
4099 return false;
4101 if (BYTES_BIG_ENDIAN)
4102 bitpos = str_bitsize - bitpos - bitsize;
4104 switch (TREE_CODE (src))
4106 case PLUS_EXPR:
4107 case MINUS_EXPR:
4108 /* For now, just optimize the case of the topmost bitfield
4109 where we don't need to do any masking and also
4110 1-bit bitfields where xor can be used.
4111 We might win by one instruction for the other bitfields
4112 too if insv/extv instructions aren't used, so that
4113 can be added later. */
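/* E.g. (illustrative): for a 1-bit field F, both F += 1 and F -= 1
   simply flip the bit, so they are emitted below as a single XOR of
   the bit's mask into the containing word.  */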
4114 if (bitpos + bitsize != str_bitsize
4115 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
4116 break;
4118 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4119 value = convert_modes (str_mode,
4120 TYPE_MODE (TREE_TYPE (op1)), value,
4121 TYPE_UNSIGNED (TREE_TYPE (op1)));
4123 /* We may be accessing data outside the field, which means
4124 we can alias adjacent data. */
4125 if (MEM_P (str_rtx))
4127 str_rtx = shallow_copy_rtx (str_rtx);
4128 set_mem_alias_set (str_rtx, 0);
4129 set_mem_expr (str_rtx, 0);
4132 binop = TREE_CODE (src) == PLUS_EXPR ? add_optab : sub_optab;
4133 if (bitsize == 1 && bitpos + bitsize != str_bitsize)
4135 value = expand_and (str_mode, value, const1_rtx, NULL);
4136 binop = xor_optab;
4138 value = expand_shift (LSHIFT_EXPR, str_mode, value,
4139 build_int_cst (NULL_TREE, bitpos),
4140 NULL_RTX, 1);
4141 result = expand_binop (str_mode, binop, str_rtx,
4142 value, str_rtx, 1, OPTAB_WIDEN);
4143 if (result != str_rtx)
4144 emit_move_insn (str_rtx, result);
4145 return true;
4147 case BIT_IOR_EXPR:
4148 case BIT_XOR_EXPR:
4149 if (TREE_CODE (op1) != INTEGER_CST)
4150 break;
4151 value = expand_expr (op1, NULL_RTX, GET_MODE (str_rtx), EXPAND_NORMAL);
4152 value = convert_modes (GET_MODE (str_rtx),
4153 TYPE_MODE (TREE_TYPE (op1)), value,
4154 TYPE_UNSIGNED (TREE_TYPE (op1)));
4156 /* We may be accessing data outside the field, which means
4157 we can alias adjacent data. */
4158 if (MEM_P (str_rtx))
4160 str_rtx = shallow_copy_rtx (str_rtx);
4161 set_mem_alias_set (str_rtx, 0);
4162 set_mem_expr (str_rtx, 0);
4165 binop = TREE_CODE (src) == BIT_IOR_EXPR ? ior_optab : xor_optab;
4166 if (bitpos + bitsize != GET_MODE_BITSIZE (GET_MODE (str_rtx)))
4168 rtx mask = GEN_INT (((unsigned HOST_WIDE_INT) 1 << bitsize)
4169 - 1);
4170 value = expand_and (GET_MODE (str_rtx), value, mask,
4171 NULL_RTX);
4173 value = expand_shift (LSHIFT_EXPR, GET_MODE (str_rtx), value,
4174 build_int_cst (NULL_TREE, bitpos),
4175 NULL_RTX, 1);
4176 result = expand_binop (GET_MODE (str_rtx), binop, str_rtx,
4177 value, str_rtx, 1, OPTAB_WIDEN);
4178 if (result != str_rtx)
4179 emit_move_insn (str_rtx, result);
4180 return true;
4182 default:
4183 break;
4186 return false;
4190 /* Expand an assignment that stores the value of FROM into TO. If NONTEMPORAL
4191 is true, try generating a nontemporal store. */
4193 void
4194 expand_assignment (tree to, tree from, bool nontemporal)
4196 rtx to_rtx = 0;
4197 rtx result;
4199 /* Don't crash if the lhs of the assignment was erroneous. */
4200 if (TREE_CODE (to) == ERROR_MARK)
4202 result = expand_normal (from);
4203 return;
4206 /* Optimize away no-op moves without side-effects. */
4207 if (operand_equal_p (to, from, 0))
4208 return;
4210 /* Assignment of a structure component needs special treatment
4211 if the structure component's rtx is not simply a MEM.
4212 Assignment of an array element at a constant index, and assignment of
4213 an array element in an unaligned packed structure field, have the same
4214 problem. */
4215 if (handled_component_p (to)
4216 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4218 enum machine_mode mode1;
4219 HOST_WIDE_INT bitsize, bitpos;
4220 tree offset;
4221 int unsignedp;
4222 int volatilep = 0;
4223 tree tem;
4225 push_temp_slots ();
4226 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4227 &unsignedp, &volatilep, true);
4229 /* If we are going to use store_bit_field and extract_bit_field,
4230 make sure to_rtx will be safe for multiple use. */
4232 to_rtx = expand_normal (tem);
4234 if (offset != 0)
4236 enum machine_mode address_mode;
4237 rtx offset_rtx;
4239 if (!MEM_P (to_rtx))
4241 /* We can get constant negative offsets into arrays with broken
4242 user code. Translate this to a trap instead of ICEing. */
4243 gcc_assert (TREE_CODE (offset) == INTEGER_CST);
4244 expand_builtin_trap ();
4245 to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
4248 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4249 address_mode
4250 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (to_rtx));
4251 if (GET_MODE (offset_rtx) != address_mode)
4252 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
4254 /* A constant address in TO_RTX can have VOIDmode, we must not try
4255 to call force_reg for that case. Avoid that case. */
4256 if (MEM_P (to_rtx)
4257 && GET_MODE (to_rtx) == BLKmode
4258 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
4259 && bitsize > 0
4260 && (bitpos % bitsize) == 0
4261 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4262 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
4264 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4265 bitpos = 0;
4268 to_rtx = offset_address (to_rtx, offset_rtx,
4269 highest_pow2_factor_for_target (to,
4270 offset));
4273 /* Handle expand_expr of a complex value returning a CONCAT. */
4274 if (GET_CODE (to_rtx) == CONCAT)
4276 if (COMPLEX_MODE_P (TYPE_MODE (TREE_TYPE (from))))
4278 gcc_assert (bitpos == 0);
4279 result = store_expr (from, to_rtx, false, nontemporal);
4281 else
4283 gcc_assert (bitpos == 0 || bitpos == GET_MODE_BITSIZE (mode1));
4284 result = store_expr (from, XEXP (to_rtx, bitpos != 0), false,
4285 nontemporal);
4288 else
4290 if (MEM_P (to_rtx))
4292 /* If the field is at offset zero, we could have been given the
4293 DECL_RTX of the parent struct. Don't munge it. */
4294 to_rtx = shallow_copy_rtx (to_rtx);
4296 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4298 /* Deal with volatile and readonly fields. The former is only
4299 done for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
4300 if (volatilep)
4301 MEM_VOLATILE_P (to_rtx) = 1;
4302 if (component_uses_parent_alias_set (to))
4303 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4306 if (optimize_bitfield_assignment_op (bitsize, bitpos, mode1,
4307 to_rtx, to, from))
4308 result = NULL;
4309 else
4310 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
4311 TREE_TYPE (tem), get_alias_set (to),
4312 nontemporal);
4315 if (result)
4316 preserve_temp_slots (result);
4317 free_temp_slots ();
4318 pop_temp_slots ();
4319 return;
4322 else if (TREE_CODE (to) == MISALIGNED_INDIRECT_REF)
4324 addr_space_t as = ADDR_SPACE_GENERIC;
4325 enum machine_mode mode, op_mode1;
4326 enum insn_code icode;
4327 rtx reg, addr, mem, insn;
4329 if (POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (to, 0))))
4330 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (to, 0))));
4332 reg = expand_expr (from, NULL_RTX, VOIDmode, EXPAND_NORMAL);
4333 reg = force_not_mem (reg);
4335 mode = TYPE_MODE (TREE_TYPE (to));
4336 addr = expand_expr (TREE_OPERAND (to, 0), NULL_RTX, VOIDmode,
4337 EXPAND_SUM);
4338 addr = memory_address_addr_space (mode, addr, as);
4339 mem = gen_rtx_MEM (mode, addr);
4341 set_mem_attributes (mem, to, 0);
4342 set_mem_addr_space (mem, as);
4344 icode = movmisalign_optab->handlers[mode].insn_code;
4345 gcc_assert (icode != CODE_FOR_nothing);
4347 op_mode1 = insn_data[icode].operand[1].mode;
4348 if (! (*insn_data[icode].operand[1].predicate) (reg, op_mode1)
4349 && op_mode1 != VOIDmode)
4350 reg = copy_to_mode_reg (op_mode1, reg);
4352 insn = GEN_FCN (icode) (mem, reg);
4353 emit_insn (insn);
4354 return;
4357 /* If the rhs is a function call and its value is not an aggregate,
4358 call the function before we start to compute the lhs.
4359 This is needed for correct code for cases such as
4360 val = setjmp (buf) on machines where a reference to val
4361 requires loading up part of an address in a separate insn.
4363 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4364 since it might be a promoted variable where the zero- or sign-extension
4365 needs to be done. Handling this in the normal way is safe because no
4366 computation is done before the call. The same is true for SSA names. */
4367 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
4368 && COMPLETE_TYPE_P (TREE_TYPE (from))
4369 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
4370 && ! (((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
4371 && REG_P (DECL_RTL (to)))
4372 || TREE_CODE (to) == SSA_NAME))
4374 rtx value;
4376 push_temp_slots ();
4377 value = expand_normal (from);
4378 if (to_rtx == 0)
4379 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4381 /* Handle calls that return values in multiple non-contiguous locations.
4382 The Irix 6 ABI has examples of this. */
4383 if (GET_CODE (to_rtx) == PARALLEL)
4384 emit_group_load (to_rtx, value, TREE_TYPE (from),
4385 int_size_in_bytes (TREE_TYPE (from)));
4386 else if (GET_MODE (to_rtx) == BLKmode)
4387 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
4388 else
4390 if (POINTER_TYPE_P (TREE_TYPE (to)))
4391 value = convert_memory_address_addr_space
4392 (GET_MODE (to_rtx), value,
4393 TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (to))));
4395 emit_move_insn (to_rtx, value);
4397 preserve_temp_slots (to_rtx);
4398 free_temp_slots ();
4399 pop_temp_slots ();
4400 return;
4403 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
4404 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
4406 if (to_rtx == 0)
4407 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4409 /* Don't move directly into a return register. */
4410 if (TREE_CODE (to) == RESULT_DECL
4411 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
4413 rtx temp;
4415 push_temp_slots ();
4416 temp = expand_expr (from, NULL_RTX, GET_MODE (to_rtx), EXPAND_NORMAL);
4418 if (GET_CODE (to_rtx) == PARALLEL)
4419 emit_group_load (to_rtx, temp, TREE_TYPE (from),
4420 int_size_in_bytes (TREE_TYPE (from)));
4421 else
4422 emit_move_insn (to_rtx, temp);
4424 preserve_temp_slots (to_rtx);
4425 free_temp_slots ();
4426 pop_temp_slots ();
4427 return;
4430 /* In case we are returning the contents of an object which overlaps
4431 the place the value is being stored, use a safe function when copying
4432 a value through a pointer into a structure value return block. */
4433 if (TREE_CODE (to) == RESULT_DECL
4434 && TREE_CODE (from) == INDIRECT_REF
4435 && ADDR_SPACE_GENERIC_P
4436 (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (from, 0)))))
4437 && refs_may_alias_p (to, from)
4438 && cfun->returns_struct
4439 && !cfun->returns_pcc_struct)
4441 rtx from_rtx, size;
4443 push_temp_slots ();
4444 size = expr_size (from);
4445 from_rtx = expand_normal (from);
4447 emit_library_call (memmove_libfunc, LCT_NORMAL,
4448 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4449 XEXP (from_rtx, 0), Pmode,
4450 convert_to_mode (TYPE_MODE (sizetype),
4451 size, TYPE_UNSIGNED (sizetype)),
4452 TYPE_MODE (sizetype));
4454 preserve_temp_slots (to_rtx);
4455 free_temp_slots ();
4456 pop_temp_slots ();
4457 return;
4460 /* Compute FROM and store the value in the rtx we got. */
4462 push_temp_slots ();
4463 result = store_expr (from, to_rtx, 0, nontemporal);
4464 preserve_temp_slots (result);
4465 free_temp_slots ();
4466 pop_temp_slots ();
4467 return;
4470 /* Emits nontemporal store insn that moves FROM to TO. Returns true if this
4471 succeeded, false otherwise. */
4473 bool
4474 emit_storent_insn (rtx to, rtx from)
4476 enum machine_mode mode = GET_MODE (to), imode;
4477 enum insn_code code = optab_handler (storent_optab, mode)->insn_code;
4478 rtx pattern;
4480 if (code == CODE_FOR_nothing)
4481 return false;
4483 imode = insn_data[code].operand[0].mode;
4484 if (!insn_data[code].operand[0].predicate (to, imode))
4485 return false;
4487 imode = insn_data[code].operand[1].mode;
4488 if (!insn_data[code].operand[1].predicate (from, imode))
4490 from = copy_to_mode_reg (imode, from);
4491 if (!insn_data[code].operand[1].predicate (from, imode))
4492 return false;
4495 pattern = GEN_FCN (code) (to, from);
4496 if (pattern == NULL_RTX)
4497 return false;
4499 emit_insn (pattern);
4500 return true;
4503 /* Generate code for computing expression EXP,
4504 and storing the value into TARGET.
4506 If the mode is BLKmode then we may return TARGET itself.
4507 It turns out that in BLKmode this doesn't cause a problem,
4508 because C has no operators that could combine two different
4509 assignments into the same BLKmode object with different values
4510 and no intervening sequence point. Will other languages need this to
4511 be more thorough?
4513 If CALL_PARAM_P is nonzero, this is a store into a call param on the
4514 stack, and block moves may need to be treated specially.
4516 If NONTEMPORAL is true, try using a nontemporal store instruction. */
4519 store_expr (tree exp, rtx target, int call_param_p, bool nontemporal)
4521 rtx temp;
4522 rtx alt_rtl = NULL_RTX;
4523 location_t loc = EXPR_LOCATION (exp);
4525 if (VOID_TYPE_P (TREE_TYPE (exp)))
4527 /* C++ can generate ?: expressions with a throw expression in one
4528 branch and an rvalue in the other. Here, we resolve attempts to
4529 store the throw expression's nonexistent result. */
4530 gcc_assert (!call_param_p);
4531 expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
4532 return NULL_RTX;
4534 if (TREE_CODE (exp) == COMPOUND_EXPR)
4536 /* Perform first part of compound expression, then assign from second
4537 part. */
4538 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4539 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4540 return store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
4541 nontemporal);
4543 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4545 /* For a conditional expression, get a safe form of the target. Then
4546 test the condition, doing the appropriate assignment on either
4547 side. This avoids the creation of unnecessary temporaries.
4548 For non-BLKmode, it is more efficient not to do this. */
4550 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4552 do_pending_stack_adjust ();
4553 NO_DEFER_POP;
4554 jumpifnot (TREE_OPERAND (exp, 0), lab1, -1);
4555 store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
4556 nontemporal);
4557 emit_jump_insn (gen_jump (lab2));
4558 emit_barrier ();
4559 emit_label (lab1);
4560 store_expr (TREE_OPERAND (exp, 2), target, call_param_p,
4561 nontemporal);
4562 emit_label (lab2);
4563 OK_DEFER_POP;
4565 return NULL_RTX;
4567 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4568 /* If this is a scalar in a register that is stored in a wider mode
4569 than the declared mode, compute the result into its declared mode
4570 and then convert to the wider mode. Our value is the computed
4571 expression. */
4573 rtx inner_target = 0;
4575 /* We can do the conversion inside EXP, which will often result
4576 in some optimizations. Do the conversion in two steps: first
4577 change the signedness, if needed, then the extension. But don't
4578 do this if the type of EXP is a subtype of something else
4579 since then the conversion might involve more than just
4580 converting modes. */
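/* Illustrative example: TARGET might be (subreg:QI (reg:SI 60)) for a
   char variable promoted to a full SImode register (register number 60
   is made up).  The value is then extended into (reg:SI 60) with the
   signedness recorded by SUBREG_PROMOTED_UNSIGNED_P, and when the
   precisions match, the conversion is folded into EXP itself so the
   whole expression is expanded directly in the wider mode.  */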
4581 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
4582 && TREE_TYPE (TREE_TYPE (exp)) == 0
4583 && GET_MODE_PRECISION (GET_MODE (target))
4584 == TYPE_PRECISION (TREE_TYPE (exp)))
4586 if (TYPE_UNSIGNED (TREE_TYPE (exp))
4587 != SUBREG_PROMOTED_UNSIGNED_P (target))
4589 /* Some types, e.g. Fortran's logical*4, won't have a signed
4590 version, so use the mode instead. */
4591 tree ntype
4592 = (signed_or_unsigned_type_for
4593 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)));
4594 if (ntype == NULL)
4595 ntype = lang_hooks.types.type_for_mode
4596 (TYPE_MODE (TREE_TYPE (exp)),
4597 SUBREG_PROMOTED_UNSIGNED_P (target));
4599 exp = fold_convert_loc (loc, ntype, exp);
4602 exp = fold_convert_loc (loc, lang_hooks.types.type_for_mode
4603 (GET_MODE (SUBREG_REG (target)),
4604 SUBREG_PROMOTED_UNSIGNED_P (target)),
4605 exp);
4607 inner_target = SUBREG_REG (target);
4610 temp = expand_expr (exp, inner_target, VOIDmode,
4611 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4613 /* If TEMP is a VOIDmode constant, use convert_modes to make
4614 sure that we properly convert it. */
4615 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4617 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4618 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4619 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4620 GET_MODE (target), temp,
4621 SUBREG_PROMOTED_UNSIGNED_P (target));
4624 convert_move (SUBREG_REG (target), temp,
4625 SUBREG_PROMOTED_UNSIGNED_P (target));
4627 return NULL_RTX;
4629 else if (TREE_CODE (exp) == STRING_CST
4630 && !nontemporal && !call_param_p
4631 && TREE_STRING_LENGTH (exp) > 0
4632 && TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
4634 /* Optimize initialization of an array with a STRING_CST. */
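/* E.g. (illustrative):
     char buf[64] = "abc";
   stores the bytes of the string literal (padded up to a
   store-by-pieces boundary) directly into BUF and clears the remaining
   tail with one clear_storage call, instead of expanding a full block
   copy.  */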
4635 HOST_WIDE_INT exp_len, str_copy_len;
4636 rtx dest_mem;
4638 exp_len = int_expr_size (exp);
4639 if (exp_len <= 0)
4640 goto normal_expr;
4642 str_copy_len = strlen (TREE_STRING_POINTER (exp));
4643 if (str_copy_len < TREE_STRING_LENGTH (exp) - 1)
4644 goto normal_expr;
4646 str_copy_len = TREE_STRING_LENGTH (exp);
4647 if ((STORE_MAX_PIECES & (STORE_MAX_PIECES - 1)) == 0)
4649 str_copy_len += STORE_MAX_PIECES - 1;
4650 str_copy_len &= ~(STORE_MAX_PIECES - 1);
4652 str_copy_len = MIN (str_copy_len, exp_len);
4653 if (!can_store_by_pieces (str_copy_len, builtin_strncpy_read_str,
4654 CONST_CAST(char *, TREE_STRING_POINTER (exp)),
4655 MEM_ALIGN (target), false))
4656 goto normal_expr;
4658 dest_mem = target;
4660 dest_mem = store_by_pieces (dest_mem,
4661 str_copy_len, builtin_strncpy_read_str,
4662 CONST_CAST(char *, TREE_STRING_POINTER (exp)),
4663 MEM_ALIGN (target), false,
4664 exp_len > str_copy_len ? 1 : 0);
4665 if (exp_len > str_copy_len)
4666 clear_storage (adjust_address (dest_mem, BLKmode, 0),
4667 GEN_INT (exp_len - str_copy_len),
4668 BLOCK_OP_NORMAL);
4669 return NULL_RTX;
4671 else
4673 rtx tmp_target;
4675 normal_expr:
4676 /* If we want to use a nontemporal store, force the value to
4677 register first. */
4678 tmp_target = nontemporal ? NULL_RTX : target;
4679 temp = expand_expr_real (exp, tmp_target, GET_MODE (target),
4680 (call_param_p
4681 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
4682 &alt_rtl);
4685 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4686 the same as that of TARGET, adjust the constant. This is needed, for
4687 example, in case it is a CONST_DOUBLE and we want only a word-sized
4688 value. */
4689 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4690 && TREE_CODE (exp) != ERROR_MARK
4691 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4692 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4693 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
4695 /* If value was not generated in the target, store it there.
4696 Convert the value to TARGET's type first if necessary and emit the
4697 pending increments that were queued while expanding EXP.
4698 Note that we cannot emit the whole queue blindly because this will
4699 effectively disable the POST_INC optimization later.
4701 If TEMP and TARGET compare equal according to rtx_equal_p, but
4702 one or both of them are volatile memory refs, we have to distinguish
4703 two cases:
4704 - expand_expr has used TARGET. In this case, we must not generate
4705 another copy. This can be detected by TARGET being equal according
4706 to == .
4707 - expand_expr has not used TARGET - that means that the source just
4708 happens to have the same RTX form. Since temp will have been created
4709 by expand_expr, it will compare unequal according to == .
4710 We must generate a copy in this case, to reach the correct number
4711 of volatile memory references. */
4713 if ((! rtx_equal_p (temp, target)
4714 || (temp != target && (side_effects_p (temp)
4715 || side_effects_p (target))))
4716 && TREE_CODE (exp) != ERROR_MARK
4717 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4718 but TARGET is not valid memory reference, TEMP will differ
4719 from TARGET although it is really the same location. */
4720 && !(alt_rtl && rtx_equal_p (alt_rtl, target))
4721 /* If there's nothing to copy, don't bother. Don't call
4722 expr_size unless necessary, because some front-ends' (C++)
4723 expr_size hook must not be given objects that are not
4724 supposed to be bit-copied or bit-initialized. */
4725 && expr_size (exp) != const0_rtx)
4727 if (GET_MODE (temp) != GET_MODE (target)
4728 && GET_MODE (temp) != VOIDmode)
4730 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
4731 if (GET_MODE (target) == BLKmode
4732 || GET_MODE (temp) == BLKmode)
4733 emit_block_move (target, temp, expr_size (exp),
4734 (call_param_p
4735 ? BLOCK_OP_CALL_PARM
4736 : BLOCK_OP_NORMAL));
4737 else
4738 convert_move (target, temp, unsignedp);
4741 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4743 /* Handle copying a string constant into an array. The string
4744 constant may be shorter than the array. So copy just the string's
4745 actual length, and clear the rest. First get the size of the data
4746 type of the string, which is actually the size of the target. */
4747 rtx size = expr_size (exp);
4749 if (CONST_INT_P (size)
4750 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4751 emit_block_move (target, temp, size,
4752 (call_param_p
4753 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4754 else
4756 enum machine_mode pointer_mode
4757 = targetm.addr_space.pointer_mode (MEM_ADDR_SPACE (target));
4758 enum machine_mode address_mode
4759 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (target));
4761 /* Compute the size of the data to copy from the string. */
4762 tree copy_size
4763 = size_binop_loc (loc, MIN_EXPR,
4764 make_tree (sizetype, size),
4765 size_int (TREE_STRING_LENGTH (exp)));
4766 rtx copy_size_rtx
4767 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4768 (call_param_p
4769 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4770 rtx label = 0;
4772 /* Copy that much. */
4773 copy_size_rtx = convert_to_mode (pointer_mode, copy_size_rtx,
4774 TYPE_UNSIGNED (sizetype));
4775 emit_block_move (target, temp, copy_size_rtx,
4776 (call_param_p
4777 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4779 /* Figure out how much is left in TARGET that we have to clear.
4780 Do all calculations in pointer_mode. */
4781 if (CONST_INT_P (copy_size_rtx))
4783 size = plus_constant (size, -INTVAL (copy_size_rtx));
4784 target = adjust_address (target, BLKmode,
4785 INTVAL (copy_size_rtx));
4787 else
4789 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4790 copy_size_rtx, NULL_RTX, 0,
4791 OPTAB_LIB_WIDEN);
4793 if (GET_MODE (copy_size_rtx) != address_mode)
4794 copy_size_rtx = convert_to_mode (address_mode,
4795 copy_size_rtx,
4796 TYPE_UNSIGNED (sizetype));
4798 target = offset_address (target, copy_size_rtx,
4799 highest_pow2_factor (copy_size));
4800 label = gen_label_rtx ();
4801 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4802 GET_MODE (size), 0, label);
4805 if (size != const0_rtx)
4806 clear_storage (target, size, BLOCK_OP_NORMAL);
4808 if (label)
4809 emit_label (label);
4812 /* Handle calls that return values in multiple non-contiguous locations.
4813 The Irix 6 ABI has examples of this. */
4814 else if (GET_CODE (target) == PARALLEL)
4815 emit_group_load (target, temp, TREE_TYPE (exp),
4816 int_size_in_bytes (TREE_TYPE (exp)));
4817 else if (GET_MODE (temp) == BLKmode)
4818 emit_block_move (target, temp, expr_size (exp),
4819 (call_param_p
4820 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4821 else if (nontemporal
4822 && emit_storent_insn (target, temp))
4823 /* If we managed to emit a nontemporal store, there is nothing else to
4824 do. */
4826 else
4828 temp = force_operand (temp, target);
4829 if (temp != target)
4830 emit_move_insn (target, temp);
4834 return NULL_RTX;
4837 /* Helper for categorize_ctor_elements. Identical interface. */
4839 static bool
4840 categorize_ctor_elements_1 (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
4841 HOST_WIDE_INT *p_elt_count,
4842 bool *p_must_clear)
4844 unsigned HOST_WIDE_INT idx;
4845 HOST_WIDE_INT nz_elts, elt_count;
4846 tree value, purpose;
4848 /* Whether CTOR is a valid constant initializer, in accordance with what
4849 initializer_constant_valid_p does. If inferred from the constructor
4850 elements, true until proven otherwise. */
4851 bool const_from_elts_p = constructor_static_from_elts_p (ctor);
4852 bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);
4854 nz_elts = 0;
4855 elt_count = 0;
4857 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
4859 HOST_WIDE_INT mult = 1;
4861 if (TREE_CODE (purpose) == RANGE_EXPR)
4863 tree lo_index = TREE_OPERAND (purpose, 0);
4864 tree hi_index = TREE_OPERAND (purpose, 1);
4866 if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
4867 mult = (tree_low_cst (hi_index, 1)
4868 - tree_low_cst (lo_index, 1) + 1);
4871 switch (TREE_CODE (value))
4873 case CONSTRUCTOR:
4875 HOST_WIDE_INT nz = 0, ic = 0;
4877 bool const_elt_p
4878 = categorize_ctor_elements_1 (value, &nz, &ic, p_must_clear);
4880 nz_elts += mult * nz;
4881 elt_count += mult * ic;
4883 if (const_from_elts_p && const_p)
4884 const_p = const_elt_p;
4886 break;
4888 case INTEGER_CST:
4889 case REAL_CST:
4890 case FIXED_CST:
4891 if (!initializer_zerop (value))
4892 nz_elts += mult;
4893 elt_count += mult;
4894 break;
4896 case STRING_CST:
4897 nz_elts += mult * TREE_STRING_LENGTH (value);
4898 elt_count += mult * TREE_STRING_LENGTH (value);
4899 break;
4901 case COMPLEX_CST:
4902 if (!initializer_zerop (TREE_REALPART (value)))
4903 nz_elts += mult;
4904 if (!initializer_zerop (TREE_IMAGPART (value)))
4905 nz_elts += mult;
4906 elt_count += mult;
4907 break;
4909 case VECTOR_CST:
4911 tree v;
4912 for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
4914 if (!initializer_zerop (TREE_VALUE (v)))
4915 nz_elts += mult;
4916 elt_count += mult;
4919 break;
4921 default:
4923 HOST_WIDE_INT tc = count_type_elements (TREE_TYPE (value), true);
4924 if (tc < 1)
4925 tc = 1;
4926 nz_elts += mult * tc;
4927 elt_count += mult * tc;
4929 if (const_from_elts_p && const_p)
4930 const_p = initializer_constant_valid_p (value, TREE_TYPE (value))
4931 != NULL_TREE;
4933 break;
4937 if (!*p_must_clear
4938 && (TREE_CODE (TREE_TYPE (ctor)) == UNION_TYPE
4939 || TREE_CODE (TREE_TYPE (ctor)) == QUAL_UNION_TYPE))
4941 tree init_sub_type;
4942 bool clear_this = true;
4944 if (!VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (ctor)))
4946 /* We don't expect more than one element of the union to be
4947 initialized. Not sure what we should do otherwise... */
4948 gcc_assert (VEC_length (constructor_elt, CONSTRUCTOR_ELTS (ctor))
4949 == 1);
4951 init_sub_type = TREE_TYPE (VEC_index (constructor_elt,
4952 CONSTRUCTOR_ELTS (ctor),
4953 0)->value);
4955 /* ??? We could look at each element of the union, and find the
4956 largest element. Which would avoid comparing the size of the
4957 initialized element against any tail padding in the union.
4958 Doesn't seem worth the effort... */
4959 if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (ctor)),
4960 TYPE_SIZE (init_sub_type)) == 1)
4962 /* And now we have to find out if the element itself is fully
4963 constructed. E.g. for union { struct { int a, b; } s; } u
4964 = { .s = { .a = 1 } }. */
4965 if (elt_count == count_type_elements (init_sub_type, false))
4966 clear_this = false;
4970 *p_must_clear = clear_this;
4973 *p_nz_elts += nz_elts;
4974 *p_elt_count += elt_count;
4976 return const_p;
4979 /* Examine CTOR to discover:
4980 * how many scalar fields are set to nonzero values,
4981 and place it in *P_NZ_ELTS;
4982 * how many scalar fields in total are in CTOR,
4983 and place it in *P_ELT_COUNT.
4984 * if a type is a union, and the initializer from the constructor
4985 is not the largest element in the union, then set *P_MUST_CLEAR.
4987 Return whether or not CTOR is a valid static constant initializer, the same
4988 as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0". */
4990 bool
4991 categorize_ctor_elements (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
4992 HOST_WIDE_INT *p_elt_count,
4993 bool *p_must_clear)
4995 *p_nz_elts = 0;
4996 *p_elt_count = 0;
4997 *p_must_clear = false;
4999 return
5000 categorize_ctor_elements_1 (ctor, p_nz_elts, p_elt_count, p_must_clear);
5003 /* Count the number of scalars in TYPE. Return -1 on overflow or if
5004 TYPE is variable-sized. If ALLOW_FLEXARR is true, don't count a
5005 flexible array member at the end of the structure. */
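/* E.g. (illustrative): struct { int a; double b[4]; } counts as 5
   scalars, while a union or a variable-length array yields -1.  */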
5007 HOST_WIDE_INT
5008 count_type_elements (const_tree type, bool allow_flexarr)
5010 const HOST_WIDE_INT max = ~((HOST_WIDE_INT)1 << (HOST_BITS_PER_WIDE_INT-1));
5011 switch (TREE_CODE (type))
5013 case ARRAY_TYPE:
5015 tree telts = array_type_nelts (type);
5016 if (telts && host_integerp (telts, 1))
5018 HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1;
5019 HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type), false);
5020 if (n == 0)
5021 return 0;
5022 else if (max / n > m)
5023 return n * m;
5025 return -1;
5028 case RECORD_TYPE:
5030 HOST_WIDE_INT n = 0, t;
5031 tree f;
5033 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
5034 if (TREE_CODE (f) == FIELD_DECL)
5036 t = count_type_elements (TREE_TYPE (f), false);
5037 if (t < 0)
5039 /* Check for structures with flexible array member. */
5040 tree tf = TREE_TYPE (f);
5041 if (allow_flexarr
5042 && TREE_CHAIN (f) == NULL
5043 && TREE_CODE (tf) == ARRAY_TYPE
5044 && TYPE_DOMAIN (tf)
5045 && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
5046 && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
5047 && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
5048 && int_size_in_bytes (type) >= 0)
5049 break;
5051 return -1;
5053 n += t;
5056 return n;
5059 case UNION_TYPE:
5060 case QUAL_UNION_TYPE:
5061 return -1;
5063 case COMPLEX_TYPE:
5064 return 2;
5066 case VECTOR_TYPE:
5067 return TYPE_VECTOR_SUBPARTS (type);
5069 case INTEGER_TYPE:
5070 case REAL_TYPE:
5071 case FIXED_POINT_TYPE:
5072 case ENUMERAL_TYPE:
5073 case BOOLEAN_TYPE:
5074 case POINTER_TYPE:
5075 case OFFSET_TYPE:
5076 case REFERENCE_TYPE:
5077 return 1;
5079 case ERROR_MARK:
5080 return 0;
5082 case VOID_TYPE:
5083 case METHOD_TYPE:
5084 case FUNCTION_TYPE:
5085 case LANG_TYPE:
5086 default:
5087 gcc_unreachable ();
5091 /* Return 1 if EXP contains mostly (at least 3/4) zeros. */
5093 static int
5094 mostly_zeros_p (const_tree exp)
5096 if (TREE_CODE (exp) == CONSTRUCTOR)
5099 HOST_WIDE_INT nz_elts, count, elts;
5100 bool must_clear;
5102 categorize_ctor_elements (exp, &nz_elts, &count, &must_clear);
5103 if (must_clear)
5104 return 1;
5106 elts = count_type_elements (TREE_TYPE (exp), false);
5108 return nz_elts < elts / 4;
5111 return initializer_zerop (exp);
5114 /* Return 1 if EXP contains all zeros. */
5116 static int
5117 all_zeros_p (const_tree exp)
5119 if (TREE_CODE (exp) == CONSTRUCTOR)
5122 HOST_WIDE_INT nz_elts, count;
5123 bool must_clear;
5125 categorize_ctor_elements (exp, &nz_elts, &count, &must_clear);
5126 return nz_elts == 0;
5129 return initializer_zerop (exp);
5132 /* Helper function for store_constructor.
5133 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
5134 TYPE is the type of the CONSTRUCTOR, not the element type.
5135 CLEARED is as for store_constructor.
5136 ALIAS_SET is the alias set to use for any stores.
5138 This provides a recursive shortcut back to store_constructor when it isn't
5139 necessary to go through store_field. This is so that we can pass through
5140 the cleared field to let store_constructor know that we may not have to
5141 clear a substructure if the outer structure has already been cleared. */
5143 static void
5144 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
5145 HOST_WIDE_INT bitpos, enum machine_mode mode,
5146 tree exp, tree type, int cleared,
5147 alias_set_type alias_set)
5149 if (TREE_CODE (exp) == CONSTRUCTOR
5150 /* We can only call store_constructor recursively if the size and
5151 bit position are on a byte boundary. */
5152 && bitpos % BITS_PER_UNIT == 0
5153 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
5154 /* If we have a nonzero bitpos for a register target, then we just
5155 let store_field do the bitfield handling. This is unlikely to
5156 generate unnecessary clear instructions anyways. */
5157 && (bitpos == 0 || MEM_P (target)))
5159 if (MEM_P (target))
5160 target
5161 = adjust_address (target,
5162 GET_MODE (target) == BLKmode
5163 || 0 != (bitpos
5164 % GET_MODE_ALIGNMENT (GET_MODE (target)))
5165 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
5168 /* Update the alias set, if required. */
5169 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
5170 && MEM_ALIAS_SET (target) != 0)
5172 target = copy_rtx (target);
5173 set_mem_alias_set (target, alias_set);
5176 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
5178 else
5179 store_field (target, bitsize, bitpos, mode, exp, type, alias_set, false);
5182 /* Store the value of constructor EXP into the rtx TARGET.
5183 TARGET is either a REG or a MEM; we know it cannot conflict, since
5184 safe_from_p has been called.
5185 CLEARED is true if TARGET is known to have been zero'd.
5186 SIZE is the number of bytes of TARGET we are allowed to modify: this
5187 may not be the same as the size of EXP if we are assigning to a field
5188 which has been packed to exclude padding bits. */
5190 static void
5191 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
5193 tree type = TREE_TYPE (exp);
5194 #ifdef WORD_REGISTER_OPERATIONS
5195 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
5196 #endif
5198 switch (TREE_CODE (type))
5200 case RECORD_TYPE:
5201 case UNION_TYPE:
5202 case QUAL_UNION_TYPE:
5204 unsigned HOST_WIDE_INT idx;
5205 tree field, value;
5207 /* If size is zero or the target is already cleared, do nothing. */
5208 if (size == 0 || cleared)
5209 cleared = 1;
5210 /* We either clear the aggregate or indicate the value is dead. */
5211 else if ((TREE_CODE (type) == UNION_TYPE
5212 || TREE_CODE (type) == QUAL_UNION_TYPE)
5213 && ! CONSTRUCTOR_ELTS (exp))
5214 /* If the constructor is empty, clear the union. */
5216 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
5217 cleared = 1;
5220 /* If we are building a static constructor into a register,
5221 set the initial value to zero so we can fold the value into
5222 a constant. But if more than one register is involved,
5223 this probably loses. */
5224 else if (REG_P (target) && TREE_STATIC (exp)
5225 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
5227 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5228 cleared = 1;
5231 /* If the constructor has fewer fields than the structure or
5232 if we are initializing the structure to mostly zeros, clear
5233 the whole structure first. Don't do this if TARGET is a
5234 register whose mode size isn't equal to SIZE since
5235 clear_storage can't handle this case. */
5236 else if (size > 0
5237 && (((int)VEC_length (constructor_elt, CONSTRUCTOR_ELTS (exp))
5238 != fields_length (type))
5239 || mostly_zeros_p (exp))
5240 && (!REG_P (target)
5241 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
5242 == size)))
5244 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5245 cleared = 1;
5248 if (REG_P (target) && !cleared)
5249 emit_clobber (target);
5251 /* Store each element of the constructor into the
5252 corresponding field of TARGET. */
5253 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
5255 enum machine_mode mode;
5256 HOST_WIDE_INT bitsize;
5257 HOST_WIDE_INT bitpos = 0;
5258 tree offset;
5259 rtx to_rtx = target;
5261 /* Just ignore missing fields. We cleared the whole
5262 structure, above, if any fields are missing. */
5263 if (field == 0)
5264 continue;
5266 if (cleared && initializer_zerop (value))
5267 continue;
5269 if (host_integerp (DECL_SIZE (field), 1))
5270 bitsize = tree_low_cst (DECL_SIZE (field), 1);
5271 else
5272 bitsize = -1;
5274 mode = DECL_MODE (field);
5275 if (DECL_BIT_FIELD (field))
5276 mode = VOIDmode;
5278 offset = DECL_FIELD_OFFSET (field);
5279 if (host_integerp (offset, 0)
5280 && host_integerp (bit_position (field), 0))
5282 bitpos = int_bit_position (field);
5283 offset = 0;
5285 else
5286 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
5288 if (offset)
5290 enum machine_mode address_mode;
5291 rtx offset_rtx;
5293 offset
5294 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
5295 make_tree (TREE_TYPE (exp),
5296 target));
5298 offset_rtx = expand_normal (offset);
5299 gcc_assert (MEM_P (to_rtx));
5301 address_mode
5302 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (to_rtx));
5303 if (GET_MODE (offset_rtx) != address_mode)
5304 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
5306 to_rtx = offset_address (to_rtx, offset_rtx,
5307 highest_pow2_factor (offset));
5310 #ifdef WORD_REGISTER_OPERATIONS
5311 /* If this initializes a field that is smaller than a
5312 word, at the start of a word, try to widen it to a full
5313 word. This special case allows us to output C++ member
5314 function initializations in a form that the optimizers
5315 can understand. */
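/* E.g. (illustrative): storing the constant 5 into a 16-bit field at
   bit position 0 of a word-sized register target is widened here into
   a full word_mode store (with the value shifted up on big-endian
   targets), which later RTL passes handle much better than a
   bit-field insertion.  */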
5316 if (REG_P (target)
5317 && bitsize < BITS_PER_WORD
5318 && bitpos % BITS_PER_WORD == 0
5319 && GET_MODE_CLASS (mode) == MODE_INT
5320 && TREE_CODE (value) == INTEGER_CST
5321 && exp_size >= 0
5322 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
5324 tree type = TREE_TYPE (value);
5326 if (TYPE_PRECISION (type) < BITS_PER_WORD)
5328 type = lang_hooks.types.type_for_size
5329 (BITS_PER_WORD, TYPE_UNSIGNED (type));
5330 value = fold_convert (type, value);
5333 if (BYTES_BIG_ENDIAN)
5334 value
5335 = fold_build2 (LSHIFT_EXPR, type, value,
5336 build_int_cst (type,
5337 BITS_PER_WORD - bitsize));
5338 bitsize = BITS_PER_WORD;
5339 mode = word_mode;
5341 #endif
5343 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
5344 && DECL_NONADDRESSABLE_P (field))
5346 to_rtx = copy_rtx (to_rtx);
5347 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
5350 store_constructor_field (to_rtx, bitsize, bitpos, mode,
5351 value, type, cleared,
5352 get_alias_set (TREE_TYPE (field)));
5354 break;
5356 case ARRAY_TYPE:
5358 tree value, index;
5359 unsigned HOST_WIDE_INT i;
5360 int need_to_clear;
5361 tree domain;
5362 tree elttype = TREE_TYPE (type);
5363 int const_bounds_p;
5364 HOST_WIDE_INT minelt = 0;
5365 HOST_WIDE_INT maxelt = 0;
5367 domain = TYPE_DOMAIN (type);
5368 const_bounds_p = (TYPE_MIN_VALUE (domain)
5369 && TYPE_MAX_VALUE (domain)
5370 && host_integerp (TYPE_MIN_VALUE (domain), 0)
5371 && host_integerp (TYPE_MAX_VALUE (domain), 0));
5373 /* If we have constant bounds for the range of the type, get them. */
5374 if (const_bounds_p)
5376 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
5377 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
5380 /* If the constructor has fewer elements than the array, clear
5381 the whole array first. Similarly if this is a static
5382 constructor of a non-BLKmode object. */
5383 if (cleared)
5384 need_to_clear = 0;
5385 else if (REG_P (target) && TREE_STATIC (exp))
5386 need_to_clear = 1;
5387 else
5389 unsigned HOST_WIDE_INT idx;
5390 tree index, value;
5391 HOST_WIDE_INT count = 0, zero_count = 0;
5392 need_to_clear = ! const_bounds_p;
5394 /* This loop is a more accurate version of the loop in
5395 mostly_zeros_p (it handles RANGE_EXPR in an index). It
5396 is also needed to check for missing elements. */
5397 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
5399 HOST_WIDE_INT this_node_count;
5401 if (need_to_clear)
5402 break;
5404 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5406 tree lo_index = TREE_OPERAND (index, 0);
5407 tree hi_index = TREE_OPERAND (index, 1);
5409 if (! host_integerp (lo_index, 1)
5410 || ! host_integerp (hi_index, 1))
5412 need_to_clear = 1;
5413 break;
5416 this_node_count = (tree_low_cst (hi_index, 1)
5417 - tree_low_cst (lo_index, 1) + 1);
5419 else
5420 this_node_count = 1;
5422 count += this_node_count;
5423 if (mostly_zeros_p (value))
5424 zero_count += this_node_count;
5427 /* Clear the entire array first if there are any missing
5428 elements, or if the incidence of zero elements is >=
5429 75%. */
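/* E.g. (illustrative): int a[8] = { 1 } leaves seven elements
   unmentioned, so the whole array is cleared first and only a[0] is
   stored afterwards; likewise an initializer that is at least 75%
   zeros is cleared up front rather than storing each zero element
   individually.  */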
5430 if (! need_to_clear
5431 && (count < maxelt - minelt + 1
5432 || 4 * zero_count >= 3 * count))
5433 need_to_clear = 1;
5436 if (need_to_clear && size > 0)
5438 if (REG_P (target))
5439 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5440 else
5441 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5442 cleared = 1;
5445 if (!cleared && REG_P (target))
5446 /* Inform later passes that the old value is dead. */
5447 emit_clobber (target);
5449 /* Store each element of the constructor into the
5450 corresponding element of TARGET, determined by counting the
5451 elements. */
5452 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
5454 enum machine_mode mode;
5455 HOST_WIDE_INT bitsize;
5456 HOST_WIDE_INT bitpos;
5457 rtx xtarget = target;
5459 if (cleared && initializer_zerop (value))
5460 continue;
5462 mode = TYPE_MODE (elttype);
5463 if (mode == BLKmode)
5464 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
5465 ? tree_low_cst (TYPE_SIZE (elttype), 1)
5466 : -1);
5467 else
5468 bitsize = GET_MODE_BITSIZE (mode);
5470 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5472 tree lo_index = TREE_OPERAND (index, 0);
5473 tree hi_index = TREE_OPERAND (index, 1);
5474 rtx index_r, pos_rtx;
5475 HOST_WIDE_INT lo, hi, count;
5476 tree position;
5478 /* If the range is constant and "small", unroll the loop. */
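/* E.g. (illustrative, using a GNU designated range):
     int a[16] = { [2 ... 5] = 7 };
   has a constant RANGE_EXPR index covering four elements, so the four
   stores are emitted directly here instead of building the run-time
   loop in the else branch below.  */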
5479 if (const_bounds_p
5480 && host_integerp (lo_index, 0)
5481 && host_integerp (hi_index, 0)
5482 && (lo = tree_low_cst (lo_index, 0),
5483 hi = tree_low_cst (hi_index, 0),
5484 count = hi - lo + 1,
5485 (!MEM_P (target)
5486 || count <= 2
5487 || (host_integerp (TYPE_SIZE (elttype), 1)
5488 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
5489 <= 40 * 8)))))
5491 lo -= minelt; hi -= minelt;
5492 for (; lo <= hi; lo++)
5494 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
5496 if (MEM_P (target)
5497 && !MEM_KEEP_ALIAS_SET_P (target)
5498 && TREE_CODE (type) == ARRAY_TYPE
5499 && TYPE_NONALIASED_COMPONENT (type))
5501 target = copy_rtx (target);
5502 MEM_KEEP_ALIAS_SET_P (target) = 1;
5505 store_constructor_field
5506 (target, bitsize, bitpos, mode, value, type, cleared,
5507 get_alias_set (elttype));
5510 else
5512 rtx loop_start = gen_label_rtx ();
5513 rtx loop_end = gen_label_rtx ();
5514 tree exit_cond;
5516 expand_normal (hi_index);
5518 index = build_decl (EXPR_LOCATION (exp),
5519 VAR_DECL, NULL_TREE, domain);
5520 index_r = gen_reg_rtx (promote_decl_mode (index, NULL));
5521 SET_DECL_RTL (index, index_r);
5522 store_expr (lo_index, index_r, 0, false);
5524 /* Build the head of the loop. */
5525 do_pending_stack_adjust ();
5526 emit_label (loop_start);
5528 /* Assign value to element index. */
5529 position =
5530 fold_convert (ssizetype,
5531 fold_build2 (MINUS_EXPR,
5532 TREE_TYPE (index),
5533 index,
5534 TYPE_MIN_VALUE (domain)));
5536 position =
5537 size_binop (MULT_EXPR, position,
5538 fold_convert (ssizetype,
5539 TYPE_SIZE_UNIT (elttype)));
5541 pos_rtx = expand_normal (position);
5542 xtarget = offset_address (target, pos_rtx,
5543 highest_pow2_factor (position));
5544 xtarget = adjust_address (xtarget, mode, 0);
5545 if (TREE_CODE (value) == CONSTRUCTOR)
5546 store_constructor (value, xtarget, cleared,
5547 bitsize / BITS_PER_UNIT);
5548 else
5549 store_expr (value, xtarget, 0, false);
5551 /* Generate a conditional jump to exit the loop. */
5552 exit_cond = build2 (LT_EXPR, integer_type_node,
5553 index, hi_index);
5554 jumpif (exit_cond, loop_end, -1);
5556 /* Update the loop counter, and jump to the head of
5557 the loop. */
5558 expand_assignment (index,
5559 build2 (PLUS_EXPR, TREE_TYPE (index),
5560 index, integer_one_node),
5561 false);
5563 emit_jump (loop_start);
5565 /* Build the end of the loop. */
5566 emit_label (loop_end);
5569 else if ((index != 0 && ! host_integerp (index, 0))
5570 || ! host_integerp (TYPE_SIZE (elttype), 1))
5572 tree position;
5574 if (index == 0)
5575 index = ssize_int (1);
5577 if (minelt)
5578 index = fold_convert (ssizetype,
5579 fold_build2 (MINUS_EXPR,
5580 TREE_TYPE (index),
5581 index,
5582 TYPE_MIN_VALUE (domain)));
5584 position =
5585 size_binop (MULT_EXPR, index,
5586 fold_convert (ssizetype,
5587 TYPE_SIZE_UNIT (elttype)));
5588 xtarget = offset_address (target,
5589 expand_normal (position),
5590 highest_pow2_factor (position));
5591 xtarget = adjust_address (xtarget, mode, 0);
5592 store_expr (value, xtarget, 0, false);
5594 else
5596 if (index != 0)
5597 bitpos = ((tree_low_cst (index, 0) - minelt)
5598 * tree_low_cst (TYPE_SIZE (elttype), 1));
5599 else
5600 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5602 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
5603 && TREE_CODE (type) == ARRAY_TYPE
5604 && TYPE_NONALIASED_COMPONENT (type))
5606 target = copy_rtx (target);
5607 MEM_KEEP_ALIAS_SET_P (target) = 1;
5609 store_constructor_field (target, bitsize, bitpos, mode, value,
5610 type, cleared, get_alias_set (elttype));
5613 break;
5616 case VECTOR_TYPE:
5618 unsigned HOST_WIDE_INT idx;
5619 constructor_elt *ce;
5620 int i;
5621 int need_to_clear;
5622 int icode = 0;
5623 tree elttype = TREE_TYPE (type);
5624 int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
5625 enum machine_mode eltmode = TYPE_MODE (elttype);
5626 HOST_WIDE_INT bitsize;
5627 HOST_WIDE_INT bitpos;
5628 rtvec vector = NULL;
5629 unsigned n_elts;
5630 alias_set_type alias;
5632 gcc_assert (eltmode != BLKmode);
5634 n_elts = TYPE_VECTOR_SUBPARTS (type);
5635 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
5637 enum machine_mode mode = GET_MODE (target);
5639 icode = (int) optab_handler (vec_init_optab, mode)->insn_code;
5640 if (icode != CODE_FOR_nothing)
5642 unsigned int i;
5644 vector = rtvec_alloc (n_elts);
5645 for (i = 0; i < n_elts; i++)
5646 RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
5650 /* If the constructor has fewer elements than the vector,
5651 clear the whole array first. Similarly if this is a static
5652 constructor of a non-BLKmode object. */
5653 if (cleared)
5654 need_to_clear = 0;
5655 else if (REG_P (target) && TREE_STATIC (exp))
5656 need_to_clear = 1;
5657 else
5659 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
5660 tree value;
5662 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
5664 int n_elts_here = tree_low_cst
5665 (int_const_binop (TRUNC_DIV_EXPR,
5666 TYPE_SIZE (TREE_TYPE (value)),
5667 TYPE_SIZE (elttype), 0), 1);
5669 count += n_elts_here;
5670 if (mostly_zeros_p (value))
5671 zero_count += n_elts_here;
5674 /* Clear the entire vector first if there are any missing elements,
5675 or if the incidence of zero elements is >= 75%. */
5676 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
5679 if (need_to_clear && size > 0 && !vector)
5681 if (REG_P (target))
5682 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5683 else
5684 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5685 cleared = 1;
5688 /* Inform later passes that the old value is dead. */
5689 if (!cleared && !vector && REG_P (target))
5690 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5692 if (MEM_P (target))
5693 alias = MEM_ALIAS_SET (target);
5694 else
5695 alias = get_alias_set (elttype);
5697 /* Store each element of the constructor into the corresponding
5698 element of TARGET, determined by counting the elements. */
5699 for (idx = 0, i = 0;
5700 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
5701 idx++, i += bitsize / elt_size)
5703 HOST_WIDE_INT eltpos;
5704 tree value = ce->value;
5706 bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
5707 if (cleared && initializer_zerop (value))
5708 continue;
5710 if (ce->index)
5711 eltpos = tree_low_cst (ce->index, 1);
5712 else
5713 eltpos = i;
5715 if (vector)
5717 /* Vector CONSTRUCTORs should only be built from smaller
5718 vectors in the case of BLKmode vectors. */
5719 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
5720 RTVEC_ELT (vector, eltpos)
5721 = expand_normal (value);
5723 else
5725 enum machine_mode value_mode =
5726 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
5727 ? TYPE_MODE (TREE_TYPE (value))
5728 : eltmode;
5729 bitpos = eltpos * elt_size;
5730 store_constructor_field (target, bitsize, bitpos,
5731 value_mode, value, type,
5732 cleared, alias);
5736 if (vector)
5737 emit_insn (GEN_FCN (icode)
5738 (target,
5739 gen_rtx_PARALLEL (GET_MODE (target), vector)));
5740 break;
5743 default:
5744 gcc_unreachable ();
5748 /* Store the value of EXP (an expression tree)
5749 into a subfield of TARGET which has mode MODE and occupies
5750 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5751 If MODE is VOIDmode, it means that we are storing into a bit-field.
5753 Always return const0_rtx unless we have something particular to
5754 return.
5756 TYPE is the type of the underlying object.
5758 ALIAS_SET is the alias set for the destination. This value will
5759 (in general) be different from that for TARGET, since TARGET is a
5760 reference to the containing structure.
5762 If NONTEMPORAL is true, try generating a nontemporal store. */
5764 static rtx
5765 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5766 enum machine_mode mode, tree exp, tree type,
5767 alias_set_type alias_set, bool nontemporal)
5769 if (TREE_CODE (exp) == ERROR_MARK)
5770 return const0_rtx;
5772 /* If we have nothing to store, do nothing unless the expression has
5773 side-effects. */
5774 if (bitsize == 0)
5775 return expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5777 /* If we are storing into an unaligned field of an aligned union that is
5778 in a register, we may have the mode of TARGET being an integer mode but
5779 MODE == BLKmode. In that case, get an aligned object whose size and
5780 alignment are the same as TARGET and store TARGET into it (we can avoid
5781 the store if the field being stored is the entire width of TARGET). Then
5782 call ourselves recursively to store the field into a BLKmode version of
5783 that object. Finally, load from the object into TARGET. This is not
5784 very efficient in general, but should only be slightly more expensive
5785 than the otherwise-required unaligned accesses. Perhaps this can be
5786 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5787 twice, once with emit_move_insn and once via store_field. */
5789 if (mode == BLKmode
5790 && (REG_P (target) || GET_CODE (target) == SUBREG))
5792 rtx object = assign_temp (type, 0, 1, 1);
5793 rtx blk_object = adjust_address (object, BLKmode, 0);
5795 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5796 emit_move_insn (object, target);
5798 store_field (blk_object, bitsize, bitpos, mode, exp, type, alias_set,
5799 nontemporal);
5801 emit_move_insn (target, object);
5803 /* We want to return the BLKmode version of the data. */
5804 return blk_object;
5807 if (GET_CODE (target) == CONCAT)
5809 /* We're storing into a struct containing a single __complex. */
5811 gcc_assert (!bitpos);
5812 return store_expr (exp, target, 0, nontemporal);
5815 /* If the structure is in a register or if the component
5816 is a bit field, we cannot use addressing to access it.
5817 Use bit-field techniques or SUBREG to store in it. */
5819 if (mode == VOIDmode
5820 || (mode != BLKmode && ! direct_store[(int) mode]
5821 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5822 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5823 || REG_P (target)
5824 || GET_CODE (target) == SUBREG
5825 /* If the field isn't aligned enough to store as an ordinary memref,
5826 store it as a bit field. */
5827 || (mode != BLKmode
5828 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5829 || bitpos % GET_MODE_ALIGNMENT (mode))
5830 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5831 || (bitpos % BITS_PER_UNIT != 0)))
5832 /* If the RHS and field are a constant size and the size of the
5833 RHS isn't the same size as the bitfield, we must use bitfield
5834 operations. */
5835 || (bitsize >= 0
5836 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5837 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5839 rtx temp;
5840 gimple nop_def;
5842 /* If EXP is a NOP_EXPR of precision less than its mode, then that
5843 implies a mask operation. If the precision is the same size as
5844 the field we're storing into, that mask is redundant. This is
5845 particularly common with bit field assignments generated by the
5846 C front end. */
5847 nop_def = get_def_for_expr (exp, NOP_EXPR);
5848 if (nop_def)
5850 tree type = TREE_TYPE (exp);
5851 if (INTEGRAL_TYPE_P (type)
5852 && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
5853 && bitsize == TYPE_PRECISION (type))
5855 tree op = gimple_assign_rhs1 (nop_def);
5856 type = TREE_TYPE (op);
5857 if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
5858 exp = op;
5862 temp = expand_normal (exp);
5864 /* If BITSIZE is narrower than the size of the type of EXP
5865 we will be narrowing TEMP. Normally, what's wanted are the
5866 low-order bits. However, if EXP's type is a record and this is
5867 a big-endian machine, we want the upper BITSIZE bits. */
5868 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5869 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5870 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5871 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5872 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5873 - bitsize),
5874 NULL_RTX, 1);
5876 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5877 MODE. */
5878 if (mode != VOIDmode && mode != BLKmode
5879 && mode != TYPE_MODE (TREE_TYPE (exp)))
5880 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5882 /* If the modes of TEMP and TARGET are both BLKmode, both
5883 must be in memory and BITPOS must be aligned on a byte
5884 boundary. If so, we simply do a block copy. Likewise
5885 for a BLKmode-like TARGET. */
5886 if (GET_MODE (temp) == BLKmode
5887 && (GET_MODE (target) == BLKmode
5888 || (MEM_P (target)
5889 && GET_MODE_CLASS (GET_MODE (target)) == MODE_INT
5890 && (bitpos % BITS_PER_UNIT) == 0
5891 && (bitsize % BITS_PER_UNIT) == 0)))
5893 gcc_assert (MEM_P (target) && MEM_P (temp)
5894 && (bitpos % BITS_PER_UNIT) == 0);
5896 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5897 emit_block_move (target, temp,
5898 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5899 / BITS_PER_UNIT),
5900 BLOCK_OP_NORMAL);
5902 return const0_rtx;
5905 /* Store the value in the bitfield. */
5906 store_bit_field (target, bitsize, bitpos, mode, temp);
5908 return const0_rtx;
5910 else
5912 /* Now build a reference to just the desired component. */
5913 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5915 if (to_rtx == target)
5916 to_rtx = copy_rtx (to_rtx);
5918 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5919 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5920 set_mem_alias_set (to_rtx, alias_set);
5922 return store_expr (exp, to_rtx, 0, nontemporal);
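
/* Hedged illustrative sketch (kept out of the build with #if 0): one way a
   caller might hand a bit-field store to store_field.  STRUCT_REF, FIELD_REF
   and RHS are hypothetical trees, not names used elsewhere in this file; the
   bit numbers are made up for the example.  */
#if 0
{
  /* TO_RTX refers to the whole containing object; FIELD_REF names a 16-bit
     bit-field starting at bit 8 of it.  Passing VOIDmode asks store_field
     to use bit-field insertion rather than an ordinary memory store.  */
  rtx to_rtx = expand_normal (struct_ref);
  store_field (to_rtx, 16, 8, VOIDmode, rhs, TREE_TYPE (struct_ref),
	       get_alias_set (field_ref), false /* nontemporal */);
}
#endif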
5926 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5927 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5928 codes and find the ultimate containing object, which we return.
5930 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5931 bit position, and *PUNSIGNEDP to the signedness of the field.
5932 If the position of the field is variable, we store a tree
5933 giving the variable offset (in units) in *POFFSET.
5934 This offset is in addition to the bit position.
5935 If the position is not variable, we store 0 in *POFFSET.
5937 If any of the extraction expressions is volatile,
5938 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5940 If the field is a non-BLKmode bit-field, *PMODE is set to VOIDmode.
5941 Otherwise, it is a mode that can be used to access the field.
5943 If the field describes a variable-sized object, *PMODE is set to
5944 BLKmode and *PBITSIZE is set to -1. An access cannot be made in
5945 this case, but the address of the object can be found.
5947 If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
5948 look through nodes that serve as markers of a greater alignment than
5949 the one that can be deduced from the expression. These nodes make it
5950 possible for front-ends to prevent temporaries from being created by
5951 the middle-end on alignment considerations. For that purpose, the
5952 normal operating mode at high-level is to always pass FALSE so that
5953 the ultimate containing object is really returned; moreover, the
5954 associated predicate handled_component_p will always return TRUE
5955 on these nodes, thus indicating that they are essentially handled
5956 by get_inner_reference. TRUE should only be passed when the caller
5957 is scanning the expression in order to build another representation
5958 and specifically knows how to handle these nodes; as such, this is
5959 the normal operating mode in the RTL expanders. */
5961 tree
5962 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5963 HOST_WIDE_INT *pbitpos, tree *poffset,
5964 enum machine_mode *pmode, int *punsignedp,
5965 int *pvolatilep, bool keep_aligning)
5967 tree size_tree = 0;
5968 enum machine_mode mode = VOIDmode;
5969 bool blkmode_bitfield = false;
5970 tree offset = size_zero_node;
5971 tree bit_offset = bitsize_zero_node;
5973 /* First get the mode, signedness, and size. We do this from just the
5974 outermost expression. */
5975 *pbitsize = -1;
5976 if (TREE_CODE (exp) == COMPONENT_REF)
5978 tree field = TREE_OPERAND (exp, 1);
5979 size_tree = DECL_SIZE (field);
5980 if (!DECL_BIT_FIELD (field))
5981 mode = DECL_MODE (field);
5982 else if (DECL_MODE (field) == BLKmode)
5983 blkmode_bitfield = true;
5985 *punsignedp = DECL_UNSIGNED (field);
5987 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5989 size_tree = TREE_OPERAND (exp, 1);
5990 *punsignedp = (! INTEGRAL_TYPE_P (TREE_TYPE (exp))
5991 || TYPE_UNSIGNED (TREE_TYPE (exp)));
5993 /* For vector types, with the correct size of access, use the mode of
5994 the inner type. */
5995 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == VECTOR_TYPE
5996 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))
5997 && tree_int_cst_equal (size_tree, TYPE_SIZE (TREE_TYPE (exp))))
5998 mode = TYPE_MODE (TREE_TYPE (exp));
6000 else
6002 mode = TYPE_MODE (TREE_TYPE (exp));
6003 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
6005 if (mode == BLKmode)
6006 size_tree = TYPE_SIZE (TREE_TYPE (exp));
6007 else
6008 *pbitsize = GET_MODE_BITSIZE (mode);
6011 if (size_tree != 0)
6013 if (! host_integerp (size_tree, 1))
6014 mode = BLKmode, *pbitsize = -1;
6015 else
6016 *pbitsize = tree_low_cst (size_tree, 1);
6019 /* Compute cumulative bit-offset for nested component-refs and array-refs,
6020 and find the ultimate containing object. */
6021 while (1)
6023 switch (TREE_CODE (exp))
6025 case BIT_FIELD_REF:
6026 bit_offset = size_binop (PLUS_EXPR, bit_offset,
6027 TREE_OPERAND (exp, 2));
6028 break;
6030 case COMPONENT_REF:
6032 tree field = TREE_OPERAND (exp, 1);
6033 tree this_offset = component_ref_field_offset (exp);
6035 /* If this field hasn't been filled in yet, don't go past it.
6036 This should only happen when folding expressions made during
6037 type construction. */
6038 if (this_offset == 0)
6039 break;
6041 offset = size_binop (PLUS_EXPR, offset, this_offset);
6042 bit_offset = size_binop (PLUS_EXPR, bit_offset,
6043 DECL_FIELD_BIT_OFFSET (field));
6045 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
6047 break;
6049 case ARRAY_REF:
6050 case ARRAY_RANGE_REF:
6052 tree index = TREE_OPERAND (exp, 1);
6053 tree low_bound = array_ref_low_bound (exp);
6054 tree unit_size = array_ref_element_size (exp);
6056 /* We assume all arrays have sizes that are a multiple of a byte.
6057 First subtract the lower bound, if any, in the type of the
6058 index, then convert to sizetype and multiply by the size of
6059 the array element. */
6060 if (! integer_zerop (low_bound))
6061 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
6062 index, low_bound);
6064 offset = size_binop (PLUS_EXPR, offset,
6065 size_binop (MULT_EXPR,
6066 fold_convert (sizetype, index),
6067 unit_size));
6069 break;
6071 case REALPART_EXPR:
6072 break;
6074 case IMAGPART_EXPR:
6075 bit_offset = size_binop (PLUS_EXPR, bit_offset,
6076 bitsize_int (*pbitsize));
6077 break;
6079 case VIEW_CONVERT_EXPR:
6080 if (keep_aligning && STRICT_ALIGNMENT
6081 && (TYPE_ALIGN (TREE_TYPE (exp))
6082 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
6083 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
6084 < BIGGEST_ALIGNMENT)
6085 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
6086 || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6087 goto done;
6088 break;
6090 default:
6091 goto done;
6094 /* If any reference in the chain is volatile, the effect is volatile. */
6095 if (TREE_THIS_VOLATILE (exp))
6096 *pvolatilep = 1;
6098 exp = TREE_OPERAND (exp, 0);
6100 done:
6102 /* If OFFSET is constant, see if we can return the whole thing as a
6103 constant bit position. Make sure to handle overflow during
6104 this conversion. */
6105 if (host_integerp (offset, 0))
6107 double_int tem = double_int_mul (tree_to_double_int (offset),
6108 uhwi_to_double_int (BITS_PER_UNIT));
6109 tem = double_int_add (tem, tree_to_double_int (bit_offset));
6110 if (double_int_fits_in_shwi_p (tem))
6112 *pbitpos = double_int_to_shwi (tem);
6113 *poffset = offset = NULL_TREE;
6117 /* Otherwise, split it up. */
6118 if (offset)
6120 *pbitpos = tree_low_cst (bit_offset, 0);
6121 *poffset = offset;
6124 /* We can use BLKmode for a byte-aligned BLKmode bitfield. */
6125 if (mode == VOIDmode
6126 && blkmode_bitfield
6127 && (*pbitpos % BITS_PER_UNIT) == 0
6128 && (*pbitsize % BITS_PER_UNIT) == 0)
6129 *pmode = BLKmode;
6130 else
6131 *pmode = mode;
6133 return exp;
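
/* Hedged usage sketch (not compiled): decomposing a reference such as a.b.c
   with get_inner_reference.  REF is a hypothetical tree; on return BASE is
   the outermost containing object (here the VAR_DECL for "a") and the other
   outputs describe where the accessed bits live inside it, as documented
   above.  */
#if 0
{
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  enum machine_mode mode;
  int unsignedp, volatilep = 0;

  tree base = get_inner_reference (ref, &bitsize, &bitpos, &offset,
				   &mode, &unsignedp, &volatilep, false);
  if (offset == NULL_TREE)
    /* Constant position: the field occupies BITSIZE bits starting at bit
       BITPOS from the start of BASE.  */
    ;
  else
    /* Variable position: add OFFSET (in bytes) to the address of BASE and
       the field then starts at bit BITPOS of that location.  */
    ;
}
#endif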
6136 /* Given an expression EXP that may be a COMPONENT_REF, an ARRAY_REF or an
6137 ARRAY_RANGE_REF, look for whether EXP or any nested component-refs within
6138 EXP is marked as PACKED. */
6140 bool
6141 contains_packed_reference (const_tree exp)
6143 bool packed_p = false;
6145 while (1)
6147 switch (TREE_CODE (exp))
6149 case COMPONENT_REF:
6151 tree field = TREE_OPERAND (exp, 1);
6152 packed_p = DECL_PACKED (field)
6153 || TYPE_PACKED (TREE_TYPE (field))
6154 || TYPE_PACKED (TREE_TYPE (exp));
6155 if (packed_p)
6156 goto done;
6158 break;
6160 case BIT_FIELD_REF:
6161 case ARRAY_REF:
6162 case ARRAY_RANGE_REF:
6163 case REALPART_EXPR:
6164 case IMAGPART_EXPR:
6165 case VIEW_CONVERT_EXPR:
6166 break;
6168 default:
6169 goto done;
6171 exp = TREE_OPERAND (exp, 0);
6173 done:
6174 return packed_p;
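
/* Hedged sketch (not compiled): a typical use of contains_packed_reference
   is to stop assuming the natural alignment of an access.  REF and ALIGN are
   hypothetical here.  */
#if 0
{
  unsigned int align = TYPE_ALIGN (TREE_TYPE (ref));
  if (contains_packed_reference (ref))
    /* A packed field may be under-aligned; fall back to byte alignment.  */
    align = BITS_PER_UNIT;
}
#endif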
6177 /* Return a tree of sizetype representing the size, in bytes, of the element
6178 of EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6180 tree
6181 array_ref_element_size (tree exp)
6183 tree aligned_size = TREE_OPERAND (exp, 3);
6184 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6185 location_t loc = EXPR_LOCATION (exp);
6187 /* If a size was specified in the ARRAY_REF, it's the size measured
6188 in alignment units of the element type. So multiply by that value. */
6189 if (aligned_size)
6191 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6192 sizetype from another type of the same width and signedness. */
6193 if (TREE_TYPE (aligned_size) != sizetype)
6194 aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
6195 return size_binop_loc (loc, MULT_EXPR, aligned_size,
6196 size_int (TYPE_ALIGN_UNIT (elmt_type)));
6199 /* Otherwise, take the size from that of the element type. Substitute
6200 any PLACEHOLDER_EXPR that we have. */
6201 else
6202 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
6205 /* Return a tree representing the lower bound of the array mentioned in
6206 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6208 tree
6209 array_ref_low_bound (tree exp)
6211 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
6213 /* If a lower bound is specified in EXP, use it. */
6214 if (TREE_OPERAND (exp, 2))
6215 return TREE_OPERAND (exp, 2);
6217 /* Otherwise, if there is a domain type and it has a lower bound, use it,
6218 substituting for a PLACEHOLDER_EXPR as needed. */
6219 if (domain_type && TYPE_MIN_VALUE (domain_type))
6220 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
6222 /* Otherwise, return a zero of the appropriate type. */
6223 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
6226 /* Return a tree representing the upper bound of the array mentioned in
6227 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6229 tree
6230 array_ref_up_bound (tree exp)
6232 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
6234 /* If there is a domain type and it has an upper bound, use it, substituting
6235 for a PLACEHOLDER_EXPR as needed. */
6236 if (domain_type && TYPE_MAX_VALUE (domain_type))
6237 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
6239 /* Otherwise fail. */
6240 return NULL_TREE;
6243 /* Return a tree representing the offset, in bytes, of the field referenced
6244 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
6246 tree
6247 component_ref_field_offset (tree exp)
6249 tree aligned_offset = TREE_OPERAND (exp, 2);
6250 tree field = TREE_OPERAND (exp, 1);
6251 location_t loc = EXPR_LOCATION (exp);
6253 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
6254 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
6255 value. */
6256 if (aligned_offset)
6258 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6259 sizetype from another type of the same width and signedness. */
6260 if (TREE_TYPE (aligned_offset) != sizetype)
6261 aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
6262 return size_binop_loc (loc, MULT_EXPR, aligned_offset,
6263 size_int (DECL_OFFSET_ALIGN (field)
6264 / BITS_PER_UNIT));
6267 /* Otherwise, take the offset from that of the field. Substitute
6268 any PLACEHOLDER_EXPR that we have. */
6269 else
6270 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
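
/* Hedged sketch (not compiled): how the array_ref_* helpers above combine
   for an ARRAY_REF a[i], mirroring the ARRAY_REF case of
   get_inner_reference.  AREF is a hypothetical ARRAY_REF tree.  */
#if 0
{
  tree index = TREE_OPERAND (aref, 1);
  tree low_bound = array_ref_low_bound (aref);	    /* usually zero in C */
  tree unit_size = array_ref_element_size (aref);   /* element size, bytes */
  tree byte_off;

  if (! integer_zerop (low_bound))
    index = fold_build2 (MINUS_EXPR, TREE_TYPE (index), index, low_bound);

  /* Byte offset of a[i] from the start of the array object.  */
  byte_off = size_binop (MULT_EXPR,
			 fold_convert (sizetype, index), unit_size);
}
#endif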
6273 /* Alignment in bits the TARGET of an assignment may be assumed to have. */
6275 static unsigned HOST_WIDE_INT
6276 target_align (const_tree target)
6278 /* We might have a chain of nested references with intermediate misaligning
6279 bitfield components, so we need to recurse to find out. */
6281 unsigned HOST_WIDE_INT this_align, outer_align;
6283 switch (TREE_CODE (target))
6285 case BIT_FIELD_REF:
6286 return 1;
6288 case COMPONENT_REF:
6289 this_align = DECL_ALIGN (TREE_OPERAND (target, 1));
6290 outer_align = target_align (TREE_OPERAND (target, 0));
6291 return MIN (this_align, outer_align);
6293 case ARRAY_REF:
6294 case ARRAY_RANGE_REF:
6295 this_align = TYPE_ALIGN (TREE_TYPE (target));
6296 outer_align = target_align (TREE_OPERAND (target, 0));
6297 return MIN (this_align, outer_align);
6299 CASE_CONVERT:
6300 case NON_LVALUE_EXPR:
6301 case VIEW_CONVERT_EXPR:
6302 this_align = TYPE_ALIGN (TREE_TYPE (target));
6303 outer_align = target_align (TREE_OPERAND (target, 0));
6304 return MAX (this_align, outer_align);
6306 default:
6307 return TYPE_ALIGN (TREE_TYPE (target));
6312 /* Given an rtx VALUE that may contain additions and multiplications, return
6313 an equivalent value that just refers to a register, memory, or constant.
6314 This is done by generating instructions to perform the arithmetic and
6315 returning a pseudo-register containing the value.
6317 The returned value may be a REG, SUBREG, MEM or constant. */
6320 force_operand (rtx value, rtx target)
6322 rtx op1, op2;
6323 /* Use subtarget as the target for operand 0 of a binary operation. */
6324 rtx subtarget = get_subtarget (target);
6325 enum rtx_code code = GET_CODE (value);
6327 /* Check for subreg applied to an expression produced by loop optimizer. */
6328 if (code == SUBREG
6329 && !REG_P (SUBREG_REG (value))
6330 && !MEM_P (SUBREG_REG (value)))
6332 value
6333 = simplify_gen_subreg (GET_MODE (value),
6334 force_reg (GET_MODE (SUBREG_REG (value)),
6335 force_operand (SUBREG_REG (value),
6336 NULL_RTX)),
6337 GET_MODE (SUBREG_REG (value)),
6338 SUBREG_BYTE (value));
6339 code = GET_CODE (value);
6342 /* Check for a PIC address load. */
6343 if ((code == PLUS || code == MINUS)
6344 && XEXP (value, 0) == pic_offset_table_rtx
6345 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
6346 || GET_CODE (XEXP (value, 1)) == LABEL_REF
6347 || GET_CODE (XEXP (value, 1)) == CONST))
6349 if (!subtarget)
6350 subtarget = gen_reg_rtx (GET_MODE (value));
6351 emit_move_insn (subtarget, value);
6352 return subtarget;
6355 if (ARITHMETIC_P (value))
6357 op2 = XEXP (value, 1);
6358 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
6359 subtarget = 0;
6360 if (code == MINUS && CONST_INT_P (op2))
6362 code = PLUS;
6363 op2 = negate_rtx (GET_MODE (value), op2);
6366 /* Check for an addition with OP2 a constant integer and our first
6367 operand a PLUS of a virtual register and something else. In that
6368 case, we want to emit the sum of the virtual register and the
6369 constant first and then add the other value. This allows virtual
6370 register instantiation to simply modify the constant rather than
6371 creating another one around this addition. */
6372 if (code == PLUS && CONST_INT_P (op2)
6373 && GET_CODE (XEXP (value, 0)) == PLUS
6374 && REG_P (XEXP (XEXP (value, 0), 0))
6375 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
6376 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
6378 rtx temp = expand_simple_binop (GET_MODE (value), code,
6379 XEXP (XEXP (value, 0), 0), op2,
6380 subtarget, 0, OPTAB_LIB_WIDEN);
6381 return expand_simple_binop (GET_MODE (value), code, temp,
6382 force_operand (XEXP (XEXP (value,
6383 0), 1), 0),
6384 target, 0, OPTAB_LIB_WIDEN);
6387 op1 = force_operand (XEXP (value, 0), subtarget);
6388 op2 = force_operand (op2, NULL_RTX);
6389 switch (code)
6391 case MULT:
6392 return expand_mult (GET_MODE (value), op1, op2, target, 1);
6393 case DIV:
6394 if (!INTEGRAL_MODE_P (GET_MODE (value)))
6395 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6396 target, 1, OPTAB_LIB_WIDEN);
6397 else
6398 return expand_divmod (0,
6399 FLOAT_MODE_P (GET_MODE (value))
6400 ? RDIV_EXPR : TRUNC_DIV_EXPR,
6401 GET_MODE (value), op1, op2, target, 0);
6402 case MOD:
6403 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6404 target, 0);
6405 case UDIV:
6406 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
6407 target, 1);
6408 case UMOD:
6409 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6410 target, 1);
6411 case ASHIFTRT:
6412 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6413 target, 0, OPTAB_LIB_WIDEN);
6414 default:
6415 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6416 target, 1, OPTAB_LIB_WIDEN);
6419 if (UNARY_P (value))
6421 if (!target)
6422 target = gen_reg_rtx (GET_MODE (value));
6423 op1 = force_operand (XEXP (value, 0), NULL_RTX);
6424 switch (code)
6426 case ZERO_EXTEND:
6427 case SIGN_EXTEND:
6428 case TRUNCATE:
6429 case FLOAT_EXTEND:
6430 case FLOAT_TRUNCATE:
6431 convert_move (target, op1, code == ZERO_EXTEND);
6432 return target;
6434 case FIX:
6435 case UNSIGNED_FIX:
6436 expand_fix (target, op1, code == UNSIGNED_FIX);
6437 return target;
6439 case FLOAT:
6440 case UNSIGNED_FLOAT:
6441 expand_float (target, op1, code == UNSIGNED_FLOAT);
6442 return target;
6444 default:
6445 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
6449 #ifdef INSN_SCHEDULING
6450 /* On machines that have insn scheduling, we want all memory references to be
6451 explicit, so we need to deal with such paradoxical SUBREGs. */
6452 if (GET_CODE (value) == SUBREG && MEM_P (SUBREG_REG (value))
6453 && (GET_MODE_SIZE (GET_MODE (value))
6454 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
6455 value
6456 = simplify_gen_subreg (GET_MODE (value),
6457 force_reg (GET_MODE (SUBREG_REG (value)),
6458 force_operand (SUBREG_REG (value),
6459 NULL_RTX)),
6460 GET_MODE (SUBREG_REG (value)),
6461 SUBREG_BYTE (value));
6462 #endif
6464 return value;
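
/* Hedged usage sketch (not compiled): flattening an address computation such
   as (plus (mult (reg) (const_int 4)) (symbol_ref)) so it can feed a memory
   reference.  ADDR is a hypothetical rtx and SImode is an arbitrary choice
   for the example.  */
#if 0
{
  rtx addr_val = force_operand (addr, NULL_RTX);
  /* ADDR_VAL is now a REG, SUBREG, MEM or constant; any arithmetic has been
     emitted as insns at the current position.  */
  rtx mem = gen_rtx_MEM (SImode, force_reg (Pmode, addr_val));
}
#endif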
6467 /* Subroutine of expand_expr: return nonzero iff there is no way that
6468 EXP can reference X, which is being modified. TOP_P is nonzero if this
6469 call is going to be used to determine whether we need a temporary
6470 for EXP, as opposed to a recursive call to this function.
6472 It is always safe for this routine to return zero since it merely
6473 searches for optimization opportunities. */
6476 safe_from_p (const_rtx x, tree exp, int top_p)
6478 rtx exp_rtl = 0;
6479 int i, nops;
6481 if (x == 0
6482 /* If EXP has varying size, we MUST use a target since we currently
6483 have no way of allocating temporaries of variable size
6484 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
6485 So we assume here that something at a higher level has prevented a
6486 clash. This is somewhat bogus, but the best we can do. Only
6487 do this when X is BLKmode and when we are at the top level. */
6488 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6489 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
6490 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
6491 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
6492 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
6493 != INTEGER_CST)
6494 && GET_MODE (x) == BLKmode)
6495 /* If X is in the outgoing argument area, it is always safe. */
6496 || (MEM_P (x)
6497 && (XEXP (x, 0) == virtual_outgoing_args_rtx
6498 || (GET_CODE (XEXP (x, 0)) == PLUS
6499 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
6500 return 1;
6502 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
6503 find the underlying pseudo. */
6504 if (GET_CODE (x) == SUBREG)
6506 x = SUBREG_REG (x);
6507 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6508 return 0;
6511 /* Now look at our tree code and possibly recurse. */
6512 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
6514 case tcc_declaration:
6515 exp_rtl = DECL_RTL_IF_SET (exp);
6516 break;
6518 case tcc_constant:
6519 return 1;
6521 case tcc_exceptional:
6522 if (TREE_CODE (exp) == TREE_LIST)
6524 while (1)
6526 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
6527 return 0;
6528 exp = TREE_CHAIN (exp);
6529 if (!exp)
6530 return 1;
6531 if (TREE_CODE (exp) != TREE_LIST)
6532 return safe_from_p (x, exp, 0);
6535 else if (TREE_CODE (exp) == CONSTRUCTOR)
6537 constructor_elt *ce;
6538 unsigned HOST_WIDE_INT idx;
6540 for (idx = 0;
6541 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
6542 idx++)
6543 if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
6544 || !safe_from_p (x, ce->value, 0))
6545 return 0;
6546 return 1;
6548 else if (TREE_CODE (exp) == ERROR_MARK)
6549 return 1; /* An already-visited SAVE_EXPR? */
6550 else
6551 return 0;
6553 case tcc_statement:
6554 /* The only case we look at here is the DECL_INITIAL inside a
6555 DECL_EXPR. */
6556 return (TREE_CODE (exp) != DECL_EXPR
6557 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
6558 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
6559 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
6561 case tcc_binary:
6562 case tcc_comparison:
6563 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
6564 return 0;
6565 /* Fall through. */
6567 case tcc_unary:
6568 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6570 case tcc_expression:
6571 case tcc_reference:
6572 case tcc_vl_exp:
6573 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
6574 the expression. If it is set, we conflict iff we are that rtx or
6575 both are in memory. Otherwise, we check all operands of the
6576 expression recursively. */
6578 switch (TREE_CODE (exp))
6580 case ADDR_EXPR:
6581 /* If the operand is static or we are static, we can't conflict.
6582 Likewise if we don't conflict with the operand at all. */
6583 if (staticp (TREE_OPERAND (exp, 0))
6584 || TREE_STATIC (exp)
6585 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6586 return 1;
6588 /* Otherwise, the only way this can conflict is if we are taking
6589 the address of a DECL whose address is part of X, which is
6590 very rare. */
6591 exp = TREE_OPERAND (exp, 0);
6592 if (DECL_P (exp))
6594 if (!DECL_RTL_SET_P (exp)
6595 || !MEM_P (DECL_RTL (exp)))
6596 return 0;
6597 else
6598 exp_rtl = XEXP (DECL_RTL (exp), 0);
6600 break;
6602 case MISALIGNED_INDIRECT_REF:
6603 case ALIGN_INDIRECT_REF:
6604 case INDIRECT_REF:
6605 if (MEM_P (x)
6606 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
6607 get_alias_set (exp)))
6608 return 0;
6609 break;
6611 case CALL_EXPR:
6612 /* Assume that the call will clobber all hard registers and
6613 all of memory. */
6614 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6615 || MEM_P (x))
6616 return 0;
6617 break;
6619 case WITH_CLEANUP_EXPR:
6620 case CLEANUP_POINT_EXPR:
6621 /* Lowered by gimplify.c. */
6622 gcc_unreachable ();
6624 case SAVE_EXPR:
6625 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6627 default:
6628 break;
6631 /* If we have an rtx, we do not need to scan our operands. */
6632 if (exp_rtl)
6633 break;
6635 nops = TREE_OPERAND_LENGTH (exp);
6636 for (i = 0; i < nops; i++)
6637 if (TREE_OPERAND (exp, i) != 0
6638 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
6639 return 0;
6641 break;
6643 case tcc_type:
6644 /* Should never get a type here. */
6645 gcc_unreachable ();
6648 /* If we have an rtl, find any enclosed object. Then see if we conflict
6649 with it. */
6650 if (exp_rtl)
6652 if (GET_CODE (exp_rtl) == SUBREG)
6654 exp_rtl = SUBREG_REG (exp_rtl);
6655 if (REG_P (exp_rtl)
6656 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
6657 return 0;
6660 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
6661 are memory and they conflict. */
6662 return ! (rtx_equal_p (x, exp_rtl)
6663 || (MEM_P (x) && MEM_P (exp_rtl)
6664 && true_dependence (exp_rtl, VOIDmode, x,
6665 rtx_addr_varies_p)));
6668 /* If we reach here, it is safe. */
6669 return 1;
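
/* Hedged sketch (not compiled): the typical pattern for using safe_from_p,
   as expand_operands further below does -- drop TARGET as a scratch location
   when expanding EXP1 might clobber it.  */
#if 0
if (target != 0 && ! safe_from_p (target, exp1, 1))
  target = 0;	/* Let the expander pick a fresh temporary instead.  */
#endif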
6673 /* Return the highest power of two that EXP is known to be a multiple of.
6674 This is used in updating alignment of MEMs in array references. */
6676 unsigned HOST_WIDE_INT
6677 highest_pow2_factor (const_tree exp)
6679 unsigned HOST_WIDE_INT c0, c1;
6681 switch (TREE_CODE (exp))
6683 case INTEGER_CST:
6684 /* We can find the lowest bit that's a one. If the low
6685 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6686 We need to handle this case since we can find it in a COND_EXPR,
6687 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
6688 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
6689 later ICE. */
6690 if (TREE_OVERFLOW (exp))
6691 return BIGGEST_ALIGNMENT;
6692 else
6694 /* Note: tree_low_cst is intentionally not used here,
6695 we don't care about the upper bits. */
6696 c0 = TREE_INT_CST_LOW (exp);
6697 c0 &= -c0;
6698 return c0 ? c0 : BIGGEST_ALIGNMENT;
6700 break;
6702 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6703 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6704 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6705 return MIN (c0, c1);
6707 case MULT_EXPR:
6708 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6709 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6710 return c0 * c1;
6712 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6713 case CEIL_DIV_EXPR:
6714 if (integer_pow2p (TREE_OPERAND (exp, 1))
6715 && host_integerp (TREE_OPERAND (exp, 1), 1))
6717 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6718 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6719 return MAX (1, c0 / c1);
6721 break;
6723 case BIT_AND_EXPR:
6724 /* The highest power of two of a bit-and expression is the maximum of
6725 that of its operands. We typically get here for a complex LHS and
6726 a constant negative power of two on the RHS to force an explicit
6727 alignment, so don't bother looking at the LHS. */
6728 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6730 CASE_CONVERT:
6731 case SAVE_EXPR:
6732 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6734 case COMPOUND_EXPR:
6735 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6737 case COND_EXPR:
6738 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6739 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6740 return MIN (c0, c1);
6742 default:
6743 break;
6746 return 1;
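
/* Hedged worked example (not compiled): for an index expression like
   i * 8 + 4, the recursion above yields MIN (highest_pow2_factor (i * 8),
   highest_pow2_factor (4)) = MIN (1 * 8, 4) = 4, i.e. the offset is known to
   be a multiple of 4 bytes.  IDX is a hypothetical tree.  */
#if 0
unsigned HOST_WIDE_INT factor = highest_pow2_factor (idx);
/* Callers such as store_constructor above feed this into offset_address to
   keep the alignment recorded on the resulting MEM accurate.  */
#endif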
6749 /* Similar, except that the alignment requirements of TARGET are
6750 taken into account. Assume it is at least as aligned as its
6751 type, unless it is a COMPONENT_REF in which case the layout of
6752 the structure gives the alignment. */
6754 static unsigned HOST_WIDE_INT
6755 highest_pow2_factor_for_target (const_tree target, const_tree exp)
6757 unsigned HOST_WIDE_INT talign = target_align (target) / BITS_PER_UNIT;
6758 unsigned HOST_WIDE_INT factor = highest_pow2_factor (exp);
6760 return MAX (factor, talign);
6763 /* Return &VAR expression for emulated thread local VAR. */
6765 static tree
6766 emutls_var_address (tree var)
6768 tree emuvar = emutls_decl (var);
6769 tree fn = built_in_decls [BUILT_IN_EMUTLS_GET_ADDRESS];
6770 tree arg = build_fold_addr_expr_with_type (emuvar, ptr_type_node);
6771 tree arglist = build_tree_list (NULL_TREE, arg);
6772 tree call = build_function_call_expr (UNKNOWN_LOCATION, fn, arglist);
6773 return fold_convert (build_pointer_type (TREE_TYPE (var)), call);
6777 /* Subroutine of expand_expr. Expand the two operands of a binary
6778 expression EXP0 and EXP1 placing the results in OP0 and OP1.
6779 The value may be stored in TARGET if TARGET is nonzero. The
6780 MODIFIER argument is as documented by expand_expr. */
6782 static void
6783 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
6784 enum expand_modifier modifier)
6786 if (! safe_from_p (target, exp1, 1))
6787 target = 0;
6788 if (operand_equal_p (exp0, exp1, 0))
6790 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6791 *op1 = copy_rtx (*op0);
6793 else
6795 /* If we need to preserve evaluation order, copy exp0 into its own
6796 temporary variable so that it can't be clobbered by exp1. */
6797 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
6798 exp0 = save_expr (exp0);
6799 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6800 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
6805 /* Return a MEM that contains constant EXP. DEFER is as for
6806 output_constant_def and MODIFIER is as for expand_expr. */
6808 static rtx
6809 expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
6811 rtx mem;
6813 mem = output_constant_def (exp, defer);
6814 if (modifier != EXPAND_INITIALIZER)
6815 mem = use_anchored_address (mem);
6816 return mem;
6819 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
6820 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6822 static rtx
6823 expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
6824 enum expand_modifier modifier, addr_space_t as)
6826 rtx result, subtarget;
6827 tree inner, offset;
6828 HOST_WIDE_INT bitsize, bitpos;
6829 int volatilep, unsignedp;
6830 enum machine_mode mode1;
6832 /* If we are taking the address of a constant and are at the top level,
6833 we have to use output_constant_def since we can't call force_const_mem
6834 at top level. */
6835 /* ??? This should be considered a front-end bug. We should not be
6836 generating ADDR_EXPR of something that isn't an LVALUE. The only
6837 exception here is STRING_CST. */
6838 if (CONSTANT_CLASS_P (exp))
6839 return XEXP (expand_expr_constant (exp, 0, modifier), 0);
6841 /* Everything must be something allowed by is_gimple_addressable. */
6842 switch (TREE_CODE (exp))
6844 case INDIRECT_REF:
6845 /* This case will happen via recursion for &a->b. */
6846 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6848 case CONST_DECL:
6849 /* Expand the initializer like constants above. */
6850 return XEXP (expand_expr_constant (DECL_INITIAL (exp), 0, modifier), 0);
6852 case REALPART_EXPR:
6853 /* The real part of the complex number is always first, therefore
6854 the address is the same as the address of the parent object. */
6855 offset = 0;
6856 bitpos = 0;
6857 inner = TREE_OPERAND (exp, 0);
6858 break;
6860 case IMAGPART_EXPR:
6861 /* The imaginary part of the complex number is always second.
6862 The expression is therefore always offset by the size of the
6863 scalar type. */
6864 offset = 0;
6865 bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
6866 inner = TREE_OPERAND (exp, 0);
6867 break;
6869 case VAR_DECL:
6870 /* TLS emulation hook - replace __thread VAR's &VAR with
6871 __emutls_get_address (&_emutls.VAR). */
6872 if (! targetm.have_tls
6873 && TREE_CODE (exp) == VAR_DECL
6874 && DECL_THREAD_LOCAL_P (exp))
6876 exp = emutls_var_address (exp);
6877 return expand_expr (exp, target, tmode, modifier);
6879 /* Fall through. */
6881 default:
6882 /* If the object is a DECL, then expand it for its rtl. Don't bypass
6883 expand_expr, as that can have various side effects; LABEL_DECLs for
6884 example, may not have their DECL_RTL set yet. Expand the rtl of
6885 CONSTRUCTORs too, which should yield a memory reference for the
6886 constructor's contents. Assume language specific tree nodes can
6887 be expanded in some interesting way. */
6888 gcc_assert (TREE_CODE (exp) < LAST_AND_UNUSED_TREE_CODE);
6889 if (DECL_P (exp)
6890 || TREE_CODE (exp) == CONSTRUCTOR
6891 || TREE_CODE (exp) == COMPOUND_LITERAL_EXPR)
6893 result = expand_expr (exp, target, tmode,
6894 modifier == EXPAND_INITIALIZER
6895 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
6897 /* If the DECL isn't in memory, then the DECL wasn't properly
6898 marked TREE_ADDRESSABLE, which will be either a front-end
6899 or a tree optimizer bug. */
6900 gcc_assert (MEM_P (result));
6901 result = XEXP (result, 0);
6903 /* ??? Is this needed anymore? */
6904 if (DECL_P (exp) && !TREE_USED (exp) == 0)
6906 assemble_external (exp);
6907 TREE_USED (exp) = 1;
6910 if (modifier != EXPAND_INITIALIZER
6911 && modifier != EXPAND_CONST_ADDRESS)
6912 result = force_operand (result, target);
6913 return result;
6916 /* Pass FALSE as the last argument to get_inner_reference although
6917 we are expanding to RTL. The rationale is that we know how to
6918 handle "aligning nodes" here: we can just bypass them because
6919 they won't change the final object whose address will be returned
6920 (they actually exist only for that purpose). */
6921 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6922 &mode1, &unsignedp, &volatilep, false);
6923 break;
6926 /* We must have made progress. */
6927 gcc_assert (inner != exp);
6929 subtarget = offset || bitpos ? NULL_RTX : target;
6930 /* For VIEW_CONVERT_EXPR, where the outer alignment is bigger than
6931 inner alignment, force the inner to be sufficiently aligned. */
6932 if (CONSTANT_CLASS_P (inner)
6933 && TYPE_ALIGN (TREE_TYPE (inner)) < TYPE_ALIGN (TREE_TYPE (exp)))
6935 inner = copy_node (inner);
6936 TREE_TYPE (inner) = copy_node (TREE_TYPE (inner));
6937 TYPE_ALIGN (TREE_TYPE (inner)) = TYPE_ALIGN (TREE_TYPE (exp));
6938 TYPE_USER_ALIGN (TREE_TYPE (inner)) = 1;
6940 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier, as);
6942 if (offset)
6944 rtx tmp;
6946 if (modifier != EXPAND_NORMAL)
6947 result = force_operand (result, NULL);
6948 tmp = expand_expr (offset, NULL_RTX, tmode,
6949 modifier == EXPAND_INITIALIZER
6950 ? EXPAND_INITIALIZER : EXPAND_NORMAL);
6952 result = convert_memory_address_addr_space (tmode, result, as);
6953 tmp = convert_memory_address_addr_space (tmode, tmp, as);
6955 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6956 result = gen_rtx_PLUS (tmode, result, tmp);
6957 else
6959 subtarget = bitpos ? NULL_RTX : target;
6960 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
6961 1, OPTAB_LIB_WIDEN);
6965 if (bitpos)
6967 /* Someone beforehand should have rejected taking the address
6968 of such an object. */
6969 gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
6971 result = plus_constant (result, bitpos / BITS_PER_UNIT);
6972 if (modifier < EXPAND_SUM)
6973 result = force_operand (result, target);
6976 return result;
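
/* Hedged worked example (not compiled): for &s.f where the field f sits at a
   constant byte offset of 4 in s, get_inner_reference returns inner = s,
   bitpos = 32 and offset = NULL, so the code above reduces to adding 4 to
   the address of s.  BASE_ADDR, s and f are hypothetical.  */
#if 0
rtx addr = plus_constant (base_addr, 32 / BITS_PER_UNIT);   /* &s + 4 */
#endif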
6979 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
6980 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6982 static rtx
6983 expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
6984 enum expand_modifier modifier)
6986 addr_space_t as = ADDR_SPACE_GENERIC;
6987 enum machine_mode address_mode = Pmode;
6988 enum machine_mode pointer_mode = ptr_mode;
6989 enum machine_mode rmode;
6990 rtx result;
6992 /* Target mode of VOIDmode says "whatever's natural". */
6993 if (tmode == VOIDmode)
6994 tmode = TYPE_MODE (TREE_TYPE (exp));
6996 if (POINTER_TYPE_P (TREE_TYPE (exp)))
6998 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
6999 address_mode = targetm.addr_space.address_mode (as);
7000 pointer_mode = targetm.addr_space.pointer_mode (as);
7003 /* We can get called with some Weird Things if the user does silliness
7004 like "(short) &a". In that case, convert_memory_address won't do
7005 the right thing, so ignore the given target mode. */
7006 if (tmode != address_mode && tmode != pointer_mode)
7007 tmode = address_mode;
7009 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
7010 tmode, modifier, as);
7012 /* Despite expand_expr's claims about ignoring TMODE when not
7013 strictly convenient, stuff breaks if we don't honor it. Note
7014 that combined with the above, we only do this for pointer modes. */
7015 rmode = GET_MODE (result);
7016 if (rmode == VOIDmode)
7017 rmode = tmode;
7018 if (rmode != tmode)
7019 result = convert_memory_address_addr_space (tmode, result, as);
7021 return result;
7024 /* Generate code for computing CONSTRUCTOR EXP.
7025 An rtx for the computed value is returned. If AVOID_TEMP_MEM
7026 is TRUE, instead of creating a temporary variable in memory
7027 NULL is returned and the caller needs to handle it differently. */
7029 static rtx
7030 expand_constructor (tree exp, rtx target, enum expand_modifier modifier,
7031 bool avoid_temp_mem)
7033 tree type = TREE_TYPE (exp);
7034 enum machine_mode mode = TYPE_MODE (type);
7036 /* Try to avoid creating a temporary at all. This is possible
7037 if all of the initializer is zero.
7038 FIXME: try to handle all [0..255] initializers we can handle
7039 with memset. */
7040 if (TREE_STATIC (exp)
7041 && !TREE_ADDRESSABLE (exp)
7042 && target != 0 && mode == BLKmode
7043 && all_zeros_p (exp))
7045 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
7046 return target;
7049 /* All elts simple constants => refer to a constant in memory. But
7050 if this is a non-BLKmode mode, let it store a field at a time
7051 since that should make a CONST_INT or CONST_DOUBLE when we
7052 fold. Likewise, if we have a target we can use, it is best to
7053 store directly into the target unless the type is large enough
7054 that memcpy will be used. If we are making an initializer and
7055 all operands are constant, put it in memory as well.
7057 FIXME: Avoid trying to fill vector constructors piece-meal.
7058 Output them with output_constant_def below unless we're sure
7059 they're zeros. This should go away when vector initializers
7060 are treated like VECTOR_CST instead of arrays. */
7061 if ((TREE_STATIC (exp)
7062 && ((mode == BLKmode
7063 && ! (target != 0 && safe_from_p (target, exp, 1)))
7064 || TREE_ADDRESSABLE (exp)
7065 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
7066 && (! MOVE_BY_PIECES_P
7067 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
7068 TYPE_ALIGN (type)))
7069 && ! mostly_zeros_p (exp))))
7070 || ((modifier == EXPAND_INITIALIZER || modifier == EXPAND_CONST_ADDRESS)
7071 && TREE_CONSTANT (exp)))
7073 rtx constructor;
7075 if (avoid_temp_mem)
7076 return NULL_RTX;
7078 constructor = expand_expr_constant (exp, 1, modifier);
7080 if (modifier != EXPAND_CONST_ADDRESS
7081 && modifier != EXPAND_INITIALIZER
7082 && modifier != EXPAND_SUM)
7083 constructor = validize_mem (constructor);
7085 return constructor;
7088 /* Handle calls that pass values in multiple non-contiguous
7089 locations. The Irix 6 ABI has examples of this. */
7090 if (target == 0 || ! safe_from_p (target, exp, 1)
7091 || GET_CODE (target) == PARALLEL || modifier == EXPAND_STACK_PARM)
7093 if (avoid_temp_mem)
7094 return NULL_RTX;
7096 target
7097 = assign_temp (build_qualified_type (type, (TYPE_QUALS (type)
7098 | (TREE_READONLY (exp)
7099 * TYPE_QUAL_CONST))),
7100 0, TREE_ADDRESSABLE (exp), 1);
7103 store_constructor (exp, target, 0, int_expr_size (exp));
7104 return target;
7108 /* expand_expr: generate code for computing expression EXP.
7109 An rtx for the computed value is returned. The value is never null.
7110 In the case of a void EXP, const0_rtx is returned.
7112 The value may be stored in TARGET if TARGET is nonzero.
7113 TARGET is just a suggestion; callers must assume that
7114 the rtx returned may not be the same as TARGET.
7116 If TARGET is CONST0_RTX, it means that the value will be ignored.
7118 If TMODE is not VOIDmode, it suggests generating the
7119 result in mode TMODE. But this is done only when convenient.
7120 Otherwise, TMODE is ignored and the value is generated in its natural mode.
7121 TMODE is just a suggestion; callers must assume that
7122 the rtx returned may not have mode TMODE.
7124 Note that TARGET may have neither TMODE nor MODE. In that case, it
7125 probably will not be used.
7127 If MODIFIER is EXPAND_SUM then when EXP is an addition
7128 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
7129 or a nest of (PLUS ...) and (MINUS ...) where the terms are
7130 products as above, or REG or MEM, or constant.
7131 Ordinarily in such cases we would output mul or add instructions
7132 and then return a pseudo reg containing the sum.
7134 EXPAND_INITIALIZER is much like EXPAND_SUM except that
7135 it also marks a label as absolutely required (it can't be dead).
7136 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
7137 This is used for outputting expressions used in initializers.
7139 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
7140 with a constant address even if that address is not normally legitimate.
7141 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
7143 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
7144 a call parameter. Such targets require special care as we haven't yet
7145 marked TARGET so that it's safe from being trashed by libcalls. We
7146 don't want to use TARGET for anything but the final result;
7147 Intermediate values must go elsewhere. Additionally, calls to
7148 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
7150 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
7151 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
7152 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
7153 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
7154 recursively. */
7157 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
7158 enum expand_modifier modifier, rtx *alt_rtl)
7160 rtx ret;
7162 /* Handle ERROR_MARK before anybody tries to access its type. */
7163 if (TREE_CODE (exp) == ERROR_MARK
7164 || (TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK))
7166 ret = CONST0_RTX (tmode);
7167 return ret ? ret : const0_rtx;
7170 /* If this is an expression of some kind and it has an associated line
7171 number, then emit the line number before expanding the expression.
7173 We need to save and restore the file and line information so that
7174 errors discovered during expansion are emitted with the right
7175 information. It would be better if the diagnostic routines
7176 used the file/line information embedded in the tree nodes rather
7177 than globals. */
7178 if (cfun && EXPR_HAS_LOCATION (exp))
7180 location_t saved_location = input_location;
7181 location_t saved_curr_loc = get_curr_insn_source_location ();
7182 tree saved_block = get_curr_insn_block ();
7183 input_location = EXPR_LOCATION (exp);
7184 set_curr_insn_source_location (input_location);
7186 /* Record where the insns produced belong. */
7187 set_curr_insn_block (TREE_BLOCK (exp));
7189 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
7191 input_location = saved_location;
7192 set_curr_insn_block (saved_block);
7193 set_curr_insn_source_location (saved_curr_loc);
7195 else
7197 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
7200 return ret;
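
/* Hedged usage sketch (not compiled): most of the compiler reaches this code
   through the expand_expr wrapper (see expr.h), asking for the value of an
   operand in whatever mode is natural.  RHS is a hypothetical tree.  */
#if 0
{
  rtx op = expand_expr (rhs, NULL_RTX, VOIDmode, EXPAND_NORMAL);
  /* OP may be a REG, MEM or constant; as documented above, it is not
     guaranteed to be the TARGET that was passed in, so callers must use the
     return value.  */
}
#endif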
7204 expand_expr_real_2 (sepops ops, rtx target, enum machine_mode tmode,
7205 enum expand_modifier modifier)
7207 rtx op0, op1, op2, temp;
7208 tree type;
7209 int unsignedp;
7210 enum machine_mode mode;
7211 enum tree_code code = ops->code;
7212 optab this_optab;
7213 rtx subtarget, original_target;
7214 int ignore;
7215 bool reduce_bit_field;
7216 gimple subexp0_def, subexp1_def;
7217 tree top0, top1;
7218 location_t loc = ops->location;
7219 tree treeop0, treeop1;
7220 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field \
7221 ? reduce_to_bit_field_precision ((expr), \
7222 target, \
7223 type) \
7224 : (expr))
7226 type = ops->type;
7227 mode = TYPE_MODE (type);
7228 unsignedp = TYPE_UNSIGNED (type);
7230 treeop0 = ops->op0;
7231 treeop1 = ops->op1;
7233 /* We should be called only on simple (binary or unary) expressions,
7234 exactly those that are valid in gimple expressions that aren't
7235 GIMPLE_SINGLE_RHS (or invalid). */
7236 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS
7237 || get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS);
7239 ignore = (target == const0_rtx
7240 || ((CONVERT_EXPR_CODE_P (code)
7241 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
7242 && TREE_CODE (type) == VOID_TYPE));
7244 /* We should be called only if we need the result. */
7245 gcc_assert (!ignore);
7247 /* An operation in what may be a bit-field type needs the
7248 result to be reduced to the precision of the bit-field type,
7249 which is narrower than that of the type's mode. */
7250 reduce_bit_field = (TREE_CODE (type) == INTEGER_TYPE
7251 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
7253 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
7254 target = 0;
7256 /* Use subtarget as the target for operand 0 of a binary operation. */
7257 subtarget = get_subtarget (target);
7258 original_target = target;
7260 switch (code)
7262 case NON_LVALUE_EXPR:
7263 case PAREN_EXPR:
7264 CASE_CONVERT:
7265 if (treeop0 == error_mark_node)
7266 return const0_rtx;
7268 if (TREE_CODE (type) == UNION_TYPE)
7270 tree valtype = TREE_TYPE (treeop0);
7272 /* If both input and output are BLKmode, this conversion isn't doing
7273 anything except possibly changing memory attribute. */
7274 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7276 rtx result = expand_expr (treeop0, target, tmode,
7277 modifier);
7279 result = copy_rtx (result);
7280 set_mem_attributes (result, type, 0);
7281 return result;
7284 if (target == 0)
7286 if (TYPE_MODE (type) != BLKmode)
7287 target = gen_reg_rtx (TYPE_MODE (type));
7288 else
7289 target = assign_temp (type, 0, 1, 1);
7292 if (MEM_P (target))
7293 /* Store data into beginning of memory target. */
7294 store_expr (treeop0,
7295 adjust_address (target, TYPE_MODE (valtype), 0),
7296 modifier == EXPAND_STACK_PARM,
7297 false);
7299 else
7301 gcc_assert (REG_P (target));
7303 /* Store this field into a union of the proper type. */
7304 store_field (target,
7305 MIN ((int_size_in_bytes (TREE_TYPE
7306 (treeop0))
7307 * BITS_PER_UNIT),
7308 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7309 0, TYPE_MODE (valtype), treeop0,
7310 type, 0, false);
7313 /* Return the entire union. */
7314 return target;
7317 if (mode == TYPE_MODE (TREE_TYPE (treeop0)))
7319 op0 = expand_expr (treeop0, target, VOIDmode,
7320 modifier);
7322 /* If the signedness of the conversion differs and OP0 is
7323 a promoted SUBREG, clear that indication since we now
7324 have to do the proper extension. */
7325 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)) != unsignedp
7326 && GET_CODE (op0) == SUBREG)
7327 SUBREG_PROMOTED_VAR_P (op0) = 0;
7329 return REDUCE_BIT_FIELD (op0);
7332 op0 = expand_expr (treeop0, NULL_RTX, mode,
7333 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
7334 if (GET_MODE (op0) == mode)
7337 /* If OP0 is a constant, just convert it into the proper mode. */
7338 else if (CONSTANT_P (op0))
7340 tree inner_type = TREE_TYPE (treeop0);
7341 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7343 if (modifier == EXPAND_INITIALIZER)
7344 op0 = simplify_gen_subreg (mode, op0, inner_mode,
7345 subreg_lowpart_offset (mode,
7346 inner_mode));
7347 else
7348 op0= convert_modes (mode, inner_mode, op0,
7349 TYPE_UNSIGNED (inner_type));
7352 else if (modifier == EXPAND_INITIALIZER)
7353 op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7355 else if (target == 0)
7356 op0 = convert_to_mode (mode, op0,
7357 TYPE_UNSIGNED (TREE_TYPE
7358 (treeop0)));
7359 else
7361 convert_move (target, op0,
7362 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
7363 op0 = target;
7366 return REDUCE_BIT_FIELD (op0);
7368 case ADDR_SPACE_CONVERT_EXPR:
7370 tree treeop0_type = TREE_TYPE (treeop0);
7371 addr_space_t as_to;
7372 addr_space_t as_from;
7374 gcc_assert (POINTER_TYPE_P (type));
7375 gcc_assert (POINTER_TYPE_P (treeop0_type));
7377 as_to = TYPE_ADDR_SPACE (TREE_TYPE (type));
7378 as_from = TYPE_ADDR_SPACE (TREE_TYPE (treeop0_type));
7380 /* Conversions between pointers to the same address space should
7381 have been implemented via CONVERT_EXPR / NOP_EXPR. */
7382 gcc_assert (as_to != as_from);
7384 /* Ask target code to handle conversion between pointers
7385 to overlapping address spaces. */
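   /* E.g. a cast of a generic pointer into a named address space that the
      target declares to be a subset of the generic one lands here; the
      addr_space.convert hook decides how the address bits are widened,
      narrowed or rebased.  Disjoint spaces fall through to the
      null-pointer case below.  */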
7386 if (targetm.addr_space.subset_p (as_to, as_from)
7387 || targetm.addr_space.subset_p (as_from, as_to))
7389 op0 = expand_expr (treeop0, NULL_RTX, VOIDmode, modifier);
7390 op0 = targetm.addr_space.convert (op0, treeop0_type, type);
7391 gcc_assert (op0);
7392 return op0;
7395 /* For disjoint address spaces, converting anything but
7396 a null pointer invokes undefined behaviour. We simply
7397 always return a null pointer here. */
7398 return CONST0_RTX (mode);
7401 case POINTER_PLUS_EXPR:
7402 /* Even though the sizetype mode and the pointer's mode can be different,
7403 expand is able to handle this correctly and get the correct result out
7404 of the PLUS_EXPR code. */
7405 /* Make sure to sign-extend the sizetype offset in a POINTER_PLUS_EXPR
7406 if sizetype precision is smaller than pointer precision. */
7407 if (TYPE_PRECISION (sizetype) < TYPE_PRECISION (type))
7408 treeop1 = fold_convert_loc (loc, type,
7409 fold_convert_loc (loc, ssizetype,
7410 treeop1));
7411 case PLUS_EXPR:
7413 /* Check if this is a case for multiplication and addition. */
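   /* Roughly, this matches a widening multiply-accumulate such as
        long r = (long) a * (long) b + c;    (a, b narrower than long)
      and uses [us]madd_widen_optab (or the saturating fixed-point
      variants) when the target provides the corresponding instruction.  */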
7414 if ((TREE_CODE (type) == INTEGER_TYPE
7415 || TREE_CODE (type) == FIXED_POINT_TYPE)
7416 && (subexp0_def = get_def_for_expr (treeop0,
7417 MULT_EXPR)))
7419 tree subsubexp0, subsubexp1;
7420 gimple subsubexp0_def, subsubexp1_def;
7421 enum tree_code this_code;
7423 this_code = TREE_CODE (type) == INTEGER_TYPE ? NOP_EXPR
7424 : FIXED_CONVERT_EXPR;
7425 subsubexp0 = gimple_assign_rhs1 (subexp0_def);
7426 subsubexp0_def = get_def_for_expr (subsubexp0, this_code);
7427 subsubexp1 = gimple_assign_rhs2 (subexp0_def);
7428 subsubexp1_def = get_def_for_expr (subsubexp1, this_code);
7429 if (subsubexp0_def && subsubexp1_def
7430 && (top0 = gimple_assign_rhs1 (subsubexp0_def))
7431 && (top1 = gimple_assign_rhs1 (subsubexp1_def))
7432 && (TYPE_PRECISION (TREE_TYPE (top0))
7433 < TYPE_PRECISION (TREE_TYPE (subsubexp0)))
7434 && (TYPE_PRECISION (TREE_TYPE (top0))
7435 == TYPE_PRECISION (TREE_TYPE (top1)))
7436 && (TYPE_UNSIGNED (TREE_TYPE (top0))
7437 == TYPE_UNSIGNED (TREE_TYPE (top1))))
7439 tree op0type = TREE_TYPE (top0);
7440 enum machine_mode innermode = TYPE_MODE (op0type);
7441 bool zextend_p = TYPE_UNSIGNED (op0type);
7442 bool sat_p = TYPE_SATURATING (TREE_TYPE (subsubexp0));
7443 if (sat_p == 0)
7444 this_optab = zextend_p ? umadd_widen_optab : smadd_widen_optab;
7445 else
7446 this_optab = zextend_p ? usmadd_widen_optab
7447 : ssmadd_widen_optab;
7448 if (mode == GET_MODE_2XWIDER_MODE (innermode)
7449 && (optab_handler (this_optab, mode)->insn_code
7450 != CODE_FOR_nothing))
7452 expand_operands (top0, top1, NULL_RTX, &op0, &op1,
7453 EXPAND_NORMAL);
7454 op2 = expand_expr (treeop1, subtarget,
7455 VOIDmode, EXPAND_NORMAL);
7456 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
7457 target, unsignedp);
7458 gcc_assert (temp);
7459 return REDUCE_BIT_FIELD (temp);
7464 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
7465 something else, make sure we add the register to the constant and
7466 then to the other thing. This case can occur during strength
7467 reduction and doing it this way will produce better code if the
7468 frame pointer or argument pointer is eliminated.
7470 fold-const.c will ensure that the constant is always in the inner
7471 PLUS_EXPR, so the only case we need to do anything about is if
7472 sp, ap, or fp is our second argument, in which case we must swap
7473 the innermost first argument and our second argument. */
7475 if (TREE_CODE (treeop0) == PLUS_EXPR
7476 && TREE_CODE (TREE_OPERAND (treeop0, 1)) == INTEGER_CST
7477 && TREE_CODE (treeop1) == VAR_DECL
7478 && (DECL_RTL (treeop1) == frame_pointer_rtx
7479 || DECL_RTL (treeop1) == stack_pointer_rtx
7480 || DECL_RTL (treeop1) == arg_pointer_rtx))
7482 tree t = treeop1;
7484 treeop1 = TREE_OPERAND (treeop0, 0);
7485 TREE_OPERAND (treeop0, 0) = t;
7488 /* If the result is to be ptr_mode and we are adding an integer to
7489 something, we might be forming a constant. So try to use
7490 plus_constant. If it produces a sum and we can't accept it,
7491 use force_operand. This allows P = &ARR[const] to generate
7492 efficient code on machines where a SYMBOL_REF is not a valid
7493 address.
7495 If this is an EXPAND_SUM call, always return the sum. */
7496 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7497 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7499 if (modifier == EXPAND_STACK_PARM)
7500 target = 0;
7501 if (TREE_CODE (treeop0) == INTEGER_CST
7502 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7503 && TREE_CONSTANT (treeop1))
7505 rtx constant_part;
7507 op1 = expand_expr (treeop1, subtarget, VOIDmode,
7508 EXPAND_SUM);
7509 /* Use immed_double_const to ensure that the constant is
7510 truncated according to the mode of OP1, then sign extended
7511 to a HOST_WIDE_INT. Using the constant directly can result
7512 in non-canonical RTL in a 64x32 cross compile. */
7513 constant_part
7514 = immed_double_const (TREE_INT_CST_LOW (treeop0),
7515 (HOST_WIDE_INT) 0,
7516 TYPE_MODE (TREE_TYPE (treeop1)));
7517 op1 = plus_constant (op1, INTVAL (constant_part));
7518 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7519 op1 = force_operand (op1, target);
7520 return REDUCE_BIT_FIELD (op1);
7523 else if (TREE_CODE (treeop1) == INTEGER_CST
7524 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7525 && TREE_CONSTANT (treeop0))
7527 rtx constant_part;
7529 op0 = expand_expr (treeop0, subtarget, VOIDmode,
7530 (modifier == EXPAND_INITIALIZER
7531 ? EXPAND_INITIALIZER : EXPAND_SUM));
7532 if (! CONSTANT_P (op0))
7534 op1 = expand_expr (treeop1, NULL_RTX,
7535 VOIDmode, modifier);
7536 /* Return a PLUS if modifier says it's OK. */
7537 if (modifier == EXPAND_SUM
7538 || modifier == EXPAND_INITIALIZER)
7539 return simplify_gen_binary (PLUS, mode, op0, op1);
7540 goto binop2;
7542 /* Use immed_double_const to ensure that the constant is
7543 truncated according to the mode of OP1, then sign extended
7544 to a HOST_WIDE_INT. Using the constant directly can result
7545 in non-canonical RTL in a 64x32 cross compile. */
7546 constant_part
7547 = immed_double_const (TREE_INT_CST_LOW (treeop1),
7548 (HOST_WIDE_INT) 0,
7549 TYPE_MODE (TREE_TYPE (treeop0)));
7550 op0 = plus_constant (op0, INTVAL (constant_part));
7551 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7552 op0 = force_operand (op0, target);
7553 return REDUCE_BIT_FIELD (op0);
7557 /* No sense saving up arithmetic to be done
7558 if it's all in the wrong mode to form part of an address.
7559 And force_operand won't know whether to sign-extend or
7560 zero-extend. */
7561 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7562 || mode != ptr_mode)
7564 expand_operands (treeop0, treeop1,
7565 subtarget, &op0, &op1, EXPAND_NORMAL);
7566 if (op0 == const0_rtx)
7567 return op1;
7568 if (op1 == const0_rtx)
7569 return op0;
7570 goto binop2;
7573 expand_operands (treeop0, treeop1,
7574 subtarget, &op0, &op1, modifier);
7575 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
7577 case MINUS_EXPR:
7578 /* Check if this is a case for multiplication and subtraction. */
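   /* The mirror image of the PLUS_EXPR case above: roughly
        long r = c - (long) a * (long) b;
      expanded via [us]msub_widen_optab (or its saturating variants) when
      the target implements a widening multiply-subtract.  */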
7579 if ((TREE_CODE (type) == INTEGER_TYPE
7580 || TREE_CODE (type) == FIXED_POINT_TYPE)
7581 && (subexp1_def = get_def_for_expr (treeop1,
7582 MULT_EXPR)))
7584 tree subsubexp0, subsubexp1;
7585 gimple subsubexp0_def, subsubexp1_def;
7586 enum tree_code this_code;
7588 this_code = TREE_CODE (type) == INTEGER_TYPE ? NOP_EXPR
7589 : FIXED_CONVERT_EXPR;
7590 subsubexp0 = gimple_assign_rhs1 (subexp1_def);
7591 subsubexp0_def = get_def_for_expr (subsubexp0, this_code);
7592 subsubexp1 = gimple_assign_rhs2 (subexp1_def);
7593 subsubexp1_def = get_def_for_expr (subsubexp1, this_code);
7594 if (subsubexp0_def && subsubexp1_def
7595 && (top0 = gimple_assign_rhs1 (subsubexp0_def))
7596 && (top1 = gimple_assign_rhs1 (subsubexp1_def))
7597 && (TYPE_PRECISION (TREE_TYPE (top0))
7598 < TYPE_PRECISION (TREE_TYPE (subsubexp0)))
7599 && (TYPE_PRECISION (TREE_TYPE (top0))
7600 == TYPE_PRECISION (TREE_TYPE (top1)))
7601 && (TYPE_UNSIGNED (TREE_TYPE (top0))
7602 == TYPE_UNSIGNED (TREE_TYPE (top1))))
7604 tree op0type = TREE_TYPE (top0);
7605 enum machine_mode innermode = TYPE_MODE (op0type);
7606 bool zextend_p = TYPE_UNSIGNED (op0type);
7607 bool sat_p = TYPE_SATURATING (TREE_TYPE (subsubexp0));
7608 if (sat_p == 0)
7609 this_optab = zextend_p ? umsub_widen_optab : smsub_widen_optab;
7610 else
7611 this_optab = zextend_p ? usmsub_widen_optab
7612 : ssmsub_widen_optab;
7613 if (mode == GET_MODE_2XWIDER_MODE (innermode)
7614 && (optab_handler (this_optab, mode)->insn_code
7615 != CODE_FOR_nothing))
7617 expand_operands (top0, top1, NULL_RTX, &op0, &op1,
7618 EXPAND_NORMAL);
7619 op2 = expand_expr (treeop0, subtarget,
7620 VOIDmode, EXPAND_NORMAL);
7621 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
7622 target, unsignedp);
7623 gcc_assert (temp);
7624 return REDUCE_BIT_FIELD (temp);
7629 /* For initializers, we are allowed to return a MINUS of two
7630 symbolic constants. Here we handle all cases when both operands
7631 are constant. */
7632 /* Handle difference of two symbolic constants,
7633 for the sake of an initializer. */
7634 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7635 && really_constant_p (treeop0)
7636 && really_constant_p (treeop1))
7638 expand_operands (treeop0, treeop1,
7639 NULL_RTX, &op0, &op1, modifier);
7641 /* If the last operand is a CONST_INT, use plus_constant of
7642 the negated constant. Else make the MINUS. */
7643 if (CONST_INT_P (op1))
7644 return REDUCE_BIT_FIELD (plus_constant (op0, - INTVAL (op1)));
7645 else
7646 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
7649 /* No sense saving up arithmetic to be done
7650 if it's all in the wrong mode to form part of an address.
7651 And force_operand won't know whether to sign-extend or
7652 zero-extend. */
7653 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7654 || mode != ptr_mode)
7655 goto binop;
7657 expand_operands (treeop0, treeop1,
7658 subtarget, &op0, &op1, modifier);
7660 /* Convert A - const to A + (-const). */
7661 if (CONST_INT_P (op1))
7663 op1 = negate_rtx (mode, op1);
7664 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
7667 goto binop2;
7669 case WIDEN_MULT_EXPR:
7670 /* If first operand is constant, swap them.
7671 Thus the following special case checks need only
7672 check the second operand. */
7673 if (TREE_CODE (treeop0) == INTEGER_CST)
7675 tree t1 = treeop0;
7676 treeop0 = treeop1;
7677 treeop1 = t1;
7680 /* First, check if we have a multiplication of one signed and one
7681 unsigned operand. */
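   /* E.g. multiplying a signed int by an unsigned int to produce a long
      long can use usmul_widen_optab; the operands are swapped if needed
      so they reach the pattern in a consistent signed/unsigned order.  */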
7682 if (TREE_CODE (treeop1) != INTEGER_CST
7683 && (TYPE_UNSIGNED (TREE_TYPE (treeop0))
7684 != TYPE_UNSIGNED (TREE_TYPE (treeop1))))
7686 enum machine_mode innermode = TYPE_MODE (TREE_TYPE (treeop0));
7687 this_optab = usmul_widen_optab;
7688 if (mode == GET_MODE_2XWIDER_MODE (innermode))
7690 if (optab_handler (this_optab, mode)->insn_code != CODE_FOR_nothing)
7692 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)))
7693 expand_operands (treeop0, treeop1, subtarget, &op0, &op1,
7694 EXPAND_NORMAL);
7695 else
7696 expand_operands (treeop0, treeop1, subtarget, &op1, &op0,
7697 EXPAND_NORMAL);
7698 goto binop3;
7702 /* Check for a multiplication with matching signedness. */
7703 else if ((TREE_CODE (treeop1) == INTEGER_CST
7704 && int_fits_type_p (treeop1, TREE_TYPE (treeop0)))
7705 || (TYPE_UNSIGNED (TREE_TYPE (treeop1))
7706 == TYPE_UNSIGNED (TREE_TYPE (treeop0))))
7708 tree op0type = TREE_TYPE (treeop0);
7709 enum machine_mode innermode = TYPE_MODE (op0type);
7710 bool zextend_p = TYPE_UNSIGNED (op0type);
7711 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
7712 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
7714 if (mode == GET_MODE_2XWIDER_MODE (innermode))
7716 if (optab_handler (this_optab, mode)->insn_code != CODE_FOR_nothing)
7718 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
7719 EXPAND_NORMAL);
7720 temp = expand_widening_mult (mode, op0, op1, target,
7721 unsignedp, this_optab);
7722 return REDUCE_BIT_FIELD (temp);
7724 if (optab_handler (other_optab, mode)->insn_code != CODE_FOR_nothing
7725 && innermode == word_mode)
7727 rtx htem, hipart;
7728 op0 = expand_normal (treeop0);
7729 if (TREE_CODE (treeop1) == INTEGER_CST)
7730 op1 = convert_modes (innermode, mode,
7731 expand_normal (treeop1), unsignedp);
7732 else
7733 op1 = expand_normal (treeop1);
7734 temp = expand_binop (mode, other_optab, op0, op1, target,
7735 unsignedp, OPTAB_LIB_WIDEN);
7736 hipart = gen_highpart (innermode, temp);
7737 htem = expand_mult_highpart_adjust (innermode, hipart,
7738 op0, op1, hipart,
7739 zextend_p);
7740 if (htem != hipart)
7741 emit_move_insn (hipart, htem);
7742 return REDUCE_BIT_FIELD (temp);
7746 treeop0 = fold_build1 (CONVERT_EXPR, type, treeop0);
7747 treeop1 = fold_build1 (CONVERT_EXPR, type, treeop1);
7748 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
7749 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
7751 case MULT_EXPR:
7752 /* If this is a fixed-point operation, then we cannot use the code
7753 below because "expand_mult" doesn't support sat/no-sat fixed-point
7754 multiplications. */
7755 if (ALL_FIXED_POINT_MODE_P (mode))
7756 goto binop;
7758 /* If first operand is constant, swap them.
7759 Thus the following special case checks need only
7760 check the second operand. */
7761 if (TREE_CODE (treeop0) == INTEGER_CST)
7763 tree t1 = treeop0;
7764 treeop0 = treeop1;
7765 treeop1 = t1;
7768 /* Attempt to return something suitable for generating an
7769 indexed address, for machines that support that. */
7771 if (modifier == EXPAND_SUM && mode == ptr_mode
7772 && host_integerp (treeop1, 0))
7774 tree exp1 = treeop1;
7776 op0 = expand_expr (treeop0, subtarget, VOIDmode,
7777 EXPAND_SUM);
7779 if (!REG_P (op0))
7780 op0 = force_operand (op0, NULL_RTX);
7781 if (!REG_P (op0))
7782 op0 = copy_to_mode_reg (mode, op0);
7784 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
7785 gen_int_mode (tree_low_cst (exp1, 0),
7786 TYPE_MODE (TREE_TYPE (exp1)))));
7789 if (modifier == EXPAND_STACK_PARM)
7790 target = 0;
7792 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
7793 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
7795 case TRUNC_DIV_EXPR:
7796 case FLOOR_DIV_EXPR:
7797 case CEIL_DIV_EXPR:
7798 case ROUND_DIV_EXPR:
7799 case EXACT_DIV_EXPR:
7800 /* If this is a fixed-point operation, then we cannot use the code
7801 below because "expand_divmod" doesn't support sat/no-sat fixed-point
7802 divisions. */
7803 if (ALL_FIXED_POINT_MODE_P (mode))
7804 goto binop;
7806 if (modifier == EXPAND_STACK_PARM)
7807 target = 0;
7808 /* Possible optimization: compute the dividend with EXPAND_SUM
7809 then if the divisor is constant can optimize the case
7810 where some terms of the dividend have coeffs divisible by it. */
7811 expand_operands (treeop0, treeop1,
7812 subtarget, &op0, &op1, EXPAND_NORMAL);
7813 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7815 case RDIV_EXPR:
7816 goto binop;
7818 case TRUNC_MOD_EXPR:
7819 case FLOOR_MOD_EXPR:
7820 case CEIL_MOD_EXPR:
7821 case ROUND_MOD_EXPR:
7822 if (modifier == EXPAND_STACK_PARM)
7823 target = 0;
7824 expand_operands (treeop0, treeop1,
7825 subtarget, &op0, &op1, EXPAND_NORMAL);
7826 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7828 case FIXED_CONVERT_EXPR:
7829 op0 = expand_normal (treeop0);
7830 if (target == 0 || modifier == EXPAND_STACK_PARM)
7831 target = gen_reg_rtx (mode);
7833 if ((TREE_CODE (TREE_TYPE (treeop0)) == INTEGER_TYPE
7834 && TYPE_UNSIGNED (TREE_TYPE (treeop0)))
7835 || (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type)))
7836 expand_fixed_convert (target, op0, 1, TYPE_SATURATING (type));
7837 else
7838 expand_fixed_convert (target, op0, 0, TYPE_SATURATING (type));
7839 return target;
7841 case FIX_TRUNC_EXPR:
7842 op0 = expand_normal (treeop0);
7843 if (target == 0 || modifier == EXPAND_STACK_PARM)
7844 target = gen_reg_rtx (mode);
7845 expand_fix (target, op0, unsignedp);
7846 return target;
7848 case FLOAT_EXPR:
7849 op0 = expand_normal (treeop0);
7850 if (target == 0 || modifier == EXPAND_STACK_PARM)
7851 target = gen_reg_rtx (mode);
7852 /* expand_float can't figure out what to do if FROM has VOIDmode.
7853 So give it the correct mode. With -O, cse will optimize this. */
7854 if (GET_MODE (op0) == VOIDmode)
7855 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (treeop0)),
7856 op0);
7857 expand_float (target, op0,
7858 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
7859 return target;
7861 case NEGATE_EXPR:
7862 op0 = expand_expr (treeop0, subtarget,
7863 VOIDmode, EXPAND_NORMAL);
7864 if (modifier == EXPAND_STACK_PARM)
7865 target = 0;
7866 temp = expand_unop (mode,
7867 optab_for_tree_code (NEGATE_EXPR, type,
7868 optab_default),
7869 op0, target, 0);
7870 gcc_assert (temp);
7871 return REDUCE_BIT_FIELD (temp);
7873 case ABS_EXPR:
7874 op0 = expand_expr (treeop0, subtarget,
7875 VOIDmode, EXPAND_NORMAL);
7876 if (modifier == EXPAND_STACK_PARM)
7877 target = 0;
7879 /* ABS_EXPR is not valid for complex arguments. */
7880 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7881 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
7883 /* Unsigned abs is simply the operand. Testing here means we don't
7884 risk generating incorrect code below. */
7885 if (TYPE_UNSIGNED (type))
7886 return op0;
7888 return expand_abs (mode, op0, target, unsignedp,
7889 safe_from_p (target, treeop0, 1));
7891 case MAX_EXPR:
7892 case MIN_EXPR:
7893 target = original_target;
7894 if (target == 0
7895 || modifier == EXPAND_STACK_PARM
7896 || (MEM_P (target) && MEM_VOLATILE_P (target))
7897 || GET_MODE (target) != mode
7898 || (REG_P (target)
7899 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7900 target = gen_reg_rtx (mode);
7901 expand_operands (treeop0, treeop1,
7902 target, &op0, &op1, EXPAND_NORMAL);
7904 /* First try to do it with a special MIN or MAX instruction.
7905 If that does not win, use a conditional jump to select the proper
7906 value. */
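   /* E.g. for a MAX_EXPR such as "x = a > b ? a : b": first try a direct
      smax/umax instruction; if that fails, fall back to a conditional
      move, and finally to the explicit compare-and-branch sequence below.  */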
7907 this_optab = optab_for_tree_code (code, type, optab_default);
7908 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7909 OPTAB_WIDEN);
7910 if (temp != 0)
7911 return temp;
7913 /* At this point, a MEM target is no longer useful; we will get better
7914 code without it. */
7916 if (! REG_P (target))
7917 target = gen_reg_rtx (mode);
7919 /* If op1 was placed in target, swap op0 and op1. */
7920 if (target != op0 && target == op1)
7922 temp = op0;
7923 op0 = op1;
7924 op1 = temp;
7927 /* We generate better code and avoid problems with op1 mentioning
7928 target by forcing op1 into a pseudo if it isn't a constant. */
7929 if (! CONSTANT_P (op1))
7930 op1 = force_reg (mode, op1);
7933 enum rtx_code comparison_code;
7934 rtx cmpop1 = op1;
7936 if (code == MAX_EXPR)
7937 comparison_code = unsignedp ? GEU : GE;
7938 else
7939 comparison_code = unsignedp ? LEU : LE;
7941 /* Canonicalize to comparisons against 0. */
7942 if (op1 == const1_rtx)
7944 /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
7945 or (a != 0 ? a : 1) for unsigned.
7946 For MIN we are safe converting (a <= 1 ? a : 1)
7947 into (a <= 0 ? a : 1) */
7948 cmpop1 = const0_rtx;
7949 if (code == MAX_EXPR)
7950 comparison_code = unsignedp ? NE : GT;
7952 if (op1 == constm1_rtx && !unsignedp)
7954 /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
7955 and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
7956 cmpop1 = const0_rtx;
7957 if (code == MIN_EXPR)
7958 comparison_code = LT;
7960 #ifdef HAVE_conditional_move
7961 /* Use a conditional move if possible. */
7962 if (can_conditionally_move_p (mode))
7964 rtx insn;
7966 /* ??? Same problem as in expmed.c: emit_conditional_move
7967 forces a stack adjustment via compare_from_rtx, and we
7968 lose the stack adjustment if the sequence we are about
7969 to create is discarded. */
7970 do_pending_stack_adjust ();
7972 start_sequence ();
7974 /* Try to emit the conditional move. */
7975 insn = emit_conditional_move (target, comparison_code,
7976 op0, cmpop1, mode,
7977 op0, op1, mode,
7978 unsignedp);
7980 /* If we could do the conditional move, emit the sequence,
7981 and return. */
7982 if (insn)
7984 rtx seq = get_insns ();
7985 end_sequence ();
7986 emit_insn (seq);
7987 return target;
7990 /* Otherwise discard the sequence and fall back to code with
7991 branches. */
7992 end_sequence ();
7994 #endif
7995 if (target != op0)
7996 emit_move_insn (target, op0);
7998 temp = gen_label_rtx ();
7999 do_compare_rtx_and_jump (target, cmpop1, comparison_code,
8000 unsignedp, mode, NULL_RTX, NULL_RTX, temp,
8001 -1);
8003 emit_move_insn (target, op1);
8004 emit_label (temp);
8005 return target;
8007 case BIT_NOT_EXPR:
8008 op0 = expand_expr (treeop0, subtarget,
8009 VOIDmode, EXPAND_NORMAL);
8010 if (modifier == EXPAND_STACK_PARM)
8011 target = 0;
8012 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8013 gcc_assert (temp);
8014 return temp;
8016 /* ??? Can optimize bitwise operations with one arg constant.
8017 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8018 and (a bitwise1 b) bitwise2 b (etc)
8019 but that is probably not worth while. */
8021 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8022 boolean values when we want in all cases to compute both of them. In
8023 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8024 as actual zero-or-1 values and then bitwise anding. In cases where
8025 there cannot be any side effects, better code would be made by
8026 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8027 how to recognize those cases. */
8029 case TRUTH_AND_EXPR:
8030 code = BIT_AND_EXPR;
8031 case BIT_AND_EXPR:
8032 goto binop;
8034 case TRUTH_OR_EXPR:
8035 code = BIT_IOR_EXPR;
8036 case BIT_IOR_EXPR:
8037 goto binop;
8039 case TRUTH_XOR_EXPR:
8040 code = BIT_XOR_EXPR;
8041 case BIT_XOR_EXPR:
8042 goto binop;
8044 case LROTATE_EXPR:
8045 case RROTATE_EXPR:
8046 gcc_assert (VECTOR_MODE_P (TYPE_MODE (type))
8047 || (GET_MODE_PRECISION (TYPE_MODE (type))
8048 == TYPE_PRECISION (type)));
8049 /* fall through */
8051 case LSHIFT_EXPR:
8052 case RSHIFT_EXPR:
8053 /* If this is a fixed-point operation, then we cannot use the code
8054 below because "expand_shift" doesn't support sat/no-sat fixed-point
8055 shifts. */
8056 if (ALL_FIXED_POINT_MODE_P (mode))
8057 goto binop;
8059 if (! safe_from_p (subtarget, treeop1, 1))
8060 subtarget = 0;
8061 if (modifier == EXPAND_STACK_PARM)
8062 target = 0;
8063 op0 = expand_expr (treeop0, subtarget,
8064 VOIDmode, EXPAND_NORMAL);
8065 temp = expand_shift (code, mode, op0, treeop1, target,
8066 unsignedp);
8067 if (code == LSHIFT_EXPR)
8068 temp = REDUCE_BIT_FIELD (temp);
8069 return temp;
8071 /* Could determine the answer when only additive constants differ. Also,
8072 the addition of one can be handled by changing the condition. */
8073 case LT_EXPR:
8074 case LE_EXPR:
8075 case GT_EXPR:
8076 case GE_EXPR:
8077 case EQ_EXPR:
8078 case NE_EXPR:
8079 case UNORDERED_EXPR:
8080 case ORDERED_EXPR:
8081 case UNLT_EXPR:
8082 case UNLE_EXPR:
8083 case UNGT_EXPR:
8084 case UNGE_EXPR:
8085 case UNEQ_EXPR:
8086 case LTGT_EXPR:
8087 temp = do_store_flag (ops,
8088 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8089 tmode != VOIDmode ? tmode : mode);
8090 if (temp)
8091 return temp;
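   /* do_store_flag covers the common case, e.g. "r = (a < b);", via a
      store-flag (cstore) pattern; otherwise the fallback below loads 0
      into the target and branches around a store of 1 when the
      comparison is false.  */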
8093 /* Use a compare and a jump for BLKmode comparisons, or for function
8094 type comparisons if HAVE_canonicalize_funcptr_for_compare. */
8096 if ((target == 0
8097 || modifier == EXPAND_STACK_PARM
8098 || ! safe_from_p (target, treeop0, 1)
8099 || ! safe_from_p (target, treeop1, 1)
8100 /* Make sure we don't have a hard reg (such as function's return
8101 value) live across basic blocks, if not optimizing. */
8102 || (!optimize && REG_P (target)
8103 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8104 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8106 emit_move_insn (target, const0_rtx);
8108 op1 = gen_label_rtx ();
8109 jumpifnot_1 (code, treeop0, treeop1, op1, -1);
8111 emit_move_insn (target, const1_rtx);
8113 emit_label (op1);
8114 return target;
8116 case TRUTH_NOT_EXPR:
8117 if (modifier == EXPAND_STACK_PARM)
8118 target = 0;
8119 op0 = expand_expr (treeop0, target,
8120 VOIDmode, EXPAND_NORMAL);
8121 /* The parser is careful to generate TRUTH_NOT_EXPR
8122 only with operands that are always zero or one. */
8123 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8124 target, 1, OPTAB_LIB_WIDEN);
8125 gcc_assert (temp);
8126 return temp;
8128 case COMPLEX_EXPR:
8129 /* Get the rtx code of the operands. */
8130 op0 = expand_normal (treeop0);
8131 op1 = expand_normal (treeop1);
8133 if (!target)
8134 target = gen_reg_rtx (TYPE_MODE (type));
8136 /* Move the real (op0) and imaginary (op1) parts to their location. */
8137 write_complex_part (target, op0, false);
8138 write_complex_part (target, op1, true);
8140 return target;
8142 case WIDEN_SUM_EXPR:
8144 tree oprnd0 = treeop0;
8145 tree oprnd1 = treeop1;
8147 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8148 target = expand_widen_pattern_expr (ops, op0, NULL_RTX, op1,
8149 target, unsignedp);
8150 return target;
8153 case REDUC_MAX_EXPR:
8154 case REDUC_MIN_EXPR:
8155 case REDUC_PLUS_EXPR:
8157 op0 = expand_normal (treeop0);
8158 this_optab = optab_for_tree_code (code, type, optab_default);
8159 temp = expand_unop (mode, this_optab, op0, target, unsignedp);
8160 gcc_assert (temp);
8161 return temp;
8164 case VEC_EXTRACT_EVEN_EXPR:
8165 case VEC_EXTRACT_ODD_EXPR:
8167 expand_operands (treeop0, treeop1,
8168 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8169 this_optab = optab_for_tree_code (code, type, optab_default);
8170 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8171 OPTAB_WIDEN);
8172 gcc_assert (temp);
8173 return temp;
8176 case VEC_INTERLEAVE_HIGH_EXPR:
8177 case VEC_INTERLEAVE_LOW_EXPR:
8179 expand_operands (treeop0, treeop1,
8180 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8181 this_optab = optab_for_tree_code (code, type, optab_default);
8182 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8183 OPTAB_WIDEN);
8184 gcc_assert (temp);
8185 return temp;
8188 case VEC_LSHIFT_EXPR:
8189 case VEC_RSHIFT_EXPR:
8191 target = expand_vec_shift_expr (ops, target);
8192 return target;
8195 case VEC_UNPACK_HI_EXPR:
8196 case VEC_UNPACK_LO_EXPR:
8198 op0 = expand_normal (treeop0);
8199 this_optab = optab_for_tree_code (code, type, optab_default);
8200 temp = expand_widen_pattern_expr (ops, op0, NULL_RTX, NULL_RTX,
8201 target, unsignedp);
8202 gcc_assert (temp);
8203 return temp;
8206 case VEC_UNPACK_FLOAT_HI_EXPR:
8207 case VEC_UNPACK_FLOAT_LO_EXPR:
8209 op0 = expand_normal (treeop0);
8210 /* The signedness is determined from the input operand. */
8211 this_optab = optab_for_tree_code (code,
8212 TREE_TYPE (treeop0),
8213 optab_default);
8214 temp = expand_widen_pattern_expr
8215 (ops, op0, NULL_RTX, NULL_RTX,
8216 target, TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8218 gcc_assert (temp);
8219 return temp;
8222 case VEC_WIDEN_MULT_HI_EXPR:
8223 case VEC_WIDEN_MULT_LO_EXPR:
8225 tree oprnd0 = treeop0;
8226 tree oprnd1 = treeop1;
8228 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8229 target = expand_widen_pattern_expr (ops, op0, op1, NULL_RTX,
8230 target, unsignedp);
8231 gcc_assert (target);
8232 return target;
8235 case VEC_PACK_TRUNC_EXPR:
8236 case VEC_PACK_SAT_EXPR:
8237 case VEC_PACK_FIX_TRUNC_EXPR:
8238 mode = TYPE_MODE (TREE_TYPE (treeop0));
8239 goto binop;
8241 default:
8242 gcc_unreachable ();
8245 /* Here to do an ordinary binary operator. */
8246 binop:
8247 expand_operands (treeop0, treeop1,
8248 subtarget, &op0, &op1, EXPAND_NORMAL);
8249 binop2:
8250 this_optab = optab_for_tree_code (code, type, optab_default);
8251 binop3:
8252 if (modifier == EXPAND_STACK_PARM)
8253 target = 0;
8254 temp = expand_binop (mode, this_optab, op0, op1, target,
8255 unsignedp, OPTAB_LIB_WIDEN);
8256 gcc_assert (temp);
8257 return REDUCE_BIT_FIELD (temp);
8259 #undef REDUCE_BIT_FIELD
8262 expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
8263 enum expand_modifier modifier, rtx *alt_rtl)
8265 rtx op0, op1, temp, decl_rtl;
8266 tree type;
8267 int unsignedp;
8268 enum machine_mode mode;
8269 enum tree_code code = TREE_CODE (exp);
8270 optab this_optab;
8271 rtx subtarget, original_target;
8272 int ignore;
8273 tree context;
8274 bool reduce_bit_field;
8275 location_t loc = EXPR_LOCATION (exp);
8276 struct separate_ops ops;
8277 tree treeop0, treeop1, treeop2;
8279 type = TREE_TYPE (exp);
8280 mode = TYPE_MODE (type);
8281 unsignedp = TYPE_UNSIGNED (type);
8283 treeop0 = treeop1 = treeop2 = NULL_TREE;
8284 if (!VL_EXP_CLASS_P (exp))
8285 switch (TREE_CODE_LENGTH (code))
8287 default:
8288 case 3: treeop2 = TREE_OPERAND (exp, 2);
8289 case 2: treeop1 = TREE_OPERAND (exp, 1);
8290 case 1: treeop0 = TREE_OPERAND (exp, 0);
8291 case 0: break;
8293 ops.code = code;
8294 ops.type = type;
8295 ops.op0 = treeop0;
8296 ops.op1 = treeop1;
8297 ops.op2 = treeop2;
8298 ops.location = loc;
8300 ignore = (target == const0_rtx
8301 || ((CONVERT_EXPR_CODE_P (code)
8302 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
8303 && TREE_CODE (type) == VOID_TYPE));
8305 /* An operation in what may be a bit-field type needs the
8306 result to be reduced to the precision of the bit-field type,
8307 which is narrower than that of the type's mode. */
8308 reduce_bit_field = (!ignore
8309 && TREE_CODE (type) == INTEGER_TYPE
8310 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
8312 /* If we are going to ignore this result, we need only do something
8313 if there is a side-effect somewhere in the expression. If there
8314 is, short-circuit the most common cases here. Note that we must
8315 not call expand_expr with anything but const0_rtx in case this
8316 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
8318 if (ignore)
8320 if (! TREE_SIDE_EFFECTS (exp))
8321 return const0_rtx;
8323 /* Ensure we reference a volatile object even if value is ignored, but
8324 don't do this if all we are doing is taking its address. */
8325 if (TREE_THIS_VOLATILE (exp)
8326 && TREE_CODE (exp) != FUNCTION_DECL
8327 && mode != VOIDmode && mode != BLKmode
8328 && modifier != EXPAND_CONST_ADDRESS)
8330 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
8331 if (MEM_P (temp))
8332 temp = copy_to_reg (temp);
8333 return const0_rtx;
8336 if (TREE_CODE_CLASS (code) == tcc_unary
8337 || code == COMPONENT_REF || code == INDIRECT_REF)
8338 return expand_expr (treeop0, const0_rtx, VOIDmode,
8339 modifier);
8341 else if (TREE_CODE_CLASS (code) == tcc_binary
8342 || TREE_CODE_CLASS (code) == tcc_comparison
8343 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
8345 expand_expr (treeop0, const0_rtx, VOIDmode, modifier);
8346 expand_expr (treeop1, const0_rtx, VOIDmode, modifier);
8347 return const0_rtx;
8349 else if (code == BIT_FIELD_REF)
8351 expand_expr (treeop0, const0_rtx, VOIDmode, modifier);
8352 expand_expr (treeop1, const0_rtx, VOIDmode, modifier);
8353 expand_expr (treeop2, const0_rtx, VOIDmode, modifier);
8354 return const0_rtx;
8357 target = 0;
8360 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
8361 target = 0;
8363 /* Use subtarget as the target for operand 0 of a binary operation. */
8364 subtarget = get_subtarget (target);
8365 original_target = target;
8367 switch (code)
8369 case LABEL_DECL:
8371 tree function = decl_function_context (exp);
8373 temp = label_rtx (exp);
8374 temp = gen_rtx_LABEL_REF (Pmode, temp);
8376 if (function != current_function_decl
8377 && function != 0)
8378 LABEL_REF_NONLOCAL_P (temp) = 1;
8380 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
8381 return temp;
8384 case SSA_NAME:
8385 /* ??? ivopts calls expander, without any preparation from
8386 out-of-ssa. So fake instructions as if this was an access to the
8387 base variable. This unnecessarily allocates a pseudo, see how we can
8388 reuse it, if partition base vars have it set already. */
8389 if (!currently_expanding_to_rtl)
8390 return expand_expr_real_1 (SSA_NAME_VAR (exp), target, tmode, modifier, NULL);
8392 gimple g = get_gimple_for_ssa_name (exp);
8393 if (g)
8394 return expand_expr_real (gimple_assign_rhs_to_tree (g), target,
8395 tmode, modifier, NULL);
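   /* This is the TER (temporary expression replacement) path: when, say,
      _5 = a_1 + b_2 feeds a single use, get_gimple_for_ssa_name returns
      the defining statement and the RHS is expanded directly at the use
      instead of going through the partition's pseudo.  */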
8397 decl_rtl = get_rtx_for_ssa_name (exp);
8398 exp = SSA_NAME_VAR (exp);
8399 goto expand_decl_rtl;
8401 case PARM_DECL:
8402 case VAR_DECL:
8403 /* If a static var's type was incomplete when the decl was written,
8404 but the type is complete now, lay out the decl now. */
8405 if (DECL_SIZE (exp) == 0
8406 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
8407 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
8408 layout_decl (exp, 0);
8410 /* TLS emulation hook - replace __thread vars with
8411 *__emutls_get_address (&_emutls.var). */
8412 if (! targetm.have_tls
8413 && TREE_CODE (exp) == VAR_DECL
8414 && DECL_THREAD_LOCAL_P (exp))
8416 exp = build_fold_indirect_ref_loc (loc, emutls_var_address (exp));
8417 return expand_expr_real_1 (exp, target, tmode, modifier, NULL);
8420 /* ... fall through ... */
8422 case FUNCTION_DECL:
8423 case RESULT_DECL:
8424 decl_rtl = DECL_RTL (exp);
8425 expand_decl_rtl:
8426 gcc_assert (decl_rtl);
8427 decl_rtl = copy_rtx (decl_rtl);
8429 /* Ensure the variable is marked as used even if it doesn't go through
8430 a parser. If it hasn't been used yet, write out an external
8431 definition. */
8432 if (! TREE_USED (exp))
8434 assemble_external (exp);
8435 TREE_USED (exp) = 1;
8438 /* Show we haven't gotten RTL for this yet. */
8439 temp = 0;
8441 /* Variables inherited from containing functions should have
8442 been lowered by this point. */
8443 context = decl_function_context (exp);
8444 gcc_assert (!context
8445 || context == current_function_decl
8446 || TREE_STATIC (exp)
8447 /* ??? C++ creates functions that are not TREE_STATIC. */
8448 || TREE_CODE (exp) == FUNCTION_DECL);
8450 /* This is the case of an array whose size is to be determined
8451 from its initializer, while the initializer is still being parsed.
8452 See expand_decl. */
8454 if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
8455 temp = validize_mem (decl_rtl);
8457 /* If DECL_RTL is memory, we are in the normal case and the
8458 address is not valid, get the address into a register. */
8460 else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
8462 if (alt_rtl)
8463 *alt_rtl = decl_rtl;
8464 decl_rtl = use_anchored_address (decl_rtl);
8465 if (modifier != EXPAND_CONST_ADDRESS
8466 && modifier != EXPAND_SUM
8467 && !memory_address_addr_space_p (DECL_MODE (exp),
8468 XEXP (decl_rtl, 0),
8469 MEM_ADDR_SPACE (decl_rtl)))
8470 temp = replace_equiv_address (decl_rtl,
8471 copy_rtx (XEXP (decl_rtl, 0)));
8474 /* If we got something, return it. But first, set the alignment
8475 if the address is a register. */
8476 if (temp != 0)
8478 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
8479 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
8481 return temp;
8484 /* If the mode of DECL_RTL does not match that of the decl, it
8485 must be a promoted value. We return a SUBREG of the wanted mode,
8486 but mark it so that we know that it was already extended. */
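   /* E.g. on a target that promotes subword values, a 'short' PARM_DECL
      may live in an SImode register; we return a lowpart SUBREG with
      SUBREG_PROMOTED_VAR_P set so later code knows the extension has
      already been done.  */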
8488 if (REG_P (decl_rtl)
8489 && GET_MODE (decl_rtl) != DECL_MODE (exp))
8491 enum machine_mode pmode;
8493 /* Get the signedness used for this variable. Ensure we get the
8494 same mode we got when the variable was declared. */
8495 pmode = promote_decl_mode (exp, &unsignedp);
8496 gcc_assert (GET_MODE (decl_rtl) == pmode);
8498 temp = gen_lowpart_SUBREG (mode, decl_rtl);
8499 SUBREG_PROMOTED_VAR_P (temp) = 1;
8500 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
8501 return temp;
8504 return decl_rtl;
8506 case INTEGER_CST:
8507 temp = immed_double_const (TREE_INT_CST_LOW (exp),
8508 TREE_INT_CST_HIGH (exp), mode);
8510 return temp;
8512 case VECTOR_CST:
8514 tree tmp = NULL_TREE;
8515 if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT
8516 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT
8517 || GET_MODE_CLASS (mode) == MODE_VECTOR_FRACT
8518 || GET_MODE_CLASS (mode) == MODE_VECTOR_UFRACT
8519 || GET_MODE_CLASS (mode) == MODE_VECTOR_ACCUM
8520 || GET_MODE_CLASS (mode) == MODE_VECTOR_UACCUM)
8521 return const_vector_from_tree (exp);
8522 if (GET_MODE_CLASS (mode) == MODE_INT)
8524 tree type_for_mode = lang_hooks.types.type_for_mode (mode, 1);
8525 if (type_for_mode)
8526 tmp = fold_unary_loc (loc, VIEW_CONVERT_EXPR, type_for_mode, exp);
8528 if (!tmp)
8529 tmp = build_constructor_from_list (type,
8530 TREE_VECTOR_CST_ELTS (exp));
8531 return expand_expr (tmp, ignore ? const0_rtx : target,
8532 tmode, modifier);
8535 case CONST_DECL:
8536 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
8538 case REAL_CST:
8539 /* If optimized, generate immediate CONST_DOUBLE
8540 which will be turned into memory by reload if necessary.
8542 We used to force a register so that loop.c could see it. But
8543 this does not allow gen_* patterns to perform optimizations with
8544 the constants. It also produces two insns in cases like "x = 1.0;".
8545 On most machines, floating-point constants are not permitted in
8546 many insns, so we'd end up copying it to a register in any case.
8548 Now, we do the copying in expand_binop, if appropriate. */
8549 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
8550 TYPE_MODE (TREE_TYPE (exp)));
8552 case FIXED_CST:
8553 return CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (exp),
8554 TYPE_MODE (TREE_TYPE (exp)));
8556 case COMPLEX_CST:
8557 /* Handle evaluating a complex constant in a CONCAT target. */
8558 if (original_target && GET_CODE (original_target) == CONCAT)
8560 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8561 rtx rtarg, itarg;
8563 rtarg = XEXP (original_target, 0);
8564 itarg = XEXP (original_target, 1);
8566 /* Move the real and imaginary parts separately. */
8567 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, EXPAND_NORMAL);
8568 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, EXPAND_NORMAL);
8570 if (op0 != rtarg)
8571 emit_move_insn (rtarg, op0);
8572 if (op1 != itarg)
8573 emit_move_insn (itarg, op1);
8575 return original_target;
8578 /* ... fall through ... */
8580 case STRING_CST:
8581 temp = expand_expr_constant (exp, 1, modifier);
8583 /* temp contains a constant address.
8584 On RISC machines where a constant address isn't valid,
8585 make some insns to get that address into a register. */
8586 if (modifier != EXPAND_CONST_ADDRESS
8587 && modifier != EXPAND_INITIALIZER
8588 && modifier != EXPAND_SUM
8589 && ! memory_address_addr_space_p (mode, XEXP (temp, 0),
8590 MEM_ADDR_SPACE (temp)))
8591 return replace_equiv_address (temp,
8592 copy_rtx (XEXP (temp, 0)));
8593 return temp;
8595 case SAVE_EXPR:
8597 tree val = treeop0;
8598 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);
8600 if (!SAVE_EXPR_RESOLVED_P (exp))
8602 /* We can indeed still hit this case, typically via builtin
8603 expanders calling save_expr immediately before expanding
8604 something. Assume this means that we only have to deal
8605 with non-BLKmode values. */
8606 gcc_assert (GET_MODE (ret) != BLKmode);
8608 val = build_decl (EXPR_LOCATION (exp),
8609 VAR_DECL, NULL, TREE_TYPE (exp));
8610 DECL_ARTIFICIAL (val) = 1;
8611 DECL_IGNORED_P (val) = 1;
8612 treeop0 = val;
8613 TREE_OPERAND (exp, 0) = treeop0;
8614 SAVE_EXPR_RESOLVED_P (exp) = 1;
8616 if (!CONSTANT_P (ret))
8617 ret = copy_to_reg (ret);
8618 SET_DECL_RTL (val, ret);
8621 return ret;
8625 case CONSTRUCTOR:
8626 /* If we don't need the result, just ensure we evaluate any
8627 subexpressions. */
8628 if (ignore)
8630 unsigned HOST_WIDE_INT idx;
8631 tree value;
8633 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
8634 expand_expr (value, const0_rtx, VOIDmode, EXPAND_NORMAL);
8636 return const0_rtx;
8639 return expand_constructor (exp, target, modifier, false);
8641 case MISALIGNED_INDIRECT_REF:
8642 case ALIGN_INDIRECT_REF:
8643 case INDIRECT_REF:
8645 tree exp1 = treeop0;
8646 addr_space_t as = ADDR_SPACE_GENERIC;
8647 enum machine_mode address_mode = Pmode;
8649 if (modifier != EXPAND_WRITE)
8651 tree t;
8653 t = fold_read_from_constant_string (exp);
8654 if (t)
8655 return expand_expr (t, target, tmode, modifier);
8658 if (POINTER_TYPE_P (TREE_TYPE (exp1)))
8660 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp1)));
8661 address_mode = targetm.addr_space.address_mode (as);
8664 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
8665 op0 = memory_address_addr_space (mode, op0, as);
8667 if (code == ALIGN_INDIRECT_REF)
8669 int align = TYPE_ALIGN_UNIT (type);
8670 op0 = gen_rtx_AND (address_mode, op0, GEN_INT (-align));
8671 op0 = memory_address_addr_space (mode, op0, as);
8674 temp = gen_rtx_MEM (mode, op0);
8676 set_mem_attributes (temp, exp, 0);
8677 set_mem_addr_space (temp, as);
8679 /* Resolve the misalignment now, so that we don't have to remember
8680 to resolve it later. Of course, this only works for reads. */
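   /* E.g. the vectorizer uses MISALIGNED_INDIRECT_REF for unaligned
      vector loads; movmisalign_optab expands it to the target's
      unaligned-load instruction and the value is returned in a fresh
      pseudo.  */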
8681 if (code == MISALIGNED_INDIRECT_REF)
8683 int icode;
8684 rtx reg, insn;
8686 gcc_assert (modifier == EXPAND_NORMAL
8687 || modifier == EXPAND_STACK_PARM);
8689 /* The vectorizer should have already checked the mode. */
8690 icode = optab_handler (movmisalign_optab, mode)->insn_code;
8691 gcc_assert (icode != CODE_FOR_nothing);
8693 /* We've already validated the memory, and we're creating a
8694 new pseudo destination. The predicates really can't fail. */
8695 reg = gen_reg_rtx (mode);
8697 /* Nor can the insn generator. */
8698 insn = GEN_FCN (icode) (reg, temp);
8699 emit_insn (insn);
8701 return reg;
8704 return temp;
8707 case TARGET_MEM_REF:
8709 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (exp));
8710 struct mem_address addr;
8711 tree base;
8713 get_address_description (exp, &addr);
8714 op0 = addr_for_mem_ref (&addr, as, true);
8715 op0 = memory_address_addr_space (mode, op0, as);
8716 temp = gen_rtx_MEM (mode, op0);
8717 set_mem_attributes (temp, TMR_ORIGINAL (exp), 0);
8718 set_mem_addr_space (temp, as);
8719 base = get_base_address (TMR_ORIGINAL (exp));
8720 if (INDIRECT_REF_P (base)
8721 && TMR_BASE (exp)
8722 && TREE_CODE (TMR_BASE (exp)) == SSA_NAME
8723 && POINTER_TYPE_P (TREE_TYPE (TMR_BASE (exp))))
8725 set_mem_expr (temp, build1 (INDIRECT_REF,
8726 TREE_TYPE (exp), TMR_BASE (exp)));
8727 set_mem_offset (temp, NULL_RTX);
8730 return temp;
8732 case ARRAY_REF:
8735 tree array = treeop0;
8736 tree index = treeop1;
8738 /* Fold an expression like: "foo"[2].
8739 This is not done in fold so it won't happen inside &.
8740 Don't fold if this is for wide characters since it's too
8741 difficult to do correctly and this is a very rare case. */
8743 if (modifier != EXPAND_CONST_ADDRESS
8744 && modifier != EXPAND_INITIALIZER
8745 && modifier != EXPAND_MEMORY)
8747 tree t = fold_read_from_constant_string (exp);
8749 if (t)
8750 return expand_expr (t, target, tmode, modifier);
8753 /* If this is a constant index into a constant array,
8754 just get the value from the array. Handle both the cases when
8755 we have an explicit constructor and when our operand is a variable
8756 that was declared const. */
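   /* E.g. with "static const int tbl[3] = { 2, 3, 5 };", a reference to
      tbl[1] can be folded to the constant 3 here, either from an explicit
      CONSTRUCTOR operand or from the DECL_INITIAL of the const variable
      handled further below.  */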
8758 if (modifier != EXPAND_CONST_ADDRESS
8759 && modifier != EXPAND_INITIALIZER
8760 && modifier != EXPAND_MEMORY
8761 && TREE_CODE (array) == CONSTRUCTOR
8762 && ! TREE_SIDE_EFFECTS (array)
8763 && TREE_CODE (index) == INTEGER_CST)
8765 unsigned HOST_WIDE_INT ix;
8766 tree field, value;
8768 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
8769 field, value)
8770 if (tree_int_cst_equal (field, index))
8772 if (!TREE_SIDE_EFFECTS (value))
8773 return expand_expr (fold (value), target, tmode, modifier);
8774 break;
8778 else if (optimize >= 1
8779 && modifier != EXPAND_CONST_ADDRESS
8780 && modifier != EXPAND_INITIALIZER
8781 && modifier != EXPAND_MEMORY
8782 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
8783 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
8784 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
8785 && targetm.binds_local_p (array))
8787 if (TREE_CODE (index) == INTEGER_CST)
8789 tree init = DECL_INITIAL (array);
8791 if (TREE_CODE (init) == CONSTRUCTOR)
8793 unsigned HOST_WIDE_INT ix;
8794 tree field, value;
8796 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
8797 field, value)
8798 if (tree_int_cst_equal (field, index))
8800 if (TREE_SIDE_EFFECTS (value))
8801 break;
8803 if (TREE_CODE (value) == CONSTRUCTOR)
8805 /* If VALUE is a CONSTRUCTOR, this
8806 optimization is only useful if
8807 this doesn't store the CONSTRUCTOR
8808 into memory. If it does, it is more
8809 efficient to just load the data from
8810 the array directly. */
8811 rtx ret = expand_constructor (value, target,
8812 modifier, true);
8813 if (ret == NULL_RTX)
8814 break;
8817 return expand_expr (fold (value), target, tmode,
8818 modifier);
8821 else if (TREE_CODE (init) == STRING_CST)
8823 tree index1 = index;
8824 tree low_bound = array_ref_low_bound (exp);
8825 index1 = fold_convert_loc (loc, sizetype,
8826 treeop1);
8828 /* Optimize the special case of a zero lower bound.
8830 We convert the low_bound to sizetype to avoid some problems
8831 with constant folding. (E.g. suppose the lower bound is 1,
8832 and its mode is QI. Without the conversion, (ARRAY
8833 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
8834 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
8836 if (! integer_zerop (low_bound))
8837 index1 = size_diffop_loc (loc, index1,
8838 fold_convert_loc (loc, sizetype,
8839 low_bound));
8841 if (0 > compare_tree_int (index1,
8842 TREE_STRING_LENGTH (init)))
8844 tree type = TREE_TYPE (TREE_TYPE (init));
8845 enum machine_mode mode = TYPE_MODE (type);
8847 if (GET_MODE_CLASS (mode) == MODE_INT
8848 && GET_MODE_SIZE (mode) == 1)
8849 return gen_int_mode (TREE_STRING_POINTER (init)
8850 [TREE_INT_CST_LOW (index1)],
8851 mode);
8857 goto normal_inner_ref;
8859 case COMPONENT_REF:
8860 /* If the operand is a CONSTRUCTOR, we can just extract the
8861 appropriate field if it is present. */
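   /* Roughly: when the object being accessed is itself a CONSTRUCTOR
      node, e.g. the field f of a constructor { .f = 7, .g = 9 }, the
      value can be taken straight from the matching element, with
      bit-field values masked or sign-extended as the code below does.  */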
8862 if (TREE_CODE (treeop0) == CONSTRUCTOR)
8864 unsigned HOST_WIDE_INT idx;
8865 tree field, value;
8867 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (treeop0),
8868 idx, field, value)
8869 if (field == treeop1
8870 /* We can normally use the value of the field in the
8871 CONSTRUCTOR. However, if this is a bitfield in
8872 an integral mode that we can fit in a HOST_WIDE_INT,
8873 we must mask only the number of bits in the bitfield,
8874 since this is done implicitly by the constructor. If
8875 the bitfield does not meet either of those conditions,
8876 we can't do this optimization. */
8877 && (! DECL_BIT_FIELD (field)
8878 || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
8879 && (GET_MODE_BITSIZE (DECL_MODE (field))
8880 <= HOST_BITS_PER_WIDE_INT))))
8882 if (DECL_BIT_FIELD (field)
8883 && modifier == EXPAND_STACK_PARM)
8884 target = 0;
8885 op0 = expand_expr (value, target, tmode, modifier);
8886 if (DECL_BIT_FIELD (field))
8888 HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
8889 enum machine_mode imode = TYPE_MODE (TREE_TYPE (field));
8891 if (TYPE_UNSIGNED (TREE_TYPE (field)))
8893 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
8894 op0 = expand_and (imode, op0, op1, target);
8896 else
8898 tree count
8899 = build_int_cst (NULL_TREE,
8900 GET_MODE_BITSIZE (imode) - bitsize);
8902 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
8903 target, 0);
8904 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
8905 target, 0);
8909 return op0;
8912 goto normal_inner_ref;
8914 case BIT_FIELD_REF:
8915 case ARRAY_RANGE_REF:
8916 normal_inner_ref:
8918 enum machine_mode mode1, mode2;
8919 HOST_WIDE_INT bitsize, bitpos;
8920 tree offset;
8921 int volatilep = 0, must_force_mem;
8922 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
8923 &mode1, &unsignedp, &volatilep, true);
8924 rtx orig_op0, memloc;
8926 /* If we got back the original object, something is wrong. Perhaps
8927 we are evaluating an expression too early. In any event, don't
8928 infinitely recurse. */
8929 gcc_assert (tem != exp);
8931 /* If TEM's type is a union of variable size, pass TARGET to the inner
8932 computation, since it will need a temporary and TARGET is known
8933 to have to do. This occurs in unchecked conversion in Ada. */
8934 orig_op0 = op0
8935 = expand_expr (tem,
8936 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
8937 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
8938 != INTEGER_CST)
8939 && modifier != EXPAND_STACK_PARM
8940 ? target : NULL_RTX),
8941 VOIDmode,
8942 (modifier == EXPAND_INITIALIZER
8943 || modifier == EXPAND_CONST_ADDRESS
8944 || modifier == EXPAND_STACK_PARM)
8945 ? modifier : EXPAND_NORMAL);
8947 mode2
8948 = CONSTANT_P (op0) ? TYPE_MODE (TREE_TYPE (tem)) : GET_MODE (op0);
8950 /* If we have either an offset, a BLKmode result, or a reference
8951 outside the underlying object, we must force it to memory.
8952 Such a case can occur in Ada if we have unchecked conversion
8953 of an expression from a scalar type to an aggregate type or
8954 for an ARRAY_RANGE_REF whose type is BLKmode, or if we were
8955 passed a partially uninitialized object or a view-conversion
8956 to a larger size. */
8957 must_force_mem = (offset
8958 || mode1 == BLKmode
8959 || bitpos + bitsize > GET_MODE_BITSIZE (mode2));
8961 /* Handle CONCAT first. */
8962 if (GET_CODE (op0) == CONCAT && !must_force_mem)
8964 if (bitpos == 0
8965 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)))
8966 return op0;
8967 if (bitpos == 0
8968 && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
8969 && bitsize)
8971 op0 = XEXP (op0, 0);
8972 mode2 = GET_MODE (op0);
8974 else if (bitpos == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
8975 && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 1)))
8976 && bitpos
8977 && bitsize)
8979 op0 = XEXP (op0, 1);
8980 bitpos = 0;
8981 mode2 = GET_MODE (op0);
8983 else
8984 /* Otherwise force into memory. */
8985 must_force_mem = 1;
8988 /* If this is a constant, put it in a register if it is a legitimate
8989 constant and we don't need a memory reference. */
8990 if (CONSTANT_P (op0)
8991 && mode2 != BLKmode
8992 && LEGITIMATE_CONSTANT_P (op0)
8993 && !must_force_mem)
8994 op0 = force_reg (mode2, op0);
8996 /* Otherwise, if this is a constant, try to force it to the constant
8997 pool. Note that back-ends, e.g. MIPS, may refuse to do so if it
8998 is a legitimate constant. */
8999 else if (CONSTANT_P (op0) && (memloc = force_const_mem (mode2, op0)))
9000 op0 = validize_mem (memloc);
9002 /* Otherwise, if this is a constant or the object is not in memory
9003 and need be, put it there. */
9004 else if (CONSTANT_P (op0) || (!MEM_P (op0) && must_force_mem))
9006 tree nt = build_qualified_type (TREE_TYPE (tem),
9007 (TYPE_QUALS (TREE_TYPE (tem))
9008 | TYPE_QUAL_CONST));
9009 memloc = assign_temp (nt, 1, 1, 1);
9010 emit_move_insn (memloc, op0);
9011 op0 = memloc;
9014 if (offset)
9016 enum machine_mode address_mode;
9017 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
9018 EXPAND_SUM);
9020 gcc_assert (MEM_P (op0));
9022 address_mode
9023 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (op0));
9024 if (GET_MODE (offset_rtx) != address_mode)
9025 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
9027 if (GET_MODE (op0) == BLKmode
9028 /* A constant address in OP0 can have VOIDmode; we must
9029 not try to call force_reg in that case. */
9030 && GET_MODE (XEXP (op0, 0)) != VOIDmode
9031 && bitsize != 0
9032 && (bitpos % bitsize) == 0
9033 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
9034 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
9036 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
9037 bitpos = 0;
9040 op0 = offset_address (op0, offset_rtx,
9041 highest_pow2_factor (offset));
9044 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
9045 record its alignment as BIGGEST_ALIGNMENT. */
9046 if (MEM_P (op0) && bitpos == 0 && offset != 0
9047 && is_aligning_offset (offset, tem))
9048 set_mem_align (op0, BIGGEST_ALIGNMENT);
9050 /* Don't forget about volatility even if this is a bitfield. */
9051 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
9053 if (op0 == orig_op0)
9054 op0 = copy_rtx (op0);
9056 MEM_VOLATILE_P (op0) = 1;
9059 /* In cases where an aligned union has an unaligned object
9060 as a field, we might be extracting a BLKmode value from
9061 an integer-mode (e.g., SImode) object. Handle this case
9062 by doing the extract into an object as wide as the field
9063 (which we know to be the width of a basic mode), then
9064 storing into memory, and changing the mode to BLKmode. */
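   /* Illustratively, reading s.b from "struct { int a : 3; int b : 5; } s;"
      cannot be done as an ordinary memory load, so it goes through
      extract_bit_field below; the same path handles fields that are not
      sufficiently aligned to be fetched in their own mode.  */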
9065 if (mode1 == VOIDmode
9066 || REG_P (op0) || GET_CODE (op0) == SUBREG
9067 || (mode1 != BLKmode && ! direct_load[(int) mode1]
9068 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
9069 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
9070 && modifier != EXPAND_CONST_ADDRESS
9071 && modifier != EXPAND_INITIALIZER)
9072 /* If the field isn't aligned enough to fetch as a memref,
9073 fetch it as a bit field. */
9074 || (mode1 != BLKmode
9075 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
9076 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
9077 || (MEM_P (op0)
9078 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
9079 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
9080 && ((modifier == EXPAND_CONST_ADDRESS
9081 || modifier == EXPAND_INITIALIZER)
9082 ? STRICT_ALIGNMENT
9083 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
9084 || (bitpos % BITS_PER_UNIT != 0)))
9085 /* If the type and the field are a constant size and the
9086 size of the type isn't the same size as the bitfield,
9087 we must use bitfield operations. */
9088 || (bitsize >= 0
9089 && TYPE_SIZE (TREE_TYPE (exp))
9090 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
9091 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
9092 bitsize)))
9094 enum machine_mode ext_mode = mode;
9096 if (ext_mode == BLKmode
9097 && ! (target != 0 && MEM_P (op0)
9098 && MEM_P (target)
9099 && bitpos % BITS_PER_UNIT == 0))
9100 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
9102 if (ext_mode == BLKmode)
9104 if (target == 0)
9105 target = assign_temp (type, 0, 1, 1);
9107 if (bitsize == 0)
9108 return target;
9110 /* In this case, BITPOS must start at a byte boundary and
9111 TARGET, if specified, must be a MEM. */
9112 gcc_assert (MEM_P (op0)
9113 && (!target || MEM_P (target))
9114 && !(bitpos % BITS_PER_UNIT));
9116 emit_block_move (target,
9117 adjust_address (op0, VOIDmode,
9118 bitpos / BITS_PER_UNIT),
9119 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
9120 / BITS_PER_UNIT),
9121 (modifier == EXPAND_STACK_PARM
9122 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
9124 return target;
9127 op0 = validize_mem (op0);
9129 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
9130 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
9132 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
9133 (modifier == EXPAND_STACK_PARM
9134 ? NULL_RTX : target),
9135 ext_mode, ext_mode);
9137 /* If the result is a record type and BITSIZE is narrower than
9138 the mode of OP0, an integral mode, and this is a big endian
9139 machine, we must put the field into the high-order bits. */
9140 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
9141 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
9142 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
9143 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
9144 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
9145 - bitsize),
9146 op0, 1);
9148 /* If the result type is BLKmode, store the data into a temporary
9149 of the appropriate type, but with the mode corresponding to the
9150 mode for the data we have (op0's mode). It's tempting to make
9151 this a constant type, since we know it's only being stored once,
9152 but that can cause problems if we are taking the address of this
9153 COMPONENT_REF because the MEM of any reference via that address
9154 will have flags corresponding to the type, which will not
9155 necessarily be constant. */
9156 if (mode == BLKmode)
9158 HOST_WIDE_INT size = GET_MODE_BITSIZE (ext_mode);
9159 rtx new_rtx;
9161 /* If the reference doesn't use the alias set of its type,
9162 we cannot create the temporary using that type. */
9163 if (component_uses_parent_alias_set (exp))
9165 new_rtx = assign_stack_local (ext_mode, size, 0);
9166 set_mem_alias_set (new_rtx, get_alias_set (exp));
9168 else
9169 new_rtx = assign_stack_temp_for_type (ext_mode, size, 0, type);
9171 emit_move_insn (new_rtx, op0);
9172 op0 = copy_rtx (new_rtx);
9173 PUT_MODE (op0, BLKmode);
9174 set_mem_attributes (op0, exp, 1);
9177 return op0;
9180 /* If the result is BLKmode, use that to access the object
9181 now as well. */
9182 if (mode == BLKmode)
9183 mode1 = BLKmode;
9185 /* Get a reference to just this component. */
9186 if (modifier == EXPAND_CONST_ADDRESS
9187 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
9188 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
9189 else
9190 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
9192 if (op0 == orig_op0)
9193 op0 = copy_rtx (op0);
9195 set_mem_attributes (op0, exp, 0);
9196 if (REG_P (XEXP (op0, 0)))
9197 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
9199 MEM_VOLATILE_P (op0) |= volatilep;
9200 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
9201 || modifier == EXPAND_CONST_ADDRESS
9202 || modifier == EXPAND_INITIALIZER)
9203 return op0;
9204 else if (target == 0)
9205 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
9207 convert_move (target, op0, unsignedp);
9208 return target;
9211 case OBJ_TYPE_REF:
9212 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
9214 case CALL_EXPR:
9215 /* All valid uses of __builtin_va_arg_pack () are removed during
9216 inlining. */
9217 if (CALL_EXPR_VA_ARG_PACK (exp))
9218 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
9220 tree fndecl = get_callee_fndecl (exp), attr;
9222 if (fndecl
9223 && (attr = lookup_attribute ("error",
9224 DECL_ATTRIBUTES (fndecl))) != NULL)
9225 error ("%Kcall to %qs declared with attribute error: %s",
9226 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
9227 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
9228 if (fndecl
9229 && (attr = lookup_attribute ("warning",
9230 DECL_ATTRIBUTES (fndecl))) != NULL)
9231 warning_at (tree_nonartificial_location (exp),
9232 0, "%Kcall to %qs declared with attribute warning: %s",
9233 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
9234 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
9236 /* Check for a built-in function. */
9237 if (fndecl && DECL_BUILT_IN (fndecl))
9239 gcc_assert (DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_FRONTEND);
9240 return expand_builtin (exp, target, subtarget, tmode, ignore);
9243 return expand_call (exp, target, ignore);
9245 case VIEW_CONVERT_EXPR:
9246 op0 = NULL_RTX;
9248 /* If we are converting to BLKmode, try to avoid an intermediate
9249 temporary by fetching an inner memory reference. */
9250 if (mode == BLKmode
9251 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
9252 && TYPE_MODE (TREE_TYPE (treeop0)) != BLKmode
9253 && handled_component_p (treeop0))
9255 enum machine_mode mode1;
9256 HOST_WIDE_INT bitsize, bitpos;
9257 tree offset;
9258 int unsignedp;
9259 int volatilep = 0;
9260 tree tem
9261 = get_inner_reference (treeop0, &bitsize, &bitpos,
9262 &offset, &mode1, &unsignedp, &volatilep,
9263 true);
9264 rtx orig_op0;
9266 /* ??? We should work harder and deal with non-zero offsets. */
9267 if (!offset
9268 && (bitpos % BITS_PER_UNIT) == 0
9269 && bitsize >= 0
9270 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) == 0)
9272 /* See the normal_inner_ref case for the rationale. */
9273 orig_op0
9274 = expand_expr (tem,
9275 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
9276 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
9277 != INTEGER_CST)
9278 && modifier != EXPAND_STACK_PARM
9279 ? target : NULL_RTX),
9280 VOIDmode,
9281 (modifier == EXPAND_INITIALIZER
9282 || modifier == EXPAND_CONST_ADDRESS
9283 || modifier == EXPAND_STACK_PARM)
9284 ? modifier : EXPAND_NORMAL);
9286 if (MEM_P (orig_op0))
9288 op0 = orig_op0;
9290 /* Get a reference to just this component. */
9291 if (modifier == EXPAND_CONST_ADDRESS
9292 || modifier == EXPAND_SUM
9293 || modifier == EXPAND_INITIALIZER)
9294 op0 = adjust_address_nv (op0, mode, bitpos / BITS_PER_UNIT);
9295 else
9296 op0 = adjust_address (op0, mode, bitpos / BITS_PER_UNIT);
9298 if (op0 == orig_op0)
9299 op0 = copy_rtx (op0);
9301 set_mem_attributes (op0, treeop0, 0);
9302 if (REG_P (XEXP (op0, 0)))
9303 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
9305 MEM_VOLATILE_P (op0) |= volatilep;
9310 if (!op0)
9311 op0 = expand_expr (treeop0,
9312 NULL_RTX, VOIDmode, modifier);
9314 /* If the input and output modes are both the same, we are done. */
9315 if (mode == GET_MODE (op0))
9317 /* If neither mode is BLKmode, and both modes are the same size
9318 then we can use gen_lowpart. */
9319 else if (mode != BLKmode && GET_MODE (op0) != BLKmode
9320 && GET_MODE_SIZE (mode) == GET_MODE_SIZE (GET_MODE (op0))
9321 && !COMPLEX_MODE_P (GET_MODE (op0)))
9323 if (GET_CODE (op0) == SUBREG)
9324 op0 = force_reg (GET_MODE (op0), op0);
9325 op0 = gen_lowpart (mode, op0);
9327 /* If both modes are integral, then we can convert from one to the
9328 other. */
9329 else if (SCALAR_INT_MODE_P (GET_MODE (op0)) && SCALAR_INT_MODE_P (mode))
9330 op0 = convert_modes (mode, GET_MODE (op0), op0,
9331 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
9332 /* As a last resort, spill op0 to memory, and reload it in a
9333 different mode. */
9334 else if (!MEM_P (op0))
9336 /* If the operand is not a MEM, force it into memory. Since we
9337 are going to be changing the mode of the MEM, don't call
9338 force_const_mem for constants because we don't allow pool
9339 constants to change mode. */
9340 tree inner_type = TREE_TYPE (treeop0);
9342 gcc_assert (!TREE_ADDRESSABLE (exp));
9344 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
9345 target
9346 = assign_stack_temp_for_type
9347 (TYPE_MODE (inner_type),
9348 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
9350 emit_move_insn (target, op0);
9351 op0 = target;
9354 /* At this point, OP0 is in the correct mode. If the output type is
9355 such that the operand is known to be aligned, indicate that it is.
9356 Otherwise, we need only be concerned about alignment for non-BLKmode
9357 results. */
9358 if (MEM_P (op0))
9360 op0 = copy_rtx (op0);
9362 if (TYPE_ALIGN_OK (type))
9363 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
9364 else if (STRICT_ALIGNMENT
9365 && mode != BLKmode
9366 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode))
9368 tree inner_type = TREE_TYPE (treeop0);
9369 HOST_WIDE_INT temp_size
9370 = MAX (int_size_in_bytes (inner_type),
9371 (HOST_WIDE_INT) GET_MODE_SIZE (mode));
9372 rtx new_rtx
9373 = assign_stack_temp_for_type (mode, temp_size, 0, type);
9374 rtx new_with_op0_mode
9375 = adjust_address (new_rtx, GET_MODE (op0), 0);
9377 gcc_assert (!TREE_ADDRESSABLE (exp));
9379 if (GET_MODE (op0) == BLKmode)
9380 emit_block_move (new_with_op0_mode, op0,
9381 GEN_INT (GET_MODE_SIZE (mode)),
9382 (modifier == EXPAND_STACK_PARM
9383 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
9384 else
9385 emit_move_insn (new_with_op0_mode, op0);
9387 op0 = new_rtx;
9390 op0 = adjust_address (op0, mode, 0);
9393 return op0;
9395 /* Use a compare and a jump for BLKmode comparisons, or for function
9396 type comparisons if HAVE_canonicalize_funcptr_for_compare is defined. */
9398 /* Although TRUTH_{AND,OR}IF_EXPR aren't present in GIMPLE, they
9399 are occasionally created by folding during expansion. */
9400 case TRUTH_ANDIF_EXPR:
9401 case TRUTH_ORIF_EXPR:
9402 if (! ignore
9403 && (target == 0
9404 || modifier == EXPAND_STACK_PARM
9405 || ! safe_from_p (target, treeop0, 1)
9406 || ! safe_from_p (target, treeop1, 1)
9407 /* Make sure we don't have a hard reg (such as function's return
9408 value) live across basic blocks, if not optimizing. */
9409 || (!optimize && REG_P (target)
9410 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
9411 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
9413 if (target)
9414 emit_move_insn (target, const0_rtx);
9416 op1 = gen_label_rtx ();
9417 jumpifnot_1 (code, treeop0, treeop1, op1, -1);
9419 if (target)
9420 emit_move_insn (target, const1_rtx);
9422 emit_label (op1);
9423 return ignore ? const0_rtx : target;
9425 case STATEMENT_LIST:
9427 tree_stmt_iterator iter;
9429 gcc_assert (ignore);
9431 for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
9432 expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
9434 return const0_rtx;
9436 case COND_EXPR:
9437 /* A COND_EXPR with its type being VOID_TYPE represents a
9438 conditional jump and is handled in
9439 expand_gimple_cond_expr. */
9440 gcc_assert (!VOID_TYPE_P (type));
9442 /* Note that COND_EXPRs whose type is a structure or union
9443 are required to be constructed to contain assignments of
9444 a temporary variable, so that we can evaluate them here
9445 for side effect only. If type is void, we must do likewise. */
9447 gcc_assert (!TREE_ADDRESSABLE (type)
9448 && !ignore
9449 && TREE_TYPE (treeop1) != void_type_node
9450 && TREE_TYPE (treeop2) != void_type_node);
9452 /* If we are not to produce a result, we have no target. Otherwise,
9453 if a target was specified use it; it will not be used as an
9454 intermediate target unless it is safe. If no target, use a
9455 temporary. */
9457 if (modifier != EXPAND_STACK_PARM
9458 && original_target
9459 && safe_from_p (original_target, treeop0, 1)
9460 && GET_MODE (original_target) == mode
9461 #ifdef HAVE_conditional_move
9462 && (! can_conditionally_move_p (mode)
9463 || REG_P (original_target))
9464 #endif
9465 && !MEM_P (original_target))
9466 temp = original_target;
9467 else
9468 temp = assign_temp (type, 0, 0, 1);
9470 do_pending_stack_adjust ();
9471 NO_DEFER_POP;
9472 op0 = gen_label_rtx ();
9473 op1 = gen_label_rtx ();
9474 jumpifnot (treeop0, op0, -1);
9475 store_expr (treeop1, temp,
9476 modifier == EXPAND_STACK_PARM,
9477 false);
9479 emit_jump_insn (gen_jump (op1));
9480 emit_barrier ();
9481 emit_label (op0);
9482 store_expr (treeop2, temp,
9483 modifier == EXPAND_STACK_PARM,
9484 false);
9486 emit_label (op1);
9487 OK_DEFER_POP;
9488 return temp;
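/* The generated control flow for something like x = p ? a : b; is,
   roughly:

       if (!p) goto L0;
       temp = a;
       goto L1;
     L0:
       temp = b;
     L1:
       ... temp holds the result ...

   where temp is either the original target or the temporary chosen
   above (p, a, b and the labels are only illustrative names).  */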
9490 case VEC_COND_EXPR:
9491 target = expand_vec_cond_expr (type, treeop0, treeop1, treeop2, target);
9492 return target;
9494 case MODIFY_EXPR:
9496 tree lhs = treeop0;
9497 tree rhs = treeop1;
9498 gcc_assert (ignore);
9500 /* Check for |= or &= of a bitfield of size one into another bitfield
9501 of size 1. In this case, (unless we need the result of the
9502 assignment) we can do this more efficiently with a
9503 test followed by an assignment, if necessary.
9505 ??? At this point, we can't get a BIT_FIELD_REF here. But if
9506 things change so we do, this code should be enhanced to
9507 support it. */
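/* For instance, with one-bit fields a and b of a struct s, the
   statement s.a |= s.b; is expanded as if it were

     if (s.b)
       s.a = 1;

   and s.a &= s.b; as the converse test that stores 0 when s.b is zero
   (s, a and b are only illustrative names).  */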
9508 if (TREE_CODE (lhs) == COMPONENT_REF
9509 && (TREE_CODE (rhs) == BIT_IOR_EXPR
9510 || TREE_CODE (rhs) == BIT_AND_EXPR)
9511 && TREE_OPERAND (rhs, 0) == lhs
9512 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
9513 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
9514 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
9516 rtx label = gen_label_rtx ();
9517 int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
9518 do_jump (TREE_OPERAND (rhs, 1),
9519 value ? label : 0,
9520 value ? 0 : label, -1);
9521 expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value),
9522 MOVE_NONTEMPORAL (exp));
9523 do_pending_stack_adjust ();
9524 emit_label (label);
9525 return const0_rtx;
9528 expand_assignment (lhs, rhs, MOVE_NONTEMPORAL (exp));
9529 return const0_rtx;
9532 case ADDR_EXPR:
9533 return expand_expr_addr_expr (exp, target, tmode, modifier);
9535 case REALPART_EXPR:
9536 op0 = expand_normal (treeop0);
9537 return read_complex_part (op0, false);
9539 case IMAGPART_EXPR:
9540 op0 = expand_normal (treeop0);
9541 return read_complex_part (op0, true);
9543 case RETURN_EXPR:
9544 case LABEL_EXPR:
9545 case GOTO_EXPR:
9546 case SWITCH_EXPR:
9547 case ASM_EXPR:
9548 /* Expanded in cfgexpand.c. */
9549 gcc_unreachable ();
9551 case TRY_CATCH_EXPR:
9552 case CATCH_EXPR:
9553 case EH_FILTER_EXPR:
9554 case TRY_FINALLY_EXPR:
9555 /* Lowered by tree-eh.c. */
9556 gcc_unreachable ();
9558 case WITH_CLEANUP_EXPR:
9559 case CLEANUP_POINT_EXPR:
9560 case TARGET_EXPR:
9561 case CASE_LABEL_EXPR:
9562 case VA_ARG_EXPR:
9563 case BIND_EXPR:
9564 case INIT_EXPR:
9565 case CONJ_EXPR:
9566 case COMPOUND_EXPR:
9567 case PREINCREMENT_EXPR:
9568 case PREDECREMENT_EXPR:
9569 case POSTINCREMENT_EXPR:
9570 case POSTDECREMENT_EXPR:
9571 case LOOP_EXPR:
9572 case EXIT_EXPR:
9573 /* Lowered by gimplify.c. */
9574 gcc_unreachable ();
9576 case FDESC_EXPR:
9577 /* Function descriptors are not valid except for as
9578 initialization constants, and should not be expanded. */
9579 gcc_unreachable ();
9581 case WITH_SIZE_EXPR:
9582 /* WITH_SIZE_EXPR expands to its first argument. The caller should
9583 have pulled out the size to use in whatever context it needed. */
9584 return expand_expr_real (treeop0, original_target, tmode,
9585 modifier, alt_rtl);
9587 case REALIGN_LOAD_EXPR:
9589 tree oprnd0 = treeop0;
9590 tree oprnd1 = treeop1;
9591 tree oprnd2 = treeop2;
9592 rtx op2;
9594 this_optab = optab_for_tree_code (code, type, optab_default);
9595 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9596 op2 = expand_normal (oprnd2);
9597 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
9598 target, unsignedp);
9599 gcc_assert (temp);
9600 return temp;
9603 case DOT_PROD_EXPR:
9605 tree oprnd0 = treeop0;
9606 tree oprnd1 = treeop1;
9607 tree oprnd2 = treeop2;
9608 rtx op2;
9610 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9611 op2 = expand_normal (oprnd2);
9612 target = expand_widen_pattern_expr (&ops, op0, op1, op2,
9613 target, unsignedp);
9614 return target;
9617 case COMPOUND_LITERAL_EXPR:
9619 /* Initialize the anonymous variable declared in the compound
9620 literal, then return the variable. */
9621 tree decl = COMPOUND_LITERAL_EXPR_DECL (exp);
9623 /* Create RTL for this variable. */
9624 if (!DECL_RTL_SET_P (decl))
9626 if (DECL_HARD_REGISTER (decl))
9627 /* The user specified an assembler name for this variable.
9628 Set that up now. */
9629 rest_of_decl_compilation (decl, 0, 0);
9630 else
9631 expand_decl (decl);
9634 return expand_expr_real (decl, original_target, tmode,
9635 modifier, alt_rtl);
9638 default:
9639 return expand_expr_real_2 (&ops, target, tmode, modifier);
9643 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
9644 signedness of TYPE), possibly returning the result in TARGET. */
9645 static rtx
9646 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
9648 HOST_WIDE_INT prec = TYPE_PRECISION (type);
9649 if (target && GET_MODE (target) != GET_MODE (exp))
9650 target = 0;
9651 /* For constant values, reduce using build_int_cst_type. */
9652 if (CONST_INT_P (exp))
9654 HOST_WIDE_INT value = INTVAL (exp);
9655 tree t = build_int_cst_type (type, value);
9656 return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
9658 else if (TYPE_UNSIGNED (type))
9660 rtx mask = immed_double_int_const (double_int_mask (prec),
9661 GET_MODE (exp));
9662 return expand_and (GET_MODE (exp), exp, mask, target);
9664 else
9666 tree count = build_int_cst (NULL_TREE,
9667 GET_MODE_BITSIZE (GET_MODE (exp)) - prec);
9668 exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
9669 return expand_shift (RSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
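/* For example, reducing an SImode value to a signed 5-bit precision is
   roughly the source-level computation

     (value << 27) >> 27

   with an arithmetic right shift, while the unsigned case above is just
   the mask value & 0x1f; the 32-bit width and the precision of 5 are
   only illustrative.  */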
9673 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
9674 when applied to the address of EXP produces an address known to be
9675 aligned more than BIGGEST_ALIGNMENT. */
9677 static int
9678 is_aligning_offset (const_tree offset, const_tree exp)
9680 /* Strip off any conversions. */
9681 while (CONVERT_EXPR_P (offset))
9682 offset = TREE_OPERAND (offset, 0);
9684 /* We must now have a BIT_AND_EXPR with a constant that is one less than
9685 power of 2 and which is larger than BIGGEST_ALIGNMENT. */
9686 if (TREE_CODE (offset) != BIT_AND_EXPR
9687 || !host_integerp (TREE_OPERAND (offset, 1), 1)
9688 || compare_tree_int (TREE_OPERAND (offset, 1),
9689 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
9690 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0
9691 return 0;
9693 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9694 It must be NEGATE_EXPR. Then strip any more conversions. */
9695 offset = TREE_OPERAND (offset, 0);
9696 while (CONVERT_EXPR_P (offset))
9697 offset = TREE_OPERAND (offset, 0);
9699 if (TREE_CODE (offset) != NEGATE_EXPR)
9700 return 0;
9702 offset = TREE_OPERAND (offset, 0);
9703 while (CONVERT_EXPR_P (offset))
9704 offset = TREE_OPERAND (offset, 0);
9706 /* This must now be the address of EXP. */
9707 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
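/* The shape recognized above typically comes from manual alignment code
   of the form

     buf + ((- (uintptr_t) buf) & (align - 1))

   where align is a power of two larger than BIGGEST_ALIGNMENT; the
   BIT_AND_EXPR / NEGATE_EXPR / ADDR_EXPR chain corresponds to the
   (-buf) & (align - 1) part.  This is only a sketch of the usual source
   pattern.  */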
9710 /* Return the tree node if ARG corresponds to a string constant, or zero
9711 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
9712 in bytes within the string that ARG is accessing. The type of the
9713 offset will be `sizetype'. */
9715 tree
9716 string_constant (tree arg, tree *ptr_offset)
9718 tree array, offset, lower_bound;
9719 STRIP_NOPS (arg);
9721 if (TREE_CODE (arg) == ADDR_EXPR)
9723 if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9725 *ptr_offset = size_zero_node;
9726 return TREE_OPERAND (arg, 0);
9728 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
9730 array = TREE_OPERAND (arg, 0);
9731 offset = size_zero_node;
9733 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
9735 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
9736 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
9737 if (TREE_CODE (array) != STRING_CST
9738 && TREE_CODE (array) != VAR_DECL)
9739 return 0;
9741 /* Check if the array has a nonzero lower bound. */
9742 lower_bound = array_ref_low_bound (TREE_OPERAND (arg, 0));
9743 if (!integer_zerop (lower_bound))
9745 /* If the offset and base aren't both constants, return 0. */
9746 if (TREE_CODE (lower_bound) != INTEGER_CST)
9747 return 0;
9748 if (TREE_CODE (offset) != INTEGER_CST)
9749 return 0;
9750 /* Adjust offset by the lower bound. */
9751 offset = size_diffop (fold_convert (sizetype, offset),
9752 fold_convert (sizetype, lower_bound));
9755 else
9756 return 0;
9758 else if (TREE_CODE (arg) == PLUS_EXPR || TREE_CODE (arg) == POINTER_PLUS_EXPR)
9760 tree arg0 = TREE_OPERAND (arg, 0);
9761 tree arg1 = TREE_OPERAND (arg, 1);
9763 STRIP_NOPS (arg0);
9764 STRIP_NOPS (arg1);
9766 if (TREE_CODE (arg0) == ADDR_EXPR
9767 && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
9768 || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
9770 array = TREE_OPERAND (arg0, 0);
9771 offset = arg1;
9773 else if (TREE_CODE (arg1) == ADDR_EXPR
9774 && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
9775 || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
9777 array = TREE_OPERAND (arg1, 0);
9778 offset = arg0;
9780 else
9781 return 0;
9783 else
9784 return 0;
9786 if (TREE_CODE (array) == STRING_CST)
9788 *ptr_offset = fold_convert (sizetype, offset);
9789 return array;
9791 else if (TREE_CODE (array) == VAR_DECL)
9793 int length;
9795 /* Variables initialized to string literals can be handled too. */
9796 if (DECL_INITIAL (array) == NULL_TREE
9797 || TREE_CODE (DECL_INITIAL (array)) != STRING_CST)
9798 return 0;
9800 /* Only handle arrays that are read-only, non-volatile and bind locally. */
9801 if (! TREE_READONLY (array)
9802 || TREE_SIDE_EFFECTS (array)
9803 || ! targetm.binds_local_p (array))
9804 return 0;
9806 /* Avoid const char foo[4] = "abcde"; */
9807 if (DECL_SIZE_UNIT (array) == NULL_TREE
9808 || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
9809 || (length = TREE_STRING_LENGTH (DECL_INITIAL (array))) <= 0
9810 || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
9811 return 0;
9813 /* If variable is bigger than the string literal, OFFSET must be constant
9814 and inside of the bounds of the string literal. */
9815 offset = fold_convert (sizetype, offset);
9816 if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
9817 && (! host_integerp (offset, 1)
9818 || compare_tree_int (offset, length) >= 0))
9819 return 0;
9821 *ptr_offset = offset;
9822 return DECL_INITIAL (array);
9825 return 0;
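/* As an illustration, for a hypothetical declaration

     static const char msg[] = "abcd";

   passing the tree for &msg[2] would be expected to return the
   STRING_CST "abcd" with *PTR_OFFSET set to 2, while an argument whose
   base object or offset cannot be resolved yields 0.  */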
9828 /* Generate code to calculate OPS, an exploded expression
9829 reflecting a comparison, using a store-flag instruction, and return
9830 an rtx for the result.
9832 If TARGET is nonzero, store the result there if convenient.
9834 Return zero if there is no suitable set-flag instruction
9835 available on this machine.
9837 Once expand_expr has been called on the arguments of the comparison,
9838 we are committed to doing the store flag, since it is not safe to
9839 re-evaluate the expression. We emit the store-flag insn by calling
9840 emit_store_flag, but only expand the arguments if we have a reason
9841 to believe that emit_store_flag will be successful. If we think that
9842 it will, but it isn't, we have to simulate the store-flag with a
9843 set/jump/set sequence. */
9845 static rtx
9846 do_store_flag (sepops ops, rtx target, enum machine_mode mode)
9848 enum rtx_code code;
9849 tree arg0, arg1, type;
9850 tree tem;
9851 enum machine_mode operand_mode;
9852 int unsignedp;
9853 rtx op0, op1;
9854 rtx subtarget = target;
9855 location_t loc = ops->location;
9857 arg0 = ops->op0;
9858 arg1 = ops->op1;
9860 /* Don't crash if the comparison was erroneous. */
9861 if (arg0 == error_mark_node || arg1 == error_mark_node)
9862 return const0_rtx;
9864 type = TREE_TYPE (arg0);
9865 operand_mode = TYPE_MODE (type);
9866 unsignedp = TYPE_UNSIGNED (type);
9868 /* We won't bother with BLKmode store-flag operations because it would mean
9869 passing a lot of information to emit_store_flag. */
9870 if (operand_mode == BLKmode)
9871 return 0;
9873 /* We won't bother with store-flag operations involving function pointers
9874 when function pointers must be canonicalized before comparisons. */
9875 #ifdef HAVE_canonicalize_funcptr_for_compare
9876 if (HAVE_canonicalize_funcptr_for_compare
9877 && ((TREE_CODE (TREE_TYPE (arg0)) == POINTER_TYPE
9878 && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg0)))
9879 == FUNCTION_TYPE))
9880 || (TREE_CODE (TREE_TYPE (arg1)) == POINTER_TYPE
9881 && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg1)))
9882 == FUNCTION_TYPE))))
9883 return 0;
9884 #endif
9886 STRIP_NOPS (arg0);
9887 STRIP_NOPS (arg1);
9889 /* Get the rtx comparison code to use. We know that EXP is a comparison
9890 operation of some type. Some comparisons against 1 and -1 can be
9891 converted to comparisons with zero. Do so here so that the tests
9892 below will be aware that we have a comparison with zero. These
9893 tests will not catch constants in the first operand, but constants
9894 are rarely passed as the first operand. */
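/* For example, a signed x <= -1 is treated as x < 0, and an unsigned
   x >= 1 as x > 0 (GTU against zero), matching the conversions in the
   switch below.  */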
9896 switch (ops->code)
9898 case EQ_EXPR:
9899 code = EQ;
9900 break;
9901 case NE_EXPR:
9902 code = NE;
9903 break;
9904 case LT_EXPR:
9905 if (integer_onep (arg1))
9906 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
9907 else
9908 code = unsignedp ? LTU : LT;
9909 break;
9910 case LE_EXPR:
9911 if (! unsignedp && integer_all_onesp (arg1))
9912 arg1 = integer_zero_node, code = LT;
9913 else
9914 code = unsignedp ? LEU : LE;
9915 break;
9916 case GT_EXPR:
9917 if (! unsignedp && integer_all_onesp (arg1))
9918 arg1 = integer_zero_node, code = GE;
9919 else
9920 code = unsignedp ? GTU : GT;
9921 break;
9922 case GE_EXPR:
9923 if (integer_onep (arg1))
9924 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
9925 else
9926 code = unsignedp ? GEU : GE;
9927 break;
9929 case UNORDERED_EXPR:
9930 code = UNORDERED;
9931 break;
9932 case ORDERED_EXPR:
9933 code = ORDERED;
9934 break;
9935 case UNLT_EXPR:
9936 code = UNLT;
9937 break;
9938 case UNLE_EXPR:
9939 code = UNLE;
9940 break;
9941 case UNGT_EXPR:
9942 code = UNGT;
9943 break;
9944 case UNGE_EXPR:
9945 code = UNGE;
9946 break;
9947 case UNEQ_EXPR:
9948 code = UNEQ;
9949 break;
9950 case LTGT_EXPR:
9951 code = LTGT;
9952 break;
9954 default:
9955 gcc_unreachable ();
9958 /* Put a constant second. */
9959 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST
9960 || TREE_CODE (arg0) == FIXED_CST)
9962 tem = arg0; arg0 = arg1; arg1 = tem;
9963 code = swap_condition (code);
9966 /* If this is an equality or inequality test of a single bit, we can
9967 do this by shifting the bit being tested to the low-order bit and
9968 masking the result with the constant 1. If the condition was EQ,
9969 we xor it with 1. This does not require an scc insn and is faster
9970 than an scc insn even if we have it.
9972 The code to make this transformation was moved into fold_single_bit_test,
9973 so we just call into the folder and expand its result. */
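/* Roughly, a test such as (x & 8) != 0 is folded to the equivalent of
   (x >> 3) & 1, and the EQ form (x & 8) == 0 to ((x >> 3) & 1) ^ 1, so
   no scc instruction is needed.  */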
9975 if ((code == NE || code == EQ)
9976 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
9977 && integer_pow2p (TREE_OPERAND (arg0, 1)))
9979 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
9980 return expand_expr (fold_single_bit_test (loc,
9981 code == NE ? NE_EXPR : EQ_EXPR,
9982 arg0, arg1, type),
9983 target, VOIDmode, EXPAND_NORMAL);
9986 if (! get_subtarget (target)
9987 || GET_MODE (subtarget) != operand_mode)
9988 subtarget = 0;
9990 expand_operands (arg0, arg1, subtarget, &op0, &op1, EXPAND_NORMAL);
9992 if (target == 0)
9993 target = gen_reg_rtx (mode);
9995 /* Try a cstore if possible. */
9996 return emit_store_flag_force (target, code, op0, op1,
9997 operand_mode, unsignedp, 1);
10001 /* Stubs in case we haven't got a casesi insn. */
10002 #ifndef HAVE_casesi
10003 # define HAVE_casesi 0
10004 # define gen_casesi(a, b, c, d, e) (0)
10005 # define CODE_FOR_casesi CODE_FOR_nothing
10006 #endif
10008 /* Attempt to generate a casesi instruction. Returns 1 if successful,
10009 0 otherwise (i.e. if there is no casesi instruction). */
10010 int
10011 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
10012 rtx table_label ATTRIBUTE_UNUSED, rtx default_label,
10013 rtx fallback_label ATTRIBUTE_UNUSED)
10015 enum machine_mode index_mode = SImode;
10016 int index_bits = GET_MODE_BITSIZE (index_mode);
10017 rtx op1, op2, index;
10018 enum machine_mode op_mode;
10020 if (! HAVE_casesi)
10021 return 0;
10023 /* Convert the index to SImode. */
10024 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
10026 enum machine_mode omode = TYPE_MODE (index_type);
10027 rtx rangertx = expand_normal (range);
10029 /* We must handle the endpoints in the original mode. */
10030 index_expr = build2 (MINUS_EXPR, index_type,
10031 index_expr, minval);
10032 minval = integer_zero_node;
10033 index = expand_normal (index_expr);
10034 if (default_label)
10035 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
10036 omode, 1, default_label);
10037 /* Now we can safely truncate. */
10038 index = convert_to_mode (index_mode, index, 0);
10040 else
10042 if (TYPE_MODE (index_type) != index_mode)
10044 index_type = lang_hooks.types.type_for_size (index_bits, 0);
10045 index_expr = fold_convert (index_type, index_expr);
10048 index = expand_normal (index_expr);
10051 do_pending_stack_adjust ();
10053 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
10054 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
10055 (index, op_mode))
10056 index = copy_to_mode_reg (op_mode, index);
10058 op1 = expand_normal (minval);
10060 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
10061 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
10062 op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
10063 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
10064 (op1, op_mode))
10065 op1 = copy_to_mode_reg (op_mode, op1);
10067 op2 = expand_normal (range);
10069 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
10070 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
10071 op2, TYPE_UNSIGNED (TREE_TYPE (range)));
10072 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
10073 (op2, op_mode))
10074 op2 = copy_to_mode_reg (op_mode, op2);
10076 emit_jump_insn (gen_casesi (index, op1, op2,
10077 table_label, !default_label
10078 ? fallback_label : default_label));
10079 return 1;
10082 /* Attempt to generate a tablejump instruction; same concept. */
10083 #ifndef HAVE_tablejump
10084 #define HAVE_tablejump 0
10085 #define gen_tablejump(x, y) (0)
10086 #endif
10088 /* Subroutine of the next function.
10090 INDEX is the value being switched on, with the lowest value
10091 in the table already subtracted.
10092 MODE is its expected mode (needed if INDEX is constant).
10093 RANGE is the length of the jump table.
10094 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10096 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10097 index value is out of range. */
10099 static void
10100 do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
10101 rtx default_label)
10103 rtx temp, vector;
10105 if (INTVAL (range) > cfun->cfg->max_jumptable_ents)
10106 cfun->cfg->max_jumptable_ents = INTVAL (range);
10108 /* Do an unsigned comparison (in the proper mode) between the index
10109 expression and the value which represents the length of the range.
10110 Since we just finished subtracting the lower bound of the range
10111 from the index expression, this comparison allows us to simultaneously
10112 check that the original index expression value is both greater than
10113 or equal to the minimum value of the range and less than or equal to
10114 the maximum value of the range. */
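/* Concretely, if the case values span 5 .. 12, INDEX already has 5
   subtracted and RANGE is 7, so the single unsigned test
   (unsigned) (orig_index - 5) > 7 rejects both orig_index < 5 and
   orig_index > 12 in one branch; the particular bounds are only an
   illustration.  */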
10116 if (default_label)
10117 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10118 default_label);
10120 /* If index is in range, it must fit in Pmode.
10121 Convert to Pmode so we can index with it. */
10122 if (mode != Pmode)
10123 index = convert_to_mode (Pmode, index, 1);
10125 /* Don't let a MEM slip through, because then INDEX that comes
10126 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10127 and break_out_memory_refs will go to work on it and mess it up. */
10128 #ifdef PIC_CASE_VECTOR_ADDRESS
10129 if (flag_pic && !REG_P (index))
10130 index = copy_to_mode_reg (Pmode, index);
10131 #endif
10133 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10134 GET_MODE_SIZE, because this indicates how large insns are. The other
10135 uses should all be Pmode, because they are addresses. This code
10136 could fail if addresses and insns are not the same size. */
10137 index = gen_rtx_PLUS (Pmode,
10138 gen_rtx_MULT (Pmode, index,
10139 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10140 gen_rtx_LABEL_REF (Pmode, table_label));
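/* In other words, the address formed is essentially
   table_label + index * GET_MODE_SIZE (CASE_VECTOR_MODE), e.g. four
   bytes per entry when the case vector holds SImode entries.  */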
10141 #ifdef PIC_CASE_VECTOR_ADDRESS
10142 if (flag_pic)
10143 index = PIC_CASE_VECTOR_ADDRESS (index);
10144 else
10145 #endif
10146 index = memory_address (CASE_VECTOR_MODE, index);
10147 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10148 vector = gen_const_mem (CASE_VECTOR_MODE, index);
10149 convert_move (temp, vector, 0);
10151 emit_jump_insn (gen_tablejump (temp, table_label));
10153 /* If we are generating PIC code or if the table is PC-relative, the
10154 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10155 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10156 emit_barrier ();
10159 int
10160 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
10161 rtx table_label, rtx default_label)
10163 rtx index;
10165 if (! HAVE_tablejump)
10166 return 0;
10168 index_expr = fold_build2 (MINUS_EXPR, index_type,
10169 fold_convert (index_type, index_expr),
10170 fold_convert (index_type, minval));
10171 index = expand_normal (index_expr);
10172 do_pending_stack_adjust ();
10174 do_tablejump (index, TYPE_MODE (index_type),
10175 convert_modes (TYPE_MODE (index_type),
10176 TYPE_MODE (TREE_TYPE (range)),
10177 expand_normal (range),
10178 TYPE_UNSIGNED (TREE_TYPE (range))),
10179 table_label, default_label);
10180 return 1;
10183 /* Nonzero if the mode is a valid vector mode for this architecture.
10184 This returns nonzero even if there is no hardware support for the
10185 vector mode, but we can emulate with narrower modes. */
10187 int
10188 vector_mode_valid_p (enum machine_mode mode)
10190 enum mode_class mclass = GET_MODE_CLASS (mode);
10191 enum machine_mode innermode;
10193 /* Doh! What's going on? */
10194 if (mclass != MODE_VECTOR_INT
10195 && mclass != MODE_VECTOR_FLOAT
10196 && mclass != MODE_VECTOR_FRACT
10197 && mclass != MODE_VECTOR_UFRACT
10198 && mclass != MODE_VECTOR_ACCUM
10199 && mclass != MODE_VECTOR_UACCUM)
10200 return 0;
10202 /* Hardware support. Woo hoo! */
10203 if (targetm.vector_mode_supported_p (mode))
10204 return 1;
10206 innermode = GET_MODE_INNER (mode);
10208 /* We should probably return 1 if requesting V4DI and we have no DI,
10209 but do have V2DI; that case, however, is probably very unlikely. */
10211 /* If we have support for the inner mode, we can safely emulate it.
10212 We may not have V2DI, but we can emulate with a pair of DIs. */
10213 return targetm.scalar_mode_supported_p (innermode);
10216 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
10217 static rtx
10218 const_vector_from_tree (tree exp)
10220 rtvec v;
10221 int units, i;
10222 tree link, elt;
10223 enum machine_mode inner, mode;
10225 mode = TYPE_MODE (TREE_TYPE (exp));
10227 if (initializer_zerop (exp))
10228 return CONST0_RTX (mode);
10230 units = GET_MODE_NUNITS (mode);
10231 inner = GET_MODE_INNER (mode);
10233 v = rtvec_alloc (units);
10235 link = TREE_VECTOR_CST_ELTS (exp);
10236 for (i = 0; link; link = TREE_CHAIN (link), ++i)
10238 elt = TREE_VALUE (link);
10240 if (TREE_CODE (elt) == REAL_CST)
10241 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
10242 inner);
10243 else if (TREE_CODE (elt) == FIXED_CST)
10244 RTVEC_ELT (v, i) = CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (elt),
10245 inner);
10246 else
10247 RTVEC_ELT (v, i) = immed_double_int_const (tree_to_double_int (elt),
10248 inner);
10251 /* Initialize remaining elements to 0. */
10252 for (; i < units; ++i)
10253 RTVEC_ELT (v, i) = CONST0_RTX (inner);
10255 return gen_rtx_CONST_VECTOR (mode, v);
10259 /* Build a decl for a EH personality function named NAME. */
10261 tree
10262 build_personality_function (const char *name)
10264 tree decl, type;
10266 type = build_function_type_list (integer_type_node, integer_type_node,
10267 long_long_unsigned_type_node,
10268 ptr_type_node, ptr_type_node, NULL_TREE);
10269 decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
10270 get_identifier (name), type);
10271 DECL_ARTIFICIAL (decl) = 1;
10272 DECL_EXTERNAL (decl) = 1;
10273 TREE_PUBLIC (decl) = 1;
10275 /* Zap the nonsensical SYMBOL_REF_DECL for this. What we're left with
10276 are the flags assigned by targetm.encode_section_info. */
10277 SET_SYMBOL_REF_DECL (XEXP (DECL_RTL (decl), 0), NULL);
10279 return decl;
10282 /* Extracts the personality function of DECL and returns the corresponding
10283 libfunc. */
10285 rtx
10286 get_personality_function (tree decl)
10288 tree personality = DECL_FUNCTION_PERSONALITY (decl);
10289 enum eh_personality_kind pk;
10291 pk = function_needs_eh_personality (DECL_STRUCT_FUNCTION (decl));
10292 if (pk == eh_personality_none)
10293 return NULL;
10295 if (!personality
10296 && pk == eh_personality_any)
10297 personality = lang_hooks.eh_personality ();
10299 if (pk == eh_personality_lang)
10300 gcc_assert (personality != NULL_TREE);
10302 return XEXP (DECL_RTL (personality), 0);
10305 #include "gt-expr.h"