/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "machmode.h"
#include "real.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "except.h"
#include "function.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "recog.h"
#include "reload.h"
#include "output.h"
#include "typeclass.h"
#include "toplev.h"
#include "ggc.h"
#include "langhooks.h"
#include "intl.h"
#include "tm_p.h"
#include "tree-iterator.h"
#include "tree-pass.h"
#include "tree-flow.h"
#include "target.h"
#include "timevar.h"
#include "df.h"
#include "diagnostic.h"
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#ifndef PUSH_ARGS_REVERSED
#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif
#endif

#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};

/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */
struct store_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
  void *constfundata;
  int reverse;
};
static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
						     unsigned int,
						     unsigned int);
static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
			      struct move_by_pieces *);
static bool block_move_libcall_safe_for_call_parm (void);
static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned, unsigned, HOST_WIDE_INT);
static tree emit_block_move_libcall_fn (int);
static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
			       struct store_by_pieces *);
static tree clear_storage_libcall_fn (int);
static rtx compress_float_constant (rtx, rtx);
static rtx get_subtarget (rtx);
static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
				     HOST_WIDE_INT, enum machine_mode,
				     tree, tree, int, alias_set_type);
static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
			tree, tree, alias_set_type, bool);

static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (const_tree, const_tree);

static int is_aligning_offset (const_tree, const_tree);
static void expand_operands (tree, tree, rtx, rtx*, rtx*,
			     enum expand_modifier);
static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
static rtx do_store_flag (tree, rtx, enum machine_mode, int);
#ifdef PUSH_ROUNDING
static void emit_single_push_insn (enum machine_mode, rtx, tree);
#endif
static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
static rtx const_vector_from_tree (tree);
static void write_complex_part (rtx, rtx, bool);
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* Record for each mode whether we can float-extend from memory.  */

static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
   < (unsigned int) MOVE_RATIO)
#endif

/* This macro is used to determine whether clear_by_pieces should be
   called to clear storage.  */
#ifndef CLEAR_BY_PIECES_P
#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) CLEAR_RATIO)
#endif

/* This macro is used to determine whether store_by_pieces should be
   called to "memset" storage with byte values other than zero.  */
#ifndef SET_BY_PIECES_P
#define SET_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) SET_RATIO)
#endif

/* This macro is used to determine whether store_by_pieces should be
   called to "memcpy" storage when the source is a constant string.  */
#ifndef STORE_BY_PIECES_P
#define STORE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) MOVE_RATIO)
#endif
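
/* Illustrative sketch (not part of the original file): how the
   *_BY_PIECES_P predicates above are typically consulted.  A caller
   with a compile-time size and alignment asks the predicate first and
   only then commits to the by-pieces strategy; otherwise it falls back
   to the general block-move machinery.  The constants below (16 bytes,
   32-bit alignment) are hypothetical.  */
#if 0
static void
example_by_pieces_gate (rtx dst, rtx src)
{
  unsigned HOST_WIDE_INT size = 16;	/* hypothetical constant length */
  unsigned int align = 32;		/* hypothetical alignment in bits */

  if (MOVE_BY_PIECES_P (size, align))
    /* Cheap enough: expand to fewer than MOVE_RATIO scalar moves.  */
    move_by_pieces (dst, src, size, align, 0);
  else
    /* Too expensive: let emit_block_move pick a movmem pattern or a
       memcpy libcall instead.  */
    emit_block_move (dst, src, GEN_INT (size), BLOCK_OP_NORMAL);
}
#endif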
/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movmem_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block sets.  */
enum insn_code setmem_optab[NUM_MACHINE_MODES];

/* These arrays record the insn_code of three different kinds of insns
   to perform block compares.  */
enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
enum insn_code cmpstrn_optab[NUM_MACHINE_MODES];
enum insn_code cmpmem_optab[NUM_MACHINE_MODES];

/* Synchronization primitives.  */
enum insn_code sync_add_optab[NUM_MACHINE_MODES];
enum insn_code sync_sub_optab[NUM_MACHINE_MODES];
enum insn_code sync_ior_optab[NUM_MACHINE_MODES];
enum insn_code sync_and_optab[NUM_MACHINE_MODES];
enum insn_code sync_xor_optab[NUM_MACHINE_MODES];
enum insn_code sync_nand_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_add_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_sub_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_ior_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_and_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_xor_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_nand_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_add_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_sub_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_ior_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_and_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_xor_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_nand_optab[NUM_MACHINE_MODES];
enum insn_code sync_compare_and_swap[NUM_MACHINE_MODES];
enum insn_code sync_compare_and_swap_cc[NUM_MACHINE_MODES];
enum insn_code sync_lock_test_and_set[NUM_MACHINE_MODES];
enum insn_code sync_lock_release[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
/* This is run to set up which modes can be used
   directly in memory and to initialize the block move optab.  It is run
   at the beginning of compilation and when the target is reinitialized.  */

void
init_expr_target (void)
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;
  rtx reg;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (VOIDmode, -1);

  insn = rtx_alloc (INSN);
  pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);
      PUT_MODE (reg, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	       && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    SET_REGNO (reg, regno);

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum machine_mode srcmode;
      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
	   srcmode = GET_MODE_WIDER_MODE (srcmode))
	{
	  enum insn_code ic;

	  ic = can_extend_p (mode, srcmode, 0);
	  if (ic == CODE_FOR_nothing)
	    continue;

	  PUT_MODE (mem, srcmode);

	  if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
	    float_extend_from_mem[mode][srcmode] = true;
	}
    }
}
/* This is run at the start of compiling a function.  */

void
init_expr (void)
{
  cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
}
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating, or both may be
   fixed-point.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (rtx to, rtx from, int unsignedp)
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = SCALAR_FLOAT_MODE_P (to_mode);
  int from_real = SCALAR_FLOAT_MODE_P (from_mode);
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
			      : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));

  gcc_assert (to_real == from_real);
  gcc_assert (to_mode != BLKmode);
  gcc_assert (from_mode != BLKmode);

  /* If the source and destination are already the same, then there's
     nothing to do.  */
  if (to == from)
    return;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));

      if (VECTOR_MODE_P (to_mode))
	from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
	to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);
      return;
    }

  if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
    {
      convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
      convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
      return;
    }

  if (to_real)
    {
      rtx value, insns;
      convert_optab tab;

      gcc_assert ((GET_MODE_PRECISION (from_mode)
		   != GET_MODE_PRECISION (to_mode))
		  || (DECIMAL_FLOAT_MODE_P (from_mode)
		      != DECIMAL_FLOAT_MODE_P (to_mode)));

      if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
	/* Conversion between decimal float and binary float, same size.  */
	tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
      else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
	tab = sext_optab;
      else
	tab = trunc_optab;

      /* Try converting directly if the insn is supported.  */

      code = convert_optab_handler (tab, to_mode, from_mode)->insn_code;
      if (code != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from,
			  tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
	  return;
	}

      /* Otherwise use a libcall.  */
      libcall = convert_optab_libfunc (tab, to_mode, from_mode);

      /* Is this conversion implemented yet?  */
      gcc_assert (libcall);

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
				       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value,
			  tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
								       from)
			  : gen_rtx_FLOAT_EXTEND (to_mode, from));
      return;
    }

  /* Handle pointer conversion.  */			/* SPEE 900220.  */
  /* Targets are expected to provide conversion insns between PxImode and
     xImode for all MODE_PARTIAL_INT modes they use, but no others.  */
  if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
    {
      enum machine_mode full_mode
	= smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);

      gcc_assert (convert_optab_handler (trunc_optab, to_mode, full_mode)->insn_code
		  != CODE_FOR_nothing);

      if (full_mode != from_mode)
	from = convert_to_mode (full_mode, from, unsignedp);
      emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, full_mode)->insn_code,
		      to, from, UNKNOWN);
      return;
    }
  if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
    {
      rtx new_from;
      enum machine_mode full_mode
	= smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);

      gcc_assert (convert_optab_handler (sext_optab, full_mode, from_mode)->insn_code
		  != CODE_FOR_nothing);

      if (to_mode == full_mode)
	{
	  emit_unop_insn (convert_optab_handler (sext_optab, full_mode, from_mode)->insn_code,
			  to, from, UNKNOWN);
	  return;
	}

      new_from = gen_reg_rtx (full_mode);
      emit_unop_insn (convert_optab_handler (sext_optab, full_mode, from_mode)->insn_code,
		      new_from, from, UNKNOWN);

      /* else proceed to integer conversions below.  */
      from_mode = full_mode;
      from = new_from;
    }

  /* Make sure both are fixed-point modes or both are not.  */
  gcc_assert (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode) ==
	      ALL_SCALAR_FIXED_POINT_MODE_P (to_mode));
  if (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode))
    {
      /* If we widen from_mode to to_mode and they are in the same class,
	 we won't saturate the result.
	 Otherwise, always saturate the result to play safe.  */
      if (GET_MODE_CLASS (from_mode) == GET_MODE_CLASS (to_mode)
	  && GET_MODE_SIZE (from_mode) < GET_MODE_SIZE (to_mode))
	expand_fixed_convert (to, from, 0, 0);
      else
	expand_fixed_convert (to, from, 0, 1);
      return;
    }

  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  if (REG_P (to))
	    {
	      if (reg_overlap_mentioned_p (to, from))
		from = force_reg (from_mode, from);
	      emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
	    }
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	{
#ifdef HAVE_slt
	  if (HAVE_slt
	      && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
	      && STORE_FLAG_VALUE == -1)
	    {
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			     lowpart_mode, 0);
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));
	    }
	  else
#endif
	    {
	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				NULL_RTX, 0);
	      fill_value = convert_to_mode (word_mode, fill_value, 1);
	    }
	}

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  gcc_assert (subword);

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
      return;
    }

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((MEM_P (from)
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || REG_P (from)
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((MEM_P (from)
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || REG_P (from)
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;
	  rtx tmp;
	  tree shift_amount;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
					       GET_MODE_BITSIZE (intermediate))))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.
	     Generate what we need with shifts.  */
	  shift_amount = build_int_cst (NULL_TREE,
					GET_MODE_BITSIZE (to_mode)
					- GET_MODE_BITSIZE (from_mode));
	  from = gen_lowpart (to_mode, force_reg (from_mode, from));
	  tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
			      to, unsignedp);
	  tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
			      to, unsignedp);
	  if (tmp != to)
	    emit_move_insn (to, tmp);
	  return;
	}
    }

  /* Support special truncate insns for certain modes.  */
  if (convert_optab_handler (trunc_optab, to_mode, from_mode)->insn_code != CODE_FOR_nothing)
    {
      emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, from_mode)->insn_code,
		      to, from, UNKNOWN);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.

     ??? Code above formerly short-circuited this, for most integer
     mode pairs, with a force_reg in from_mode followed by a recursive
     call to this routine.  Appears always to have been wrong.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  gcc_unreachable ();
}
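
/* Illustrative sketch (not part of the original file): a typical
   convert_move call site.  It widens a SImode pseudo into a DImode
   pseudo; with UNSIGNEDP nonzero the high part is zero- rather than
   sign-filled, matching the equiv_code selection above.  */
#if 0
static rtx
example_widen_si_to_di (rtx si_reg)
{
  rtx di_reg = gen_reg_rtx (DImode);
  convert_move (di_reg, si_reg, 1);	/* 1 = treat the source as unsigned */
  return di_reg;
}
#endif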
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.  */

rtx
convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}
/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.  */

rtx
convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
{
  rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
	  && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
	{
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We need to zero extend VAL.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	}

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (REG_P (x)
			  && (! HARD_REGISTER_P (x)
			      || HARD_REGNO_MODE_OK (REGNO (x), mode))
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ??? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.  This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return gen_int_mode (val, mode);
	}

      return gen_lowpart (mode, x);
    }

  /* Converting from an integer constant into MODE is always equivalent to a
     subreg operation.  */
  if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
    {
      gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
      return simplify_gen_subreg (mode, x, oldmode, 0);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
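
/* Illustrative sketch (not part of the original file): convert_modes
   with a VOIDmode constant.  Because a CONST_INT carries no mode, the
   caller supplies OLDMODE explicitly so the sign/zero-extension logic
   above can be resolved.  */
#if 0
static rtx
example_narrow_constant (void)
{
  /* Interpret -1 as an unsigned QImode value, then widen to SImode:
     the result is (const_int 255) rather than (const_int -1).  */
  return convert_modes (SImode, QImode, constm1_rtx, 1);
}
#endif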
/* STORE_MAX_PIECES is the number of bytes at a time that we can
   store efficiently.  Due to internal GCC limitations, this is
   MOVE_MAX_PIECES limited by the number of bytes GCC can represent
   for an immediate constant.  */

#define STORE_MAX_PIECES  MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))

/* Determine whether the LEN bytes can be moved by using several move
   instructions.  Return nonzero if a call to move_by_pieces should
   succeed.  */

int
can_move_by_pieces (unsigned HOST_WIDE_INT len,
		    unsigned int align ATTRIBUTE_UNUSED)
{
  return MOVE_BY_PIECES_P (len, align);
}
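
/* Illustrative sketch (not part of the original file): using the ENDP
   argument of move_by_pieces.  With ENDP == 1 the call returns a MEM
   one past the last byte written (mempcpy-style); with ENDP == 2 it
   returns a MEM at the last byte (stpcpy-style).  Length and alignment
   below are hypothetical.  */
#if 0
static rtx
example_mempcpy_by_pieces (rtx dst, rtx src)
{
  unsigned HOST_WIDE_INT len = 8;	/* hypothetical constant length */
  unsigned int align = BITS_PER_UNIT;	/* assume only byte alignment */

  gcc_assert (can_move_by_pieces (len, align));
  return move_by_pieces (dst, src, len, align, 1);
}
#endif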
/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum stack alignment we can assume.

   If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory the end minus one byte ala
   stpcpy.  */

rtx
move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
		unsigned int align, int endp)
{
  struct move_by_pieces data;
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));

  data.offset = 0;
  data.from_addr = from_addr;
  if (to)
    {
      to_addr = XEXP (to, 0);
      data.to = to;
      data.autinc_to
	= (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
	   || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
      data.reverse
	= (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
    }
  else
    {
      to_addr = NULL_RTX;
      data.to = NULL_RTX;
      data.autinc_to = 1;
#ifdef STACK_GROWS_DOWNWARD
      data.reverse = 1;
#else
      data.reverse = 0;
#endif
    }
  data.to_addr = to_addr;
  data.from = from;
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align, max_size) > 2)
    {
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      enum machine_mode xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
	   tmode != VOIDmode;
	   xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
	    || SLOW_UNALIGNED_ACCESS (tmode, align))
	  break;

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = optab_handler (mov_optab, mode)->insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  gcc_assert (!data.len);

  if (endp)
    {
      rtx to1;

      gcc_assert (!data.reverse);
      if (data.autinc_to)
	{
	  if (endp == 2)
	    {
	      if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
		emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
	      else
		data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
								-1));
	    }
	  to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
					   data.offset);
	}
      else
	{
	  if (endp == 2)
	    --data.offset;
	  to1 = adjust_address (data.to, QImode, data.offset);
	}
      return to1;
    }
  else
    return data.to;
}
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */

static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
		       unsigned int max_size)
{
  unsigned HOST_WIDE_INT n_insns = 0;
  enum machine_mode tmode;

  tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      enum machine_mode tmode, xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
	   tmode != VOIDmode;
	   xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
	    || SLOW_UNALIGNED_ACCESS (tmode, align))
	  break;

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = optab_handler (mov_optab, mode)->insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  gcc_assert (!l);
  return n_insns;
}
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
		  struct move_by_pieces *data)
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
    {
      if (data->reverse)
	data->offset -= size;

      if (data->to)
	{
	  if (data->autinc_to)
	    to1 = adjust_automodify_address (data->to, mode, data->to_addr,
					     data->offset);
	  else
	    to1 = adjust_address (data->to, mode, data->offset);
	}

      if (data->autinc_from)
	from1 = adjust_automodify_address (data->from, mode, data->from_addr,
					   data->offset);
      else
	from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));

      if (data->to)
	emit_insn ((*genfun) (to1, from1));
      else
	{
#ifdef PUSH_ROUNDING
	  emit_single_push_insn (mode, from1, NULL);
#else
	  gcc_unreachable ();
#endif
	}

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
	data->offset += size;

      data->len -= size;
    }
}
/* Emit code to move a block Y to a block X.  This may be done with
   string-move instructions, with multiple scalar move instructions,
   or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.
   METHOD describes what kind of copy this is, and what mechanisms may be used.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method,
		       unsigned int expected_align, HOST_WIDE_INT expected_size)
{
  bool may_use_call;
  rtx retval = 0;
  unsigned int align;

  switch (method)
    {
    case BLOCK_OP_NORMAL:
    case BLOCK_OP_TAILCALL:
      may_use_call = true;
      break;

    case BLOCK_OP_CALL_PARM:
      may_use_call = block_move_libcall_safe_for_call_parm ();

      /* Make inhibit_defer_pop nonzero around the library call
	 to force it to pop the arguments right away.  */
      NO_DEFER_POP;
      break;

    case BLOCK_OP_NO_LIBCALL:
      may_use_call = false;
      break;

    default:
      gcc_unreachable ();
    }

  align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));

  gcc_assert (MEM_P (x));
  gcc_assert (MEM_P (y));
  gcc_assert (size);

  /* Make sure we've got BLKmode addresses; store_one_arg can decide that
     block copy is more efficient for other large modes, e.g. DCmode.  */
  x = adjust_address (x, BLKmode, 0);
  y = adjust_address (y, BLKmode, 0);

  /* Set MEM_SIZE as appropriate for this block copy.  The main place this
     can be incorrect is coming from __builtin_memcpy.  */
  if (GET_CODE (size) == CONST_INT)
    {
      if (INTVAL (size) == 0)
	return 0;

      x = shallow_copy_rtx (x);
      y = shallow_copy_rtx (y);
      set_mem_size (x, size);
      set_mem_size (y, size);
    }

  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align, 0);
  else if (emit_block_move_via_movmem (x, y, size, align,
				       expected_align, expected_size))
    ;
  else if (may_use_call)
    retval = emit_block_move_via_libcall (x, y, size,
					  method == BLOCK_OP_TAILCALL);
  else
    emit_block_move_via_loop (x, y, size, align);

  if (method == BLOCK_OP_CALL_PARM)
    OK_DEFER_POP;

  return retval;
}
rtx
emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
{
  return emit_block_move_hints (x, y, size, method, 0, -1);
}
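
/* Illustrative sketch (not part of the original file): the common way
   this entry point is used.  DST_ADDR and SRC_ADDR address aggregate
   objects; wrapping them in BLKmode MEMs and passing a CONST_INT size
   leaves emit_block_move free to pick by-pieces, a movmem pattern, or
   a memcpy libcall.  The helper name is hypothetical.  */
#if 0
static void
example_copy_struct (rtx dst_addr, rtx src_addr, HOST_WIDE_INT size)
{
  rtx dst = gen_rtx_MEM (BLKmode, force_reg (Pmode, dst_addr));
  rtx src = gen_rtx_MEM (BLKmode, force_reg (Pmode, src_addr));

  emit_block_move (dst, src, GEN_INT (size), BLOCK_OP_NORMAL);
}
#endif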
/* A subroutine of emit_block_move.  Returns true if calling the
   block move libcall will not clobber any parameters which may have
   already been placed on the stack.  */

static bool
block_move_libcall_safe_for_call_parm (void)
{
  /* If arguments are pushed on the stack, then they're safe.  */
  if (PUSH_ARGS)
    return true;

  /* If registers go on the stack anyway, any argument is sure to clobber
     an outgoing argument.  */
#if defined (REG_PARM_STACK_SPACE)
  if (OUTGOING_REG_PARM_STACK_SPACE)
    {
      tree fn;
      fn = emit_block_move_libcall_fn (false);
      if (REG_PARM_STACK_SPACE (fn) != 0)
	return false;
    }
#endif

  /* If any argument goes in memory, then it might clobber an outgoing
     argument.  */
  {
    CUMULATIVE_ARGS args_so_far;
    tree fn, arg;

    fn = emit_block_move_libcall_fn (false);
    INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);

    arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
    for ( ; arg != void_list_node; arg = TREE_CHAIN (arg))
      {
	enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
	rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
	if (!tmp || !REG_P (tmp))
	  return false;
	if (targetm.calls.arg_partial_bytes (&args_so_far, mode, NULL, 1))
	  return false;
	FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
      }
  }
  return true;
}
/* A subroutine of emit_block_move.  Expand a movmem pattern;
   return true if successful.  */

static bool
emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
			    unsigned int expected_align, HOST_WIDE_INT expected_size)
{
  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
  int save_volatile_ok = volatile_ok;
  enum machine_mode mode;

  if (expected_align < align)
    expected_align = align;

  /* Since this is a move insn, we don't care about volatility.  */
  volatile_ok = 1;

  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum insn_code code = movmem_optab[(int) mode];
      insn_operand_predicate_fn pred;

      if (code != CODE_FOR_nothing
	  /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
	     here because if SIZE is less than the mode mask, as it is
	     returned by the macro, it will definitely be less than the
	     actual mode mask.  */
	  && ((GET_CODE (size) == CONST_INT
	       && ((unsigned HOST_WIDE_INT) INTVAL (size)
		   <= (GET_MODE_MASK (mode) >> 1)))
	      || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
	  && ((pred = insn_data[(int) code].operand[0].predicate) == 0
	      || (*pred) (x, BLKmode))
	  && ((pred = insn_data[(int) code].operand[1].predicate) == 0
	      || (*pred) (y, BLKmode))
	  && ((pred = insn_data[(int) code].operand[3].predicate) == 0
	      || (*pred) (opalign, VOIDmode)))
	{
	  rtx op2;
	  rtx last = get_last_insn ();
	  rtx pat;

	  op2 = convert_to_mode (mode, size, 1);
	  pred = insn_data[(int) code].operand[2].predicate;
	  if (pred != 0 && ! (*pred) (op2, mode))
	    op2 = copy_to_mode_reg (mode, op2);

	  /* ??? When called via emit_block_move_for_call, it'd be
	     nice if there were some way to inform the backend, so
	     that it doesn't fail the expansion because it thinks
	     emitting the libcall would be more efficient.  */

	  if (insn_data[(int) code].n_operands == 4)
	    pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	  else
	    pat = GEN_FCN ((int) code) (x, y, op2, opalign,
					GEN_INT (expected_align),
					GEN_INT (expected_size));
	  if (pat)
	    {
	      emit_insn (pat);
	      volatile_ok = save_volatile_ok;
	      return true;
	    }
	  else
	    delete_insns_since (last);
	}
    }

  volatile_ok = save_volatile_ok;
  return false;
}
/* A subroutine of emit_block_move.  Expand a call to memcpy.
   Return the return value from memcpy, 0 otherwise.  */

rtx
emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
{
  rtx dst_addr, src_addr;
  tree call_expr, fn, src_tree, dst_tree, size_tree;
  enum machine_mode size_mode;
  rtx retval;

  /* Emit code to copy the addresses of DST and SRC and SIZE into new
     pseudos.  We can then place those new pseudos into a VAR_DECL and
     use them later.  */

  dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
  src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));

  dst_addr = convert_memory_address (ptr_mode, dst_addr);
  src_addr = convert_memory_address (ptr_mode, src_addr);

  dst_tree = make_tree (ptr_type_node, dst_addr);
  src_tree = make_tree (ptr_type_node, src_addr);

  size_mode = TYPE_MODE (sizetype);

  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memcpy in this context.  This could be a user call to memcpy and
     the user may wish to examine the return value from memcpy.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.  */

  size_tree = make_tree (sizetype, size);

  fn = emit_block_move_libcall_fn (true);
  call_expr = build_call_expr (fn, 3, dst_tree, src_tree, size_tree);
  CALL_EXPR_TAILCALL (call_expr) = tailcall;

  retval = expand_normal (call_expr);

  return retval;
}
/* A subroutine of emit_block_move_via_libcall.  Create the tree node
   for the function we use for block copies.  The first time FOR_CALL
   is true, we call assemble_external.  */

static GTY(()) tree block_move_fn;

void
init_block_move_fn (const char *asmspec)
{
  if (!block_move_fn)
    {
      tree args, fn;

      fn = get_identifier ("memcpy");
      args = build_function_type_list (ptr_type_node, ptr_type_node,
				       const_ptr_type_node, sizetype,
				       NULL_TREE);

      fn = build_decl (FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;
      DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
      DECL_VISIBILITY_SPECIFIED (fn) = 1;

      block_move_fn = fn;
    }

  if (asmspec)
    set_user_assembler_name (block_move_fn, asmspec);
}

static tree
emit_block_move_libcall_fn (int for_call)
{
  static bool emitted_extern;

  if (!block_move_fn)
    init_block_move_fn (NULL);

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (block_move_fn);
      assemble_external (block_move_fn);
    }

  return block_move_fn;
}
/* A subroutine of emit_block_move.  Copy the data via an explicit
   loop.  This is used only when libcalls are forbidden.  */
/* ??? It'd be nice to copy in hunks larger than QImode.  */

static void
emit_block_move_via_loop (rtx x, rtx y, rtx size,
			  unsigned int align ATTRIBUTE_UNUSED)
{
  rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
  enum machine_mode iter_mode;

  iter_mode = GET_MODE (size);
  if (iter_mode == VOIDmode)
    iter_mode = word_mode;

  top_label = gen_label_rtx ();
  cmp_label = gen_label_rtx ();
  iter = gen_reg_rtx (iter_mode);

  emit_move_insn (iter, const0_rtx);

  x_addr = force_operand (XEXP (x, 0), NULL_RTX);
  y_addr = force_operand (XEXP (y, 0), NULL_RTX);
  do_pending_stack_adjust ();

  emit_jump (cmp_label);
  emit_label (top_label);

  tmp = convert_modes (Pmode, iter_mode, iter, true);
  x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
  y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
  x = change_address (x, QImode, x_addr);
  y = change_address (y, QImode, y_addr);

  emit_move_insn (x, y);

  tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
			     true, OPTAB_LIB_WIDEN);
  if (tmp != iter)
    emit_move_insn (iter, tmp);

  emit_label (cmp_label);

  emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
			   true, top_label);
}
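
/* Illustrative sketch (not part of the original file): forcing the
   loop fallback above.  BLOCK_OP_NO_LIBCALL forbids the memcpy call,
   so when no movmem pattern applies and the size is not a small
   constant, emit_block_move falls through to emit_block_move_via_loop
   and copies one QImode unit per iteration.  */
#if 0
static void
example_copy_without_libcall (rtx dst, rtx src, rtx size)
{
  emit_block_move (dst, src, size, BLOCK_OP_NO_LIBCALL);
}
#endif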
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
{
  int i;
#ifdef HAVE_load_multiple
  rtx pat;
  rtx last;
#endif

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
			       GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}
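
/* Illustrative sketch (not part of the original file): loading a
   multi-word value into consecutive hard registers.  For a DImode
   value on a 32-bit target this fills two word-sized registers
   starting at REGNO, using load_multiple when the target has it.
   The helper name is hypothetical.  */
#if 0
static void
example_load_arg_regs (int regno, rtx x)
{
  int nregs = (GET_MODE_SIZE (DImode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
  move_block_to_reg (regno, x, nregs, DImode);
}
#endif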
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_from_reg (int regno, rtx x, int nregs)
{
  int i;

  if (nregs == 0)
    return;

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      rtx last = get_last_insn ();
      rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
				    GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      gcc_assert (tem);

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}
/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
   ORIG, where ORIG is a non-consecutive group of registers represented by
   a PARALLEL.  The clone is identical to the original except in that the
   original set of registers is replaced by a new set of pseudo registers.
   The new set has the same modes as the original set.  */

rtx
gen_group_rtx (rtx orig)
{
  int i, length;
  rtx *tmps;

  gcc_assert (GET_CODE (orig) == PARALLEL);

  length = XVECLEN (orig, 0);
  tmps = alloca (sizeof (rtx) * length);

  /* Skip a NULL entry in first slot.  */
  i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;

  if (i)
    tmps[0] = 0;

  for (; i < length; i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
      rtx offset = XEXP (XVECEXP (orig, 0, i), 1);

      tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
    }

  return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
}
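
/* Illustrative sketch (not part of the original file): the PARALLEL
   format consumed by gen_group_rtx and the emit_group_* routines.
   Each element is an EXPR_LIST of (register, byte offset); the example
   describes a 16-byte value whose halves live in two DImode registers.
   The register choices are hypothetical.  */
#if 0
static rtx
example_two_reg_group (void)
{
  rtx r0 = gen_reg_rtx (DImode);
  rtx r1 = gen_reg_rtx (DImode);
  rtx group = gen_rtx_PARALLEL
    (BLKmode,
     gen_rtvec (2,
		gen_rtx_EXPR_LIST (VOIDmode, r0, GEN_INT (0)),
		gen_rtx_EXPR_LIST (VOIDmode, r1, GEN_INT (8))));

  /* A fresh clone with the same shape but new pseudos.  */
  return gen_group_rtx (group);
}
#endif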
/* A subroutine of emit_group_load.  Arguments as for emit_group_load,
   except that values are placed in TMPS[i], and must later be moved
   into corresponding XEXP (XVECEXP (DST, 0, i), 0) element.  */

static void
emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
{
  rtx src;
  int start, i;
  enum machine_mode m = GET_MODE (orig_src);

  gcc_assert (GET_CODE (dst) == PARALLEL);

  if (m != VOIDmode
      && !SCALAR_INT_MODE_P (m)
      && !MEM_P (orig_src)
      && GET_CODE (orig_src) != CONCAT)
    {
      enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
      if (imode == BLKmode)
	src = assign_stack_temp (GET_MODE (orig_src), ssize, 0);
      else
	src = gen_reg_rtx (imode);
      if (imode != BLKmode)
	src = gen_lowpart (GET_MODE (orig_src), src);
      emit_move_insn (src, orig_src);
      /* ...and back again.  */
      if (imode != BLKmode)
	src = gen_lowpart (imode, src);
      emit_group_load_1 (tmps, dst, src, type, ssize);
      return;
    }

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (dst, 0, 0), 0))
    start = 0;
  else
    start = 1;

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
      unsigned int bytelen = GET_MODE_SIZE (mode);
      int shift = 0;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
	{
	  /* Arrange to shift the fragment to where it belongs.
	     extract_bit_field loads to the lsb of the reg.  */
	  if (
#ifdef BLOCK_REG_PADDING
	      BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
	      == (BYTES_BIG_ENDIAN ? upward : downward)
#else
	      BYTES_BIG_ENDIAN
#endif
	      )
	    shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	  bytelen = ssize - bytepos;
	  gcc_assert (bytelen > 0);
	}

      /* If we won't be loading directly from memory, protect the real source
	 from strange tricks we might play; but make sure that the source can
	 be loaded directly into the destination.  */
      src = orig_src;
      if (!MEM_P (orig_src)
	  && (!CONSTANT_P (orig_src)
	      || (GET_MODE (orig_src) != mode
		  && GET_MODE (orig_src) != VOIDmode)))
	{
	  if (GET_MODE (orig_src) == VOIDmode)
	    src = gen_reg_rtx (mode);
	  else
	    src = gen_reg_rtx (GET_MODE (orig_src));

	  emit_move_insn (src, orig_src);
	}

      /* Optimize the access just a bit.  */
      if (MEM_P (src)
	  && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
	      || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
	  && bytelen == GET_MODE_SIZE (mode))
	{
	  tmps[i] = gen_reg_rtx (mode);
	  emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
	}
      else if (COMPLEX_MODE_P (mode)
	       && GET_MODE (src) == mode
	       && bytelen == GET_MODE_SIZE (mode))
	/* Let emit_move_complex do the bulk of the work.  */
	tmps[i] = src;
      else if (GET_CODE (src) == CONCAT)
	{
	  unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
	  unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));

	  if ((bytepos == 0 && bytelen == slen0)
	      || (bytepos != 0 && bytepos + bytelen <= slen))
	    {
	      /* The following assumes that the concatenated objects all
		 have the same size.  In this case, a simple calculation
		 can be used to determine the object and the bit field
		 to be extracted.  */
	      tmps[i] = XEXP (src, bytepos / slen0);
	      if (! CONSTANT_P (tmps[i])
		  && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
		tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
					     (bytepos % slen0) * BITS_PER_UNIT,
					     1, NULL_RTX, mode, mode);
	    }
	  else
	    {
	      rtx mem;

	      gcc_assert (!bytepos);
	      mem = assign_stack_temp (GET_MODE (src), slen, 0);
	      emit_move_insn (mem, src);
	      tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
					   0, 1, NULL_RTX, mode, mode);
	    }
	}
      /* FIXME: A SIMD parallel will eventually lead to a subreg of a
	 SIMD register, which is currently broken.  While we get GCC
	 to emit proper RTL for these cases, let's dump to memory.  */
      else if (VECTOR_MODE_P (GET_MODE (dst))
	       && REG_P (src))
	{
	  int slen = GET_MODE_SIZE (GET_MODE (src));
	  rtx mem;

	  mem = assign_stack_temp (GET_MODE (src), slen, 0);
	  emit_move_insn (mem, src);
	  tmps[i] = adjust_address (mem, mode, (int) bytepos);
	}
      else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
	       && XVECLEN (dst, 0) > 1)
	tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
      else if (CONSTANT_P (src))
	{
	  HOST_WIDE_INT len = (HOST_WIDE_INT) bytelen;

	  if (len == ssize)
	    tmps[i] = src;
	  else
	    {
	      rtx first, second;

	      gcc_assert (2 * len == ssize);
	      split_double (src, &first, &second);
	      if (i)
		tmps[i] = second;
	      else
		tmps[i] = first;
	    }
	}
      else if (REG_P (src) && GET_MODE (src) == mode)
	tmps[i] = src;
      else
	tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
				     bytepos * BITS_PER_UNIT, 1, NULL_RTX,
				     mode, mode);

      if (shift)
	tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
				build_int_cst (NULL_TREE, shift), tmps[i], 0);
    }
}
/* Emit code to move a block SRC of type TYPE to a block DST,
   where DST is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_SRC in bytes, or -1
   if not known.  */

void
emit_group_load (rtx dst, rtx src, tree type, int ssize)
{
  rtx *tmps;
  int i;

  tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));
  emit_group_load_1 (tmps, dst, src, type, ssize);

  /* Copy the extracted pieces into the proper (probable) hard regs.  */
  for (i = 0; i < XVECLEN (dst, 0); i++)
    {
      rtx d = XEXP (XVECEXP (dst, 0, i), 0);
      if (d == NULL)
	continue;
      emit_move_insn (d, tmps[i]);
    }
}
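
/* Illustrative sketch (not part of the original file): round-tripping
   a BLKmode object through a register group.  emit_group_load splits
   the memory block across the group's registers; emit_group_store
   (defined below) writes such a group back to memory.  */
#if 0
static void
example_group_round_trip (rtx group, rtx mem, tree type, int size)
{
  emit_group_load (group, mem, type, size);	/* memory -> registers */
  emit_group_store (mem, group, type, size);	/* registers -> memory */
}
#endif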
1835 /* Similar, but load SRC into new pseudos in a format that looks like
1836 PARALLEL. This can later be fed to emit_group_move to get things
1837 in the right place. */
1840 emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
1842 rtvec vec;
1843 int i;
1845 vec = rtvec_alloc (XVECLEN (parallel, 0));
1846 emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);
1848 /* Convert the vector to look just like the original PARALLEL, except
1849 with the computed values. */
1850 for (i = 0; i < XVECLEN (parallel, 0); i++)
1852 rtx e = XVECEXP (parallel, 0, i);
1853 rtx d = XEXP (e, 0);
1855 if (d)
1857 d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
1858 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
1860 RTVEC_ELT (vec, i) = e;
1863 return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
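/* Sketch of the intended two-step use (hedged; see the callers in
   calls.c for the real thing): load the value into fresh pseudos
   first, then move the pseudos into the hard registers only when it
   is safe to clobber them:

     rtx tmp = emit_group_load_into_temps (parallel, src, type, ssize);
     ... emit other setup code ...
     emit_group_move (parallel, tmp);

   This keeps the hard registers live over as short a span as
   possible.  */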
1866 /* Emit code to move a block SRC to block DST, where SRC and DST are
1867 non-consecutive groups of registers, each represented by a PARALLEL. */
1869 void
1870 emit_group_move (rtx dst, rtx src)
1872 int i;
1874 gcc_assert (GET_CODE (src) == PARALLEL
1875 && GET_CODE (dst) == PARALLEL
1876 && XVECLEN (src, 0) == XVECLEN (dst, 0));
1878 /* Skip first entry if NULL. */
1879 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1880 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1881 XEXP (XVECEXP (src, 0, i), 0));
1884 /* Move a group of registers represented by a PARALLEL into pseudos. */
1886 rtx
1887 emit_group_move_into_temps (rtx src)
1889 rtvec vec = rtvec_alloc (XVECLEN (src, 0));
1890 int i;
1892 for (i = 0; i < XVECLEN (src, 0); i++)
1894 rtx e = XVECEXP (src, 0, i);
1895 rtx d = XEXP (e, 0);
1897 if (d)
1898 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
1899 RTVEC_ELT (vec, i) = e;
1902 return gen_rtx_PARALLEL (GET_MODE (src), vec);
1905 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1906 where SRC is non-consecutive registers represented by a PARALLEL.
1907 SSIZE represents the total size of block ORIG_DST, or -1 if not
1908 known. */
1910 void
1911 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
1913 rtx *tmps, dst;
1914 int start, finish, i;
1915 enum machine_mode m = GET_MODE (orig_dst);
1917 gcc_assert (GET_CODE (src) == PARALLEL);
1919 if (!SCALAR_INT_MODE_P (m)
1920 && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
1922 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
1923 if (imode == BLKmode)
1924 dst = assign_stack_temp (GET_MODE (orig_dst), ssize, 0);
1925 else
1926 dst = gen_reg_rtx (imode);
1927 emit_group_store (dst, src, type, ssize);
1928 if (imode != BLKmode)
1929 dst = gen_lowpart (GET_MODE (orig_dst), dst);
1930 emit_move_insn (orig_dst, dst);
1931 return;
1934 /* Check for a NULL entry, used to indicate that the parameter goes
1935 both on the stack and in registers. */
1936 if (XEXP (XVECEXP (src, 0, 0), 0))
1937 start = 0;
1938 else
1939 start = 1;
1940 finish = XVECLEN (src, 0);
1942 tmps = alloca (sizeof (rtx) * finish);
1944 /* Copy the (probable) hard regs into pseudos. */
1945 for (i = start; i < finish; i++)
1947 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1948 if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
1950 tmps[i] = gen_reg_rtx (GET_MODE (reg));
1951 emit_move_insn (tmps[i], reg);
1953 else
1954 tmps[i] = reg;
1957 /* If we won't be storing directly into memory, protect the real destination
1958 from strange tricks we might play. */
1959 dst = orig_dst;
1960 if (GET_CODE (dst) == PARALLEL)
1962 rtx temp;
1964 /* We can get a PARALLEL dst if there is a conditional expression in
1965 a return statement. In that case, the dst and src are the same,
1966 so no action is necessary. */
1967 if (rtx_equal_p (dst, src))
1968 return;
1970 /* It is unclear if we can ever reach here, but we may as well handle
1971 it. Allocate a temporary, and split this into a store/load to/from
1972 the temporary. */
1974 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
1975 emit_group_store (temp, src, type, ssize);
1976 emit_group_load (dst, temp, type, ssize);
1977 return;
1979 else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
1981 enum machine_mode outer = GET_MODE (dst);
1982 enum machine_mode inner;
1983 HOST_WIDE_INT bytepos;
1984 bool done = false;
1985 rtx temp;
1987 if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
1988 dst = gen_reg_rtx (outer);
1990 /* Make life a bit easier for combine. */
1991 /* If the first element of the vector is the low part
1992 of the destination mode, use a paradoxical subreg to
1993 initialize the destination. */
1994 if (start < finish)
1996 inner = GET_MODE (tmps[start]);
1997 bytepos = subreg_lowpart_offset (inner, outer);
1998 if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
2000 temp = simplify_gen_subreg (outer, tmps[start],
2001 inner, 0);
2002 if (temp)
2004 emit_move_insn (dst, temp);
2005 done = true;
2006 start++;
2011 /* If the first element wasn't the low part, try the last. */
2012 if (!done
2013 && start < finish - 1)
2015 inner = GET_MODE (tmps[finish - 1]);
2016 bytepos = subreg_lowpart_offset (inner, outer);
2017 if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
2019 temp = simplify_gen_subreg (outer, tmps[finish - 1],
2020 inner, 0);
2021 if (temp)
2023 emit_move_insn (dst, temp);
2024 done = true;
2025 finish--;
2030 /* Otherwise, simply initialize the result to zero. */
2031 if (!done)
2032 emit_move_insn (dst, CONST0_RTX (outer));
2035 /* Process the pieces. */
2036 for (i = start; i < finish; i++)
2038 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2039 enum machine_mode mode = GET_MODE (tmps[i]);
2040 unsigned int bytelen = GET_MODE_SIZE (mode);
2041 rtx dest = dst;
2043 /* Handle trailing fragments that run over the size of the struct. */
2044 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2046 /* store_bit_field always takes its value from the lsb.
2047 Move the fragment to the lsb if it's not already there. */
2048 if (
2049 #ifdef BLOCK_REG_PADDING
2050 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2051 == (BYTES_BIG_ENDIAN ? upward : downward)
2052 #else
2053 BYTES_BIG_ENDIAN
2054 #endif
2057 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2058 tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
2059 build_int_cst (NULL_TREE, shift),
2060 tmps[i], 0);
2062 bytelen = ssize - bytepos;
2065 if (GET_CODE (dst) == CONCAT)
2067 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2068 dest = XEXP (dst, 0);
2069 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2071 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2072 dest = XEXP (dst, 1);
2074 else
2076 gcc_assert (bytepos == 0 && XVECLEN (src, 0));
2077 dest = assign_stack_temp (GET_MODE (dest),
2078 GET_MODE_SIZE (GET_MODE (dest)), 0);
2079 emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
2080 tmps[i]);
2081 dst = dest;
2082 break;
2086 /* Optimize the access just a bit. */
2087 if (MEM_P (dest)
2088 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2089 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2090 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2091 && bytelen == GET_MODE_SIZE (mode))
2092 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2093 else
2094 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2095 mode, tmps[i]);
2098 /* Copy from the pseudo into the (probable) hard reg. */
2099 if (orig_dst != dst)
2100 emit_move_insn (orig_dst, dst);
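/* Usage sketch (illustrative only): the store-side counterpart of the
   emit_group_load example above, copying a value held in the PARALLEL
   SRC_PAR (each EXPR_LIST pairing a register with its byte offset)
   into an 8-byte memory block DST_MEM of type TYPE:

     emit_group_store (dst_mem, src_par, type, 8);  */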
2103 /* Generate code to copy a BLKmode object of TYPE out of a
2104 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2105 is null, a stack temporary is created. TGTBLK is returned.
2107 The purpose of this routine is to handle functions that return
2108 BLKmode structures in registers. Some machines (the PA for example)
2109 want to return all small structures in registers regardless of the
2110 structure's alignment. */
2112 rtx
2113 copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
2115 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2116 rtx src = NULL, dst = NULL;
2117 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2118 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2120 if (tgtblk == 0)
2122 tgtblk = assign_temp (build_qualified_type (type,
2123 (TYPE_QUALS (type)
2124 | TYPE_QUAL_CONST)),
2125 0, 1, 1);
2126 preserve_temp_slots (tgtblk);
2129 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2130 into a new pseudo which is a full word. */
2132 if (GET_MODE (srcreg) != BLKmode
2133 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2134 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2136 /* If the structure doesn't take up a whole number of words, see whether
2137 SRCREG is padded on the left or on the right. If it's on the left,
2138 set PADDING_CORRECTION to the number of bits to skip.
2140 In most ABIs, the structure will be returned at the least significant
2141 end of the register, which translates to right padding on little-endian
2142 targets and left padding on big-endian targets. The opposite
2143 holds if the structure is returned at the most significant
2144 end of the register. */
2145 if (bytes % UNITS_PER_WORD != 0
2146 && (targetm.calls.return_in_msb (type)
2147 ? !BYTES_BIG_ENDIAN
2148 : BYTES_BIG_ENDIAN))
2149 padding_correction
2150 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2152 /* Copy the structure BITSIZE bits at a time.
2154 We could probably emit more efficient code for machines which do not use
2155 strict alignment, but it doesn't seem worth the effort at the current
2156 time. */
2157 for (bitpos = 0, xbitpos = padding_correction;
2158 bitpos < bytes * BITS_PER_UNIT;
2159 bitpos += bitsize, xbitpos += bitsize)
2161 /* We need a new source operand each time xbitpos is on a
2162 word boundary and when xbitpos == padding_correction
2163 (the first time through). */
2164 if (xbitpos % BITS_PER_WORD == 0
2165 || xbitpos == padding_correction)
2166 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2167 GET_MODE (srcreg));
2169 /* We need a new destination operand each time bitpos is on
2170 a word boundary. */
2171 if (bitpos % BITS_PER_WORD == 0)
2172 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2174 /* Use xbitpos for the source extraction (right justified) and
2175 bitpos for the destination store (left justified). */
2176 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2177 extract_bit_field (src, bitsize,
2178 xbitpos % BITS_PER_WORD, 1,
2179 NULL_RTX, word_mode, word_mode));
2182 return tgtblk;
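/* Worked example of the padding correction above (assuming a
   big-endian target with 32-bit words where small structures are
   returned at the least significant end of the register): for a
   6-byte structure, BYTES % UNITS_PER_WORD is 2, so
   PADDING_CORRECTION is 32 - 2 * 8 = 16 and the copy loop starts
   reading 16 bits into SRCREG, skipping the left padding.  */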
2185 /* Add a USE expression for REG to the (possibly empty) list pointed
2186 to by CALL_FUSAGE. REG must denote a hard register. */
2188 void
2189 use_reg (rtx *call_fusage, rtx reg)
2191 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2193 *call_fusage
2194 = gen_rtx_EXPR_LIST (VOIDmode,
2195 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2198 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2199 starting at REGNO. All of these registers must be hard registers. */
2201 void
2202 use_regs (rtx *call_fusage, int regno, int nregs)
2204 int i;
2206 gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2208 for (i = 0; i < nregs; i++)
2209 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2212 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2213 PARALLEL REGS. This is for calls that pass values in multiple
2214 non-contiguous locations. The Irix 6 ABI has examples of this. */
2216 void
2217 use_group_regs (rtx *call_fusage, rtx regs)
2219 int i;
2221 for (i = 0; i < XVECLEN (regs, 0); i++)
2223 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2225 /* A NULL entry means the parameter goes both on the stack and in
2226 registers. This can also be a MEM for targets that pass values
2227 partially on the stack and partially in registers. */
2228 if (reg != 0 && REG_P (reg))
2229 use_reg (call_fusage, reg);
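/* Usage sketch (illustrative only): an expander passing an argument
   in SImode hard register 0 could record the fact with

     rtx call_fusage = NULL_RTX;
     use_reg (&call_fusage, gen_rtx_REG (SImode, 0));

   The accumulated list normally becomes the CALL_INSN_FUNCTION_USAGE
   of the call insn, telling later passes which registers the call
   reads.  */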
2234 /* Determine whether the LEN bytes generated by CONSTFUN can be
2235 stored to memory using several move instructions. CONSTFUNDATA is
2236 a pointer which will be passed as argument in every CONSTFUN call.
2237 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2238 a memset operation and false if it's a copy of a constant string.
2239 Return nonzero if a call to store_by_pieces should succeed. */
2241 int
2242 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2243 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2244 void *constfundata, unsigned int align, bool memsetp)
2246 unsigned HOST_WIDE_INT l;
2247 unsigned int max_size;
2248 HOST_WIDE_INT offset = 0;
2249 enum machine_mode mode, tmode;
2250 enum insn_code icode;
2251 int reverse;
2252 rtx cst;
2254 if (len == 0)
2255 return 1;
2257 if (! (memsetp
2258 ? SET_BY_PIECES_P (len, align)
2259 : STORE_BY_PIECES_P (len, align)))
2260 return 0;
2262 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2263 if (align >= GET_MODE_ALIGNMENT (tmode))
2264 align = GET_MODE_ALIGNMENT (tmode);
2265 else
2267 enum machine_mode xmode;
2269 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2270 tmode != VOIDmode;
2271 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2272 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2273 || SLOW_UNALIGNED_ACCESS (tmode, align))
2274 break;
2276 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2279 /* We would first store what we can in the largest integer mode, then go to
2280 successively smaller modes. */
2282 for (reverse = 0;
2283 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2284 reverse++)
2286 l = len;
2287 mode = VOIDmode;
2288 max_size = STORE_MAX_PIECES + 1;
2289 while (max_size > 1)
2291 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2292 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2293 if (GET_MODE_SIZE (tmode) < max_size)
2294 mode = tmode;
2296 if (mode == VOIDmode)
2297 break;
2299 icode = optab_handler (mov_optab, mode)->insn_code;
2300 if (icode != CODE_FOR_nothing
2301 && align >= GET_MODE_ALIGNMENT (mode))
2303 unsigned int size = GET_MODE_SIZE (mode);
2305 while (l >= size)
2307 if (reverse)
2308 offset -= size;
2310 cst = (*constfun) (constfundata, offset, mode);
2311 if (!LEGITIMATE_CONSTANT_P (cst))
2312 return 0;
2314 if (!reverse)
2315 offset += size;
2317 l -= size;
2321 max_size = GET_MODE_SIZE (mode);
2324 /* The code above should have handled everything. */
2325 gcc_assert (!l);
2328 return 1;
2331 /* Generate several move instructions to store LEN bytes generated by
2332 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2333 pointer which will be passed as argument in every CONSTFUN call.
2334 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2335 a memset operation and false if it's a copy of a constant string.
2336 If ENDP is 0 return TO; if ENDP is 1 return memory at the end, a la
2337 mempcpy; and if ENDP is 2 return memory at the end minus one byte, a la
2338 stpcpy. */
2340 rtx
2341 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2342 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2343 void *constfundata, unsigned int align, bool memsetp, int endp)
2345 struct store_by_pieces data;
2347 if (len == 0)
2349 gcc_assert (endp != 2);
2350 return to;
2353 gcc_assert (memsetp
2354 ? SET_BY_PIECES_P (len, align)
2355 : STORE_BY_PIECES_P (len, align));
2356 data.constfun = constfun;
2357 data.constfundata = constfundata;
2358 data.len = len;
2359 data.to = to;
2360 store_by_pieces_1 (&data, align);
2361 if (endp)
2363 rtx to1;
2365 gcc_assert (!data.reverse);
2366 if (data.autinc_to)
2368 if (endp == 2)
2370 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2371 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2372 else
2373 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2374 -1));
2376 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2377 data.offset);
2379 else
2381 if (endp == 2)
2382 --data.offset;
2383 to1 = adjust_address (data.to, QImode, data.offset);
2385 return to1;
2387 else
2388 return data.to;
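/* Sketch of how the two entry points pair up (hedged; builtins.c does
   essentially this when expanding memset with a constant length,
   GEN_STR and DATA standing in for its real callback and cookie):

     if (can_store_by_pieces (len, gen_str, &data, align, true))
       to = store_by_pieces (to, len, gen_str, &data, align, true, 0);
     else
       ... fall back to a setmem pattern or a libcall ...

   The same CONSTFUN and CONSTFUNDATA must be passed to both calls,
   since the answer depends on the constants the callback yields.  */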
2391 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2392 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2394 static void
2395 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2397 struct store_by_pieces data;
2399 if (len == 0)
2400 return;
2402 data.constfun = clear_by_pieces_1;
2403 data.constfundata = NULL;
2404 data.len = len;
2405 data.to = to;
2406 store_by_pieces_1 (&data, align);
2409 /* Callback routine for clear_by_pieces.
2410 Return const0_rtx unconditionally. */
2412 static rtx
2413 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2414 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2415 enum machine_mode mode ATTRIBUTE_UNUSED)
2417 return const0_rtx;
2420 /* Subroutine of clear_by_pieces and store_by_pieces.
2421 Generate several move instructions to store LEN bytes of block TO. (A MEM
2422 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2424 static void
2425 store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
2426 unsigned int align ATTRIBUTE_UNUSED)
2428 rtx to_addr = XEXP (data->to, 0);
2429 unsigned int max_size = STORE_MAX_PIECES + 1;
2430 enum machine_mode mode = VOIDmode, tmode;
2431 enum insn_code icode;
2433 data->offset = 0;
2434 data->to_addr = to_addr;
2435 data->autinc_to
2436 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2437 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2439 data->explicit_inc_to = 0;
2440 data->reverse
2441 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2442 if (data->reverse)
2443 data->offset = data->len;
2445 /* If storing requires more than two move insns,
2446 copy addresses to registers (to make displacements shorter)
2447 and use post-increment if available. */
2448 if (!data->autinc_to
2449 && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2451 /* Determine the main mode we'll be using. */
2452 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2453 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2454 if (GET_MODE_SIZE (tmode) < max_size)
2455 mode = tmode;
2457 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2459 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2460 data->autinc_to = 1;
2461 data->explicit_inc_to = -1;
2464 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2465 && ! data->autinc_to)
2467 data->to_addr = copy_addr_to_reg (to_addr);
2468 data->autinc_to = 1;
2469 data->explicit_inc_to = 1;
2472 if ( !data->autinc_to && CONSTANT_P (to_addr))
2473 data->to_addr = copy_addr_to_reg (to_addr);
2476 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2477 if (align >= GET_MODE_ALIGNMENT (tmode))
2478 align = GET_MODE_ALIGNMENT (tmode);
2479 else
2481 enum machine_mode xmode;
2483 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2484 tmode != VOIDmode;
2485 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2486 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2487 || SLOW_UNALIGNED_ACCESS (tmode, align))
2488 break;
2490 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2493 /* First store what we can in the largest integer mode, then go to
2494 successively smaller modes. */
2496 while (max_size > 1)
2498 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2499 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2500 if (GET_MODE_SIZE (tmode) < max_size)
2501 mode = tmode;
2503 if (mode == VOIDmode)
2504 break;
2506 icode = optab_handler (mov_optab, mode)->insn_code;
2507 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2508 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2510 max_size = GET_MODE_SIZE (mode);
2513 /* The code above should have handled everything. */
2514 gcc_assert (!data->len);
2517 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2518 with move instructions for mode MODE. GENFUN is the gen_... function
2519 to make a move insn for that mode. DATA has all the other info. */
2521 static void
2522 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2523 struct store_by_pieces *data)
2525 unsigned int size = GET_MODE_SIZE (mode);
2526 rtx to1, cst;
2528 while (data->len >= size)
2530 if (data->reverse)
2531 data->offset -= size;
2533 if (data->autinc_to)
2534 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2535 data->offset);
2536 else
2537 to1 = adjust_address (data->to, mode, data->offset);
2539 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2540 emit_insn (gen_add2_insn (data->to_addr,
2541 GEN_INT (-(HOST_WIDE_INT) size)));
2543 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2544 emit_insn ((*genfun) (to1, cst));
2546 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2547 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2549 if (! data->reverse)
2550 data->offset += size;
2552 data->len -= size;
2556 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2557 its length in bytes. */
2559 rtx
2560 clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
2561 unsigned int expected_align, HOST_WIDE_INT expected_size)
2563 enum machine_mode mode = GET_MODE (object);
2564 unsigned int align;
2566 gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2568 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2569 just move a zero. Otherwise, do this a piece at a time. */
2570 if (mode != BLKmode
2571 && GET_CODE (size) == CONST_INT
2572 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
2574 rtx zero = CONST0_RTX (mode);
2575 if (zero != NULL)
2577 emit_move_insn (object, zero);
2578 return NULL;
2581 if (COMPLEX_MODE_P (mode))
2583 zero = CONST0_RTX (GET_MODE_INNER (mode));
2584 if (zero != NULL)
2586 write_complex_part (object, zero, 0);
2587 write_complex_part (object, zero, 1);
2588 return NULL;
2593 if (size == const0_rtx)
2594 return NULL;
2596 align = MEM_ALIGN (object);
2598 if (GET_CODE (size) == CONST_INT
2599 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2600 clear_by_pieces (object, INTVAL (size), align);
2601 else if (set_storage_via_setmem (object, size, const0_rtx, align,
2602 expected_align, expected_size))
2604 else
2605 return set_storage_via_libcall (object, size, const0_rtx,
2606 method == BLOCK_OP_TAILCALL);
2608 return NULL;
2611 rtx
2612 clear_storage (rtx object, rtx size, enum block_op_methods method)
2614 return clear_storage_hints (object, size, method, 0, -1);
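/* Usage sketch (illustrative only): zeroing a BLKmode MEM DST whose
   size in bytes is the compile-time constant SIZE:

     clear_storage (dst, GEN_INT (size), BLOCK_OP_NORMAL);

   MEM_ALIGN (DST) then drives the choice among the by-pieces, setmem
   and libcall strategies above.  */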
2618 /* A subroutine of clear_storage. Expand a call to memset.
2619 Return the return value of memset, 0 otherwise. */
2621 rtx
2622 set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
2624 tree call_expr, fn, object_tree, size_tree, val_tree;
2625 enum machine_mode size_mode;
2626 rtx retval;
2628 /* Emit code to copy OBJECT and SIZE into new pseudos. We can then
2629 place those new pseudos into a VAR_DECL and use them later. */
2631 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2633 size_mode = TYPE_MODE (sizetype);
2634 size = convert_to_mode (size_mode, size, 1);
2635 size = copy_to_mode_reg (size_mode, size);
2637 /* It is incorrect to use the libcall calling conventions to call
2638 memset in this context. This could be a user call to memset and
2639 the user may wish to examine the return value from memset. For
2640 targets where libcalls and normal calls have different conventions
2641 for returning pointers, we could end up generating incorrect code. */
2643 object_tree = make_tree (ptr_type_node, object);
2644 if (GET_CODE (val) != CONST_INT)
2645 val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
2646 size_tree = make_tree (sizetype, size);
2647 val_tree = make_tree (integer_type_node, val);
2649 fn = clear_storage_libcall_fn (true);
2650 call_expr = build_call_expr (fn, 3,
2651 object_tree, val_tree, size_tree);
2652 CALL_EXPR_TAILCALL (call_expr) = tailcall;
2654 retval = expand_normal (call_expr);
2656 return retval;
2659 /* A subroutine of set_storage_via_libcall. Create the tree node
2660 for the function we use for block clears. The first time FOR_CALL
2661 is true, we call assemble_external. */
2663 static GTY(()) tree block_clear_fn;
2665 void
2666 init_block_clear_fn (const char *asmspec)
2668 if (!block_clear_fn)
2670 tree fn, args;
2672 fn = get_identifier ("memset");
2673 args = build_function_type_list (ptr_type_node, ptr_type_node,
2674 integer_type_node, sizetype,
2675 NULL_TREE);
2677 fn = build_decl (FUNCTION_DECL, fn, args);
2678 DECL_EXTERNAL (fn) = 1;
2679 TREE_PUBLIC (fn) = 1;
2680 DECL_ARTIFICIAL (fn) = 1;
2681 TREE_NOTHROW (fn) = 1;
2682 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
2683 DECL_VISIBILITY_SPECIFIED (fn) = 1;
2685 block_clear_fn = fn;
2688 if (asmspec)
2689 set_user_assembler_name (block_clear_fn, asmspec);
2692 static tree
2693 clear_storage_libcall_fn (int for_call)
2695 static bool emitted_extern;
2697 if (!block_clear_fn)
2698 init_block_clear_fn (NULL);
2700 if (for_call && !emitted_extern)
2702 emitted_extern = true;
2703 make_decl_rtl (block_clear_fn);
2704 assemble_external (block_clear_fn);
2707 return block_clear_fn;
2710 /* Expand a setmem pattern; return true if successful. */
2712 bool
2713 set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
2714 unsigned int expected_align, HOST_WIDE_INT expected_size)
2716 /* Try the most limited insn first, because there's no point
2717 including more than one in the machine description unless
2718 the more limited one has some advantage. */
2720 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2721 enum machine_mode mode;
2723 if (expected_align < align)
2724 expected_align = align;
2726 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2727 mode = GET_MODE_WIDER_MODE (mode))
2729 enum insn_code code = setmem_optab[(int) mode];
2730 insn_operand_predicate_fn pred;
2732 if (code != CODE_FOR_nothing
2733 /* We don't need MODE to be narrower than
2734 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2735 the mode mask, as it is returned by the macro, it will
2736 definitely be less than the actual mode mask. */
2737 && ((GET_CODE (size) == CONST_INT
2738 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2739 <= (GET_MODE_MASK (mode) >> 1)))
2740 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2741 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2742 || (*pred) (object, BLKmode))
2743 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
2744 || (*pred) (opalign, VOIDmode)))
2746 rtx opsize, opchar;
2747 enum machine_mode char_mode;
2748 rtx last = get_last_insn ();
2749 rtx pat;
2751 opsize = convert_to_mode (mode, size, 1);
2752 pred = insn_data[(int) code].operand[1].predicate;
2753 if (pred != 0 && ! (*pred) (opsize, mode))
2754 opsize = copy_to_mode_reg (mode, opsize);
2756 opchar = val;
2757 char_mode = insn_data[(int) code].operand[2].mode;
2758 if (char_mode != VOIDmode)
2760 opchar = convert_to_mode (char_mode, opchar, 1);
2761 pred = insn_data[(int) code].operand[2].predicate;
2762 if (pred != 0 && ! (*pred) (opchar, char_mode))
2763 opchar = copy_to_mode_reg (char_mode, opchar);
2766 if (insn_data[(int) code].n_operands == 4)
2767 pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign);
2768 else
2769 pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign,
2770 GEN_INT (expected_align),
2771 GEN_INT (expected_size));
2772 if (pat)
2774 emit_insn (pat);
2775 return true;
2777 else
2778 delete_insns_since (last);
2782 return false;
2786 /* Write to one of the components of the complex value CPLX. Write VAL to
2787 the real part if IMAG_P is false, and the imaginary part if it's true. */
2789 static void
2790 write_complex_part (rtx cplx, rtx val, bool imag_p)
2792 enum machine_mode cmode;
2793 enum machine_mode imode;
2794 unsigned ibitsize;
2796 if (GET_CODE (cplx) == CONCAT)
2798 emit_move_insn (XEXP (cplx, imag_p), val);
2799 return;
2802 cmode = GET_MODE (cplx);
2803 imode = GET_MODE_INNER (cmode);
2804 ibitsize = GET_MODE_BITSIZE (imode);
2806 /* For MEMs simplify_gen_subreg may generate an invalid new address
2807 because, e.g., the original address is considered mode-dependent
2808 by the target, which restricts simplify_subreg from invoking
2809 adjust_address_nv. Instead of preparing fallback support for an
2810 invalid address, we call adjust_address_nv directly. */
2811 if (MEM_P (cplx))
2813 emit_move_insn (adjust_address_nv (cplx, imode,
2814 imag_p ? GET_MODE_SIZE (imode) : 0),
2815 val);
2816 return;
2819 /* If the sub-object is at least word sized, then we know that subregging
2820 will work. This special case is important, since store_bit_field
2821 wants to operate on integer modes, and there's rarely an OImode to
2822 correspond to TCmode. */
2823 if (ibitsize >= BITS_PER_WORD
2824 /* For hard regs we have exact predicates. Assume we can split
2825 the original object if it spans an even number of hard regs.
2826 This special case is important for SCmode on 64-bit platforms
2827 where the natural size of floating-point regs is 32-bit. */
2828 || (REG_P (cplx)
2829 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2830 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2832 rtx part = simplify_gen_subreg (imode, cplx, cmode,
2833 imag_p ? GET_MODE_SIZE (imode) : 0);
2834 if (part)
2836 emit_move_insn (part, val);
2837 return;
2839 else
2840 /* simplify_gen_subreg may fail for sub-word MEMs. */
2841 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2844 store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, imode, val);
2847 /* Extract one of the components of the complex value CPLX. Extract the
2848 real part if IMAG_P is false, and the imaginary part if it's true. */
2850 static rtx
2851 read_complex_part (rtx cplx, bool imag_p)
2853 enum machine_mode cmode, imode;
2854 unsigned ibitsize;
2856 if (GET_CODE (cplx) == CONCAT)
2857 return XEXP (cplx, imag_p);
2859 cmode = GET_MODE (cplx);
2860 imode = GET_MODE_INNER (cmode);
2861 ibitsize = GET_MODE_BITSIZE (imode);
2863 /* Special case reads from complex constants that got spilled to memory. */
2864 if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
2866 tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
2867 if (decl && TREE_CODE (decl) == COMPLEX_CST)
2869 tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
2870 if (CONSTANT_CLASS_P (part))
2871 return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
2875 /* For MEMs simplify_gen_subreg may generate an invalid new address
2876 because, e.g., the original address is considered mode-dependent
2877 by the target, which restricts simplify_subreg from invoking
2878 adjust_address_nv. Instead of preparing fallback support for an
2879 invalid address, we call adjust_address_nv directly. */
2880 if (MEM_P (cplx))
2881 return adjust_address_nv (cplx, imode,
2882 imag_p ? GET_MODE_SIZE (imode) : 0);
2884 /* If the sub-object is at least word sized, then we know that subregging
2885 will work. This special case is important, since extract_bit_field
2886 wants to operate on integer modes, and there's rarely an OImode to
2887 correspond to TCmode. */
2888 if (ibitsize >= BITS_PER_WORD
2889 /* For hard regs we have exact predicates. Assume we can split
2890 the original object if it spans an even number of hard regs.
2891 This special case is important for SCmode on 64-bit platforms
2892 where the natural size of floating-point regs is 32-bit. */
2893 || (REG_P (cplx)
2894 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2895 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2897 rtx ret = simplify_gen_subreg (imode, cplx, cmode,
2898 imag_p ? GET_MODE_SIZE (imode) : 0);
2899 if (ret)
2900 return ret;
2901 else
2902 /* simplify_gen_subreg may fail for sub-word MEMs. */
2903 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2906 return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
2907 true, NULL_RTX, imode, imode);
2910 /* A subroutine of emit_move_insn_1. Yet another lowpart generator.
2911 NEW_MODE and OLD_MODE are the same size. Return NULL if X cannot be
2912 represented in NEW_MODE. If FORCE is true, this will never happen, as
2913 we'll force-create a SUBREG if needed. */
2915 static rtx
2916 emit_move_change_mode (enum machine_mode new_mode,
2917 enum machine_mode old_mode, rtx x, bool force)
2919 rtx ret;
2921 if (push_operand (x, GET_MODE (x)))
2923 ret = gen_rtx_MEM (new_mode, XEXP (x, 0));
2924 MEM_COPY_ATTRIBUTES (ret, x);
2926 else if (MEM_P (x))
2928 /* We don't have to worry about changing the address since the
2929 size in bytes is supposed to be the same. */
2930 if (reload_in_progress)
2932 /* Copy the MEM to change the mode and move any
2933 substitutions from the old MEM to the new one. */
2934 ret = adjust_address_nv (x, new_mode, 0);
2935 copy_replacements (x, ret);
2937 else
2938 ret = adjust_address (x, new_mode, 0);
2940 else
2942 /* Note that we do want simplify_subreg's behavior of validating
2943 that the new mode is ok for a hard register. If we were to use
2944 simplify_gen_subreg, we would create the subreg, but would
2945 probably run into the target not being able to implement it. */
2946 /* Except, of course, when FORCE is true, when this is exactly what
2947 we want. Which is needed for CCmodes on some targets. */
2948 if (force)
2949 ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
2950 else
2951 ret = simplify_subreg (new_mode, x, old_mode, 0);
2954 return ret;
2957 /* A subroutine of emit_move_insn_1. Generate a move from Y into X using
2958 an integer mode of the same size as MODE. Returns the instruction
2959 emitted, or NULL if such a move could not be generated. */
2961 static rtx
2962 emit_move_via_integer (enum machine_mode mode, rtx x, rtx y, bool force)
2964 enum machine_mode imode;
2965 enum insn_code code;
2967 /* There must exist a mode of the exact size we require. */
2968 imode = int_mode_for_mode (mode);
2969 if (imode == BLKmode)
2970 return NULL_RTX;
2972 /* The target must support moves in this mode. */
2973 code = optab_handler (mov_optab, imode)->insn_code;
2974 if (code == CODE_FOR_nothing)
2975 return NULL_RTX;
2977 x = emit_move_change_mode (imode, mode, x, force);
2978 if (x == NULL_RTX)
2979 return NULL_RTX;
2980 y = emit_move_change_mode (imode, mode, y, force);
2981 if (y == NULL_RTX)
2982 return NULL_RTX;
2983 return emit_insn (GEN_FCN (code) (x, y));
2986 /* A subroutine of emit_move_insn_1. X is a push_operand in MODE.
2987 Return an equivalent MEM that does not use an auto-increment. */
2989 static rtx
2990 emit_move_resolve_push (enum machine_mode mode, rtx x)
2992 enum rtx_code code = GET_CODE (XEXP (x, 0));
2993 HOST_WIDE_INT adjust;
2994 rtx temp;
2996 adjust = GET_MODE_SIZE (mode);
2997 #ifdef PUSH_ROUNDING
2998 adjust = PUSH_ROUNDING (adjust);
2999 #endif
3000 if (code == PRE_DEC || code == POST_DEC)
3001 adjust = -adjust;
3002 else if (code == PRE_MODIFY || code == POST_MODIFY)
3004 rtx expr = XEXP (XEXP (x, 0), 1);
3005 HOST_WIDE_INT val;
3007 gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
3008 gcc_assert (GET_CODE (XEXP (expr, 1)) == CONST_INT);
3009 val = INTVAL (XEXP (expr, 1));
3010 if (GET_CODE (expr) == MINUS)
3011 val = -val;
3012 gcc_assert (adjust == val || adjust == -val);
3013 adjust = val;
3016 /* Do not use anti_adjust_stack, since we don't want to update
3017 stack_pointer_delta. */
3018 temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
3019 GEN_INT (adjust), stack_pointer_rtx,
3020 0, OPTAB_LIB_WIDEN);
3021 if (temp != stack_pointer_rtx)
3022 emit_move_insn (stack_pointer_rtx, temp);
3024 switch (code)
3026 case PRE_INC:
3027 case PRE_DEC:
3028 case PRE_MODIFY:
3029 temp = stack_pointer_rtx;
3030 break;
3031 case POST_INC:
3032 case POST_DEC:
3033 case POST_MODIFY:
3034 temp = plus_constant (stack_pointer_rtx, -adjust);
3035 break;
3036 default:
3037 gcc_unreachable ();
3040 return replace_equiv_address (x, temp);
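/* Worked example (assuming a downward-growing stack, a PRE_DEC push
   of a DImode value, and no PUSH_ROUNDING adjustment): ADJUST starts
   at 8 and is negated to -8 for PRE_DEC, the explicit PLUS above
   decrements the stack pointer by 8, and the MEM returned addresses
   the stack pointer itself, i.e. the freshly allocated 8 bytes.  */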
3043 /* A subroutine of emit_move_complex. Generate a move from Y into X.
3044 X is known to satisfy push_operand, and MODE is known to be complex.
3045 Returns the last instruction emitted. */
3047 rtx
3048 emit_move_complex_push (enum machine_mode mode, rtx x, rtx y)
3050 enum machine_mode submode = GET_MODE_INNER (mode);
3051 bool imag_first;
3053 #ifdef PUSH_ROUNDING
3054 unsigned int submodesize = GET_MODE_SIZE (submode);
3056 /* In case we output to the stack, but the size is smaller than the
3057 machine can push exactly, we need to use move instructions. */
3058 if (PUSH_ROUNDING (submodesize) != submodesize)
3060 x = emit_move_resolve_push (mode, x);
3061 return emit_move_insn (x, y);
3063 #endif
3065 /* Note that the real part always precedes the imag part in memory
3066 regardless of machine's endianness. */
3067 switch (GET_CODE (XEXP (x, 0)))
3069 case PRE_DEC:
3070 case POST_DEC:
3071 imag_first = true;
3072 break;
3073 case PRE_INC:
3074 case POST_INC:
3075 imag_first = false;
3076 break;
3077 default:
3078 gcc_unreachable ();
3081 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3082 read_complex_part (y, imag_first));
3083 return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3084 read_complex_part (y, !imag_first));
3087 /* A subroutine of emit_move_complex. Perform the move from Y to X
3088 via two moves of the parts. Returns the last instruction emitted. */
3090 rtx
3091 emit_move_complex_parts (rtx x, rtx y)
3093 /* Show the output dies here. This is necessary for SUBREGs
3094 of pseudos since we cannot track their lifetimes correctly;
3095 hard regs shouldn't appear here except as return values. */
3096 if (!reload_completed && !reload_in_progress
3097 && REG_P (x) && !reg_overlap_mentioned_p (x, y))
3098 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3100 write_complex_part (x, read_complex_part (y, false), false);
3101 write_complex_part (x, read_complex_part (y, true), true);
3103 return get_last_insn ();
3106 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3107 MODE is known to be complex. Returns the last instruction emitted. */
3109 static rtx
3110 emit_move_complex (enum machine_mode mode, rtx x, rtx y)
3112 bool try_int;
3114 /* Need to take special care for pushes, to maintain proper ordering
3115 of the data, and possibly extra padding. */
3116 if (push_operand (x, mode))
3117 return emit_move_complex_push (mode, x, y);
3119 /* See if we can coerce the target into moving both values at once. */
3121 /* Move floating point as parts. */
3122 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
3123 && optab_handler (mov_optab, GET_MODE_INNER (mode))->insn_code != CODE_FOR_nothing)
3124 try_int = false;
3125 /* Not possible if the values are inherently not adjacent. */
3126 else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
3127 try_int = false;
3128 /* Is possible if both are registers (or subregs of registers). */
3129 else if (register_operand (x, mode) && register_operand (y, mode))
3130 try_int = true;
3131 /* If one of the operands is a memory, and alignment constraints
3132 are friendly enough, we may be able to do combined memory operations.
3133 We do not attempt this if Y is a constant because that combination is
3134 usually better with the by-parts thing below. */
3135 else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
3136 && (!STRICT_ALIGNMENT
3137 || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
3138 try_int = true;
3139 else
3140 try_int = false;
3142 if (try_int)
3144 rtx ret;
3146 /* For memory to memory moves, optimal behavior can be had with the
3147 existing block move logic. */
3148 if (MEM_P (x) && MEM_P (y))
3150 emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
3151 BLOCK_OP_NO_LIBCALL);
3152 return get_last_insn ();
3155 ret = emit_move_via_integer (mode, x, y, true);
3156 if (ret)
3157 return ret;
3160 return emit_move_complex_parts (x, y);
3163 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3164 MODE is known to be MODE_CC. Returns the last instruction emitted. */
3166 static rtx
3167 emit_move_ccmode (enum machine_mode mode, rtx x, rtx y)
3169 rtx ret;
3171 /* Assume all MODE_CC modes are equivalent; if we have movcc, use it. */
3172 if (mode != CCmode)
3174 enum insn_code code = optab_handler (mov_optab, CCmode)->insn_code;
3175 if (code != CODE_FOR_nothing)
3177 x = emit_move_change_mode (CCmode, mode, x, true);
3178 y = emit_move_change_mode (CCmode, mode, y, true);
3179 return emit_insn (GEN_FCN (code) (x, y));
3183 /* Otherwise, find the MODE_INT mode of the same width. */
3184 ret = emit_move_via_integer (mode, x, y, false);
3185 gcc_assert (ret != NULL);
3186 return ret;
3189 /* Return true if word I of OP lies entirely in the
3190 undefined bits of a paradoxical subreg. */
3192 static bool
3193 undefined_operand_subword_p (const_rtx op, int i)
3195 enum machine_mode innermode, innermostmode;
3196 int offset;
3197 if (GET_CODE (op) != SUBREG)
3198 return false;
3199 innermode = GET_MODE (op);
3200 innermostmode = GET_MODE (SUBREG_REG (op));
3201 offset = i * UNITS_PER_WORD + SUBREG_BYTE (op);
3202 /* The SUBREG_BYTE represents offset, as if the value were stored in
3203 memory, except for a paradoxical subreg where we define
3204 SUBREG_BYTE to be 0; undo this exception as in
3205 simplify_subreg. */
3206 if (SUBREG_BYTE (op) == 0
3207 && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
3209 int difference = (GET_MODE_SIZE (innermostmode) - GET_MODE_SIZE (innermode));
3210 if (WORDS_BIG_ENDIAN)
3211 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
3212 if (BYTES_BIG_ENDIAN)
3213 offset += difference % UNITS_PER_WORD;
3215 if (offset >= GET_MODE_SIZE (innermostmode)
3216 || offset <= -GET_MODE_SIZE (word_mode))
3217 return true;
3218 return false;
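/* Worked example (assuming a little-endian target with 8-byte
   words): for OP = (subreg:TI (reg:DI R) 0), word 1 of OP yields
   OFFSET = 8, which is not below GET_MODE_SIZE (DImode) = 8, so the
   function returns true: the high word of this paradoxical subreg is
   undefined and no move needs to be emitted for it.  */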
3221 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3222 MODE is any multi-word or full-word mode that lacks a move_insn
3223 pattern. Note that you will get better code if you define such
3224 patterns, even if they must turn into multiple assembler instructions. */
3226 static rtx
3227 emit_move_multi_word (enum machine_mode mode, rtx x, rtx y)
3229 rtx last_insn = 0;
3230 rtx seq, inner;
3231 bool need_clobber;
3232 int i;
3234 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
3236 /* If X is a push on the stack, do the push now and replace
3237 X with a reference to the stack pointer. */
3238 if (push_operand (x, mode))
3239 x = emit_move_resolve_push (mode, x);
3241 /* If we are in reload, see if either operand is a MEM whose address
3242 is scheduled for replacement. */
3243 if (reload_in_progress && MEM_P (x)
3244 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3245 x = replace_equiv_address_nv (x, inner);
3246 if (reload_in_progress && MEM_P (y)
3247 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3248 y = replace_equiv_address_nv (y, inner);
3250 start_sequence ();
3252 need_clobber = false;
3253 for (i = 0;
3254 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3255 i++)
3257 rtx xpart = operand_subword (x, i, 1, mode);
3258 rtx ypart;
3260 /* Do not generate code for a move if it would come entirely
3261 from the undefined bits of a paradoxical subreg. */
3262 if (undefined_operand_subword_p (y, i))
3263 continue;
3265 ypart = operand_subword (y, i, 1, mode);
3267 /* If we can't get a part of Y, put Y into memory if it is a
3268 constant. Otherwise, force it into a register. Then we must
3269 be able to get a part of Y. */
3270 if (ypart == 0 && CONSTANT_P (y))
3272 y = use_anchored_address (force_const_mem (mode, y));
3273 ypart = operand_subword (y, i, 1, mode);
3275 else if (ypart == 0)
3276 ypart = operand_subword_force (y, i, mode);
3278 gcc_assert (xpart && ypart);
3280 need_clobber |= (GET_CODE (xpart) == SUBREG);
3282 last_insn = emit_move_insn (xpart, ypart);
3285 seq = get_insns ();
3286 end_sequence ();
3288 /* Show the output dies here. This is necessary for SUBREGs
3289 of pseudos since we cannot track their lifetimes correctly;
3290 hard regs shouldn't appear here except as return values.
3291 We never want to emit such a clobber after reload. */
3292 if (x != y
3293 && ! (reload_in_progress || reload_completed)
3294 && need_clobber != 0)
3295 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3297 emit_insn (seq);
3299 return last_insn;
3302 /* Low level part of emit_move_insn.
3303 Called just like emit_move_insn, but assumes X and Y
3304 are basically valid. */
3306 rtx
3307 emit_move_insn_1 (rtx x, rtx y)
3309 enum machine_mode mode = GET_MODE (x);
3310 enum insn_code code;
3312 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3314 code = optab_handler (mov_optab, mode)->insn_code;
3315 if (code != CODE_FOR_nothing)
3316 return emit_insn (GEN_FCN (code) (x, y));
3318 /* Expand complex moves by moving real part and imag part. */
3319 if (COMPLEX_MODE_P (mode))
3320 return emit_move_complex (mode, x, y);
3322 if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT
3323 || ALL_FIXED_POINT_MODE_P (mode))
3325 rtx result = emit_move_via_integer (mode, x, y, true);
3327 /* If we can't find an integer mode, use multi words. */
3328 if (result)
3329 return result;
3330 else
3331 return emit_move_multi_word (mode, x, y);
3334 if (GET_MODE_CLASS (mode) == MODE_CC)
3335 return emit_move_ccmode (mode, x, y);
3337 /* Try using a move pattern for the corresponding integer mode. This is
3338 only safe when simplify_subreg can convert MODE constants into integer
3339 constants. At present, it can only do this reliably if the value
3340 fits within a HOST_WIDE_INT. */
3341 if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3343 rtx ret = emit_move_via_integer (mode, x, y, false);
3344 if (ret)
3345 return ret;
3348 return emit_move_multi_word (mode, x, y);
3351 /* Generate code to copy Y into X.
3352 Both Y and X must have the same mode, except that
3353 Y can be a constant with VOIDmode.
3354 This mode cannot be BLKmode; use emit_block_move for that.
3356 Return the last instruction emitted. */
3358 rtx
3359 emit_move_insn (rtx x, rtx y)
3361 enum machine_mode mode = GET_MODE (x);
3362 rtx y_cst = NULL_RTX;
3363 rtx last_insn, set;
3365 gcc_assert (mode != BLKmode
3366 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3368 if (CONSTANT_P (y))
3370 if (optimize
3371 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3372 && (last_insn = compress_float_constant (x, y)))
3373 return last_insn;
3375 y_cst = y;
3377 if (!LEGITIMATE_CONSTANT_P (y))
3379 y = force_const_mem (mode, y);
3381 /* If the target's cannot_force_const_mem prevented the spill,
3382 assume that the target's move expanders will also take care
3383 of the non-legitimate constant. */
3384 if (!y)
3385 y = y_cst;
3386 else
3387 y = use_anchored_address (y);
3391 /* If X or Y are memory references, verify that their addresses are valid
3392 for the machine. */
3393 if (MEM_P (x)
3394 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
3395 && ! push_operand (x, GET_MODE (x)))
3396 || (flag_force_addr
3397 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
3398 x = validize_mem (x);
3400 if (MEM_P (y)
3401 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
3402 || (flag_force_addr
3403 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
3404 y = validize_mem (y);
3406 gcc_assert (mode != BLKmode);
3408 last_insn = emit_move_insn_1 (x, y);
3410 if (y_cst && REG_P (x)
3411 && (set = single_set (last_insn)) != NULL_RTX
3412 && SET_DEST (set) == x
3413 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3414 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
3416 return last_insn;
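/* Usage sketch (illustrative only): the canonical way for expander
   code to copy a constant into a fresh pseudo:

     rtx reg = gen_reg_rtx (SImode);
     emit_move_insn (reg, GEN_INT (42));

   If the constant is not legitimate, it is spilled to the constant
   pool above and a REG_EQUAL note recording the original value is
   attached for the benefit of later passes.  */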
3419 /* If Y is representable exactly in a narrower mode, and the target can
3420 perform the extension directly from constant or memory, then emit the
3421 move as an extension. */
3423 static rtx
3424 compress_float_constant (rtx x, rtx y)
3426 enum machine_mode dstmode = GET_MODE (x);
3427 enum machine_mode orig_srcmode = GET_MODE (y);
3428 enum machine_mode srcmode;
3429 REAL_VALUE_TYPE r;
3430 int oldcost, newcost;
3432 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3434 if (LEGITIMATE_CONSTANT_P (y))
3435 oldcost = rtx_cost (y, SET);
3436 else
3437 oldcost = rtx_cost (force_const_mem (dstmode, y), SET);
3439 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3440 srcmode != orig_srcmode;
3441 srcmode = GET_MODE_WIDER_MODE (srcmode))
3443 enum insn_code ic;
3444 rtx trunc_y, last_insn;
3446 /* Skip if the target can't extend this way. */
3447 ic = can_extend_p (dstmode, srcmode, 0);
3448 if (ic == CODE_FOR_nothing)
3449 continue;
3451 /* Skip if the narrowed value isn't exact. */
3452 if (! exact_real_truncate (srcmode, &r))
3453 continue;
3455 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3457 if (LEGITIMATE_CONSTANT_P (trunc_y))
3459 /* Skip if the target needs extra instructions to perform
3460 the extension. */
3461 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3462 continue;
3463 /* This is valid, but may not be cheaper than the original. */
3464 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET);
3465 if (oldcost < newcost)
3466 continue;
3468 else if (float_extend_from_mem[dstmode][srcmode])
3470 trunc_y = force_const_mem (srcmode, trunc_y);
3471 /* This is valid, but may not be cheaper than the original. */
3472 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET);
3473 if (oldcost < newcost)
3474 continue;
3475 trunc_y = validize_mem (trunc_y);
3477 else
3478 continue;
3480 /* For CSE's benefit, force the compressed constant pool entry
3481 into a new pseudo. This constant may be used in different modes,
3482 and if not, combine will put things back together for us. */
3483 trunc_y = force_reg (srcmode, trunc_y);
3484 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3485 last_insn = get_last_insn ();
3487 if (REG_P (x))
3488 set_unique_reg_note (last_insn, REG_EQUAL, y);
3490 return last_insn;
3493 return NULL_RTX;
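/* Worked example (hedged): if Y is the DFmode constant 1.0, it
   truncates exactly to SFmode, so on a target whose extendsfdf2
   pattern can extend directly from a constant or from memory the
   move is emitted as an SFmode load plus a float extension, usually
   cheaper than materializing the full DFmode constant.  */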
3496 /* Pushing data onto the stack. */
3498 /* Push a block of length SIZE (perhaps variable)
3499 and return an rtx to address the beginning of the block.
3500 The value may be virtual_outgoing_args_rtx.
3502 EXTRA is the number of bytes of padding to push in addition to SIZE.
3503 BELOW nonzero means this padding comes at low addresses;
3504 otherwise, the padding comes at high addresses. */
3506 rtx
3507 push_block (rtx size, int extra, int below)
3509 rtx temp;
3511 size = convert_modes (Pmode, ptr_mode, size, 1);
3512 if (CONSTANT_P (size))
3513 anti_adjust_stack (plus_constant (size, extra));
3514 else if (REG_P (size) && extra == 0)
3515 anti_adjust_stack (size);
3516 else
3518 temp = copy_to_mode_reg (Pmode, size);
3519 if (extra != 0)
3520 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3521 temp, 0, OPTAB_LIB_WIDEN);
3522 anti_adjust_stack (temp);
3525 #ifndef STACK_GROWS_DOWNWARD
3526 if (0)
3527 #else
3528 if (1)
3529 #endif
3531 temp = virtual_outgoing_args_rtx;
3532 if (extra != 0 && below)
3533 temp = plus_constant (temp, extra);
3535 else
3537 if (GET_CODE (size) == CONST_INT)
3538 temp = plus_constant (virtual_outgoing_args_rtx,
3539 -INTVAL (size) - (below ? 0 : extra));
3540 else if (extra != 0 && !below)
3541 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3542 negate_rtx (Pmode, plus_constant (size, extra)));
3543 else
3544 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3545 negate_rtx (Pmode, size));
3548 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3551 #ifdef PUSH_ROUNDING
3553 /* Emit single push insn. */
3555 static void
3556 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3558 rtx dest_addr;
3559 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3560 rtx dest;
3561 enum insn_code icode;
3562 insn_operand_predicate_fn pred;
3564 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3565 /* If there is a push pattern, use it. Otherwise try the old way of
3566 throwing a MEM representing the push operation to the move expander. */
3567 icode = optab_handler (push_optab, mode)->insn_code;
3568 if (icode != CODE_FOR_nothing)
3570 if (((pred = insn_data[(int) icode].operand[0].predicate)
3571 && !((*pred) (x, mode))))
3572 x = force_reg (mode, x);
3573 emit_insn (GEN_FCN (icode) (x));
3574 return;
3576 if (GET_MODE_SIZE (mode) == rounded_size)
3577 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3578 /* If we are to pad downward, adjust the stack pointer first and
3579 then store X into the stack location using an offset. This is
3580 because emit_move_insn does not know how to pad; it does not have
3581 access to type. */
3582 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3584 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3585 HOST_WIDE_INT offset;
3587 emit_move_insn (stack_pointer_rtx,
3588 expand_binop (Pmode,
3589 #ifdef STACK_GROWS_DOWNWARD
3590 sub_optab,
3591 #else
3592 add_optab,
3593 #endif
3594 stack_pointer_rtx,
3595 GEN_INT (rounded_size),
3596 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3598 offset = (HOST_WIDE_INT) padding_size;
3599 #ifdef STACK_GROWS_DOWNWARD
3600 if (STACK_PUSH_CODE == POST_DEC)
3601 /* We have already decremented the stack pointer, so get the
3602 previous value. */
3603 offset += (HOST_WIDE_INT) rounded_size;
3604 #else
3605 if (STACK_PUSH_CODE == POST_INC)
3606 /* We have already incremented the stack pointer, so get the
3607 previous value. */
3608 offset -= (HOST_WIDE_INT) rounded_size;
3609 #endif
3610 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3612 else
3614 #ifdef STACK_GROWS_DOWNWARD
3615 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3616 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3617 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3618 #else
3619 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3620 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3621 GEN_INT (rounded_size));
3622 #endif
3623 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3626 dest = gen_rtx_MEM (mode, dest_addr);
3628 if (type != 0)
3630 set_mem_attributes (dest, type, 1);
3632 if (flag_optimize_sibling_calls)
3633 /* Function incoming arguments may overlap with sibling call
3634 outgoing arguments and we cannot allow reordering of reads
3635 from function arguments with stores to outgoing arguments
3636 of sibling calls. */
3637 set_mem_alias_set (dest, 0);
3639 emit_move_insn (dest, x);
3641 #endif
3643 /* Generate code to push X onto the stack, assuming it has mode MODE and
3644 type TYPE.
3645 MODE is redundant except when X is a CONST_INT (since they don't
3646 carry mode info).
3647 SIZE is an rtx for the size of data to be copied (in bytes),
3648 needed only if X is BLKmode.
3650 ALIGN (in bits) is maximum alignment we can assume.
3652 If PARTIAL and REG are both nonzero, then copy that many of the first
3653 bytes of X into registers starting with REG, and push the rest of X.
3654 The amount of space pushed is decreased by PARTIAL bytes.
3655 REG must be a hard register in this case.
3656 If REG is zero but PARTIAL is not, take all other actions for an
3657 argument partially in registers, but do not actually load any
3658 registers.
3660 EXTRA is the amount in bytes of extra space to leave next to this arg.
3661 This is ignored if an argument block has already been allocated.
3663 On a machine that lacks real push insns, ARGS_ADDR is the address of
3664 the bottom of the argument block for this call. We use indexing off there
3665 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3666 argument block has not been preallocated.
3668 ARGS_SO_FAR is the size of args previously pushed for this call.
3670 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3671 for arguments passed in registers. If nonzero, it will be the number
3672 of bytes required. */
3674 void
3675 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3676 unsigned int align, int partial, rtx reg, int extra,
3677 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3678 rtx alignment_pad)
3680 rtx xinner;
3681 enum direction stack_direction
3682 #ifdef STACK_GROWS_DOWNWARD
3683 = downward;
3684 #else
3685 = upward;
3686 #endif
3688 /* Decide where to pad the argument: `downward' for below,
3689 `upward' for above, or `none' for don't pad it.
3690 Default is below for small data on big-endian machines; else above. */
3691 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3693 /* Invert direction if stack is post-decrement.
3694 FIXME: why? */
3695 if (STACK_PUSH_CODE == POST_DEC)
3696 if (where_pad != none)
3697 where_pad = (where_pad == downward ? upward : downward);
3699 xinner = x;
3701 if (mode == BLKmode
3702 || (STRICT_ALIGNMENT && align < GET_MODE_ALIGNMENT (mode)))
3704 /* Copy a block into the stack, entirely or partially. */
3706 rtx temp;
3707 int used;
3708 int offset;
3709 int skip;
3711 offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3712 used = partial - offset;
3714 if (mode != BLKmode)
3716 /* A value is to be stored in an insufficiently aligned
3717 stack slot; copy via a suitably aligned slot if
3718 necessary. */
3719 size = GEN_INT (GET_MODE_SIZE (mode));
3720 if (!MEM_P (xinner))
3722 temp = assign_temp (type, 0, 1, 1);
3723 emit_move_insn (temp, xinner);
3724 xinner = temp;
3728 gcc_assert (size);
3730 /* USED is now the # of bytes we need not copy to the stack
3731 because registers will take care of them. */
3733 if (partial != 0)
3734 xinner = adjust_address (xinner, BLKmode, used);
3736 /* If the partial register-part of the arg counts in its stack size,
3737 skip the part of stack space corresponding to the registers.
3738 Otherwise, start copying to the beginning of the stack space,
3739 by setting SKIP to 0. */
3740 skip = (reg_parm_stack_space == 0) ? 0 : used;
3742 #ifdef PUSH_ROUNDING
3743 /* Do it with several push insns if that doesn't take lots of insns
3744 and if there is no difficulty with push insns that skip bytes
3745 on the stack for alignment purposes. */
3746 if (args_addr == 0
3747 && PUSH_ARGS
3748 && GET_CODE (size) == CONST_INT
3749 && skip == 0
3750 && MEM_ALIGN (xinner) >= align
3751 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3752 /* Here we avoid the case of a structure whose weak alignment
3753 forces many pushes of a small amount of data,
3754 and such small pushes do rounding that causes trouble. */
3755 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3756 || align >= BIGGEST_ALIGNMENT
3757 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3758 == (align / BITS_PER_UNIT)))
3759 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3761 /* Push padding now if padding above and stack grows down,
3762 or if padding below and stack grows up.
3763 But if space already allocated, this has already been done. */
3764 if (extra && args_addr == 0
3765 && where_pad != none && where_pad != stack_direction)
3766 anti_adjust_stack (GEN_INT (extra));
3768 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3770 else
3771 #endif /* PUSH_ROUNDING */
3773 rtx target;
3775 /* Otherwise make space on the stack and copy the data
3776 to the address of that space. */
3778 /* Deduct bytes put into registers from the size we must copy. */
3779 if (partial != 0)
3781 if (GET_CODE (size) == CONST_INT)
3782 size = GEN_INT (INTVAL (size) - used);
3783 else
3784 size = expand_binop (GET_MODE (size), sub_optab, size,
3785 GEN_INT (used), NULL_RTX, 0,
3786 OPTAB_LIB_WIDEN);
3789 /* Get the address of the stack space.
3790 In this case, we do not deal with EXTRA separately.
3791 A single stack adjust will do. */
3792 if (! args_addr)
3794 temp = push_block (size, extra, where_pad == downward);
3795 extra = 0;
3797 else if (GET_CODE (args_so_far) == CONST_INT)
3798 temp = memory_address (BLKmode,
3799 plus_constant (args_addr,
3800 skip + INTVAL (args_so_far)));
3801 else
3802 temp = memory_address (BLKmode,
3803 plus_constant (gen_rtx_PLUS (Pmode,
3804 args_addr,
3805 args_so_far),
3806 skip));
3808 if (!ACCUMULATE_OUTGOING_ARGS)
3810 /* If the source is referenced relative to the stack pointer,
3811 copy it to another register to stabilize it. We do not need
3812 to do this if we know that we won't be changing sp. */
3814 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3815 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3816 temp = copy_to_reg (temp);
3819 target = gen_rtx_MEM (BLKmode, temp);
3821 /* We do *not* set_mem_attributes here, because incoming arguments
3822 may overlap with sibling call outgoing arguments and we cannot
3823 allow reordering of reads from function arguments with stores
3824 to outgoing arguments of sibling calls. We do, however, want
3825 to record the alignment of the stack slot. */
3826 /* ALIGN may well be better aligned than TYPE, e.g. due to
3827 PARM_BOUNDARY. Assume the caller isn't lying. */
3828 set_mem_align (target, align);
3830 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3833 else if (partial > 0)
3835 /* Scalar partly in registers. */
3837 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3838 int i;
3839 int not_stack;
3840 /* # bytes of start of argument
3841 that we must make space for but need not store. */
3842 int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3843 int args_offset = INTVAL (args_so_far);
3844 int skip;
3846 /* Push padding now if padding above and stack grows down,
3847 or if padding below and stack grows up.
3848 But if space already allocated, this has already been done. */
3849 if (extra && args_addr == 0
3850 && where_pad != none && where_pad != stack_direction)
3851 anti_adjust_stack (GEN_INT (extra));
3853 /* If we make space by pushing it, we might as well push
3854 the real data. Otherwise, we can leave OFFSET nonzero
3855 and leave the space uninitialized. */
3856 if (args_addr == 0)
3857 offset = 0;
3859 /* Now NOT_STACK gets the number of words that we don't need to
3860 allocate on the stack. Convert OFFSET to words too. */
3861 not_stack = (partial - offset) / UNITS_PER_WORD;
3862 offset /= UNITS_PER_WORD;
3864 /* If the partial register-part of the arg counts in its stack size,
3865 skip the part of stack space corresponding to the registers.
3866 Otherwise, start copying to the beginning of the stack space,
3867 by setting SKIP to 0. */
3868 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3870 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3871 x = validize_mem (force_const_mem (mode, x));
3873 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3874 SUBREGs of such registers are not allowed. */
3875 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3876 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3877 x = copy_to_reg (x);
3879 /* Loop over all the words allocated on the stack for this arg. */
3880 /* We can do it by words, because any scalar bigger than a word
3881 has a size that is a multiple of a word. */
3882 #ifndef PUSH_ARGS_REVERSED
3883 for (i = not_stack; i < size; i++)
3884 #else
3885 for (i = size - 1; i >= not_stack; i--)
3886 #endif
3887 if (i >= not_stack + offset)
3888 emit_push_insn (operand_subword_force (x, i, mode),
3889 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3890 0, args_addr,
3891 GEN_INT (args_offset + ((i - not_stack + skip)
3892 * UNITS_PER_WORD)),
3893 reg_parm_stack_space, alignment_pad);
3895 else
3897 rtx addr;
3898 rtx dest;
3900 /* Push padding now if padding above and stack grows down,
3901 or if padding below and stack grows up.
3902 But if space already allocated, this has already been done. */
3903 if (extra && args_addr == 0
3904 && where_pad != none && where_pad != stack_direction)
3905 anti_adjust_stack (GEN_INT (extra));
3907 #ifdef PUSH_ROUNDING
3908 if (args_addr == 0 && PUSH_ARGS)
3909 emit_single_push_insn (mode, x, type);
3910 else
3911 #endif
3913 if (GET_CODE (args_so_far) == CONST_INT)
3914 addr
3915 = memory_address (mode,
3916 plus_constant (args_addr,
3917 INTVAL (args_so_far)));
3918 else
3919 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3920 args_so_far));
3921 dest = gen_rtx_MEM (mode, addr);
3923 /* We do *not* set_mem_attributes here, because incoming arguments
3924 may overlap with sibling call outgoing arguments and we cannot
3925 allow reordering of reads from function arguments with stores
3926 to outgoing arguments of sibling calls. We do, however, want
3927 to record the alignment of the stack slot. */
3928 /* ALIGN may well be better aligned than TYPE, e.g. due to
3929 PARM_BOUNDARY. Assume the caller isn't lying. */
3930 set_mem_align (dest, align);
3932 emit_move_insn (dest, x);
3936 /* If part should go in registers, copy that part
3937 into the appropriate registers. Do this now, at the end,
3938 since mem-to-mem copies above may do function calls. */
3939 if (partial > 0 && reg != 0)
3941 /* Handle calls that pass values in multiple non-contiguous locations.
3942 The Irix 6 ABI has examples of this. */
3943 if (GET_CODE (reg) == PARALLEL)
3944 emit_group_load (reg, x, type, -1);
3945 else
3947 gcc_assert (partial % UNITS_PER_WORD == 0);
3948 move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode);
3952 if (extra && args_addr == 0 && where_pad == stack_direction)
3953 anti_adjust_stack (GEN_INT (extra));
3955 if (alignment_pad && args_addr == 0)
3956 anti_adjust_stack (alignment_pad);
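/* Illustrative example (editorial, with assumed parameters): on a
   target with 4-byte words and PARM_BOUNDARY == 32, pushing a 12-byte
   scalar with PARTIAL == 8 and REG_PARM_STACK_SPACE == 0 gives
   NOT_STACK == 2, so the word loop above pushes only word 2 of X;
   words 0 and 1 are loaded into registers at the end by
   move_block_to_reg (partial / UNITS_PER_WORD == 2 words).  */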
3959 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3960 operations. */
3962 static rtx
3963 get_subtarget (rtx x)
3965 return (optimize
3966 || x == 0
3967 /* Only registers can be subtargets. */
3968 || !REG_P (x)
3969 /* Don't use hard regs to avoid extending their life. */
3970 || REGNO (x) < FIRST_PSEUDO_REGISTER
3971 ? 0 : x);
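/* Editorial example: for "x = x + y" with X living in a pseudo
   register, get_subtarget (DECL_RTL (x)) returns that pseudo when not
   optimizing, so the addition can compute directly into it.  With
   optimization enabled, or for hard registers, it returns 0 and a
   fresh pseudo is used instead, leaving later passes free to
   rearrange the computation.  */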
3974 /* A subroutine of expand_assignment. Optimize FIELD op= VAL, where
3975 FIELD is a bitfield. Returns true if the optimization was successful,
3976 and there's nothing else to do. */
3978 static bool
3979 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
3980 unsigned HOST_WIDE_INT bitpos,
3981 enum machine_mode mode1, rtx str_rtx,
3982 tree to, tree src)
3984 enum machine_mode str_mode = GET_MODE (str_rtx);
3985 unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
3986 tree op0, op1;
3987 rtx value, result;
3988 optab binop;
3990 if (mode1 != VOIDmode
3991 || bitsize >= BITS_PER_WORD
3992 || str_bitsize > BITS_PER_WORD
3993 || TREE_SIDE_EFFECTS (to)
3994 || TREE_THIS_VOLATILE (to))
3995 return false;
3997 STRIP_NOPS (src);
3998 if (!BINARY_CLASS_P (src)
3999 || TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
4000 return false;
4002 op0 = TREE_OPERAND (src, 0);
4003 op1 = TREE_OPERAND (src, 1);
4004 STRIP_NOPS (op0);
4006 if (!operand_equal_p (to, op0, 0))
4007 return false;
4009 if (MEM_P (str_rtx))
4011 unsigned HOST_WIDE_INT offset1;
4013 if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
4014 str_mode = word_mode;
4015 str_mode = get_best_mode (bitsize, bitpos,
4016 MEM_ALIGN (str_rtx), str_mode, 0);
4017 if (str_mode == VOIDmode)
4018 return false;
4019 str_bitsize = GET_MODE_BITSIZE (str_mode);
4021 offset1 = bitpos;
4022 bitpos %= str_bitsize;
4023 offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
4024 str_rtx = adjust_address (str_rtx, str_mode, offset1);
4026 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
4027 return false;
4029 /* If the bit field covers the whole REG/MEM, store_field
4030 will likely generate better code. */
4031 if (bitsize >= str_bitsize)
4032 return false;
4034 /* We can't handle fields split across multiple entities. */
4035 if (bitpos + bitsize > str_bitsize)
4036 return false;
4038 if (BYTES_BIG_ENDIAN)
4039 bitpos = str_bitsize - bitpos - bitsize;
4041 switch (TREE_CODE (src))
4043 case PLUS_EXPR:
4044 case MINUS_EXPR:
4045 /* For now, just optimize the case of the topmost bitfield
4046 where we don't need to do any masking and also
4047 1-bit bitfields where xor can be used.
4048 We might win by one instruction for the other bitfields
4049 too if insv/extv instructions aren't used, so that
4050 can be added later. */
4051 if (bitpos + bitsize != str_bitsize
4052 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
4053 break;
4055 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4056 value = convert_modes (str_mode,
4057 TYPE_MODE (TREE_TYPE (op1)), value,
4058 TYPE_UNSIGNED (TREE_TYPE (op1)));
4060 /* We may be accessing data outside the field, which means
4061 we can alias adjacent data. */
4062 if (MEM_P (str_rtx))
4064 str_rtx = shallow_copy_rtx (str_rtx);
4065 set_mem_alias_set (str_rtx, 0);
4066 set_mem_expr (str_rtx, 0);
4069 binop = TREE_CODE (src) == PLUS_EXPR ? add_optab : sub_optab;
4070 if (bitsize == 1 && bitpos + bitsize != str_bitsize)
4072 value = expand_and (str_mode, value, const1_rtx, NULL);
4073 binop = xor_optab;
4075 value = expand_shift (LSHIFT_EXPR, str_mode, value,
4076 build_int_cst (NULL_TREE, bitpos),
4077 NULL_RTX, 1);
4078 result = expand_binop (str_mode, binop, str_rtx,
4079 value, str_rtx, 1, OPTAB_WIDEN);
4080 if (result != str_rtx)
4081 emit_move_insn (str_rtx, result);
4082 return true;
4084 case BIT_IOR_EXPR:
4085 case BIT_XOR_EXPR:
4086 if (TREE_CODE (op1) != INTEGER_CST)
4087 break;
4088 value = expand_expr (op1, NULL_RTX, GET_MODE (str_rtx), EXPAND_NORMAL);
4089 value = convert_modes (GET_MODE (str_rtx),
4090 TYPE_MODE (TREE_TYPE (op1)), value,
4091 TYPE_UNSIGNED (TREE_TYPE (op1)));
4093 /* We may be accessing data outside the field, which means
4094 we can alias adjacent data. */
4095 if (MEM_P (str_rtx))
4097 str_rtx = shallow_copy_rtx (str_rtx);
4098 set_mem_alias_set (str_rtx, 0);
4099 set_mem_expr (str_rtx, 0);
4102 binop = TREE_CODE (src) == BIT_IOR_EXPR ? ior_optab : xor_optab;
4103 if (bitpos + bitsize != GET_MODE_BITSIZE (GET_MODE (str_rtx)))
4105 rtx mask = GEN_INT (((unsigned HOST_WIDE_INT) 1 << bitsize)
4106 - 1);
4107 value = expand_and (GET_MODE (str_rtx), value, mask,
4108 NULL_RTX);
4110 value = expand_shift (LSHIFT_EXPR, GET_MODE (str_rtx), value,
4111 build_int_cst (NULL_TREE, bitpos),
4112 NULL_RTX, 1);
4113 result = expand_binop (GET_MODE (str_rtx), binop, str_rtx,
4114 value, str_rtx, 1, OPTAB_WIDEN);
4115 if (result != str_rtx)
4116 emit_move_insn (str_rtx, result);
4117 return true;
4119 default:
4120 break;
4123 return false;
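/* Editorial example of source code the optimization above applies to:

     struct S { unsigned int f : 1; unsigned int g : 7; } s;
     void flip (void) { s.f ^= 1; }

   The one-bit field F is updated by XORing the containing word with a
   shifted constant, with no extract/insert sequence.  Likewise, a
   PLUS_EXPR on the topmost bitfield of the word needs no masking,
   because the carry simply falls off the end.  */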
4127 /* Expand an assignment that stores the value of FROM into TO. If NONTEMPORAL
4128 is true, try generating a nontemporal store. */
4130 void
4131 expand_assignment (tree to, tree from, bool nontemporal)
4133 rtx to_rtx = 0;
4134 rtx result;
4136 /* Don't crash if the lhs of the assignment was erroneous. */
4137 if (TREE_CODE (to) == ERROR_MARK)
4139 result = expand_normal (from);
4140 return;
4143 /* Optimize away no-op moves without side-effects. */
4144 if (operand_equal_p (to, from, 0))
4145 return;
4147 /* Assignment of a structure component needs special treatment
4148 if the structure component's rtx is not simply a MEM.
4149 Assignment of an array element at a constant index, and assignment of
4150 an array element in an unaligned packed structure field, has the same
4151 problem. */
4152 if (handled_component_p (to)
4153 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4155 enum machine_mode mode1;
4156 HOST_WIDE_INT bitsize, bitpos;
4157 tree offset;
4158 int unsignedp;
4159 int volatilep = 0;
4160 tree tem;
4162 push_temp_slots ();
4163 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4164 &unsignedp, &volatilep, true);
4166 /* If we are going to use store_bit_field and extract_bit_field,
4167 make sure to_rtx will be safe for multiple use. */
4169 to_rtx = expand_normal (tem);
4171 if (offset != 0)
4173 rtx offset_rtx;
4175 if (!MEM_P (to_rtx))
4177 /* We can get constant negative offsets into arrays with broken
4178 user code. Translate this to a trap instead of ICEing. */
4179 gcc_assert (TREE_CODE (offset) == INTEGER_CST);
4180 expand_builtin_trap ();
4181 to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
4184 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4185 #ifdef POINTERS_EXTEND_UNSIGNED
4186 if (GET_MODE (offset_rtx) != Pmode)
4187 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4188 #else
4189 if (GET_MODE (offset_rtx) != ptr_mode)
4190 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4191 #endif
4193 /* A constant address in TO_RTX can have VOIDmode, we must not try
4194 to call force_reg for that case. Avoid that case. */
4195 if (MEM_P (to_rtx)
4196 && GET_MODE (to_rtx) == BLKmode
4197 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
4198 && bitsize > 0
4199 && (bitpos % bitsize) == 0
4200 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4201 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
4203 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4204 bitpos = 0;
4207 to_rtx = offset_address (to_rtx, offset_rtx,
4208 highest_pow2_factor_for_target (to,
4209 offset));
4212 /* Handle expand_expr of a complex value returning a CONCAT. */
4213 if (GET_CODE (to_rtx) == CONCAT)
4215 if (TREE_CODE (TREE_TYPE (from)) == COMPLEX_TYPE)
4217 gcc_assert (bitpos == 0);
4218 result = store_expr (from, to_rtx, false, nontemporal);
4220 else
4222 gcc_assert (bitpos == 0 || bitpos == GET_MODE_BITSIZE (mode1));
4223 result = store_expr (from, XEXP (to_rtx, bitpos != 0), false,
4224 nontemporal);
4227 else
4229 if (MEM_P (to_rtx))
4231 /* If the field is at offset zero, we could have been given the
4232 DECL_RTX of the parent struct. Don't munge it. */
4233 to_rtx = shallow_copy_rtx (to_rtx);
4235 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4237 /* Deal with volatile and readonly fields. The former is only
4238 done for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
4239 if (volatilep)
4240 MEM_VOLATILE_P (to_rtx) = 1;
4241 if (component_uses_parent_alias_set (to))
4242 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4245 if (optimize_bitfield_assignment_op (bitsize, bitpos, mode1,
4246 to_rtx, to, from))
4247 result = NULL;
4248 else
4249 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
4250 TREE_TYPE (tem), get_alias_set (to),
4251 nontemporal);
4254 if (result)
4255 preserve_temp_slots (result);
4256 free_temp_slots ();
4257 pop_temp_slots ();
4258 return;
4261 /* If the rhs is a function call and its value is not an aggregate,
4262 call the function before we start to compute the lhs.
4263 This is needed for correct code for cases such as
4264 val = setjmp (buf) on machines where reference to val
4265 requires loading up part of an address in a separate insn.
4267 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4268 since it might be a promoted variable where the zero- or sign-extension
4269 needs to be done. Handling this in the normal way is safe because no
4270 computation is done before the call. */
4271 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
4272 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
4273 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
4274 && REG_P (DECL_RTL (to))))
4276 rtx value;
4278 push_temp_slots ();
4279 value = expand_normal (from);
4280 if (to_rtx == 0)
4281 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4283 /* Handle calls that return values in multiple non-contiguous locations.
4284 The Irix 6 ABI has examples of this. */
4285 if (GET_CODE (to_rtx) == PARALLEL)
4286 emit_group_load (to_rtx, value, TREE_TYPE (from),
4287 int_size_in_bytes (TREE_TYPE (from)));
4288 else if (GET_MODE (to_rtx) == BLKmode)
4289 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
4290 else
4292 if (POINTER_TYPE_P (TREE_TYPE (to)))
4293 value = convert_memory_address (GET_MODE (to_rtx), value);
4294 emit_move_insn (to_rtx, value);
4296 preserve_temp_slots (to_rtx);
4297 free_temp_slots ();
4298 pop_temp_slots ();
4299 return;
4302 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
4303 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
4305 if (to_rtx == 0)
4306 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4308 /* Don't move directly into a return register. */
4309 if (TREE_CODE (to) == RESULT_DECL
4310 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
4312 rtx temp;
4314 push_temp_slots ();
4315 temp = expand_expr (from, NULL_RTX, GET_MODE (to_rtx), EXPAND_NORMAL);
4317 if (GET_CODE (to_rtx) == PARALLEL)
4318 emit_group_load (to_rtx, temp, TREE_TYPE (from),
4319 int_size_in_bytes (TREE_TYPE (from)));
4320 else
4321 emit_move_insn (to_rtx, temp);
4323 preserve_temp_slots (to_rtx);
4324 free_temp_slots ();
4325 pop_temp_slots ();
4326 return;
4329 /* In case we are returning the contents of an object which overlaps
4330 the place the value is being stored, use a safe function when copying
4331 a value through a pointer into a structure value return block. */
4332 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
4333 && current_function_returns_struct
4334 && !current_function_returns_pcc_struct)
4336 rtx from_rtx, size;
4338 push_temp_slots ();
4339 size = expr_size (from);
4340 from_rtx = expand_normal (from);
4342 emit_library_call (memmove_libfunc, LCT_NORMAL,
4343 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4344 XEXP (from_rtx, 0), Pmode,
4345 convert_to_mode (TYPE_MODE (sizetype),
4346 size, TYPE_UNSIGNED (sizetype)),
4347 TYPE_MODE (sizetype));
4349 preserve_temp_slots (to_rtx);
4350 free_temp_slots ();
4351 pop_temp_slots ();
4352 return;
4355 /* Compute FROM and store the value in the rtx we got. */
4357 push_temp_slots ();
4358 result = store_expr (from, to_rtx, 0, nontemporal);
4359 preserve_temp_slots (result);
4360 free_temp_slots ();
4361 pop_temp_slots ();
4362 return;
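/* Editorial example: for

     val = setjmp (buf);

   the CALL_EXPR branch above expands the call first and computes the
   lhs afterwards; on machines where referring to VAL needs part of an
   address loaded in a separate insn, expanding the lhs first would be
   unsafe around setjmp.  */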
4365 /* Emits nontemporal store insn that moves FROM to TO. Returns true if this
4366 succeeded, false otherwise. */
4368 static bool
4369 emit_storent_insn (rtx to, rtx from)
4371 enum machine_mode mode = GET_MODE (to), imode;
4372 enum insn_code code = optab_handler (storent_optab, mode)->insn_code;
4373 rtx pattern;
4375 if (code == CODE_FOR_nothing)
4376 return false;
4378 imode = insn_data[code].operand[0].mode;
4379 if (!insn_data[code].operand[0].predicate (to, imode))
4380 return false;
4382 imode = insn_data[code].operand[1].mode;
4383 if (!insn_data[code].operand[1].predicate (from, imode))
4385 from = copy_to_mode_reg (imode, from);
4386 if (!insn_data[code].operand[1].predicate (from, imode))
4387 return false;
4390 pattern = GEN_FCN (code) (to, from);
4391 if (pattern == NULL_RTX)
4392 return false;
4394 emit_insn (pattern);
4395 return true;
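/* Editorial note: emit_storent_insn can only succeed on targets whose
   machine description provides a storent<mode> pattern for
   non-temporal stores (on x86 these map to the SSE movnt*
   instructions; that mapping is an assumption for illustration).
   Callers therefore keep a fallback, roughly:

     if (!(nontemporal && emit_storent_insn (target, temp)))
       emit_move_insn (target, temp);    -- fall back to a plain store

   which is what store_expr below does.  */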
4398 /* Generate code for computing expression EXP,
4399 and storing the value into TARGET.
4401 If the mode is BLKmode then we may return TARGET itself.
4402 It turns out that in BLKmode it doesn't cause a problem,
4403 because C has no operators that could combine two different
4404 assignments into the same BLKmode object with different values
4405 with no sequence point. Will other languages need this to
4406 be more thorough?
4408 If CALL_PARAM_P is nonzero, this is a store into a call param on the
4409 stack, and block moves may need to be treated specially.
4411 If NONTEMPORAL is true, try using a nontemporal store instruction. */
4413 rtx
4414 store_expr (tree exp, rtx target, int call_param_p, bool nontemporal)
4416 rtx temp;
4417 rtx alt_rtl = NULL_RTX;
4418 int dont_return_target = 0;
4420 if (VOID_TYPE_P (TREE_TYPE (exp)))
4422 /* C++ can generate ?: expressions with a throw expression in one
4423 branch and an rvalue in the other. Here, we resolve attempts to
4424 store the throw expression's nonexistent result. */
4425 gcc_assert (!call_param_p);
4426 expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
4427 return NULL_RTX;
4429 if (TREE_CODE (exp) == COMPOUND_EXPR)
4431 /* Perform first part of compound expression, then assign from second
4432 part. */
4433 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4434 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4435 return store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
4436 nontemporal);
4438 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4440 /* For conditional expression, get safe form of the target. Then
4441 test the condition, doing the appropriate assignment on either
4442 side. This avoids the creation of unnecessary temporaries.
4443 For non-BLKmode, it is more efficient not to do this. */
4445 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4447 do_pending_stack_adjust ();
4448 NO_DEFER_POP;
4449 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4450 store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
4451 nontemporal);
4452 emit_jump_insn (gen_jump (lab2));
4453 emit_barrier ();
4454 emit_label (lab1);
4455 store_expr (TREE_OPERAND (exp, 2), target, call_param_p,
4456 nontemporal);
4457 emit_label (lab2);
4458 OK_DEFER_POP;
4460 return NULL_RTX;
4462 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4463 /* If this is a scalar in a register that is stored in a wider mode
4464 than the declared mode, compute the result into its declared mode
4465 and then convert to the wider mode. Our value is the computed
4466 expression. */
4468 rtx inner_target = 0;
4470 /* We can do the conversion inside EXP, which will often result
4471 in some optimizations. Do the conversion in two steps: first
4472 change the signedness, if needed, then the extend. But don't
4473 do this if the type of EXP is a subtype of something else
4474 since then the conversion might involve more than just
4475 converting modes. */
4476 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
4477 && TREE_TYPE (TREE_TYPE (exp)) == 0
4478 && (!lang_hooks.reduce_bit_field_operations
4479 || (GET_MODE_PRECISION (GET_MODE (target))
4480 == TYPE_PRECISION (TREE_TYPE (exp)))))
4482 if (TYPE_UNSIGNED (TREE_TYPE (exp))
4483 != SUBREG_PROMOTED_UNSIGNED_P (target))
4485 /* Some types, e.g. Fortran's logical*4, won't have a signed
4486 version, so use the mode instead. */
4487 tree ntype
4488 = (signed_or_unsigned_type_for
4489 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)));
4490 if (ntype == NULL)
4491 ntype = lang_hooks.types.type_for_mode
4492 (TYPE_MODE (TREE_TYPE (exp)),
4493 SUBREG_PROMOTED_UNSIGNED_P (target));
4495 exp = fold_convert (ntype, exp);
4498 exp = fold_convert (lang_hooks.types.type_for_mode
4499 (GET_MODE (SUBREG_REG (target)),
4500 SUBREG_PROMOTED_UNSIGNED_P (target)),
4501 exp);
4503 inner_target = SUBREG_REG (target);
4506 temp = expand_expr (exp, inner_target, VOIDmode,
4507 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4509 /* If TEMP is a VOIDmode constant, use convert_modes to make
4510 sure that we properly convert it. */
4511 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4513 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4514 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4515 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4516 GET_MODE (target), temp,
4517 SUBREG_PROMOTED_UNSIGNED_P (target));
4520 convert_move (SUBREG_REG (target), temp,
4521 SUBREG_PROMOTED_UNSIGNED_P (target));
4523 return NULL_RTX;
4525 else if (TREE_CODE (exp) == STRING_CST
4526 && !nontemporal && !call_param_p
4527 && TREE_STRING_LENGTH (exp) > 0
4528 && TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
4530 /* Optimize initialization of an array with a STRING_CST. */
4531 HOST_WIDE_INT exp_len, str_copy_len;
4532 rtx dest_mem;
4534 exp_len = int_expr_size (exp);
4535 if (exp_len <= 0)
4536 goto normal_expr;
4538 str_copy_len = strlen (TREE_STRING_POINTER (exp));
4539 if (str_copy_len < TREE_STRING_LENGTH (exp) - 1)
4540 goto normal_expr;
4542 str_copy_len = TREE_STRING_LENGTH (exp);
4543 if ((STORE_MAX_PIECES & (STORE_MAX_PIECES - 1)) == 0)
4545 str_copy_len += STORE_MAX_PIECES - 1;
4546 str_copy_len &= ~(STORE_MAX_PIECES - 1);
4548 str_copy_len = MIN (str_copy_len, exp_len);
4549 if (!can_store_by_pieces (str_copy_len, builtin_strncpy_read_str,
4550 (void *) TREE_STRING_POINTER (exp),
4551 MEM_ALIGN (target), false))
4552 goto normal_expr;
4554 dest_mem = target;
4556 dest_mem = store_by_pieces (dest_mem,
4557 str_copy_len, builtin_strncpy_read_str,
4558 (void *) TREE_STRING_POINTER (exp),
4559 MEM_ALIGN (target), false,
4560 exp_len > str_copy_len ? 1 : 0);
4561 if (exp_len > str_copy_len)
4562 clear_storage (adjust_address (dest_mem, BLKmode, 0),
4563 GEN_INT (exp_len - str_copy_len),
4564 BLOCK_OP_NORMAL);
4565 return NULL_RTX;
4567 else
4569 rtx tmp_target;
4571 normal_expr:
4572 /* If we want to use a nontemporal store, force the value to
4573 register first. */
4574 tmp_target = nontemporal ? NULL_RTX : target;
4575 temp = expand_expr_real (exp, tmp_target, GET_MODE (target),
4576 (call_param_p
4577 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
4578 &alt_rtl);
4579 /* Return TARGET if it's a specified hardware register.
4580 If TARGET is a volatile mem ref, either return TARGET
4581 or return a reg copied *from* TARGET; ANSI requires this.
4583 Otherwise, if TEMP is not TARGET, return TEMP
4584 if it is constant (for efficiency),
4585 or if we really want the correct value. */
4586 if (!(target && REG_P (target)
4587 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4588 && !(MEM_P (target) && MEM_VOLATILE_P (target))
4589 && ! rtx_equal_p (temp, target)
4590 && CONSTANT_P (temp))
4591 dont_return_target = 1;
4594 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4595 the same as that of TARGET, adjust the constant. This is needed, for
4596 example, in case it is a CONST_DOUBLE and we want only a word-sized
4597 value. */
4598 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4599 && TREE_CODE (exp) != ERROR_MARK
4600 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4601 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4602 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
4604 /* If value was not generated in the target, store it there.
4605 Convert the value to TARGET's type first if necessary and emit the
4606 pending incrementations that have been queued when expanding EXP.
4607 Note that we cannot emit the whole queue blindly because this will
4608 effectively disable the POST_INC optimization later.
4610 If TEMP and TARGET compare equal according to rtx_equal_p, but
4611 one or both of them are volatile memory refs, we have to distinguish
4612 two cases:
4613 - expand_expr has used TARGET. In this case, we must not generate
4614 another copy. This can be detected by TARGET being equal according
4615 to == .
4616 - expand_expr has not used TARGET - that means that the source just
4617 happens to have the same RTX form. Since temp will have been created
4618 by expand_expr, it will compare unequal according to == .
4619 We must generate a copy in this case, to reach the correct number
4620 of volatile memory references. */
4622 if ((! rtx_equal_p (temp, target)
4623 || (temp != target && (side_effects_p (temp)
4624 || side_effects_p (target))))
4625 && TREE_CODE (exp) != ERROR_MARK
4626 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4627 but TARGET is not valid memory reference, TEMP will differ
4628 from TARGET although it is really the same location. */
4629 && !(alt_rtl && rtx_equal_p (alt_rtl, target))
4630 /* If there's nothing to copy, don't bother. Don't call
4631 expr_size unless necessary, because some front-ends (C++)
4632 expr_size-hook must not be given objects that are not
4633 supposed to be bit-copied or bit-initialized. */
4634 && expr_size (exp) != const0_rtx)
4636 if (GET_MODE (temp) != GET_MODE (target)
4637 && GET_MODE (temp) != VOIDmode)
4639 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
4640 if (dont_return_target)
4642 /* In this case, we will return TEMP,
4643 so make sure it has the proper mode.
4644 But don't forget to store the value into TARGET. */
4645 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4646 emit_move_insn (target, temp);
4648 else if (GET_MODE (target) == BLKmode)
4649 emit_block_move (target, temp, expr_size (exp),
4650 (call_param_p
4651 ? BLOCK_OP_CALL_PARM
4652 : BLOCK_OP_NORMAL));
4653 else
4654 convert_move (target, temp, unsignedp);
4657 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4659 /* Handle copying a string constant into an array. The string
4660 constant may be shorter than the array. So copy just the string's
4661 actual length, and clear the rest. First get the size of the data
4662 type of the string, which is actually the size of the target. */
4663 rtx size = expr_size (exp);
4665 if (GET_CODE (size) == CONST_INT
4666 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4667 emit_block_move (target, temp, size,
4668 (call_param_p
4669 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4670 else
4672 /* Compute the size of the data to copy from the string. */
4673 tree copy_size
4674 = size_binop (MIN_EXPR,
4675 make_tree (sizetype, size),
4676 size_int (TREE_STRING_LENGTH (exp)));
4677 rtx copy_size_rtx
4678 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4679 (call_param_p
4680 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4681 rtx label = 0;
4683 /* Copy that much. */
4684 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4685 TYPE_UNSIGNED (sizetype));
4686 emit_block_move (target, temp, copy_size_rtx,
4687 (call_param_p
4688 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4690 /* Figure out how much is left in TARGET that we have to clear.
4691 Do all calculations in ptr_mode. */
4692 if (GET_CODE (copy_size_rtx) == CONST_INT)
4694 size = plus_constant (size, -INTVAL (copy_size_rtx));
4695 target = adjust_address (target, BLKmode,
4696 INTVAL (copy_size_rtx));
4698 else
4700 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4701 copy_size_rtx, NULL_RTX, 0,
4702 OPTAB_LIB_WIDEN);
4704 #ifdef POINTERS_EXTEND_UNSIGNED
4705 if (GET_MODE (copy_size_rtx) != Pmode)
4706 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4707 TYPE_UNSIGNED (sizetype));
4708 #endif
4710 target = offset_address (target, copy_size_rtx,
4711 highest_pow2_factor (copy_size));
4712 label = gen_label_rtx ();
4713 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4714 GET_MODE (size), 0, label);
4717 if (size != const0_rtx)
4718 clear_storage (target, size, BLOCK_OP_NORMAL);
4720 if (label)
4721 emit_label (label);
4724 /* Handle calls that return values in multiple non-contiguous locations.
4725 The Irix 6 ABI has examples of this. */
4726 else if (GET_CODE (target) == PARALLEL)
4727 emit_group_load (target, temp, TREE_TYPE (exp),
4728 int_size_in_bytes (TREE_TYPE (exp)));
4729 else if (GET_MODE (temp) == BLKmode)
4730 emit_block_move (target, temp, expr_size (exp),
4731 (call_param_p
4732 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4733 else if (nontemporal
4734 && emit_storent_insn (target, temp))
4735 /* If we managed to emit a nontemporal store, there is nothing else to
4736 do. */
4738 else
4740 temp = force_operand (temp, target);
4741 if (temp != target)
4742 emit_move_insn (target, temp);
4746 return NULL_RTX;
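/* Editorial example for the STRING_CST fast path above: for

     char buf[16] = "abc";

   EXP_LEN is 16 and the string contributes 4 bytes (including the
   terminating NUL), so store_by_pieces copies a small rounded-up
   prefix and clear_storage zeroes the remaining tail of BUF, instead
   of block-copying 16 bytes from a padded constant.  */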
4749 /* Helper for categorize_ctor_elements. Identical interface. */
4751 static bool
4752 categorize_ctor_elements_1 (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
4753 HOST_WIDE_INT *p_elt_count,
4754 bool *p_must_clear)
4756 unsigned HOST_WIDE_INT idx;
4757 HOST_WIDE_INT nz_elts, elt_count;
4758 tree value, purpose;
4760 /* Whether CTOR is a valid constant initializer, in accordance with what
4761 initializer_constant_valid_p does. If inferred from the constructor
4762 elements, true until proven otherwise. */
4763 bool const_from_elts_p = constructor_static_from_elts_p (ctor);
4764 bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);
4766 nz_elts = 0;
4767 elt_count = 0;
4769 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
4771 HOST_WIDE_INT mult;
4773 mult = 1;
4774 if (TREE_CODE (purpose) == RANGE_EXPR)
4776 tree lo_index = TREE_OPERAND (purpose, 0);
4777 tree hi_index = TREE_OPERAND (purpose, 1);
4779 if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
4780 mult = (tree_low_cst (hi_index, 1)
4781 - tree_low_cst (lo_index, 1) + 1);
4784 switch (TREE_CODE (value))
4786 case CONSTRUCTOR:
4788 HOST_WIDE_INT nz = 0, ic = 0;
4790 bool const_elt_p
4791 = categorize_ctor_elements_1 (value, &nz, &ic, p_must_clear);
4793 nz_elts += mult * nz;
4794 elt_count += mult * ic;
4796 if (const_from_elts_p && const_p)
4797 const_p = const_elt_p;
4799 break;
4801 case INTEGER_CST:
4802 case REAL_CST:
4803 case FIXED_CST:
4804 if (!initializer_zerop (value))
4805 nz_elts += mult;
4806 elt_count += mult;
4807 break;
4809 case STRING_CST:
4810 nz_elts += mult * TREE_STRING_LENGTH (value);
4811 elt_count += mult * TREE_STRING_LENGTH (value);
4812 break;
4814 case COMPLEX_CST:
4815 if (!initializer_zerop (TREE_REALPART (value)))
4816 nz_elts += mult;
4817 if (!initializer_zerop (TREE_IMAGPART (value)))
4818 nz_elts += mult;
4819 elt_count += mult;
4820 break;
4822 case VECTOR_CST:
4824 tree v;
4825 for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
4827 if (!initializer_zerop (TREE_VALUE (v)))
4828 nz_elts += mult;
4829 elt_count += mult;
4832 break;
4834 default:
4835 nz_elts += mult;
4836 elt_count += mult;
4838 if (const_from_elts_p && const_p)
4839 const_p = initializer_constant_valid_p (value, TREE_TYPE (value))
4840 != NULL_TREE;
4841 break;
4845 if (!*p_must_clear
4846 && (TREE_CODE (TREE_TYPE (ctor)) == UNION_TYPE
4847 || TREE_CODE (TREE_TYPE (ctor)) == QUAL_UNION_TYPE))
4849 tree init_sub_type;
4850 bool clear_this = true;
4852 if (!VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (ctor)))
4854 /* We don't expect more than one element of the union to be
4855 initialized. Not sure what we should do otherwise... */
4856 gcc_assert (VEC_length (constructor_elt, CONSTRUCTOR_ELTS (ctor))
4857 == 1);
4859 init_sub_type = TREE_TYPE (VEC_index (constructor_elt,
4860 CONSTRUCTOR_ELTS (ctor),
4861 0)->value);
4863 /* ??? We could look at each element of the union, and find the
4864 largest element, which would avoid comparing the size of the
4865 initialized element against any tail padding in the union.
4866 Doesn't seem worth the effort... */
4867 if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (ctor)),
4868 TYPE_SIZE (init_sub_type)) == 1)
4870 /* And now we have to find out if the element itself is fully
4871 constructed. E.g. for union { struct { int a, b; } s; } u
4872 = { .s = { .a = 1 } }. */
4873 if (elt_count == count_type_elements (init_sub_type, false))
4874 clear_this = false;
4878 *p_must_clear = clear_this;
4881 *p_nz_elts += nz_elts;
4882 *p_elt_count += elt_count;
4884 return const_p;
4887 /* Examine CTOR to discover:
4888 * how many scalar fields are set to nonzero values,
4889 and place it in *P_NZ_ELTS;
4890 * how many scalar fields in total are in CTOR,
4891 and place it in *P_ELT_COUNT.
4892 * if a type is a union, and the initializer from the constructor
4893 is not the largest element in the union, then set *P_MUST_CLEAR.
4895 Return whether or not CTOR is a valid static constant initializer, the same
4896 as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0". */
4898 bool
4899 categorize_ctor_elements (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
4900 HOST_WIDE_INT *p_elt_count,
4901 bool *p_must_clear)
4903 *p_nz_elts = 0;
4904 *p_elt_count = 0;
4905 *p_must_clear = false;
4907 return
4908 categorize_ctor_elements_1 (ctor, p_nz_elts, p_elt_count, p_must_clear);
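/* Editorial example: for

     struct S { int a, b, c; };
     struct S s = { 1, 0, 2 };

   categorize_ctor_elements sets *P_ELT_COUNT to 3 and *P_NZ_ELTS to
   2, and returns true, since every element is a valid constant
   initializer.  */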
4911 /* Count the number of scalars in TYPE. Return -1 on overflow or if
4912 TYPE is variable-sized. If ALLOW_FLEXARR is true, don't count a
4913 flexible array member at the end of the structure. */
4915 HOST_WIDE_INT
4916 count_type_elements (const_tree type, bool allow_flexarr)
4918 const HOST_WIDE_INT max = ~((HOST_WIDE_INT)1 << (HOST_BITS_PER_WIDE_INT-1));
4919 switch (TREE_CODE (type))
4921 case ARRAY_TYPE:
4923 tree telts = array_type_nelts (type);
4924 if (telts && host_integerp (telts, 1))
4926 HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1;
4927 HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type), false);
4928 if (n == 0)
4929 return 0;
4930 else if (max / n > m)
4931 return n * m;
4933 return -1;
4936 case RECORD_TYPE:
4938 HOST_WIDE_INT n = 0, t;
4939 tree f;
4941 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
4942 if (TREE_CODE (f) == FIELD_DECL)
4944 t = count_type_elements (TREE_TYPE (f), false);
4945 if (t < 0)
4947 /* Check for structures with flexible array member. */
4948 tree tf = TREE_TYPE (f);
4949 if (allow_flexarr
4950 && TREE_CHAIN (f) == NULL
4951 && TREE_CODE (tf) == ARRAY_TYPE
4952 && TYPE_DOMAIN (tf)
4953 && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
4954 && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
4955 && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
4956 && int_size_in_bytes (type) >= 0)
4957 break;
4959 return -1;
4961 n += t;
4964 return n;
4967 case UNION_TYPE:
4968 case QUAL_UNION_TYPE:
4970 /* Ho hum. How in the world do we guess here? Clearly it isn't
4971 right to count the fields. Guess based on the number of words. */
4972 HOST_WIDE_INT n = int_size_in_bytes (type);
4973 if (n < 0)
4974 return -1;
4975 return n / UNITS_PER_WORD;
4978 case COMPLEX_TYPE:
4979 return 2;
4981 case VECTOR_TYPE:
4982 return TYPE_VECTOR_SUBPARTS (type);
4984 case INTEGER_TYPE:
4985 case REAL_TYPE:
4986 case FIXED_POINT_TYPE:
4987 case ENUMERAL_TYPE:
4988 case BOOLEAN_TYPE:
4989 case POINTER_TYPE:
4990 case OFFSET_TYPE:
4991 case REFERENCE_TYPE:
4992 return 1;
4994 case VOID_TYPE:
4995 case METHOD_TYPE:
4996 case FUNCTION_TYPE:
4997 case LANG_TYPE:
4998 default:
4999 gcc_unreachable ();
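/* Editorial example: count_type_elements works recursively, so for

     struct P { int x, y; };
     struct P pts[4];

   it returns 8 (4 array elements times 2 scalar fields), while a
   union is only estimated from its size in words.  */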
5003 /* Return 1 if EXP contains mostly (3/4) zeros. */
5005 static int
5006 mostly_zeros_p (const_tree exp)
5008 if (TREE_CODE (exp) == CONSTRUCTOR)
5011 HOST_WIDE_INT nz_elts, count, elts;
5012 bool must_clear;
5014 categorize_ctor_elements (exp, &nz_elts, &count, &must_clear);
5015 if (must_clear)
5016 return 1;
5018 elts = count_type_elements (TREE_TYPE (exp), false);
5020 return nz_elts < elts / 4;
5023 return initializer_zerop (exp);
5026 /* Return 1 if EXP contains all zeros. */
5028 static int
5029 all_zeros_p (const_tree exp)
5031 if (TREE_CODE (exp) == CONSTRUCTOR)
5034 HOST_WIDE_INT nz_elts, count;
5035 bool must_clear;
5037 categorize_ctor_elements (exp, &nz_elts, &count, &must_clear);
5038 return nz_elts == 0;
5041 return initializer_zerop (exp);
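/* Editorial example: for

     int v[8] = { 0, 0, 5, 0, 0, 0, 0, 0 };

   only 1 of the 8 scalars is nonzero, so mostly_zeros_p returns 1 and
   store_constructor clears the whole array first, then stores just
   the single nonzero element.  */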
5044 /* Helper function for store_constructor.
5045 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
5046 TYPE is the type of the CONSTRUCTOR, not the element type.
5047 CLEARED is as for store_constructor.
5048 ALIAS_SET is the alias set to use for any stores.
5050 This provides a recursive shortcut back to store_constructor when it isn't
5051 necessary to go through store_field. This is so that we can pass through
5052 the cleared field to let store_constructor know that we may not have to
5053 clear a substructure if the outer structure has already been cleared. */
5055 static void
5056 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
5057 HOST_WIDE_INT bitpos, enum machine_mode mode,
5058 tree exp, tree type, int cleared,
5059 alias_set_type alias_set)
5061 if (TREE_CODE (exp) == CONSTRUCTOR
5062 /* We can only call store_constructor recursively if the size and
5063 bit position are on a byte boundary. */
5064 && bitpos % BITS_PER_UNIT == 0
5065 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
5066 /* If we have a nonzero bitpos for a register target, then we just
5067 let store_field do the bitfield handling. This is unlikely to
5068 generate unnecessary clear instructions anyways. */
5069 && (bitpos == 0 || MEM_P (target)))
5071 if (MEM_P (target))
5072 target
5073 = adjust_address (target,
5074 GET_MODE (target) == BLKmode
5075 || 0 != (bitpos
5076 % GET_MODE_ALIGNMENT (GET_MODE (target)))
5077 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
5080 /* Update the alias set, if required. */
5081 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
5082 && MEM_ALIAS_SET (target) != 0)
5084 target = copy_rtx (target);
5085 set_mem_alias_set (target, alias_set);
5088 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
5090 else
5091 store_field (target, bitsize, bitpos, mode, exp, type, alias_set, false);
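/* Editorial example: for

     struct Inner { int a, b; };
     struct Outer { int tag; struct Inner in; };
     struct Outer o = { 1, { 2, 3 } };

   the value for O.IN is itself a CONSTRUCTOR whose position and size
   fall on byte boundaries, so store_constructor_field recurses
   straight into store_constructor rather than going through
   store_field.  */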
5094 /* Store the value of constructor EXP into the rtx TARGET.
5095 TARGET is either a REG or a MEM; we know it cannot conflict, since
5096 safe_from_p has been called.
5097 CLEARED is true if TARGET is known to have been zero'd.
5098 SIZE is the number of bytes of TARGET we are allowed to modify: this
5099 may not be the same as the size of EXP if we are assigning to a field
5100 which has been packed to exclude padding bits. */
5102 static void
5103 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
5105 tree type = TREE_TYPE (exp);
5106 #ifdef WORD_REGISTER_OPERATIONS
5107 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
5108 #endif
5110 switch (TREE_CODE (type))
5112 case RECORD_TYPE:
5113 case UNION_TYPE:
5114 case QUAL_UNION_TYPE:
5116 unsigned HOST_WIDE_INT idx;
5117 tree field, value;
5119 /* If size is zero or the target is already cleared, do nothing. */
5120 if (size == 0 || cleared)
5121 cleared = 1;
5122 /* We either clear the aggregate or indicate the value is dead. */
5123 else if ((TREE_CODE (type) == UNION_TYPE
5124 || TREE_CODE (type) == QUAL_UNION_TYPE)
5125 && ! CONSTRUCTOR_ELTS (exp))
5126 /* If the constructor is empty, clear the union. */
5128 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
5129 cleared = 1;
5132 /* If we are building a static constructor into a register,
5133 set the initial value as zero so we can fold the value into
5134 a constant. But if more than one register is involved,
5135 this probably loses. */
5136 else if (REG_P (target) && TREE_STATIC (exp)
5137 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
5139 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5140 cleared = 1;
5143 /* If the constructor has fewer fields than the structure or
5144 if we are initializing the structure to mostly zeros, clear
5145 the whole structure first. Don't do this if TARGET is a
5146 register whose mode size isn't equal to SIZE since
5147 clear_storage can't handle this case. */
5148 else if (size > 0
5149 && (((int)VEC_length (constructor_elt, CONSTRUCTOR_ELTS (exp))
5150 != fields_length (type))
5151 || mostly_zeros_p (exp))
5152 && (!REG_P (target)
5153 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
5154 == size)))
5156 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5157 cleared = 1;
5160 if (REG_P (target) && !cleared)
5161 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
5163 /* Store each element of the constructor into the
5164 corresponding field of TARGET. */
5165 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
5167 enum machine_mode mode;
5168 HOST_WIDE_INT bitsize;
5169 HOST_WIDE_INT bitpos = 0;
5170 tree offset;
5171 rtx to_rtx = target;
5173 /* Just ignore missing fields. We cleared the whole
5174 structure, above, if any fields are missing. */
5175 if (field == 0)
5176 continue;
5178 if (cleared && initializer_zerop (value))
5179 continue;
5181 if (host_integerp (DECL_SIZE (field), 1))
5182 bitsize = tree_low_cst (DECL_SIZE (field), 1);
5183 else
5184 bitsize = -1;
5186 mode = DECL_MODE (field);
5187 if (DECL_BIT_FIELD (field))
5188 mode = VOIDmode;
5190 offset = DECL_FIELD_OFFSET (field);
5191 if (host_integerp (offset, 0)
5192 && host_integerp (bit_position (field), 0))
5194 bitpos = int_bit_position (field);
5195 offset = 0;
5197 else
5198 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
5200 if (offset)
5202 rtx offset_rtx;
5204 offset
5205 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
5206 make_tree (TREE_TYPE (exp),
5207 target));
5209 offset_rtx = expand_normal (offset);
5210 gcc_assert (MEM_P (to_rtx));
5212 #ifdef POINTERS_EXTEND_UNSIGNED
5213 if (GET_MODE (offset_rtx) != Pmode)
5214 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
5215 #else
5216 if (GET_MODE (offset_rtx) != ptr_mode)
5217 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
5218 #endif
5220 to_rtx = offset_address (to_rtx, offset_rtx,
5221 highest_pow2_factor (offset));
5224 #ifdef WORD_REGISTER_OPERATIONS
5225 /* If this initializes a field that is smaller than a
5226 word, at the start of a word, try to widen it to a full
5227 word. This special case allows us to output C++ member
5228 function initializations in a form that the optimizers
5229 can understand. */
5230 if (REG_P (target)
5231 && bitsize < BITS_PER_WORD
5232 && bitpos % BITS_PER_WORD == 0
5233 && GET_MODE_CLASS (mode) == MODE_INT
5234 && TREE_CODE (value) == INTEGER_CST
5235 && exp_size >= 0
5236 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
5238 tree type = TREE_TYPE (value);
5240 if (TYPE_PRECISION (type) < BITS_PER_WORD)
5242 type = lang_hooks.types.type_for_size
5243 (BITS_PER_WORD, TYPE_UNSIGNED (type));
5244 value = fold_convert (type, value);
5247 if (BYTES_BIG_ENDIAN)
5248 value
5249 = fold_build2 (LSHIFT_EXPR, type, value,
5250 build_int_cst (type,
5251 BITS_PER_WORD - bitsize));
5252 bitsize = BITS_PER_WORD;
5253 mode = word_mode;
5255 #endif
5257 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
5258 && DECL_NONADDRESSABLE_P (field))
5260 to_rtx = copy_rtx (to_rtx);
5261 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
5264 store_constructor_field (to_rtx, bitsize, bitpos, mode,
5265 value, type, cleared,
5266 get_alias_set (TREE_TYPE (field)));
5268 break;
5270 case ARRAY_TYPE:
5272 tree value, index;
5273 unsigned HOST_WIDE_INT i;
5274 int need_to_clear;
5275 tree domain;
5276 tree elttype = TREE_TYPE (type);
5277 int const_bounds_p;
5278 HOST_WIDE_INT minelt = 0;
5279 HOST_WIDE_INT maxelt = 0;
5281 domain = TYPE_DOMAIN (type);
5282 const_bounds_p = (TYPE_MIN_VALUE (domain)
5283 && TYPE_MAX_VALUE (domain)
5284 && host_integerp (TYPE_MIN_VALUE (domain), 0)
5285 && host_integerp (TYPE_MAX_VALUE (domain), 0));
5287 /* If we have constant bounds for the range of the type, get them. */
5288 if (const_bounds_p)
5290 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
5291 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
5294 /* If the constructor has fewer elements than the array, clear
5295 the whole array first. Similarly if this is a static
5296 constructor of a non-BLKmode object. */
5297 if (cleared)
5298 need_to_clear = 0;
5299 else if (REG_P (target) && TREE_STATIC (exp))
5300 need_to_clear = 1;
5301 else
5303 unsigned HOST_WIDE_INT idx;
5304 tree index, value;
5305 HOST_WIDE_INT count = 0, zero_count = 0;
5306 need_to_clear = ! const_bounds_p;
5308 /* This loop is a more accurate version of the loop in
5309 mostly_zeros_p (it handles RANGE_EXPR in an index). It
5310 is also needed to check for missing elements. */
5311 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
5313 HOST_WIDE_INT this_node_count;
5315 if (need_to_clear)
5316 break;
5318 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5320 tree lo_index = TREE_OPERAND (index, 0);
5321 tree hi_index = TREE_OPERAND (index, 1);
5323 if (! host_integerp (lo_index, 1)
5324 || ! host_integerp (hi_index, 1))
5326 need_to_clear = 1;
5327 break;
5330 this_node_count = (tree_low_cst (hi_index, 1)
5331 - tree_low_cst (lo_index, 1) + 1);
5333 else
5334 this_node_count = 1;
5336 count += this_node_count;
5337 if (mostly_zeros_p (value))
5338 zero_count += this_node_count;
5341 /* Clear the entire array first if there are any missing
5342 elements, or if the incidence of zero elements is >=
5343 75%. */
5344 if (! need_to_clear
5345 && (count < maxelt - minelt + 1
5346 || 4 * zero_count >= 3 * count))
5347 need_to_clear = 1;
5350 if (need_to_clear && size > 0)
5352 if (REG_P (target))
5353 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5354 else
5355 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5356 cleared = 1;
5359 if (!cleared && REG_P (target))
5360 /* Inform later passes that the old value is dead. */
5361 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
5363 /* Store each element of the constructor into the
5364 corresponding element of TARGET, determined by counting the
5365 elements. */
5366 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
5368 enum machine_mode mode;
5369 HOST_WIDE_INT bitsize;
5370 HOST_WIDE_INT bitpos;
5371 int unsignedp;
5372 rtx xtarget = target;
5374 if (cleared && initializer_zerop (value))
5375 continue;
5377 unsignedp = TYPE_UNSIGNED (elttype);
5378 mode = TYPE_MODE (elttype);
5379 if (mode == BLKmode)
5380 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
5381 ? tree_low_cst (TYPE_SIZE (elttype), 1)
5382 : -1);
5383 else
5384 bitsize = GET_MODE_BITSIZE (mode);
5386 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5388 tree lo_index = TREE_OPERAND (index, 0);
5389 tree hi_index = TREE_OPERAND (index, 1);
5390 rtx index_r, pos_rtx;
5391 HOST_WIDE_INT lo, hi, count;
5392 tree position;
5394 /* If the range is constant and "small", unroll the loop. */
5395 if (const_bounds_p
5396 && host_integerp (lo_index, 0)
5397 && host_integerp (hi_index, 0)
5398 && (lo = tree_low_cst (lo_index, 0),
5399 hi = tree_low_cst (hi_index, 0),
5400 count = hi - lo + 1,
5401 (!MEM_P (target)
5402 || count <= 2
5403 || (host_integerp (TYPE_SIZE (elttype), 1)
5404 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
5405 <= 40 * 8)))))
5407 lo -= minelt; hi -= minelt;
5408 for (; lo <= hi; lo++)
5410 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
5412 if (MEM_P (target)
5413 && !MEM_KEEP_ALIAS_SET_P (target)
5414 && TREE_CODE (type) == ARRAY_TYPE
5415 && TYPE_NONALIASED_COMPONENT (type))
5417 target = copy_rtx (target);
5418 MEM_KEEP_ALIAS_SET_P (target) = 1;
5421 store_constructor_field
5422 (target, bitsize, bitpos, mode, value, type, cleared,
5423 get_alias_set (elttype));
5426 else
5428 rtx loop_start = gen_label_rtx ();
5429 rtx loop_end = gen_label_rtx ();
5430 tree exit_cond;
5432 expand_normal (hi_index);
5433 unsignedp = TYPE_UNSIGNED (domain);
5435 index = build_decl (VAR_DECL, NULL_TREE, domain);
5437 index_r
5438 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
5439 &unsignedp, 0));
5440 SET_DECL_RTL (index, index_r);
5441 store_expr (lo_index, index_r, 0, false);
5443 /* Build the head of the loop. */
5444 do_pending_stack_adjust ();
5445 emit_label (loop_start);
5447 /* Assign value to element index. */
5448 position =
5449 fold_convert (ssizetype,
5450 fold_build2 (MINUS_EXPR,
5451 TREE_TYPE (index),
5452 index,
5453 TYPE_MIN_VALUE (domain)));
5455 position =
5456 size_binop (MULT_EXPR, position,
5457 fold_convert (ssizetype,
5458 TYPE_SIZE_UNIT (elttype)));
5460 pos_rtx = expand_normal (position);
5461 xtarget = offset_address (target, pos_rtx,
5462 highest_pow2_factor (position));
5463 xtarget = adjust_address (xtarget, mode, 0);
5464 if (TREE_CODE (value) == CONSTRUCTOR)
5465 store_constructor (value, xtarget, cleared,
5466 bitsize / BITS_PER_UNIT);
5467 else
5468 store_expr (value, xtarget, 0, false);
5470 /* Generate a conditional jump to exit the loop. */
5471 exit_cond = build2 (LT_EXPR, integer_type_node,
5472 index, hi_index);
5473 jumpif (exit_cond, loop_end);
5475 /* Update the loop counter, and jump to the head of
5476 the loop. */
5477 expand_assignment (index,
5478 build2 (PLUS_EXPR, TREE_TYPE (index),
5479 index, integer_one_node),
5480 false);
5482 emit_jump (loop_start);
5484 /* Build the end of the loop. */
5485 emit_label (loop_end);
5488 else if ((index != 0 && ! host_integerp (index, 0))
5489 || ! host_integerp (TYPE_SIZE (elttype), 1))
5491 tree position;
5493 if (index == 0)
5494 index = ssize_int (1);
5496 if (minelt)
5497 index = fold_convert (ssizetype,
5498 fold_build2 (MINUS_EXPR,
5499 TREE_TYPE (index),
5500 index,
5501 TYPE_MIN_VALUE (domain)));
5503 position =
5504 size_binop (MULT_EXPR, index,
5505 fold_convert (ssizetype,
5506 TYPE_SIZE_UNIT (elttype)));
5507 xtarget = offset_address (target,
5508 expand_normal (position),
5509 highest_pow2_factor (position));
5510 xtarget = adjust_address (xtarget, mode, 0);
5511 store_expr (value, xtarget, 0, false);
5513 else
5515 if (index != 0)
5516 bitpos = ((tree_low_cst (index, 0) - minelt)
5517 * tree_low_cst (TYPE_SIZE (elttype), 1));
5518 else
5519 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5521 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
5522 && TREE_CODE (type) == ARRAY_TYPE
5523 && TYPE_NONALIASED_COMPONENT (type))
5525 target = copy_rtx (target);
5526 MEM_KEEP_ALIAS_SET_P (target) = 1;
5528 store_constructor_field (target, bitsize, bitpos, mode, value,
5529 type, cleared, get_alias_set (elttype));
5532 break;
5535 case VECTOR_TYPE:
5537 unsigned HOST_WIDE_INT idx;
5538 constructor_elt *ce;
5539 int i;
5540 int need_to_clear;
5541 int icode = 0;
5542 tree elttype = TREE_TYPE (type);
5543 int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
5544 enum machine_mode eltmode = TYPE_MODE (elttype);
5545 HOST_WIDE_INT bitsize;
5546 HOST_WIDE_INT bitpos;
5547 rtvec vector = NULL;
5548 unsigned n_elts;
5550 gcc_assert (eltmode != BLKmode);
5552 n_elts = TYPE_VECTOR_SUBPARTS (type);
5553 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
5555 enum machine_mode mode = GET_MODE (target);
5557 icode = (int) optab_handler (vec_init_optab, mode)->insn_code;
5558 if (icode != CODE_FOR_nothing)
5560 unsigned int i;
5562 vector = rtvec_alloc (n_elts);
5563 for (i = 0; i < n_elts; i++)
5564 RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
5568 /* If the constructor has fewer elements than the vector,
5569 clear the whole vector first. Similarly if this is a static
5570 constructor of a non-BLKmode object. */
5571 if (cleared)
5572 need_to_clear = 0;
5573 else if (REG_P (target) && TREE_STATIC (exp))
5574 need_to_clear = 1;
5575 else
5577 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
5578 tree value;
5580 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
5582 int n_elts_here = tree_low_cst
5583 (int_const_binop (TRUNC_DIV_EXPR,
5584 TYPE_SIZE (TREE_TYPE (value)),
5585 TYPE_SIZE (elttype), 0), 1);
5587 count += n_elts_here;
5588 if (mostly_zeros_p (value))
5589 zero_count += n_elts_here;
5592 /* Clear the entire vector first if there are any missing elements,
5593 or if the incidence of zero elements is >= 75%. */
5594 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
5597 if (need_to_clear && size > 0 && !vector)
5599 if (REG_P (target))
5600 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5601 else
5602 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5603 cleared = 1;
5606 /* Inform later passes that the old value is dead. */
5607 if (!cleared && !vector && REG_P (target))
5608 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5610 /* Store each element of the constructor into the corresponding
5611 element of TARGET, determined by counting the elements. */
5612 for (idx = 0, i = 0;
5613 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
5614 idx++, i += bitsize / elt_size)
5616 HOST_WIDE_INT eltpos;
5617 tree value = ce->value;
5619 bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
5620 if (cleared && initializer_zerop (value))
5621 continue;
5623 if (ce->index)
5624 eltpos = tree_low_cst (ce->index, 1);
5625 else
5626 eltpos = i;
5628 if (vector)
5630 /* Vector CONSTRUCTORs should only be built from smaller
5631 vectors in the case of BLKmode vectors. */
5632 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
5633 RTVEC_ELT (vector, eltpos)
5634 = expand_normal (value);
5636 else
5638 enum machine_mode value_mode =
5639 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
5640 ? TYPE_MODE (TREE_TYPE (value))
5641 : eltmode;
5642 bitpos = eltpos * elt_size;
5643 store_constructor_field (target, bitsize, bitpos,
5644 value_mode, value, type,
5645 cleared, get_alias_set (elttype));
5649 if (vector)
5650 emit_insn (GEN_FCN (icode)
5651 (target,
5652 gen_rtx_PARALLEL (GET_MODE (target), vector)));
5653 break;
5656 default:
5657 gcc_unreachable ();
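/* A worked instance of the clearing heuristic in the VECTOR_TYPE case
   above (illustrative only): for a 4-element vector initialized with
   { 0, 0, 0, x }, count == 4 and zero_count == 3, so the test
   4 * zero_count >= 3 * count becomes 12 >= 12 and NEED_TO_CLEAR is
   set; the whole vector is cleared once and only X is stored.  */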
5661 /* Store the value of EXP (an expression tree)
5662 into a subfield of TARGET which has mode MODE and occupies
5663 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5664 If MODE is VOIDmode, it means that we are storing into a bit-field.
5666 Always return const0_rtx unless we have something particular to
5667 return.
5669 TYPE is the type of the underlying object.
5671 ALIAS_SET is the alias set for the destination. This value will
5672 (in general) be different from that for TARGET, since TARGET is a
5673 reference to the containing structure.
5675 If NONTEMPORAL is true, try generating a nontemporal store. */
5677 static rtx
5678 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5679 enum machine_mode mode, tree exp, tree type,
5680 alias_set_type alias_set, bool nontemporal)
5682 HOST_WIDE_INT width_mask = 0;
5684 if (TREE_CODE (exp) == ERROR_MARK)
5685 return const0_rtx;
5687 /* If we have nothing to store, do nothing unless the expression has
5688 side-effects. */
5689 if (bitsize == 0)
5690 return expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5691 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5692 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5694 /* If we are storing into an unaligned field of an aligned union that is
5695 in a register, we may have the mode of TARGET being an integer mode but
5696 MODE == BLKmode. In that case, get an aligned object whose size and
5697 alignment are the same as TARGET and store TARGET into it (we can avoid
5698 the store if the field being stored is the entire width of TARGET). Then
5699 call ourselves recursively to store the field into a BLKmode version of
5700 that object. Finally, load from the object into TARGET. This is not
5701 very efficient in general, but should only be slightly more expensive
5702 than the otherwise-required unaligned accesses. Perhaps this can be
5703 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5704 twice, once with emit_move_insn and once via store_field. */
5706 if (mode == BLKmode
5707 && (REG_P (target) || GET_CODE (target) == SUBREG))
5709 rtx object = assign_temp (type, 0, 1, 1);
5710 rtx blk_object = adjust_address (object, BLKmode, 0);
5712 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5713 emit_move_insn (object, target);
5715 store_field (blk_object, bitsize, bitpos, mode, exp, type, alias_set,
5716 nontemporal);
5718 emit_move_insn (target, object);
5720 /* We want to return the BLKmode version of the data. */
5721 return blk_object;
5724 if (GET_CODE (target) == CONCAT)
5726 /* We're storing into a struct containing a single __complex. */
5728 gcc_assert (!bitpos);
5729 return store_expr (exp, target, 0, nontemporal);
5732 /* If the structure is in a register or if the component
5733 is a bit field, we cannot use addressing to access it.
5734 Use bit-field techniques or SUBREG to store in it. */
5736 if (mode == VOIDmode
5737 || (mode != BLKmode && ! direct_store[(int) mode]
5738 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5739 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5740 || REG_P (target)
5741 || GET_CODE (target) == SUBREG
5742 /* If the field isn't aligned enough to store as an ordinary memref,
5743 store it as a bit field. */
5744 || (mode != BLKmode
5745 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5746 || bitpos % GET_MODE_ALIGNMENT (mode))
5747 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5748 || (bitpos % BITS_PER_UNIT != 0)))
5749 /* If the RHS and field are a constant size and the size of the
5750 RHS isn't the same size as the bitfield, we must use bitfield
5751 operations. */
5752 || (bitsize >= 0
5753 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5754 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5756 rtx temp;
5758 /* If EXP is a NOP_EXPR of precision less than its mode, then that
5759 implies a mask operation. If the precision is the same size as
5760 the field we're storing into, that mask is redundant. This is
5761 particularly common with bit field assignments generated by the
5762 C front end. */
5763 if (TREE_CODE (exp) == NOP_EXPR)
5765 tree type = TREE_TYPE (exp);
5766 if (INTEGRAL_TYPE_P (type)
5767 && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
5768 && bitsize == TYPE_PRECISION (type))
5770 type = TREE_TYPE (TREE_OPERAND (exp, 0));
5771 if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
5772 exp = TREE_OPERAND (exp, 0);
5776 temp = expand_normal (exp);
5778 /* If BITSIZE is narrower than the size of the type of EXP
5779 we will be narrowing TEMP. Normally, what's wanted are the
5780 low-order bits. However, if EXP's type is a record and this is
5781 a big-endian machine, we want the upper BITSIZE bits. */
5782 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5783 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5784 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5785 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5786 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5787 - bitsize),
5788 NULL_RTX, 1);
5790 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5791 MODE. */
5792 if (mode != VOIDmode && mode != BLKmode
5793 && mode != TYPE_MODE (TREE_TYPE (exp)))
5794 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5796 /* If the modes of TARGET and TEMP are both BLKmode, both
5797 must be in memory and BITPOS must be aligned on a byte
5798 boundary. If so, we simply do a block copy. */
5799 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5801 gcc_assert (MEM_P (target) && MEM_P (temp)
5802 && !(bitpos % BITS_PER_UNIT));
5804 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5805 emit_block_move (target, temp,
5806 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5807 / BITS_PER_UNIT),
5808 BLOCK_OP_NORMAL);
5810 return const0_rtx;
5813 /* Store the value in the bitfield. */
5814 store_bit_field (target, bitsize, bitpos, mode, temp);
5816 return const0_rtx;
5818 else
5820 /* Now build a reference to just the desired component. */
5821 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5823 if (to_rtx == target)
5824 to_rtx = copy_rtx (to_rtx);
5826 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5827 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5828 set_mem_alias_set (to_rtx, alias_set);
5830 return store_expr (exp, to_rtx, 0, nontemporal);
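/* A minimal sketch (not from this file) of the WIDTH_MASK value that
   store_field computes above: it is simply the mask of the low BITSIZE
   bits, defined only while bitsize < HOST_BITS_PER_WIDE_INT, exactly
   the guard used there.  */
#if 0
static HOST_WIDE_INT
low_bits_mask (HOST_WIDE_INT bitsize)
{
  /* e.g. bitsize == 3 yields 0x7.  */
  return ((HOST_WIDE_INT) 1 << bitsize) - 1;
}
#endif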
5834 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5835 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5836 codes and find the ultimate containing object, which we return.
5838 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5839 bit position, and *PUNSIGNEDP to the signedness of the field.
5840 If the position of the field is variable, we store a tree
5841 giving the variable offset (in units) in *POFFSET.
5842 This offset is in addition to the bit position.
5843 If the position is not variable, we store 0 in *POFFSET.
5845 If any of the extraction expressions is volatile,
5846 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5848 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5849 is a mode that can be used to access the field. In that case, *PBITSIZE
5850 is redundant.
5852 If the field describes a variable-sized object, *PMODE is set to
5853 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5854 this case, but the address of the object can be found.
5856 If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
5857 look through nodes that serve as markers of a greater alignment than
5858 the one that can be deduced from the expression. These nodes make it
5859 possible for front-ends to prevent temporaries from being created by
5860 the middle-end on alignment considerations. For that purpose, the
5861 normal operating mode at the high level is to always pass FALSE so that
5862 the ultimate containing object is really returned; moreover, the
5863 associated predicate handled_component_p will always return TRUE
5864 on these nodes, thus indicating that they are essentially handled
5865 by get_inner_reference. TRUE should only be passed when the caller
5866 is scanning the expression in order to build another representation
5867 and specifically knows how to handle these nodes; as such, this is
5868 the normal operating mode in the RTL expanders. */
5870 tree
5871 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5872 HOST_WIDE_INT *pbitpos, tree *poffset,
5873 enum machine_mode *pmode, int *punsignedp,
5874 int *pvolatilep, bool keep_aligning)
5876 tree size_tree = 0;
5877 enum machine_mode mode = VOIDmode;
5878 tree offset = size_zero_node;
5879 tree bit_offset = bitsize_zero_node;
5881 /* First get the mode, signedness, and size. We do this from just the
5882 outermost expression. */
5883 if (TREE_CODE (exp) == COMPONENT_REF)
5885 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5886 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5887 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5889 *punsignedp = DECL_UNSIGNED (TREE_OPERAND (exp, 1));
5891 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5893 size_tree = TREE_OPERAND (exp, 1);
5894 *punsignedp = BIT_FIELD_REF_UNSIGNED (exp);
5896 /* For vector types, with the correct size of access, use the mode of
5897 inner type. */
5898 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == VECTOR_TYPE
5899 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))
5900 && tree_int_cst_equal (size_tree, TYPE_SIZE (TREE_TYPE (exp))))
5901 mode = TYPE_MODE (TREE_TYPE (exp));
5903 else
5905 mode = TYPE_MODE (TREE_TYPE (exp));
5906 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
5908 if (mode == BLKmode)
5909 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5910 else
5911 *pbitsize = GET_MODE_BITSIZE (mode);
5914 if (size_tree != 0)
5916 if (! host_integerp (size_tree, 1))
5917 mode = BLKmode, *pbitsize = -1;
5918 else
5919 *pbitsize = tree_low_cst (size_tree, 1);
5922 *pmode = mode;
5924 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5925 and find the ultimate containing object. */
5926 while (1)
5928 switch (TREE_CODE (exp))
5930 case BIT_FIELD_REF:
5931 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5932 TREE_OPERAND (exp, 2));
5933 break;
5935 case COMPONENT_REF:
5937 tree field = TREE_OPERAND (exp, 1);
5938 tree this_offset = component_ref_field_offset (exp);
5940 /* If this field hasn't been filled in yet, don't go past it.
5941 This should only happen when folding expressions made during
5942 type construction. */
5943 if (this_offset == 0)
5944 break;
5946 offset = size_binop (PLUS_EXPR, offset, this_offset);
5947 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5948 DECL_FIELD_BIT_OFFSET (field));
5950 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5952 break;
5954 case ARRAY_REF:
5955 case ARRAY_RANGE_REF:
5957 tree index = TREE_OPERAND (exp, 1);
5958 tree low_bound = array_ref_low_bound (exp);
5959 tree unit_size = array_ref_element_size (exp);
5961 /* We assume all arrays have sizes that are a multiple of a byte.
5962 First subtract the lower bound, if any, in the type of the
5963 index, then convert to sizetype and multiply by the size of
5964 the array element. */
5965 if (! integer_zerop (low_bound))
5966 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
5967 index, low_bound);
5969 offset = size_binop (PLUS_EXPR, offset,
5970 size_binop (MULT_EXPR,
5971 fold_convert (sizetype, index),
5972 unit_size));
5974 break;
5976 case REALPART_EXPR:
5977 break;
5979 case IMAGPART_EXPR:
5980 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5981 bitsize_int (*pbitsize));
5982 break;
5984 case VIEW_CONVERT_EXPR:
5985 if (keep_aligning && STRICT_ALIGNMENT
5986 && (TYPE_ALIGN (TREE_TYPE (exp))
5987 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
5988 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5989 < BIGGEST_ALIGNMENT)
5990 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5991 || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
5992 goto done;
5993 break;
5995 default:
5996 goto done;
5999 /* If any reference in the chain is volatile, the effect is volatile. */
6000 if (TREE_THIS_VOLATILE (exp))
6001 *pvolatilep = 1;
6003 exp = TREE_OPERAND (exp, 0);
6005 done:
6007 /* If OFFSET is constant, see if we can return the whole thing as a
6008 constant bit position. Make sure to handle overflow during
6009 this conversion. */
6010 if (host_integerp (offset, 0))
6012 double_int tem = double_int_mul (tree_to_double_int (offset),
6013 uhwi_to_double_int (BITS_PER_UNIT));
6014 tem = double_int_add (tem, tree_to_double_int (bit_offset));
6015 if (double_int_fits_in_shwi_p (tem))
6017 *pbitpos = double_int_to_shwi (tem);
6018 *poffset = NULL_TREE;
6019 return exp;
6023 /* Otherwise, split it up. */
6024 *pbitpos = tree_low_cst (bit_offset, 0);
6025 *poffset = offset;
6027 return exp;
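/* A usage sketch (illustrative; S and EXP are hypothetical).  Given
   struct S { int x; short f : 5; } s;  and EXP being the tree for s.f,
   and assuming a 32-bit int, the outputs come back roughly as noted:  */
#if 0
HOST_WIDE_INT bitsize, bitpos;
tree offset;
enum machine_mode mode;
int unsignedp, volatilep = 0;

tree base = get_inner_reference (exp, &bitsize, &bitpos, &offset,
                                 &mode, &unsignedp, &volatilep, false);
/* base    == the VAR_DECL for s
   bitsize == 5           (width of the bit-field)
   bitpos  == 32          (bits from the start of s, past int x)
   offset  == NULL_TREE   (the position is a compile-time constant)
   mode    == VOIDmode    (a bit-field has no access mode)  */
#endif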
6030 /* Given an expression EXP that may be a COMPONENT_REF or an ARRAY_REF,
6031 look for whether EXP or any nested component-refs within EXP are marked
6032 as PACKED. */
6034 bool
6035 contains_packed_reference (const_tree exp)
6037 bool packed_p = false;
6039 while (1)
6041 switch (TREE_CODE (exp))
6043 case COMPONENT_REF:
6045 tree field = TREE_OPERAND (exp, 1);
6046 packed_p = DECL_PACKED (field)
6047 || TYPE_PACKED (TREE_TYPE (field))
6048 || TYPE_PACKED (TREE_TYPE (exp));
6049 if (packed_p)
6050 goto done;
6052 break;
6054 case BIT_FIELD_REF:
6055 case ARRAY_REF:
6056 case ARRAY_RANGE_REF:
6057 case REALPART_EXPR:
6058 case IMAGPART_EXPR:
6059 case VIEW_CONVERT_EXPR:
6060 break;
6062 default:
6063 goto done;
6065 exp = TREE_OPERAND (exp, 0);
6067 done:
6068 return packed_p;
6071 /* Return a tree of sizetype representing the size, in bytes, of the element
6072 of EXP, an ARRAY_REF. */
6074 tree
6075 array_ref_element_size (tree exp)
6077 tree aligned_size = TREE_OPERAND (exp, 3);
6078 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6080 /* If a size was specified in the ARRAY_REF, it's the size measured
6081 in alignment units of the element type. So multiply by that value. */
6082 if (aligned_size)
6084 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6085 sizetype from another type of the same width and signedness. */
6086 if (TREE_TYPE (aligned_size) != sizetype)
6087 aligned_size = fold_convert (sizetype, aligned_size);
6088 return size_binop (MULT_EXPR, aligned_size,
6089 size_int (TYPE_ALIGN_UNIT (elmt_type)));
6092 /* Otherwise, take the size from that of the element type. Substitute
6093 any PLACEHOLDER_EXPR that we have. */
6094 else
6095 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
6098 /* Return a tree representing the lower bound of the array mentioned in
6099 EXP, an ARRAY_REF. */
6101 tree
6102 array_ref_low_bound (tree exp)
6104 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
6106 /* If a lower bound is specified in EXP, use it. */
6107 if (TREE_OPERAND (exp, 2))
6108 return TREE_OPERAND (exp, 2);
6110 /* Otherwise, if there is a domain type and it has a lower bound, use it,
6111 substituting for a PLACEHOLDER_EXPR as needed. */
6112 if (domain_type && TYPE_MIN_VALUE (domain_type))
6113 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
6115 /* Otherwise, return a zero of the appropriate type. */
6116 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
6119 /* Return a tree representing the upper bound of the array mentioned in
6120 EXP, an ARRAY_REF. */
6122 tree
6123 array_ref_up_bound (tree exp)
6125 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
6127 /* If there is a domain type and it has an upper bound, use it, substituting
6128 for a PLACEHOLDER_EXPR as needed. */
6129 if (domain_type && TYPE_MAX_VALUE (domain_type))
6130 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
6132 /* Otherwise fail. */
6133 return NULL_TREE;
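/* A usage sketch of the three array_ref_* accessors above
   (illustrative; AREF is a hypothetical ARRAY_REF for a[i] where
   int a[10], assuming a 32-bit int):  */
#if 0
tree esize = array_ref_element_size (aref);  /* 4, in bytes            */
tree lo    = array_ref_low_bound (aref);     /* 0 for C arrays         */
tree hi    = array_ref_up_bound (aref);      /* 9, or NULL_TREE when
                                                the domain has no
                                                upper bound            */
#endif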
6136 /* Return a tree representing the offset, in bytes, of the field referenced
6137 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
6139 tree
6140 component_ref_field_offset (tree exp)
6142 tree aligned_offset = TREE_OPERAND (exp, 2);
6143 tree field = TREE_OPERAND (exp, 1);
6145 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
6146 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
6147 value. */
6148 if (aligned_offset)
6150 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6151 sizetype from another type of the same width and signedness. */
6152 if (TREE_TYPE (aligned_offset) != sizetype)
6153 aligned_offset = fold_convert (sizetype, aligned_offset);
6154 return size_binop (MULT_EXPR, aligned_offset,
6155 size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT));
6158 /* Otherwise, take the offset from that of the field. Substitute
6159 any PLACEHOLDER_EXPR that we have. */
6160 else
6161 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
6164 /* Return 1 if T is an expression that get_inner_reference handles. */
6166 int
6167 handled_component_p (const_tree t)
6169 switch (TREE_CODE (t))
6171 case BIT_FIELD_REF:
6172 case COMPONENT_REF:
6173 case ARRAY_REF:
6174 case ARRAY_RANGE_REF:
6175 case VIEW_CONVERT_EXPR:
6176 case REALPART_EXPR:
6177 case IMAGPART_EXPR:
6178 return 1;
6180 default:
6181 return 0;
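/* A common idiom built on this predicate (sketch, not from this file;
   T is an arbitrary reference tree): strip every handled component to
   reach the base object, mirroring the loop in get_inner_reference
   above.  */
#if 0
tree base = t;
while (handled_component_p (base))
  base = TREE_OPERAND (base, 0);
#endif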
6185 /* Given an rtx VALUE that may contain additions and multiplications, return
6186 an equivalent value that just refers to a register, memory, or constant.
6187 This is done by generating instructions to perform the arithmetic and
6188 returning a pseudo-register containing the value.
6190 The returned value may be a REG, SUBREG, MEM or constant. */
6192 rtx
6193 force_operand (rtx value, rtx target)
6195 rtx op1, op2;
6196 /* Use subtarget as the target for operand 0 of a binary operation. */
6197 rtx subtarget = get_subtarget (target);
6198 enum rtx_code code = GET_CODE (value);
6200 /* Check for subreg applied to an expression produced by loop optimizer. */
6201 if (code == SUBREG
6202 && !REG_P (SUBREG_REG (value))
6203 && !MEM_P (SUBREG_REG (value)))
6205 value
6206 = simplify_gen_subreg (GET_MODE (value),
6207 force_reg (GET_MODE (SUBREG_REG (value)),
6208 force_operand (SUBREG_REG (value),
6209 NULL_RTX)),
6210 GET_MODE (SUBREG_REG (value)),
6211 SUBREG_BYTE (value));
6212 code = GET_CODE (value);
6215 /* Check for a PIC address load. */
6216 if ((code == PLUS || code == MINUS)
6217 && XEXP (value, 0) == pic_offset_table_rtx
6218 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
6219 || GET_CODE (XEXP (value, 1)) == LABEL_REF
6220 || GET_CODE (XEXP (value, 1)) == CONST))
6222 if (!subtarget)
6223 subtarget = gen_reg_rtx (GET_MODE (value));
6224 emit_move_insn (subtarget, value);
6225 return subtarget;
6228 if (ARITHMETIC_P (value))
6230 op2 = XEXP (value, 1);
6231 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
6232 subtarget = 0;
6233 if (code == MINUS && GET_CODE (op2) == CONST_INT)
6235 code = PLUS;
6236 op2 = negate_rtx (GET_MODE (value), op2);
6239 /* Check for an addition with OP2 a constant integer and our first
6240 operand a PLUS of a virtual register and something else. In that
6241 case, we want to emit the sum of the virtual register and the
6242 constant first and then add the other value. This allows virtual
6243 register instantiation to simply modify the constant rather than
6244 creating another one around this addition. */
6245 if (code == PLUS && GET_CODE (op2) == CONST_INT
6246 && GET_CODE (XEXP (value, 0)) == PLUS
6247 && REG_P (XEXP (XEXP (value, 0), 0))
6248 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
6249 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
6251 rtx temp = expand_simple_binop (GET_MODE (value), code,
6252 XEXP (XEXP (value, 0), 0), op2,
6253 subtarget, 0, OPTAB_LIB_WIDEN);
6254 return expand_simple_binop (GET_MODE (value), code, temp,
6255 force_operand (XEXP (XEXP (value,
6256 0), 1), 0),
6257 target, 0, OPTAB_LIB_WIDEN);
6260 op1 = force_operand (XEXP (value, 0), subtarget);
6261 op2 = force_operand (op2, NULL_RTX);
6262 switch (code)
6264 case MULT:
6265 return expand_mult (GET_MODE (value), op1, op2, target, 1);
6266 case DIV:
6267 if (!INTEGRAL_MODE_P (GET_MODE (value)))
6268 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6269 target, 1, OPTAB_LIB_WIDEN);
6270 else
6271 return expand_divmod (0,
6272 FLOAT_MODE_P (GET_MODE (value))
6273 ? RDIV_EXPR : TRUNC_DIV_EXPR,
6274 GET_MODE (value), op1, op2, target, 0);
6275 case MOD:
6276 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6277 target, 0);
6278 case UDIV:
6279 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
6280 target, 1);
6281 case UMOD:
6282 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6283 target, 1);
6284 case ASHIFTRT:
6285 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6286 target, 0, OPTAB_LIB_WIDEN);
6287 default:
6288 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6289 target, 1, OPTAB_LIB_WIDEN);
6292 if (UNARY_P (value))
6294 if (!target)
6295 target = gen_reg_rtx (GET_MODE (value));
6296 op1 = force_operand (XEXP (value, 0), NULL_RTX);
6297 switch (code)
6299 case ZERO_EXTEND:
6300 case SIGN_EXTEND:
6301 case TRUNCATE:
6302 case FLOAT_EXTEND:
6303 case FLOAT_TRUNCATE:
6304 convert_move (target, op1, code == ZERO_EXTEND);
6305 return target;
6307 case FIX:
6308 case UNSIGNED_FIX:
6309 expand_fix (target, op1, code == UNSIGNED_FIX);
6310 return target;
6312 case FLOAT:
6313 case UNSIGNED_FLOAT:
6314 expand_float (target, op1, code == UNSIGNED_FLOAT);
6315 return target;
6317 default:
6318 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
6322 #ifdef INSN_SCHEDULING
6323 /* On machines that have insn scheduling, we want all memory references to be
6324 explicit, so we need to deal with such paradoxical SUBREGs. */
6325 if (GET_CODE (value) == SUBREG && MEM_P (SUBREG_REG (value))
6326 && (GET_MODE_SIZE (GET_MODE (value))
6327 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
6328 value
6329 = simplify_gen_subreg (GET_MODE (value),
6330 force_reg (GET_MODE (SUBREG_REG (value)),
6331 force_operand (SUBREG_REG (value),
6332 NULL_RTX)),
6333 GET_MODE (SUBREG_REG (value)),
6334 SUBREG_BYTE (value));
6335 #endif
6337 return value;
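/* A usage sketch (illustrative; X and Y stand for pseudo-register
   rtxes already in hand): legalize an address-style expression by
   turning its arithmetic into real insns.  */
#if 0
rtx addr = gen_rtx_PLUS (Pmode,
                         gen_rtx_MULT (Pmode, x, GEN_INT (4)),
                         y);
rtx legal = force_operand (addr, NULL_RTX);
/* The MULT goes through expand_mult and the PLUS through
   expand_simple_binop, per the cases above; LEGAL ends up a pseudo
   holding x*4 + y.  */
#endif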
6340 /* Subroutine of expand_expr: return nonzero iff there is no way that
6341 EXP can reference X, which is being modified. TOP_P is nonzero if this
6342 call is going to be used to determine whether we need a temporary
6343 for EXP, as opposed to a recursive call to this function.
6345 It is always safe for this routine to return zero since it merely
6346 searches for optimization opportunities. */
6348 static int
6349 safe_from_p (const_rtx x, tree exp, int top_p)
6351 rtx exp_rtl = 0;
6352 int i, nops;
6354 if (x == 0
6355 /* If EXP has varying size, we MUST use a target since we currently
6356 have no way of allocating temporaries of variable size
6357 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
6358 So we assume here that something at a higher level has prevented a
6359 clash. This is somewhat bogus, but the best we can do. Only
6360 do this when X is BLKmode and when we are at the top level. */
6361 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6362 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
6363 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
6364 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
6365 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
6366 != INTEGER_CST)
6367 && GET_MODE (x) == BLKmode)
6368 /* If X is in the outgoing argument area, it is always safe. */
6369 || (MEM_P (x)
6370 && (XEXP (x, 0) == virtual_outgoing_args_rtx
6371 || (GET_CODE (XEXP (x, 0)) == PLUS
6372 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
6373 return 1;
6375 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
6376 find the underlying pseudo. */
6377 if (GET_CODE (x) == SUBREG)
6379 x = SUBREG_REG (x);
6380 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6381 return 0;
6384 /* Now look at our tree code and possibly recurse. */
6385 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
6387 case tcc_declaration:
6388 exp_rtl = DECL_RTL_IF_SET (exp);
6389 break;
6391 case tcc_constant:
6392 return 1;
6394 case tcc_exceptional:
6395 if (TREE_CODE (exp) == TREE_LIST)
6397 while (1)
6399 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
6400 return 0;
6401 exp = TREE_CHAIN (exp);
6402 if (!exp)
6403 return 1;
6404 if (TREE_CODE (exp) != TREE_LIST)
6405 return safe_from_p (x, exp, 0);
6408 else if (TREE_CODE (exp) == CONSTRUCTOR)
6410 constructor_elt *ce;
6411 unsigned HOST_WIDE_INT idx;
6413 for (idx = 0;
6414 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
6415 idx++)
6416 if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
6417 || !safe_from_p (x, ce->value, 0))
6418 return 0;
6419 return 1;
6421 else if (TREE_CODE (exp) == ERROR_MARK)
6422 return 1; /* An already-visited SAVE_EXPR? */
6423 else
6424 return 0;
6426 case tcc_statement:
6427 /* The only case we look at here is the DECL_INITIAL inside a
6428 DECL_EXPR. */
6429 return (TREE_CODE (exp) != DECL_EXPR
6430 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
6431 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
6432 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
6434 case tcc_binary:
6435 case tcc_comparison:
6436 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
6437 return 0;
6438 /* Fall through. */
6440 case tcc_unary:
6441 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6443 case tcc_expression:
6444 case tcc_reference:
6445 case tcc_vl_exp:
6446 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
6447 the expression. If it is set, we conflict iff we are that rtx or
6448 both are in memory. Otherwise, we check all operands of the
6449 expression recursively. */
6451 switch (TREE_CODE (exp))
6453 case ADDR_EXPR:
6454 /* If the operand is static or we are static, we can't conflict.
6455 Likewise if we don't conflict with the operand at all. */
6456 if (staticp (TREE_OPERAND (exp, 0))
6457 || TREE_STATIC (exp)
6458 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6459 return 1;
6461 /* Otherwise, the only way this can conflict is if we are taking
6462 the address of a DECL whose address is part of X, which is
6463 very rare. */
6464 exp = TREE_OPERAND (exp, 0);
6465 if (DECL_P (exp))
6467 if (!DECL_RTL_SET_P (exp)
6468 || !MEM_P (DECL_RTL (exp)))
6469 return 0;
6470 else
6471 exp_rtl = XEXP (DECL_RTL (exp), 0);
6473 break;
6475 case MISALIGNED_INDIRECT_REF:
6476 case ALIGN_INDIRECT_REF:
6477 case INDIRECT_REF:
6478 if (MEM_P (x)
6479 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
6480 get_alias_set (exp)))
6481 return 0;
6482 break;
6484 case CALL_EXPR:
6485 /* Assume that the call will clobber all hard registers and
6486 all of memory. */
6487 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6488 || MEM_P (x))
6489 return 0;
6490 break;
6492 case WITH_CLEANUP_EXPR:
6493 case CLEANUP_POINT_EXPR:
6494 /* Lowered by gimplify.c. */
6495 gcc_unreachable ();
6497 case SAVE_EXPR:
6498 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6500 default:
6501 break;
6504 /* If we have an rtx, we do not need to scan our operands. */
6505 if (exp_rtl)
6506 break;
6508 nops = TREE_OPERAND_LENGTH (exp);
6509 for (i = 0; i < nops; i++)
6510 if (TREE_OPERAND (exp, i) != 0
6511 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
6512 return 0;
6514 break;
6516 case tcc_type:
6517 /* Should never get a type here. */
6518 gcc_unreachable ();
6520 case tcc_gimple_stmt:
6521 gcc_unreachable ();
6524 /* If we have an rtl, find any enclosed object. Then see if we conflict
6525 with it. */
6526 if (exp_rtl)
6528 if (GET_CODE (exp_rtl) == SUBREG)
6530 exp_rtl = SUBREG_REG (exp_rtl);
6531 if (REG_P (exp_rtl)
6532 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
6533 return 0;
6536 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
6537 are memory and they conflict. */
6538 return ! (rtx_equal_p (x, exp_rtl)
6539 || (MEM_P (x) && MEM_P (exp_rtl)
6540 && true_dependence (exp_rtl, VOIDmode, x,
6541 rtx_addr_varies_p)));
6544 /* If we reach here, it is safe. */
6545 return 1;
6549 /* Return the highest power of two that EXP is known to be a multiple of.
6550 This is used in updating alignment of MEMs in array references. */
6552 unsigned HOST_WIDE_INT
6553 highest_pow2_factor (const_tree exp)
6555 unsigned HOST_WIDE_INT c0, c1;
6557 switch (TREE_CODE (exp))
6559 case INTEGER_CST:
6560 /* We can find the lowest bit that's a one. If the low
6561 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6562 We need to handle this case since we can find it in a COND_EXPR,
6563 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
6564 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
6565 later ICE. */
6566 if (TREE_OVERFLOW (exp))
6567 return BIGGEST_ALIGNMENT;
6568 else
6570 /* Note: tree_low_cst is intentionally not used here,
6571 we don't care about the upper bits. */
6572 c0 = TREE_INT_CST_LOW (exp);
6573 c0 &= -c0;
6574 return c0 ? c0 : BIGGEST_ALIGNMENT;
6576 break;
6578 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6579 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6580 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6581 return MIN (c0, c1);
6583 case MULT_EXPR:
6584 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6585 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6586 return c0 * c1;
6588 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6589 case CEIL_DIV_EXPR:
6590 if (integer_pow2p (TREE_OPERAND (exp, 1))
6591 && host_integerp (TREE_OPERAND (exp, 1), 1))
6593 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6594 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6595 return MAX (1, c0 / c1);
6597 break;
6599 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6600 case SAVE_EXPR:
6601 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6603 case COMPOUND_EXPR:
6604 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6606 case COND_EXPR:
6607 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6608 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6609 return MIN (c0, c1);
6611 default:
6612 break;
6615 return 1;
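/* A worked trace of the recursion above (illustrative), for an index
   expression i * 12 + 4 where nothing is known about i:

     highest_pow2_factor (i * 12 + 4)
       = MIN (highest_pow2_factor (i * 12), highest_pow2_factor (4))
       = MIN (highest_pow2_factor (i) * highest_pow2_factor (12), 4)
       = MIN (1 * 4, 4)   -- the default case yields 1 for plain i
       = 4

   so a MEM addressed by this expression may be marked as 4-byte
   aligned.  */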
6618 /* Similar, except that the alignment requirements of TARGET are
6619 taken into account. Assume it is at least as aligned as its
6620 type, unless it is a COMPONENT_REF in which case the layout of
6621 the structure gives the alignment. */
6623 static unsigned HOST_WIDE_INT
6624 highest_pow2_factor_for_target (const_tree target, const_tree exp)
6626 unsigned HOST_WIDE_INT target_align, factor;
6628 factor = highest_pow2_factor (exp);
6629 if (TREE_CODE (target) == COMPONENT_REF)
6630 target_align = DECL_ALIGN_UNIT (TREE_OPERAND (target, 1));
6631 else
6632 target_align = TYPE_ALIGN_UNIT (TREE_TYPE (target));
6633 return MAX (factor, target_align);
6636 /* Return &VAR expression for emulated thread local VAR. */
6638 static tree
6639 emutls_var_address (tree var)
6641 tree emuvar = emutls_decl (var);
6642 tree fn = built_in_decls [BUILT_IN_EMUTLS_GET_ADDRESS];
6643 tree arg = build_fold_addr_expr_with_type (emuvar, ptr_type_node);
6644 tree arglist = build_tree_list (NULL_TREE, arg);
6645 tree call = build_function_call_expr (fn, arglist);
6646 return fold_convert (build_pointer_type (TREE_TYPE (var)), call);
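/* The net effect of this helper and its two callers below
   (illustrative, using the same notation as their comments):

     &v  -->  __emutls_get_address (&_emutls.v)
      v  -->  *__emutls_get_address (&_emutls.v)

   for a __thread variable v when the target has no native TLS.  */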
6649 /* Expands variable VAR. */
6651 void
6652 expand_var (tree var)
6654 if (DECL_EXTERNAL (var))
6655 return;
6657 if (TREE_STATIC (var))
6658 /* If this is an inlined copy of a static local variable,
6659 look up the original decl. */
6660 var = DECL_ORIGIN (var);
6662 if (TREE_STATIC (var)
6663 ? !TREE_ASM_WRITTEN (var)
6664 : !DECL_RTL_SET_P (var))
6666 if (TREE_CODE (var) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (var))
6667 /* Should be ignored. */;
6668 else if (lang_hooks.expand_decl (var))
6669 /* OK. */;
6670 else if (TREE_CODE (var) == VAR_DECL && !TREE_STATIC (var))
6671 expand_decl (var);
6672 else if (TREE_CODE (var) == VAR_DECL && TREE_STATIC (var))
6673 rest_of_decl_compilation (var, 0, 0);
6674 else
6675 /* No expansion needed. */
6676 gcc_assert (TREE_CODE (var) == TYPE_DECL
6677 || TREE_CODE (var) == CONST_DECL
6678 || TREE_CODE (var) == FUNCTION_DECL
6679 || TREE_CODE (var) == LABEL_DECL);
6683 /* Subroutine of expand_expr. Expand the two operands of a binary
6684 expression EXP0 and EXP1 placing the results in OP0 and OP1.
6685 The value may be stored in TARGET if TARGET is nonzero. The
6686 MODIFIER argument is as documented by expand_expr. */
6688 static void
6689 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
6690 enum expand_modifier modifier)
6692 if (! safe_from_p (target, exp1, 1))
6693 target = 0;
6694 if (operand_equal_p (exp0, exp1, 0))
6696 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6697 *op1 = copy_rtx (*op0);
6699 else
6701 /* If we need to preserve evaluation order, copy exp0 into its own
6702 temporary variable so that it can't be clobbered by exp1. */
6703 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
6704 exp0 = save_expr (exp0);
6705 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6706 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
6711 /* Return a MEM that contains constant EXP. DEFER is as for
6712 output_constant_def and MODIFIER is as for expand_expr. */
6714 static rtx
6715 expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
6717 rtx mem;
6719 mem = output_constant_def (exp, defer);
6720 if (modifier != EXPAND_INITIALIZER)
6721 mem = use_anchored_address (mem);
6722 return mem;
6725 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
6726 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6728 static rtx
6729 expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
6730 enum expand_modifier modifier)
6732 rtx result, subtarget;
6733 tree inner, offset;
6734 HOST_WIDE_INT bitsize, bitpos;
6735 int volatilep, unsignedp;
6736 enum machine_mode mode1;
6738 /* If we are taking the address of a constant and are at the top level,
6739 we have to use output_constant_def since we can't call force_const_mem
6740 at top level. */
6741 /* ??? This should be considered a front-end bug. We should not be
6742 generating ADDR_EXPR of something that isn't an LVALUE. The only
6743 exception here is STRING_CST. */
6744 if (CONSTANT_CLASS_P (exp))
6745 return XEXP (expand_expr_constant (exp, 0, modifier), 0);
6747 /* Everything must be something allowed by is_gimple_addressable. */
6748 switch (TREE_CODE (exp))
6750 case INDIRECT_REF:
6751 /* This case will happen via recursion for &a->b. */
6752 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6754 case CONST_DECL:
6755 /* Recurse and make the output_constant_def clause above handle this. */
6756 return expand_expr_addr_expr_1 (DECL_INITIAL (exp), target,
6757 tmode, modifier);
6759 case REALPART_EXPR:
6760 /* The real part of the complex number is always first, therefore
6761 the address is the same as the address of the parent object. */
6762 offset = 0;
6763 bitpos = 0;
6764 inner = TREE_OPERAND (exp, 0);
6765 break;
6767 case IMAGPART_EXPR:
6768 /* The imaginary part of the complex number is always second.
6769 The expression is therefore always offset by the size of the
6770 scalar type. */
6771 offset = 0;
6772 bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
6773 inner = TREE_OPERAND (exp, 0);
6774 break;
6776 case VAR_DECL:
6777 /* TLS emulation hook - replace __thread VAR's &VAR with
6778 __emutls_get_address (&_emutls.VAR). */
6779 if (! targetm.have_tls
6780 && TREE_CODE (exp) == VAR_DECL
6781 && DECL_THREAD_LOCAL_P (exp))
6783 exp = emutls_var_address (exp);
6784 return expand_expr (exp, target, tmode, modifier);
6786 /* Fall through. */
6788 default:
6789 /* If the object is a DECL, then expand it for its rtl. Don't bypass
6790 expand_expr, as that can have various side effects; LABEL_DECLs for
6791 example, may not have their DECL_RTL set yet. Expand the rtl of
6792 CONSTRUCTORs too, which should yield a memory reference for the
6793 constructor's contents. Assume language specific tree nodes can
6794 be expanded in some interesting way. */
6795 if (DECL_P (exp)
6796 || TREE_CODE (exp) == CONSTRUCTOR
6797 || TREE_CODE (exp) >= LAST_AND_UNUSED_TREE_CODE)
6799 result = expand_expr (exp, target, tmode,
6800 modifier == EXPAND_INITIALIZER
6801 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
6803 /* If the DECL isn't in memory, then the DECL wasn't properly
6804 marked TREE_ADDRESSABLE, which will be either a front-end
6805 or a tree optimizer bug. */
6806 gcc_assert (MEM_P (result));
6807 result = XEXP (result, 0);
6809 /* ??? Is this needed anymore? */
6810 if (DECL_P (exp) && !TREE_USED (exp) == 0)
6812 assemble_external (exp);
6813 TREE_USED (exp) = 1;
6816 if (modifier != EXPAND_INITIALIZER
6817 && modifier != EXPAND_CONST_ADDRESS)
6818 result = force_operand (result, target);
6819 return result;
6822 /* Pass FALSE as the last argument to get_inner_reference although
6823 we are expanding to RTL. The rationale is that we know how to
6824 handle "aligning nodes" here: we can just bypass them because
6825 they won't change the final object whose address will be returned
6826 (they actually exist only for that purpose). */
6827 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6828 &mode1, &unsignedp, &volatilep, false);
6829 break;
6832 /* We must have made progress. */
6833 gcc_assert (inner != exp);
6835 subtarget = offset || bitpos ? NULL_RTX : target;
6836 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier);
6838 if (offset)
6840 rtx tmp;
6842 if (modifier != EXPAND_NORMAL)
6843 result = force_operand (result, NULL);
6844 tmp = expand_expr (offset, NULL_RTX, tmode,
6845 modifier == EXPAND_INITIALIZER
6846 ? EXPAND_INITIALIZER : EXPAND_NORMAL);
6848 result = convert_memory_address (tmode, result);
6849 tmp = convert_memory_address (tmode, tmp);
6851 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6852 result = gen_rtx_PLUS (tmode, result, tmp);
6853 else
6855 subtarget = bitpos ? NULL_RTX : target;
6856 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
6857 1, OPTAB_LIB_WIDEN);
6861 if (bitpos)
6863 /* Someone beforehand should have rejected taking the address
6864 of such an object. */
6865 gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
6867 result = plus_constant (result, bitpos / BITS_PER_UNIT);
6868 if (modifier < EXPAND_SUM)
6869 result = force_operand (result, target);
6872 return result;
6875 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
6876 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6878 static rtx
6879 expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
6880 enum expand_modifier modifier)
6882 enum machine_mode rmode;
6883 rtx result;
6885 /* Target mode of VOIDmode says "whatever's natural". */
6886 if (tmode == VOIDmode)
6887 tmode = TYPE_MODE (TREE_TYPE (exp));
6889 /* We can get called with some Weird Things if the user does silliness
6890 like "(short) &a". In that case, convert_memory_address won't do
6891 the right thing, so ignore the given target mode. */
6892 if (tmode != Pmode && tmode != ptr_mode)
6893 tmode = Pmode;
6895 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
6896 tmode, modifier);
6898 /* Despite expand_expr's claims about ignoring TMODE when not
6899 strictly convenient, things break if we don't honor it. Note
6900 that combined with the above, we only do this for pointer modes. */
6901 rmode = GET_MODE (result);
6902 if (rmode == VOIDmode)
6903 rmode = tmode;
6904 if (rmode != tmode)
6905 result = convert_memory_address (tmode, result);
6907 return result;
6910 /* Generate code for computing CONSTRUCTOR EXP.
6911 An rtx for the computed value is returned. If AVOID_TEMP_MEM
6912 is TRUE, instead of creating a temporary variable in memory
6913 NULL is returned and the caller needs to handle it differently. */
6915 static rtx
6916 expand_constructor (tree exp, rtx target, enum expand_modifier modifier,
6917 bool avoid_temp_mem)
6919 tree type = TREE_TYPE (exp);
6920 enum machine_mode mode = TYPE_MODE (type);
6922 /* Try to avoid creating a temporary at all. This is possible
6923 if all of the initializer is zero.
6924 FIXME: try to handle all [0..255] initializers we can handle
6925 with memset. */
6926 if (TREE_STATIC (exp)
6927 && !TREE_ADDRESSABLE (exp)
6928 && target != 0 && mode == BLKmode
6929 && all_zeros_p (exp))
6931 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
6932 return target;
6935 /* All elts simple constants => refer to a constant in memory. But
6936 if this is a non-BLKmode mode, let it store a field at a time
6937 since that should make a CONST_INT or CONST_DOUBLE when we
6938 fold. Likewise, if we have a target we can use, it is best to
6939 store directly into the target unless the type is large enough
6940 that memcpy will be used. If we are making an initializer and
6941 all operands are constant, put it in memory as well.
6943 FIXME: Avoid trying to fill vector constructors piece-meal.
6944 Output them with output_constant_def below unless we're sure
6945 they're zeros. This should go away when vector initializers
6946 are treated like VECTOR_CST instead of arrays. */
6947 if ((TREE_STATIC (exp)
6948 && ((mode == BLKmode
6949 && ! (target != 0 && safe_from_p (target, exp, 1)))
6950 || TREE_ADDRESSABLE (exp)
6951 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6952 && (! MOVE_BY_PIECES_P
6953 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6954 TYPE_ALIGN (type)))
6955 && ! mostly_zeros_p (exp))))
6956 || ((modifier == EXPAND_INITIALIZER || modifier == EXPAND_CONST_ADDRESS)
6957 && TREE_CONSTANT (exp)))
6959 rtx constructor;
6961 if (avoid_temp_mem)
6962 return NULL_RTX;
6964 constructor = expand_expr_constant (exp, 1, modifier);
6966 if (modifier != EXPAND_CONST_ADDRESS
6967 && modifier != EXPAND_INITIALIZER
6968 && modifier != EXPAND_SUM)
6969 constructor = validize_mem (constructor);
6971 return constructor;
6974 /* Handle calls that pass values in multiple non-contiguous
6975 locations. The Irix 6 ABI has examples of this. */
6976 if (target == 0 || ! safe_from_p (target, exp, 1)
6977 || GET_CODE (target) == PARALLEL || modifier == EXPAND_STACK_PARM)
6979 if (avoid_temp_mem)
6980 return NULL_RTX;
6982 target
6983 = assign_temp (build_qualified_type (type, (TYPE_QUALS (type)
6984 | (TREE_READONLY (exp)
6985 * TYPE_QUAL_CONST))),
6986 0, TREE_ADDRESSABLE (exp), 1);
6989 store_constructor (exp, target, 0, int_expr_size (exp));
6990 return target;
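/* For instance (illustrative), a local  struct S { int a[16]; } s
   initialized with an all-zero constant constructor takes the
   all_zeros_p shortcut above: a single clear_storage (a memset, in
   effect) instead of a piecewise store of each element.  */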
6994 /* expand_expr: generate code for computing expression EXP.
6995 An rtx for the computed value is returned. The value is never null.
6996 In the case of a void EXP, const0_rtx is returned.
6998 The value may be stored in TARGET if TARGET is nonzero.
6999 TARGET is just a suggestion; callers must assume that
7000 the rtx returned may not be the same as TARGET.
7002 If TARGET is CONST0_RTX, it means that the value will be ignored.
7004 If TMODE is not VOIDmode, it suggests generating the
7005 result in mode TMODE. But this is done only when convenient.
7006 Otherwise, TMODE is ignored and the value is generated in its natural mode.
7007 TMODE is just a suggestion; callers must assume that
7008 the rtx returned may not have mode TMODE.
7010 Note that TARGET may have neither TMODE nor MODE. In that case, it
7011 probably will not be used.
7013 If MODIFIER is EXPAND_SUM then when EXP is an addition
7014 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
7015 or a nest of (PLUS ...) and (MINUS ...) where the terms are
7016 products as above, or REG or MEM, or constant.
7017 Ordinarily in such cases we would output mul or add instructions
7018 and then return a pseudo reg containing the sum.
7020 EXPAND_INITIALIZER is much like EXPAND_SUM except that
7021 it also marks a label as absolutely required (it can't be dead).
7022 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
7023 This is used for outputting expressions used in initializers.
7025 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
7026 with a constant address even if that address is not normally legitimate.
7027 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
7029 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
7030 a call parameter. Such targets require special care as we haven't yet
7031 marked TARGET so that it's safe from being trashed by libcalls. We
7032 don't want to use TARGET for anything but the final result;
7033 intermediate values must go elsewhere. Additionally, calls to
7034 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
7036 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
7037 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
7038 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
7039 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
7040 recursively. */
7042 static rtx expand_expr_real_1 (tree, rtx, enum machine_mode,
7043 enum expand_modifier, rtx *);
7045 rtx
7046 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
7047 enum expand_modifier modifier, rtx *alt_rtl)
7049 int rn = -1;
7050 rtx ret, last = NULL;
7052 /* Handle ERROR_MARK before anybody tries to access its type. */
7053 if (TREE_CODE (exp) == ERROR_MARK
7054 || (!GIMPLE_TUPLE_P (exp) && TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK))
7056 ret = CONST0_RTX (tmode);
7057 return ret ? ret : const0_rtx;
7060 if (flag_non_call_exceptions)
7062 rn = lookup_stmt_eh_region (exp);
7063 /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't throw. */
7064 if (rn >= 0)
7065 last = get_last_insn ();
7068 /* If this is an expression of some kind and it has an associated line
7069 number, then emit the line number before expanding the expression.
7071 We need to save and restore the file and line information so that
7072 errors discovered during expansion are emitted with the right
7073 information. It would be better if the diagnostic routines
7074 used the file/line information embedded in the tree nodes rather
7075 than globals. */
7076 if (cfun && EXPR_HAS_LOCATION (exp))
7078 location_t saved_location = input_location;
7079 input_location = EXPR_LOCATION (exp);
7080 set_curr_insn_source_location (input_location);
7082 /* Record where the insns produced belong. */
7083 set_curr_insn_block (TREE_BLOCK (exp));
7085 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
7087 input_location = saved_location;
7089 else
7091 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
7094 /* If using non-call exceptions, mark all insns that may trap.
7095 expand_call() will mark CALL_INSNs before we get to this code,
7096 but it doesn't handle libcalls, and these may trap. */
7097 if (rn >= 0)
7099 rtx insn;
7100 for (insn = next_real_insn (last); insn;
7101 insn = next_real_insn (insn))
7103 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
7104 /* If we want exceptions for non-call insns, any
7105 may_trap_p instruction may throw. */
7106 && GET_CODE (PATTERN (insn)) != CLOBBER
7107 && GET_CODE (PATTERN (insn)) != USE
7108 && (CALL_P (insn) || may_trap_p (PATTERN (insn))))
7110 REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (rn),
7111 REG_NOTES (insn));
7116 return ret;
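/* A sketch of the EXPAND_SUM contract documented above (illustrative;
   ADDR_TREE stands for some pointer-valued tree such as p + 3):  */
#if 0
rtx a = expand_expr (addr_tree, NULL_RTX, Pmode, EXPAND_SUM);
/* A may come back as (plus (reg) (const_int 3)) rather than as a
   pseudo holding the sum; callers must legitimize it themselves,
   e.g. with memory_address or force_operand, before using it.  */
#endif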
7119 static rtx
7120 expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
7121 enum expand_modifier modifier, rtx *alt_rtl)
7123 rtx op0, op1, op2, temp, decl_rtl;
7124 tree type;
7125 int unsignedp;
7126 enum machine_mode mode;
7127 enum tree_code code = TREE_CODE (exp);
7128 optab this_optab;
7129 rtx subtarget, original_target;
7130 int ignore;
7131 tree context, subexp0, subexp1;
7132 bool reduce_bit_field = false;
7133 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field && !ignore \
7134 ? reduce_to_bit_field_precision ((expr), \
7135 target, \
7136 type) \
7137 : (expr))
7139 if (GIMPLE_STMT_P (exp))
7141 type = void_type_node;
7142 mode = VOIDmode;
7143 unsignedp = 0;
7145 else
7147 type = TREE_TYPE (exp);
7148 mode = TYPE_MODE (type);
7149 unsignedp = TYPE_UNSIGNED (type);
7151 if (lang_hooks.reduce_bit_field_operations
7152 && TREE_CODE (type) == INTEGER_TYPE
7153 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type))
7155 /* An operation in what may be a bit-field type needs the
7156 result to be reduced to the precision of the bit-field type,
7157 which is narrower than that of the type's mode. */
7158 reduce_bit_field = true;
7159 if (modifier == EXPAND_STACK_PARM)
7160 target = 0;
7163 /* Use subtarget as the target for operand 0 of a binary operation. */
7164 subtarget = get_subtarget (target);
7165 original_target = target;
7166 ignore = (target == const0_rtx
7167 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
7168 || code == CONVERT_EXPR || code == COND_EXPR
7169 || code == VIEW_CONVERT_EXPR)
7170 && TREE_CODE (type) == VOID_TYPE));
7172 /* If we are going to ignore this result, we need only do something
7173 if there is a side-effect somewhere in the expression. If there
7174 is, short-circuit the most common cases here. Note that we must
7175 not call expand_expr with anything but const0_rtx in case this
7176 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
7178 if (ignore)
7180 if (! TREE_SIDE_EFFECTS (exp))
7181 return const0_rtx;
7183 /* Ensure we reference a volatile object even if value is ignored, but
7184 don't do this if all we are doing is taking its address. */
7185 if (TREE_THIS_VOLATILE (exp)
7186 && TREE_CODE (exp) != FUNCTION_DECL
7187 && mode != VOIDmode && mode != BLKmode
7188 && modifier != EXPAND_CONST_ADDRESS)
7190 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
7191 if (MEM_P (temp))
7192 temp = copy_to_reg (temp);
7193 return const0_rtx;
7196 if (TREE_CODE_CLASS (code) == tcc_unary
7197 || code == COMPONENT_REF || code == INDIRECT_REF)
7198 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
7199 modifier);
7201 else if (TREE_CODE_CLASS (code) == tcc_binary
7202 || TREE_CODE_CLASS (code) == tcc_comparison
7203 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
7205 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
7206 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
7207 return const0_rtx;
7209 else if (code == BIT_FIELD_REF)
7211 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
7212 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
7213 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
7214 return const0_rtx;
7217 target = 0;
7221 switch (code)
7223 case LABEL_DECL:
7225 tree function = decl_function_context (exp);
7227 temp = label_rtx (exp);
7228 temp = gen_rtx_LABEL_REF (Pmode, temp);
7230 if (function != current_function_decl
7231 && function != 0)
7232 LABEL_REF_NONLOCAL_P (temp) = 1;
7234 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
7235 return temp;
7238 case SSA_NAME:
7239 return expand_expr_real_1 (SSA_NAME_VAR (exp), target, tmode, modifier,
7240 NULL);
7242 case PARM_DECL:
7243 case VAR_DECL:
7244 /* If a static var's type was incomplete when the decl was written,
7245 but the type is complete now, lay out the decl now. */
7246 if (DECL_SIZE (exp) == 0
7247 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
7248 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
7249 layout_decl (exp, 0);
7251 /* TLS emulation hook - replace __thread vars with
7252 *__emutls_get_address (&_emutls.var). */
7253 if (! targetm.have_tls
7254 && TREE_CODE (exp) == VAR_DECL
7255 && DECL_THREAD_LOCAL_P (exp))
7257 exp = build_fold_indirect_ref (emutls_var_address (exp));
7258 return expand_expr_real_1 (exp, target, tmode, modifier, NULL);
7261 /* ... fall through ... */
7263 case FUNCTION_DECL:
7264 case RESULT_DECL:
7265 decl_rtl = DECL_RTL (exp);
7266 gcc_assert (decl_rtl);
7267 decl_rtl = copy_rtx (decl_rtl);
7269 /* Ensure the variable is marked as used even if it doesn't go through
7270 a parser. If it hasn't been used yet, write out an external
7271 definition. */
7272 if (! TREE_USED (exp))
7274 assemble_external (exp);
7275 TREE_USED (exp) = 1;
7278 /* Show we haven't gotten RTL for this yet. */
7279 temp = 0;
7281 /* Variables inherited from containing functions should have
7282 been lowered by this point. */
7283 context = decl_function_context (exp);
7284 gcc_assert (!context
7285 || context == current_function_decl
7286 || TREE_STATIC (exp)
7287 /* ??? C++ creates functions that are not TREE_STATIC. */
7288 || TREE_CODE (exp) == FUNCTION_DECL);
7290 /* This is the case of an array whose size is to be determined
7291 from its initializer, while the initializer is still being parsed.
7292 See expand_decl. */
7294 if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
7295 temp = validize_mem (decl_rtl);
7297 /* If DECL_RTL is memory, we are in the normal case. If either
7298 the address is not valid, or it is not a register and -fforce-addr
7299 is specified, get the address into a register. */
7301 else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
7303 if (alt_rtl)
7304 *alt_rtl = decl_rtl;
7305 decl_rtl = use_anchored_address (decl_rtl);
7306 if (modifier != EXPAND_CONST_ADDRESS
7307 && modifier != EXPAND_SUM
7308 && (!memory_address_p (DECL_MODE (exp), XEXP (decl_rtl, 0))
7309 || (flag_force_addr && !REG_P (XEXP (decl_rtl, 0)))))
7310 temp = replace_equiv_address (decl_rtl,
7311 copy_rtx (XEXP (decl_rtl, 0)));
7314 /* If we got something, return it. But first, set the alignment
7315 if the address is a register. */
7316 if (temp != 0)
7318 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
7319 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
7321 return temp;
7324 /* If the mode of DECL_RTL does not match that of the decl, it
7325 must be a promoted value. We return a SUBREG of the wanted mode,
7326 but mark it so that we know that it was already extended. */
7328 if (REG_P (decl_rtl)
7329 && GET_MODE (decl_rtl) != DECL_MODE (exp))
7331 enum machine_mode pmode;
7333 /* Get the signedness used for this variable. Ensure we get the
7334 same mode we got when the variable was declared. */
7335 pmode = promote_mode (type, DECL_MODE (exp), &unsignedp,
7336 (TREE_CODE (exp) == RESULT_DECL
7337 || TREE_CODE (exp) == PARM_DECL) ? 1 : 0);
7338 gcc_assert (GET_MODE (decl_rtl) == pmode);
7340 temp = gen_lowpart_SUBREG (mode, decl_rtl);
7341 SUBREG_PROMOTED_VAR_P (temp) = 1;
7342 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
7343 return temp;
7346 return decl_rtl;
7348 case INTEGER_CST:
7349 temp = immed_double_const (TREE_INT_CST_LOW (exp),
7350 TREE_INT_CST_HIGH (exp), mode);
7352 return temp;
7354 case VECTOR_CST:
7356 tree tmp = NULL_TREE;
7357 if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT
7358 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT
7359 || GET_MODE_CLASS (mode) == MODE_VECTOR_FRACT
7360 || GET_MODE_CLASS (mode) == MODE_VECTOR_UFRACT
7361 || GET_MODE_CLASS (mode) == MODE_VECTOR_ACCUM
7362 || GET_MODE_CLASS (mode) == MODE_VECTOR_UACCUM)
7363 return const_vector_from_tree (exp);
7364 if (GET_MODE_CLASS (mode) == MODE_INT)
7366 tree type_for_mode = lang_hooks.types.type_for_mode (mode, 1);
7367 if (type_for_mode)
7368 tmp = fold_unary (VIEW_CONVERT_EXPR, type_for_mode, exp);
7370 if (!tmp)
7371 tmp = build_constructor_from_list (type,
7372 TREE_VECTOR_CST_ELTS (exp));
7373 return expand_expr (tmp, ignore ? const0_rtx : target,
7374 tmode, modifier);
7377 case CONST_DECL:
7378 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
7380 case REAL_CST:
7381 /* If optimized, generate immediate CONST_DOUBLE
7382 which will be turned into memory by reload if necessary.
7384 We used to force a register so that loop.c could see it. But
7385 this does not allow gen_* patterns to perform optimizations with
7386 the constants. It also produces two insns in cases like "x = 1.0;".
7387 On most machines, floating-point constants are not permitted in
7388 many insns, so we'd end up copying it to a register in any case.
7390 Now, we do the copying in expand_binop, if appropriate. */
7391 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
7392 TYPE_MODE (TREE_TYPE (exp)));
7394 case FIXED_CST:
7395 return CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (exp),
7396 TYPE_MODE (TREE_TYPE (exp)));
7398 case COMPLEX_CST:
7399 /* Handle evaluating a complex constant in a CONCAT target. */
7400 if (original_target && GET_CODE (original_target) == CONCAT)
7402 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7403 rtx rtarg, itarg;
7405 rtarg = XEXP (original_target, 0);
7406 itarg = XEXP (original_target, 1);
7408 /* Move the real and imaginary parts separately. */
7409 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, EXPAND_NORMAL);
7410 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, EXPAND_NORMAL);
7412 if (op0 != rtarg)
7413 emit_move_insn (rtarg, op0);
7414 if (op1 != itarg)
7415 emit_move_insn (itarg, op1);
7417 return original_target;
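          /* E.g., a _Complex double constant expanded into a CONCAT target
             becomes two DFmode moves, one per part, with no intermediate
             DCmode temporary.  */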
7420 /* ... fall through ... */
7422 case STRING_CST:
7423 temp = expand_expr_constant (exp, 1, modifier);
7425 /* temp contains a constant address.
7426 On RISC machines where a constant address isn't valid,
7427 make some insns to get that address into a register. */
7428 if (modifier != EXPAND_CONST_ADDRESS
7429 && modifier != EXPAND_INITIALIZER
7430 && modifier != EXPAND_SUM
7431 && (! memory_address_p (mode, XEXP (temp, 0))
7432 || flag_force_addr))
7433 return replace_equiv_address (temp,
7434 copy_rtx (XEXP (temp, 0)));
7435 return temp;
7437 case SAVE_EXPR:
7439 tree val = TREE_OPERAND (exp, 0);
7440 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);
7442 if (!SAVE_EXPR_RESOLVED_P (exp))
7444 /* We can indeed still hit this case, typically via builtin
7445 expanders calling save_expr immediately before expanding
7446 something. Assume this means that we only have to deal
7447 with non-BLKmode values. */
7448 gcc_assert (GET_MODE (ret) != BLKmode);
7450 val = build_decl (VAR_DECL, NULL, TREE_TYPE (exp));
7451 DECL_ARTIFICIAL (val) = 1;
7452 DECL_IGNORED_P (val) = 1;
7453 TREE_OPERAND (exp, 0) = val;
7454 SAVE_EXPR_RESOLVED_P (exp) = 1;
7456 if (!CONSTANT_P (ret))
7457 ret = copy_to_reg (ret);
7458 SET_DECL_RTL (val, ret);
7461 return ret;
7464 case GOTO_EXPR:
7465 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
7466 expand_goto (TREE_OPERAND (exp, 0));
7467 else
7468 expand_computed_goto (TREE_OPERAND (exp, 0));
7469 return const0_rtx;
7471 case CONSTRUCTOR:
7472 /* If we don't need the result, just ensure we evaluate any
7473 subexpressions. */
7474 if (ignore)
7476 unsigned HOST_WIDE_INT idx;
7477 tree value;
7479 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
7480 expand_expr (value, const0_rtx, VOIDmode, EXPAND_NORMAL);
7482 return const0_rtx;
7485 return expand_constructor (exp, target, modifier, false);
7487 case MISALIGNED_INDIRECT_REF:
7488 case ALIGN_INDIRECT_REF:
7489 case INDIRECT_REF:
7491 tree exp1 = TREE_OPERAND (exp, 0);
7493 if (modifier != EXPAND_WRITE)
7495 tree t;
7497 t = fold_read_from_constant_string (exp);
7498 if (t)
7499 return expand_expr (t, target, tmode, modifier);
7502 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
7503 op0 = memory_address (mode, op0);
7505 if (code == ALIGN_INDIRECT_REF)
7507 int align = TYPE_ALIGN_UNIT (type);
7508 op0 = gen_rtx_AND (Pmode, op0, GEN_INT (-align));
7509 op0 = memory_address (mode, op0);
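            /* The AND above clears the low-order address bits; e.g. with a
               16-byte alignment, -align is ~15, so the address is rounded
               down to the containing 16-byte boundary.  */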
7512 temp = gen_rtx_MEM (mode, op0);
7514 set_mem_attributes (temp, exp, 0);
7516 /* Resolve the misalignment now, so that we don't have to remember
7517 to resolve it later. Of course, this only works for reads. */
7518 /* ??? When we get around to supporting writes, we'll have to handle
7519 this in store_expr directly. The vectorizer isn't generating
7520 those yet, however. */
7521 if (code == MISALIGNED_INDIRECT_REF)
7523 int icode;
7524 rtx reg, insn;
7526 gcc_assert (modifier == EXPAND_NORMAL
7527 || modifier == EXPAND_STACK_PARM);
7529 /* The vectorizer should have already checked the mode. */
7530 icode = optab_handler (movmisalign_optab, mode)->insn_code;
7531 gcc_assert (icode != CODE_FOR_nothing);
7533 /* We've already validated the memory, and we're creating a
7534 new pseudo destination. The predicates really can't fail. */
7535 reg = gen_reg_rtx (mode);
7537 /* Nor can the insn generator. */
7538 insn = GEN_FCN (icode) (reg, temp);
7539 emit_insn (insn);
7541 return reg;
7544 return temp;
7547 case TARGET_MEM_REF:
7549 struct mem_address addr;
7551 get_address_description (exp, &addr);
7552 op0 = addr_for_mem_ref (&addr, true);
7553 op0 = memory_address (mode, op0);
7554 temp = gen_rtx_MEM (mode, op0);
7555 set_mem_attributes (temp, TMR_ORIGINAL (exp), 0);
7557 return temp;
7559 case ARRAY_REF:
7562 tree array = TREE_OPERAND (exp, 0);
7563 tree index = TREE_OPERAND (exp, 1);
7565 /* Fold an expression like: "foo"[2].
7566 This is not done in fold so it won't happen inside &.
7567 Don't fold if this is for wide characters since it's too
7568 difficult to do correctly and this is a very rare case. */
7570 if (modifier != EXPAND_CONST_ADDRESS
7571 && modifier != EXPAND_INITIALIZER
7572 && modifier != EXPAND_MEMORY)
7574 tree t = fold_read_from_constant_string (exp);
7576 if (t)
7577 return expand_expr (t, target, tmode, modifier);
7580 /* If this is a constant index into a constant array,
7581 just get the value from the array. Handle both the cases when
7582 we have an explicit constructor and when our operand is a variable
7583 that was declared const. */
7585 if (modifier != EXPAND_CONST_ADDRESS
7586 && modifier != EXPAND_INITIALIZER
7587 && modifier != EXPAND_MEMORY
7588 && TREE_CODE (array) == CONSTRUCTOR
7589 && ! TREE_SIDE_EFFECTS (array)
7590 && TREE_CODE (index) == INTEGER_CST)
7592 unsigned HOST_WIDE_INT ix;
7593 tree field, value;
7595 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
7596 field, value)
7597 if (tree_int_cst_equal (field, index))
7599 if (!TREE_SIDE_EFFECTS (value))
7600 return expand_expr (fold (value), target, tmode, modifier);
7601 break;
7605 else if (optimize >= 1
7606 && modifier != EXPAND_CONST_ADDRESS
7607 && modifier != EXPAND_INITIALIZER
7608 && modifier != EXPAND_MEMORY
7609 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
7610 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
7611 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
7612 && targetm.binds_local_p (array))
7614 if (TREE_CODE (index) == INTEGER_CST)
7616 tree init = DECL_INITIAL (array);
7618 if (TREE_CODE (init) == CONSTRUCTOR)
7620 unsigned HOST_WIDE_INT ix;
7621 tree field, value;
7623 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
7624 field, value)
7625 if (tree_int_cst_equal (field, index))
7627 if (TREE_SIDE_EFFECTS (value))
7628 break;
7630 if (TREE_CODE (value) == CONSTRUCTOR)
7632 /* If VALUE is a CONSTRUCTOR, this
7633 optimization is only useful if
7634 this doesn't store the CONSTRUCTOR
7635 into memory. If it does, it is more
7636 efficient to just load the data from
7637 the array directly. */
7638 rtx ret = expand_constructor (value, target,
7639 modifier, true);
7640 if (ret == NULL_RTX)
7641 break;
7644 return expand_expr (fold (value), target, tmode,
7645 modifier);
 7648 else if (TREE_CODE (init) == STRING_CST)
7650 tree index1 = index;
7651 tree low_bound = array_ref_low_bound (exp);
7652 index1 = fold_convert (sizetype, TREE_OPERAND (exp, 1));
7654 /* Optimize the special-case of a zero lower bound.
7656 We convert the low_bound to sizetype to avoid some problems
7657 with constant folding. (E.g. suppose the lower bound is 1,
 7658 and its mode is QI. Without the conversion, (ARRAY
 7659 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
 7660 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
7662 if (! integer_zerop (low_bound))
7663 index1 = size_diffop (index1, fold_convert (sizetype,
7664 low_bound));
7666 if (0 > compare_tree_int (index1,
7667 TREE_STRING_LENGTH (init)))
7669 tree type = TREE_TYPE (TREE_TYPE (init));
7670 enum machine_mode mode = TYPE_MODE (type);
7672 if (GET_MODE_CLASS (mode) == MODE_INT
7673 && GET_MODE_SIZE (mode) == 1)
7674 return gen_int_mode (TREE_STRING_POINTER (init)
7675 [TREE_INT_CST_LOW (index1)],
7676 mode);
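                /* E.g., given "static const char s[] = "abc";", the read
                   s[1] folds here to the constant 'b' in the byte-wide
                   integer mode, without touching memory.  */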
7682 goto normal_inner_ref;
7684 case COMPONENT_REF:
7685 /* If the operand is a CONSTRUCTOR, we can just extract the
7686 appropriate field if it is present. */
7687 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
7689 unsigned HOST_WIDE_INT idx;
7690 tree field, value;
7692 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
7693 idx, field, value)
7694 if (field == TREE_OPERAND (exp, 1)
7695 /* We can normally use the value of the field in the
7696 CONSTRUCTOR. However, if this is a bitfield in
7697 an integral mode that we can fit in a HOST_WIDE_INT,
7698 we must mask only the number of bits in the bitfield,
7699 since this is done implicitly by the constructor. If
7700 the bitfield does not meet either of those conditions,
7701 we can't do this optimization. */
7702 && (! DECL_BIT_FIELD (field)
7703 || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
7704 && (GET_MODE_BITSIZE (DECL_MODE (field))
7705 <= HOST_BITS_PER_WIDE_INT))))
7707 if (DECL_BIT_FIELD (field)
7708 && modifier == EXPAND_STACK_PARM)
7709 target = 0;
7710 op0 = expand_expr (value, target, tmode, modifier);
7711 if (DECL_BIT_FIELD (field))
7713 HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
7714 enum machine_mode imode = TYPE_MODE (TREE_TYPE (field));
7716 if (TYPE_UNSIGNED (TREE_TYPE (field)))
7718 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
7719 op0 = expand_and (imode, op0, op1, target);
7721 else
7723 tree count
7724 = build_int_cst (NULL_TREE,
7725 GET_MODE_BITSIZE (imode) - bitsize);
7727 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
7728 target, 0);
7729 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
7730 target, 0);
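                /* E.g., a 3-bit unsigned field read from the CONSTRUCTOR is
                   masked with (1 << 3) - 1 == 7 above; a 3-bit signed field
                   in SImode is shifted left and then arithmetically right by
                   32 - 3 == 29 bits, so the result is properly sign-extended.  */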
7734 return op0;
7737 goto normal_inner_ref;
7739 case BIT_FIELD_REF:
7740 case ARRAY_RANGE_REF:
7741 normal_inner_ref:
7743 enum machine_mode mode1;
7744 HOST_WIDE_INT bitsize, bitpos;
7745 tree offset;
7746 int volatilep = 0;
7747 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7748 &mode1, &unsignedp, &volatilep, true);
7749 rtx orig_op0;
7751 /* If we got back the original object, something is wrong. Perhaps
7752 we are evaluating an expression too early. In any event, don't
7753 infinitely recurse. */
7754 gcc_assert (tem != exp);
7756 /* If TEM's type is a union of variable size, pass TARGET to the inner
 7757 computation, since it will need a temporary and TARGET is known
 7758 to suffice. This occurs in unchecked conversion in Ada. */
7760 orig_op0 = op0
7761 = expand_expr (tem,
7762 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7763 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7764 != INTEGER_CST)
7765 && modifier != EXPAND_STACK_PARM
7766 ? target : NULL_RTX),
7767 VOIDmode,
7768 (modifier == EXPAND_INITIALIZER
7769 || modifier == EXPAND_CONST_ADDRESS
7770 || modifier == EXPAND_STACK_PARM)
7771 ? modifier : EXPAND_NORMAL);
7773 /* If this is a constant, put it into a register if it is a legitimate
7774 constant, OFFSET is 0, and we won't try to extract outside the
7775 register (in case we were passed a partially uninitialized object
7776 or a view_conversion to a larger size). Force the constant to
7777 memory otherwise. */
7778 if (CONSTANT_P (op0))
7780 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7781 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7782 && offset == 0
7783 && bitpos + bitsize <= GET_MODE_BITSIZE (mode))
7784 op0 = force_reg (mode, op0);
7785 else
7786 op0 = validize_mem (force_const_mem (mode, op0));
 7789 /* Otherwise, if this object is not in memory and we either have an
7790 offset, a BLKmode result, or a reference outside the object, put it
7791 there. Such cases can occur in Ada if we have unchecked conversion
7792 of an expression from a scalar type to an array or record type or
7793 for an ARRAY_RANGE_REF whose type is BLKmode. */
7794 else if (!MEM_P (op0)
7795 && (offset != 0
7796 || (bitpos + bitsize > GET_MODE_BITSIZE (GET_MODE (op0)))
7797 || (code == ARRAY_RANGE_REF && mode == BLKmode)))
7799 tree nt = build_qualified_type (TREE_TYPE (tem),
7800 (TYPE_QUALS (TREE_TYPE (tem))
7801 | TYPE_QUAL_CONST));
7802 rtx memloc = assign_temp (nt, 1, 1, 1);
7804 emit_move_insn (memloc, op0);
7805 op0 = memloc;
7808 if (offset != 0)
7810 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
7811 EXPAND_SUM);
7813 gcc_assert (MEM_P (op0));
7815 #ifdef POINTERS_EXTEND_UNSIGNED
7816 if (GET_MODE (offset_rtx) != Pmode)
7817 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
7818 #else
7819 if (GET_MODE (offset_rtx) != ptr_mode)
7820 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7821 #endif
7823 if (GET_MODE (op0) == BLKmode
 7824 /* A constant address in OP0 can have VOIDmode; we must
7825 not try to call force_reg in that case. */
7826 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7827 && bitsize != 0
7828 && (bitpos % bitsize) == 0
7829 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7830 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7832 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7833 bitpos = 0;
7836 op0 = offset_address (op0, offset_rtx,
7837 highest_pow2_factor (offset));
7840 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7841 record its alignment as BIGGEST_ALIGNMENT. */
7842 if (MEM_P (op0) && bitpos == 0 && offset != 0
7843 && is_aligning_offset (offset, tem))
7844 set_mem_align (op0, BIGGEST_ALIGNMENT);
7846 /* Don't forget about volatility even if this is a bitfield. */
7847 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
7849 if (op0 == orig_op0)
7850 op0 = copy_rtx (op0);
7852 MEM_VOLATILE_P (op0) = 1;
7855 /* The following code doesn't handle CONCAT.
7856 Assume only bitpos == 0 can be used for CONCAT, due to
 7857 one-element arrays having the same mode as their element. */
7858 if (GET_CODE (op0) == CONCAT)
7860 gcc_assert (bitpos == 0
7861 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)));
7862 return op0;
7865 /* In cases where an aligned union has an unaligned object
7866 as a field, we might be extracting a BLKmode value from
7867 an integer-mode (e.g., SImode) object. Handle this case
7868 by doing the extract into an object as wide as the field
7869 (which we know to be the width of a basic mode), then
7870 storing into memory, and changing the mode to BLKmode. */
7871 if (mode1 == VOIDmode
7872 || REG_P (op0) || GET_CODE (op0) == SUBREG
7873 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7874 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7875 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7876 && modifier != EXPAND_CONST_ADDRESS
7877 && modifier != EXPAND_INITIALIZER)
7878 /* If the field isn't aligned enough to fetch as a memref,
7879 fetch it as a bit field. */
7880 || (mode1 != BLKmode
7881 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
7882 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
7883 || (MEM_P (op0)
7884 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
7885 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
7886 && ((modifier == EXPAND_CONST_ADDRESS
7887 || modifier == EXPAND_INITIALIZER)
7888 ? STRICT_ALIGNMENT
7889 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
7890 || (bitpos % BITS_PER_UNIT != 0)))
7891 /* If the type and the field are a constant size and the
7892 size of the type isn't the same size as the bitfield,
7893 we must use bitfield operations. */
7894 || (bitsize >= 0
7895 && TYPE_SIZE (TREE_TYPE (exp))
7896 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
7897 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7898 bitsize)))
7900 enum machine_mode ext_mode = mode;
7902 if (ext_mode == BLKmode
7903 && ! (target != 0 && MEM_P (op0)
7904 && MEM_P (target)
7905 && bitpos % BITS_PER_UNIT == 0))
7906 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7908 if (ext_mode == BLKmode)
7910 if (target == 0)
7911 target = assign_temp (type, 0, 1, 1);
7913 if (bitsize == 0)
7914 return target;
7916 /* In this case, BITPOS must start at a byte boundary and
7917 TARGET, if specified, must be a MEM. */
7918 gcc_assert (MEM_P (op0)
7919 && (!target || MEM_P (target))
7920 && !(bitpos % BITS_PER_UNIT));
7922 emit_block_move (target,
7923 adjust_address (op0, VOIDmode,
7924 bitpos / BITS_PER_UNIT),
7925 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7926 / BITS_PER_UNIT),
7927 (modifier == EXPAND_STACK_PARM
7928 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7930 return target;
7933 op0 = validize_mem (op0);
7935 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
7936 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7938 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7939 (modifier == EXPAND_STACK_PARM
7940 ? NULL_RTX : target),
7941 ext_mode, ext_mode);
7943 /* If the result is a record type and BITSIZE is narrower than
7944 the mode of OP0, an integral mode, and this is a big endian
7945 machine, we must put the field into the high-order bits. */
7946 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7947 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7948 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7949 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7950 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7951 - bitsize),
7952 op0, 1);
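            /* E.g., an 8-bit field extracted into an SImode op0 on a
               big-endian target is shifted left by 32 - 8 == 24 bits,
               placing it in the high-order end as required.  */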
7954 /* If the result type is BLKmode, store the data into a temporary
7955 of the appropriate type, but with the mode corresponding to the
7956 mode for the data we have (op0's mode). It's tempting to make
7957 this a constant type, since we know it's only being stored once,
7958 but that can cause problems if we are taking the address of this
7959 COMPONENT_REF because the MEM of any reference via that address
7960 will have flags corresponding to the type, which will not
7961 necessarily be constant. */
7962 if (mode == BLKmode)
7964 HOST_WIDE_INT size = GET_MODE_BITSIZE (ext_mode);
7965 rtx new;
7967 /* If the reference doesn't use the alias set of its type,
7968 we cannot create the temporary using that type. */
7969 if (component_uses_parent_alias_set (exp))
7971 new = assign_stack_local (ext_mode, size, 0);
7972 set_mem_alias_set (new, get_alias_set (exp));
7974 else
7975 new = assign_stack_temp_for_type (ext_mode, size, 0, type);
7977 emit_move_insn (new, op0);
7978 op0 = copy_rtx (new);
7979 PUT_MODE (op0, BLKmode);
7980 set_mem_attributes (op0, exp, 1);
7983 return op0;
7986 /* If the result is BLKmode, use that to access the object
7987 now as well. */
7988 if (mode == BLKmode)
7989 mode1 = BLKmode;
7991 /* Get a reference to just this component. */
7992 if (modifier == EXPAND_CONST_ADDRESS
7993 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7994 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7995 else
7996 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7998 if (op0 == orig_op0)
7999 op0 = copy_rtx (op0);
8001 set_mem_attributes (op0, exp, 0);
8002 if (REG_P (XEXP (op0, 0)))
8003 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
8005 MEM_VOLATILE_P (op0) |= volatilep;
8006 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
8007 || modifier == EXPAND_CONST_ADDRESS
8008 || modifier == EXPAND_INITIALIZER)
8009 return op0;
8010 else if (target == 0)
8011 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8013 convert_move (target, op0, unsignedp);
8014 return target;
8017 case OBJ_TYPE_REF:
8018 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
8020 case CALL_EXPR:
8021 /* All valid uses of __builtin_va_arg_pack () are removed during
8022 inlining. */
8023 if (CALL_EXPR_VA_ARG_PACK (exp))
8024 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
8026 tree fndecl = get_callee_fndecl (exp), attr;
8028 if (fndecl
8029 && (attr = lookup_attribute ("error",
8030 DECL_ATTRIBUTES (fndecl))) != NULL)
8031 error ("%Kcall to %qs declared with attribute error: %s",
8032 exp, lang_hooks.decl_printable_name (fndecl, 1),
8033 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
8034 if (fndecl
8035 && (attr = lookup_attribute ("warning",
8036 DECL_ATTRIBUTES (fndecl))) != NULL)
8037 warning (0, "%Kcall to %qs declared with attribute warning: %s",
8038 exp, lang_hooks.decl_printable_name (fndecl, 1),
8039 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
8041 /* Check for a built-in function. */
8042 if (fndecl && DECL_BUILT_IN (fndecl))
8044 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_FRONTEND)
8045 return lang_hooks.expand_expr (exp, original_target,
8046 tmode, modifier, alt_rtl);
8047 else
8048 return expand_builtin (exp, target, subtarget, tmode, ignore);
8051 return expand_call (exp, target, ignore);
8053 case NON_LVALUE_EXPR:
8054 case NOP_EXPR:
8055 case CONVERT_EXPR:
8056 if (TREE_OPERAND (exp, 0) == error_mark_node)
8057 return const0_rtx;
8059 if (TREE_CODE (type) == UNION_TYPE)
8061 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
8063 /* If both input and output are BLKmode, this conversion isn't doing
 8064 anything except possibly changing the memory attributes. */
8065 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
8067 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
8068 modifier);
8070 result = copy_rtx (result);
8071 set_mem_attributes (result, exp, 0);
8072 return result;
8075 if (target == 0)
8077 if (TYPE_MODE (type) != BLKmode)
8078 target = gen_reg_rtx (TYPE_MODE (type));
8079 else
8080 target = assign_temp (type, 0, 1, 1);
8083 if (MEM_P (target))
8084 /* Store data into beginning of memory target. */
8085 store_expr (TREE_OPERAND (exp, 0),
8086 adjust_address (target, TYPE_MODE (valtype), 0),
8087 modifier == EXPAND_STACK_PARM,
8088 false);
8090 else
8092 gcc_assert (REG_P (target));
8094 /* Store this field into a union of the proper type. */
8095 store_field (target,
8096 MIN ((int_size_in_bytes (TREE_TYPE
8097 (TREE_OPERAND (exp, 0)))
8098 * BITS_PER_UNIT),
8099 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
8100 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
8101 type, 0, false);
8104 /* Return the entire union. */
8105 return target;
8108 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8110 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
8111 modifier);
8113 /* If the signedness of the conversion differs and OP0 is
8114 a promoted SUBREG, clear that indication since we now
8115 have to do the proper extension. */
8116 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
8117 && GET_CODE (op0) == SUBREG)
8118 SUBREG_PROMOTED_VAR_P (op0) = 0;
8120 return REDUCE_BIT_FIELD (op0);
8123 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode,
8124 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
8125 if (GET_MODE (op0) == mode)
8128 /* If OP0 is a constant, just convert it into the proper mode. */
8129 else if (CONSTANT_P (op0))
8131 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8132 enum machine_mode inner_mode = TYPE_MODE (inner_type);
8134 if (modifier == EXPAND_INITIALIZER)
8135 op0 = simplify_gen_subreg (mode, op0, inner_mode,
8136 subreg_lowpart_offset (mode,
8137 inner_mode));
8138 else
 8139 op0 = convert_modes (mode, inner_mode, op0,
8140 TYPE_UNSIGNED (inner_type));
8143 else if (modifier == EXPAND_INITIALIZER)
8144 op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
8146 else if (target == 0)
8147 op0 = convert_to_mode (mode, op0,
8148 TYPE_UNSIGNED (TREE_TYPE
8149 (TREE_OPERAND (exp, 0))));
8150 else
8152 convert_move (target, op0,
8153 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8154 op0 = target;
8157 return REDUCE_BIT_FIELD (op0);
8159 case VIEW_CONVERT_EXPR:
8160 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
8162 /* If the input and output modes are both the same, we are done. */
8163 if (TYPE_MODE (type) == GET_MODE (op0))
8165 /* If neither mode is BLKmode, and both modes are the same size
8166 then we can use gen_lowpart. */
8167 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
8168 && GET_MODE_SIZE (TYPE_MODE (type))
8169 == GET_MODE_SIZE (GET_MODE (op0)))
8171 if (GET_CODE (op0) == SUBREG)
8172 op0 = force_reg (GET_MODE (op0), op0);
8173 op0 = gen_lowpart (TYPE_MODE (type), op0);
8175 /* If both modes are integral, then we can convert from one to the
8176 other. */
8177 else if (SCALAR_INT_MODE_P (GET_MODE (op0))
8178 && SCALAR_INT_MODE_P (TYPE_MODE (type)))
8179 op0 = convert_modes (TYPE_MODE (type), GET_MODE (op0), op0,
8180 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8181 /* As a last resort, spill op0 to memory, and reload it in a
8182 different mode. */
8183 else if (!MEM_P (op0))
8185 /* If the operand is not a MEM, force it into memory. Since we
8186 are going to be changing the mode of the MEM, don't call
8187 force_const_mem for constants because we don't allow pool
8188 constants to change mode. */
8189 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8191 gcc_assert (!TREE_ADDRESSABLE (exp));
8193 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
8194 target
8195 = assign_stack_temp_for_type
8196 (TYPE_MODE (inner_type),
8197 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
8199 emit_move_insn (target, op0);
8200 op0 = target;
8203 /* At this point, OP0 is in the correct mode. If the output type is such
8204 that the operand is known to be aligned, indicate that it is.
8205 Otherwise, we need only be concerned about alignment for non-BLKmode
8206 results. */
8207 if (MEM_P (op0))
8209 op0 = copy_rtx (op0);
8211 if (TYPE_ALIGN_OK (type))
8212 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
8213 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
8214 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
8216 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8217 HOST_WIDE_INT temp_size
8218 = MAX (int_size_in_bytes (inner_type),
8219 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
8220 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
8221 temp_size, 0, type);
8222 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
8224 gcc_assert (!TREE_ADDRESSABLE (exp));
8226 if (GET_MODE (op0) == BLKmode)
8227 emit_block_move (new_with_op0_mode, op0,
8228 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
8229 (modifier == EXPAND_STACK_PARM
8230 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
8231 else
8232 emit_move_insn (new_with_op0_mode, op0);
8234 op0 = new;
8237 op0 = adjust_address (op0, TYPE_MODE (type), 0);
8240 return op0;
8242 case POINTER_PLUS_EXPR:
 8243 /* Even though the sizetype mode and the pointer's mode can be different,
 8244 expand is able to handle this correctly and get the correct result out
 8245 of the PLUS_EXPR code. */
8246 case PLUS_EXPR:
8248 /* Check if this is a case for multiplication and addition. */
8249 if ((TREE_CODE (type) == INTEGER_TYPE
8250 || TREE_CODE (type) == FIXED_POINT_TYPE)
8251 && TREE_CODE (TREE_OPERAND (exp, 0)) == MULT_EXPR)
8253 tree subsubexp0, subsubexp1;
8254 enum tree_code code0, code1, this_code;
8256 subexp0 = TREE_OPERAND (exp, 0);
8257 subsubexp0 = TREE_OPERAND (subexp0, 0);
8258 subsubexp1 = TREE_OPERAND (subexp0, 1);
8259 code0 = TREE_CODE (subsubexp0);
8260 code1 = TREE_CODE (subsubexp1);
8261 this_code = TREE_CODE (type) == INTEGER_TYPE ? NOP_EXPR
8262 : FIXED_CONVERT_EXPR;
8263 if (code0 == this_code && code1 == this_code
8264 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
8265 < TYPE_PRECISION (TREE_TYPE (subsubexp0)))
8266 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
8267 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp1, 0))))
8268 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
8269 == TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subsubexp1, 0)))))
8271 tree op0type = TREE_TYPE (TREE_OPERAND (subsubexp0, 0));
8272 enum machine_mode innermode = TYPE_MODE (op0type);
8273 bool zextend_p = TYPE_UNSIGNED (op0type);
8274 bool sat_p = TYPE_SATURATING (TREE_TYPE (subsubexp0));
8275 if (sat_p == 0)
8276 this_optab = zextend_p ? umadd_widen_optab : smadd_widen_optab;
8277 else
8278 this_optab = zextend_p ? usmadd_widen_optab
8279 : ssmadd_widen_optab;
8280 if (mode == GET_MODE_2XWIDER_MODE (innermode)
8281 && (optab_handler (this_optab, mode)->insn_code
8282 != CODE_FOR_nothing))
8284 expand_operands (TREE_OPERAND (subsubexp0, 0),
8285 TREE_OPERAND (subsubexp1, 0),
8286 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8287 op2 = expand_expr (TREE_OPERAND (exp, 1), subtarget,
8288 VOIDmode, EXPAND_NORMAL);
8289 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
8290 target, unsignedp);
8291 gcc_assert (temp);
8292 return REDUCE_BIT_FIELD (temp);
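                /* Sketch of the pattern matched above: with "short a, b;
                   int c;", the tree (int) a * (int) b + c maps to
                   smadd_widen_optab (or its unsigned/saturating variants),
                   i.e. one multiply-accumulate with the widening folded in
                   (assuming int is exactly twice as wide as short here).  */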
8297 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
8298 something else, make sure we add the register to the constant and
8299 then to the other thing. This case can occur during strength
8300 reduction and doing it this way will produce better code if the
8301 frame pointer or argument pointer is eliminated.
8303 fold-const.c will ensure that the constant is always in the inner
8304 PLUS_EXPR, so the only case we need to do anything about is if
8305 sp, ap, or fp is our second argument, in which case we must swap
8306 the innermost first argument and our second argument. */
8308 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
8309 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
8310 && TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL
8311 && (DECL_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
8312 || DECL_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
8313 || DECL_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
8315 tree t = TREE_OPERAND (exp, 1);
8317 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
8318 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
8321 /* If the result is to be ptr_mode and we are adding an integer to
8322 something, we might be forming a constant. So try to use
8323 plus_constant. If it produces a sum and we can't accept it,
8324 use force_operand. This allows P = &ARR[const] to generate
8325 efficient code on machines where a SYMBOL_REF is not a valid
8326 address.
8328 If this is an EXPAND_SUM call, always return the sum. */
8329 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
8330 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
8332 if (modifier == EXPAND_STACK_PARM)
8333 target = 0;
8334 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
8335 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
8336 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
8338 rtx constant_part;
8340 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
8341 EXPAND_SUM);
8342 /* Use immed_double_const to ensure that the constant is
8343 truncated according to the mode of OP1, then sign extended
8344 to a HOST_WIDE_INT. Using the constant directly can result
8345 in non-canonical RTL in a 64x32 cross compile. */
8346 constant_part
8347 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
8348 (HOST_WIDE_INT) 0,
8349 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
8350 op1 = plus_constant (op1, INTVAL (constant_part));
8351 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8352 op1 = force_operand (op1, target);
8353 return REDUCE_BIT_FIELD (op1);
8356 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8357 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
8358 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
8360 rtx constant_part;
8362 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8363 (modifier == EXPAND_INITIALIZER
8364 ? EXPAND_INITIALIZER : EXPAND_SUM));
8365 if (! CONSTANT_P (op0))
8367 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
8368 VOIDmode, modifier);
8369 /* Return a PLUS if modifier says it's OK. */
8370 if (modifier == EXPAND_SUM
8371 || modifier == EXPAND_INITIALIZER)
8372 return simplify_gen_binary (PLUS, mode, op0, op1);
8373 goto binop2;
8375 /* Use immed_double_const to ensure that the constant is
8376 truncated according to the mode of OP1, then sign extended
8377 to a HOST_WIDE_INT. Using the constant directly can result
8378 in non-canonical RTL in a 64x32 cross compile. */
8379 constant_part
8380 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
8381 (HOST_WIDE_INT) 0,
8382 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
8383 op0 = plus_constant (op0, INTVAL (constant_part));
8384 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8385 op0 = force_operand (op0, target);
8386 return REDUCE_BIT_FIELD (op0);
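            /* E.g., for "int arr[16]", &arr[10] expands via plus_constant
               to (plus:P (symbol_ref "arr") (const_int 40)) (assuming
               4-byte int), which an EXPAND_SUM caller can fold straight
               into an address.  */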
8390 /* No sense saving up arithmetic to be done
8391 if it's all in the wrong mode to form part of an address.
8392 And force_operand won't know whether to sign-extend or
8393 zero-extend. */
8394 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8395 || mode != ptr_mode)
8397 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8398 subtarget, &op0, &op1, 0);
8399 if (op0 == const0_rtx)
8400 return op1;
8401 if (op1 == const0_rtx)
8402 return op0;
8403 goto binop2;
8406 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8407 subtarget, &op0, &op1, modifier);
8408 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8410 case MINUS_EXPR:
8411 /* Check if this is a case for multiplication and subtraction. */
8412 if ((TREE_CODE (type) == INTEGER_TYPE
8413 || TREE_CODE (type) == FIXED_POINT_TYPE)
8414 && TREE_CODE (TREE_OPERAND (exp, 1)) == MULT_EXPR)
8416 tree subsubexp0, subsubexp1;
8417 enum tree_code code0, code1, this_code;
8419 subexp1 = TREE_OPERAND (exp, 1);
8420 subsubexp0 = TREE_OPERAND (subexp1, 0);
8421 subsubexp1 = TREE_OPERAND (subexp1, 1);
8422 code0 = TREE_CODE (subsubexp0);
8423 code1 = TREE_CODE (subsubexp1);
8424 this_code = TREE_CODE (type) == INTEGER_TYPE ? NOP_EXPR
8425 : FIXED_CONVERT_EXPR;
8426 if (code0 == this_code && code1 == this_code
8427 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
8428 < TYPE_PRECISION (TREE_TYPE (subsubexp0)))
8429 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
8430 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp1, 0))))
8431 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
8432 == TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subsubexp1, 0)))))
8434 tree op0type = TREE_TYPE (TREE_OPERAND (subsubexp0, 0));
8435 enum machine_mode innermode = TYPE_MODE (op0type);
8436 bool zextend_p = TYPE_UNSIGNED (op0type);
8437 bool sat_p = TYPE_SATURATING (TREE_TYPE (subsubexp0));
8438 if (sat_p == 0)
8439 this_optab = zextend_p ? umsub_widen_optab : smsub_widen_optab;
8440 else
8441 this_optab = zextend_p ? usmsub_widen_optab
8442 : ssmsub_widen_optab;
8443 if (mode == GET_MODE_2XWIDER_MODE (innermode)
8444 && (optab_handler (this_optab, mode)->insn_code
8445 != CODE_FOR_nothing))
8447 expand_operands (TREE_OPERAND (subsubexp0, 0),
8448 TREE_OPERAND (subsubexp1, 0),
8449 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8450 op2 = expand_expr (TREE_OPERAND (exp, 0), subtarget,
8451 VOIDmode, EXPAND_NORMAL);
8452 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
8453 target, unsignedp);
8454 gcc_assert (temp);
8455 return REDUCE_BIT_FIELD (temp);
8460 /* For initializers, we are allowed to return a MINUS of two
8461 symbolic constants. Here we handle all cases when both operands
8462 are constant. */
8463 /* Handle difference of two symbolic constants,
8464 for the sake of an initializer. */
8465 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8466 && really_constant_p (TREE_OPERAND (exp, 0))
8467 && really_constant_p (TREE_OPERAND (exp, 1)))
8469 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8470 NULL_RTX, &op0, &op1, modifier);
8472 /* If the last operand is a CONST_INT, use plus_constant of
8473 the negated constant. Else make the MINUS. */
8474 if (GET_CODE (op1) == CONST_INT)
8475 return REDUCE_BIT_FIELD (plus_constant (op0, - INTVAL (op1)));
8476 else
8477 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
8480 /* No sense saving up arithmetic to be done
8481 if it's all in the wrong mode to form part of an address.
8482 And force_operand won't know whether to sign-extend or
8483 zero-extend. */
8484 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8485 || mode != ptr_mode)
8486 goto binop;
8488 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8489 subtarget, &op0, &op1, modifier);
8491 /* Convert A - const to A + (-const). */
8492 if (GET_CODE (op1) == CONST_INT)
8494 op1 = negate_rtx (mode, op1);
8495 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8498 goto binop2;
8500 case MULT_EXPR:
8501 /* If this is a fixed-point operation, then we cannot use the code
8502 below because "expand_mult" doesn't support sat/no-sat fixed-point
8503 multiplications. */
8504 if (ALL_FIXED_POINT_MODE_P (mode))
8505 goto binop;
8507 /* If first operand is constant, swap them.
8508 Thus the following special case checks need only
8509 check the second operand. */
8510 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
8512 tree t1 = TREE_OPERAND (exp, 0);
8513 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
8514 TREE_OPERAND (exp, 1) = t1;
8517 /* Attempt to return something suitable for generating an
8518 indexed address, for machines that support that. */
8520 if (modifier == EXPAND_SUM && mode == ptr_mode
8521 && host_integerp (TREE_OPERAND (exp, 1), 0))
8523 tree exp1 = TREE_OPERAND (exp, 1);
8525 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8526 EXPAND_SUM);
8528 if (!REG_P (op0))
8529 op0 = force_operand (op0, NULL_RTX);
8530 if (!REG_P (op0))
8531 op0 = copy_to_mode_reg (mode, op0);
8533 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
8534 gen_int_mode (tree_low_cst (exp1, 0),
8535 TYPE_MODE (TREE_TYPE (exp1)))));
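          /* E.g., under EXPAND_SUM an index expression such as i * 4 is
             returned as (mult (reg) (const_int 4)) so the caller can try
             to form an indexed address from it instead of materializing
             the product in a register.  */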
8538 if (modifier == EXPAND_STACK_PARM)
8539 target = 0;
8541 /* Check for multiplying things that have been extended
8542 from a narrower type. If this machine supports multiplying
8543 in that narrower type with a result in the desired type,
8544 do it that way, and avoid the explicit type-conversion. */
8546 subexp0 = TREE_OPERAND (exp, 0);
8547 subexp1 = TREE_OPERAND (exp, 1);
8548 /* First, check if we have a multiplication of one signed and one
8549 unsigned operand. */
8550 if (TREE_CODE (subexp0) == NOP_EXPR
8551 && TREE_CODE (subexp1) == NOP_EXPR
8552 && TREE_CODE (type) == INTEGER_TYPE
8553 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
8554 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
8555 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
8556 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp1, 0))))
8557 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
8558 != TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp1, 0)))))
8560 enum machine_mode innermode
8561 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (subexp0, 0)));
8562 this_optab = usmul_widen_optab;
8563 if (mode == GET_MODE_WIDER_MODE (innermode))
8565 if (optab_handler (this_optab, mode)->insn_code != CODE_FOR_nothing)
8567 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp0, 0))))
8568 expand_operands (TREE_OPERAND (subexp0, 0),
8569 TREE_OPERAND (subexp1, 0),
8570 NULL_RTX, &op0, &op1, 0);
8571 else
8572 expand_operands (TREE_OPERAND (subexp0, 0),
8573 TREE_OPERAND (subexp1, 0),
8574 NULL_RTX, &op1, &op0, 0);
8576 goto binop3;
8580 /* Check for a multiplication with matching signedness. */
8581 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
8582 && TREE_CODE (type) == INTEGER_TYPE
8583 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8584 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
8585 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8586 && int_fits_type_p (TREE_OPERAND (exp, 1),
8587 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8588 /* Don't use a widening multiply if a shift will do. */
8589 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
8590 > HOST_BITS_PER_WIDE_INT)
 8591 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
 8592 ||
 8593 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8594 && (TYPE_PRECISION (TREE_TYPE
8595 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8596 == TYPE_PRECISION (TREE_TYPE
8597 (TREE_OPERAND
8598 (TREE_OPERAND (exp, 0), 0))))
8599 /* If both operands are extended, they must either both
8600 be zero-extended or both be sign-extended. */
8601 && (TYPE_UNSIGNED (TREE_TYPE
8602 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8603 == TYPE_UNSIGNED (TREE_TYPE
8604 (TREE_OPERAND
8605 (TREE_OPERAND (exp, 0), 0)))))))
8607 tree op0type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
8608 enum machine_mode innermode = TYPE_MODE (op0type);
8609 bool zextend_p = TYPE_UNSIGNED (op0type);
8610 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
8611 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
8613 if (mode == GET_MODE_2XWIDER_MODE (innermode))
8615 if (optab_handler (this_optab, mode)->insn_code != CODE_FOR_nothing)
8617 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8618 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8619 TREE_OPERAND (exp, 1),
8620 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8621 else
8622 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8623 TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
8624 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8625 goto binop3;
8627 else if (optab_handler (other_optab, mode)->insn_code != CODE_FOR_nothing
8628 && innermode == word_mode)
8630 rtx htem, hipart;
8631 op0 = expand_normal (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
8632 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8633 op1 = convert_modes (innermode, mode,
8634 expand_normal (TREE_OPERAND (exp, 1)),
8635 unsignedp);
8636 else
8637 op1 = expand_normal (TREE_OPERAND (TREE_OPERAND (exp, 1), 0));
8638 temp = expand_binop (mode, other_optab, op0, op1, target,
8639 unsignedp, OPTAB_LIB_WIDEN);
8640 hipart = gen_highpart (innermode, temp);
8641 htem = expand_mult_highpart_adjust (innermode, hipart,
8642 op0, op1, hipart,
8643 zextend_p);
8644 if (htem != hipart)
8645 emit_move_insn (hipart, htem);
8646 return REDUCE_BIT_FIELD (temp);
8650 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8651 subtarget, &op0, &op1, 0);
8652 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8654 case TRUNC_DIV_EXPR:
8655 case FLOOR_DIV_EXPR:
8656 case CEIL_DIV_EXPR:
8657 case ROUND_DIV_EXPR:
8658 case EXACT_DIV_EXPR:
8659 /* If this is a fixed-point operation, then we cannot use the code
8660 below because "expand_divmod" doesn't support sat/no-sat fixed-point
8661 divisions. */
8662 if (ALL_FIXED_POINT_MODE_P (mode))
8663 goto binop;
8665 if (modifier == EXPAND_STACK_PARM)
8666 target = 0;
 8667 /* Possible optimization: compute the dividend with EXPAND_SUM;
 8668 then, if the divisor is constant, we can optimize the case where
 8669 some terms of the dividend have coefficients divisible by it. */
8670 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8671 subtarget, &op0, &op1, 0);
8672 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8674 case RDIV_EXPR:
8675 goto binop;
8677 case TRUNC_MOD_EXPR:
8678 case FLOOR_MOD_EXPR:
8679 case CEIL_MOD_EXPR:
8680 case ROUND_MOD_EXPR:
8681 if (modifier == EXPAND_STACK_PARM)
8682 target = 0;
8683 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8684 subtarget, &op0, &op1, 0);
8685 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8687 case FIXED_CONVERT_EXPR:
8688 op0 = expand_normal (TREE_OPERAND (exp, 0));
8689 if (target == 0 || modifier == EXPAND_STACK_PARM)
8690 target = gen_reg_rtx (mode);
8692 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == INTEGER_TYPE
8693 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
8694 || (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type)))
8695 expand_fixed_convert (target, op0, 1, TYPE_SATURATING (type));
8696 else
8697 expand_fixed_convert (target, op0, 0, TYPE_SATURATING (type));
8698 return target;
8700 case FIX_TRUNC_EXPR:
8701 op0 = expand_normal (TREE_OPERAND (exp, 0));
8702 if (target == 0 || modifier == EXPAND_STACK_PARM)
8703 target = gen_reg_rtx (mode);
8704 expand_fix (target, op0, unsignedp);
8705 return target;
8707 case FLOAT_EXPR:
8708 op0 = expand_normal (TREE_OPERAND (exp, 0));
8709 if (target == 0 || modifier == EXPAND_STACK_PARM)
8710 target = gen_reg_rtx (mode);
8711 /* expand_float can't figure out what to do if FROM has VOIDmode.
8712 So give it the correct mode. With -O, cse will optimize this. */
8713 if (GET_MODE (op0) == VOIDmode)
8714 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8715 op0);
8716 expand_float (target, op0,
8717 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8718 return target;
8720 case NEGATE_EXPR:
8721 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget,
8722 VOIDmode, EXPAND_NORMAL);
8723 if (modifier == EXPAND_STACK_PARM)
8724 target = 0;
8725 temp = expand_unop (mode,
8726 optab_for_tree_code (NEGATE_EXPR, type),
8727 op0, target, 0);
8728 gcc_assert (temp);
8729 return REDUCE_BIT_FIELD (temp);
8731 case ABS_EXPR:
8732 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget,
8733 VOIDmode, EXPAND_NORMAL);
8734 if (modifier == EXPAND_STACK_PARM)
8735 target = 0;
8737 /* ABS_EXPR is not valid for complex arguments. */
8738 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
8739 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
8741 /* Unsigned abs is simply the operand. Testing here means we don't
8742 risk generating incorrect code below. */
8743 if (TYPE_UNSIGNED (type))
8744 return op0;
8746 return expand_abs (mode, op0, target, unsignedp,
8747 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
8749 case MAX_EXPR:
8750 case MIN_EXPR:
8751 target = original_target;
8752 if (target == 0
8753 || modifier == EXPAND_STACK_PARM
8754 || (MEM_P (target) && MEM_VOLATILE_P (target))
8755 || GET_MODE (target) != mode
8756 || (REG_P (target)
8757 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8758 target = gen_reg_rtx (mode);
8759 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8760 target, &op0, &op1, 0);
8762 /* First try to do it with a special MIN or MAX instruction.
8763 If that does not win, use a conditional jump to select the proper
8764 value. */
8765 this_optab = optab_for_tree_code (code, type);
8766 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8767 OPTAB_WIDEN);
8768 if (temp != 0)
8769 return temp;
8771 /* At this point, a MEM target is no longer useful; we will get better
8772 code without it. */
8774 if (! REG_P (target))
8775 target = gen_reg_rtx (mode);
8777 /* If op1 was placed in target, swap op0 and op1. */
8778 if (target != op0 && target == op1)
8780 temp = op0;
8781 op0 = op1;
8782 op1 = temp;
8785 /* We generate better code and avoid problems with op1 mentioning
8786 target by forcing op1 into a pseudo if it isn't a constant. */
8787 if (! CONSTANT_P (op1))
8788 op1 = force_reg (mode, op1);
8791 enum rtx_code comparison_code;
8792 rtx cmpop1 = op1;
8794 if (code == MAX_EXPR)
8795 comparison_code = unsignedp ? GEU : GE;
8796 else
8797 comparison_code = unsignedp ? LEU : LE;
8799 /* Canonicalize to comparisons against 0. */
8800 if (op1 == const1_rtx)
8802 /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
8803 or (a != 0 ? a : 1) for unsigned.
8804 For MIN we are safe converting (a <= 1 ? a : 1)
8805 into (a <= 0 ? a : 1) */
8806 cmpop1 = const0_rtx;
8807 if (code == MAX_EXPR)
8808 comparison_code = unsignedp ? NE : GT;
8810 if (op1 == constm1_rtx && !unsignedp)
8812 /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
8813 and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
8814 cmpop1 = const0_rtx;
8815 if (code == MIN_EXPR)
8816 comparison_code = LT;
8818 #ifdef HAVE_conditional_move
8819 /* Use a conditional move if possible. */
8820 if (can_conditionally_move_p (mode))
8822 rtx insn;
8824 /* ??? Same problem as in expmed.c: emit_conditional_move
8825 forces a stack adjustment via compare_from_rtx, and we
8826 lose the stack adjustment if the sequence we are about
8827 to create is discarded. */
8828 do_pending_stack_adjust ();
8830 start_sequence ();
8832 /* Try to emit the conditional move. */
8833 insn = emit_conditional_move (target, comparison_code,
8834 op0, cmpop1, mode,
8835 op0, op1, mode,
8836 unsignedp);
8838 /* If we could do the conditional move, emit the sequence,
8839 and return. */
8840 if (insn)
8842 rtx seq = get_insns ();
8843 end_sequence ();
8844 emit_insn (seq);
8845 return target;
8848 /* Otherwise discard the sequence and fall back to code with
8849 branches. */
8850 end_sequence ();
8852 #endif
8853 if (target != op0)
8854 emit_move_insn (target, op0);
8856 temp = gen_label_rtx ();
8857 do_compare_rtx_and_jump (target, cmpop1, comparison_code,
8858 unsignedp, mode, NULL_RTX, NULL_RTX, temp);
8860 emit_move_insn (target, op1);
8861 emit_label (temp);
8862 return target;
8864 case BIT_NOT_EXPR:
8865 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget,
8866 VOIDmode, EXPAND_NORMAL);
8867 if (modifier == EXPAND_STACK_PARM)
8868 target = 0;
8869 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8870 gcc_assert (temp);
8871 return temp;
8873 /* ??? Can optimize bitwise operations with one arg constant.
8874 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8875 and (a bitwise1 b) bitwise2 b (etc)
 8876 but that is probably not worthwhile. */
8878 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8879 boolean values when we want in all cases to compute both of them. In
8880 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8881 as actual zero-or-1 values and then bitwise anding. In cases where
8882 there cannot be any side effects, better code would be made by
8883 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8884 how to recognize those cases. */
8886 case TRUTH_AND_EXPR:
8887 code = BIT_AND_EXPR;
8888 case BIT_AND_EXPR:
8889 goto binop;
8891 case TRUTH_OR_EXPR:
8892 code = BIT_IOR_EXPR;
8893 case BIT_IOR_EXPR:
8894 goto binop;
8896 case TRUTH_XOR_EXPR:
8897 code = BIT_XOR_EXPR;
8898 case BIT_XOR_EXPR:
8899 goto binop;
8901 case LSHIFT_EXPR:
8902 case RSHIFT_EXPR:
8903 case LROTATE_EXPR:
8904 case RROTATE_EXPR:
8905 /* If this is a fixed-point operation, then we cannot use the code
8906 below because "expand_shift" doesn't support sat/no-sat fixed-point
8907 shifts. */
8908 if (ALL_FIXED_POINT_MODE_P (mode))
8909 goto binop;
8911 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8912 subtarget = 0;
8913 if (modifier == EXPAND_STACK_PARM)
8914 target = 0;
8915 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget,
8916 VOIDmode, EXPAND_NORMAL);
8917 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8918 unsignedp);
8920 /* Could determine the answer when only additive constants differ. Also,
8921 the addition of one can be handled by changing the condition. */
8922 case LT_EXPR:
8923 case LE_EXPR:
8924 case GT_EXPR:
8925 case GE_EXPR:
8926 case EQ_EXPR:
8927 case NE_EXPR:
8928 case UNORDERED_EXPR:
8929 case ORDERED_EXPR:
8930 case UNLT_EXPR:
8931 case UNLE_EXPR:
8932 case UNGT_EXPR:
8933 case UNGE_EXPR:
8934 case UNEQ_EXPR:
8935 case LTGT_EXPR:
8936 temp = do_store_flag (exp,
8937 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8938 tmode != VOIDmode ? tmode : mode, 0);
8939 if (temp != 0)
8940 return temp;
8942 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8943 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8944 && original_target
8945 && REG_P (original_target)
8946 && (GET_MODE (original_target)
8947 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8949 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8950 VOIDmode, EXPAND_NORMAL);
8952 /* If temp is constant, we can just compute the result. */
8953 if (GET_CODE (temp) == CONST_INT)
8955 if (INTVAL (temp) != 0)
8956 emit_move_insn (target, const1_rtx);
8957 else
8958 emit_move_insn (target, const0_rtx);
8960 return target;
8963 if (temp != original_target)
8965 enum machine_mode mode1 = GET_MODE (temp);
8966 if (mode1 == VOIDmode)
8967 mode1 = tmode != VOIDmode ? tmode : mode;
8969 temp = copy_to_mode_reg (mode1, temp);
8972 op1 = gen_label_rtx ();
8973 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8974 GET_MODE (temp), unsignedp, op1);
8975 emit_move_insn (temp, const1_rtx);
8976 emit_label (op1);
8977 return temp;
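          /* The sequence built above is, schematically:
                 temp = foo;
                 if (temp == 0) goto L;
                 temp = 1;
               L:
             i.e. any nonzero value collapses to 1 without needing a
             store-flag instruction.  */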
8980 /* If no set-flag instruction, must generate a conditional store
8981 into a temporary variable. Drop through and handle this
8982 like && and ||. */
8984 if (! ignore
8985 && (target == 0
8986 || modifier == EXPAND_STACK_PARM
8987 || ! safe_from_p (target, exp, 1)
8988 /* Make sure we don't have a hard reg (such as function's return
8989 value) live across basic blocks, if not optimizing. */
8990 || (!optimize && REG_P (target)
8991 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8992 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8994 if (target)
8995 emit_move_insn (target, const0_rtx);
8997 op1 = gen_label_rtx ();
8998 jumpifnot (exp, op1);
9000 if (target)
9001 emit_move_insn (target, const1_rtx);
9003 emit_label (op1);
9004 return ignore ? const0_rtx : target;
9006 case TRUTH_NOT_EXPR:
9007 if (modifier == EXPAND_STACK_PARM)
9008 target = 0;
9009 op0 = expand_expr (TREE_OPERAND (exp, 0), target,
9010 VOIDmode, EXPAND_NORMAL);
9011 /* The parser is careful to generate TRUTH_NOT_EXPR
9012 only with operands that are always zero or one. */
9013 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
9014 target, 1, OPTAB_LIB_WIDEN);
9015 gcc_assert (temp);
9016 return temp;
9018 case STATEMENT_LIST:
9020 tree_stmt_iterator iter;
9022 gcc_assert (ignore);
9024 for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
9025 expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
9027 return const0_rtx;
9029 case COND_EXPR:
9030 /* A COND_EXPR with its type being VOID_TYPE represents a
9031 conditional jump and is handled in
9032 expand_gimple_cond_expr. */
9033 gcc_assert (!VOID_TYPE_P (TREE_TYPE (exp)));
9035 /* Note that COND_EXPRs whose type is a structure or union
9036 are required to be constructed to contain assignments of
9037 a temporary variable, so that we can evaluate them here
9038 for side effect only. If type is void, we must do likewise. */
9040 gcc_assert (!TREE_ADDRESSABLE (type)
9041 && !ignore
9042 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node
9043 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node);
9045 /* If we are not to produce a result, we have no target. Otherwise,
9046 if a target was specified use it; it will not be used as an
9047 intermediate target unless it is safe. If no target, use a
9048 temporary. */
9050 if (modifier != EXPAND_STACK_PARM
9051 && original_target
9052 && safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
9053 && GET_MODE (original_target) == mode
9054 #ifdef HAVE_conditional_move
9055 && (! can_conditionally_move_p (mode)
9056 || REG_P (original_target))
9057 #endif
9058 && !MEM_P (original_target))
9059 temp = original_target;
9060 else
9061 temp = assign_temp (type, 0, 0, 1);
9063 do_pending_stack_adjust ();
9064 NO_DEFER_POP;
9065 op0 = gen_label_rtx ();
9066 op1 = gen_label_rtx ();
9067 jumpifnot (TREE_OPERAND (exp, 0), op0);
9068 store_expr (TREE_OPERAND (exp, 1), temp,
9069 modifier == EXPAND_STACK_PARM,
9070 false);
9072 emit_jump_insn (gen_jump (op1));
9073 emit_barrier ();
9074 emit_label (op0);
9075 store_expr (TREE_OPERAND (exp, 2), temp,
9076 modifier == EXPAND_STACK_PARM,
9077 false);
9079 emit_label (op1);
9080 OK_DEFER_POP;
9081 return temp;
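      /* The shape generated above is, schematically:
             if (!cond) goto L0;
             temp = <then-value>; goto L1;
           L0: temp = <else-value>;
           L1: ...
         with pending stack adjustments deferred across the branches.  */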
9083 case VEC_COND_EXPR:
9084 target = expand_vec_cond_expr (exp, target);
9085 return target;
9087 case MODIFY_EXPR:
9089 tree lhs = TREE_OPERAND (exp, 0);
9090 tree rhs = TREE_OPERAND (exp, 1);
9091 gcc_assert (ignore);
9092 expand_assignment (lhs, rhs, false);
9093 return const0_rtx;
9096 case GIMPLE_MODIFY_STMT:
9098 tree lhs = GIMPLE_STMT_OPERAND (exp, 0);
9099 tree rhs = GIMPLE_STMT_OPERAND (exp, 1);
9101 gcc_assert (ignore);
9103 /* Check for |= or &= of a bitfield of size 1 with another bitfield
9104 of size 1. In this case (unless we need the result of the
9105 assignment), we can do this more efficiently with a
9106 test followed by an assignment, if necessary.
9108 ??? We can't get a BIT_FIELD_REF here at this point. But if
9109 things change so that we do, this code should be enhanced to
9110 support it. */
9111 if (TREE_CODE (lhs) == COMPONENT_REF
9112 && (TREE_CODE (rhs) == BIT_IOR_EXPR
9113 || TREE_CODE (rhs) == BIT_AND_EXPR)
9114 && TREE_OPERAND (rhs, 0) == lhs
9115 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
9116 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
9117 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
9119 rtx label = gen_label_rtx ();
9120 int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
9121 do_jump (TREE_OPERAND (rhs, 1),
9122 value ? label : 0,
9123 value ? 0 : label);
9124 expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value),
9125 MOVE_NONTEMPORAL (exp));
9126 do_pending_stack_adjust ();
9127 emit_label (label);
9128 return const0_rtx;
9131 expand_assignment (lhs, rhs, MOVE_NONTEMPORAL (exp));
9132 return const0_rtx;
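/* For illustration, with a hypothetical struct s having 1-bit fields
   b1 and b2, the transformation above rewrites

       s.b1 |= s.b2;    as    if (s.b2) s.b1 = 1;
       s.b1 &= s.b2;    as    if (!s.b2) s.b1 = 0;

   so the destination bitfield is never extracted and reinserted; only
   a conditional jump around a constant store is emitted.  */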
9135 case RETURN_EXPR:
9136 if (!TREE_OPERAND (exp, 0))
9137 expand_null_return ();
9138 else
9139 expand_return (TREE_OPERAND (exp, 0));
9140 return const0_rtx;
9142 case ADDR_EXPR:
9143 return expand_expr_addr_expr (exp, target, tmode, modifier);
9145 case COMPLEX_EXPR:
9146 /* Get the rtx code of the operands. */
9147 op0 = expand_normal (TREE_OPERAND (exp, 0));
9148 op1 = expand_normal (TREE_OPERAND (exp, 1));
9150 if (!target)
9151 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
9153 /* Move the real (op0) and imaginary (op1) parts to their location. */
9154 write_complex_part (target, op0, false);
9155 write_complex_part (target, op1, true);
9157 return target;
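/* For illustration: a COMPLEX_EXPR merely pairs two scalars, so a
   hypothetical "z = a + b*I" becomes two ordinary stores, one to the
   real half of the target (op0) and one to the imaginary half (op1).  */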
9159 case REALPART_EXPR:
9160 op0 = expand_normal (TREE_OPERAND (exp, 0));
9161 return read_complex_part (op0, false);
9163 case IMAGPART_EXPR:
9164 op0 = expand_normal (TREE_OPERAND (exp, 0));
9165 return read_complex_part (op0, true);
9167 case RESX_EXPR:
9168 expand_resx_expr (exp);
9169 return const0_rtx;
9171 case TRY_CATCH_EXPR:
9172 case CATCH_EXPR:
9173 case EH_FILTER_EXPR:
9174 case TRY_FINALLY_EXPR:
9175 /* Lowered by tree-eh.c. */
9176 gcc_unreachable ();
9178 case WITH_CLEANUP_EXPR:
9179 case CLEANUP_POINT_EXPR:
9180 case TARGET_EXPR:
9181 case CASE_LABEL_EXPR:
9182 case VA_ARG_EXPR:
9183 case BIND_EXPR:
9184 case INIT_EXPR:
9185 case CONJ_EXPR:
9186 case COMPOUND_EXPR:
9187 case PREINCREMENT_EXPR:
9188 case PREDECREMENT_EXPR:
9189 case POSTINCREMENT_EXPR:
9190 case POSTDECREMENT_EXPR:
9191 case LOOP_EXPR:
9192 case EXIT_EXPR:
9193 case TRUTH_ANDIF_EXPR:
9194 case TRUTH_ORIF_EXPR:
9195 /* Lowered by gimplify.c. */
9196 gcc_unreachable ();
9198 case CHANGE_DYNAMIC_TYPE_EXPR:
9199 /* This is ignored at the RTL level. The tree level sets
9200 DECL_POINTER_ALIAS_SET of any affected variable to 0, which is
9201 overkill for the RTL layer but is all that we can
9202 represent. */
9203 return const0_rtx;
9205 case EXC_PTR_EXPR:
9206 return get_exception_pointer (cfun);
9208 case FILTER_EXPR:
9209 return get_exception_filter (cfun);
9211 case FDESC_EXPR:
9212 /* Function descriptors are not valid except as
9213 initialization constants, and should not be expanded. */
9214 gcc_unreachable ();
9216 case SWITCH_EXPR:
9217 expand_case (exp);
9218 return const0_rtx;
9220 case LABEL_EXPR:
9221 expand_label (TREE_OPERAND (exp, 0));
9222 return const0_rtx;
9224 case ASM_EXPR:
9225 expand_asm_expr (exp);
9226 return const0_rtx;
9228 case WITH_SIZE_EXPR:
9229 /* WITH_SIZE_EXPR expands to its first argument. The caller should
9230 have pulled out the size to use in whatever context it needed. */
9231 return expand_expr_real (TREE_OPERAND (exp, 0), original_target, tmode,
9232 modifier, alt_rtl);
9234 case REALIGN_LOAD_EXPR:
9236 tree oprnd0 = TREE_OPERAND (exp, 0);
9237 tree oprnd1 = TREE_OPERAND (exp, 1);
9238 tree oprnd2 = TREE_OPERAND (exp, 2);
9239 rtx op2;
9241 this_optab = optab_for_tree_code (code, type);
9242 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9243 op2 = expand_normal (oprnd2);
9244 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
9245 target, unsignedp);
9246 gcc_assert (temp);
9247 return temp;
9250 case DOT_PROD_EXPR:
9252 tree oprnd0 = TREE_OPERAND (exp, 0);
9253 tree oprnd1 = TREE_OPERAND (exp, 1);
9254 tree oprnd2 = TREE_OPERAND (exp, 2);
9255 rtx op2;
9257 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9258 op2 = expand_normal (oprnd2);
9259 target = expand_widen_pattern_expr (exp, op0, op1, op2,
9260 target, unsignedp);
9261 return target;
9264 case WIDEN_SUM_EXPR:
9266 tree oprnd0 = TREE_OPERAND (exp, 0);
9267 tree oprnd1 = TREE_OPERAND (exp, 1);
9269 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, 0);
9270 target = expand_widen_pattern_expr (exp, op0, NULL_RTX, op1,
9271 target, unsignedp);
9272 return target;
9275 case REDUC_MAX_EXPR:
9276 case REDUC_MIN_EXPR:
9277 case REDUC_PLUS_EXPR:
9279 op0 = expand_normal (TREE_OPERAND (exp, 0));
9280 this_optab = optab_for_tree_code (code, type);
9281 temp = expand_unop (mode, this_optab, op0, target, unsignedp);
9282 gcc_assert (temp);
9283 return temp;
9286 case VEC_EXTRACT_EVEN_EXPR:
9287 case VEC_EXTRACT_ODD_EXPR:
9289 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
9290 NULL_RTX, &op0, &op1, 0);
9291 this_optab = optab_for_tree_code (code, type);
9292 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
9293 OPTAB_WIDEN);
9294 gcc_assert (temp);
9295 return temp;
9298 case VEC_INTERLEAVE_HIGH_EXPR:
9299 case VEC_INTERLEAVE_LOW_EXPR:
9301 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
9302 NULL_RTX, &op0, &op1, 0);
9303 this_optab = optab_for_tree_code (code, type);
9304 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
9305 OPTAB_WIDEN);
9306 gcc_assert (temp);
9307 return temp;
9310 case VEC_LSHIFT_EXPR:
9311 case VEC_RSHIFT_EXPR:
9313 target = expand_vec_shift_expr (exp, target);
9314 return target;
9317 case VEC_UNPACK_HI_EXPR:
9318 case VEC_UNPACK_LO_EXPR:
9320 op0 = expand_normal (TREE_OPERAND (exp, 0));
9321 this_optab = optab_for_tree_code (code, type);
9322 temp = expand_widen_pattern_expr (exp, op0, NULL_RTX, NULL_RTX,
9323 target, unsignedp);
9324 gcc_assert (temp);
9325 return temp;
9328 case VEC_UNPACK_FLOAT_HI_EXPR:
9329 case VEC_UNPACK_FLOAT_LO_EXPR:
9331 op0 = expand_normal (TREE_OPERAND (exp, 0));
9332 /* The signedness is determined from the input operand. */
9333 this_optab = optab_for_tree_code (code,
9334 TREE_TYPE (TREE_OPERAND (exp, 0)));
9335 temp = expand_widen_pattern_expr
9336 (exp, op0, NULL_RTX, NULL_RTX,
9337 target, TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
9339 gcc_assert (temp);
9340 return temp;
9343 case VEC_WIDEN_MULT_HI_EXPR:
9344 case VEC_WIDEN_MULT_LO_EXPR:
9346 tree oprnd0 = TREE_OPERAND (exp, 0);
9347 tree oprnd1 = TREE_OPERAND (exp, 1);
9349 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, 0);
9350 target = expand_widen_pattern_expr (exp, op0, op1, NULL_RTX,
9351 target, unsignedp);
9352 gcc_assert (target);
9353 return target;
9356 case VEC_PACK_TRUNC_EXPR:
9357 case VEC_PACK_SAT_EXPR:
9358 case VEC_PACK_FIX_TRUNC_EXPR:
9360 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9361 goto binop;
9364 case OMP_ATOMIC_LOAD:
9365 case OMP_ATOMIC_STORE:
9366 /* OMP expansion is not run when there have been errors, so these
9367 codes can get here. */
9368 gcc_assert (errorcount != 0);
9369 return NULL_RTX;
9371 default:
9372 return lang_hooks.expand_expr (exp, original_target, tmode,
9373 modifier, alt_rtl);
9376 /* Here to do an ordinary binary operator. */
9377 binop:
9378 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
9379 subtarget, &op0, &op1, 0);
9380 binop2:
9381 this_optab = optab_for_tree_code (code, type);
9382 binop3:
9383 if (modifier == EXPAND_STACK_PARM)
9384 target = 0;
9385 temp = expand_binop (mode, this_optab, op0, op1, target,
9386 unsignedp, OPTAB_LIB_WIDEN);
9387 gcc_assert (temp);
9388 return REDUCE_BIT_FIELD (temp);
9390 #undef REDUCE_BIT_FIELD
9392 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
9393 signedness of TYPE), possibly returning the result in TARGET. */
9394 static rtx
9395 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
9397 HOST_WIDE_INT prec = TYPE_PRECISION (type);
9398 if (target && GET_MODE (target) != GET_MODE (exp))
9399 target = 0;
9400 /* For constant values, reduce using build_int_cst_type. */
9401 if (GET_CODE (exp) == CONST_INT)
9403 HOST_WIDE_INT value = INTVAL (exp);
9404 tree t = build_int_cst_type (type, value);
9405 return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
9407 else if (TYPE_UNSIGNED (type))
9409 rtx mask;
9410 if (prec < HOST_BITS_PER_WIDE_INT)
9411 mask = immed_double_const (((unsigned HOST_WIDE_INT) 1 << prec) - 1, 0,
9412 GET_MODE (exp));
9413 else
9414 mask = immed_double_const ((unsigned HOST_WIDE_INT) -1,
9415 ((unsigned HOST_WIDE_INT) 1
9416 << (prec - HOST_BITS_PER_WIDE_INT)) - 1,
9417 GET_MODE (exp));
9418 return expand_and (GET_MODE (exp), exp, mask, target);
9420 else
9422 tree count = build_int_cst (NULL_TREE,
9423 GET_MODE_BITSIZE (GET_MODE (exp)) - prec);
9424 exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
9425 return expand_shift (RSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
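/* For illustration: reducing a 32-bit value X to a hypothetical 5-bit
   precision emits

       unsigned type:  X & 0x1f
       signed type:    ((int) (X << 27)) >> 27

   the latter using an arithmetic right shift so that the result is
   sign-extended from bit 4.  */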
9429 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that,
9430 when applied to the address of EXP, produces an address known to be
9431 aligned to more than BIGGEST_ALIGNMENT. */
9433 static int
9434 is_aligning_offset (const_tree offset, const_tree exp)
9436 /* Strip off any conversions. */
9437 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9438 || TREE_CODE (offset) == NOP_EXPR
9439 || TREE_CODE (offset) == CONVERT_EXPR)
9440 offset = TREE_OPERAND (offset, 0);
9442 /* We must now have a BIT_AND_EXPR with a constant that is one less than
9443 a power of 2 and which is larger than BIGGEST_ALIGNMENT. */
9444 if (TREE_CODE (offset) != BIT_AND_EXPR
9445 || !host_integerp (TREE_OPERAND (offset, 1), 1)
9446 || compare_tree_int (TREE_OPERAND (offset, 1),
9447 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
9448 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
9449 return 0;
9451 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9452 It must be a NEGATE_EXPR. Then strip any more conversions. */
9453 offset = TREE_OPERAND (offset, 0);
9454 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9455 || TREE_CODE (offset) == NOP_EXPR
9456 || TREE_CODE (offset) == CONVERT_EXPR)
9457 offset = TREE_OPERAND (offset, 0);
9459 if (TREE_CODE (offset) != NEGATE_EXPR)
9460 return 0;
9462 offset = TREE_OPERAND (offset, 0);
9463 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9464 || TREE_CODE (offset) == NOP_EXPR
9465 || TREE_CODE (offset) == CONVERT_EXPR)
9466 offset = TREE_OPERAND (offset, 0);
9468 /* This must now be the address of EXP. */
9469 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
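/* For illustration: the shape recognized above is the standard
   round-up-to-alignment offset, e.g. (with ALIGN a hypothetical power
   of 2 larger than BIGGEST_ALIGNMENT)

       offset = (-(intptr_t) &exp) & (ALIGN - 1);

   i.e. a BIT_AND_EXPR whose first operand is, conversions aside, a
   NEGATE_EXPR of the ADDR_EXPR of EXP, and whose second operand is
   the mask ALIGN - 1.  */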
9472 /* Return the tree node if ARG corresponds to a string constant, or zero
9473 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
9474 in bytes within the string that ARG is accessing. The type of the
9475 offset will be `sizetype'. */
9477 tree
9478 string_constant (tree arg, tree *ptr_offset)
9480 tree array, offset, lower_bound;
9481 STRIP_NOPS (arg);
9483 if (TREE_CODE (arg) == ADDR_EXPR)
9485 if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9487 *ptr_offset = size_zero_node;
9488 return TREE_OPERAND (arg, 0);
9490 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
9492 array = TREE_OPERAND (arg, 0);
9493 offset = size_zero_node;
9495 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
9497 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
9498 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
9499 if (TREE_CODE (array) != STRING_CST
9500 && TREE_CODE (array) != VAR_DECL)
9501 return 0;
9503 /* Check if the array has a nonzero lower bound. */
9504 lower_bound = array_ref_low_bound (TREE_OPERAND (arg, 0));
9505 if (!integer_zerop (lower_bound))
9507 /* If the offset and base aren't both constants, return 0. */
9508 if (TREE_CODE (lower_bound) != INTEGER_CST)
9509 return 0;
9510 if (TREE_CODE (offset) != INTEGER_CST)
9511 return 0;
9512 /* Adjust offset by the lower bound. */
9513 offset = size_diffop (fold_convert (sizetype, offset),
9514 fold_convert (sizetype, lower_bound));
9517 else
9518 return 0;
9520 else if (TREE_CODE (arg) == PLUS_EXPR || TREE_CODE (arg) == POINTER_PLUS_EXPR)
9522 tree arg0 = TREE_OPERAND (arg, 0);
9523 tree arg1 = TREE_OPERAND (arg, 1);
9525 STRIP_NOPS (arg0);
9526 STRIP_NOPS (arg1);
9528 if (TREE_CODE (arg0) == ADDR_EXPR
9529 && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
9530 || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
9532 array = TREE_OPERAND (arg0, 0);
9533 offset = arg1;
9535 else if (TREE_CODE (arg1) == ADDR_EXPR
9536 && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
9537 || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
9539 array = TREE_OPERAND (arg1, 0);
9540 offset = arg0;
9542 else
9543 return 0;
9545 else
9546 return 0;
9548 if (TREE_CODE (array) == STRING_CST)
9550 *ptr_offset = fold_convert (sizetype, offset);
9551 return array;
9553 else if (TREE_CODE (array) == VAR_DECL)
9555 int length;
9557 /* Variables initialized to string literals can be handled too. */
9558 if (DECL_INITIAL (array) == NULL_TREE
9559 || TREE_CODE (DECL_INITIAL (array)) != STRING_CST)
9560 return 0;
9562 /* Only accept variables that are read-only, non-volatile and bind locally. */
9563 if (! TREE_READONLY (array)
9564 || TREE_SIDE_EFFECTS (array)
9565 || ! targetm.binds_local_p (array))
9566 return 0;
9568 /* Avoid const char foo[4] = "abcde"; */
9569 if (DECL_SIZE_UNIT (array) == NULL_TREE
9570 || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
9571 || (length = TREE_STRING_LENGTH (DECL_INITIAL (array))) <= 0
9572 || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
9573 return 0;
9575 /* If the variable is bigger than the string literal, OFFSET must be
9576 constant and within the bounds of the string literal. */
9577 offset = fold_convert (sizetype, offset);
9578 if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
9579 && (! host_integerp (offset, 1)
9580 || compare_tree_int (offset, length) >= 0))
9581 return 0;
9583 *ptr_offset = offset;
9584 return DECL_INITIAL (array);
9587 return 0;
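/* For illustration, given a hypothetical "static const char s[] =
   "abc";", forms this function recognizes include

       &"abc"[1]     -- ADDR_EXPR of an ARRAY_REF; *PTR_OFFSET = 1
       "abc" + 2     -- (POINTER_)PLUS_EXPR; *PTR_OFFSET = 2
       s + 1         -- VAR_DECL whose DECL_INITIAL is a STRING_CST

   subject, in the VAR_DECL case, to the read-only, locally-bound and
   size checks above.  */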
9590 /* Generate code to calculate EXP using a store-flag instruction
9591 and return an rtx for the result. EXP is either a comparison
9592 or a TRUTH_NOT_EXPR whose operand is a comparison.
9594 If TARGET is nonzero, store the result there if convenient.
9596 If ONLY_CHEAP is nonzero, only do this if it is likely to be very
9597 cheap.
9599 Return zero if there is no suitable set-flag instruction
9600 available on this machine.
9602 Once expand_expr has been called on the arguments of the comparison,
9603 we are committed to doing the store flag, since it is not safe to
9604 re-evaluate the expression. We emit the store-flag insn by calling
9605 emit_store_flag, but only expand the arguments if we have a reason
9606 to believe that emit_store_flag will be successful. If we think that
9607 it will, but it isn't, we have to simulate the store-flag with a
9608 set/jump/set sequence. */
9610 static rtx
9611 do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
9613 enum rtx_code code;
9614 tree arg0, arg1, type;
9615 tree tem;
9616 enum machine_mode operand_mode;
9617 int invert = 0;
9618 int unsignedp;
9619 rtx op0, op1;
9620 enum insn_code icode;
9621 rtx subtarget = target;
9622 rtx result, label;
9624 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9625 result at the end. We can't simply invert the test since it would
9626 have already been inverted if it were valid. This case occurs for
9627 some floating-point comparisons. */
9629 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
9630 invert = 1, exp = TREE_OPERAND (exp, 0);
9632 arg0 = TREE_OPERAND (exp, 0);
9633 arg1 = TREE_OPERAND (exp, 1);
9635 /* Don't crash if the comparison was erroneous. */
9636 if (arg0 == error_mark_node || arg1 == error_mark_node)
9637 return const0_rtx;
9639 type = TREE_TYPE (arg0);
9640 operand_mode = TYPE_MODE (type);
9641 unsignedp = TYPE_UNSIGNED (type);
9643 /* We won't bother with BLKmode store-flag operations because it would mean
9644 passing a lot of information to emit_store_flag. */
9645 if (operand_mode == BLKmode)
9646 return 0;
9648 /* We won't bother with store-flag operations involving function pointers
9649 when function pointers must be canonicalized before comparisons. */
9650 #ifdef HAVE_canonicalize_funcptr_for_compare
9651 if (HAVE_canonicalize_funcptr_for_compare
9652 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9653 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9654 == FUNCTION_TYPE))
9655 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9656 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9657 == FUNCTION_TYPE))))
9658 return 0;
9659 #endif
9661 STRIP_NOPS (arg0);
9662 STRIP_NOPS (arg1);
9664 /* Get the rtx comparison code to use. We know that EXP is a comparison
9665 operation of some type. Some comparisons against 1 and -1 can be
9666 converted to comparisons with zero. Do so here so that the tests
9667 below will be aware that we have a comparison with zero. These
9668 tests will not catch constants in the first operand, but constants
9669 are rarely passed as the first operand. */
9671 switch (TREE_CODE (exp))
9673 case EQ_EXPR:
9674 code = EQ;
9675 break;
9676 case NE_EXPR:
9677 code = NE;
9678 break;
9679 case LT_EXPR:
9680 if (integer_onep (arg1))
9681 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
9682 else
9683 code = unsignedp ? LTU : LT;
9684 break;
9685 case LE_EXPR:
9686 if (! unsignedp && integer_all_onesp (arg1))
9687 arg1 = integer_zero_node, code = LT;
9688 else
9689 code = unsignedp ? LEU : LE;
9690 break;
9691 case GT_EXPR:
9692 if (! unsignedp && integer_all_onesp (arg1))
9693 arg1 = integer_zero_node, code = GE;
9694 else
9695 code = unsignedp ? GTU : GT;
9696 break;
9697 case GE_EXPR:
9698 if (integer_onep (arg1))
9699 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
9700 else
9701 code = unsignedp ? GEU : GE;
9702 break;
9704 case UNORDERED_EXPR:
9705 code = UNORDERED;
9706 break;
9707 case ORDERED_EXPR:
9708 code = ORDERED;
9709 break;
9710 case UNLT_EXPR:
9711 code = UNLT;
9712 break;
9713 case UNLE_EXPR:
9714 code = UNLE;
9715 break;
9716 case UNGT_EXPR:
9717 code = UNGT;
9718 break;
9719 case UNGE_EXPR:
9720 code = UNGE;
9721 break;
9722 case UNEQ_EXPR:
9723 code = UNEQ;
9724 break;
9725 case LTGT_EXPR:
9726 code = LTGT;
9727 break;
9729 default:
9730 gcc_unreachable ();
9733 /* Put a constant second. */
9734 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST
9735 || TREE_CODE (arg0) == FIXED_CST)
9737 tem = arg0; arg0 = arg1; arg1 = tem;
9738 code = swap_condition (code);
9741 /* If this is an equality or inequality test of a single bit, we can
9742 do this by shifting the bit being tested to the low-order bit and
9743 masking the result with the constant 1. If the condition was EQ,
9744 we xor it with 1. This does not require an scc insn and is faster
9745 than an scc insn even if we have it.
9747 The code to make this transformation was moved into fold_single_bit_test,
9748 so we just call into the folder and expand its result. */
9750 if ((code == NE || code == EQ)
9751 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
9752 && integer_pow2p (TREE_OPERAND (arg0, 1)))
9754 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
9755 return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
9756 arg0, arg1, type),
9757 target, VOIDmode, EXPAND_NORMAL);
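/* For illustration: fold_single_bit_test rewrites, e.g.,

       (x & 8) != 0    as    (x >> 3) & 1
       (x & 8) == 0    as    ((x >> 3) & 1) ^ 1

   so the flag is computed with a shift and a mask rather than with a
   store-flag (scc) instruction.  */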
9760 /* Now see if we are likely to be able to do this. Return if not. */
9761 if (! can_compare_p (code, operand_mode, ccp_store_flag))
9762 return 0;
9764 icode = setcc_gen_code[(int) code];
9766 if (icode == CODE_FOR_nothing)
9768 enum machine_mode wmode;
9770 for (wmode = operand_mode;
9771 icode == CODE_FOR_nothing && wmode != VOIDmode;
9772 wmode = GET_MODE_WIDER_MODE (wmode))
9773 icode = optab_handler (cstore_optab, wmode)->insn_code;
9776 if (icode == CODE_FOR_nothing
9777 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
9779 /* We can only do this if it is one of the special cases that
9780 can be handled without an scc insn. */
9781 if ((code == LT && integer_zerop (arg1))
9782 || (! only_cheap && code == GE && integer_zerop (arg1)))
9784 else if (! only_cheap && (code == NE || code == EQ)
9785 && TREE_CODE (type) != REAL_TYPE
9786 && ((optab_handler (abs_optab, operand_mode)->insn_code
9787 != CODE_FOR_nothing)
9788 || (optab_handler (ffs_optab, operand_mode)->insn_code
9789 != CODE_FOR_nothing)))
9791 else
9792 return 0;
9795 if (! get_subtarget (target)
9796 || GET_MODE (subtarget) != operand_mode)
9797 subtarget = 0;
9799 expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);
9801 if (target == 0)
9802 target = gen_reg_rtx (mode);
9804 result = emit_store_flag (target, code, op0, op1,
9805 operand_mode, unsignedp, 1);
9807 if (result)
9809 if (invert)
9810 result = expand_binop (mode, xor_optab, result, const1_rtx,
9811 result, 0, OPTAB_LIB_WIDEN);
9812 return result;
9815 /* If this failed, we have to do it with a set/compare/jump/set sequence. */
9816 if (!REG_P (target)
9817 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
9818 target = gen_reg_rtx (GET_MODE (target));
9820 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
9821 label = gen_label_rtx ();
9822 do_compare_rtx_and_jump (op0, op1, code, unsignedp, operand_mode, NULL_RTX,
9823 NULL_RTX, label);
9825 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
9826 emit_label (label);
9828 return target;
9832 /* Stubs in case we haven't got a casesi insn. */
9833 #ifndef HAVE_casesi
9834 # define HAVE_casesi 0
9835 # define gen_casesi(a, b, c, d, e) (0)
9836 # define CODE_FOR_casesi CODE_FOR_nothing
9837 #endif
9839 /* If the machine does not have a case insn that compares the bounds,
9840 this means extra overhead for dispatch tables, which raises the
9841 threshold for using them. */
9842 #ifndef CASE_VALUES_THRESHOLD
9843 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
9844 #endif /* CASE_VALUES_THRESHOLD */
9846 unsigned int
9847 case_values_threshold (void)
9849 return CASE_VALUES_THRESHOLD;
9852 /* Attempt to generate a casesi instruction. Returns 1 if successful,
9853 0 otherwise (i.e. if there is no casesi instruction). */
9854 int
9855 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
9856 rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
9858 enum machine_mode index_mode = SImode;
9859 int index_bits = GET_MODE_BITSIZE (index_mode);
9860 rtx op1, op2, index;
9861 enum machine_mode op_mode;
9863 if (! HAVE_casesi)
9864 return 0;
9866 /* Convert the index to SImode. */
9867 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
9869 enum machine_mode omode = TYPE_MODE (index_type);
9870 rtx rangertx = expand_normal (range);
9872 /* We must handle the endpoints in the original mode. */
9873 index_expr = build2 (MINUS_EXPR, index_type,
9874 index_expr, minval);
9875 minval = integer_zero_node;
9876 index = expand_normal (index_expr);
9877 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
9878 omode, 1, default_label);
9879 /* Now we can safely truncate. */
9880 index = convert_to_mode (index_mode, index, 0);
9882 else
9884 if (TYPE_MODE (index_type) != index_mode)
9886 index_type = lang_hooks.types.type_for_size (index_bits, 0);
9887 index_expr = fold_convert (index_type, index_expr);
9890 index = expand_normal (index_expr);
9893 do_pending_stack_adjust ();
9895 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
9896 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
9897 (index, op_mode))
9898 index = copy_to_mode_reg (op_mode, index);
9900 op1 = expand_normal (minval);
9902 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
9903 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
9904 op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
9905 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
9906 (op1, op_mode))
9907 op1 = copy_to_mode_reg (op_mode, op1);
9909 op2 = expand_normal (range);
9911 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
9912 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
9913 op2, TYPE_UNSIGNED (TREE_TYPE (range)));
9914 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
9915 (op2, op_mode))
9916 op2 = copy_to_mode_reg (op_mode, op2);
9918 emit_jump_insn (gen_casesi (index, op1, op2,
9919 table_label, default_label));
9920 return 1;
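/* For illustration: a successful try_casesi behaves like

       if ((unsigned) (index - minval) > range)
         goto default_label;
       goto *table[index - minval];

   with the bounds check and the table dispatch folded into the single
   casesi instruction.  */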
9923 /* Attempt to generate a tablejump instruction; same concept. */
9924 #ifndef HAVE_tablejump
9925 #define HAVE_tablejump 0
9926 #define gen_tablejump(x, y) (0)
9927 #endif
9929 /* Subroutine of the next function.
9931 INDEX is the value being switched on, with the lowest value
9932 in the table already subtracted.
9933 MODE is its expected mode (needed if INDEX is constant).
9934 RANGE is the length of the jump table.
9935 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
9937 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
9938 index value is out of range. */
9940 static void
9941 do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
9942 rtx default_label)
9944 rtx temp, vector;
9946 if (INTVAL (range) > cfun->max_jumptable_ents)
9947 cfun->max_jumptable_ents = INTVAL (range);
9949 /* Do an unsigned comparison (in the proper mode) between the index
9950 expression and the value which represents the length of the range.
9951 Since we just finished subtracting the lower bound of the range
9952 from the index expression, this comparison allows us to simultaneously
9953 check that the original index expression value is both greater than
9954 or equal to the minimum value of the range and less than or equal to
9955 the maximum value of the range. */
9957 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
9958 default_label);
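/* For illustration: with the lower bound already subtracted, a single
   unsigned comparison checks both ends of the range, because values
   below the minimum wrap around to huge unsigned numbers:

       lo <= x && x <= hi    holds iff    (unsigned) (x - lo) <= hi - lo

   hence the lone GTU branch to DEFAULT_LABEL above.  */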
9960 /* If index is in range, it must fit in Pmode.
9961 Convert to Pmode so we can index with it. */
9962 if (mode != Pmode)
9963 index = convert_to_mode (Pmode, index, 1);
9965 /* Don't let a MEM slip through, because then INDEX that comes
9966 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
9967 and break_out_memory_refs will go to work on it and mess it up. */
9968 #ifdef PIC_CASE_VECTOR_ADDRESS
9969 if (flag_pic && !REG_P (index))
9970 index = copy_to_mode_reg (Pmode, index);
9971 #endif
9973 /* If flag_force_addr were to affect this address
9974 it could interfere with the tricky assumptions made
9975 about addresses that contain label-refs,
9976 which may be valid only very near the tablejump itself. */
9977 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
9978 GET_MODE_SIZE, because this indicates how large insns are. The other
9979 uses should all be Pmode, because they are addresses. This code
9980 could fail if addresses and insns are not the same size. */
9981 index = gen_rtx_PLUS (Pmode,
9982 gen_rtx_MULT (Pmode, index,
9983 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
9984 gen_rtx_LABEL_REF (Pmode, table_label));
9985 #ifdef PIC_CASE_VECTOR_ADDRESS
9986 if (flag_pic)
9987 index = PIC_CASE_VECTOR_ADDRESS (index);
9988 else
9989 #endif
9990 index = memory_address_noforce (CASE_VECTOR_MODE, index);
9991 temp = gen_reg_rtx (CASE_VECTOR_MODE);
9992 vector = gen_const_mem (CASE_VECTOR_MODE, index);
9993 convert_move (temp, vector, 0);
9995 emit_jump_insn (gen_tablejump (temp, table_label));
9997 /* If we are generating PIC code or if the table is PC-relative, the
9998 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
9999 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10000 emit_barrier ();
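/* For illustration, in the non-PIC case the address computed above is

       entry = table_label + index * GET_MODE_SIZE (CASE_VECTOR_MODE);

   and the jump target is the label reference loaded from that entry,
   converted to Pmode when CASE_VECTOR_MODE is narrower.  */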
10003 int
10004 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
10005 rtx table_label, rtx default_label)
10007 rtx index;
10009 if (! HAVE_tablejump)
10010 return 0;
10012 index_expr = fold_build2 (MINUS_EXPR, index_type,
10013 fold_convert (index_type, index_expr),
10014 fold_convert (index_type, minval));
10015 index = expand_normal (index_expr);
10016 do_pending_stack_adjust ();
10018 do_tablejump (index, TYPE_MODE (index_type),
10019 convert_modes (TYPE_MODE (index_type),
10020 TYPE_MODE (TREE_TYPE (range)),
10021 expand_normal (range),
10022 TYPE_UNSIGNED (TREE_TYPE (range))),
10023 table_label, default_label);
10024 return 1;
10027 /* Nonzero if the mode is a valid vector mode for this architecture.
10028 This returns nonzero even if there is no hardware support for the
10029 vector mode, but we can emulate with narrower modes. */
10031 int
10032 vector_mode_valid_p (enum machine_mode mode)
10034 enum mode_class class = GET_MODE_CLASS (mode);
10035 enum machine_mode innermode;
10037 /* Doh! What's going on? */
10038 if (class != MODE_VECTOR_INT
10039 && class != MODE_VECTOR_FLOAT
10040 && class != MODE_VECTOR_FRACT
10041 && class != MODE_VECTOR_UFRACT
10042 && class != MODE_VECTOR_ACCUM
10043 && class != MODE_VECTOR_UACCUM)
10044 return 0;
10046 /* Hardware support. Woo hoo! */
10047 if (targetm.vector_mode_supported_p (mode))
10048 return 1;
10050 innermode = GET_MODE_INNER (mode);
10052 /* We should probably return 1 if V4DI is requested and we have no DI
10053 but do have V2DI, but that case is probably very unlikely. */
10055 /* If we have support for the inner mode, we can safely emulate it.
10056 We may not have V2DI, but we can emulate it with a pair of DIs. */
10057 return targetm.scalar_mode_supported_p (innermode);
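/* For illustration: a request for a hypothetical V2DI mode on a target
   without V2DI hardware still counts as valid here when DImode is
   supported, since each V2DI operation can be emulated by two DImode
   operations on the vector's halves.  */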
10060 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
10061 static rtx
10062 const_vector_from_tree (tree exp)
10064 rtvec v;
10065 int units, i;
10066 tree link, elt;
10067 enum machine_mode inner, mode;
10069 mode = TYPE_MODE (TREE_TYPE (exp));
10071 if (initializer_zerop (exp))
10072 return CONST0_RTX (mode);
10074 units = GET_MODE_NUNITS (mode);
10075 inner = GET_MODE_INNER (mode);
10077 v = rtvec_alloc (units);
10079 link = TREE_VECTOR_CST_ELTS (exp);
10080 for (i = 0; link; link = TREE_CHAIN (link), ++i)
10082 elt = TREE_VALUE (link);
10084 if (TREE_CODE (elt) == REAL_CST)
10085 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
10086 inner);
10087 else if (TREE_CODE (elt) == FIXED_CST)
10088 RTVEC_ELT (v, i) = CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (elt),
10089 inner);
10090 else
10091 RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
10092 TREE_INT_CST_HIGH (elt),
10093 inner);
10096 /* Initialize remaining elements to 0. */
10097 for (; i < units; ++i)
10098 RTVEC_ELT (v, i) = CONST0_RTX (inner);
10100 return gen_rtx_CONST_VECTOR (mode, v);
10102 #include "gt-expr.h"